diff --git a/.gitignore b/.gitignore
index c11bd69955f1624a10ca36bdea9898d7e29e24b4..0235d7090c43b8f7b00253c49ba51aa3a9564fb5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,11 @@
 *.pyc
 .*.swp
 *~
+__pycache__
+ubuntu-bionic-18.04-cloudimg-console.log
+.vagrant
+jupyter_openbis_extension.egg-info
+venv
+.ipynb_checkpoints
+.DS_Store
+*.ipynb
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000000000000000000000000000000000000..a6b9eff7d081fe75d77cee5623105348ac43cc44
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,20 @@
+## new in jupyter-openbis-extension 0.1.1
+
+- first version that cleanly works with pip install only
+- bugfixes in setup.py
+- typos in .json files in jupyter-config
+- CHANGELOG added
+
+## new in jupyter-openbis-extension 0.1.0
+
+- separation of connection, down- and upload
+- upload lets you specify dataset type
+- dataset type specific input fields (properties)
+- input control for all properties
+- checks for missing properties
+- downloaded datasets are new parent datasets by default
+- simplified way to upload files as part of the new dataset
+
+## new in jupyter-openbis-extension 0.0.1
+
+- first published version
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/README.md b/README.md
index 2988e5d682d2e9df26ea3a1787dd617db8a21a6b..130717de33680951cac6d2625826828d601903c6 100644
--- a/README.md
+++ b/README.md
@@ -4,6 +4,28 @@
 
 The jupyter-openbis-extension needs at least **Python 3.3** in order to run. The Jupyter notebook server starts this extension during startup and must therefore run under the same Python version. The kernel can be anything (Python 2.7, Julia, R, Perl...)
 
+This extension has been successfully tested with Safari 12.0.3, Chrome 72.0 and Firefox 66.0. There is a known incompatibility with Firefox versions earlier than 61.0b13.
+
+## Development with Vagrant
+
+If you want to use a predefined development environment, follow these steps:
+
+1. Install latest version of VirtualBox (<https://www.virtualbox.org>)
+
+2. Install latest version of Vagrant (<https://www.vagrantup.com/downloads.html>)
+
+3. vagrant plugin install vagrant-vbguest vagrant-notify-forwarder vagrant-disksize
+ 
+4. cd vagrant
+
+5. vagrant up
+
+After the setup is complete, you'll have
+
+* Jupyter with openBIS extension running at <http://localhost:8888>.
+* openBIS running at <http://localhost:8122>, with credentials admin/password.
+* Experiment /DEFAULT/DEFAULT with a lot of datasets for testing.
+
 ## Install the extension
 
 If you haven't done yet: download the latest jupyter-openbis-extension from pip. It will automatically install the various dependencies, e.g. Pandas and NumPy.
@@ -12,6 +34,19 @@ If you haven't done yet: download the latest jupyter-openbis-extension from pip.
 pip install --upgrade jupyter-openbis-extension
 ```
 
+or, if you would like to develop:
+
+```
+$ git clone git@sissource.ethz.ch:sispub/jupyter-openbis-extension.git
+$ cd jupyter-openbis-extension
+$ virtualenv venv
+$ source venv/bin/activate
+(venv) $ pip install -e .
+(venv) $ jupyter serverextension enable --py jupyter-openbis-extension
+(venv) $ jupyter nbextension install --py jupyter-openbis-extension --user --symlink
+(venv) $ jupyter nbextension enable jupyter-openbis-extension --user --py
+```
+
 **Register the Jupyter server extension** which will communicate both with openBIS and the notebook:
 
 ```
@@ -50,20 +85,37 @@ connections:
       username            : username
       password            : password
 ```
+**Note 1**: You do not need the usernames or passwords anymore. With the current version, you are able to enter username and password directly from within a Jupyter notebook.
+
+**Note 2**: You can place this file in any of these directories (on Mac OS X):
+
+```
+/Users/your_username/jupyter-openbis-extension/notebooks
+/Users/your_username/.jupyter
+/Users/your_username/.pyenv/versions/3.6.0/etc/jupyter  # or wherever your jupyter installation is located
+/usr/local/etc/jupyter
+/etc/jupyter
+```
+These directories are produced by Jupyter's `nb_server_app.config_file_paths` method.
+
+## Launching Jupyter notebook
 
-Now you are **ready to launch!**
+Now you are **ready to launch jupyter notebook!**
 
 ```
 $ jupyter notebook
 ```
-Observe the terminal. It should tell you that the server(s) have been successfully connected: 
+Observe the terminal. It should tell you which server(s) have been successfully connected (and which have not):
 
 ```
 $ jupyter notebook
-connected to TEST local openBIS instance
-connected to PRODUCTION openBIS instance
+Registered: https://localhost:8443
+Cannot establish connection to: https://localhost:8443
+Registered: https://openbis.example.com
+Successfully connected to: https://openbis.example.com
 ```
-**Congratulations!** 
+**Congratulations!** You can retry non-successful connections later, directly from the GUI.
+
 
 ## Uninstall Jupyter extension
 
@@ -142,4 +194,4 @@ Known nbextensions:
     notebook section
       jupyter-openbis-extension/dialog  enabled 
       - Validating: OK
-```
\ No newline at end of file
+```
diff --git a/jupyter-config/jupyter_notebook_config.d/jupyter_openbis_extension.json b/jupyter-config/jupyter_notebook_config.d/jupyter_openbis_extension.json
new file mode 100644
index 0000000000000000000000000000000000000000..501e38c9704dcd0325c7fa818d07a68d51e1873f
--- /dev/null
+++ b/jupyter-config/jupyter_notebook_config.d/jupyter_openbis_extension.json
@@ -0,0 +1,7 @@
+{
+  "NotebookApp": {
+    "nbserver_extensions": {
+      "jupyter-openbis-extension.server": true
+    }
+  }
+}
diff --git a/jupyter-config/nbconfig/notebook.d/jupyter_openbis_extension.json b/jupyter-config/nbconfig/notebook.d/jupyter_openbis_extension.json
new file mode 100644
index 0000000000000000000000000000000000000000..58099bb242893414e23628044cf28ae431dc8589
--- /dev/null
+++ b/jupyter-config/nbconfig/notebook.d/jupyter_openbis_extension.json
@@ -0,0 +1,5 @@
+{
+  "load_extensions": {
+    "jupyter-openbis-extension/main": true
+  }
+}
diff --git a/jupyter-openbis-extension/__init__.py b/jupyter-openbis-extension/__init__.py
index 22b2aabac22028215549cb4634d33428f05c7124..d83477046abe1a933d40e4b4800f5bb73093e7f3 100644
--- a/jupyter-openbis-extension/__init__.py
+++ b/jupyter-openbis-extension/__init__.py
@@ -1,7 +1,7 @@
 name = 'jupyter-openbis-extension.server'
 __author__ = 'Swen Vermeul'
 __email__ = 'swen@ethz.ch'
-__version__ = '0.0.2'
+__version__ = '0.1.1'
 
 def _jupyter_server_extension_paths():
     return [{
@@ -19,7 +19,7 @@ def _jupyter_nbextension_paths():
     #    # directory in the `nbextension/` namespace
         'dest': "openbis",
         # _also_ in the `nbextension/` namespace
-        'require' : "openbis/dialog"
+        'require' : "openbis/main"
     }]
 
 def load_jupyter_server_extension(nbapp):
diff --git a/jupyter-openbis-extension/__pycache__/extension.cpython-36.pyc b/jupyter-openbis-extension/__pycache__/extension.cpython-36.pyc
deleted file mode 100644
index d5f03417cffd6d3922495cb9dca6bf4efab96353..0000000000000000000000000000000000000000
Binary files a/jupyter-openbis-extension/__pycache__/extension.cpython-36.pyc and /dev/null differ
diff --git a/jupyter-openbis-extension/connection.py b/jupyter-openbis-extension/connection.py
new file mode 100644
index 0000000000000000000000000000000000000000..be36cbb321ce71e2b164d9d06762937d18912706
--- /dev/null
+++ b/jupyter-openbis-extension/connection.py
@@ -0,0 +1,167 @@
+import os
+from pybis import Openbis
+from notebook.base.handlers import IPythonHandler
+
+openbis_connections = {}
+
+def register_connection(connection_info):
+
+    conn = OpenBISConnection(
+        name                = connection_info.get('name'),
+        url                 = connection_info.get('url'),
+        verify_certificates = connection_info.get('verify_certificates', False),
+        username            = connection_info.get('username'),
+        password            = connection_info.get('password'),
+        status              = 'not connected',
+    )
+    openbis_connections[conn.name] = conn
+    return conn
+
+
+class OpenBISConnection:
+    """register an openBIS connection
+    """
+
+    def __init__(self, **kwargs):
+        for needed_key in ['name', 'url']:
+            if needed_key not in kwargs:
+                raise KeyError("{} is missing".format(needed_key))
+
+        for key in kwargs:
+            setattr(self, key, kwargs[key])
+
+        openbis = Openbis(
+            url = self.url,
+            verify_certificates = self.verify_certificates
+        )
+        self.openbis = openbis
+        self.status = "not connected"
+
+    def is_session_active(self):
+        return self.openbis.is_session_active()
+
+    def check_status(self):
+        if self.openbis.is_session_active():
+            self.status = "connected"
+        else:
+            self.status = "not connected"
+
+    def login(self, username=None, password=None):
+        if username is None:
+            username=self.username
+        if password is None:
+            password=self.password
+        self.openbis.login(
+            username = username,
+            password = password
+        )
+        # store username and password in memory
+        self.username = username
+        self.password = password
+        self.status  = 'connected'
+
+    def get_info(self):
+        return {
+            'name'    : self.name,
+            'url'     : self.url,
+            'status'  : self.status,
+            'username': self.username,
+            'password': self.password,
+        }
+
+class OpenBISConnections(IPythonHandler):
+
+    def post(self):
+        """create a new connection
+
+        :return: a new connection object
+        """
+        data = self.get_json_body()
+        conn = register_connection(data)
+        if conn.username and conn.password:
+            try:
+                conn.login()
+            except Exception:
+                pass
+        self.get()
+        return
+
+    def get(self):
+        """returns all available openBIS connections
+        """
+
+        connections= []
+        for conn in openbis_connections.values():
+            conn.check_status()
+            connections.append(conn.get_info())
+
+        self.write({
+            'status'     : 200,
+            'connections': connections,
+            'cwd'        : os.getcwd()
+        })
+        return
+
+
+class OpenBISConnectionHandler(IPythonHandler):
+    """Handle the requests to /openbis/conn
+    """
+
+    def put(self, connection_name):
+        """reconnect to a current connection
+        :return: an updated connection object
+        """
+        data = self.get_json_body()
+
+        try:
+            conn = openbis_connections[connection_name]
+        except KeyError:
+            self.set_status(404)
+            self.write({
+                "reason" : 'No such connection: {}'.format(data)
+            })
+            return
+
+        try:
+            conn.login(data.get('username'), data.get('password'))
+        except ConnectionError:
+            self.set_status(500)
+            self.write({
+                "reason": "Could not establish connection to {}".format(connection_name)
+            })
+            return
+        except ValueError:
+            self.set_status(401)
+            self.write({
+                "reason": "Incorrect username or password for {}".format(connection_name)
+            })
+            return
+
+        self.write({
+            'status'     : 200,
+            'connection': conn.get_info(),
+            'cwd'        : os.getcwd()
+        })
+
+    def get(self, connection_name):
+        """returns  information about a connection name
+        """
+
+        try:
+            conn = openbis_connections[connection_name]
+        except KeyError:
+            self.set_status(404)
+            self.write({
+                "reason" : 'No such connection: {}'.format(connection_name)
+            })
+            return
+
+        conn.check_status()
+
+        self.write({
+            'status'     : 200,
+            'connection': conn.get_info(),
+            'cwd'        : os.getcwd()
+        })
+        return
+
diff --git a/jupyter-openbis-extension/dataset.py b/jupyter-openbis-extension/dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a656c74d94a806a82ff8216b7dd1937f4a56d42
--- /dev/null
+++ b/jupyter-openbis-extension/dataset.py
@@ -0,0 +1,294 @@
+import os
+from urllib.parse import unquote
+from notebook.base.handlers import IPythonHandler
+from .connection import openbis_connections
+
+class DataSetDownloadHandler(IPythonHandler):
+    """Handle the requests for /openbis/dataset/connection/permId"""
+
+
+    def download_data(self, conn, permId, downloadPath=None):
+        if not conn.is_session_active():
+            try:
+                conn.login()
+            except Exception as exc:
+                self.set_status(500)
+                self.write({
+                    "reason" : 'connection to {} could not be established: {}'.format(conn.name, exc)
+                })
+                return
+
+        try:
+            dataset = conn.openbis.get_dataset(permId)
+        except Exception as exc:
+            self.set_status(404)
+            self.write({
+                "reason" : 'No such dataSet found: {}'.format(permId)
+            })
+            return
+
+        # dataset was found, download the data to the disk
+        try:
+            destination = dataset.download(destination=downloadPath)
+        except Exception as exc:
+            self.set_status(500)
+            self.write({
+                "reason": 'Data for DataSet {} could not be downloaded: {}'.format(permId, exc)
+            })
+            return
+
+        # return success message
+        path = os.path.join(downloadPath, dataset.permId)
+        self.write({
+            'url'       : conn.url,
+            'permId'    : dataset.permId,
+            'path'      : path,
+            'dataStore' : dataset.dataStore,
+            'location'  : dataset.physicalData.location,
+            'size'      : dataset.physicalData.size,
+            'statusText': 'Data for DataSet {} was successfully downloaded to: {}.'.format(dataset.permId, path)
+        })
+
+    def get(self, **params):
+        """Handle a request to /openbis/dataset/connection_name/permId
+        download the data and return a message
+        """
+
+        try:
+            conn = openbis_connections[params['connection_name']]
+        except KeyError:
+            self.set_status(404)
+            self.write({
+                "reason":'connection {} was not found'.format(params['connection_name'])
+            })
+            return
+
+        results = self.download_data(conn=conn, permId=params['permId'], downloadPath=params['downloadPath'])
+
+
+class DataSetTypesHandler(IPythonHandler):
+    def get(self, **params):
+        """Handle a request to /openbis/datasetTypes/connection_name
+        """
+
+        try:
+            conn = openbis_connections[params['connection_name']]
+        except KeyError:
+            self.set_status(404)
+            self.write({
+                "reason":'connection {} was not found'.format(params['connection_name'])
+            })
+            return
+
+        try:
+            dataset_types = conn.openbis.get_dataset_types()
+            dts = dataset_types.df.to_dict(orient='records')
+
+            # get property assignments for every dataset-type
+            # and add it to the dataset collection
+            for dt in dts:
+                dataset_type = conn.openbis.get_dataset_type(dt['code'])
+                pa = dataset_type.get_propertyAssignments(including_vocabulary=True)
+                pa_dicts = pa.to_dict(orient='records')
+                for pa_dict in pa_dicts:
+                    if pa_dict['dataType'] == 'CONTROLLEDVOCABULARY':
+                        terms = conn.openbis.get_terms(pa_dict['vocabulary']['code'])
+                        pa_dict['terms'] = terms.df[['code','label','description','official','ordinal']].to_dict(orient='records')
+
+                dt['propertyAssignments'] = pa_dicts
+
+            self.write({
+                "dataSetTypes": dts
+            })
+            return
+
+        except Exception as e:
+            print(e)
+            self.set_status(500)
+            self.write({
+                "reason":'Could not fetch dataset-types: {}'.format(e)
+            })
+            return
+
+
+class DataSetUploadHandler(IPythonHandler):
+    """Handle the POST requests for /openbis/dataset/connection_name"""
+
+    def upload_data(self, conn, data):
+        if not conn.is_session_active():
+            try:
+                conn.login()
+            except Exception as e:
+                print(e)
+                self.set_status(500)
+                self.write({
+                    "reason": 'connection to {} could not be established: {}'.format(conn.name, e)
+                })
+                return
+
+        errors = []
+        print("--------------_HERE_-----------1")
+
+        sample = None
+        experiment = None
+
+        if (data.get('entityIdentifier')):
+            try:
+                sample = conn.openbis.get_sample(data.get('entityIdentifier'))
+            except Exception as e:
+                pass
+            if sample is None:
+                try:
+                    experiment = conn.openbis.get_experiment(data.get('entityIdentifier'))
+                except Exception as e:
+                    pass
+
+            if sample is None and experiment is None:
+                errors.append(
+                    {"entityIdentifier" : 'No such sample or experiment: {}'.format(data.get('entityIdentifier')) }
+                )
+        else:
+            errors.append(
+                {"entityIdentifier": "please provide a sample or experiment identifier"}
+            )
+
+        print("--------------_HERE_-----------2")
+
+        parents = []
+        if data.get('parents'):
+            parents = data.get('parents')
+            for parent in parents:
+                try:
+                    conn.openbis.get_dataset(parent)
+                except Exception as e:
+                    errors.append({
+                        "parent": "Parent DataSet not found: {}".format(parent)
+                    })
+
+        print("--------------_HERE_-----------2a")
+
+        filenames = []
+        for filename in data.get('files'):
+            filename = unquote(filename)
+            if os.path.isfile(filename):
+                filenames.append(filename)
+            else:
+                errors.append({
+                    "file": "File not found: {}".format(filename)
+                })
+        print("--------------_HERE_-----------3")
+
+        try:
+            dataset = conn.openbis.new_dataset(
+                type   = data.get('type'),
+                sample = sample,
+                parents = parents,
+                experiment = experiment,
+                files  = filenames,
+            )
+        except Exception as e:
+            print(e)
+            errors.append({
+                "create": 'Error while creating the dataset: {}'.format(e)
+            })
+        print("--------------_HERE_-----------4")
+
+        # try to set the properties
+        if data.get('props'):
+            props = data.get('props')
+            for prop, value in props.items():
+                try:
+                    setattr(dataset.props, prop.lower(), value)
+                except Exception as e:
+                    errors.append({
+                        "prop."+prop : str(e)
+                    })
+
+        # check if any mandatory property is missing
+        for prop_name, prop in dataset.props._property_names.items():
+            if prop['mandatory']:
+                if getattr(dataset.props, prop_name) is None or getattr(dataset.props, prop_name) == "":
+                    errors.append({
+                        "prop."+prop_name : "is mandatory"
+                    })
+        print("--------------_HERE_-----------5")
+
+        # write errors back if already occured
+        if errors:
+            self.set_status(500)
+            self.write({ "errors": errors })
+            return
+        print("--------------_HERE_-----------6")
+
+        try:
+            dataset.save()
+        except Exception as e:
+            errors.append({
+                "save": 'Error while saving the dataset: {}'.format(e)
+            })
+        print("--------------_HERE_-----------7")
+
+        # write errors back if they occured
+        if errors:
+            self.set_status(500)
+            self.write({ "errors": errors })
+        else:
+            # ...or return a success message
+            self.write({
+                'status': 200,
+                'statusText': 'Jupyter Notebook was successfully uploaded to: {} with permId: {}'.format(conn.name, dataset.permId)
+            })
+
+        print('Jupyter Notebook was successfully uploaded to: {} with permId: {}'.format(conn.name, dataset.permId))
+        print("--------------_HERE_-----------8")
+
+    def post(self, **params):
+        """Handle a request to /openbis/dataset/connection_name/permId
+        download the data and return a message
+        """
+
+        try:
+            conn = openbis_connections[params['connection_name']]
+        except KeyError:
+            self.write({
+                "reason": 'connection {} was not found'.format(params['connection_name'])
+            })
+            return
+
+        data = self.get_json_body()
+        print("Received DATA")
+        print(data)
+        self.upload_data(conn=conn,data=data)
+
+
+class FileListHandler(IPythonHandler):
+
+    def get(self, **params):
+        """
+        Returns the file list of the current working directory
+
+        :param params:
+        :return: dictionary of files, key is the fully qualified name,
+                 value is the relative name (for display)
+        """
+
+        cwd = os.getcwd()
+        files = {}
+        for (dirpath, dirnames, filenames) in os.walk(cwd):
+            if filenames:
+                for filename in filenames:
+                    # ignore hidden files
+                    if filename.startswith('.'):
+                        continue
+                    # ignore hidden folders
+                    if os.path.relpath(dirpath) != '.' \
+                            and os.path.relpath(dirpath).startswith('.'):
+                        continue
+                    fqn = os.path.join(dirpath, filename)
+                    files[fqn] = os.path.relpath(fqn, cwd)
+
+        self.set_status(200)
+        self.write({
+            "files": files
+        })
+
diff --git a/jupyter-openbis-extension/sample.py b/jupyter-openbis-extension/sample.py
new file mode 100644
index 0000000000000000000000000000000000000000..752dfe710c37eb4cf0af6fdf48d8b3fd63310f8a
--- /dev/null
+++ b/jupyter-openbis-extension/sample.py
@@ -0,0 +1,89 @@
+from notebook.base.handlers import IPythonHandler
+import numpy as np
+import os
+from .connection import openbis_connections
+from urllib.parse import parse_qs
+
+
+
+def get_entity_for_identifier(conn, identifier):
+    try:
+        entity = conn.openbis.get_sample(identifier)
+    except Exception as exc:
+        pass
+
+    if entity is None:
+        try:
+            entity = conn.openbis.get_experiment(identifier)
+        except Exception as exc:
+            pass
+
+    return entity
+
+def get_datasets(entity, start_with=None, count=None):
+
+    datasets = entity.get_datasets(start_with=start_with, count=count)
+    totalCount = datasets.totalCount
+    df = datasets.df
+    df.replace({np.nan:None}, inplace=True)  # replace NaN with None, otherwise we cannot convert it correctly
+    datasets_dict = df.to_dict(orient='records')   # to_dict() cannot serialise NaN values, hence the replace above
+
+    return {
+        "datasets_dict": datasets_dict,
+        "totalCount": totalCount
+    }
+
+
+class SampleHandler(IPythonHandler):
+    """Handle the requests for /openbis/sample/connection/permId"""
+
+    def get(self, **params):
+        """Handle a request to /openbis/sample/connection_name/permId
+        download the dataset list and return a message
+        """
+        try:
+            conn = openbis_connections[params['connection_name']]
+        except KeyError:
+            self.set_status(500)
+            self.write({
+                "reason" : 'connection {} was not found'.format(
+                    params['connection_name']
+                )
+            })
+            return
+
+        if not conn.is_session_active():
+            try:
+                conn.login()
+            except Exception as exc:
+                self.set_status(500)
+                self.write({
+                    "reason" : 'connection to {} could not be established: {}'.format(conn.name, exc)
+                })
+                return
+
+        entity = get_entity_for_identifier(conn, params['identifier'])
+        if entity is None:
+            self.set_status(404)
+            self.write({
+                "reason" : 'No such Sample or Experiment: {}'.format(params['identifier'])
+            })
+            return None
+
+        querystring = parse_qs(self.request.query)
+        start_with = querystring.get('start_with', ['0'])[0]
+        count      = querystring.get('count', ['10'])[0]
+
+        datasets = get_datasets(entity, start_with=start_with, count=count)
+        if datasets is not None:
+            self.set_status(200)
+            self.write({
+                "dataSets"    : datasets.get('datasets_dict'),
+                "entity_attrs": entity.attrs.all(),
+                "entity_props": entity.props.all(),
+                "start_with"  : start_with,
+                "count"       : count,
+                "totalCount"  : datasets.get('totalCount'),
+                "cwd"         : os.getcwd()
+            })
+
diff --git a/jupyter-openbis-extension/server.py b/jupyter-openbis-extension/server.py
index 996a942b3b379a113ffc035a5599f81a1ee6853f..97d4909c5b85e9b1b1b5358257a6e933b0599a0b 100644
--- a/jupyter-openbis-extension/server.py
+++ b/jupyter-openbis-extension/server.py
@@ -1,13 +1,10 @@
 from notebook.utils import url_path_join
-from notebook.base.handlers import IPythonHandler
-from pybis import Openbis
-import numpy as np
-
 import os
-from urllib.parse import unquote
 import yaml
 
-openbis_connections = {}
+from .connection import OpenBISConnections, OpenBISConnectionHandler, register_connection
+from .dataset import DataSetTypesHandler, DataSetDownloadHandler, DataSetUploadHandler, FileListHandler
+from .sample import SampleHandler
 
 
 def _jupyter_server_extension_paths():
@@ -31,8 +28,8 @@ def _load_configuration(paths, filename='openbis-connections.yaml'):
                     config = yaml.safe_load(stream)
                     for connection in config['connections']:
                         connections.append(connection)
-                except yaml.YAMLexception as exc:
-                    print(exc)
+                except yaml.YAMLError as e:
+                    print(e)
                     return None
 
     return connections
@@ -69,6 +66,15 @@ def load_jupyter_server_extension(nb_server_app):
     host_pattern = '.*$'
     base_url = web_app.settings['base_url']
 
+    # get the file list
+    web_app.add_handlers(
+        host_pattern,
+        [(url_path_join( base_url, '/general/filelist'),
+            FileListHandler
+        )]
+    )
+
+
     # DataSet download
     web_app.add_handlers(
         host_pattern, 
@@ -81,10 +87,7 @@ def load_jupyter_server_extension(nb_server_app):
 
     # DataSet upload
     web_app.add_handlers( host_pattern, [(
-            url_path_join(
-                base_url,
-                '/openbis/dataset/(?P<connection_name>.*)'
-            ),
+            url_path_join( base_url, '/openbis/dataset/(?P<connection_name>.*)' ),
             DataSetUploadHandler
         )]
     )
@@ -93,9 +96,7 @@ def load_jupyter_server_extension(nb_server_app):
     web_app.add_handlers(
         host_pattern,
         [(
-            url_path_join(
-                base_url,
-                '/openbis/datasetTypes/(?P<connection_name>.*)'
+            url_path_join( base_url, '/openbis/datasetTypes/(?P<connection_name>.*)'
             ),
             DataSetTypesHandler
         )]
@@ -107,7 +108,7 @@ def load_jupyter_server_extension(nb_server_app):
         [(  
             url_path_join(
                 base_url,
-                '/openbis/sample/(?P<connection_name>.*)?/(?P<permId>.*)'
+                '/openbis/sample/(?P<connection_name>.*)?/(?P<identifier>.*)'
             ), 
             SampleHandler
         )]
@@ -137,392 +138,4 @@ def load_jupyter_server_extension(nb_server_app):
         )]
     )
 
-    print("pybis loaded: {}".format(Openbis))
-
-
-def register_connection(connection_info):
-
-    conn = OpenBISConnection(
-        name                = connection_info.get('name'),
-        url                 = connection_info.get('url'),
-        verify_certificates = connection_info.get('verify_certificates', False),
-        username            = connection_info.get('username'),
-        password            = connection_info.get('password'),
-        status              = 'not connected',
-    )
-    openbis_connections[conn.name] = conn
-    return conn
-
-
-class OpenBISConnection:
-    """register an openBIS connection
-    """
-
-    def __init__(self, **kwargs):
-        for needed_key in ['name', 'url']:
-            if needed_key not in kwargs:
-                raise KeyError("{} is missing".format(needed_key))
-
-        for key in kwargs:
-            setattr(self, key, kwargs[key])
-
-        openbis = Openbis(
-            url = self.url,
-            verify_certificates = self.verify_certificates
-        )
-        self.openbis = openbis
-        self.status = "not connected"
-
-    def is_session_active(self):
-        return self.openbis.is_session_active()
-
-    def check_status(self):
-        if self.openbis.is_session_active():
-            self.status = "connected"
-        else:
-            self.status = "not connected"
-
-    def login(self, username=None, password=None):
-        if username is None:
-            username=self.username
-        if password is None:
-            password=self.password
-        self.openbis.login(
-            username = username,
-            password = password
-        )
-        # store username and password in memory
-        self.username = username
-        self.password = password
-        self.status  = 'connected'
-
-    def get_info(self):
-        return {
-            'name'    : self.name,
-            'url'     : self.url,
-            'status'  : self.status,
-            'username': self.username,
-            'password': self.password,
-        }
-
-class OpenBISConnections(IPythonHandler):
-
-    def post(self):
-        """create a new connection
-
-        :return: a new connection object
-        """
-        data = self.get_json_body()
-        conn = register_connection(data)
-        if conn.username and conn.password:
-            try:
-                conn.login()
-            except Exception:
-                pass
-        self.get()
-        return
-
-    def get(self):
-        """returns all available openBIS connections
-        """
-
-        connections= []
-        for conn in openbis_connections.values():
-            conn.check_status()
-            connections.append(conn.get_info())
-
-        self.write({
-            'status'     : 200,
-            'connections': connections,
-            'cwd'        : os.getcwd()
-        })
-        return
-
-
-class OpenBISConnectionHandler(IPythonHandler):
-    """Handle the requests to /openbis/conn
-    """
-
-    def put(self, connection_name):
-        """reconnect to a current connection
-        :return: an updated connection object
-        """
-        data = self.get_json_body()
-
-        try:
-            conn = openbis_connections[connection_name]
-        except KeyError:
-            self.set_status(404)
-            self.write({
-                "reason" : 'No such connection: {}'.format(data)
-            })
-            return
-
-        try:
-            conn.login(data.get('username'), data.get('password'))
-        except ConnectionError:
-            self.set_status(500)
-            self.write({
-                "reason": "Could not establish connection to {}".format(connection_name)
-            })
-            return
-        except ValueError:
-            self.set_status(401)
-            self.write({
-                "reason": "Incorrect username or password for {}".format(connection_name)
-            })
-            return
-
-        self.write({
-            'status'     : 200,
-            'connection': conn.get_info(),
-            'cwd'        : os.getcwd()
-        })
-
-    def get(self, connection_name):
-        """returns  information about a connection name
-        """
-
-        try:
-            conn = openbis_connections[connection_name]
-        except KeyError:
-            self.set_status(404)
-            self.write({
-                "reason" : 'No such connection: {}'.format(data)
-            })
-            return
-
-        conn.check_status()
-
-        self.write({
-            'status'     : 200,
-            'connection': conn.get_info(),
-            'cwd'        : os.getcwd()
-        })
-        return
-
-
-class SampleHandler(IPythonHandler):
-    """Handle the requests for /openbis/sample/connection/permId"""
-
-    def get_datasets(self, conn, permId):
-        if not conn.is_session_active():
-            try:
-                conn.login()
-            except Exception as exc:
-                self.write({
-                    "reason" : 'connection to {} could not be established: {}'.format(conn.name, exc)
-                })
-
-        sample = None
-        try:
-            sample = conn.openbis.get_sample(permId)
-        except Exception as exc:
-            self.set_status(404)
-            self.write({
-                "reason" : 'No such sample: {}'.format(permId)
-            })
-        if sample is None:
-            return
-
-        datasets = sample.get_datasets().df
-        datasets.replace({np.nan:None}, inplace=True)  # replace NaN with None, otherwise we cannot convert it correctly
-        return datasets.to_dict(orient='records')   # is too stupid to handle NaN
-
-
-    def get(self, **params):
-        """Handle a request to /openbis/sample/connection_name/permId
-        download the data and return a message
-        """
-        try:
-            conn = openbis_connections[params['connection_name']]
-        except KeyError:
-            self.write({
-                "reason" : 'connection {} was not found'.format(
-                    params['connection_name']
-                )
-            })
-            return
-        
-        datasets = self.get_datasets(conn, params['permId'])
-        if datasets is not None:
-            self.set_status(200)
-            self.write({
-                "dataSets": datasets
-            })
-
-
-
-class DataSetDownloadHandler(IPythonHandler):
-    """Handle the requests for /openbis/dataset/connection/permId"""
-
-
-    def download_data(self, conn, permId, downloadPath=None):
-        if not conn.is_session_active():
-            try:
-                conn.login()
-            except Exception as exc:
-                self.set_status(500)
-                self.write({
-                    "reason" : 'connection to {} could not be established: {}'.format(conn.name, exc)
-                })
-                return
-
-        try:
-            dataset = conn.openbis.get_dataset(permId)
-        except Exception as exc:
-            self.set_status(404)
-            self.write({
-                "reason" : 'No such dataSet found: {}'.format(permId)
-            })
-            return
-
-        # dataset was found, download the data to the disk
-        try: 
-            destination = dataset.download(destination=downloadPath)
-        except Exception as exc:
-            self.set_status(500)
-            self.write({
-                "reason": 'Data for DataSet {} could not be downloaded: {}'.format(permId, exc)
-            })
-            return
-            
-        # return success message
-        path = os.path.join(downloadPath, dataset.permId)
-        self.write({
-            'url'       : conn.url,
-            'permId'    : dataset.permId,
-            'path'      : path,
-            'dataStore' : dataset.dataStore,
-            'location'  : dataset.physicalData.location,
-            'size'      : dataset.physicalData.size,
-            'statusText': 'Data for DataSet {} was successfully downloaded to: {}.'.format(dataset.permId, path)
-        })
-
-    def get(self, **params):
-        """Handle a request to /openbis/dataset/connection_name/permId
-        download the data and return a message
-        """
-
-        try:
-            conn = openbis_connections[params['connection_name']]
-        except KeyError:
-            self.set_status(404)
-            self.write({
-                "reason":'connection {} was not found'.format(params['connection_name'])
-            })
-            return
-        
-        results = self.download_data(conn=conn, permId=params['permId'], downloadPath=params['downloadPath'])
-
-
-class DataSetTypesHandler(IPythonHandler):
-    def get(self, **params):
-        """Handle a request to /openbis/datasetTypes/connection_name
-        """
-
-        try:
-            conn = openbis_connections[params['connection_name']]
-        except KeyError:
-            self.set_status(404)
-            self.write({
-                "reason":'connection {} was not found'.format(params['connection_name'])
-            })
-            return
-
-        try:
-            dataset_types = conn.openbis.get_dataset_types()
-            dts = dataset_types.df.to_dict(orient='records')
-
-            # get property assignments for every dataset-type
-            # and add it to the dataset collection
-            for dt in dts:
-                dataset_type = conn.openbis.get_dataset_type(dt['code'])
-                pa = dataset_type.get_propertyAssignments()
-                pa_dict = pa.to_dict(orient='records')
-                dt['propertyAssignments'] = pa_dict
-
-            self.write({
-                "dataSetTypes": dts
-            })
-            return
-
-        except Exception as e:
-            self.set_status(500)
-            self.write({
-                "reason":'Could not fetch dataset-types: {}'.format(e)
-            })
-            return
-
-
-class DataSetUploadHandler(IPythonHandler):
-    """Handle the requests for /openbis/dataset/connection"""
-
-    def upload_data(self, conn, data):
-        if not conn.is_session_active():
-            try:
-                conn.login()
-            except Exception as exc:
-                self.write({
-                    "reason": 'connection to {} could not be established: {}'.format(conn.name, exc)
-                })
-                return
-
-        try:
-            sample = conn.openbis.get_sample(data.get('sampleIdentifier'))
-        except Exception as exc:
-            self.set_status(404)
-            self.write({
-                "reason" : 'No such sample: {}'.format(data.get('sampleIdentifier'))
-            })
-            return
-
-        filenames = []
-        for filename in data.get('files'):
-            filename = unquote(filename)
-            filenames.append(filename)
-
-        try: 
-            ds = conn.openbis.new_dataset(
-                name        = data.get('name'),
-                description = data.get('description'),
-                type        = data.get('type'),
-                sample      = sample,
-                files       = filenames
-            ) 
-        except Exception as exc:
-            self.write({
-                "reason": 'Error while creating the dataset: {}'.format(exc)
-            })
-            return
-
-        try:
-            ds.save()
-        except Exception as exc:
-            self.write({
-                "reason": 'Error while saving the dataset: {}'.format(exc)
-            })
-            return
-        
-            
-        # return success message
-        self.write({
-            'status': 200,
-            'statusText': 'Jupyter Notebook was successfully uploaded to: {} with permId: {}'.format(conn.name, ds.permId)
-        })
-
-    def post(self, **params):
-        """Handle a request to /openbis/dataset/connection_name/permId
-        download the data and return a message
-        """
-
-        try:
-            conn = openbis_connections[params['connection_name']]
-        except KeyError:
-            self.write({
-                "reason": 'connection {} was not found'.format(params['connection_name'])
-            })
-            return
 
-        data = self.get_json_body()
-        results = self.upload_data(conn=conn,data=data)
diff --git a/jupyter-openbis-extension/static/common.js b/jupyter-openbis-extension/static/common.js
new file mode 100644
index 0000000000000000000000000000000000000000..15280a1e178c749e5e17292270e226bd061452ca
--- /dev/null
+++ b/jupyter-openbis-extension/static/common.js
@@ -0,0 +1,47 @@
+define([
+        "base/js/namespace"
+    ],
+    function (IPython) {
+
+        function createFeedback(type, content) {
+            var close = document.createElement("BUTTON")
+            close.className = "close"
+            close.setAttribute("data-dismiss", "alert")
+            close.setAttribute("aria-label", "Close")
+            var x = document.createElement("SPAN")
+            x.setAttribute("aria-hidden", true)
+            x.innerHTML = "&times;"
+            close.appendChild(x)
+
+            var feedbackBox = document.createElement("DIV")
+            feedbackBox.className = "openbis-feedback alert alert-dismissible alert-" + type
+            feedbackBox.setAttribute("role", "alert")
+            feedbackBox.innerHTML = content
+            feedbackBox.prepend(close)
+
+            var nb_container = document.getElementById('notebook-container')
+            nb_container.prepend(feedbackBox)
+        }
+
+        function getCookie(cname) {
+            var name = cname + "=";
+            var decodedCookie = decodeURIComponent(document.cookie);
+            var ca = decodedCookie.split(';');
+            for(var i = 0; i <ca.length; i++) {
+                var c = ca[i];
+                while (c.charAt(0) === ' ') {
+                    c = c.substring(1);
+                }
+                if (c.indexOf(name) === 0) {
+                    return c.substring(name.length, c.length);
+                }
+            }
+            return "";
+        }
+
+        return {
+            createFeedback: createFeedback,
+            getCookie: getCookie
+        }
+    }
+)
\ No newline at end of file
diff --git a/jupyter-openbis-extension/static/connectionDialog.js b/jupyter-openbis-extension/static/connectionDialog.js
new file mode 100644
index 0000000000000000000000000000000000000000..700ade9865d855b0a5395d4fac7483adb157289b
--- /dev/null
+++ b/jupyter-openbis-extension/static/connectionDialog.js
@@ -0,0 +1,249 @@
+define(
+    [
+        "base/js/dialog",
+        "jquery",
+        "./state",
+        "./connections"
+    ],
+    function (dialog, $, state, connections) {
+
+        var currentDownloadPath = null
+
+        function show_available_connections(env, data, conn_table, onclick_cbf) {
+            if (!currentDownloadPath) {
+                currentDownloadPath = data.cwd
+            }
+
+            var table = document.createElement("TABLE")
+            table.className = 'table-bordered table-striped table-condensed'
+            var thead = table.createTHead()
+            var thead_row = thead.insertRow(0)
+            var titles = ['', 'Name', 'URL', 'Status', 'Username / Password']
+            for (title of titles) {
+                thead_row.insertCell().textContent = title
+            }
+
+            tbody = table.createTBody()
+            for (connection of data.connections) {
+                var conn = document.createElement("INPUT")
+                conn.type = "radio"
+                conn.name = "connection_name"
+                conn.value = connection.name
+                conn.setAttribute("url", connection.url)
+
+                conn.checked = connection.name === state.connection.candidateName;
+                if (onclick_cbf === undefined) {
+                    conn.onclick = function () {
+                        state.connection.candidateName = this.value
+                    }
+                } else {
+                    conn.onclick = onclick_cbf
+                }
+
+                var row = tbody.insertRow()
+                row.insertCell().appendChild(conn)
+                row.insertCell().textContent = connection.name
+                row.insertCell().textContent = connection.url
+
+                var status_cell = row.insertCell()
+
+                var status_badge = document.createElement("SPAN")
+                status_badge.id = connection.name + "-badge"
+                status_badge.textContent = connection.status
+                if (connection.status === "connected") {
+                    status_badge.className = "label label-success"
+                } else {
+                    status_badge.className = "label label-danger"
+                }
+                status_cell.appendChild(status_badge)
+
+                var username = document.createElement("INPUT")
+                username.type = "text"
+                username.name = "username"
+                username.autocomplete = "on"
+                username.value = connection.username
+                username.setAttribute("form", connection.name)
+
+                var password = document.createElement("INPUT")
+                password.type = "password"
+                password.name = "password"
+                password.autocomplete = "current-password"
+                password.value = connection.password
+                password.setAttribute("form", connection.name)
+
+                // Username / Password form
+                var pwform = document.createElement("FORM")
+                pwform.id = connection.name
+                pwform.onsubmit = function (event) {
+                    var form_data = new FormData(this)
+                    var status_badge = document.getElementById(this.id + "-badge")
+                    connections.connect(env, this.id,
+                            form_data.get("username"), form_data.get("password")
+                        )
+                        .then(function (response) {
+                            //console.log(response)
+                            if (status_badge.nextElementSibling !== null) {
+                                status_badge.parentNode.removeChild(status_badge.nextElementSibling)
+                            }
+                            if (response.ok) {
+                                status_badge.textContent = "connected"
+                                status_badge.className = "label label-success"
+                            } else {
+                                status_badge.textContent = "not connected"
+                                status_badge.className = "label label-danger"
+                                message = document.createElement("p")
+                                if (response.status === 401) {
+                                    message.textContent = "username/password incorrect"
+                                } else if (response.status === 500) {
+                                    message.textContent = "Connection error"
+                                } else {
+                                    message.textContent = "General error"
+                                }
+                                status_badge.parentNode.insertBefore(message, status_badge.nextSibling)
+                            }
+                        })
+                        .catch(error => console.error("Error while attempting to reconnect: ", error))
+
+                    return false
+                }
+
+
+                var connect_button = document.createElement("BUTTON")
+                connect_button.className = "btn btn-primary btn-xs"
+                connect_button.textContent = "connect"
+
+                pwform.appendChild(username)
+                pwform.appendChild(password)
+                pwform.appendChild(connect_button)
+
+                var cell = row.insertCell()
+                cell.appendChild(pwform)
+            }
+
+            // add row for new connection
+            var row = tbody.insertRow()
+
+            var conn_form = document.createElement("FORM")
+            conn_form.id = "new_connection"
+            conn_form.onsubmit = function (event) {
+                var inputs = document.querySelectorAll("input[form=new_connection]")
+
+                data = {}
+                for (input of inputs) {
+                    data[input.name] = input.value
+                }
+                for (missing of ['connection_name', 'url', 'username', 'password']) {
+                    if (data[missing] === "") {
+                        alert("Please provide: " + missing)
+                        return false
+                    }
+                }
+                connections.create(env, data.connection_name, data.url, data.username, data.password)
+                    .then(function (response) {
+                        if (response.ok) {
+                            response.json()
+                                .then(function (data) {
+                                    show_available_connections(env, data, conn_table)
+                                })
+                        }
+                    })
+                return false
+            }
+            var conn_name = document.createElement("INPUT")
+            conn_name.type = "input"
+            conn_name.name = "connection_name"
+            conn_name.setAttribute("form", conn_form.id)
+            conn_name.placeholder = "openBIS instance name"
+            row.insertCell().appendChild(conn_form)
+            row.insertCell().appendChild(conn_name)
+
+            var conn_url = document.createElement("INPUT")
+            conn_url.type = "input"
+            conn_url.name = "url"
+            conn_url.setAttribute("form", conn_form.id)
+            conn_url.placeholder = "https://openbis.domain:port"
+            row.insertCell().appendChild(conn_url)
+            row.insertCell()
+
+            var username = document.createElement("INPUT")
+            username.autocomplete = "off"
+            username.type = "text"
+            username.name = "username"
+            username.setAttribute("form", conn_form.id)
+            username.placeholder = "username"
+            var password = document.createElement("INPUT")
+            password.type = "password"
+            password.name = "password"
+            password.autocomplete = "new-password"
+            password.setAttribute("form", conn_form.id)
+            var create_btn = document.createElement("BUTTON")
+            create_btn.setAttribute("form", conn_form.id)
+            create_btn.textContent = "create"
+            var uname_pw_cell = row.insertCell()
+            uname_pw_cell.appendChild(username)
+            uname_pw_cell.appendChild(password)
+            uname_pw_cell.appendChild(create_btn)
+
+            conn_table.innerHTML = ""
+            table_title = document.createElement("STRONG")
+            table_title.textContent = "Please choose a connection"
+            conn_table.appendChild(table_title)
+            conn_table.appendChild(table)
+        }
+
+        return {
+            help: 'configure openBIS connections',
+            icon: 'fa-sliders',
+            help_index: '',
+            handler: function (env) {
+                conn_table = document.createElement("DIV")
+                var dst_title = document.createElement("STRONG")
+                dst_title.textContent = "DataSet type"
+                var dataset_types = document.createElement("SELECT")
+                dataset_types.id = "dataset_type"
+                dataset_types.className = "form-control select-xs"
+
+                var input_fields = document.createElement("DIV")
+                conn_table.id = "openbis_connections"
+
+                var onclick_cbf = function () {
+                    state.connection.candidateName = this.value
+                }
+
+                connections.list(env)
+                    .done(function (data) {
+                        show_available_connections(env, data, conn_table, onclick_cbf)
+                    })
+                    .fail(function (data) {
+                        alert(data.status)
+                    })
+
+                var uploadDialogBox = $('<div/>').append(conn_table)
+
+                function onOk() {
+                    state.connection.name = state.connection.candidateName
+                }
+
+                function onCancel() {
+                    state.connection.candidateName = state.connection.name
+                }
+
+                dialog.modal({
+                    body: uploadDialogBox,
+                    title: 'Choose openBIS connection',
+                    buttons: {
+                        'Cancel': {
+                            click: onCancel
+                        },
+                        'Choose connection': {
+                            class: 'btn-primary btn-large',
+                            click: onOk
+                        }
+                    },
+                    notebook: env.notebook,
+                    keyboard_manager: env.notebook.keyboard_manager
+                })
+            }
+        }
+    }
+)
\ No newline at end of file
diff --git a/jupyter-openbis-extension/static/connections.js b/jupyter-openbis-extension/static/connections.js
new file mode 100644
index 0000000000000000000000000000000000000000..2365b7b07c7e813b637da62cdfdad17e198077ef
--- /dev/null
+++ b/jupyter-openbis-extension/static/connections.js
@@ -0,0 +1,67 @@
+define([
+        "base/js/utils",
+        "./common"
+    ],
+    function (utils, common) {
+
+        function list(env) {
+            // GET the server-side list of configured openBIS connections.
+            var connectionsUrl = env.notebook.base_url + 'openbis/conns'
+            var settings = {
+                url: connectionsUrl,
+                processData: false,
+                type: 'GET',
+                dataType: 'json',
+                contentType: 'application/json'
+            }
+            return utils.ajax(settings)  // jqXHR: callers chain .done/.fail
+        }
+
+
+        function connect(env, connection, username, password) {
+            // Re-authenticate the named openBIS connection via PUT; returns the fetch Promise.
+            var url = env.notebook.base_url + 'openbis/conn/' + connection
+            var body = {  // 'var' avoids leaking 'body' as an implicit global
+                "username": username,
+                "password": password
+            }
+
+            var xsrf_token = common.getCookie('_xsrf')
+            return fetch(url, {
+                method: "PUT",
+                credentials: "same-origin",  // fetch option, not an HTTP header
+                headers: {
+                    "Content-Type": "application/json",
+                    "X-XSRFToken": xsrf_token
+                },
+                body: JSON.stringify(body)
+            })
+        }
+
+        function create(env, connection_name, connection_url, username, password) {
+            // Register a new openBIS connection on the server via POST; returns the fetch Promise.
+            var endpoint = env.notebook.base_url + 'openbis/conns'
+            var body = {  // 'var' avoids leaking 'body' as an implicit global
+                "name": connection_name,
+                "url": connection_url,
+                "username": username,
+                "password": password
+            }
+
+            var xsrf_token = common.getCookie('_xsrf')
+            return fetch(endpoint, {
+                method: "POST",
+                credentials: "same-origin",  // fetch option, not an HTTP header
+                headers: {
+                    "Content-Type": "application/json",
+                    "X-XSRFToken": xsrf_token
+                },
+                body: JSON.stringify(body)
+            })
+        }
+
+        return {
+            list: list,
+            create: create,
+            connect: connect
+        }
+    }
+)
\ No newline at end of file
diff --git a/jupyter-openbis-extension/static/dialog.js b/jupyter-openbis-extension/static/dialog.js
deleted file mode 100644
index 8c999c89682dff741940e0bba896a84448ff209f..0000000000000000000000000000000000000000
--- a/jupyter-openbis-extension/static/dialog.js
+++ /dev/null
@@ -1,826 +0,0 @@
-define([
-        "base/js/namespace",
-        "base/js/dialog",
-        "base/js/utils",
-        "jquery"
-    ],
-    function(IPython, dialog, utils, $) {
-        var currentConnection = ''
-        var currentSampleIdentifier = null
-        var currentDownloadPath = null
-        var ds_type = document.createElement("SELECT")
-        ds_type.size = '40'
-        ds_type.className = "select-xs"
-        ds_type.id = "dataset_type"
-        var conn_table = null
-
-        function show_available_connections(env, data, conn_table, onclick_cbf) {
-            if (!currentDownloadPath) {
-                currentDownloadPath = data.cwd
-            }
-
-            var table = document.createElement("TABLE")
-            table.className = 'table-bordered table-striped table-condensed'
-            var thead = table.createTHead()
-            var thead_row = thead.insertRow(0)
-            var titles = ['', 'Name', 'URL', 'Status', 'Username / Password']
-            for (title of titles) {
-                thead_row.insertCell().textContent = title
-            }
-
-            tbody = table.createTBody()
-            for (connection of data.connections) {
-                var conn = document.createElement("INPUT")
-                conn.type = "radio"
-                conn.name = "connection_name"
-                conn.value = connection.name
-                conn.setAttribute("url", connection.url)
-
-                conn.checked = connection.name === currentConnection;
-                if (onclick_cbf === undefined) {
-                    conn.onclick = function() {
-                        currentConnection = this.value
-                    }
-                }
-                else {
-                    conn.onclick = onclick_cbf
-                }
-
-                var row = tbody.insertRow()
-                row.insertCell().appendChild(conn)
-                row.insertCell().textContent = connection.name
-                row.insertCell().textContent = connection.url
-
-                var status_cell = row.insertCell()
-
-                var status_badge = document.createElement("SPAN")
-                status_badge.id = connection.name + "-badge"
-                status_badge.textContent = connection.status
-                if (connection.status === "connected") {
-                    status_badge.className = "label label-success"
-                }
-                else {
-                        status_badge.className = "label label-danger"
-                }
-                status_cell.appendChild(status_badge)
-
-                var username = document.createElement("INPUT")
-                username.type = "text"
-                username.name = "username"
-                username.autocomplete = "on"
-                username.value = connection.username
-                username.setAttribute("form", connection.name)
-
-                var password = document.createElement("INPUT")
-                password.type = "password"
-                password.name = "password"
-                password.autocomplete = "current-password"
-                password.value = connection.password
-                password.setAttribute("form", connection.name)
-
-                // Username / Password form
-                var pwform = document.createElement("FORM")
-                pwform.id = connection.name
-                pwform.onsubmit = function(event) {
-                    var form_data = new FormData(this)
-                    var status_badge = document.getElementById(this.id + "-badge")
-                    reconnect_connection(env, this.id,
-                        form_data.get("username"), form_data.get("password")
-                    )
-                        .then( function(response) {
-                            //console.log(response)
-                            if (status_badge.nextElementSibling !== null) {
-                                status_badge.parentNode.removeChild(status_badge.nextElementSibling)
-                            }
-                            if (response.ok) {
-                                status_badge.textContent = "connected"
-                                status_badge.className = "label label-success"
-                            }
-                            else {
-                                status_badge.textContent = "not connected"
-                                status_badge.className = "label label-danger"
-                                message = document.createElement("p")
-                                if (response.status === 401) {
-                                    message.textContent = "username/password incorrect"
-                                }
-                                else if (response.status === 500) {
-                                    message.textContent = "Connection error"
-                                }
-                                else {
-                                    message.textContent = "General error"
-                                }
-                                status_badge.parentNode.insertBefore(message, status_badge.nextSibling)
-                            }
-                        })
-                        .catch( error => console.error("Error while attempting to reconnect: ",error) )
-
-                    return false
-                }
-
-
-                var connect_button = document.createElement("BUTTON")
-                connect_button.className = "btn btn-primary btn-xs"
-                connect_button.textContent  = "connect"
-
-                pwform.appendChild(username)
-                pwform.appendChild(password)
-                pwform.appendChild(connect_button)
-
-                var cell = row.insertCell()
-                cell.appendChild(pwform)
-            }
-
-            // add row for new connection
-            var row = tbody.insertRow()
-
-            var conn_form = document.createElement("FORM")
-            conn_form.id = "new_connection"
-            conn_form.onsubmit = function(event) {
-                var inputs = document.querySelectorAll("input[form=new_connection]")
-
-                data = {}
-                for (input of inputs) {
-                    data[input.name] = input.value
-                }
-                for (missing of ['connection_name','url', 'username', 'password']) {
-                    if (data[missing] === ""){
-                        alert("Please provide: " + missing)
-                        return false
-                    }
-                }
-                new_connection(
-                    env, data.connection_name, data.url, data.username, data.password
-                )
-                    .then( function(response){
-                        if (response.ok) {
-                            response.json()
-                                .then( function(data){
-                                    show_available_connections(env, data, conn_table)
-                                })
-                        }
-                    })
-                return false
-            }
-            var conn_name = document.createElement("INPUT")
-            conn_name.type = "input"
-            conn_name.name = "connection_name"
-            conn_name.setAttribute("form", conn_form.id)
-            conn_name.placeholder = "openBIS instance name"
-            row.insertCell().appendChild(conn_form)
-            row.insertCell().appendChild(conn_name)
-
-            var conn_url = document.createElement("INPUT")
-            conn_url.type = "input"
-            conn_url.name = "url"
-            conn_url.setAttribute("form", conn_form.id)
-            conn_url.placeholder = "https://openbis.domain:port"
-            row.insertCell().appendChild(conn_url)
-            row.insertCell()
-
-            var username = document.createElement("INPUT")
-            username.autocomplete = "off"
-            username.type = "text"
-            username.name = "username"
-            username.setAttribute("form", conn_form.id)
-            username.placeholder = "username"
-            var password = document.createElement("INPUT")
-            password.type = "password"
-            password.name = "password"
-            password.autocomplete = "new-password"
-            password.setAttribute("form", conn_form.id)
-            var create_btn = document.createElement("BUTTON")
-            create_btn.setAttribute("form", conn_form.id)
-            create_btn.textContent = "create"
-            var uname_pw_cell = row.insertCell()
-            uname_pw_cell.appendChild(username)
-            uname_pw_cell.appendChild(password)
-            uname_pw_cell.appendChild(create_btn)
-
-            conn_table.innerHTML = ""
-            table_title = document.createElement("STRONG")
-            table_title.textContent = "Please choose a connection"
-            conn_table.appendChild(table_title)
-            conn_table.appendChild(table)
-        }
-
-        function show_datasets_table( data, datasets_table) {
-            var table = document.createElement("TABLE")
-            table.className = "table-bordered table-striped table-condensed text-nowrap"
-            var thead = table.createTHead()
-            var t_row = thead.insertRow()
-            var titles = ['', 'permId', 'Type', 'Experiment', 'Registration Date', 'Status', 'Size']
-            titles.forEach( function(title) {
-                t_row.insertCell().textContent = title
-            })
-            var tbody = table.createTBody()
-
-            for (dataSet of data.dataSets) {
-
-                var permId = document.createElement("INPUT")
-                permId.type = "checkbox"
-                permId.name = "permId"
-                permId.value = dataSet.permId
-
-                var row = tbody.insertRow()
-                row.insertCell().appendChild(permId)
-                row.insertCell().textContent = dataSet.permId
-                row.insertCell().textContent = dataSet.type
-                row.insertCell().textContent = dataSet.experiment
-                row.insertCell().textContent = dataSet.registrationDate
-                row.insertCell().textContent = dataSet.status
-                row.insertCell().textContent = dataSet.size
-            }
-
-            while (datasets_table.firstChild) {
-                datasets_table.removeChild(datasets_table.firstChild);
-            }
-            datasets_table.appendChild(table)
-        }
-
-        // gets the status of the avialable openBIS connections
-        function getOpenBisConnections(env) {
-
-             var connectionsUrl = env.notebook.base_url + 'openbis/conns'
-             var settings = {
-                 url: connectionsUrl,
-                 processData: false,
-                 type: 'GET',
-                 dataType: 'json',
-                 contentType: 'application/json'
-             }
-             return utils.ajax(settings)
-        }
-
-        function reconnect_connection(env, connection, username, password){
-            var url = env.notebook.base_url + 'openbis/conn/' + connection
-            body = {
-                "username": username,
-                "password": password
-            }
-
-            var cookie = decodeURIComponent(document.cookie)
-            var xsrf_token = cookie.split("_xsrf=")[1]
-
-            return fetch(url, {
-                method: "PUT",
-                headers: {
-                    "Content-Type": "application/json",
-                    "X-XSRFToken": xsrf_token,
-                },
-                body: JSON.stringify(body)
-            })
-        }
-
-        function new_connection(env, connection_name, connection_url, username, password){
-            var endpoint = env.notebook.base_url + 'openbis/conns'
-            body = {
-                "name"    : connection_name,
-                "url"     : connection_url,
-                "username": username,
-                "password": password
-            }
-
-            var cookie = decodeURIComponent(document.cookie)
-            var xsrf_token = cookie.split("_xsrf=")[1]
-
-            return fetch(endpoint, {
-                method: "POST",
-                headers: {
-                    "Content-Type": "application/json",
-                    "X-XSRFToken": xsrf_token,
-                },
-                body: JSON.stringify(body)
-            })
-        }
-
-        function getDatasetTypes(env, connection_name, dataset_types, input_fields) {
-            // get all DatasetTypes of a given connection
-
-            var url = env.notebook.base_url + 'openbis/datasetTypes/'+ connection_name
-            fetch(url)
-                .then( function(response) {
-                    if (response.ok) {
-                        response.json()
-                            .then( function(data) {
-                                //console.log(data.dataSetTypes)
-
-                                var change_input_fields = function() {
-                                    // remove existing input fields
-                                    while (input_fields.firstChild) {
-                                        input_fields.removeChild(input_fields.firstChild)
-                                    }
-
-                                    // for every property assignment, create an input field.
-                                    for (pa of dts[dataset_type.selectedIndex].propertyAssignments) {
-                                        //var input_title = document.createTextNode(pa.label + ": ")
-                                        var input_field = document.createElement("INPUT")
-                                        input_field.type = "text"
-                                        input_field.name = pa.code
-                                        input_field.placeholder = pa.description ? pa.label + ": " + pa.description : pa.label
-                                        input_field.size = 90
-
-                                        //input_fields.appendChild(input_title)
-                                        input_fields.appendChild(input_field)
-                                        input_fields.appendChild(document.createElement("BR"))
-                                    }
-                                }
-                                dataset_types.onchange = change_input_fields
-
-                                // remove the old and add the new dataset-types
-                                dts = data.dataSetTypes
-                                while (dataset_types.firstChild) {
-                                    dataset_types.removeChild(dataset_types.firstChild);
-                                }
-                                for (dt of dts) {
-                                    var option = document.createElement("OPTION")
-                                    option.value = dt.code
-                                    option.textContent = dt.description ? dt.code + ": " + dt.description : dt.code
-                                    dataset_types.appendChild(option)
-                                }
-                                // change the input fields, since we just received new datasetTypes
-                                change_input_fields()
-
-                            })
-                            .catch( function(error){
-                                console.error("Error while parsing dataset types", error)
-                            })
-
-                    }
-                    else {
-                        while (dataset_types.firstChild) {
-                            dataset_types.removeChild(dataset_types.firstChild);
-                        }
-                    }
-                })
-                .catch (function(error) {
-                    console.error("Error while fetching dataset types:", error)
-                })
-        }
-
-        function createFeedback(type, content) {
-            var close = document.createElement("BUTTON")
-            close.className = "close"
-            close.setAttribute("data-dismiss", "alert")
-            close.setAttribute("aria-label", "Close")
-            var x = document.createElement("SPAN")
-            x.setAttribute("aria-hidden", true)
-            x.innerHTML = "&times;"
-            close.appendChild(x)
-
-            var feedbackBox = document.createElement( "DIV" )
-            feedbackBox.className = "openbis-feedback alert alert-dismissible alert-" + type
-            feedbackBox.setAttribute("role","alert")
-            feedbackBox.innerHTML = content
-            feedbackBox.prepend(close)
-
-            var nb_container = document.getElementById('notebook-container')
-            nb_container.prepend(feedbackBox)
-        }
-
-        function writeMetaData(data) {
-            var notebook = IPython.notebook
-            if (typeof notebook.metadata.openbis_connections === 'undefined') {
-                notebook.metadata.openbis_connections = {}
-            }
-            if (typeof notebook.metadata.openbis_connections[data.url] === 'undefined') {
-                notebook.metadata.openbis_connections[data.url] = {}
-            }
-            // store metadata about the downloaded files into the notebook-metadata
-            if (data.permId) {
-                notebook.metadata.openbis_connections[data.url][data.permId] = {
-                    "permId": data.permId,
-                    "path": data.path,
-                    "dataStore": data.dataStore,
-                    "location": data.location,
-                    "size": data.size,
-                    "status": data.statusText
-                }
-            }
-
-        }
-
-        var fetchDatasetFromOpenBis = {
-            help: 'Download openBIS datasets to your local harddrive',
-            icon: 'fa-download',
-            help_index: '',
-            handler: function (env) {
-                conn_table = document.createElement("DIV")
-                conn_table.id = "openbis_connections"
-                getOpenBisConnections(env)
-                    .done(function (data) {
-                        show_available_connections(env, data, conn_table)
-                    })
-                    .fail(function (data) {
-                        alert(data.status)
-                    })
-                    .always(function () {
-                        showDownloadDialog()
-                    })
-
-                function showDownloadDialog() {
-                    // This function gets called after loading the openBIS connections
-                    // to make sure we can display the download path provided by the server.
-
-                    // show DataSets for Sample identifier/permid
-                    var showDataSets = document.createElement("DIV")
-                    var title = document.createElement("STRONG")
-                    title.textContent = "Sample identfier/permId: "
-                    showDataSets.appendChild(title)
-                    showDataSets.style.marginTop = '10px'
-
-                    var sampleIdentifier = document.createElement("INPUT")
-                    sampleIdentifier.type = "text"
-                    sampleIdentifier.name = "sampleIdentifier"
-                    sampleIdentifier.size = 40
-                    sampleIdentifier.placeholder = "sample identifier or permId"
-                    sampleIdentifier.value = currentSampleIdentifier
-
-                    var datasets_table = document.createElement("DIV")
-                    datasets_table.id = "dataset_table"
-                    datasets_table.className = "output output_scroll"
-                    datasets_table.style.maxHeight = "10em"
-
-                    var show_datasets_btn = document.createElement("BUTTON")
-                    show_datasets_btn.className = "btn-info btn-xs"
-                    show_datasets_btn.textContent = "show datasets"
-
-                    show_datasets_btn.onclick = function() {
-                        var selected_conn = document.querySelector('input[name=connection_name]:checked')
-                        if (!selected_conn) {
-                            alert('Please choose a connection')
-                            return false
-                        }
-                        connection_name = selected_conn.value
-
-                        currentConnection = connection_name
-                        currentSampleIdentifier = sampleIdentifier.value
-                        if (!currentSampleIdentifier) {
-                            alert('Please specify a sample identifier/permId')
-                            return false
-                        }
-                        var url = env.notebook.base_url + 'openbis/sample/' + connection_name + '/' + encodeURIComponent(currentSampleIdentifier)
-
-                        fetch(url)
-                            .then( function(response) {
-                                if (response.ok) {
-                                    response.json()
-                                        .then( function(data) {
-                                            show_datasets_table(data, datasets_table)
-                                        })
-                                }
-                                else {
-                                    response.json()
-                                        .then( function(error) {
-                                            console.log(error.reason)
-                                            alert("Error: " + error.reason)
-                                        })
-                                }
-                            })
-                            .catch( function(error) {
-                                console.error('A serious network problem occured:', error)
-                            })
-
-                    }
-                    showDataSets.appendChild(sampleIdentifier)
-                    showDataSets.appendChild(show_datasets_btn)
-                    showDataSets.appendChild(datasets_table)
-
-                    // dataSetPermId only
-                    var dataset_direct = document.createElement("P")
-                    dataset_direct.style.marginTop='10px'
-                    dataset_direct.innerHTML = '<strong>enter DataSet permId directly: </strong>'
-
-                    //var dataset = $('<p>')
-                    //    .css('margin-top', '10px')
-                    //    .append($('<b>').text('... or enter DataSet permId directly: '))
-                    var datasetPermId = document.createElement("INPUT")
-                    datasetPermId.type = "text"
-                    datasetPermId.name = "datasetPermId"
-                    datasetPermId.size = "40"
-                    datasetPermId.placeholder = "dataSet permId"
-
-                    dataset_direct.appendChild(datasetPermId)
-
-                    var downloadPath = document.createElement("INPUT")
-                    downloadPath.type = "text"
-                    downloadPath.name = "downloadPath"
-                    downloadPath.size = "90"
-                    downloadPath.value = currentDownloadPath
-
-                    var path = document.createElement("DIV")
-                    path.innerHTML = "<strong>download data to path: </strong>"
-                    path.appendChild(downloadPath)
-
-                    var download_dialog_box = document.createElement("DIV")
-                    download_dialog_box.appendChild(conn_table)
-                    download_dialog_box.appendChild(showDataSets)
-                    download_dialog_box.appendChild(dataset_direct)
-                    download_dialog_box.appendChild(path)
-
-                    function downloadDataset(selected_conn, selectedPermIds, downloadPath) {
-                        var connection_name = selected_conn.value
-
-                        for (permId of selectedPermIds) {
-                            var downloadUrl = env.notebook.base_url + 'openbis/dataset/'
-                                + connection_name + '/' + permId + '/' + encodeURIComponent(downloadPath)
-
-                            fetch(downloadUrl)
-                                .then( function(response) {
-                                    if (response.ok) {
-                                        response.json()
-                                            .then( function(data) {
-                                                createFeedback('success', data.statusText)
-
-                                                // successful download:
-                                                // write statusText from returned data to notebooks metadata
-                                                writeMetaData(data)
-
-                                                // keep current download path for later use
-                                                currentDownloadPath = downloadPath
-                                            })
-                                    }
-                                    else {
-                                        response.json()
-                                            .then( function(error) {
-                                                console.log(error.reason)
-                                                alert("Error: " + error.reason)
-                                            })
-                                    }
-                                })
-                                .catch( function(error) {
-                                    console.error('A serious network problem occured:', error)
-                                })
-                        }
-                    }
-
-                    function onDownloadClick() {
-                        var selected_conn = document.querySelector('input[name=connection_name]:checked')
-                        if (! selected_conn) {
-                            alert('please choose a connection')
-                            return false
-                        }
-
-                        var selectedPermIds = []
-                        for (row of document.querySelectorAll('input[name=permId]:checked') ) {
-                            selectedPermIds.push(row.value)
-                        }
-                        if (datasetPermId.value) {
-                            selectedPermIds.push(datasetPermId.value)
-                        }
-                        if (!selectedPermIds) {
-                            alert('please select a dataset or provide a permId')
-                            return false
-                        }
-
-                        if (!downloadPath.value) {
-                            alert('Please specify where you would like to download your files!')
-                            return false
-                        }
-                        downloadDataset(selected_conn, selectedPermIds, downloadPath.value)
-                    }
-
-                    dialog.modal({
-                        body: download_dialog_box,
-                        title: 'Download openBIS DataSets',
-                        buttons: {
-                            'Cancel': {},
-                            'Download': {
-                                class: 'btn-primary btn-large',
-                                click: onDownloadClick,
-                            }
-                        },
-                        notebook: env.notebook,
-                        keyboard_manager: env.notebook.keyboard_manager
-                    })
-                }
-            }
-        }
-
-        var uploadDatasetsToOpenBis = {
-            help: 'upload Notebook and Data to openBIS',
-            icon: 'fa-upload',
-            help_index: '',
-            handler: function (env) {
-                conn_table = document.createElement("DIV")
-                var dst_title = document.createElement("STRONG")
-                dst_title.textContent = "DataSet type"
-                var dataset_types = document.createElement("SELECT")
-                dataset_types.id = "dataset_type"
-                dataset_types.className = "form-control select-xs"
-
-                var input_fields = document.createElement("DIV")
-                conn_table.id = "openbis_connections"
-
-                var onclick_cbf = function() {
-                    currentConnection = this.value
-                    getDatasetTypes(env, this.value, dataset_types, input_fields)
-                }
-
-                getOpenBisConnections(env)
-                    .done(function (data) {
-                        show_available_connections(env, data, conn_table, onclick_cbf)
-                    })
-                    .fail(function (data) {
-                        alert(data.status)
-                    })
-
-                var sample_title = document.createElement("STRONG")
-                sample_title.textContent = "Sample Identifier"
-                var sampleIdentifier = document.createElement("INPUT")
-                sampleIdentifier.type = "text"
-                sampleIdentifier.name = 'sampleIdentifier'
-                sampleIdentifier.placeholder = "Sample Identifier or permId"
-                sampleIdentifier.value = currentSampleIdentifier
-                sampleIdentifier.size  = "90"
-
-                var ds_title = document.createElement("STRONG")
-                ds_title.textContent = "DataSet files"
-                var ds_files         = document.createElement("INPUT")
-                ds_files.type        = "text"
-                ds_files.placeholder = "filenames"
-                ds_files.name        = "ds_files"
-                ds_files.size        = "90"
-
-                var inputs = document.createElement("DIV")
-                inputs.style.marginTop = '10px'
-                inputs.appendChild(dst_title)
-                inputs.appendChild(dataset_types)
-                inputs.appendChild(input_fields)
-                inputs.appendChild(sample_title)
-                inputs.appendChild(sampleIdentifier)
-                inputs.appendChild(ds_title)
-                inputs.appendChild(ds_files)
-
-                var uploadDialogBox = $('<div/>').append(conn_table).append(inputs)
-
-
-                // get the canvas for user feedback
-                var container = $('#notebook-container')
-
-                function onOk () {
-                    //var connection_name = $('input[name="connection_name"]:checked').val()
-                    var selected_connection = document.querySelector('input[name=connection_name]:checked')
-                    if (!selected_connection) {
-                        alert("No connection selected")
-                        return false
-                    }
-                    var connection_name = selected_connection.value
-
-                    var uploadUrl = env.notebook.base_url + 'openbis/dataset/' + connection_name
-
-                    var notebook = IPython.notebook
-                    var re = /\/notebooks\/(.*?)$/
-                    var files = []
-                    var filepath = window.location.pathname.match(re)[1]
-                    files.push(filepath)
-                    // FIXME
-                    //if (ds_files.val()) {
-                    //    files.push(ds_files.value)
-                    //}
-
-                    var dataSetInfo = {
-                        "type"            : dataset_types.value,
-                        "files"           : files,
-                        "sampleIdentifier": sampleIdentifier.value
-                    }
-
-                    var settings = {
-                        url: uploadUrl,
-                        processData: false,
-                        type: 'POST',
-                        dataType: 'json',
-                        data: JSON.stringify(dataSetInfo),
-                        contentType: 'application/json',
-                        success: function (data) {
-                            // display feedback to user
-                            createFeedback('success', data.statusText)
-
-                            // write statusText from returned data to notebooks metadata
-                            if ( typeof notebook.metadata.openbis === 'undefined') {
-                                notebook.metadata.openbis = {}
-                            }
-                            if ( typeof notebook.metadata.openbis.permIds === 'undefined' ) {
-                                notebook.metadata.openbis.permIds = {}
-                            }
-                            if ( data.permId ) {
-                                notebook.metadata.openbis.permIds[data.permId] = data.statusText
-                            }
-
-                        },
-                        error: function (data) {
-                            // display feedback to user
-                            var feedback = "<strong>Error: </strong>Dataset was not uploaded.<div>"
-                                + data.statusText
-                                + "</div>"
-                            createFeedback('danger', feedback)
-                        }
-                    }
-
-                    // display preloader during commit and push
-                    var preloader = '<img class="openbis-feedback" src="https://cdnjs.cloudflare.com/ajax/libs/slick-carousel/1.5.8/ajax-loader.gif">'
-
-                    // commit and push
-                    utils.ajax(settings)
-                }
-
-                if (IPython.notebook.dirty === true) {
-                    dialog.modal({
-                        body: 'Please save the notebook before uploading it to openBIS.',
-                        title: 'Save notebook first',
-                        buttons: {
-                            'Back': {}
-                        },
-                        notebook: env.notebook,
-                        keyboard_manager: env.notebook.keyboard_manager
-                    })
-                }
-                else {
-                    dialog.modal({
-                        body: uploadDialogBox,
-                        title: 'Upload openBIS DataSet',
-                        buttons: {
-                            'Cancel': {},
-                            'Upload': {
-                                class: 'btn-primary btn-large',
-                                click: onOk
-                            }
-                        },
-                        notebook: env.notebook,
-                        keyboard_manager: env.notebook.keyboard_manager
-                    })
-                }
-            }
-        }
-
-        var configureOpenBisConnections = {
-            help: 'configure openBIS connections',
-            icon: 'fa-sliders',
-            help_index: '',
-            handler: function (env) {
-                conn_table = document.createElement("DIV")
-                var dst_title = document.createElement("STRONG")
-                dst_title.textContent = "DataSet type"
-                var dataset_types = document.createElement("SELECT")
-                dataset_types.id = "dataset_type"
-                dataset_types.className = "form-control select-xs"
-
-                var input_fields = document.createElement("DIV")
-                conn_table.id = "openbis_connections"
-
-                var onclick_cbf = function() {
-                    currentConnection = this.value
-                    getDatasetTypes(env, this.value, dataset_types, input_fields)
-                }
-
-                getOpenBisConnections(env)
-                    .done(function (data) {
-                        show_available_connections(env, data, conn_table, onclick_cbf)
-                    })
-                    .fail(function (data) {
-                        alert(data.status)
-                    })
-
-                var uploadDialogBox = $('<div/>').append(conn_table)
-
-                // get the canvas for user feedback
-                var container = $('#notebook-container')
-
-                function onOk () {
-
-                }
-
-                dialog.modal({
-                    body: uploadDialogBox,
-                    title: 'Choose openBIS connection',
-                    buttons: {
-                        'Cancel': {},
-                        'Choose connection': {
-                            class: 'btn-primary btn-large',
-                            click: onOk
-                        }
-                    },
-                    notebook: env.notebook,
-                    keyboard_manager: env.notebook.keyboard_manager
-                })
-            }
-        }
-
-        function _onLoad () {
-            // show connections
-            var configure_openbis_connections = IPython.keyboard_manager.actions.register(
-                configureOpenBisConnections, 'openbis-connections', 'jupyter-openBIS')
-
-            // dnownload
-            var download_datasets = IPython.keyboard_manager.actions.register(
-                fetchDatasetFromOpenBis, 'openbis-dataset-download', 'jupyter-openBIS')
-
-            // upload
-            var upload_datasets = IPython.keyboard_manager.actions.register(
-                uploadDatasetsToOpenBis, 'openbis-dataset-upload', 'jupyter-openBIS')
-
-            // add button for new action
-            IPython.toolbar.add_buttons_group([configure_openbis_connections, download_datasets, upload_datasets])
-        }
-
-        return {load_ipython_extension: _onLoad}
-    })
diff --git a/jupyter-openbis-extension/static/downloadDialog.js b/jupyter-openbis-extension/static/downloadDialog.js
new file mode 100644
index 0000000000000000000000000000000000000000..08f0f92bc1ceef0a65347f64fe032689fe8bd4b3
--- /dev/null
+++ b/jupyter-openbis-extension/static/downloadDialog.js
@@ -0,0 +1,285 @@
+define([
+        "base/js/dialog",
+        "./common",
+        "./state"
+    ],
+    function (dialog, common, state) {
+
+        function writeMetaData(data) {
+            var notebook = IPython.notebook
+            if (typeof notebook.metadata.datasets === 'undefined') {
+                notebook.metadata.datasets = {}
+            }
+            // store metadata about the downloaded files into the notebook-metadata
+            if (data.permId) {
+                notebook.metadata.datasets[data.permId] = {
+                    "permId": data.permId,
+                    "path": data.path,
+                    "dataStore": data.dataStore,
+                    "location": data.location,
+                    "size": data.size,
+                    "status": data.statusText
+                }
+            }
+        }
+
+        function show_datasets_table(env, data, datasets_table, downloadPath, entityIdentifier) {
+            if (downloadPath.value === '') {
+                downloadPath.value = data.cwd
+            }
+
+            var table = document.createElement("TABLE")
+            table.className = "table-bordered table-striped table-condensed text-nowrap"
+            table.style.width = "100%"
+
+            var thead = table.createTHead()
+            var t_row = thead.insertRow()
+            var titles = ['', 'permId', 'Type', 'Experiment', 'Registration Date', 'Status', 'Size']
+            titles.forEach(function (title) {
+                t_row.insertCell().textContent = title
+            })
+            var tbody = table.createTBody()
+
+            for (const dataSet of data.dataSets) {
+
+                const permId = document.createElement("INPUT")
+                permId.type = "checkbox"
+                permId.name = "permId"
+                permId.value = dataSet.permId
+                permId.checked = state.selectedDatasets.has(permId.value)
+                permId.onclick = () => permId.checked ? state.selectedDatasets.add(permId.value) : state.selectedDatasets.delete(permId.value)
+
+                var row = tbody.insertRow()
+                row.insertCell().appendChild(permId)
+                row.insertCell().textContent = dataSet.permId
+                row.insertCell().textContent = dataSet.type
+                row.insertCell().textContent = dataSet.experiment
+                row.insertCell().textContent = dataSet.registrationDate
+                row.insertCell().textContent = dataSet.status
+                row.insertCell().textContent = dataSet.size
+            }
+
+            while (datasets_table.firstChild) {
+                datasets_table.removeChild(datasets_table.firstChild);
+            }
+            datasets_table.appendChild(table)
+
+            const totalCount = parseInt(data.totalCount)
+            const count = parseInt(data.count)
+            const startWith = parseInt(data.start_with)
+            const hasNext = startWith + count < totalCount
+            const hasPrevious = startWith > 0
+            const nextCmd = () => getDatasets(env, startWith+5, 5, entityIdentifier, datasets_table, downloadPath)
+            const previousCmd = () => getDatasets(env, startWith-5, 5, entityIdentifier, datasets_table, downloadPath)
+
+            var previous = document.createElement("A")
+            var linkText = document.createTextNode("<<< Previous")
+            previous.appendChild(linkText)
+            previous.href = "#"
+            previous.onclick = previousCmd
+
+            var next = document.createElement("A")
+            var linkText = document.createTextNode("Next >>>")
+            next.appendChild(linkText)
+            next.href = "#"
+            next.onclick = nextCmd
+            next.style.float="right"
+
+            var paging = document.createElement("DIV")
+            paging.style.width = "100%"
+            if (hasPrevious) {
+                paging.appendChild(previous)
+            }
+            if (hasNext) {
+                paging.appendChild(next)
+            }
+
+            datasets_table.appendChild(paging)
+        }
+
+        function getDatasets(env, startWith, count, entityIdentifier, datasets_table, downloadPath) {
+            var connection_name = state.connection.name
+            if (!connection_name) {
+                alert('Please choose a connection')
+                return false
+            }
+
+            var currentEntityIdentifier = entityIdentifier.value
+            if (!currentEntityIdentifier) {
+                alert('Please specify a Sample or Experiment identifier/permId')
+                return false
+            }
+            var url = env.notebook.base_url 
+                + 'openbis/sample/' 
+                + connection_name 
+                + '/' 
+                + encodeURIComponent(currentEntityIdentifier)
+                + "?start_with="
+                + startWith 
+                + "&count="
+                + count
+
+            fetch(url)
+                .then(function (response) {
+                    if (response.ok) {
+                        response.json()
+                            .then(function (data) {
+                                show_datasets_table(env, data, datasets_table, downloadPath, entityIdentifier)
+                            })
+                    } else {
+                        response.json()
+                            .then(function (error) {
+                                console.log(error.reason)
+                                alert("Error: " + error.reason)
+                            })
+                    }
+                })
+                .catch(function (error) {
+                    console.error('A serious network problem occurred:', error)
+                })
+        }
+
+
+        return {
+            help: 'Download openBIS datasets to your local harddrive',
+            icon: 'fa-download',
+            help_index: '',
+            handler: function (env) {
+                state.selectedDatasets = new Set([])
+
+                conn_table = document.createElement("DIV")
+                conn_table.id = "openbis_connections"
+
+                var showDataSets = document.createElement("DIV")
+                var title = document.createElement("STRONG")
+                title.textContent = "Sample or Experiment identifier/permId: "
+                showDataSets.appendChild(title)
+                showDataSets.style.marginTop = '10px'
+
+                var entityIdentifier = document.createElement("INPUT")
+                entityIdentifier.type = "text"
+                entityIdentifier.name = "entityIdentifier"
+                entityIdentifier.size = 40
+                entityIdentifier.placeholder = "Sample or Experiment identifier/permId"
+                entityIdentifier.value = ''
+
+                var datasets_table = document.createElement("DIV")
+
+                var show_datasets_btn = document.createElement("BUTTON")
+                show_datasets_btn.className = "btn-info btn-xs"
+                show_datasets_btn.textContent = "show datasets"
+                show_datasets_btn.style.margin="10px"
+
+                showDataSets.appendChild(entityIdentifier)
+                showDataSets.appendChild(show_datasets_btn)
+                showDataSets.appendChild(datasets_table)
+
+                var dataset_direct = document.createElement("P")
+                dataset_direct.style.marginTop = '10px'
+                dataset_direct.innerHTML = '<strong>Enter DataSet permId directly: </strong>'
+
+                var datasetPermId = document.createElement("INPUT")
+                datasetPermId.type = "text"
+                datasetPermId.name = "datasetPermId"
+                datasetPermId.size = "40"
+                datasetPermId.placeholder = "dataSet permId"
+
+                dataset_direct.appendChild(datasetPermId)
+
+                var downloadPath = document.createElement("INPUT")
+                downloadPath.type = "text"
+                downloadPath.name = "downloadPath"
+                downloadPath.size = "90"
+                downloadPath.value = ''
+
+                show_datasets_btn.onclick = 
+                    () => getDatasets(env, 0, 5, entityIdentifier, datasets_table, downloadPath)
+                
+                var path = document.createElement("DIV")
+                path.innerHTML = "<strong>download data to path: </strong>"
+                path.appendChild(downloadPath)
+
+                var download_dialog_box = document.createElement("DIV")
+                download_dialog_box.appendChild(conn_table)
+                download_dialog_box.appendChild(showDataSets)
+                download_dialog_box.appendChild(dataset_direct)
+                download_dialog_box.appendChild(path)
+
+                function downloadDataset(connection_name, selectedPermIds, downloadPath) {
+
+                    for (const permId of selectedPermIds) {
+                        var downloadUrl = env.notebook.base_url + 'openbis/dataset/' +
+                            connection_name + '/' + permId + '/' + encodeURIComponent(downloadPath)
+
+                        fetch(downloadUrl)
+                            .then(function (response) {
+                                if (response.ok) {
+                                    response.json()
+                                        .then(function (data) {
+                                            common.createFeedback('success', data.statusText)
+
+                                            // successful download:
+                                            // write statusText from returned data to notebooks metadata
+                                            writeMetaData(data)
+
+                                            // keep current download path for later use
+                                            currentDownloadPath = downloadPath
+                                        })
+                                } else {
+                                    response.json()
+                                        .then(function (error) {
+                                            console.log(error.reason)
+                                            alert("Error: " + error.reason)
+                                        })
+                                }
+                            })
+                            .catch(function (error) {
+                            console.error('A serious network problem occurred:', error)
+                            })
+                    }
+                }
+
+                function onDownloadClick() {
+                    var selected_conn = state.connection.name
+                    if (!selected_conn) {
+                        alert('please choose a connection')
+                        return false
+                    }
+
+                    var selectedPermIds = []
+                    for (const row of state.selectedDatasets) {
+                        selectedPermIds.push(row)
+                    }
+                    if (datasetPermId.value) {
+                        selectedPermIds.push(datasetPermId.value)
+                    }
+                    if (selectedPermIds.length === 0) {
+                        alert('please select a dataset or provide a permId')
+                        return false
+                    }
+
+                    if (!downloadPath.value) {
+                        alert('Please specify where you would like to download your files!')
+                        return false
+                    }
+
+                    downloadDataset(selected_conn, selectedPermIds, downloadPath.value)
+                }
+
+                dialog.modal({
+                    body: download_dialog_box,
+                    title: 'Download openBIS DataSets',
+                    buttons: {
+                        'Cancel': {},
+                        'Download': {
+                            class: 'btn-primary btn-large',
+                            click: onDownloadClick,
+                        }
+                    },
+                    notebook: env.notebook,
+                    keyboard_manager: env.notebook.keyboard_manager
+                })
+            }
+        }
+    }
+)
\ No newline at end of file
diff --git a/jupyter-openbis-extension/static/main.js b/jupyter-openbis-extension/static/main.js
new file mode 100644
index 0000000000000000000000000000000000000000..777e69d8ab57d2c25566d5425d904d546db115ef
--- /dev/null
+++ b/jupyter-openbis-extension/static/main.js
@@ -0,0 +1,33 @@
+define([
+        "base/js/namespace",
+        "./connectionDialog",
+        "./uploadDialog",
+        "./downloadDialog"
+    ],
+    function (IPython, connectionDialog, uploadDialog, downloadDialog) {
+        var ds_type = document.createElement("SELECT")
+        ds_type.size = '40'
+        ds_type.className = "select-xs"
+        ds_type.id = "dataset_type"
+
+        function _onLoad() {
+            // show connections
+            var configure_openbis_connections = IPython.keyboard_manager.actions.register(
+                connectionDialog, 'openbis-connections', 'jupyter-openBIS')
+
+            // download
+            var download_datasets = IPython.keyboard_manager.actions.register(
+                downloadDialog, 'openbis-dataset-download', 'jupyter-openBIS')
+
+            // upload
+            var upload_datasets = IPython.keyboard_manager.actions.register(
+                uploadDialog, 'openbis-dataset-upload', 'jupyter-openBIS')
+
+            // add button for new action
+            IPython.toolbar.add_buttons_group([configure_openbis_connections, download_datasets, upload_datasets])
+        }
+
+        return {
+            load_ipython_extension: _onLoad
+        }
+    })
\ No newline at end of file
diff --git a/jupyter-openbis-extension/static/state.js b/jupyter-openbis-extension/static/state.js
new file mode 100644
index 0000000000000000000000000000000000000000..6f05b5ebad6619aec67d781bb7f43fe00a17e155
--- /dev/null
+++ b/jupyter-openbis-extension/static/state.js
@@ -0,0 +1,23 @@
+define([],
+    function () {
+        return {
+            // connection dialog
+            connection: {
+                name: null,
+                candidateName: null
+            },
+
+            // upload dialog
+            uploadDataSetType: null,
+            uploadDataSetTypes: {},
+            uploadEntityIdentifier: '',
+            datasetCheckboxes: [],
+            fileCheckboxes: [],
+            selectedFiles: [],
+            unselectedDatasets: [],
+
+            // download dialog
+            selectedDatasets: new Set([])
+        }
+    }
+)
\ No newline at end of file
diff --git a/jupyter-openbis-extension/static/uploadDialog.js b/jupyter-openbis-extension/static/uploadDialog.js
new file mode 100644
index 0000000000000000000000000000000000000000..0690d40cf1d5bd58e64824a8eef1ce93fa945b08
--- /dev/null
+++ b/jupyter-openbis-extension/static/uploadDialog.js
@@ -0,0 +1,377 @@
+define([
+        "base/js/dialog",
+        "base/js/utils",
+        "jquery",
+        "./state",
+        "./common",
+    ],
+    function (dialog, utils, $, state, common) {
+
+        var errorElements = { }
+        function createErrorElement(name) {
+            var element = document.createElement("STRONG")
+            element.textContent = ""
+            element.style.marginLeft = "8px"
+            element.style.color = "red"
+            errorElements[name.toLowerCase()] = element
+            return element
+        }
+        function cleanErrors() {
+            Object.keys(errorElements).forEach(key => errorElements[key].textContent="")
+        }
+
+        var spinner = document.createElement("IMG")
+        spinner.className="openbis-feedback"
+        spinner.src=""
+        function showSpinner() {
+            spinner.src="https://cdnjs.cloudflare.com/ajax/libs/slick-carousel/1.5.8/ajax-loader.gif"
+        }
+        function hideSpinner() {
+            spinner.src=""
+        }
+
+        function get_file_list(env, container) {
+            var url = env.notebook.base_url + 'general/filelist'
+        
+            fetch(url)
+                .then( function(response) {
+                    if (response.ok) {
+                        response.json()
+                            .then(function(data){
+                                var values = Object.keys(data.files)
+                                values.sort()
+                                state.fileCheckboxes = createSelectTable(values, container, false, state.selectedFiles)
+                            })
+                    }
+                    else {
+                        console.error(response.status)
+                    }
+                })
+        }
+
+        function get_dataset_list(env, container) {
+            var datasets = env.notebook.metadata.datasets
+            if (datasets != null) {
+                var values = Object.keys(datasets)
+                values.sort()
+                state.datasetCheckboxes = createSelectTable(values, container, true, state.unselectedDatasets)
+            }
+        }
+
+        function getDatasetTypes(env, connection_name, dataset_types, input_fields) {
+            // get all DatasetTypes of a given connection
+
+            var url = env.notebook.base_url + 'openbis/datasetTypes/' + connection_name
+            fetch(url)
+                .then(function (response) {
+                    if (response.ok) {
+                        response.json()
+                            .then(function (data) {
+                                var change_input_fields = function () {
+                                    hideSpinner()
+                                    cleanErrors()
+
+                                    var oldType = state.uploadDataSetType
+                                    if (oldType != null && !(oldType in state.uploadDataSetTypes)) {
+                                        state.uploadDataSetTypes[oldType] = {}
+                                    }
+
+                                    state.uploadDataSetType = dataset_types.options[dataset_types.selectedIndex].value
+
+                                    // remove existing input fields
+                                    while (input_fields.firstChild) {
+                                        var element = input_fields.firstChild
+
+                                        if (element.nodeName === "INPUT" && state.uploadDataSetType != null) {
+                                            state.uploadDataSetTypes[oldType][element.name] = element.value
+                                        }
+
+                                        input_fields.removeChild(element)
+                                    }
+
+                                    // for every property assignment, create an input field.
+                                    for (const pa of dts[dataset_types.selectedIndex].propertyAssignments) {
+                                        var input_title = document.createElement("STRONG")
+                                        input_title.textContent = pa.mandatory ? pa.label + " (mandatory)" : pa.label
+                                        var input_error = createErrorElement('prop.'+pa.code)
+
+                                        var input_field = document.createElement("INPUT")
+                                        input_field.type = "text"
+                                        input_field.name = pa.code
+                                        input_field.placeholder = pa.description ? pa.description : pa.label
+                                        input_field.size = 90
+                                        input_field.style.width="100%"
+
+                                        var mem = state.uploadDataSetTypes[dts[dataset_types.selectedIndex].code]
+                                        if (mem == null) {
+                                            mem = {}
+                                        }
+                                        input_field.value = pa.code in mem ? mem[pa.code] : ""
+
+                                        input_fields.appendChild(input_title)
+                                        input_fields.appendChild(input_error)
+                                        input_fields.appendChild(input_field)
+                                        input_fields.appendChild(document.createElement("BR"))
+                                    }
+                                }
+                                dataset_types.onchange = change_input_fields
+
+                                // remove the old and add the new dataset-types
+                                var dts = data.dataSetTypes
+                                while (dataset_types.firstChild) {
+                                    dataset_types.removeChild(dataset_types.firstChild);
+                                }
+                                var index = 0
+                                var selectedIndex = -1
+                                for (const dt of dts) {
+                                    var option = document.createElement("OPTION")
+                                    option.value = dt.code
+                                    option.textContent = dt.description ? dt.code + ": " + dt.description : dt.code
+                                    dataset_types.appendChild(option)
+
+                                    if (dt.code === state.uploadDataSetType) {
+                                        selectedIndex = index
+                                    }
+                                    index++
+                                }
+
+                                dataset_types.selectedIndex = selectedIndex === -1 ? 0 : selectedIndex
+                                // change the input fields, since we just received new datasetTypes
+                                change_input_fields()
+
+                            })
+                            .catch(function (error) {
+                                console.error("Error while parsing dataset types", error)
+                            })
+
+                    } else {
+                        while (dataset_types.firstChild) {
+                            dataset_types.removeChild(dataset_types.firstChild);
+                        }
+                    }
+                })
+                .catch(function (error) {
+                    console.error("Error while fetching dataset types:", error)
+                })
+        }
+
+        function createSelectTable(values, container, checked, overrides) {
+
+            var table = document.createElement("TABLE")
+            table.className = 'table-bordered table-striped table-condensed'
+            table.style.width = "100%"
+            
+            var body = table.createTBody()
+
+            var checkboxes = []
+            values.forEach( value => {
+                var row = body.insertRow()
+                var checkbox = document.createElement("INPUT")
+                checkbox.type = "checkbox"
+                checkbox.value = value
+                checkbox.checked = overrides.includes(value) ? !checked :  checked
+                checkboxes.push(checkbox)
+                row.insertCell().appendChild(checkbox)
+                var valueCell = row.insertCell()
+                valueCell.textContent = value
+                valueCell.style.width = "100%"
+            })
+            container.appendChild(table)
+
+            return checkboxes
+        }
+
+        return {
+            help: 'upload Notebook and Data to openBIS',
+            icon: 'fa-upload',
+            help_index: '',
+            handler: function (env) {
+
+                var main_error = createErrorElement('main')
+
+                var dst_title = document.createElement("STRONG")
+                dst_title.textContent = "DataSet type"
+                var dataset_types = document.createElement("SELECT")
+                dataset_types.id = "dataset_type"
+                dataset_types.className = "form-control select-xs"
+                dataset_types.style.marginLeft = 0
+                dataset_types.style.padding = 0
+
+                var input_fields = document.createElement("DIV")
+                input_fields.setAttribute("id", "upload-input-fields");
+
+                getDatasetTypes(env, state.connection.name, dataset_types, input_fields)
+
+                var sample_title = document.createElement("STRONG")
+                sample_title.textContent = "Sample or Experiment identifier/permId"
+
+                var sample_error = createErrorElement('entityIdentifier')
+
+                var entityIdentifier = document.createElement("INPUT")
+                entityIdentifier.type = "text"
+                entityIdentifier.name = 'entityIdentifier'
+                entityIdentifier.placeholder = "Sample or Experiment identifier/permId"
+                entityIdentifier.value = state.uploadEntityIdentifier
+                entityIdentifier.size = "90"
+                entityIdentifier.style.width="100%"
+
+                var ds_title = document.createElement("STRONG")
+                var dataSetListContainer = document.createElement("DIV")
+                if (env.notebook.metadata.datasets) {
+                    ds_title.textContent = "DataSets"
+                    dataSetListContainer.style.maxHeight="150px"
+                    dataSetListContainer.style.overflow="auto"
+                    get_dataset_list(env, dataSetListContainer)
+                }
+
+                var files_title = document.createElement("STRONG")
+                files_title.textContent = "Files"
+                var fileListContainer = document.createElement("DIV")
+                fileListContainer.style.maxHeight="150px"
+                fileListContainer.style.overflow="auto"
+                get_file_list(env, fileListContainer)
+                
+                var inputs = document.createElement("DIV")
+                inputs.style.marginTop = '10px'
+                inputs.appendChild(main_error)
+                inputs.appendChild(spinner)
+                inputs.appendChild(document.createElement("BR"))
+                inputs.appendChild(dst_title)
+                inputs.appendChild(dataset_types)
+                inputs.appendChild(input_fields)
+                inputs.appendChild(sample_title)
+                inputs.appendChild(sample_error)
+                inputs.appendChild(entityIdentifier)
+                inputs.appendChild(ds_title)
+                inputs.appendChild(dataSetListContainer)
+                inputs.appendChild(files_title)
+                inputs.appendChild(fileListContainer)
+
+                var uploadDialogBox = $('<div/>').append(inputs)
+
+                function saveState() {
+                    state.uploadDataSetTypes[state.uploadDataSetType] = {}
+                    for (element of input_fields.children) {
+                        if (element.nodeName === "INPUT" && state.uploadDataSetType != null) {
+                            state.uploadDataSetTypes[state.uploadDataSetType][element.name] = element.value
+                        }
+                    }
+                    state.uploadEntityIdentifier = entityIdentifier.value
+                    state.unselectedDatasets = state.datasetCheckboxes.filter(cb => !cb.checked).map(cb => cb.value)
+                    state.selectedFiles = state.fileCheckboxes.filter(cb => cb.checked).map(cb => cb.value)
+                }
+
+                function onOk() {
+                    var connection_name = state.connection.name
+
+                    if (!connection_name) {
+                        alert("No connection selected")
+                        return false
+                    }
+
+                    var uploadUrl = env.notebook.base_url + 'openbis/dataset/' + connection_name
+
+                    var notebook = IPython.notebook
+                    var files = state.fileCheckboxes.filter(cb => cb.checked).map(cb => cb.value)
+                    var re = /\/notebooks\/(.*?)$/
+                    var filepath = window.location.pathname.match(re)[1]
+                    files.push(filepath)
+                    
+                    var props = {}
+                    for (input of $('#upload-input-fields').find('input')) {
+                        props[input.name] = input.value
+                    }
+
+                    var dataSetInfo = {
+                        "type": dataset_types.value,
+                        "files": files,
+                        "parents": state.datasetCheckboxes.filter(cb => cb.checked).map(cb => cb.value),
+                        "entityIdentifier": entityIdentifier.value,
+                        "props": props
+                    }
+
+                    var settings = {
+                        url: uploadUrl,
+                        processData: false,
+                        type: 'POST',
+                        dataType: 'json',
+                        data: JSON.stringify(dataSetInfo),
+                        contentType: 'application/json',
+                        success: function (data) {
+                            saveState()
+                            $('div.modal').remove()
+                            $('div.modal-backdrop').remove()
+                            common.createFeedback('success', data.statusText)
+
+                            // write statusText from returned data to notebooks metadata
+                            if (typeof notebook.metadata.openbis === 'undefined') {
+                                notebook.metadata.openbis = {}
+                            }
+                            if (typeof notebook.metadata.openbis.permIds === 'undefined') {
+                                notebook.metadata.openbis.permIds = {}
+                            }
+                            if (data.permId) {
+                                notebook.metadata.openbis.permIds[data.permId] = data.statusText
+                            }
+                        },
+                        error: function (data) {
+                            hideSpinner()
+
+                            if ("errors" in data.responseJSON) {
+                                var errors = data.responseJSON.errors
+                                for (error of errors) {
+                                    let key, value
+                                    Object.keys(error).forEach(k => {
+                                        key = k.toLowerCase()
+                                        value = error[k]
+                                    })
+                                    errorElements[key in errorElements ? key : "main"].textContent = value
+                                }
+                            } else {
+                                errorElements["main"].textContent = "Server error"
+                            }
+                        }
+                    }
+
+                    showSpinner()
+                    cleanErrors()
+                    utils.ajax(settings)
+                    return false
+                }
+
+                function onCancel() {
+                    saveState()
+                    return true
+                }
+
+                if (IPython.notebook.dirty === true) {
+                    dialog.modal({
+                        body: 'Please save the notebook before uploading it to openBIS.',
+                        title: 'Save notebook first',
+                        buttons: {
+                            'Back': {}
+                        },
+                        notebook: env.notebook,
+                        keyboard_manager: env.notebook.keyboard_manager
+                    })
+                } else {
+                    dialog.modal({
+                        body: uploadDialogBox,
+                        title: 'Upload openBIS DataSet',
+                        buttons: {
+                            'Cancel': {
+                                click: onCancel
+                            },
+                            'Upload': {
+                                class: 'btn-primary btn-large',
+                                click: onOk
+                            }
+                        },
+                        notebook: env.notebook,
+                        keyboard_manager: env.notebook.keyboard_manager
+                    })
+                }
+            }
+        }
+    }
+)
\ No newline at end of file
diff --git a/openbis-connections.yaml b/openbis-connections.yaml
index 4e02d7ff96ecd04da4ee3b7195f6a75e416c719d..754cd1c4c91dfe88e34e94d6368a2e36304705da 100644
--- a/openbis-connections.yaml
+++ b/openbis-connections.yaml
@@ -1,11 +1,16 @@
 connections:
-    - name                : local test openBIS instance
-      url                 : https://localhost:8443
-      verify_certificates : false
-      username            : username
-      password            : password
-    - name                : productive openBIS instance
-      url                 : https://openbis.example.com
-      verify_certificates : true
-      username            : username
-      password            : password
+  - name: local test openBIS instance
+    url: https://localhost:8443
+    verify_certificates: false
+    username: username
+    password: password
+  - name: vagrant openBIS instance
+    url: https://localhost:8122
+    verify_certificates: false
+    username: admin 
+    password: password
+  - name: productive openBIS instance
+    url: https://openbis.example.com
+    verify_certificates: true
+    username: username
+    password: password
diff --git a/setup.py b/setup.py
index a5e664ee2e3e71a72be2717010f07dc62a93b0f9..868ef0f5592d7c9c6358eda5499eade1d846e9ff 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,3 @@
-import os
 import sys
 
 if sys.version_info < (3,3):
@@ -12,7 +11,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
 
 setup(
     name='jupyter-openbis-extension',
-    version= '0.0.2',
+    version= '0.1.1',
     author='Swen Vermeul |  ID SIS | ETH Zürich',
     author_email='swen@ethz.ch',
     description='Extension for Jupyter notebooks to connect to openBIS and download/upload datasets, inluding the notebook itself',
@@ -24,11 +23,13 @@ setup(
     install_requires=[
         'jupyter-nbextensions-configurator',
         'jupyter',
-        'pybis',
+        'pybis>=1.8.4',
+        'numpy',
     ],
     python_requires=">=3.3",
     classifiers=[
         "Programming Language :: Python :: 3.3",
+        "Programming Language :: JavaScript",
         "License :: OSI Approved :: Apache Software License",
         "Operating System :: OS Independent",
     ],
@@ -36,15 +37,21 @@ setup(
     data_files=[
         # like `jupyter nbextension install --sys-prefix`
         ("share/jupyter/nbextensions/jupyter-openbis-extension", [
-            "jupyter-openbis-extension/static/dialog.js",
+            "jupyter-openbis-extension/static/main.js",
+            "jupyter-openbis-extension/static/state.js",
+            "jupyter-openbis-extension/static/common.js",
+            "jupyter-openbis-extension/static/connectionDialog.js",
+            "jupyter-openbis-extension/static/connections.js",
+            "jupyter-openbis-extension/static/downloadDialog.js",
+            "jupyter-openbis-extension/static/uploadDialog.js",
         ]),
         # like `jupyter nbextension enable --sys-prefix`
         ("etc/jupyter/nbconfig/notebook.d", [
-            "jupyter-config/nbconfig/notebook.d/jupyter-openbis-extension.json"
+            "jupyter-config/nbconfig/notebook.d/jupyter_openbis_extension.json"
         ]),
         # like `jupyter serverextension enable --sys-prefix`
         ("etc/jupyter/jupyter_notebook_config.d", [
-            "jupyter-config/jupyter_notebook_config.d/jupyter-openbis-extension.json"
+            "jupyter-config/jupyter_notebook_config.d/jupyter_openbis_extension.json"
         ])
     ],
     zip_safe=False,
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..c6c014d480956b6cd7275b4603d45228531b2323
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,67 @@
+import pytest
+import time
+import random
+
+from pybis import Openbis
+
+openbis_url = 'https://localhost:8443'
+admin_username = 'admin'
+admin_password = 'changeit'
+
+@pytest.yield_fixture(scope="module")
+def url():
+    yield openbis_url
+
+@pytest.yield_fixture(scope="module")
+def username():
+    yield admin_username
+
+@pytest.yield_fixture(scope="module")
+def password():
+    yield admin_password
+
+@pytest.yield_fixture(scope="module")
+def openbis_instance():
+    instance = Openbis(url=openbis_url, verify_certificates=False)
+    instance.login(admin_username, admin_password)
+    print("\nLOGGING IN...")
+    print(instance.is_session_active())
+
+    timestamp = time.strftime('%a_%y%m%d_%H%M%S').upper()
+    space_code = 'test_space_' + timestamp
+    space = instance.new_space(code=space_code)
+    space.save()
+
+    project_code = "TEST-PROJECT-{:04d}".format(random.randint(0, 9999))
+    project = instance.new_project(code=project_code, space=space)
+    project.save()
+
+    experiment_code = "TEST-EXPERIMENT-{:04d}".format(random.randint(0, 9999))
+    experiment = instance.new_experiment(
+        code=experiment_code,
+        type='DEFAULT_EXPERIMENT',
+        project=project,
+    )
+    experiment.save()
+
+    sample_code = "TEST-SAMPLE-{:04d}".format(random.randint(0, 9999))
+    sample = instance.new_sample(
+        code=sample_code, 
+        type='UNKNOWN',
+        space=space, 
+        experiment=experiment,
+    )
+    sample.save()
+
+    instance.login(admin_username, admin_password)
+
+    yield instance
+
+    # cleanup after tests have been running
+    sample.delete("test on {}".format(timestamp))
+    experiment.delete("test on {}".format(timestamp))
+    project.delete("test on {}".format(timestamp))
+    space.delete("test on {}".format(timestamp))
+    instance.logout()
+    print("LOGGED OUT...")
+
diff --git a/tests/test_connection.py b/tests/test_connection.py
new file mode 100644
index 0000000000000000000000000000000000000000..035f4c7973cf505763a3bd2cf6769f78d1d2e941
--- /dev/null
+++ b/tests/test_connection.py
@@ -0,0 +1,51 @@
+import pytest
+
+import importlib  
+js = importlib.import_module("jupyter-openbis-extension")
+
+
+def test_conn(url, username, password):
+
+    conn = js.connection.OpenBISConnection(
+        name = "test-conn",
+        url  = url,
+        username = username,
+        password = password,
+        verify_certificates = False,
+    )
+
+    info = conn.get_info()
+    assert isinstance(info, dict)
+    assert info['name'] == 'test-conn'
+    assert info['url']  == url
+    assert info['username'] == username
+    assert info['password'] == password
+
+    assert conn.status == 'not connected'
+    try:
+        conn.login()
+    except Exception:
+        pass
+    assert conn.status == 'connected'
+    assert conn.is_session_active() == True
+
+
+def test_conn_login(url, username, password):
+
+    conn = js.connection.OpenBISConnection(
+        name = "test-conn",
+        url  = url,
+        verify_certificates = False,
+    )
+
+    assert conn.status == 'not connected'
+    try:
+        conn.login(username, password)
+    except Exception:
+        pass
+
+    assert conn.status == "connected"
+
+def test_dataset(openbis_instance):
+    pass
+
diff --git a/todos/Jupyter-OpenBIS-extension todos.md b/todos/Jupyter-OpenBIS-extension todos.md
index 7b51fc65b31337372178efbd013399a1251b6b70..3e3ad02ec84f6e35837f415fb8de3c001fc43385 100644
--- a/todos/Jupyter-OpenBIS-extension todos.md	
+++ b/todos/Jupyter-OpenBIS-extension todos.md	
@@ -17,9 +17,10 @@
 
 
 ## 3) Uploading datasets
+   
    - let the upload dialog box stay in place until the upload was successful
-   - if metadata is faulty, tell the user to correct them (e.g. invalid vocabulary)
-   - show which metadata is mandatory and which are optional
+   - if metadata is faulty, tell the user to correct them (e.g. invalid vocabulary). This involves the backend to check as much as possible.
+   - show which metadata is mandatory and which are optional. Mandatory metadata is marked as such in the propertyAssignments that are fetched from the backend (e.g. `"mandatory": true`)
    - if dataset type is changed back and forth, keep the values that already have been entered
    - keep choice of current dataset type
 
diff --git a/vagrant/Vagrantfile b/vagrant/Vagrantfile
new file mode 100644
index 0000000000000000000000000000000000000000..b21d6f4e6bf9ebc4cf8ecda3d33c32897b9aedf2
--- /dev/null
+++ b/vagrant/Vagrantfile
@@ -0,0 +1,28 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+Vagrant.configure("2") do |config|
+  config.vm.box = "ubuntu/bionic64"
+  config.vm.box_version = "20180531.0.0"
+
+  config.vm.provider "virtualbox" do |v|
+    v.memory = 4096
+    v.cpus = 4
+    v.name = "jupyter-openbis-extension"
+  end
+
+  # Needs to be unique among VMs
+  config.notify_forwarder.port = 27282
+
+  config.vm.synced_folder "..", "/home/vagrant/jupyter-openbis-extension"
+  config.vm.synced_folder '.', '/vagrant', disabled: true
+  config.vm.synced_folder "files", "/files", :mount_options => ["ro"]
+  config.vm.synced_folder "shared", "/shared"
+
+  config.vm.network "forwarded_port", guest: 8122, host: 8122, host_ip: "0.0.0.0"
+  config.vm.network "forwarded_port", guest: 8123, host: 8123, host_ip: "0.0.0.0"
+  config.vm.network "forwarded_port", guest: 8888, host: 8888, host_ip: "0.0.0.0"
+
+  config.vm.provision :shell, path: "bootstrap.sh"
+  config.vm.provision :shell, path: "files/start-services.sh", run: "always", privileged: true
+end
diff --git a/vagrant/bootstrap.sh b/vagrant/bootstrap.sh
new file mode 100644
index 0000000000000000000000000000000000000000..64620765b3dc52e51c25bb8de94bc42f0df97988
--- /dev/null
+++ b/vagrant/bootstrap.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+sudo locale-gen en_US.UTF-8
+
+echo "
+LC_ALL=en_US.UTF-8
+LANG=en_US.UTF-8
+" >> /etc/environment
+
+apt update
+apt install -y python3 python3-pip openjdk-8-jdk postgresql unzip
+
+cp /files/pg_hba.conf /etc/postgresql/10/main/pg_hba.conf 
+service postgresql restart
+sleep 10 # let the db engine start
+
+sudo -E -u postgres -H -i /files/setup-postgres.sh
+sudo -E -u vagrant -H -i /files/setup-vagrant.sh
diff --git a/vagrant/files/console.properties b/vagrant/files/console.properties
new file mode 100644
index 0000000000000000000000000000000000000000..faa327b56b38bcb8352771ef160acba48fddf193
--- /dev/null
+++ b/vagrant/files/console.properties
@@ -0,0 +1,62 @@
+#
+# The path where openBIS will be installed.
+# 
+# Example : 
+#     INSTALL_PATH=/home/openbis/
+#
+#   will result in the following directory structure
+#
+#   + /home/openbis
+#     +  bin/
+#     +  servers/
+#        + core-plugins/
+#        + openBIS-server/
+#        + datastore_server/
+# 
+INSTALL_PATH=/home/vagrant/openbis
+
+#
+# The path where openBIS will keep the imported data (e.g. images, analysis files)
+# and its incoming folders. 
+#
+DSS_ROOT_DIR=/home/vagrant/dss_root
+
+# Possible configuration options
+#   'local' - if the openBIS servers will only be accessed from this machine
+#   'server' - if the installation is meant to be accessible for remote users
+INSTALLATION_TYPE=local
+
+# Path to the file which should replace the current Java key store file
+#KEY_STORE_FILE = <path to key store>
+
+# Password of the key store
+KEY_STORE_PASSWORD = changeit
+
+# Password of the key
+KEY_PASSWORD = changeit
+
+# Standard technology PROTEOMICS is disabled by default
+#PROTEOMICS = true
+
+# Standard technology SCREENING is disabled by default
+#SCREENING = true
+
+# Standard technology ILLUMINA-NGS (ETH BSSE Setup) is disabled by default
+#ILLUMINA-NGS = true
+
+# Standard technology ELN-LIMS is disabled by default
+#ELN-LIMS = true
+
+# Standard technology MICROSCOPY is disabled by default
+#MICROSCOPY = true
+
+# Standard technology FLOW CYTOMETRY is disabled by default
+#FLOW = true
+
+# Full ELN/LIMS master data is enabled by default. This setting is meaningful only if ELN-LIMS is enabled
+ELN-LIMS-MASTER-DATA = false
+
+#
+# Comma-separated list of databases to backup. If the list is empty or undefined all databases
+# will be backed up.
+#DATABASES_TO_BACKUP =
\ No newline at end of file
diff --git a/vagrant/files/jupyter_notebook_config.py b/vagrant/files/jupyter_notebook_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..e137e1661539d2436a296e0332a2219a90ec8fd1
--- /dev/null
+++ b/vagrant/files/jupyter_notebook_config.py
@@ -0,0 +1,765 @@
+# Configuration file for jupyter-notebook.
+
+# ------------------------------------------------------------------------------
+# Application(SingletonConfigurable) configuration
+# ------------------------------------------------------------------------------
+
+# This is an application.
+
+# The date format used by logging formatters for %(asctime)s
+#c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S'
+
+# The Logging format template
+#c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s'
+
+# Set the log level by value or name.
+#c.Application.log_level = 30
+
+# ------------------------------------------------------------------------------
+# JupyterApp(Application) configuration
+# ------------------------------------------------------------------------------
+
+# Base class for Jupyter applications
+
+# Answer yes to any prompts.
+#c.JupyterApp.answer_yes = False
+
+# Full path of a config file.
+#c.JupyterApp.config_file = ''
+
+# Specify a config file to load.
+#c.JupyterApp.config_file_name = ''
+
+# Generate default config file.
+#c.JupyterApp.generate_config = False
+
+# ------------------------------------------------------------------------------
+# NotebookApp(JupyterApp) configuration
+# ------------------------------------------------------------------------------
+
+# Set the Access-Control-Allow-Credentials: true header
+#c.NotebookApp.allow_credentials = False
+
+# Set the Access-Control-Allow-Origin header
+#
+#  Use '*' to allow any origin to access your server.
+#
+#  Takes precedence over allow_origin_pat.
+c.NotebookApp.allow_origin = '*'
+
+# Use a regular expression for the Access-Control-Allow-Origin header
+#
+#  Requests from an origin matching the expression will get replies with:
+#
+#      Access-Control-Allow-Origin: origin
+#
+#  where `origin` is the origin of the request.
+#
+#  Ignored if allow_origin is set.
+#c.NotebookApp.allow_origin_pat = ''
+
+# Allow password to be changed at login for the notebook server.
+#
+#  While logging in with a token, the notebook server UI will give the opportunity
+#  to the user to enter a new password at the same time that will replace the
+#  token login mechanism.
+#
+#  This can be set to false to prevent changing password from the UI/API.
+#c.NotebookApp.allow_password_change = True
+
+# Allow requests where the Host header doesn't point to a local server
+#
+#  By default, requests get a 403 forbidden response if the 'Host' header shows
+#  that the browser thinks it's on a non-local domain. Setting this option to
+#  True disables this check.
+#
+#  This protects against 'DNS rebinding' attacks, where a remote web server
+#  serves you a page and then changes its DNS to send later requests to a local
+#  IP, bypassing same-origin checks.
+#
+#  Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, along
+#  with hostnames configured in local_hostnames.
+c.NotebookApp.allow_remote_access = True
+
+# Whether to allow the user to run the notebook as root.
+#c.NotebookApp.allow_root = False
+
+# DEPRECATED use base_url
+#c.NotebookApp.base_project_url = '/'
+
+# The base URL for the notebook server.
+#
+#  Leading and trailing slashes can be omitted, and will automatically be added.
+#c.NotebookApp.base_url = '/'
+
+# Specify what command to use to invoke a web browser when opening the notebook.
+#  If not specified, the default browser will be determined by the `webbrowser`
+#  standard library module, which allows setting of the BROWSER environment
+#  variable to override it.
+#c.NotebookApp.browser = ''
+
+# The full path to an SSL/TLS certificate file.
+#c.NotebookApp.certfile = ''
+
+# The full path to a certificate authority certificate for SSL/TLS client
+#  authentication.
+#c.NotebookApp.client_ca = ''
+
+# The config manager class to use
+#c.NotebookApp.config_manager_class = 'notebook.services.config.manager.ConfigManager'
+
+# The notebook manager class to use.
+#c.NotebookApp.contents_manager_class = 'notebook.services.contents.largefilemanager.LargeFileManager'
+
+# Extra keyword arguments to pass to `set_secure_cookie`. See tornado's
+#  set_secure_cookie docs for details.
+#c.NotebookApp.cookie_options = {}
+
+# The random bytes used to secure cookies. By default this is a new random
+#  number every time you start the Notebook. Set it to a value in a config file
+#  to enable logins to persist across server sessions.
+#
+#  Note: Cookie secrets should be kept private, do not share config files with
+#  cookie_secret stored in plaintext (you can read the value from a file).
+#c.NotebookApp.cookie_secret = b''
+
+# The file where the cookie secret is stored.
+#c.NotebookApp.cookie_secret_file = ''
+
+# Override URL shown to users.
+#
+#  Replace actual URL, including protocol, address, port and base URL, with the
+#  given value when displaying URL to the users. Do not change the actual
+#  connection URL. If authentication token is enabled, the token is added to the
+#  custom URL automatically.
+#
+#  This option is intended to be used when the URL to display to the user cannot
+#  be determined reliably by the Jupyter notebook server (proxified or
+#  containerized setups for example).
+#c.NotebookApp.custom_display_url = ''
+
+# The default URL to redirect to from `/`
+#c.NotebookApp.default_url = '/tree'
+
+# Disable cross-site-request-forgery protection
+#
+#  Jupyter notebook 4.3.1 introduces protection from cross-site request
+#  forgeries, requiring API requests to either:
+#
+#  - originate from pages served by this server (validated with XSRF cookie and
+#  token), or - authenticate with a token
+#
+#  Some anonymous compute resources still desire the ability to run code,
+#  completely without authentication. These services can disable all
+#  authentication and security checks, with the full knowledge of what that
+#  implies.
+c.NotebookApp.disable_check_xsrf = True 
+
+# Whether to enable MathJax for typesetting math/TeX
+#
+#  MathJax is the javascript library Jupyter uses to render math/LaTeX. It is
+#  very large, so you may want to disable it if you have a slow internet
+#  connection, or for offline use of the notebook.
+#
+#  When disabled, equations etc. will appear as their untransformed TeX source.
+#c.NotebookApp.enable_mathjax = True
+
+# extra paths to look for Javascript notebook extensions
+#c.NotebookApp.extra_nbextensions_path = []
+
+# handlers that should be loaded at higher priority than the default services
+#c.NotebookApp.extra_services = []
+
+# Extra paths to search for serving static files.
+#
+#  This allows adding javascript/css to be available from the notebook server
+#  machine, or overriding individual files in the IPython
+#c.NotebookApp.extra_static_paths = []
+
+# Extra paths to search for serving jinja templates.
+#
+#  Can be used to override templates from notebook.templates.
+#c.NotebookApp.extra_template_paths = []
+
+##
+#c.NotebookApp.file_to_run = ''
+
+# Extra keyword arguments to pass to `get_secure_cookie`. See tornado's
+#  get_secure_cookie docs for details.
+#c.NotebookApp.get_secure_cookie_kwargs = {}
+
+# Deprecated: Use minified JS file or not, mainly use during dev to avoid JS
+#  recompilation
+#c.NotebookApp.ignore_minified_js = False
+
+# (bytes/sec) Maximum rate at which stream output can be sent on iopub before
+#  they are limited.
+#c.NotebookApp.iopub_data_rate_limit = 1000000
+
+# (msgs/sec) Maximum rate at which messages can be sent on iopub before they are
+#  limited.
+#c.NotebookApp.iopub_msg_rate_limit = 1000
+
+# The IP address the notebook server will listen on.
+c.NotebookApp.ip = '0.0.0.0'
+
+# Supply extra arguments that will be passed to Jinja environment.
+#c.NotebookApp.jinja_environment_options = {}
+
+# Extra variables to supply to jinja templates when rendering.
+#c.NotebookApp.jinja_template_vars = {}
+
+# The kernel manager class to use.
+#c.NotebookApp.kernel_manager_class = 'notebook.services.kernels.kernelmanager.MappingKernelManager'
+
+# The kernel spec manager class to use. Should be a subclass of
+#  `jupyter_client.kernelspec.KernelSpecManager`.
+#
+#  The Api of KernelSpecManager is provisional and might change without warning
+#  between this version of Jupyter and the next stable one.
+#c.NotebookApp.kernel_spec_manager_class = 'jupyter_client.kernelspec.KernelSpecManager'
+
+# The full path to a private key file for usage with SSL/TLS.
+#c.NotebookApp.keyfile = ''
+
+# Hostnames to allow as local when allow_remote_access is False.
+#
+#  Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted as
+#  local as well.
+#c.NotebookApp.local_hostnames = ['localhost']
+
+# The login handler class to use.
+#c.NotebookApp.login_handler_class = 'notebook.auth.login.LoginHandler'
+
+# The logout handler class to use.
+#c.NotebookApp.logout_handler_class = 'notebook.auth.logout.LogoutHandler'
+
+# The MathJax.js configuration file that is to be used.
+#c.NotebookApp.mathjax_config = 'TeX-AMS-MML_HTMLorMML-full,Safe'
+
+# A custom url for MathJax.js. Should be in the form of a case-sensitive url to
+#  MathJax, for example:  /static/components/MathJax/MathJax.js
+#c.NotebookApp.mathjax_url = ''
+
+# Sets the maximum allowed size of the client request body, specified in  the
+#  Content-Length request header field. If the size in a request  exceeds the
+#  configured value, a malformed HTTP message is returned to the client.
+#
+#  Note: max_body_size is applied even in streaming mode.
+#c.NotebookApp.max_body_size = 536870912
+
+# Gets or sets the maximum amount of memory, in bytes, that is allocated  for
+#  use by the buffer manager.
+#c.NotebookApp.max_buffer_size = 536870912
+
+# Dict of Python modules to load as notebook server extensions.Entry values can
+#  be used to enable and disable the loading ofthe extensions. The extensions
+#  will be loaded in alphabetical order.
+#c.NotebookApp.nbserver_extensions = {}
+
+# The directory to use for notebooks and kernels.
+#c.NotebookApp.notebook_dir = ''
+
+# Whether to open in a browser after starting. The specific browser used is
+#  platform dependent and determined by the python standard library `webbrowser`
+#  module, unless it is overridden using the --browser (NotebookApp.browser)
+#  configuration option.
+c.NotebookApp.open_browser = False
+
+# Hashed password to use for web authentication.
+#
+#  To generate, type in a python/IPython shell:
+#
+#    from notebook.auth import passwd; passwd()
+#
+#  The string should be of the form type:salt:hashed-password.
+#c.NotebookApp.password = ''
+
+# Forces users to use a password for the Notebook server. This is useful in a
+#  multi user environment, for instance when everybody in the LAN can access each
+#  other's machine through ssh.
+#
+#  In such a case, serving the notebook server on localhost is not secure since
+#  any user can connect to the notebook server via ssh.
+c.NotebookApp.password_required = False
+
+# The port the notebook server will listen on.
+#c.NotebookApp.port = 8888
+
+# The number of additional ports to try if the specified port is not available.
+#c.NotebookApp.port_retries = 50
+
+# DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
+#c.NotebookApp.pylab = 'disabled'
+
+# If True, display a button in the dashboard to quit (shutdown the notebook
+#  server).
+#c.NotebookApp.quit_button = True
+
+# (sec) Time window used to  check the message and data rate limits.
+#c.NotebookApp.rate_limit_window = 3
+
+# Reraise exceptions encountered loading server extensions?
+#c.NotebookApp.reraise_server_extension_failures = False
+
+# DEPRECATED use the nbserver_extensions dict instead
+#c.NotebookApp.server_extensions = []
+
+# The session manager class to use.
+#c.NotebookApp.session_manager_class = 'notebook.services.sessions.sessionmanager.SessionManager'
+
+# Shut down the server after N seconds with no kernels or terminals running and
+#  no activity. This can be used together with culling idle kernels
+#  (MappingKernelManager.cull_idle_timeout) to shutdown the notebook server when
+#  it's not in use. This is not precisely timed: it may shut down up to a minute
+#  later. 0 (the default) disables this automatic shutdown.
+#c.NotebookApp.shutdown_no_activity_timeout = 0
+
+# Supply SSL options for the tornado HTTPServer. See the tornado docs for
+#  details.
+#c.NotebookApp.ssl_options = {}
+
+# Supply overrides for terminado. Currently only supports "shell_command".
+#c.NotebookApp.terminado_settings = {}
+
+# Set to False to disable terminals.
+#
+#  This does *not* make the notebook server more secure by itself. Anything the
+#  user can do in a terminal, they can also do in a notebook.
+#
+#  Terminals may also be automatically disabled if the terminado package is not
+#  available.
+#c.NotebookApp.terminals_enabled = True
+
+# Token used for authenticating first-time connections to the server.
+#
+#  When no password is enabled, the default is to generate a new, random token.
+#
+#  Setting to an empty string disables authentication altogether, which is NOT
+#  RECOMMENDED.
+c.NotebookApp.token = ''
+
+# Supply overrides for the tornado.web.Application that the Jupyter notebook
+#  uses.
+#c.NotebookApp.tornado_settings = {}
+
+# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
+#  For headers sent by the upstream reverse proxy. Necessary if the proxy handles
+#  SSL
+#c.NotebookApp.trust_xheaders = False
+
+# DEPRECATED, use tornado_settings
+#c.NotebookApp.webapp_settings = {}
+
+# Specify Where to open the notebook on startup. This is the `new` argument
+#  passed to the standard library method `webbrowser.open`. The behaviour is not
+#  guaranteed, but depends on browser support. Valid values are:
+#
+#   - 2 opens a new tab,
+#   - 1 opens a new window,
+#   - 0 opens in an existing window.
+#
+#  See the `webbrowser.open` documentation for details.
+#c.NotebookApp.webbrowser_open_new = 2
+
+# Set the tornado compression options for websocket connections.
+#
+#  This value will be returned from
+#  :meth:`WebSocketHandler.get_compression_options`. None (default) will disable
+#  compression. A dict (even an empty one) will enable compression.
+#
+#  See the tornado docs for WebSocketHandler.get_compression_options for details.
+#c.NotebookApp.websocket_compression_options = None
+
+# The base URL for websockets, if it differs from the HTTP server (hint: it
+#  almost certainly doesn't).
+#
+#  Should be in the form of an HTTP origin: ws[s]://hostname[:port]
+#c.NotebookApp.websocket_url = ''
+
+# ------------------------------------------------------------------------------
+# ConnectionFileMixin(LoggingConfigurable) configuration
+# ------------------------------------------------------------------------------
+
+# Mixin for configurable classes that work with connection files
+
+# JSON file in which to store connection info [default: kernel-<pid>.json]
+#
+#  This file will contain the IP, ports, and authentication key needed to connect
+#  clients to this kernel. By default, this file will be created in the security
+#  dir of the current profile, but can be specified by absolute path.
+#c.ConnectionFileMixin.connection_file = ''
+
+# set the control (ROUTER) port [default: random]
+#c.ConnectionFileMixin.control_port = 0
+
+# set the heartbeat port [default: random]
+#c.ConnectionFileMixin.hb_port = 0
+
+# set the iopub (PUB) port [default: random]
+#c.ConnectionFileMixin.iopub_port = 0
+
+# Set the kernel's IP address [default localhost]. If the IP address is
+#  something other than localhost, then Consoles on other machines will be able
+#  to connect to the Kernel, so be careful!
+#c.ConnectionFileMixin.ip = ''
+
+# set the shell (ROUTER) port [default: random]
+#c.ConnectionFileMixin.shell_port = 0
+
+# set the stdin (ROUTER) port [default: random]
+#c.ConnectionFileMixin.stdin_port = 0
+
+##
+#c.ConnectionFileMixin.transport = 'tcp'
+
+# ------------------------------------------------------------------------------
+# KernelManager(ConnectionFileMixin) configuration
+# ------------------------------------------------------------------------------
+
+# Manages a single kernel in a subprocess on this host.
+#
+#  This version starts kernels with Popen.
+
+# Should we autorestart the kernel if it dies.
+#c.KernelManager.autorestart = True
+
+# DEPRECATED: Use kernel_name instead.
+#
+#  The Popen Command to launch the kernel. Override this if you have a custom
+#  kernel. If kernel_cmd is specified in a configuration file, Jupyter does not
+#  pass any arguments to the kernel, because it cannot make any assumptions about
+#  the arguments that the kernel understands. In particular, this means that the
+#  kernel does not receive the option --debug if it given on the Jupyter command
+#  line.
+#c.KernelManager.kernel_cmd = []
+
+# Time to wait for a kernel to terminate before killing it, in seconds.
+#c.KernelManager.shutdown_wait_time = 5.0
+
+# ------------------------------------------------------------------------------
+# Session(Configurable) configuration
+# ------------------------------------------------------------------------------
+
+# Object for handling serialization and sending of messages.
+#
+#  The Session object handles building messages and sending them with ZMQ sockets
+#  or ZMQStream objects.  Objects can communicate with each other over the
+#  network via Session objects, and only need to work with the dict-based IPython
+#  message spec. The Session will handle serialization/deserialization, security,
+#  and metadata.
+#
+#  Sessions support configurable serialization via packer/unpacker traits, and
+#  signing with HMAC digests via the key/keyfile traits.
+#
+#  Parameters ----------
+#
+#  debug : bool
+#      whether to trigger extra debugging statements
+#  packer/unpacker : str : 'json', 'pickle' or import_string
+#      importstrings for methods to serialize message parts.  If just
+#      'json' or 'pickle', predefined JSON and pickle packers will be used.
+#      Otherwise, the entire importstring must be used.
+#
+#      The functions must accept at least valid JSON input, and output *bytes*.
+#
+#      For example, to use msgpack:
+#      packer = 'msgpack.packb', unpacker='msgpack.unpackb'
+#  pack/unpack : callables
+#      You can also set the pack/unpack callables for serialization directly.
+#  session : bytes
+#      the ID of this Session object.  The default is to generate a new UUID.
+#  username : unicode
+#      username added to message headers.  The default is to ask the OS.
+#  key : bytes
+#      The key used to initialize an HMAC signature.  If unset, messages
+#      will not be signed or checked.
+#  keyfile : filepath
+#      The file containing a key.  If this is set, `key` will be initialized
+#      to the contents of the file.
+
+# Threshold (in bytes) beyond which an object's buffer should be extracted to
+#  avoid pickling.
+#c.Session.buffer_threshold = 1024
+
+# Whether to check PID to protect against calls after fork.
+#
+#  This check can be disabled if fork-safety is handled elsewhere.
+#c.Session.check_pid = True
+
+# Threshold (in bytes) beyond which a buffer should be sent without copying.
+#c.Session.copy_threshold = 65536
+
+# Debug output in the Session
+#c.Session.debug = False
+
+# The maximum number of digests to remember.
+#
+#  The digest history will be culled when it exceeds this value.
+#c.Session.digest_history_size = 65536
+
+# The maximum number of items for a container to be introspected for custom
+#  serialization. Containers larger than this are pickled outright.
+#c.Session.item_threshold = 64
+
+# execution key, for signing messages.
+#c.Session.key = b''
+
+# path to file containing execution key.
+#c.Session.keyfile = ''
+
+# Metadata dictionary, which serves as the default top-level metadata dict for
+#  each message.
+#c.Session.metadata = {}
+
+# The name of the packer for serializing messages. Should be one of 'json',
+#  'pickle', or an import name for a custom callable serializer.
+#c.Session.packer = 'json'
+
+# The UUID identifying this session.
+#c.Session.session = ''
+
+# The digest scheme used to construct the message signatures. Must have the form
+#  'hmac-HASH'.
+#c.Session.signature_scheme = 'hmac-sha256'
+
+# The name of the unpacker for unserializing messages. Only used with custom
+#  functions for `packer`.
+#c.Session.unpacker = 'json'
+
+# Username for the Session. Default is your system username.
+#c.Session.username = 'vagrant'
+
+# ------------------------------------------------------------------------------
+# MultiKernelManager(LoggingConfigurable) configuration
+# ------------------------------------------------------------------------------
+
+# A class for managing multiple kernels.
+
+# The name of the default kernel to start
+#c.MultiKernelManager.default_kernel_name = 'python3'
+
+# The kernel manager class.  This is configurable to allow subclassing of the
+#  KernelManager for customized behavior.
+#c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager'
+
+# ------------------------------------------------------------------------------
+# MappingKernelManager(MultiKernelManager) configuration
+# ------------------------------------------------------------------------------
+
+# A KernelManager that handles notebook mapping and HTTP error handling
+
+# Whether messages from kernels whose frontends have disconnected should be
+#  buffered in-memory.
+#
+#  When True (default), messages are buffered and replayed on reconnect, avoiding
+#  lost messages due to interrupted connectivity.
+#
+#  Disable if long-running kernels will produce too much output while no
+#  frontends are connected.
+#c.MappingKernelManager.buffer_offline_messages = True
+
+# Whether to consider culling kernels which are busy. Only effective if
+#  cull_idle_timeout > 0.
+#c.MappingKernelManager.cull_busy = False
+
+# Whether to consider culling kernels which have one or more connections. Only
+#  effective if cull_idle_timeout > 0.
+#c.MappingKernelManager.cull_connected = False
+
+# Timeout (in seconds) after which a kernel is considered idle and ready to be
+#  culled. Values of 0 or lower disable culling. Very short timeouts may result
+#  in kernels being culled for users with poor network connections.
+#c.MappingKernelManager.cull_idle_timeout = 0
+
+# The interval (in seconds) on which to check for idle kernels exceeding the
+#  cull timeout value.
+#c.MappingKernelManager.cull_interval = 300
+
+# Timeout for giving up on a kernel (in seconds).
+#
+#  On starting and restarting kernels, we check whether the kernel is running and
+#  responsive by sending kernel_info_requests. This sets the timeout in seconds
+#  for how long the kernel can take before being presumed dead.  This affects the
+#  MappingKernelManager (which handles kernel restarts)  and the
+#  ZMQChannelsHandler (which handles the startup).
+#c.MappingKernelManager.kernel_info_timeout = 60
+
+##
+#c.MappingKernelManager.root_dir = ''
+
+# ------------------------------------------------------------------------------
+# ContentsManager(LoggingConfigurable) configuration
+# ------------------------------------------------------------------------------
+
+# Base class for serving files and directories.
+#
+#  This serves any text or binary file, as well as directories, with special
+#  handling for JSON notebook documents.
+#
+#  Most APIs take a path argument, which is always an API-style unicode path, and
+#  always refers to a directory.
+#
+#  - unicode, not url-escaped
+#  - '/'-separated
+#  - leading and trailing '/' will be stripped
+#  - if unspecified, path defaults to '',
+#    indicating the root path.
+
+# Allow access to hidden files
+#c.ContentsManager.allow_hidden = False
+
+##
+#c.ContentsManager.checkpoints = None
+
+##
+#c.ContentsManager.checkpoints_class = 'notebook.services.contents.checkpoints.Checkpoints'
+
+##
+#c.ContentsManager.checkpoints_kwargs = {}
+
+# handler class to use when serving raw file requests.
+#
+#  Default is a fallback that talks to the ContentsManager API, which may be
+#  inefficient, especially for large files.
+#
+#  Local files-based ContentsManagers can use a StaticFileHandler subclass, which
+#  will be much more efficient.
+#
+#  Access to these files should be Authenticated.
+#c.ContentsManager.files_handler_class = 'notebook.files.handlers.FilesHandler'
+
+# Extra parameters to pass to files_handler_class.
+#
+#  For example, StaticFileHandlers generally expect a `path` argument specifying
+#  the root directory from which to serve files.
+#c.ContentsManager.files_handler_params = {}
+
+# Glob patterns to hide in file and directory listings.
+#c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~']
+
+# Python callable or importstring thereof
+#
+#  To be called on a contents model prior to save.
+#
+#  This can be used to process the structure, such as removing notebook outputs
+#  or other side effects that should not be saved.
+#
+#  It will be called as (all arguments passed by keyword)::
+#
+#      hook(path=path, model=model, contents_manager=self)
+#
+#  - model: the model to be saved. Includes file contents.
+#    Modifying this dict will affect the file that is stored.
+#  - path: the API path of the save destination
+#  - contents_manager: this ContentsManager instance
+#c.ContentsManager.pre_save_hook = None
+
+##
+#c.ContentsManager.root_dir = '/'
+
+# The base name used when creating untitled directories.
+#c.ContentsManager.untitled_directory = 'Untitled Folder'
+
+# The base name used when creating untitled files.
+#c.ContentsManager.untitled_file = 'untitled'
+
+# The base name used when creating untitled notebooks.
+#c.ContentsManager.untitled_notebook = 'Untitled'
+
+# ------------------------------------------------------------------------------
+# FileManagerMixin(Configurable) configuration
+# ------------------------------------------------------------------------------
+
+# Mixin for ContentsAPI classes that interact with the filesystem.
+#
+#  Provides facilities for reading, writing, and copying both notebooks and
+#  generic files.
+#
+#  Shared by FileContentsManager and FileCheckpoints.
+#
+#  Note ---- Classes using this mixin must provide the following attributes:
+#
+#  root_dir : unicode
+#      A directory against which API-style paths are to be resolved.
+#
+#  log : logging.Logger
+
+# By default notebooks are saved on disk to a temporary file and then, if
+#  successfully written, it replaces the old one. This procedure, namely
+#  'atomic_writing', causes some bugs on file systems without operation order
+#  enforcement (like some networked fs). If set to False, the new notebook is
+#  written directly over the old one, which could fail (eg: full filesystem or
+#  quota)
+#c.FileManagerMixin.use_atomic_writing = True
+
+# ------------------------------------------------------------------------------
+# FileContentsManager(FileManagerMixin,ContentsManager) configuration
+# ------------------------------------------------------------------------------
+
+# If True (default), deleting files will send them to the platform's
+#  trash/recycle bin, where they can be recovered. If False, deleting files
+#  really deletes them.
+#c.FileContentsManager.delete_to_trash = True
+
+# Python callable or importstring thereof
+#
+#  to be called on the path of a file just saved.
+#
+#  This can be used to process the file on disk, such as converting the notebook
+#  to a script or HTML via nbconvert.
+#
+#  It will be called as (all arguments passed by keyword)::
+#
+#      hook(os_path=os_path, model=model, contents_manager=instance)
+#
+#  - path: the filesystem path to the file just written - model: the model
+#  representing the file - contents_manager: this ContentsManager instance
+#c.FileContentsManager.post_save_hook = None
+
+##
+#c.FileContentsManager.root_dir = ''
+
+# DEPRECATED, use post_save_hook. Will be removed in Notebook 5.0
+#c.FileContentsManager.save_script = False
+
+# ------------------------------------------------------------------------------
+# NotebookNotary(LoggingConfigurable) configuration
+# ------------------------------------------------------------------------------
+
+# A class for computing and verifying notebook signatures.
+
+# The hashing algorithm used to sign notebooks.
+#c.NotebookNotary.algorithm = 'sha256'
+
+# The sqlite file in which to store notebook signatures. By default, this will
+#  be in your Jupyter data directory. You can set it to ':memory:' to disable
+#  sqlite writing to the filesystem.
+#c.NotebookNotary.db_file = ''
+
+# The secret key with which notebooks are signed.
+#c.NotebookNotary.secret = b''
+
+# The file where the secret key is stored.
+#c.NotebookNotary.secret_file = ''
+
+# A callable returning the storage backend for notebook signatures. The default
+#  uses an SQLite database.
+#c.NotebookNotary.store_factory = traitlets.Undefined
+
+# ------------------------------------------------------------------------------
+# KernelSpecManager(LoggingConfigurable) configuration
+# ------------------------------------------------------------------------------
+
+# If there is no Python kernelspec registered and the IPython kernel is
+#  available, ensure it is added to the spec list.
+#c.KernelSpecManager.ensure_native_kernel = True
+
+# The kernel spec class.  This is configurable to allow subclassing of the
+#  KernelSpecManager for customized behavior.
+#c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec'
+
+# Whitelist of allowed kernel names.
+#
+#  By default, all installed kernels are allowed.
+#c.KernelSpecManager.whitelist = set()
diff --git a/vagrant/files/pg_hba.conf b/vagrant/files/pg_hba.conf
new file mode 100644
index 0000000000000000000000000000000000000000..5383ee2765aad917cb2c909e743f2a9cf8b335e3
--- /dev/null
+++ b/vagrant/files/pg_hba.conf
@@ -0,0 +1,99 @@
+# PostgreSQL Client Authentication Configuration File
+# ===================================================
+#
+# Refer to the "Client Authentication" section in the PostgreSQL
+# documentation for a complete description of this file.  A short
+# synopsis follows.
+#
+# This file controls: which hosts are allowed to connect, how clients
+# are authenticated, which PostgreSQL user names they can use, which
+# databases they can access.  Records take one of these forms:
+#
+# local      DATABASE  USER  METHOD  [OPTIONS]
+# host       DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
+# hostssl    DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
+# hostnossl  DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
+#
+# (The uppercase items must be replaced by actual values.)
+#
+# The first field is the connection type: "local" is a Unix-domain
+# socket, "host" is either a plain or SSL-encrypted TCP/IP socket,
+# "hostssl" is an SSL-encrypted TCP/IP socket, and "hostnossl" is a
+# plain TCP/IP socket.
+#
+# DATABASE can be "all", "sameuser", "samerole", "replication", a
+# database name, or a comma-separated list thereof. The "all"
+# keyword does not match "replication". Access to replication
+# must be enabled in a separate record (see example below).
+#
+# USER can be "all", a user name, a group name prefixed with "+", or a
+# comma-separated list thereof.  In both the DATABASE and USER fields
+# you can also write a file name prefixed with "@" to include names
+# from a separate file.
+#
+# ADDRESS specifies the set of hosts the record matches.  It can be a
+# host name, or it is made up of an IP address and a CIDR mask that is
+# an integer (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that
+# specifies the number of significant bits in the mask.  A host name
+# that starts with a dot (.) matches a suffix of the actual host name.
+# Alternatively, you can write an IP address and netmask in separate
+# columns to specify the set of hosts.  Instead of a CIDR-address, you
+# can write "samehost" to match any of the server's own IP addresses,
+# or "samenet" to match any address in any subnet that the server is
+# directly connected to.
+#
+# METHOD can be "trust", "reject", "md5", "password", "scram-sha-256",
+# "gss", "sspi", "ident", "peer", "pam", "ldap", "radius" or "cert".
+# Note that "password" sends passwords in clear text; "md5" or
+# "scram-sha-256" are preferred since they send encrypted passwords.
+#
+# OPTIONS are a set of options for the authentication in the format
+# NAME=VALUE.  The available options depend on the different
+# authentication methods -- refer to the "Client Authentication"
+# section in the documentation for a list of which options are
+# available for which authentication methods.
+#
+# Database and user names containing spaces, commas, quotes and other
+# special characters must be quoted.  Quoting one of the keywords
+# "all", "sameuser", "samerole" or "replication" makes the name lose
+# its special character, and just match a database or username with
+# that name.
+#
+# This file is read on server startup and when the server receives a
+# SIGHUP signal.  If you edit the file on a running system, you have to
+# SIGHUP the server for the changes to take effect, run "pg_ctl reload",
+# or execute "SELECT pg_reload_conf()".
+#
+# Put your actual configuration here
+# ----------------------------------
+#
+# If you want to allow non-local connections, you need to add more
+# "host" records.  In that case you will also need to make PostgreSQL
+# listen on a non-local interface via the listen_addresses
+# configuration parameter, or via the -i or -h command line switches.
+
+
+
+
+# DO NOT DISABLE!
+# If you change this first entry you will need to make sure that the
+# database superuser can access the database using some other method.
+# Noninteractive access to all databases is required during automatic
+# maintenance (custom daily cronjobs, replication, and similar tasks).
+#
+# Database administrative login by Unix domain socket
+#local   all             postgres                                peer
+
+# TYPE  DATABASE        USER            ADDRESS                 METHOD
+
+# "local" is for Unix domain socket connections only
+local   all             all                                     trust
+# IPv4 local connections:
+host    all             all             127.0.0.1/32            trust
+# IPv6 local connections (NOTE(review): md5 here vs. trust for IPv4 above -- confirm the asymmetry is intended):
+host    all             all             ::1/128                 md5
+# Allow replication connections from localhost, by a user with the
+# replication privilege.
+#local   replication     all                                     peer
+#host    replication     all             127.0.0.1/32            md5
+#host    replication     all             ::1/128                 md5
\ No newline at end of file
diff --git a/vagrant/files/setup-postgres.sh b/vagrant/files/setup-postgres.sh
new file mode 100755
index 0000000000000000000000000000000000000000..5e9f9f54c71da14349f5f60cbdca240d55f6b2fa
--- /dev/null
+++ b/vagrant/files/setup-postgres.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# Create the PostgreSQL role used by the openBIS installation.
+set -euo pipefail
+
+echo " --- setup-postgres.sh ---"
+
+# Only create the 'vagrant' role if it does not exist yet, so this
+# provisioning step is safe to re-run.
+psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='vagrant'" | grep -q 1 || createuser vagrant
diff --git a/vagrant/files/setup-vagrant.sh b/vagrant/files/setup-vagrant.sh
new file mode 100755
index 0000000000000000000000000000000000000000..37f43424f1e23ec148ac762a35d731a415bd1115
--- /dev/null
+++ b/vagrant/files/setup-vagrant.sh
@@ -0,0 +1,38 @@
+echo " --- setup-vagrant.sh ---"
+
+rm -rf ~/jupyter-openbis-extension/venv
+rm -rf ~/jupyter-openbis-extension/jupyter_openbis_extension.egg-info
+rm -rf ~/jupyter-openbis-extension/.ipynb_checkpoints
+
+build="http://stage-jenkins.ethz.ch:8090/job/installation-18.06/lastSuccessfulBuild"
+path=$(curl -s "$build/api/xml?xpath=//relativePath"|sed -e "s/<relativePath>//"|sed -e "s/<\/relativePath>//")
+wget -q $build/artifact/$path
+archive=$(basename $path)
+tar xvfz $archive
+directory=$(echo "$archive" | cut -f 1 -d '.')
+cp /files/console.properties $directory
+export ADMIN_PASSWORD='password'
+$directory/run-console.sh
+
+sed -i "/jetty.ssl.port=/ s/=.*/=8122/" /home/vagrant/openbis/servers/openBIS-server/jetty/start.d/ssl.ini
+sed -i "/host-address =/ s/=.*/= https:\/\/localhost/" /home/vagrant/openbis/servers/datastore_server/etc/service.properties
+sed -i "/port =/ s/=.*/= 8123/" /home/vagrant/openbis/servers/datastore_server/etc/service.properties
+sed -i "/server-url =/ s/=.*/= \${host-address}:8122/" /home/vagrant/openbis/servers/datastore_server/etc/service.properties
+
+rm -rf ~/openBIS-installation-*
+
+mkdir ~/.jupyter
+cp /files/jupyter_notebook_config.py ~/.jupyter
+
+cd jupyter-openbis-extension
+pip3 install -e .
+export PATH=$PATH:/home/vagrant/.local/bin
+pip3 install --upgrade pybis
+pip3 install 'tornado==5.1.1' --force-reinstall 
+jupyter serverextension enable --py jupyter-openbis-extension
+jupyter nbextension install --py jupyter-openbis-extension --user --symlink
+jupyter nbextension enable jupyter-openbis-extension --user --py
+cd
+
+mkdir -p /home/vagrant/dss_root/incoming-default
+for i in $(seq 1 100); do touch /home/vagrant/dss_root/incoming-default/dataset${i}.txt; done
diff --git a/vagrant/files/start-services-vagrant.sh b/vagrant/files/start-services-vagrant.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9bcd30ff0584281f51a4c098b51a0664a0da9c39
--- /dev/null
+++ b/vagrant/files/start-services-vagrant.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+# Start the openBIS servers and a Jupyter notebook inside a detached
+# screen session, so the services keep running after provisioning ends.
+echo " --- start-services-vagrant.sh ---"
+/home/vagrant/openbis/bin/allup.sh
+
+# -Adm: adapt window size and start detached; the trailing 'bash' keeps
+# the screen window open for inspection if the notebook server exits.
+screen -S dev -t jupyter -Adm bash -c "
+  cd ~/jupyter-openbis-extension;
+  jupyter notebook;
+  bash;
+"
diff --git a/vagrant/files/start-services.sh b/vagrant/files/start-services.sh
new file mode 100755
index 0000000000000000000000000000000000000000..4fd56076ced3de09010ec899d8129d1b1b503621
--- /dev/null
+++ b/vagrant/files/start-services.sh
@@ -0,0 +1,2 @@
+echo " --- start-services.sh ---"
+runuser -l vagrant bash -c /files/start-services-vagrant.sh
diff --git a/vagrant/shared/.gitignore b/vagrant/shared/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..c96a04f008ee21e260b28f7701595ed59e2839e3
--- /dev/null
+++ b/vagrant/shared/.gitignore
@@ -0,0 +1,2 @@
+*
+!.gitignore
\ No newline at end of file