diff --git a/core-plugin-openbis/query-api.gradle b/core-plugin-openbis/query-api.gradle
index c46b86aea6715f297d2714511ebda956c43cfb64..15e679884de971d4f9324b259b396c9d64ef4110 100644
--- a/core-plugin-openbis/query-api.gradle
+++ b/core-plugin-openbis/query-api.gradle
@@ -268,6 +268,7 @@ task dropboxApiJar(type: Jar) {
     from project(':server-original-data-store').compileJava.outputs.getFiles().getAsFileTree().matching {
         include "ch/systemsx/cisd/etlserver/registrator/api/v2/*.class"
         include "ch/systemsx/cisd/etlserver/registrator/api/v2/impl/*.class"
+        include "ch/systemsx/cisd/openbis/dss/generic/shared/api/**/*.class"
     }
 }
 
@@ -286,6 +287,7 @@ task dropboxApiJavadoc(type: Javadoc) {
     source project(':server-original-data-store').files('source/java').getAsFileTree().matching {
         include "ch/systemsx/cisd/etlserver/registrator/api/v2/*.java"
         include "ch/systemsx/cisd/etlserver/registrator/api/v2/impl/*.java"
+        include "ch/systemsx/cisd/openbis/dss/generic/shared/api/**/*.class"
     }
     classpath = configurations.dropboxApiJavadoc
 }
diff --git a/docs/software-developer-documentation/apis/java-javascript-v3-api.md b/docs/software-developer-documentation/apis/java-javascript-v3-api.md
index d871d05ca2f92c87b8ef38ecfd751b25b58ea0e5..132d3e709075077ef58c7752a130b7f47e8b42da 100644
--- a/docs/software-developer-documentation/apis/java-javascript-v3-api.md
+++ b/docs/software-developer-documentation/apis/java-javascript-v3-api.md
@@ -1,7 +1,6 @@
-openBIS V3 API
-==============
+# Java / Javascript (V3 API) - openBIS V3 API
 
-### I. Architecture
+## I. Architecture
 
 Open BIS consists of two main components: an Application Server and one
 or more Data Store Servers. The Application Server manages the system’s
@@ -9,7 +8,7 @@ meta data, while the Data Store Server(s) manage the file store(s). Each
 Data Store Server manages its own file store. Here we will refer to the
 Application Server as the "AS" and the Data Store Server as the "DSS."
 
-#### One AS, one or more DSS
+### One AS, one or more DSS
 
 Why is there only one Application Server but multiple Data Store
 Servers? It is possible to have only one Data Store Server, but in a
@@ -25,7 +24,7 @@ system. 
 
 ![image info](img/139.png)
 
-#### The Java API
+### The Java API
 
 The Java V3 API consists of two interfaces:
 
@@ -39,16 +38,16 @@ All V3 API jars are packed in openBIS-API-V3-<VERSION>.zip which
 is part of openBIS-clients-and-APIs-<VERSION>.zip (the latest
 version can be downloaded at [Sprint Releases](#) > Clients and APIs)
 
-#### The Javascript API
+### The Javascript API
 
 The Javascript V3 API consists of a module hosted at
 <OPENBIS\_URL>/resources/api/v3/openbis.js, for instance
 <http://localhost/openbis>/ resources/api/v3/openbis.js. Please check
 the openbis.js file itself for more details.
 
-### II. API Features
+## II. API Features
 
-#### Current Features - AS
+### Current Features - AS
 
 The current implementation of the V3 openBIS API contains the following
 features:
@@ -75,12 +74,12 @@ features:
 -   Queries: create/update/get/search/delete/execute queries
 -   Generating codes/permids
 
-#### Current Features - DSS
+### Current Features - DSS
 
 -   Search data set files
 -   Download data set files
 
-#### Missing/Planned Features
+### Missing/Planned Features
 
 The current implementation of the V3 openBIS API does not yet include
 the following features:
@@ -91,7 +90,7 @@ the following features:
 -   Update features: Updating datasets share id, size, status, storage
     confirmation, post registration status
 
-### III. Accessing the API 
+## III. Accessing the API 
 
 In order to use V3 API you have to know the url of an openBIS instance
 you want to connect to. Moreover, before calling any of the API methods
@@ -153,7 +152,7 @@ Connecting in Java
 
     }
 
-#### Connecting in Javascript
+### Connecting in Javascript
 
 We have put a lot of effort to make the use of the API in Javascript and
 Java almost identical. The DTOs which are a big part of the API are
@@ -213,7 +212,7 @@ be conceptually consistent.
 
   
 
-###   IV. AS Methods  
+## IV. AS Methods
 
 The sections below describe how to use different methods of the V3 API.
 Each section describes a group of similar methods. For instance, we have
@@ -239,7 +238,7 @@ page template to make them shorter and more readable. Please
 check "Accessing the API" section for examples on how to get a reference
 to V3 API, authenticate or build a simple html page.
 
-#### Login
+### Login
 
 OpenBIS provides the following login methods:
 
@@ -254,7 +253,7 @@ All login methods return a session token if the provided parameters were
 correct. In case a given user does not exist or the provided password
 was incorrect the login methods return null.
 
-##### Example
+#### Example
 
 **V3LoginExample.java**
 
@@ -300,7 +299,7 @@ was incorrect the login methods return null.
         });
     </script>
 
-#### Personal Access Tokens
+### Personal Access Tokens
 
 A personal access token (in short: PAT) can be thought of as a longer
 lived session token which can be used for integrating openBIS with
@@ -346,12 +345,12 @@ Example of how to create and use a PAT:
         }
     }
 
-#### Session Information
+### Session Information
 
 OpenBIS provides a method to obtain the session information for an
 already log in user:
 
-##### Example
+#### Example
 
 **V3CreationExample.java**
 
@@ -370,7 +369,7 @@ already log in user:
         }
     }
 
-#### Creating entities
+### Creating entities
 
 The methods for creating entities in V3 API are called: createSpaces,
 createProjects, createExperiments, createSamples, createMaterials,
@@ -384,7 +383,7 @@ NOTE: Creating data sets via V3 API is not available yet. The new V3
 dropboxes are planned but not implemented yet. Please use V2 dropboxes
 until V3 version is out.
 
-##### Example
+#### Example
 
 **V3CreationExample.java**
 
@@ -434,7 +433,7 @@ until V3 version is out.
         });
     </script>
 
-##### Properties example
+#### Properties example
 
 **V3CreationWithPropertiesExample.java**
 
@@ -497,7 +496,7 @@ until V3 version is out.
         });
     </script>
 
-##### Different ids example
+#### Different ids example
 
 **V3CreationWithDifferentIdsExample.java**
 
@@ -553,7 +552,7 @@ until V3 version is out.
             });
     </script>
 
-##### Parent child example
+#### Parent child example
 
 The following example creates parent and child samples for a sample type
 which allow automatic code generation:
@@ -611,7 +610,7 @@ which allow automatic code generation:
             });
     </script>
 
-#### Updating entities
+### Updating entities
 
 The methods for updating entities in V3 API are called: updateSpaces,
 updateProjects, updateExperiments, updateSamples, updateDataSets,
@@ -626,7 +625,7 @@ updated. Please note that some of the entity fields cannot be changed
 once an entity is created, for instance sample code becomes immutable
 after creation.
 
-##### Example
+#### Example
 
 **V3UpdateExample.java**
 
@@ -672,7 +671,7 @@ after creation.
             });
     </script>
 
-##### Properties example
+#### Properties example
 
 **V3UpdateWithPropertiesExample.java**
 
@@ -727,7 +726,7 @@ after creation.
         });
     </script>
 
-##### Parents example
+#### Parents example
 
 **V3UpdateWithParentsExample.java**
 
@@ -797,7 +796,7 @@ after creation.
             });
     </script>
 
-#### Getting authorization rights for entities
+### Getting authorization rights for entities
 
 If the user isn't allowed to create or update an entity an exception is
 thrown. But often a client application wants to know in advance whether
@@ -814,7 +813,7 @@ with `new ExperimentIdentifier("/MY-SPACE/PROJECT1/DUMMY")` would return
 rights containing `CREATE` if the user is allowed to create an
 experiment in the project `/MY-SPACE/PROJECT1`.
 
-#### Freezing entities
+### Freezing entities
 
 An entity (Space, Project, Experiment, Sample, Data Set) can be frozen.
 There are two types of frozen: *Core* and *surface*. A frozen core means
@@ -838,7 +837,7 @@ freezing event.
 The following tables show all freezing possibilities and what is actual
 frozen.
 
-##### Space
+#### Space
 
 |Freezing method|Description|
 |--- |--- |
@@ -847,7 +846,7 @@ The description can not be set or changed.|
 |freezeForProjects|Same as freeze() plus no projects can be added to or removed from the specified space.|
 |freezeForSamples|Same as freeze() plus no samples can be added to or removed from the specified space.|
 
-##### Project
+#### Project
 
 |Freezing method|Description|
 |--- |--- |
@@ -857,7 +856,7 @@ No attachments can be added or removed.|
 |freezeForExperiments|Same as freeze() plus no experiments can be added to or removed from the specified project.|
 |freezeForSamples|Same as freeze() plus no samples can be added to or removed from the specified project.|
 
-##### Experiment
+#### Experiment
 
 |Freezing method|Description|
 |--- |--- |
@@ -867,7 +866,7 @@ No attachments can be added or removed.|
 |freezeForSamples|Same as freeze() plus no samples can be added to or removed from the specified experiment.|
 |freezeForDataSets|Same as freeze() plus no data sets can be added to or removed from the specified experiment.|
 
-##### Sample
+#### Sample
 
 |Freezing method|Description|
 |--- |--- |
@@ -879,7 +878,7 @@ No attachments can be added or removed.|
 |freezeForParents|Same as freeze() plus no parent samples can be added to or removed from the specified sample.|
 |freezeForDataSets|Same as freeze() plus no data sets can be added to or removed from the specified sample.|
 
-##### Data Set
+#### Data Set
 
 |Freezing method|Description|
 |--- |--- |
@@ -891,7 +890,7 @@ Content copies can be still added or removed for frozen link data sets.|
 |freezeForComponents|Same as freeze() plus no component data sets can be added to or removed from the specified data set.|
 |freezeForContainers|Same as freeze() plus no container data sets can be added to or removed from the specified data set.|
 
-#### Searching entities
+### Searching entities
 
 The methods for searching entities in V3 API are called: `searchSpaces`,
 `searchProjects`, `searchExperiments`, `searchSamples`,
@@ -937,7 +936,7 @@ Results can be sorted ascending or descending. Sorting by multiple
 fields is also possible (e.g. first sort by type and then by
 identifier). A code example on how to use sorting is presented below.
 
-##### Example
+#### Example
 
 **V3SearchExample.java**
 
@@ -997,7 +996,7 @@ identifier). A code example on how to use sorting is presented below.
             });
     </script>
 
-##### Example with pagination and sorting
+#### Example with pagination and sorting
 
 **V3SearchWithPaginationAndSortingExample.java**
 
@@ -1062,7 +1061,7 @@ identifier). A code example on how to use sorting is presented below.
             });
     </script>
 
-#####  Example with OR operator
+#### Example with OR operator
 
 By default all specified search criteria have to be fulfilled. If only
 one criteria needs to be fulfilled use `criteria.withOrOperator()` as in
@@ -1126,7 +1125,7 @@ the following example:
             });
     </script>
 
-##### Example with nested logical operators
+#### Example with nested logical operators
 
 The following code finds samples with perm ID that ends with "6" AND
 (with code that contains "-" OR that starts with "C") AND (with
@@ -1195,7 +1194,7 @@ experiment OR of type whose code starts with "MASTER").
             });
     </script>
 
-##### Example with recursive fetch options
+#### Example with recursive fetch options
 
 In order to get all descendent/acsendents of a sample fetch options can
 be used recursively by
@@ -1287,7 +1286,7 @@ example:
         });
     </script>
 
-##### Global search
+#### Global search
 
 There are two kinds or global search:
 
@@ -1368,7 +1367,7 @@ of meta data (entity attribute or property). Example:
 
   
 
-#### Getting entities
+### Getting entities
 
 The methods for getting entities in V3 API are called: getSpaces,
 getProjects, getExperiments, getSamples, getDataSets, getMaterials,
@@ -1380,7 +1379,7 @@ ids. If no entity was found for a given id or entity exists but you
 don't have access to it then there is no entry for such an id in the
 returned map.
 
-##### Example
+#### Example
 
 **V3GetExample.java**
 
@@ -1445,7 +1444,7 @@ returned map.
             });
     </script>
 
-#### Deleting entities
+### Deleting entities
 
 The methods for deleting entities in V3 API are called: deleteSpaces,
 deleteProjects, deleteExperiments, deleteSamples, deleteDataSets,
@@ -1459,7 +1458,7 @@ confirming the logical deletion to remove the entities permanently or
 reverting the logical deletion to take the entities out from the trash
 can.
 
-##### Example
+#### Example
 
 **V3DeleteExample.java**
 
@@ -1518,7 +1517,7 @@ can.
             });
     </script>
 
-#### Searching entity types
+### Searching entity types
 
 The following search methods allows to search for entity types including
 all assigned property
@@ -1583,7 +1582,7 @@ sample types and assigned property types:
             });
     </script>
 
-#### Modifications
+### Modifications
 
 The API allows to ask for the latest modification (UPDATE or
 CREATE\_OR\_DELETE) for groups of objects of various kinds (see
@@ -1649,21 +1648,21 @@ project and sample update:
             });
     </script>
 
-#### Custom AS Services
+### Custom AS Services
 
 In order to extend openBIS API new custom services can be established by
 core plugins of type `services` (see [Custom Application Server
 Services](/pages/viewpage.action?pageId=80699473)). The API offers a
 method to search for a service and to execute a service.
 
-##### Search for custom services
+#### Search for custom services
 
 As with any other search method `searchCustomASServices()` needs a
 search criteria `CustomASServiceSearchCriteria` and fetch options
 `CustomASServiceFetchOptions`. The following example returns all
 available custom AS services.
 
-###### Example 
+##### Example 
 
 **V3SearchCustomASServicesExample.java**
 
@@ -1705,7 +1704,7 @@ available custom AS services.
             });
     </script>
 
-##### Execute a custom service
+#### Execute a custom service
 
 In order to execute a custom AS service its code is needed. In addition
 a set of key-value pairs can be provided. The key has to be a string
@@ -1718,7 +1717,7 @@ The result can be any object (again it has to be Java serializable in
 the Java case). In a Java client the result will usually be casted for
 further processing.
 
-###### Example 
+##### Example 
 
 **V3ExecuteCustomASServiceExample.java**
 
@@ -1753,7 +1752,7 @@ further processing.
             });
     </script>
 
-#### Archiving / unarchiving data sets
+### Archiving / unarchiving data sets
 
 The API provides the following methods for handling the data set
 archiving: archiveDataSets and unarchiveDataSets. Both methods schedule
@@ -1762,9 +1761,9 @@ archiveDataSets/unarchiveDataSets method call finishes the requested
 data sets are only scheduled for the archiving/unarchiving but are not
 in the archive/store yet.
 
-##### Archiving data sets  
+#### Archiving data sets  
 
-###### Example 
+##### Example 
 
 **V3ArchiveDataSetsExample.java**
 
@@ -1824,9 +1823,9 @@ in the archive/store yet.
         });
     </script>
 
-##### Unarchiving data sets
+#### Unarchiving data sets
 
-###### Example 
+##### Example 
 
 **V3UnarchiveDataSetsExample.java**
 
@@ -1876,7 +1875,7 @@ in the archive/store yet.
         });
     </script>
 
-####  Executing Operations 
+### Executing Operations
 
 The V3 API provides you with methods that allow you to create, update,
 get, search and delete entities, archive and unarchive datasets, execute
@@ -1948,7 +1947,7 @@ More details on each of these methods in presented in the sections
 below. Please note that all of the described methods are available in
 both Javascript and Java.
 
-##### Method executeOperations
+#### Method executeOperations
 
 This method can be used to execute one or many operations either
 synchronously or asynchronously. Operations are always executed in a
@@ -1962,7 +1961,7 @@ IApplicationServerApi.createSpaces method is represented by
 CreateSpacesOperation class, IApplicationServerApi.updateSpaces method
 by UpdateSpacesOperation class etc.
 
-###### **Asynchronous operation execution**
+##### **Asynchronous operation execution**
 
 An asynchronous executeOperations invocation only schedules operations
 for the execution and then immediately returns. Results of the scheduled
@@ -2054,7 +2053,7 @@ states:
             });
     </script>
 
-###### **Synchronous operation execution**
+##### **Synchronous operation execution**
 
 A synchronous executeOperations invocation immediately executes all the
 operations. Any exceptions thrown by the executed operations can be
@@ -2142,7 +2141,7 @@ states:
             });
     </script>
 
-###### **Notifications**
+##### **Notifications**
 
 The executeOperations method can notify about finished or failed
 operation executions. At the moment the only supported notification
@@ -2226,7 +2225,7 @@ For failed executions an email contains:
             });
     </script>
 
-##### Method getOperationExecutions / searchOperationExecutions
+#### Method getOperationExecutions / searchOperationExecutions
 
 Operation execution information can be fetched by an owner of an
 execution (i.e. a person that called executeOperations method) or an
@@ -2613,7 +2612,7 @@ related information are done with two separate V3 maintenance tasks
             });
     </script>
 
-#####  Method updateOperationExecutions / deleteOperationExecutions
+#### Method updateOperationExecutions / deleteOperationExecutions
 
 The updateOperationExecutions and deleteOperationExecutions methods can
 be used to explicitly delete some part of information or delete all the
@@ -2845,14 +2844,14 @@ availability time expires.
             });
     </script>
 
-##### Configuration
+#### Configuration
 
 Many aspects of the operation execution behavior can be configured via
 service.properties file.  
 More details on what exactly can be configured can be found in the file
 itself.
 
-#### Semantic Annotations 
+### Semantic Annotations 
 
 If terms like: semantic web, RDF, OWL are new to you, then it is highly
 recommended to read the following tutorial first:
@@ -2900,7 +2899,7 @@ methods and specify appropriate withType().withSemanticAnnotations()
 condition in SampleSearchCriteria or withSemanticAnnotations() condition
 in SampleTypeSearchCriteria. 
 
-#### Web App Settings
+### Web App Settings
 
 The web app settings functionality is a user specific key-value map
 where a user specific configuration can be stored. The settings are
@@ -3041,7 +3040,7 @@ user or by an instance admin.
             });
     </script>
 
-#### Imports
+### Imports
 
 The imports that are normally accesible via "Import" menu in the generic
 openBIS UI can be also used programatically from within a V3 custom AS
@@ -3222,7 +3221,7 @@ to import that file is presented below.
         sampleType = parameters.get("sampleType")
         return context.getImportService().createSamples(context.getSessionToken(), "importWebappUploadKey", sampleType, None, None, None, False, False, None);
 
-#### Generate identifiers
+### Generate identifiers
 
 V3 API provides 2 methods for generating unique identifiers:
 
@@ -3273,9 +3272,9 @@ V3 API provides 2 methods for generating unique identifiers:
             });
     </script>
 
-### V. DSS Methods
+## V. DSS Methods
 
-#### Search files
+### Search files
 
 The searchFiles method can be used to search for data set files at a
 single data store (Java version) or at multiple data stores at the same
@@ -3289,7 +3288,7 @@ When searching across multiple data stores the results from each data
 store are combined together and returned back as a single regular search
 result object as if it was returned by only one data store.
 
-##### Example 
+#### Example 
 
 **V3SearchDataSetFilesExample.java**
 
@@ -3380,7 +3379,7 @@ result object as if it was returned by only one data store.
             });
     </script>
 
-####  Downloading files, folders, and datasets
+### Downloading files, folders, and datasets
 
 Datasets that are created in Open BIS can be accessed by V3 API in a
 number of different ways. It's possible to download individual files,
@@ -3397,7 +3396,7 @@ The API provides two methods for downloading:
     used by a helper class to download files in parallel streams in
     chunks. It is based on the [SIS File Transfer Protocol](#).
 
-#### Simple Downloading
+### Simple Downloading
 
 By setting the DataSetFileDownloadOptions it's possible to change how
 data is downloaded - data can be downloaded file by file, or by folder,
@@ -3410,13 +3409,13 @@ already be inside Open BIS. It is necessary to know the dataset code at
 the very minimum. It is helpful to also know the file path to the file
 desired to download.
 
-##### Download a single file located inside a dataset
+#### Download a single file located inside a dataset
 
 Here is how to download a single file and print out the contents, when
 the dataset code and the file path are known. Here a search is not
 necessary since the file path and dataset code are known.
 
-###### A note about recursion
+##### A note about recursion
 
 Note that when only downloading one file, it is better to set the
 recursive flag to false in DataSetFileDownloadOptions, although it makes
@@ -3494,7 +3493,7 @@ the directory.
         }
     }
 
-##### Download a folder located inside a dataset
+#### Download a folder located inside a dataset
 
 The example below demonstrates how to download a folder and all its
 contents, when the dataset code and the folder path are known. The goal
@@ -3570,7 +3569,7 @@ the directory object.
         }
     }
 
-##### Search for a dataset and download all its contents, file by file
+#### Search for a dataset and download all its contents, file by file
 
 Here is an example that demonstrates how to search for datasets and
 download the contents file by file. Here recursion is not used - see
@@ -3671,7 +3670,7 @@ this example.
         }
     }
 
-##### Download a whole dataset recursively
+#### Download a whole dataset recursively
 
 Here is a simplified way to download a dataset. Instead of downloading
 files one by one, it is possible to download the entire dataset
@@ -3735,7 +3734,7 @@ DataSetFileDownloadOptions object.
         }
     }
 
-##### Search and list all the files inside a data store 
+#### Search and list all the files inside a data store 
 
 Here is an example that demonstrates how to list all the files in a data
 store. By simply leaving the following line as is:
@@ -3823,7 +3822,7 @@ the whole data store. 
         }
     } 
 
-#### Fast Downloading
+### Fast Downloading
 
 Fast downloading is based on the [SIS File Transfer Protocol](#) and
 library. Downloading is done in two steps:
@@ -3948,7 +3947,7 @@ Here is a complete example:
         }
     }
 
-##### What happens under the hood?
+#### What happens under the hood?
 
 The files to be downloaded are chunked into chunks of maximum size 1 MB.
 On the DSS a special web service (`FileTransferServerServlet`) provides
@@ -3973,7 +3972,7 @@ It is possible that the actual number of streams is zero if the server
 is currently too busy with downloading (that is, there is no free
 dowload stream available). The FastDownloader will retry it later.
 
-##### Customizing Fast Dowloading
+#### Customizing Fast Dowloading
 
 There are three ways to customizing the FastDownloader:
 
@@ -3991,7 +3990,7 @@ There are three ways to customizing the FastDownloader:
     three times. The first retry is a second later. For each following
     retry the waiting time is increases by the factor two.
 
-#### Register Data Sets
+### Register Data Sets
 
 To register datasets using the Java or JavaScript API use one of the
 following examples as a template.
@@ -4108,8 +4107,7 @@ Example (Javascript)**
     </body>
     </html>
 
-VI. Web application context
----------------------------
+## VI. Web application context
 
 When making web applications and embedding them into an openBIS tab on
 the core UI is often required to have information about the context
diff --git a/docs/software-developer-documentation/apis/matlab-v3-api.md b/docs/software-developer-documentation/apis/matlab-v3-api.md
index b9760c0cbbb5b1947fec6cf83bc6fab9291801a9..1b73f1381fcf1800092278ed3bfd686aae1391e0 100644
--- a/docs/software-developer-documentation/apis/matlab-v3-api.md
+++ b/docs/software-developer-documentation/apis/matlab-v3-api.md
@@ -1,4 +1,4 @@
-# How to access openBIS from MATLAB
+# MATLAB (V3 API) - How to access openBIS from MATLAB
 
 ## Preamble
 [openBIS](https://wiki-bsse.ethz.ch/display/bis/Home) is a research data management system developed by [ETH SIS](https://sis.id.ethz.ch/). Data stored in openBIS can be accessed directly via the web UI or programmatically using APIs. For example, [pyBIS](https://sissource.ethz.ch/sispub/openbis/tree/master/pybis) is a project that provides a Python 3 module for interacting with openBIS. 
diff --git a/docs/software-developer-documentation/apis/personal-access-tokens.md b/docs/software-developer-documentation/apis/personal-access-tokens.md
index 8bed1565929836a23b4d3b52708eec6bd8bda82f..f4668de5741f46f0dc30e026dd0ff216d0a662f5 100644
--- a/docs/software-developer-documentation/apis/personal-access-tokens.md
+++ b/docs/software-developer-documentation/apis/personal-access-tokens.md
@@ -1,7 +1,6 @@
-Personal Access Tokens
-======================
+# Personal Access Tokens
 
-#### Background
+## Background
 
 "Personal access token" (in short: PAT) is an openBIS feature that was
 introduced to simplify integration of openBIS with other systems. Such
@@ -33,7 +32,7 @@ Depending on a use case and a type of the integration that could cause
 smaller or bigger headaches for the developers of the external system.
 Fortunately, "Personal access tokens" come to a rescue.
 
-#### What are "Personal access tokens" ?
+## What are "Personal access tokens"?
 
 A personal access token (in short: PAT) is very similar to a session
 token but there are also some important differences.
@@ -67,12 +66,12 @@ Differences:
     transition period from one soon to be expired PAT to a new PAT that
     replaces it without losing the session's state
 
-#### Who can create a "Personal access token" ?
+## Who can create a "Personal access token"?
 
 Any openBIS user can manage its own PATs. Instance admin users can
 manage all PATs in the system.
 
-#### Where can I use "Personal access tokens" ?
+## Where can I use "Personal access tokens"?
 
 Endpoints that support PATs:
 
@@ -99,7 +98,7 @@ DSS:
 -   Session Workspace Provider
 -   SFTP
 
-#### Where "Personal access tokens" are stored ?
+## Where "Personal access tokens" are stored ?
 
 PATs are stored in "personal-access-tokens.json" JSON file. By default
 the file is located in the main openBIS folder where it survives openBIS
@@ -109,7 +108,7 @@ The location can be changed using "personal-access-tokens-file-path"
 property in AS service.properties. The JSON file is read at the openBIS
 start up.
 
-#### How long should my "Personal Access Tokens" be valid ?
+## How long should my "Personal Access Tokens" be valid?
 
 Because of security reasons PATs should not be valid indefinitely.
 Instead, each PAT should have a well defined validity period after which
@@ -128,7 +127,7 @@ under the hood to the same openBIS session. Therefore, even if one of
 such PATs expires the session is kept active and its state is
 maintained.
 
-#### Configuration
+## Configuration
 
 "Personal access tokens" functionality is enabled by default. To
 configure it please use AS service.properties:
@@ -145,7 +144,7 @@ configure it please use AS service.properties:
     # set validity warning period (in seconds) - owners of personal access tokens that are going to expire within this warning period are going to receive email notifications (default: 5 days)
     personal-access-tokens-validity-warning-period = 259200
 
-#### Typical Application Workflow
+## Typical Application Workflow
 
 Most typical use case for Personal Access Tokens is to run code on a
 third party service against openBIS.
@@ -275,7 +274,7 @@ management.
 
     }
 
-#### V3 API 
+## V3 API 
 
 Code examples for personal access tokens can be found in the main V3 API
 documentation: [openBIS V3
diff --git a/docs/software-developer-documentation/apis/python-v3-api.md b/docs/software-developer-documentation/apis/python-v3-api.md
index 20ba1f6a2f70b28e8dfc956bf85df9267cf4b84f..ec6ce3d0203215ec23323c2b1f34666d7a7c0af9 100644
--- a/docs/software-developer-documentation/apis/python-v3-api.md
+++ b/docs/software-developer-documentation/apis/python-v3-api.md
@@ -1,4 +1,4 @@
-# Welcome to pyBIS!
+# Python (V3 API) - pyBIS!
 
 pyBIS is a Python module for interacting with openBIS. pyBIS is designed to be most useful in a [Jupyter Notebook](https://jupyter.org) or IPython environment, especially if you are developing Python scripts for automatisation. Jupyter Notebooks offer some sort of IDE for openBIS, supporting TAB completition and immediate data checks, making the life of a researcher hopefully easier.
 
@@ -24,7 +24,7 @@ pip install jupyter
 pip install jupyterlab
 ```
 
-# General Usage
+## General Usage
 
 ### TAB completition and other hints in Jupyter / IPython
 
@@ -62,9 +62,9 @@ pip install jupyterlab
 - **property type:** a single property, as defined in the entity types above. It can be of a classic data type (e.g. INTEGER, VARCHAR, BOOLEAN) or its values can be controlled (CONTROLLEDVOCABULARY).
 - **plugin:** a script written in [Jython](https://www.jython.org) which allows to check property values in a even more detailed fashion
 
-# connect to OpenBIS
+## connect to OpenBIS
 
-## login
+### login
 
 In an **interactive session** e.g. inside a Jupyter notebook, you can use `getpass` to enter your password safely:
 
@@ -215,7 +215,7 @@ Currently, mounting is supported for Linux and Mac OS X only.
 
 All attributes, if not provided, are re-used by a previous login() command. If no mountpoint is provided, the default mounpoint will be `~/hostname`. If this directory does not exist, it will be created. The directory must be empty before mounting.
 
-# Masterdata
+## Masterdata
 
 OpenBIS stores quite a lot of meta-data along with your dataSets. The collection of data that describes this meta-data (i.e. meta-meta-data) is called masterdata. It consists of:
 
@@ -230,7 +230,7 @@ OpenBIS stores quite a lot of meta-data along with your dataSets. The collection
 - tags
 - semantic annotations
 
-## browse masterdata
+### browse masterdata
 
 ```
 sample_types = o.get_sample_types()  # get a list of sample types
@@ -263,7 +263,7 @@ o.get_terms(vocabulary='STORAGE')
 o.get_tags()
 ```
 
-## create property types
+### create property types
 
 **Samples** (objects), **experiments** (collections) and **dataSets** contain type-specific **properties**. When you create a new sample, experiment or datasSet of a given type, the set of properties is well defined. Also, the values of these properties are being type-checked.
 
@@ -334,7 +334,7 @@ To create a **tabular, spreadsheet-like property**, use `XML` as `dataType` and
 
 **Note**: PropertyTypes that start with a \$ are by definition `managedInternally` and therefore this attribute must be set to True.
 
-## create sample types / object types
+### create sample types / object types
 
 The second step (after creating a property type, see above) is to create the **sample type**. The new name for **sample** is **object**. You can use both methods interchangeably:
 
@@ -365,7 +365,7 @@ sample_type.get_next_code()        # e.g. FLY77
 
 From pyBIS 1.31.0 onwards, you can provide a `code` even for samples where its sample type has `autoGeneratedCode=True` to offer the same functionality as ELN-LIMS. In earlier versions of pyBIS, providing a code in this situation caused an error.
 
-## assign and revoke properties to sample type / object type
+### assign and revoke properties to sample type / object type
 
 The third step, after saving the sample type, is to **assign or revoke properties** to the newly created sample type. This assignment procedure applies to all entity types (dataset type, experiment type).
 
@@ -383,7 +383,7 @@ sample_type.revoke_property('diff_time')
 sample_type.get_property_assignments()
 ```
 
-## create a dataset type
+### create a dataset type
 
 The second step (after creating a **property type**, see above) is to create the **dataset type**. The third step is to **assign or revoke the properties** to the newly created dataset type.
 
@@ -402,7 +402,7 @@ dataset_type.revoke_property('property_name')
 dataset_type.get_property_assignments()
 ```
 
-## create an experiment type / collection type
+### create an experiment type / collection type
 
 The second step (after creating a **property type**, see above) is to create the **experiment type**.
 
@@ -422,7 +422,7 @@ experiment_type.revoke_property('property_name')
 experiment_type.get_property_assignments()
 ```
 
-## create material types
+### create material types
 
 Materials and material types are deprecated in newer versions of openBIS.
 
@@ -439,7 +439,7 @@ material_type.get_property_assignments()
 
 ```
 
-## create plugins
+### create plugins
 
 Plugins are Jython scripts that can accomplish more complex data-checks than ordinary types and vocabularies can achieve. They are assigned to entity types (dataset type, sample type etc). [Documentation and examples can be found here](https://wiki-bsse.ethz.ch/display/openBISDoc/Properties+Handled+By+Scripts)
 
@@ -453,7 +453,7 @@ pl = o.new_plugin(
 pl.save()
 ```
 
-## Users, Groups and RoleAssignments
+### Users, Groups and RoleAssignments
 
 Users can only login into the openBIS system when:
 
@@ -496,7 +496,7 @@ ra = o.get_role_assignment(techId)
 ra.delete()
 ```
 
-## Spaces
+### Spaces
 
 Spaces are fundamental way in openBIS to divide access between groups. Within a space, data can be easily shared. Between spaces, people need to be given specific access rights (see section above). The structure in openBIS is as follows:
 
@@ -533,7 +533,7 @@ space.attrs.all()
 space.delete('reason for deletion')
 ```
 
-## Projects
+### Projects
 
 Projects live within spaces and usually contain experiments (aka collections):
 
@@ -589,7 +589,7 @@ project.freezeForExperiments = True
 project.freezeForSamples = True
 ```
 
-## Experiments / Collections
+### Experiments / Collections
 
 Experiments live within projects:
 
@@ -605,7 +605,7 @@ The new name for **experiment** is **collection**. You can use boths names inter
 - `new_experiment()` = `new_collection()`
 - `get_experiments()` = `get_collections()`
 
-### create a new experiment
+#### create a new experiment
 
 ```
 exp = o.new_experiment
@@ -617,7 +617,7 @@ exp = o.new_experiment
 exp.save()
 ```
 
-### search for experiments
+#### search for experiments
 
 ```
 experiments = o.get_experiments(
@@ -659,7 +659,7 @@ experiments = o.get_experiments(
 
 ```
 
-### Experiment attributes
+#### Experiment attributes
 
 ```
 exp.attrs.all()                    # returns all attributes as a dict
@@ -682,7 +682,7 @@ exp.freezeForSamples = True
 exp.save()                         # needed to save/update the changed attributes and properties
 ```
 
-### Experiment properties
+#### Experiment properties
 
 **Getting properties**
 
@@ -713,7 +713,7 @@ experiment.set_props({ key: value })      # set the values of some properties
 experiment.save()                         # needed to save/update the changed attributes and properties
 ```
 
-## Samples / Objects
+### Samples / Objects
 
 Samples usually live within experiments/collections:
 
@@ -778,7 +778,7 @@ sample.add_attachment('testfile.xls') # deprecated, see above
 sample.delete('deleted for some reason')
 ```
 
-## create/update/delete many samples in a transaction
+### create/update/delete many samples in a transaction
 
 Creating a single sample takes some time. If you need to create many samples, you might want to create them in one transaction. This will transfer all your sample data at once. The Upside of this is the **gain in speed**. The downside: this is a **all-or-nothing** operation, which means, either all samples will be registered or none (if any error occurs).
 
@@ -818,7 +818,7 @@ trans.commit()
 
 **Note:** You can use the `mark_to_be_deleted()`, `unmark_to_be_deleted()` and `is_marked_to_be_deleted()` methods to set and read the internal flag.
 
-### parents, children, components and container
+#### parents, children, components and container
 
 ```
 sample.get_parents()
@@ -848,7 +848,7 @@ sample.add_components('/MY_SPACE/COMPONENT_NAME')
 sample.del_components('/MY_SPACE/COMPONENT_NAME')
 ```
 
-### sample tags
+#### sample tags
 
 ```
 sample.get_tags()
@@ -857,7 +857,7 @@ sample.add_tags(['tag2','tag3'])
 sample.del_tags('tag1')
 ```
 
-### Sample attributes and properties
+#### Sample attributes and properties
 
 **Getting properties**
 
@@ -891,7 +891,7 @@ sample.set_props({ key: value })      # set the values of some properties
 sample.save()                         # needed to save/update the attributes and properties
 ```
 
-### search for samples / objects
+#### search for samples / objects
 
 The result of a search is always list, even when no items are found. The `.df` attribute returns
 the Pandas dataFrame of the results.
@@ -958,7 +958,7 @@ experiments = o.get_samples(
 
 ```
 
-### freezing samples
+#### freezing samples
 
 ```
 sample.freeze = True
@@ -968,7 +968,7 @@ sample.freezeForParents = True
 sample.freezeForDataSets = True
 ```
 
-## Datasets
+### Datasets
 
 Datasets are by all means the most important openBIS entity. The actual files are stored as datasets; all other openBIS entities mainly are necessary to annotate and to structure the data:
 
@@ -978,7 +978,7 @@ Datasets are by all means the most important openBIS entity. The actual files ar
       - sample / object
         - dataset
 
-### working with existing dataSets
+#### working with existing dataSets
 
 **search for datasets**
 
@@ -1046,7 +1046,7 @@ ds.download_attachments(<path or cwd>)  # Deprecated, as attachments are not com
                                   # Attachments are an old concept and should not be used anymore.
 ```
 
-### download dataSets
+#### download dataSets
 
 ```
 o.download_prefix                  # used for download() and symlink() method.
@@ -1066,7 +1066,7 @@ ds.download_path                   # returns the relative path (destination) of
 ds.is_physical()                   # TRUE if dataset is physically
 ```
 
-### link dataSets
+#### link dataSets
 
 Instead of downloading a dataSet, you can create a symbolic link to a dataSet in the openBIS dataStore. To do that, the openBIS dataStore needs to be mounted first (see mount method above). **Note:** Symbolic links and the mount() feature currently do not work with Windows.
 
@@ -1083,7 +1083,7 @@ ds.symlink(
 ds.is_symlink()
 ```
 
-### dataSet attributes and properties
+#### dataSet attributes and properties
 
 **Getting properties**
 
@@ -1115,7 +1115,7 @@ ds.p.set({'my_property':'value'}) # set the values of some properties
 ds.set_props({ key: value })      # set the values of some properties
 ```
 
-### search for dataSets
+#### search for dataSets
 
 - The result of a search is always list, even when no items are found
 - The `.df` attribute returns the Pandas dataFrame of the results
@@ -1174,7 +1174,7 @@ datasets = o.get_experiment('/MY_NEW_SPACE/MY_PROJECT/MY_EXPERIMENT4')\
            .get_datasets(type='RAW_DATA')
 ```
 
-### freeze dataSets
+#### freeze dataSets
 
 - once a dataSet has been frozen, it cannot be changed by anyone anymore
 - so be careful!
@@ -1188,7 +1188,7 @@ ds.freezeForContainers = True
 ds.save()
 ```
 
-### create a new dataSet
+#### create a new dataSet
 
 ```
 ds_new = o.new_dataset(
@@ -1201,7 +1201,7 @@ ds_new = o.new_dataset(
 ds_new.save()
 ```
 
-### create dataSet with zipfile
+#### create dataSet with zipfile
 
 DataSet containing one zipfile which will be unzipped in openBIS:
 
@@ -1214,7 +1214,7 @@ ds_new = o.new_dataset(
 ds_new.save()
 ```
 
-### create dataSet with mixed content
+#### create dataSet with mixed content
 
 - mixed content means: folders and files are provided
 - a relative specified folder (and all its content) will end up in the root, while keeping its structure
@@ -1243,7 +1243,7 @@ ds_new = o.new_dataset(
 ds_new.save()
 ```
 
-### create dataSet container
+#### create dataSet container
 
 A DataSet of kind=CONTAINER contains other DataSets, but no files:
 
@@ -1258,7 +1258,7 @@ ds_new = o.new_dataset(
 ds_new.save()
 ```
 
-### get, set, add and remove parent datasets
+#### get, set, add and remove parent datasets
 
 ```
 dataset.get_parents()
@@ -1276,7 +1276,7 @@ dataset.add_children(['20170115220259155-412'])
 dataset.del_children(['20170115220259155-412'])
 ```
 
-### dataSet containers
+#### dataSet containers
 
 - A DataSet may belong to other DataSets, which must be of kind=CONTAINER
 - As opposed to Samples, DataSets may belong (contained) to more than one DataSet-container
@@ -1301,7 +1301,7 @@ dataset.add_components(['20170115220259155-412'])
 dataset.del_components(['20170115220259155-412'])
 ```
 
-## Semantic Annotations
+### Semantic Annotations
 
 create semantic annotation for sample type 'UNKNOWN':
 
@@ -1370,7 +1370,7 @@ sa.save()
 sa.delete('reason')
 ```
 
-## Tags
+### Tags
 
 ```
 new_tag = o.new_tag(
@@ -1389,7 +1389,7 @@ tag.get_owner()   # returns a person object
 tag.delete('why?')
 ```
 
-## Vocabulary and VocabularyTerms
+### Vocabulary and VocabularyTerms
 
 An entity such as Sample (Object), Experiment (Collection), Material or DataSet can be of a specific _entity type_:
 
@@ -1455,9 +1455,9 @@ term.save()
 term.delete()
 ```
 
-## Change ELN Settings via pyBIS
+### Change ELN Settings via pyBIS
 
-### Main Menu
+#### Main Menu
 
 The ELN settings are stored as a **JSON string** in the `$eln_settings` property of the `GENERAL_ELN_SETTINGS` sample. You can show the **Main Menu settings** like this:
 
@@ -1491,7 +1491,7 @@ settings_sample.props['$eln_settings'] = json.dumps(settings)
 settings_sample.save()
 ```
 
-### Storages
+#### Storages
 
 The **ELN storages settings** can be found in the samples of project `/ELN_SETTINGS/STORAGES`
 
@@ -1517,7 +1517,7 @@ sto.props()
  sto.save()
 ```
 
-### Templates
+#### Templates
 
 The **ELN templates settings** can be found in the samples of project `/ELN_SETTINGS/TEMPLATES`
 
@@ -1527,7 +1527,7 @@ o.get_samples(project='/ELN_SETTINGS/TEMPLATES')
 
 To change the settings, use the same technique as shown above with the storages settings.
 
-### Custom Widgets
+#### Custom Widgets
 
 To change the **Custom Widgets settings**, get the `property_type` and set the `metaData` attribute:
 
@@ -1540,4 +1540,4 @@ pt.save()
 Currently, the value of the `custom_widget` key can be set to either
 
 - `Spreadsheet` (for tabular, Excel-like data)
-- `Word Processor` (for rich text data)
+- `Word Processor` (for rich text data)
\ No newline at end of file
diff --git a/docs/software-developer-documentation/server-side-extensions/as-api-listener.md b/docs/software-developer-documentation/server-side-extensions/as-api-listener.md
index 4333859d805da8b2a855f247585d5c13cba9924c..458067128624e15d35fa2e8b24ca9bfd20b59cfb 100644
--- a/docs/software-developer-documentation/server-side-extensions/as-api-listener.md
+++ b/docs/software-developer-documentation/server-side-extensions/as-api-listener.md
@@ -1,5 +1,4 @@
-API Listener Core Plugin (V3 API)]
-==================================
+# API Listener Core Plugin (V3 API)
 
 Introduction
 ------------
@@ -157,4 +156,4 @@ standard openbis log the operation name:
 
 You can download a complete example with sources
 [here](/download/attachments/132286253/api-listener-example.zip?version=1&modificationDate=1663665058217&api=v2) to
-use as a template to make your own.
\ No newline at end of file
+use as a template to make your own.
diff --git a/docs/system-admin-documentation/advanced-features/index.rst b/docs/system-admin-documentation/advanced-features/index.rst
index b2fafeac1298fd3be8c2c653b2cf4b3b745825aa..b5a8cbd0dbeb2e5ae7bc0adedb4d26be0de25d35 100644
--- a/docs/system-admin-documentation/advanced-features/index.rst
+++ b/docs/system-admin-documentation/advanced-features/index.rst
@@ -8,4 +8,4 @@ Advanced Features
    authentication-system
    share-ids
    maintenance-tasks
-   synchronization-of-openbis-databases
\ No newline at end of file
+   openbis-sync
diff --git a/docs/system-admin-documentation/advanced-features/openbis-sync.md b/docs/system-admin-documentation/advanced-features/openbis-sync.md
new file mode 100644
index 0000000000000000000000000000000000000000..94690187b2dd31515bc21f43285e5c8eeb12b021
--- /dev/null
+++ b/docs/system-admin-documentation/advanced-features/openbis-sync.md
@@ -0,0 +1,264 @@
+# openBIS Sync
+
+## Introduction
+
+openBIS Sync allows two openBIS instances to be synchronized. One
+instance (called the Data Source) provides the data (metadata and data
+sets). The other instance (called the Harvester) fetches these data and
+makes them available. At regular time intervals the Harvester instance
+synchronizes its data with the data on the Data Source instance. That
+is, synchronization adds data to and deletes data from the Harvester
+instance. The Harvester instance can also synchronize only a subset of
+the data, and it can gather data from several Data Source instances.
+
+## Data Source
+
+The Data Source instance provides a service based on the ResourceSync
+Framework Specification (see
+<http://www.openarchives.org/rs/1.1/resourcesync>). This service is
+provided by the [core plugin](/pages/viewpage.action?pageId=80699503)
+module `openbis-sync`, which has a DSS service based on [Service
+Plugins](/pages/viewpage.action?pageId=80699366).
+
+This DSS service accesses the main openBIS database directly. If the
+name of this database isn't `openbis_prod`, the property `database.kind`
+in the DSS `service.properties` should be set to the same value as the
+corresponding property in the AS `service.properties`. Example:
+
+**servers/openBIS-server/jetty/etc/service.properties**
+
+    ...
+    database.kind = production
+    ...
+
+**servers/datastore\_server/etc/service.properties**
+
+    ...
+    database.kind = production
+    ...
+
+
+The URL of the service is `<DSS base URL>/datastore_server/re-sync`. The
+returned XML document looks like the following:
+
+    <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:rs="http://www.openarchives.org/rs/terms/">
+      <rs:ln href="https://localhost:8444/datastore_server/re-sync/?verb=about.xml" rel="describedby"/>
+      <rs:md capability="description"/>
+      <url>
+        <loc>https://localhost:8444/datastore_server/re-sync/?verb=capabilitylist.xml</loc>
+        <rs:md capability="capabilitylist"/>
+      </url>
+    </urlset>
+
+The `loc` element contains the URL which delivers a list of all
+capabilities:
+
+    <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:rs="http://www.openarchives.org/rs/terms/">
+      <rs:ln href="https://localhost:8444/datastore_server/re-sync/?verb=about.xml" rel="up"/>
+      <rs:md capability="capabilitylist" from="2013-02-07T22:39:00"/>
+      <url>
+        <loc>https://localhost:8444/datastore_server/re-sync/?verb=resourcelist.xml</loc>
+        <rs:md capability="resourcelist"/>
+      </url>
+    </urlset>
+
+Of the capabilities described in the ResourceSync Framework
+Specification only `resourcelist` is supported. The resourcelist returns
+an XML document with all metadata of the Data Source openBIS instance.
+This includes master data and metadata, including file metadata.
+
+Two optional URL parameters filter the data by spaces:
+
+-   `black_list`: comma-separated list of regular expressions. All
+    entities which belong to a space which matches one of the regular
+    expressions of this list will be suppressed.
+-   `white_list`: comma-separated list of regular expressions. If
+    defined only entities which belong to a space which matches one of
+    the regular expressions of this list will be delivered (if not
+    suppressed by the black list).
+
+Remarks:
+
+-   Basic HTTP authentication is used for authentication.
+-   The resourcelist capability returns only data visible to the user
+    who performed the authentication.
+
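+For example, the resource list can be fetched with any HTTP client that
+supports basic authentication. The following `curl` call is only a
+sketch: host, port, user and filter values are placeholders, and it
+assumes that the space filters are passed as additional query
+parameters of the resourcelist URL:
+
+    curl -u <data source user id>:<data source password> \
+      "https://<data source host>:<DSS port>/datastore_server/re-sync/?verb=resourcelist.xml&white_list=ABC_.*&black_list=SYSTEM"
+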
+## Harvester
+
+In order to get the data and metadata from a Data Source openBIS
+instance, a DSS harvester [maintenance
+task](/pages/viewpage.action?pageId=80699482) has to be configured on
+the Harvester openBIS instance. This maintenance task reads a separate
+configuration file each time it is executed.
+
+**plugin.properties**
+
+    class = ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.HarvesterMaintenanceTask
+    interval = 1 d
+    harvester-config-file = ../../data/harvester-config.txt
+
+The only property specific to `HarvesterMaintenanceTask` is
+`harvester-config-file`, which is the absolute or relative path to the
+actual configuration file. This separation into two configuration files
+has been made because `plugin.properties` is only read once (at start-up
+of the DSS). Thus changes in the harvester configuration are possible
+without restarting the DSS.
+
+The harvester maintenance task accesses the main openBIS database
+directly in order to synchronize timestamps and users. If the name of
+this database isn't `openbis_prod`, the property `database.kind` in the
+DSS `service.properties` should be set to the same value as the
+corresponding property in the AS `service.properties`. Example:
+
+**servers/openBIS-server/jetty/etc/service.properties**
+
+    ...
+    database.kind = production
+    ...
+
+**servers/datastore\_server/etc/service.properties**
+
+    ...
+    database.kind = production
+    ...
+
+### Harvester Config File
+
+Here is an example of a typical configuration:
+
+**harvester-config.txt**
+
+    [DS1]
+
+    resource-list-url = https://<data source host>:<DSS port>/datastore_server/re-sync
+
+    data-source-openbis-url = https://<data source host>:<AS port>/openbis/openbis
+    data-source-dss-url = https://<data source host>:<DSS port>/datastore_server
+    data-source-auth-realm = OAI-PMH
+    data-source-auth-user = <data source user id>
+    data-source-auth-pass = <data source password>
+    space-black-list = SYSTEM
+    space-white-list = ABC_.*
+
+    harvester-user = <harvester user id>
+    harvester-pass = <harvester user password>
+
+    keep-original-timestamps-and-users = false
+    harvester-tmp-dir = temp
+    last-sync-timestamp-file = ../../data/last-sync-timestamp-file_HRVSTR.txt
+    log-file = log/synchronization.log
+
+    email-addresses = <e-mail 1>, <e-mail 2>, ...
+
+    translate-using-data-source-alias = true
+    verbose = true
+    #dry-run = true
+
+-   The configuration file can have one or many sections, one for each
+    Data Source openBIS instance. Each section starts with an arbitrary
+    name in square brackets.
+-   `<data source host>`, `<DSS port>` and `<AS port>` have to be host
+    name and ports of the Data Source openBIS instance as seen by the
+    Harvester instance.
+-   `<data source user id>` and `<data source password>` are the
+    credentials used to access the Data Source openBIS instance. Only
+    data visible to this user is harvested.
+-   `space-black-list` and `space-white-list` have the same meaning
+    as `black_list` and `white_list` as specified above in the Data
+    Source section.
+-   `<harvester user id>` and `<harvester user password>` are the
+    credentials used to access the Harvester openBIS instance. This has
+    to be a user with instance admin rights.
+-   Temporary files created during harvesting are stored in
+    `harvester-tmp-dir`, which is a path relative to the root of the
+    data store. The store root is specified by `storeroot-dir` in the
+    DSS `service.properties`. The default value is `temp`.
+-   By default the original timestamps (registration and modification
+    timestamps) and users (registrator and modifier) are synchronized.
+    If necessary, users will be created. With the configuration property
+    `keep-original-timestamps-and-users = false` no timestamps and users
+    will be synchronized.
+-   The `last-sync-timestamp-file` is a relative or absolute path to the
+    file which stores the timestamp of the last synchronization.
+-   The `log-file` is a relative or absolute path to the file where
+    synchronization information is logged. This information does not
+    appear in the standard DSS log file.
+-   In case of an error an e-mail is sent to the specified e-mail
+    addresses.
+-   `translate-using-data-source-alias` is a flag which controls whether
+    the codes of spaces, types and materials should have a prefix or
+    not. If true, the prefix will be the section name in square brackets
+    followed by an underscore. The default value of this flag is false.
+-   The `verbose` flag adds the added, updated and deleted items to the
+    synchronization log. Default: `false`, or `true` if the `dry-run`
+    flag is set.
+-   The `dry-run` flag allows running the task without changing the
+    Harvester openBIS instance. This allows checking for configuration
+    errors or errors with the Data Source openBIS instance. A dry run is
+    always performed first, even if this flag is not set. Default: `false`
+-   The `master-data-update-allowed` flag allows updating master data
+    such as plugins, property types, entity types and entity
+    assignments. Note that master data can still be added even if this
+    flag is `false`. Default: `false`
+-   The `property-unassignment-allowed` flag allows removing property
+    assignments, that is, removing property types from entity types.
+    Default: `false`
+-   The `deletion-allowed` flag allows deletion of entities on the
+    Harvester openBIS instance. Default: `false`
+-   If the `keep-original-timestamps-and-users` flag is set, timestamps
+    and users are copied from the Data Source to the Harvester.
+    Otherwise the entities will have the harvester user and the actual
+    registration timestamp. Default: `true`
+-   If the `keep-original-frozen-flags` flag is set, the frozen flags
+    are copied from the Data Source to the Harvester. Otherwise entities
+    which are frozen on the Data Source are not frozen on the Harvester.
+    Default: `true`
+
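+For a first validation run it can be useful to start with a reduced
+configuration and the `dry-run` flag enabled, so that nothing is changed
+on the Harvester. The following sketch only reuses the keys shown above;
+the section name `DS1` and all placeholder values are illustrative (with
+`translate-using-data-source-alias = true`, a space named e.g.
+`CHEMISTRY` on the Data Source would be created as `DS1_CHEMISTRY` on
+the Harvester):
+
+    [DS1]
+    resource-list-url = https://<data source host>:<DSS port>/datastore_server/re-sync
+    data-source-openbis-url = https://<data source host>:<AS port>/openbis/openbis
+    data-source-dss-url = https://<data source host>:<DSS port>/datastore_server
+    data-source-auth-realm = OAI-PMH
+    data-source-auth-user = <data source user id>
+    data-source-auth-pass = <data source password>
+    harvester-user = <harvester user id>
+    harvester-pass = <harvester user password>
+    harvester-tmp-dir = temp
+    last-sync-timestamp-file = ../../data/last-sync-timestamp-file_DS1.txt
+    log-file = log/synchronization.log
+    translate-using-data-source-alias = true
+    verbose = true
+    dry-run = true
+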
+### What HarvesterMaintenanceTask does
+
+In the first step the task reads the configuration file from the file
+path specified by `harvester-config-file` in `plugin.properties`. Next,
+the following steps are performed in DRY RUN mode. That is, all data are
+read, parsed and checked, but nothing is changed on the Harvester. If no
+error occurred and the `dry-run` flag isn't set, the same steps are
+performed again, but this time the data is changed (i.e. synced) on the
+Harvester.
+
+1.  Read meta data from the Data Source.
+2.  Delete entities from the Harvester which are no longer on the Data
+    Source (if `deletion-allowed` flag is set).
+3.  Register/update master data.
+4.  Register/update spaces, projects, experiments, samples and
+    materials.
+5.  Register/update attachments.
+6.  Synchronize files from the file service.
+7.  Register/update data sets.
+8.  Update timestamps and users (if `keep-original-timestamps-and-users`
+    flag is set).
+9.  Update frozen flags (if `keep-original-frozen-flags` flag is set).
+
+-   Data are registered if they do not exist on the Harvester.
+    Otherwise they are updated if the Data Source version has a
+    modification timestamp which is later than the last time the
+    HarvesterMaintenanceTask was performed.
+-   If the `translate-using-data-source-alias` flag is set, a prefix is
+    added to spaces, types and materials when they are created.
+-   To find out whether an entity already exists on the Harvester, its
+    perm ID is used.
+
+### Master Data Synchronization Rules
+
+Normally all master data are registered/updated if they do not exist or
+are older. But for internal vocabularies and property types different
+rules apply. Internal means that the entity (i.e. a vocabulary or a
+property type) is managed internally (visible by the prefix '$' in its
+code) and has been registered by the system user.
+
+1.  Internal vocabularies and property types will not be created or
+    updated on the Harvester.
+2.  An internal vocabulary or property type of the Data Source which
+    doesn't exist on the Harvester leads to an error.
+3.  An internal property type which exists on the Data Source and the
+    Harvester but have different data type leads to an error.
+4.  Terms of an internal vocabulary are added if they do not exist on
+    the Harvester.
diff --git a/docs/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.md b/docs/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.md
index c0881133de98d6e3df4b71bd2852317ed7a6c4c4..7b6989f437d6b2e11029e963aea45893d56bb0f7 100644
--- a/docs/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.md
+++ b/docs/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.md
@@ -69,7 +69,7 @@ and **Horizon**, **Snf** do not belong to any group in the Lab notebook.
 
  
 
-## Instance Settings
+### Instance Settings
 
  
 
@@ -89,7 +89,7 @@ and **Horizon**, **Snf** do not belong to any group in the Lab notebook.
 
  
 
-## Group Settings
+### Group Settings
 
  
 
diff --git a/docs/user-documentation/general-admin-users/admins-documentation/space-management.md b/docs/user-documentation/general-admin-users/admins-documentation/space-management.md
index da05ff557d6f278b6795f9a4fdc7456cea24a716..f4b3ec2130dcea7086303bd38869aab8b2f2cec7 100644
--- a/docs/user-documentation/general-admin-users/admins-documentation/space-management.md
+++ b/docs/user-documentation/general-admin-users/admins-documentation/space-management.md
@@ -13,7 +13,7 @@ Additional *Spaces* can be created by an *Instance admin*.
 
  
 
-## Create a new Inventory Space from the ELN UI
+### Create a new Inventory Space from the ELN UI
 
  
 
@@ -44,7 +44,7 @@ codes only accept alphanumeric characters, –, . , \_.
 
  
 
-### Multi-group instances
+#### Multi-group instances
 
  
 
@@ -71,7 +71,7 @@ create a new *Space*:
 
  
 
-## Create a new Inventory Space from the core UI
+### Create a new Inventory Space from the core UI
 
  
 
@@ -91,7 +91,7 @@ In the core UI:
 ![image info](img/spaces-admin-UI-300x158.png)
 
 
-### Set Inventory Spaces
+#### Set Inventory Spaces
 
   
 When new *Spaces* are created in the core UI, they are automatically
@@ -123,7 +123,7 @@ Updated on April 26, 2023
 [](# "Print this article")
 
 
-## Create a new Lab Notebook Space from the ELN UI
+### Create a new Lab Notebook Space from the ELN UI
 
  
 
@@ -151,7 +151,7 @@ accept alphanumeric characters, –, . , \_.
 
 ![image info](img/create-space.png)
 
-### Multi-group instances
+#### Multi-group instances
 
  
 
@@ -196,7 +196,7 @@ assigned by an *Instance admin*.
 
  
 
-## Create a new Lab Notebook Space from the core UI
+### Create a new Lab Notebook Space from the core UI
 
  
 
diff --git a/docs/user-documentation/general-users/additional-functionalities.md b/docs/user-documentation/general-users/additional-functionalities.md
index f505209aecd7beba3fc7601918e03062d558319a..688314de3d2eff64674c202794a1330f116c4c18 100644
--- a/docs/user-documentation/general-users/additional-functionalities.md
+++ b/docs/user-documentation/general-users/additional-functionalities.md
@@ -713,3 +713,18 @@ from one *Object*/*Collection* to another, the PermID of the old and new
 *Objects*/*Collections* are shown in the history table.
 
 Updated on November 10, 2022
+
+## Spreadsheet
+
+The spreadsheet component needs to be enabled by a group admin or lab manager who can edit the ELN Settings, as described here: [Enable Rich Text Editor or Spreadsheet Widgets](../general-admin-users/admins-documentation/new-entity-type-registration.md)
+
+![image info](img/Screenshot-2020-03-09-at-17.13.07.png)
+
+The spreadsheet supports some basic Excel functionalities, such as mathematical formulas (e.g. =SUM(A1+A2)).
+It is possible to import an openBIS Object into the spreadsheet using the **import** button on the spreadsheet itself:
+
+![image info](img/Screen-Shot-2019-09-17-at-13.18.40.png)
+
+Please note that if the Object is updated in openBIS, it will NOT be automatically updated in the spreadsheet.
+
+Updated on March 4, 2022
diff --git a/docs/user-documentation/general-users/img/Screen-Shot-2019-09-17-at-13.18.40.png b/docs/user-documentation/general-users/img/Screen-Shot-2019-09-17-at-13.18.40.png
new file mode 100644
index 0000000000000000000000000000000000000000..dbcdad2e05324bee9cee0d624ef3ac08ede460f1
Binary files /dev/null and b/docs/user-documentation/general-users/img/Screen-Shot-2019-09-17-at-13.18.40.png differ
diff --git a/docs/user-documentation/general-users/img/Screen-Shot-2019-09-17-at-13.18.40.png:Zone.Identifier b/docs/user-documentation/general-users/img/Screen-Shot-2019-09-17-at-13.18.40.png:Zone.Identifier
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/docs/user-documentation/general-users/img/Screenshot-2020-03-09-at-17.13.07.png b/docs/user-documentation/general-users/img/Screenshot-2020-03-09-at-17.13.07.png
new file mode 100644
index 0000000000000000000000000000000000000000..c9fd1e06462b7f30a91aa4f7cff894271838e5c0
Binary files /dev/null and b/docs/user-documentation/general-users/img/Screenshot-2020-03-09-at-17.13.07.png differ
diff --git a/docs/user-documentation/general-users/img/Screenshot-2020-03-09-at-17.13.07.png:Zone.Identifier b/docs/user-documentation/general-users/img/Screenshot-2020-03-09-at-17.13.07.png:Zone.Identifier
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/docs/user-documentation/general-users/lab-notebook.md b/docs/user-documentation/general-users/lab-notebook.md
index 4bfcb913d9ffbf38ab5804ec4c4bbe61acf88e16..2adfe778e0cc8ab9aab44f9040bf0a6a221cfe58 100644
--- a/docs/user-documentation/general-users/lab-notebook.md
+++ b/docs/user-documentation/general-users/lab-notebook.md
@@ -719,7 +719,7 @@ The roles can be granted to:
 
 Updated on April 25, 2023
 
-## Rich Test Editor
+## Rich Text Editor
 
 ### EMBED IMAGES IN TEXT FIELDS
 
diff --git a/docs/user-documentation/general-users/tools-for-analysis-of-data-stored-in-openbis.md b/docs/user-documentation/general-users/tools-for-analysis-of-data-stored-in-openbis.md
index 6882772e628bdaa7a21ba653431f98eddf863f8f..7d112e93906671791c9777e0dd0d9ddd4cb5234a 100644
--- a/docs/user-documentation/general-users/tools-for-analysis-of-data-stored-in-openbis.md
+++ b/docs/user-documentation/general-users/tools-for-analysis-of-data-stored-in-openbis.md
@@ -190,7 +190,7 @@ the pre-filled cell shown above.
 
  
 
-## Using a local Jupyter installation with openBIS
+### Using a local Jupyter installation with openBIS
 
 It is also possible to use a local Jupyter installation with openBIS. In
 this case, it is possible to download an extension for JupyterLab that
diff --git a/server-data-store/src/test/java/ch/ethz/sis/afsserver/core/AbstractPublicAPIWrapper.java b/server-data-store/src/test/java/ch/ethz/sis/afsserver/core/AbstractPublicAPIWrapper.java
index 6695bc3aeeddca9b0a40e7bdf06f0eb11a4babda..5051ebcf1564a4a98b3cefaf0f395b3e30de11ad 100644
--- a/server-data-store/src/test/java/ch/ethz/sis/afsserver/core/AbstractPublicAPIWrapper.java
+++ b/server-data-store/src/test/java/ch/ethz/sis/afsserver/core/AbstractPublicAPIWrapper.java
@@ -100,53 +100,59 @@ public abstract class AbstractPublicAPIWrapper implements PublicAPI
     @Override
     public void begin(UUID transactionId) throws Exception
     {
-        //TODO: Unused
+        Map<String, Object> args = Map.of(
+                "transactionId", transactionId);
+        process(null, "begin", args);
     }
 
     @Override
     public Boolean prepare() throws Exception
     {
-        //TODO: Unused
-        return true;
+        Map<String, Object> args = Map.of();
+        return process(Boolean.class, "prepare", args);
     }
 
     @Override
     public void commit() throws Exception
     {
-        //TODO: Unused
+        Map<String, Object> args = Map.of();
+        process(null, "commit", args);
     }
 
     @Override
     public void rollback() throws Exception
     {
-        //TODO: Unused
+        Map<String, Object> args = Map.of();
+        process(null, "rollback", args);
     }
 
     @Override
     public List<UUID> recover() throws Exception
     {
-        //TODO: Unused
-        return null;
+        Map<String, Object> args = Map.of();
+        return process(List.class, "recover", args);
     }
 
     @Override
     public String login(String userId, String password) throws Exception
     {
-        //TODO: Unused
-        return null;
+        Map<String, Object> args = Map.of(
+                "userId", userId,
+                "password", password);
+        return process(String.class, "login", args);
     }
 
     @Override
     public Boolean isSessionValid() throws Exception
     {
-        //TODO: Unused
-        return null;
+        Map<String, Object> args = Map.of();
+        return process(Boolean.class, "isSessionValid", args);
     }
 
     @Override
     public Boolean logout() throws Exception
     {
-        //TODO: Unused
-        return null;
+        Map<String, Object> args = Map.of();
+        return process(Boolean.class, "logout", args);
     }
 }
diff --git a/server-data-store/src/test/java/ch/ethz/sis/afsserver/core/PublicApiTest.java b/server-data-store/src/test/java/ch/ethz/sis/afsserver/core/PublicApiTest.java
index 186b8a1e28989ad61bf8e664edd3126f5acc1884..7381c70a02938b3d305c6977427779119b03cbde 100644
--- a/server-data-store/src/test/java/ch/ethz/sis/afsserver/core/PublicApiTest.java
+++ b/server-data-store/src/test/java/ch/ethz/sis/afsserver/core/PublicApiTest.java
@@ -38,6 +38,8 @@ public abstract class PublicApiTest extends AbstractTest
 
     public abstract PublicAPI getPublicAPI() throws Exception;
 
+    public abstract PublicAPI getPublicAPI(String interactiveSessionKey, String transactionManagerKey) throws Exception;
+
     public static final String ROOT = IOUtils.PATH_SEPARATOR_AS_STRING;
 
     public static final String FILE_A = "A.txt";
@@ -129,4 +131,83 @@ public abstract class PublicApiTest extends AbstractTest
         assertEquals(1, list.size());
         assertEquals(FILE_B, list.get(0).getName());
     }
+
+
+    @Test
+    public void operation_state_begin_succeed() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+    }
+
+    @Test
+    public void operation_state_prepare_succeed() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+        publicAPI.prepare();
+    }
+
+    @Test
+    public void operation_state_rollback_succeed() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+        publicAPI.prepare();
+        publicAPI.rollback();
+    }
+
+    @Test
+    public void operation_state_commit_succeed() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+        publicAPI.commit();
+    }
+
+    @Test
+    public void operation_state_commitPrepared_succeed() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+        publicAPI.prepare();
+        publicAPI.commit();
+    }
+
+    @Test
+    public void operation_state_commit_reuse_succeed() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+        publicAPI.prepare();
+        publicAPI.commit();
+        publicAPI.begin(sessionToken);
+    }
+
+    @Test
+    public void operation_state_rollback_reuse_succeed() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+        publicAPI.prepare();
+        publicAPI.rollback();
+        publicAPI.begin(sessionToken);
+    }
+
+    @Test(expected = RuntimeException.class)
+    public void operation_state_begin_reuse_fails() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+        publicAPI.begin(sessionToken);
+    }
+
+    @Test
+    public void operation_state_prepare_reuse_succeed() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        PublicAPI publicAPI = getPublicAPI("1234", "5678");
+        publicAPI.begin(sessionToken);
+        publicAPI.prepare();
+        publicAPI.begin(sessionToken);
+    }
 }
\ No newline at end of file
diff --git a/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/APIServerAdapterWrapper.java b/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/APIServerAdapterWrapper.java
index 91972ae76949d1abb0a8dfcb27016aa78831e5eb..9555103102cd69ac86edcefb9f7e0b5421ae11d6 100644
--- a/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/APIServerAdapterWrapper.java
+++ b/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/APIServerAdapterWrapper.java
@@ -36,12 +36,26 @@ public class APIServerAdapterWrapper extends AbstractPublicAPIWrapper
 
     private ApiServerAdapter apiServerAdapter;
 
+    private String interactiveSessionKey;
+
+    private String transactionManagerKey;
+
+    private String sessionToken;
 
     public APIServerAdapterWrapper(ApiServerAdapter apiServerAdapter)
     {
         this.apiServerAdapter = apiServerAdapter;
     }
 
+    public APIServerAdapterWrapper(ApiServerAdapter apiServerAdapter, String interactiveSessionKey,
+            String transactionManagerKey, String sessionToken)
+    {
+        this.apiServerAdapter = apiServerAdapter;
+        this.interactiveSessionKey = interactiveSessionKey;
+        this.transactionManagerKey = transactionManagerKey;
+        this.sessionToken = sessionToken;
+    }
+
     public Map<String, List<String>> getURIParameters(Map<String, Object> args)
     {
         Map<String, List<String>> result = new HashMap<>(args.size());
@@ -64,7 +78,18 @@ public class APIServerAdapterWrapper extends AbstractPublicAPIWrapper
         {
             HttpMethod httpMethod = ApiServerAdapter.getHttpMethod(apiMethod);
             Map<String, List<String>> requestParameters = getURIParameters(params);
-            requestParameters.put("sessionToken", List.of(UUID.randomUUID().toString()));
+            if (interactiveSessionKey != null)
+            {
+                requestParameters.put("interactiveSessionKey", List.of(interactiveSessionKey));
+            }
+            if (transactionManagerKey != null)
+            {
+                requestParameters.put("transactionManagerKey", List.of(transactionManagerKey));
+            }
+            if (sessionToken != null)
+            {
+                requestParameters.put("sessionToken", List.of(sessionToken));
+            }
             requestParameters.put("method", List.of(apiMethod));
 
             byte[] requestBody = null;
@@ -82,7 +107,7 @@ public class APIServerAdapterWrapper extends AbstractPublicAPIWrapper
 
 
 
-            HttpResponse response = apiServerAdapter.process(httpMethod, requestParameters, null);
+            HttpResponse response = apiServerAdapter.process(httpMethod, requestParameters, requestBody);
             String contentType = response.getContentType();
             byte[] body = response.getBody();
 
diff --git a/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/APIServerWrapper.java b/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/APIServerWrapper.java
index 4746825aef72aa1c7de900bacc3b24f90eb7cf5a..901de2ebf848f40d971e3e1d982488a7c9337e16 100644
--- a/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/APIServerWrapper.java
+++ b/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/APIServerWrapper.java
@@ -34,17 +34,24 @@ public class APIServerWrapper extends AbstractPublicAPIWrapper
     private static final Logger logger = LogManager.getLogger(APIServerWrapper.class);
 
     private APIServer apiServer;
+    private String interactiveSessionKey;
+    private String transactionManagerKey;
+    private String sessionToken;
+
     private final ApiResponseBuilder apiResponseBuilder;
 
-    public APIServerWrapper(APIServer apiServer) {
+    public APIServerWrapper(APIServer apiServer, String interactiveSessionKey, String transactionManagerKey, String sessionToken) {
         this.apiServer = apiServer;
+        this.interactiveSessionKey = interactiveSessionKey;
+        this.transactionManagerKey = transactionManagerKey;
+        this.sessionToken = sessionToken;
         this.apiResponseBuilder = new ApiResponseBuilder();
     }
 
     public <E> E process(Class<E> responseType, String method, Map<String, Object> params) {
         PerformanceAuditor performanceAuditor = new PerformanceAuditor();
         // Random Session token just works for tests with dummy authentication
-        ApiRequest request = new ApiRequest("test", method, params, UUID.randomUUID().toString(), null, null);
+        ApiRequest request = new ApiRequest("test", method, params, sessionToken, interactiveSessionKey, transactionManagerKey);
 
         try {
             Response response = apiServer.processOperation(request, apiResponseBuilder, performanceAuditor);
diff --git a/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/ApiServerAdapterTest.java b/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/ApiServerAdapterTest.java
index 711a8bda4451a14f6b9b01081b829c5d352fb06a..e643c8dd248c8e35eba5dc06530783b800ab8824 100644
--- a/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/ApiServerAdapterTest.java
+++ b/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/ApiServerAdapterTest.java
@@ -22,15 +22,30 @@ import ch.ethz.sis.afsserver.server.impl.ApiServerAdapter;
 import ch.ethz.sis.afsserver.startup.AtomicFileSystemServerParameter;
 import ch.ethz.sis.afsjson.JsonObjectMapper;
 import ch.ethz.sis.shared.startup.Configuration;
+import org.junit.Test;
+
+import java.util.UUID;
 
 public class ApiServerAdapterTest extends ApiServerTest {
 
+    @Override
+    public PublicAPI getPublicAPI(String interactiveSessionKey, String transactionManagerKey) throws Exception {
+        UUID sessionToken = UUID.randomUUID();
+        APIServer apiServer = getAPIServer();
+        Configuration configuration = ServerClientEnvironmentFS.getInstance().getDefaultServerConfiguration();
+        JsonObjectMapper jsonObjectMapper = configuration.getSharableInstance(AtomicFileSystemServerParameter.jsonObjectMapperClass);
+        ApiServerAdapter apiServerAdapter = new ApiServerAdapter(apiServer, jsonObjectMapper);
+        return new APIServerAdapterWrapper(apiServerAdapter, interactiveSessionKey, transactionManagerKey, sessionToken.toString());
+    }
+
     @Override
     public PublicAPI getPublicAPI() throws Exception {
+        UUID sessionToken = UUID.randomUUID();
         APIServer apiServer = getAPIServer();
         Configuration configuration = ServerClientEnvironmentFS.getInstance().getDefaultServerConfiguration();
         JsonObjectMapper jsonObjectMapper = configuration.getSharableInstance(AtomicFileSystemServerParameter.jsonObjectMapperClass);
         ApiServerAdapter apiServerAdapter = new ApiServerAdapter(apiServer, jsonObjectMapper);
-        return new APIServerAdapterWrapper(apiServerAdapter);
+        return new APIServerAdapterWrapper(apiServerAdapter, null, null, sessionToken.toString());
     }
+
 }
diff --git a/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/ApiServerTest.java b/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/ApiServerTest.java
index cfe121fae004fa5af61d8db73a47933422c25b64..1a6494a62e0db69289999dc0e8d9a93312ccf6d9 100644
--- a/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/ApiServerTest.java
+++ b/server-data-store/src/test/java/ch/ethz/sis/afsserver/impl/ApiServerTest.java
@@ -28,6 +28,8 @@ import ch.ethz.sis.afsserver.worker.WorkerFactory;
 import ch.ethz.sis.shared.pool.Pool;
 import ch.ethz.sis.shared.startup.Configuration;
 
+import java.util.UUID;
+
 public class ApiServerTest extends PublicApiTest {
 
     protected APIServer getAPIServer() throws Exception {
@@ -54,6 +56,15 @@ public class ApiServerTest extends PublicApiTest {
 
     @Override
     public PublicAPI getPublicAPI() throws Exception {
-        return new APIServerWrapper(getAPIServer());
+        String sessionToken = UUID.randomUUID().toString();
+        return new APIServerWrapper(getAPIServer(), null, null, sessionToken);
+    }
+
+    @Override
+    public PublicAPI getPublicAPI(String interactiveSessionKey, String transactionManagerKey)
+            throws Exception
+    {
+        String sessionToken = UUID.randomUUID().toString();
+        return new APIServerWrapper(getAPIServer(), interactiveSessionKey, transactionManagerKey, sessionToken);
     }
 }