diff --git a/openbis/source/java/ch/systemsx/cisd/openbis/generic/shared/dto/SamplePE.java b/openbis/source/java/ch/systemsx/cisd/openbis/generic/shared/dto/SamplePE.java
index 38c2d1a2822d39c34d9dca5810cf7ddc2dfc3109..5e8a2400139581dec0ae19820dbd0545888f5ad5 100644
--- a/openbis/source/java/ch/systemsx/cisd/openbis/generic/shared/dto/SamplePE.java
+++ b/openbis/source/java/ch/systemsx/cisd/openbis/generic/shared/dto/SamplePE.java
@@ -22,7 +22,6 @@ import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -44,6 +43,10 @@ import javax.persistence.Version;
 import javax.validation.constraints.NotNull;
 import javax.validation.constraints.Pattern;
 
+import ch.systemsx.cisd.openbis.generic.server.CommonServiceProvider;
+import ch.systemsx.cisd.openbis.generic.shared.basic.BasicConstant;
+import ch.systemsx.cisd.openbis.generic.shared.basic.IIdHolder;
+import ch.systemsx.cisd.openbis.generic.shared.basic.IIdentityHolder;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
@@ -62,9 +65,6 @@ import ch.rinn.restrictions.Private;
 import ch.systemsx.cisd.common.collection.UnmodifiableSetDecorator;
 import ch.systemsx.cisd.common.reflection.ModifiedShortPrefixToStringStyle;
 import ch.systemsx.cisd.openbis.generic.shared.IServer;
-import ch.systemsx.cisd.openbis.generic.shared.basic.BasicConstant;
-import ch.systemsx.cisd.openbis.generic.shared.basic.IIdHolder;
-import ch.systemsx.cisd.openbis.generic.shared.basic.IIdentityHolder;
 import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AttachmentHolderKind;
 import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.IdentifierHelper;
 import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
@@ -129,32 +129,29 @@ public class SamplePE extends AttachmentHolderPE implements IIdAndCodeHolder, Co
 
     private String permId;
 
-    private Set<SampleRelationshipPE> parentRelationships = new LinkedHashSet<SampleRelationshipPE>();
+    private Set<SampleRelationshipPE> parentRelationships;
 
-    private Set<SampleRelationshipPE> childRelationships = new LinkedHashSet<SampleRelationshipPE>();
+    private Set<SampleRelationshipPE> childRelationships;
 
     private Set<MetaprojectAssignmentPE> metaprojectAssignments =
             new HashSet<MetaprojectAssignmentPE>();
 
-    @OptimisticLock(excluded = true)
-    @OneToMany(fetch = FetchType.LAZY, mappedBy = "parentSample")
-    @Fetch(FetchMode.SUBSELECT)
+    @Transient
     private Set<SampleRelationshipPE> getSampleChildRelationships()
     {
+        if (childRelationships == null) {
+            if (id == null) {
+                childRelationships = new HashSet<>();
+            } else {
+                childRelationships = new HashSet<>(CommonServiceProvider.getDAOFactory().getSampleRelationshipDAO().listSampleChildren(List.of(id)));
+            }
+        }
         return childRelationships;
     }
 
-    // Required by Hibernate.
-    @SuppressWarnings("unused")
-    private void setSampleChildRelationships(final Set<SampleRelationshipPE> childRelationships)
-    {
-        this.childRelationships = childRelationships;
-    }
-
     @Transient
     public Set<SampleRelationshipPE> getChildRelationships()
     {
-
         return new UnmodifiableSetDecorator<SampleRelationshipPE>(getSampleChildRelationships());
     }
 
@@ -171,23 +168,22 @@ public class SamplePE extends AttachmentHolderPE implements IIdAndCodeHolder, Co
     {
         relationship.setParentSample(this);
         getSampleChildRelationships().add(relationship);
+        CommonServiceProvider.getDAOFactory().getSampleRelationshipDAO().persist(List.of(relationship));
     }
 
-    @OptimisticLock(excluded = true)
-    @OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "childSample", orphanRemoval = true)
-    @Fetch(FetchMode.SUBSELECT)
+    @Transient
     private Set<SampleRelationshipPE> getSampleParentRelationships()
     {
+        if (parentRelationships == null) {
+            if (id == null) {
+                parentRelationships = new HashSet<>();
+            } else {
+                parentRelationships = new HashSet<>(CommonServiceProvider.getDAOFactory().getSampleRelationshipDAO().listSampleParents(List.of(id)));
+            }
+        }
         return parentRelationships;
     }
 
-    // Required by Hibernate.
-    @SuppressWarnings("unused")
-    private void setSampleParentRelationships(final Set<SampleRelationshipPE> parentRelationships)
-    {
-        this.parentRelationships = parentRelationships;
-    }
-
     @Transient
     public Set<SampleRelationshipPE> getParentRelationships()
     {
@@ -205,6 +201,7 @@ public class SamplePE extends AttachmentHolderPE implements IIdAndCodeHolder, Co
 
     public void setParentRelationships(final Set<SampleRelationshipPE> parentRelationships)
     {
+        CommonServiceProvider.getDAOFactory().getSampleRelationshipDAO().delete(getSampleParentRelationships());
         getSampleParentRelationships().clear();
         for (final SampleRelationshipPE sampleRelationship : parentRelationships)
         {
@@ -221,6 +218,7 @@ public class SamplePE extends AttachmentHolderPE implements IIdAndCodeHolder, Co
     {
         relationship.setChildSample(this);
         getSampleParentRelationships().add(relationship);
+        CommonServiceProvider.getDAOFactory().getSampleRelationshipDAO().persist(List.of(relationship));
     }
 
     public void removeParentRelationship(final SampleRelationshipPE relationship)
@@ -229,6 +227,7 @@ public class SamplePE extends AttachmentHolderPE implements IIdAndCodeHolder, Co
         relationship.getParentSample().getSampleChildRelationships().remove(relationship);
         relationship.setChildSample(null);
         relationship.setParentSample(null);
+        CommonServiceProvider.getDAOFactory().getSampleRelationshipDAO().delete(List.of(relationship));
     }
 
     /**
diff --git a/openbis/source/java/ch/systemsx/cisd/openbis/public/resources/api/v3/openbis.js b/openbis/source/java/ch/systemsx/cisd/openbis/public/resources/api/v3/openbis.js
index 01082f5e085e9e0df9f6634d8fe5ce990b7c3ca4..77b1bddfd9924e819cd3bd6852f15a4dad9bd492 100644
--- a/openbis/source/java/ch/systemsx/cisd/openbis/public/resources/api/v3/openbis.js
+++ b/openbis/source/java/ch/systemsx/cisd/openbis/public/resources/api/v3/openbis.js
@@ -1,4 +1,5 @@
-define([ 'jquery', 'util/Json', 'as/dto/datastore/search/DataStoreSearchCriteria', 'as/dto/datastore/fetchoptions/DataStoreFetchOptions', 'as/dto/common/search/SearchResult' ], function(jquery,
+define([ 'jquery', 'util/Json', 'as/dto/datastore/search/DataStoreSearchCriteria', 'as/dto/datastore/fetchoptions/DataStoreFetchOptions',
+	'as/dto/common/search/SearchResult'], function(jquery,
 		stjsUtil, DataStoreSearchCriteria, DataStoreFetchOptions, SearchResult) {
 	jquery.noConflict();
 
@@ -128,11 +129,11 @@ define([ 'jquery', 'util/Json', 'as/dto/datastore/search/DataStoreSearchCriteria
 			}
 		}
 
-		this._createUrlWithParameters = function(dataStore, servlet, parameters) {
+		function createUrlWithParameters(dataStore, servlet, parameters) {
 			return dataStore.downloadUrl + "/datastore_server/" + servlet + parameters;
 		}
 
-		this._createUrl = function(dataStore) {
+		function createUrl(dataStore) {
 			return dataStore.downloadUrl + "/datastore_server/rmi-data-store-server-v3.json";
 		}
 
@@ -141,7 +142,7 @@ define([ 'jquery', 'util/Json', 'as/dto/datastore/search/DataStoreSearchCriteria
 			return this._getDataStores().then(function(dataStores) {
 				var promises = dataStores.map(function(dataStore) {
 					return facade._private.ajaxRequest({
-						url : thisFacade._createUrl(dataStore),
+						url : createUrl(dataStore),
 						data : {
 							"method" : "searchFiles",
 							"params" : [ facade._private.sessionToken, criteria, fetchOptions ]
@@ -192,7 +193,7 @@ define([ 'jquery', 'util/Json', 'as/dto/datastore/search/DataStoreSearchCriteria
 					var dsCode = dataStore.getCode();
 					if (dsCode in creationsByStore) {
 						promises.push(facade._private.ajaxRequest({
-							url : thisFacade._createUrl(dataStore),
+							url : createUrl(dataStore),
 							data : {
 								"method" : "createDataSets",
 								"params" : [ facade._private.sessionToken, creationsByStore[dsCode] ]
@@ -269,60 +270,185 @@ define([ 'jquery', 'util/Json', 'as/dto/datastore/search/DataStoreSearchCriteria
 		}
 
 		this.createUploadedDataSet = function(creation) {
+			var dfd = jquery.Deferred();
+			this._getDataStores().done(function(dataStores) {
+				if (dataStores.length === 1) {
+					facade._private.ajaxRequest({
+						url: createUrl(dataStores[0]),
+						data: {
+							"method": "createUploadedDataSet",
+							"params": [facade._private.sessionToken, creation]
+						},
+						returnType: {
+							name: "DataSetPermId"
+						}
+					}).done(function (response) {
+						dfd.resolve(response);
+					}).fail(function (error) {
+						dfd.reject(error);
+					});
+				} else {
+					dfd.reject("Please specify exactly one data store");
+				}
+			});
+			return dfd.promise();
+		}
+
+		function getUUID() {
+			return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, c =>
+				(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
+			);
+		}
+
+	    this.uploadFileWorkspaceDSS = function(files) {
 			var thisFacade = this;
-			return this._getDataStores().then(function(dataStores) {
-				if (dataStores.length > 1) {
-					var dfd = jquery.Deferred();
+			var uploadId = getUUID();
+			var dfd = jquery.Deferred();
+			this._uploadFileWorkspaceDSSEmptyDir(uploadId).done(function() {
+				thisFacade._uploadFileWorkspaceDSS(files, 0, uploadId, new Set()).done(function() {
+					dfd.resolve(uploadId);
+				}).fail(function(error) {
+					dfd.reject(error);
+				});
+			}).fail(function(error) {
+				dfd.reject(error);
+			});
+			return dfd.promise();
+    	}
+
+		this._uploadFileWorkspaceDSS = function(files, index, parentId, createdDirectories) {
+			var thisFacade = this;
+			var dfd = jquery.Deferred();
+			if (index < files.length) {
+				var relativePath = files[index].webkitRelativePath;
+				var directoryRelativePath = relativePath.substring(0, relativePath.lastIndexOf("/") + 1);
+				if (directoryRelativePath && !createdDirectories.has(directoryRelativePath)) {
+					this._uploadFileWorkspaceDSSEmptyDir(parentId + "/" + directoryRelativePath)
+						.done(function() {
+							createdDirectories.add(directoryRelativePath);
+							thisFacade._uploadFileWorkspaceDSSFile(files[index], parentId)
+								.done(function() {
+									thisFacade._uploadFileWorkspaceDSS(files, index + 1, parentId, createdDirectories)
+										.done(function() {
+											dfd.resolve();
+										})
+										.fail(function(error) {
+											dfd.reject(error);
+										});
+								})
+								.fail(function(error) {
+									dfd.reject(error);
+								});
+						})
+						.fail(function(error) {
+							dfd.reject(error);
+						});
+				} else {
+					this._uploadFileWorkspaceDSSFile(files[index], parentId)
+						.done(function() {
+							thisFacade._uploadFileWorkspaceDSS(files, index + 1, parentId, createdDirectories)
+								.done(function() {
+									dfd.resolve();
+								})
+								.fail(function(error) {
+									dfd.reject(error);
+								});
+						})
+						.fail(function(error) {
+							dfd.reject(error);
+						});
+				}
+			} else {
+				dfd.resolve();
+			}
+			return dfd.promise();
+		}
+
+		this._uploadFileWorkspaceDSSEmptyDir = function(pathToDir) {
+			var sessionID = facade._private.sessionToken;
+			var filename = encodeURIComponent(pathToDir);
+			var dfd = jquery.Deferred();
+			this._getDataStores().done(function (dataStores) {
+				if (dataStores.length === 1) {
+					fetch(createUrlWithParameters(dataStores[0], "session_workspace_file_upload",
+						"?sessionID=" + sessionID +
+						"&filename=" + filename +
+						"&id=1&startByte=0&endByte=0&size=0&emptyFolder=true"), {
+						method: "POST",
+						headers: {
+							"Content-Type": "multipart/form-data"
+						}
+					}).then(function (response) {
+						dfd.resolve(response);
+					}).catch(function (error) {
+						dfd.reject(error);
+					});
+				} else {
 					dfd.reject("Please specify exactly one data store");
-					return dfd.promise();
 				}
+			}).fail(function(error) {
+				dfd.reject(error);
+			});
+			return dfd.promise();
+		}
 
-				return facade._private.ajaxRequest({
-					url : thisFacade._createUrl(dataStores[0]),
-					data : {
-						"method" : "createUploadedDataSet",
-						"params" : [ facade._private.sessionToken, creation ]
+		this._uploadFileWorkspaceDSSFile = function(file, parentId) {
+			const dfd = jquery.Deferred();
+			this._getDataStores().done(function(dataStores) {
+				uploadBlob(dataStores[0], parentId, facade._private.sessionToken, file, 0, 1048576)
+					.done(function() {
+						dfd.resolve();
+					}).fail(function(error) {
+						dfd.reject(error);
+					});
+			}).fail(function(error) {
+				dfd.reject(error);
+			});
+			return dfd.promise();
+		}
+
+		function uploadBlob(dataStore, parentId, sessionID, file, startByte, chunkSize) {
+			var fileSize = file.size;
+			var promises = [];
+			for (var byte = startByte; byte < fileSize; byte += chunkSize) {
+				const dfd = jquery.Deferred();
+				fetch(createUrlWithParameters(dataStore, "session_workspace_file_upload",
+						"?sessionID=" + sessionID +
+						"&filename=" + encodeURIComponent(parentId + "/" +
+								(file.webkitRelativePath ? file.webkitRelativePath : file.name)) +
+						"&id=1&startByte=" + byte +
+						"&endByte=" + (byte + chunkSize) +
+						"&size=" + fileSize +
+						"&emptyFolder=false"), {
+					method: "POST",
+					headers: {
+						"Content-Type": "multipart/form-data"
 					},
-					returnType : {
-						name : "DataSetPermId"
-					}
+					body: makeChunk(file, byte, Math.min(byte + chunkSize, fileSize))
+				}).then(function () {
+					dfd.resolve();
+				}).catch(function (error) {
+					console.error("Error:", error);
+					dfd.reject(error);
 				});
-			});
+				promises.push(dfd);
+			}
+
+			return jquery.when.apply(jquery, promises);
 		}
 
-    this.uploadFileWorkspaceDSS = function(file) {
-    			var thisFacade = this;
-
-                var getUUID = function() {
-                return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, c =>
-                      (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
-                    );
-                };
-
-                //File
-                var sessionID = facade._private.sessionToken
-                var id = 0;
-                var filename = file.name;
-                var startByte = 0;
-                var endByte = file.size;
-
-    			return this._getDataStores().then(function(dataStores) {
-    				if (dataStores.length > 1) {
-    					var dfd = jquery.Deferred();
-    					dfd.reject("Please specify exactly one data store");
-    					return dfd.promise();
-    				}
-
-                    var parameters = "?sessionID=" + sessionID + "&filename=" + encodeURIComponent(filename) + "&id=" + id + "&startByte=" + startByte + "&endByte=" + endByte;
-    				return facade._private.ajaxRequest({
-    					url : thisFacade._createUrlWithParameters(dataStores[0], "session_workspace_file_upload", parameters),
-                        contentType: "multipart/form-data",
-                        data: file
-    				});
-    			});
-    }
-
-}
+		function makeChunk(file, startByte, endByte) {
+			var blob = undefined;
+			if (file.slice) {
+				blob = file.slice(startByte, endByte);
+			} else if (file.webkitSlice) {
+				blob = file.webkitSlice(startByte, endByte);
+			} else if (file.mozSlice) {
+				blob = file.mozSlice(startByte, endByte);
+			}
+			return blob;
+		}
+	}
 
 	var facade = function(openbisUrl) {
 
diff --git a/openbis/sourceTest/java/ch/systemsx/cisd/openbis/generic/server/business/bo/SampleBOTest.java b/openbis/sourceTest/java/ch/systemsx/cisd/openbis/generic/server/business/bo/SampleBOTest.java
index 44d8b658618314371655ba475d3a10aa82e99808..1ba56ddbeb0504e6d106c44b4ed4cdc76797b431 100644
--- a/openbis/sourceTest/java/ch/systemsx/cisd/openbis/generic/server/business/bo/SampleBOTest.java
+++ b/openbis/sourceTest/java/ch/systemsx/cisd/openbis/generic/server/business/bo/SampleBOTest.java
@@ -885,6 +885,9 @@ public final class SampleBOTest extends AbstractBOTest
                     one(sampleTypeDAO).tryFindSampleTypeByCode(DILUTION_PLATE);
                     will(returnValue(new SampleTypePE()));
 
+                    one(permIdDAO).createPermId();
+                    will(returnValue("2023010112341234-10"));
+
                     one(propertiesConverter).convertProperties(IEntityProperty.EMPTY_ARRAY, null,
                             EXAMPLE_PERSON);
 
@@ -921,6 +924,9 @@ public final class SampleBOTest extends AbstractBOTest
                     one(sampleTypeDAO).tryFindSampleTypeByCode(DILUTION_PLATE);
                     will(returnValue(new SampleTypePE()));
 
+                    one(permIdDAO).createPermId();
+                    will(returnValue("2023010112341234-10"));
+
                     one(propertiesConverter).convertProperties(IEntityProperty.EMPTY_ARRAY, null,
                             EXAMPLE_PERSON);
 
diff --git a/openbis_api/source/java/ch/ethz/sis/openbis/generic/OpenBISAPI.java b/openbis_api/source/java/ch/ethz/sis/openbis/generic/OpenBISAPI.java
new file mode 100644
index 0000000000000000000000000000000000000000..ad3536e726558db21e35dd8def89c1b908ae0a5a
--- /dev/null
+++ b/openbis_api/source/java/ch/ethz/sis/openbis/generic/OpenBISAPI.java
@@ -0,0 +1,224 @@
+package ch.ethz.sis.openbis.generic;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+import org.apache.log4j.Logger;
+import org.eclipse.jetty.client.api.ContentResponse;
+import org.eclipse.jetty.client.api.Request;
+import org.eclipse.jetty.client.util.BytesContentProvider;
+import org.eclipse.jetty.http.HttpMethod;
+
+import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.id.DataSetPermId;
+import ch.ethz.sis.openbis.generic.dssapi.v3.IDataStoreServerApi;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.dataset.create.UploadedDataSetCreation;
+import ch.systemsx.cisd.common.exceptions.UserFailureException;
+import ch.systemsx.cisd.common.http.JettyHttpClientFactory;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
+import ch.systemsx.cisd.common.spring.HttpInvokerUtils;
+
+public class OpenBISAPI {
+
+    private static final Logger OPERATION_LOG = LogFactory.getLogger(LogCategory.OPERATION, OpenBISAPI.class);
+
+    private static final int DEFAULT_TIMEOUT_IN_MILLIS = 30000; //30 seconds
+
+    private static final int CHUNK_SIZE = 1048576; // 1 MiB
+
+    private static final Collection<Integer> ACCEPTABLE_STATUSES = List.of(200);
+
+    private final IApplicationServerApi asFacade;
+
+    private final IDataStoreServerApi dssFacade;
+
+    private String sessionToken;
+
+    private final int timeout;
+
+    private final String asURL;
+
+    private final String dssURL;
+
+    public OpenBISAPI(final String asURL, final String dssURL)
+    {
+        this(asURL, dssURL, DEFAULT_TIMEOUT_IN_MILLIS);
+    }
+
+    public OpenBISAPI(final String asURL, final String dssURL, final int timeout)
+    {
+        this.timeout = timeout;
+        this.asURL = asURL;
+        asFacade = HttpInvokerUtils.createServiceStub(IApplicationServerApi.class, this.asURL + IApplicationServerApi.SERVICE_URL, timeout);
+        this.dssURL = dssURL;
+        dssFacade = HttpInvokerUtils.createServiceStub(IDataStoreServerApi.class, this.dssURL + IDataStoreServerApi.SERVICE_URL, timeout);
+    }
+
+    public String getSessionToken()
+    {
+        return sessionToken;
+    }
+
+    public void setSessionToken(final String sessionToken)
+    {
+        this.sessionToken = sessionToken;
+    }
+
+    public String login(String userId, String password) {
+        String sessionToken = asFacade.login(userId, password);
+        setSessionToken(sessionToken);
+        return sessionToken;
+    }
+
+    public void logout() {
+        asFacade.logout(sessionToken);
+    }
+
+    public DataSetPermId createUploadedDataSet(final UploadedDataSetCreation newDataSet)
+    {
+        return dssFacade.createUploadedDataSet(sessionToken, newDataSet);
+    }
+
+    public String uploadFileWorkspaceDSS(final Path fileOrFolder)
+    {
+        String uploadId = uploadFileWorkspaceDSSEmptyDir(UUID.randomUUID().toString());
+        uploadFileWorkspaceDSS(fileOrFolder.toFile(), uploadId);
+        return uploadId;
+    }
+
+    //
+    // Helper Methods to upload files to DSS Session Workspace
+    //
+
+    /**
+     * Upload file or folder to the DSS SessionWorkspaceFileUploadServlet and return the ID to be used by createUploadedDataSet
+     * This method hides the complexities of uploading a folder with many files and does the uploads in chunks.
+     */
+    private String uploadFileWorkspaceDSS(final File fileOrFolder, final String parentsOrNull)
+    {
+        if (fileOrFolder.exists() == false)
+        {
+            throw new UserFailureException("Path doesn't exist: " + fileOrFolder);
+        }
+        String fileNameOrFolderName = "";
+        if (parentsOrNull != null)
+        {
+            fileNameOrFolderName = parentsOrNull + "/";
+        }
+        fileNameOrFolderName += fileOrFolder.getName();
+
+        if (fileOrFolder.isDirectory())
+        {
+            uploadFileWorkspaceDSSEmptyDir(fileNameOrFolderName);
+            for (File file : fileOrFolder.listFiles())
+            {
+                uploadFileWorkspaceDSS(file, fileNameOrFolderName);
+            }
+        } else {
+            uploadFileWorkspaceDSSFile(fileNameOrFolderName, fileOrFolder);
+        }
+        return fileNameOrFolderName;
+    }
+
+    private String uploadFileWorkspaceDSSEmptyDir(String pathToDir) {
+        final org.eclipse.jetty.client.HttpClient client = JettyHttpClientFactory.getHttpClient();
+        final Request httpRequest = client.newRequest(dssURL + "/session_workspace_file_upload")
+                .method(HttpMethod.POST);
+        httpRequest.param("sessionID", sessionToken);
+        httpRequest.param("id", "1");
+        httpRequest.param("filename", pathToDir);
+        httpRequest.param("startByte", Long.toString(0));
+        httpRequest.param("endByte", Long.toString(0));
+        httpRequest.param("size", Long.toString(0));
+        httpRequest.param("emptyFolder", Boolean.TRUE.toString());
+
+        try {
+            final ContentResponse response = httpRequest.send();
+            final int status = response.getStatus();
+            if (status != 200)
+            {
+                throw new IOException(response.getContentAsString());
+            }
+        } catch (final IOException | TimeoutException | InterruptedException | ExecutionException e)
+        {
+            throw new RuntimeException(e);
+        }
+        return pathToDir;
+    }
+
+    private String uploadFileWorkspaceDSSFile(String pathToFile, File file) {
+        try {
+            long start = 0;
+            for (byte[] chunk : streamFile(file, CHUNK_SIZE)) {
+                final long end = start + chunk.length;
+
+                final org.eclipse.jetty.client.HttpClient client = JettyHttpClientFactory.getHttpClient();
+                final Request httpRequest = client.newRequest(dssURL + "/session_workspace_file_upload")
+                        .method(HttpMethod.POST);
+                httpRequest.param("sessionID", sessionToken);
+                httpRequest.param("id", "1");
+                httpRequest.param("filename", pathToFile);
+                httpRequest.param("startByte", Long.toString(start));
+                httpRequest.param("endByte", Long.toString(end));
+                httpRequest.param("size", Long.toString(file.length()));
+                httpRequest.content(new BytesContentProvider(chunk));
+                final ContentResponse response = httpRequest.send();
+                final int status = response.getStatus();
+                if (status != 200) {
+                    throw new IOException(response.getContentAsString());
+                }
+                start += CHUNK_SIZE;
+            }
+        } catch (final IOException | TimeoutException | InterruptedException | ExecutionException e) {
+            throw new RuntimeException(e);
+        }
+        return pathToFile;
+    }
+
+    private Iterable<byte[]> streamFile(final File file, final int chunkSize) throws FileNotFoundException
+    {
+        final InputStream inputStream = new FileInputStream(file);
+
+        return new Iterable<byte[]>() {
+            @Override
+            public Iterator<byte[]> iterator() {
+                return new Iterator<byte[]>() {
+                    public boolean hasMore = true;
+
+                    public boolean hasNext() {
+                        return hasMore;
+                    }
+
+                    public byte[] next() {
+                        try {
+                            byte[] bytes = inputStream.readNBytes(chunkSize);
+                            if (bytes.length < chunkSize) {
+                                hasMore = false;
+                                inputStream.close();
+                            }
+                            return bytes;
+                        } catch (final IOException e) {
+                            try {
+                                inputStream.close();
+                            } catch (final IOException ex) {
+                                throw new RuntimeException(ex);
+                            }
+                            throw new RuntimeException(e);
+                        }
+                    }
+                };
+            }
+        };
+    }
+}
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims/1/dss/reporting-plugins/exports-api/exportsApi.py b/openbis_standard_technologies/dist/core-plugins/eln-lims/1/dss/reporting-plugins/exports-api/exportsApi.py
index 5c4fd14cc39983a3a196f9149f37ba6ffbc82570..9565d715b0f63b16145540dc4d72e7e83d6f5ef4 100644
--- a/openbis_standard_technologies/dist/core-plugins/eln-lims/1/dss/reporting-plugins/exports-api/exportsApi.py
+++ b/openbis_standard_technologies/dist/core-plugins/eln-lims/1/dss/reporting-plugins/exports-api/exportsApi.py
@@ -151,10 +151,12 @@ def validateDataSize(entitiesToExport, tr):
             estimatedSizeInBytes += 12000;  # AVG File Metadata size
     estimatedSizeInMegabytes = estimatedSizeInBytes / 1000000;
     operationLog.info(
-        "Size Limit check - limitDataSizeInBytes: " + str(limitDataSizeInBytes) + " > " + " estimatedSizeInBytes: " + str(estimatedSizeInBytes));
+        u"Size Limit check - limitDataSizeInBytes: " + str(limitDataSizeInBytes) +
+        u" > estimatedSizeInBytes: " + str(estimatedSizeInBytes));
     if estimatedSizeInBytes > limitDataSizeInBytes:
-        raise UserFailureException("The selected data is " + str(estimatedSizeInMegabytes) + " MB that is bigger than the configured limit of " + str(
-            limitDataSizeInMegabytes) + " MB");
+        raise UserFailureException(u"The selected data is " + str(estimatedSizeInMegabytes) +
+                                   u" MB that is bigger than the configured limit of " +
+                                   str(limitDataSizeInMegabytes) + u" MB");
 
 
 def findEntitiesToExport(params):
@@ -172,17 +174,17 @@ def findEntitiesToExport(params):
         if entity.get("expand"):
             entitiesToExpand.append(entityAsPythonMap);
 
-    operationLog.info("Found %d entities to expand." % len(entitiesToExpand))
+    operationLog.info(u"Found %d entities to expand." % len(entitiesToExpand))
     while entitiesToExpand:
         entityToExpand = entitiesToExpand.popleft();
         type = entityToExpand["type"];
         permId = entityToExpand["permId"];
-        operationLog.info("Expanding type: " + str(type) + " permId: " + str(permId));
+        operationLog.info(u"Expanding type: " + type + u" permId: " + permId);
 
         if type == "ROOT":
             criteria = SpaceSearchCriteria();
             results = v3.searchSpaces(sessionToken, criteria, SpaceFetchOptions());
-            operationLog.info("Found: " + str(results.getTotalCount()) + " spaces");
+            operationLog.info(u"Found: %d spaces" % results.getTotalCount());
             for space in results.getObjects():
                 entityFound = {"type": "SPACE", "permId": space.getCode(), "registrationDate": space.getRegistrationDate()};
                 addToExportWithoutRepeating(entitiesToExport, entityFound);
@@ -191,7 +193,7 @@ def findEntitiesToExport(params):
             criteria = ProjectSearchCriteria();
             criteria.withSpace().withCode().thatEquals(permId);
             results = v3.searchProjects(sessionToken, criteria, ProjectFetchOptions());
-            operationLog.info("Found: " + str(results.getTotalCount()) + " projects");
+            operationLog.info(u"Found: %d projects" % results.getTotalCount());
             for project in results.getObjects():
                 entityFound = {"type": "PROJECT", "permId": project.getPermId().getPermId(), "registrationDate": project.getRegistrationDate()};
                 addToExportWithoutRepeating(entitiesToExport, entityFound);
@@ -200,7 +202,7 @@ def findEntitiesToExport(params):
             criteria = ExperimentSearchCriteria();
             criteria.withProject().withPermId().thatEquals(permId);
             results = v3.searchExperiments(sessionToken, criteria, ExperimentFetchOptions());
-            operationLog.info("Found: " + str(results.getTotalCount()) + " experiments");
+            operationLog.info(u"Found: %d experiments" % results.getTotalCount());
             for experiment in results.getObjects():
                 entityFound = {"type": "EXPERIMENT", "permId": experiment.getPermId().getPermId(),
                                "registrationDate": experiment.getRegistrationDate()};
@@ -210,7 +212,7 @@ def findEntitiesToExport(params):
             criteria = SampleSearchCriteria();
             criteria.withExperiment().withPermId().thatEquals(permId);
             results = v3.searchSamples(sessionToken, criteria, SampleFetchOptions());
-            operationLog.info("Found: " + str(results.getTotalCount()) + " samples");
+            operationLog.info(u"Found: %d samples" % results.getTotalCount());
 
             dCriteria = DataSetSearchCriteria();
             dCriteria.withExperiment().withPermId().thatEquals(permId);
@@ -218,13 +220,13 @@ def findEntitiesToExport(params):
             fetchOptions = DataSetFetchOptions()
             fetchOptions.withDataStore()
             dResults = v3.searchDataSets(sessionToken, dCriteria, fetchOptions);
-            operationLog.info("Found: " + str(dResults.getTotalCount()) + " datasets");
+            operationLog.info(u"Found: %d datasets" % dResults.getTotalCount());
             for dataset in dResults.getObjects():
                 entityFound = {"type": "DATASET", "permId": dataset.getPermId().getPermId(), "registrationDate": dataset.getRegistrationDate()};
                 addToExportWithoutRepeating(entitiesToExport, entityFound);
                 entitiesToExpand.append(entityFound);
 
-            operationLog.info("Found: " + str(results.getTotalCount()) + " samples");
+            operationLog.info(u"Found: %d samples" % results.getTotalCount());
             for sample in results.getObjects():
                 entityFound = {"type": "SAMPLE", "permId": sample.getPermId().getPermId(), "registrationDate": sample.getRegistrationDate()};
                 addToExportWithoutRepeating(entitiesToExport, entityFound);
@@ -235,7 +237,7 @@ def findEntitiesToExport(params):
             fetchOptions = DataSetFetchOptions()
             fetchOptions.withDataStore()
             results = v3.searchDataSets(sessionToken, criteria, fetchOptions);
-            operationLog.info("Found: " + str(results.getTotalCount()) + " datasets");
+            operationLog.info(u"Found: %d datasets" % results.getTotalCount());
             for dataset in results.getObjects():
                 entityFound = {"type": "DATASET", "permId": dataset.getPermId().getPermId(), "registrationDate": dataset.getRegistrationDate()};
                 addToExportWithoutRepeating(entitiesToExport, entityFound);
@@ -244,7 +246,7 @@ def findEntitiesToExport(params):
             criteria = DataSetFileSearchCriteria();
             criteria.withDataSet().withPermId().thatEquals(permId);
             results = v3d.searchFiles(sessionToken, criteria, DataSetFileFetchOptions());
-            operationLog.info("Found: " + str(results.getTotalCount()) + " files");
+            operationLog.info(u"Found: %d files" % results.getTotalCount());
             for file in results.getObjects():
                 entityFound = {"type": "FILE", "permId": permId, "path": file.getPath(), "isDirectory": file.isDirectory(),
                                "length": file.getFileLength()};
@@ -282,7 +284,7 @@ def generateFilesInZip(zos, entities, includeRoot, sessionToken, tempDirPath, de
     for entity in entities:
         type = entity["type"];
         permId = entity["permId"];
-        operationLog.info("exporting type: " + str(type) + " permId: " + str(permId));
+        operationLog.info(u"exporting type: %s permId: %s" % (type, permId));
         entityObj = None;
         entityFilePath = None;
 
@@ -383,8 +385,7 @@ def generateFilesInZip(zos, entities, includeRoot, sessionToken, tempDirPath, de
         if entityObj is not None:
             objectCache[permId] = entityObj;
 
-        operationLog.info("--> Entity type: " + type + " permId: " + permId + " obj: " + str(entityObj is not None) + " path: " + str(
-            entityFilePath) + " before files.");
+        operationLog.info(u"--> Entity type: %s permId: %s obj: %s path: %s before files." % (type, permId, str(entityObj is not None), entityFilePath));
         if entityObj is not None and entityFilePath is not None:
             # JSON
             entityJson = String(objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(entityObj));
@@ -403,19 +404,20 @@ def generateFilesInZip(zos, entities, includeRoot, sessionToken, tempDirPath, de
             entityHTML = getDOCX(entityObj, v3, sessionToken, True);
             fileMetadatum = addFile(tempDirPath, entityFilePath, "html", entityHTML, zos, deflated=deflated);
             fileMetadata.append(fileMetadatum)
-            operationLog.info("--> Entity type: " + type + " permId: " + permId + " post html.");
+            operationLog.info(u"--> Entity type: %s permId: %s post html."
+                              % (type, permId));
     if emptyZip:
-        raise IOError('Nothing added to ZIP file.')
+        raise IOError("Nothing added to ZIP file.")
     return fileMetadata
 
 
 def generateDownloadUrl(sessionToken, tempZipFileName, tempZipFilePath):
     # Store on workspace to be able to generate a download link
-    operationLog.info("Zip file can be found on the temporal directory: " + tempZipFilePath);
+    operationLog.info(u"Zip file can be found on the temporal directory: %s" % tempZipFilePath);
     dssService = ServiceProvider.getApplicationContext().getBean("dss-service-rpc-generic")
     dssService.putFileToSessionWorkspace(sessionToken, tempZipFileName, FileInputStream(File(tempZipFilePath)))
     tempZipFileWorkspaceURL = DataStoreServer.getConfigParameters().getDownloadURL() + "/datastore_server/session_workspace_file_download?sessionID=" + sessionToken + "&filePath=" + tempZipFileName;
-    operationLog.info("Zip file can be downloaded from the workspace: " + tempZipFileWorkspaceURL);
+    operationLog.info(u"Zip file can be downloaded from the workspace: %s" % tempZipFileWorkspaceURL);
     return tempZipFileWorkspaceURL