diff --git a/pybis/src/python/pybis/attribute.py b/pybis/src/python/pybis/attribute.py
index 0ac40c5e09efc7315f120c2074495a31e9e1e410..6d01ebf9f853361bdc34d5e73e0d67dac949ed32 100644
--- a/pybis/src/python/pybis/attribute.py
+++ b/pybis/src/python/pybis/attribute.py
@@ -106,6 +106,10 @@ class AttrHolder():
                 new_obj['typeId'] = self._type['permId']
                 continue
 
+            elif attr == 'kind':
+                # when creating a new dataset, the attribute «kind» is called «dataSetKind»
+                new_obj['dataSetKind'] = self._kind
+
             elif attr == 'attachments':
                 attachments = getattr(self, '_new_attachments')
                 if attachments is None:
diff --git a/pybis/src/python/pybis/dataset.py b/pybis/src/python/pybis/dataset.py
index 0dc26a232dd3f74622f8913b48712feef3a1a7d4..5c18c4f0dbb348f97fba14ecfc2631603c8455e6 100644
--- a/pybis/src/python/pybis/dataset.py
+++ b/pybis/src/python/pybis/dataset.py
@@ -17,7 +17,31 @@ class DataSet(OpenBisObject):
     """ DataSet are openBIS objects that contain the actual files.
     """
 
-    def __init__(self, openbis_obj, type=type, data=None, files=None, folder=None, props=None, **kwargs):
+    def __init__(self, openbis_obj, type=type, data=None, files=None, folder=None, kind=None, props=None, **kwargs):
+
+        if kwargs is None:
+            kwargs = {}
+
+        if kwargs.get('code'):
+            kwargs["autoGeneratedCode"] = False
+        else:
+            kwargs["autoGeneratedCode"] = True
+
+        if kind == 'PHYSICAL_DATA':
+            if files is None:
+                raise ValueError('please provide at least one file')
+
+            if isinstance(files, str):
+                files = [files]
+
+            for file in files:
+                if not os.path.exists(file):
+                    raise ValueError('File {} does not exist'.format(file))
+
+
+            self.__dict__['files'] = files
+
+        # initialize the attributes
         super(DataSet, self).__init__(openbis_obj, type, data, props, **kwargs)
 
         self.__dict__['files_in_wsp'] = []
@@ -31,9 +55,15 @@ class DataSet(OpenBisObject):
                 self.__dict__['shareId'] = data['physicalData']['shareId']
                 self.__dict__['location'] = data['physicalData']['location']
         
-        # new DataSet
-        if files is not None:
-            self.__dict__['files'] = files
+
+        if kind is not None:
+            kind = kind.upper()
+            allowed_kinds = ['PHYSICAL_DATA', 'CONTAINER', 'LINK']
+            if kind not in allowed_kinds:
+                raise ValueError(
+                    "only these values are allowed for kind: {}".format(allowed_kinds)
+                )
+            self.a.__dict__['_kind'] = kind
 
         self.__dict__['folder'] = folder
 
@@ -391,41 +421,67 @@ class DataSet(OpenBisObject):
 
 
     def save(self):
+
         if self.is_new:
-            if self.files is None or len(self.files) == 0:
-                raise ValueError('Cannot register a dataset without a file. Please provide at least one file')
+            datastores = self.openbis.get_datastores()
+            permId = None
 
             if self.sample is None and self.experiment is None:
                 raise ValueError('A DataSet must be either connected to a Sample or an Experiment')
 
-            # upload the data to the user session workspace
-            datastores = self.openbis.get_datastores()
-
-            self.upload_files(
-                datastore_url= datastores['downloadUrl'][0],
-                files=self.files,
-                folder='',
-                wait_until_finished=True
-            )
-
-            # activate the ingestion plugin, as soon as the data is uploaded
-            request = self._generate_plugin_request(dss=datastores['code'][0])
-
-            resp = self.openbis._post_request(self.openbis.reg_v1, request)
-
-            if resp['rows'][0][0]['value'] == 'OK':
-                permId = resp['rows'][0][2]['value']
-                if permId is None or permId == '': 
-                    self.__dict__['is_new'] = False
-                    if VERBOSE: print("DataSet successfully created. Because you connected to an openBIS version older than 16.05.04, you cannot update the object.")
+            if self.kind == 'PHYSICAL_DATA':
+                if self.files is None or len(self.files) == 0:
+                    raise ValueError(
+                        'Cannot register a dataset without a file. Please provide at least one file'
+                    )
+                
+                # for uploading physical data, we first upload it to the session workspace
+                self.upload_files(
+                    datastore_url= datastores['downloadUrl'][0],
+                    files=self.files,
+                    folder='',
+                    wait_until_finished=True
+                )
+
+                # activate the ingestion plugin, as soon as the data is uploaded
+                # this will actually register the dataset in the datastore and the AS
+                request = self._generate_plugin_request(dss=datastores['code'][0])
+                resp = self.openbis._post_request(self.openbis.reg_v1, request)
+                if resp['rows'][0][0]['value'] == 'OK':
+                    permId = resp['rows'][0][2]['value']
+                    if permId is None or permId == '': 
+                        self.__dict__['is_new'] = False
+                        if VERBOSE: print("DataSet successfully created. Because you connected to an openBIS version older than 16.05.04, you cannot update the object.")
+                    else:
+                        new_dataset_data = self.openbis.get_dataset(permId, only_data=True)
+                        self._set_data(new_dataset_data)
+                        if VERBOSE: print("DataSet successfully created.")
+                        return self
                 else:
-                    new_dataset_data = self.openbis.get_dataset(permId, only_data=True)
-                    self._set_data(new_dataset_data)
-                    if VERBOSE: print("DataSet successfully created.")
+                    raise ValueError('Error while creating the DataSet: ' + resp['rows'][0][1]['value'])
+            # CONTAINER or LINK
             else:
-                raise ValueError('Error while creating the DataSet: ' + resp['rows'][0][1]['value'])
+                if self.files is not None and len(self.files) > 0:
+                    raise ValueError(
+                        'DataSets of kind CONTAINER or LINK cannot contain data'
+                    )
+                request = self._new_attrs()
+                props = self.p._all_props()
+                DSpermId = datastores['code'][0]
+                request["params"][1][0]["properties"] = props
+                request["params"][1][0]["dataStoreId"] = {
+                    "permId": DSpermId,
+                    "@type": "as.dto.datastore.id.DataStorePermId"
+                }
+                resp = self.openbis._post_request(self.openbis.as_v3, request)
+
+                if VERBOSE: print("DataSet successfully created.")
+                new_dataset_data = self.openbis.get_dataset(resp[0]['permId'], only_data=True)
+                self._set_data(new_dataset_data)
+                return self
 
             
+        # updating the DataSet
         else:
             request = self._up_attrs()
             props = self.p._all_props()
diff --git a/pybis/src/python/pybis/definitions.py b/pybis/src/python/pybis/definitions.py
index 6ccd5ddb2d494faba0795044b881072308a90512..9371574023d18ea5860339fe5270652d28e017a3 100644
--- a/pybis/src/python/pybis/definitions.py
+++ b/pybis/src/python/pybis/definitions.py
@@ -63,8 +63,8 @@ def openbis_definitions(entity):
             "multi": "parents children components tags attachments".split(),
         },
         "DataSet": {
-            "attrs_new": "type kind experiment sample parents children components containers tags".split(),
-            "attrs_up": "kind parents children experiment sample components containers tags".split(),
+            "attrs_new": "type code autoGeneratedCode kind experiment sample parents children components containers tags".split(),
+            "attrs_up": "parents children experiment sample components containers tags".split(),
             "attrs": "code permId type kind experiment sample parents children components containers tags accessDate dataProducer dataProductionDate registrator registrationDate modifier modificationDate dataStore size measured".split(),
 
             "ids2type": {
diff --git a/pybis/src/python/pybis/pybis.py b/pybis/src/python/pybis/pybis.py
index 16212f4e5a2aaf0a6344edad65127dee84c1f21a..2d4a15ae0ec067afc14ee38c01ccf796f5f51ab1 100644
--- a/pybis/src/python/pybis/pybis.py
+++ b/pybis/src/python/pybis/pybis.py
@@ -2783,21 +2783,13 @@ class Openbis:
 
     new_object = new_sample # Alias
 
-    def new_dataset(self, type=None, files=None, props=None, folder=None, kind='PHYSICAL_DATA', **kwargs):
+    def new_dataset(self, type=None, kind='PHYSICAL_DATA', files=None, props=None, folder=None, **kwargs):
         """ Creates a new dataset of a given sample type.
         """
-        if files is None:
-            raise ValueError('please provide at least one file')
-        elif isinstance(files, str):
-            files = [files]
-
-        for file in files:
-            if not os.path.exists(file):
-                raise ValueError('File {} does not exist'.format(file))
 
         type_obj = self.get_dataset_type(type.upper())
 
-        return DataSet(self, type=type_obj, files=files, folder=folder, props=props, **kwargs)
+        return DataSet(self, type=type_obj, kind=kind, files=files, folder=folder, props=props, **kwargs)
     
     def new_semantic_annotation(self, entityType=None, propertyType=None, **kwargs):
         """ Note: not functional yet. """
@@ -2836,7 +2828,6 @@ class Openbis:
             return dss[dss['code'] == dss_code]['downloadUrl'][0]
 
 
-
 class LinkedData():
     def __init__(self, data=None):
         self.data = data if data is not None else []