diff --git a/app-openbis-command-line/src/python/CHANGELOG.md b/app-openbis-command-line/src/python/CHANGELOG.md
index f8f41d5217c7f2e071003942e937aa6410986f43..675e0d034e01550c52bb0b7648fa85cd6d4969c0 100644
--- a/app-openbis-command-line/src/python/CHANGELOG.md
+++ b/app-openbis-command-line/src/python/CHANGELOG.md
@@ -1,3 +1,9 @@
+# New in version 0.4.2
+
+* Added filtering by object in object and data_set search commands
+* Added recursive search to object and data_set search commands
+* Updated documentation regarding authentication
+
 # New in version 0.4.1
 
 * Fixed parameters for determine_hostname method in addref functionality 
diff --git a/app-openbis-command-line/src/python/obis/__init__.py b/app-openbis-command-line/src/python/obis/__init__.py
index a103be85d40aeedac86ade4df9488dc0f54777f0..ca2cdd57ee2dfcd20ce8bc2a7a0e04837f651243 100644
--- a/app-openbis-command-line/src/python/obis/__init__.py
+++ b/app-openbis-command-line/src/python/obis/__init__.py
@@ -14,6 +14,6 @@
 #
 __author__ = "ID SIS • ETH Zürich"
 __email__ = "openbis-support@id.ethz.ch"
-__version__ = "0.4.1"
+__version__ = "0.4.2rc1"
 
 from .dm import *
diff --git a/app-openbis-command-line/src/python/obis/dm/commands/openbis_command.py b/app-openbis-command-line/src/python/obis/dm/commands/openbis_command.py
index 905e800a8b866c899bb7204e2ac8056ea7509ef1..e694b4f32007c7e991a6ce09d103809013954a7c 100644
--- a/app-openbis-command-line/src/python/obis/dm/commands/openbis_command.py
+++ b/app-openbis-command-line/src/python/obis/dm/commands/openbis_command.py
@@ -154,7 +154,8 @@ class OpenbisCommand(object):
     def login(self):
         """ Restore session token if available. """
         if 'config' in self.config_dict.keys():
-            if 'openbis_token' in self.config_dict['config'].keys():
+            if 'openbis_token' in self.config_dict['config'].keys() and \
+                    self.config_dict['config']['openbis_token'] is not None:
                 self.openbis.set_token(self.config_dict['config']['openbis_token'], True)
         """ Checks for valid session and asks user for password
         if login is needed. """
@@ -225,7 +226,8 @@ class OpenbisCommand(object):
         # ask user
         hostname = self.ask_for_hostname(socket.gethostname())
         # store
-        self.data_mgmt.config('config', True, False, OperationType.SET, prop='hostname', value=hostname)
+        self.data_mgmt.config('config', True, False, OperationType.SET, prop='hostname',
+                              value=hostname)
         return hostname
 
     def ask_for_hostname(self, hostname):
diff --git a/app-openbis-command-line/src/python/obis/dm/commands/search.py b/app-openbis-command-line/src/python/obis/dm/commands/search.py
index bb41b68df36cba2320564cee77260d6f9e2c5368..d4570b2ccdbf14bb03106e2f15e8270ed2193b8a 100644
--- a/app-openbis-command-line/src/python/obis/dm/commands/search.py
+++ b/app-openbis-command-line/src/python/obis/dm/commands/search.py
@@ -19,43 +19,53 @@ from ..utils import cd
 from ...scripts.click_util import click_echo
 
 
+def _dfs(objects, prop, func):
+    """Helper function that performs a DFS over the children graph of objects."""
+    stack = [getattr(openbis_obj, prop) for openbis_obj in
+             objects]  # datasets and samples provide children in different formats
+    downloaded = {getattr(openbis_obj, prop): openbis_obj for openbis_obj in objects}
+    visited = set()
+    stack.reverse()
+    output = []
+    while stack:
+        key = stack.pop()
+        if key not in visited:
+            visited.add(key)
+            if key in downloaded:
+                obj = downloaded[key]
+            else:
+                obj = func(key)
+            output += [obj]
+            children = obj.children.copy()
+            children.reverse()
+            for child in children:
+                stack.append(child)
+    return output
+
+
 class Search(OpenbisCommand):
     """
-    Command to search data in openBIS.
+    Command to search samples or datasets in openBIS.
     """
 
-    def __init__(self, dm, filters, save_path):
+    def __init__(self, dm, filters, recursive, save_path):
         """
         :param dm: data management
         :param filters: Dictionary of filter to be used during search
+        :param recursive: Flag indicating recursive search in children
         :param save_path: Path to save results. If not set, results will not be saved.
         """
         self.filters = filters
+        self.recursive = recursive
         self.save_path = save_path
         self.load_global_config(dm)
+        self.props = "*"
+        self.attrs = ["parents", "children"]
         super(Search, self).__init__(dm)
 
     def search_samples(self):
-        properties = None
-        if self.filters['property_code'] is not None and self.filters['property_value'] is not None:
-            properties = {
-                self.filters['property_code']: self.filters['property_value'],
-            }
-
-        args = dict(space=self.filters['space'],
-                    project=self.filters['project'],  # Not Supported with Project Samples disabled
-                    experiment=self.filters['experiment'],
-                    type=self.filters['type_code'],
-                    where=properties,
-                    attrs=["parents", "children"],
-                    props="*")  # Fetch all properties
-
-        if self.filters['registration_date'] is not None:
-            args['registrationDate'] = self.filters['registration_date']
-        if self.filters['modification_date'] is not None:
-            args['modificationDate'] = self.filters['modification_date']
+        search_results = self._search_samples()
 
-        search_results = self.openbis.get_samples(**args)
         click_echo(f"Objects found: {len(search_results)}")
         if self.save_path is not None:
             click_echo(f"Saving search results in {self.save_path}")
@@ -66,37 +76,81 @@ class Search(OpenbisCommand):
 
         return CommandResult(returncode=0, output="Search completed.")
 
+    def _search_samples(self):
+        """Helper method to search samples"""
+
+        if "object_code" in self.filters:
+            results = self.openbis.get_samples(identifier=self.filters['object_code'],
+                                               attrs=self.attrs, props=self.props)
+        else:
+            args = self._get_filtering_args(self.props)
+            results = self.openbis.get_samples(**args)
+
+        if self.recursive:
+            output = _dfs(results.objects, 'identifier',
+                          self.openbis.get_sample)  # samples provide identifiers as children
+            search_results = self.openbis._sample_list_for_response(props=self.props,
+                                                                    response=[sample.data for sample
+                                                                              in output],
+                                                                    parsed=True)
+        else:
+            search_results = results
+        return search_results
+
     def search_data_sets(self):
         if self.save_path is not None and self.fileservice_url() is None:
             return CommandResult(returncode=-1,
                                  output="Configuration fileservice_url needs to be set for download.")
 
-        properties = None
+        if self.recursive:
+            search_results = self._search_samples()  # Look for samples recursively
+            o = []
+            for sample in search_results.objects:  # get datasets
+                o += sample.get_datasets(
+                    attrs=self.attrs, props=self.props)
+            output = _dfs(o, 'permId',  # datasets provide permIds as children
+                          self.openbis.get_dataset)  # look for child datasets of sample datasets
+            datasets = self.openbis._dataset_list_for_response(props=self.props,
+                                                               response=[dataset.data for dataset
+                                                                         in output],
+                                                               parsed=True)
+        else:
+            if "object_code" in self.filters:
+                results = self.openbis.get_sample(self.filters['object_code']).get_datasets(
+                    attrs=self.attrs, props=self.props)
+            else:
+                args = self._get_filtering_args(self.props)
+                results = self.openbis.get_datasets(**args)
+            datasets = results
+
+        click_echo(f"Data sets found: {len(datasets)}")
+        if self.save_path is not None:
+            click_echo(f"Saving search results in {self.save_path}")
+            with cd(self.data_mgmt.invocation_path):
+                datasets.df.to_csv(self.save_path, index=False)
+        else:
+            click_echo(f"Search results:\n{datasets}")
+
+        return CommandResult(returncode=0, output="Search completed.")
+
+    def _get_filtering_args(self, props):
+        where = None
         if self.filters['property_code'] is not None and self.filters['property_value'] is not None:
-            properties = {
+            where = {
                 self.filters['property_code']: self.filters['property_value'],
             }
+
         args = dict(space=self.filters['space'],
-                    project=self.filters['project'],  # Not Supported with Project Samples disabled
+                    project=self.filters['project'],
+                    # Not Supported with Project Samples disabled
                     experiment=self.filters['experiment'],
                     type=self.filters['type_code'],
-                    where=properties,
-                    attrs=["parents", "children"],
-                    props="*")  # Fetch all properties
+                    where=where,
+                    attrs=self.attrs,
+                    props=props)
 
         if self.filters['registration_date'] is not None:
             args['registrationDate'] = self.filters['registration_date']
         if self.filters['modification_date'] is not None:
             args['modificationDate'] = self.filters['modification_date']
-
-        datasets = self.openbis.get_datasets(**args)
-
-        click_echo(f"Data sets found: {len(datasets)}")
-        if self.save_path is not None:
-            click_echo(f"Saving search results in {self.save_path}")
-            with cd(self.data_mgmt.invocation_path):
-                datasets.df.to_csv(self.save_path, index=False)
-        else:
-            click_echo(f"Search results:\n{datasets}")
-
-        return CommandResult(returncode=0, output="Search completed.")
+        return args
diff --git a/app-openbis-command-line/src/python/obis/dm/data_mgmt.py b/app-openbis-command-line/src/python/obis/dm/data_mgmt.py
index eacdecccc553ffb793bd2690b31ae1f66bd7e5e2..e1ddceafd4da8109d269bee06b2ba3360d8d9c71 100644
--- a/app-openbis-command-line/src/python/obis/dm/data_mgmt.py
+++ b/app-openbis-command-line/src/python/obis/dm/data_mgmt.py
@@ -71,7 +71,8 @@ def DataMgmt(echo_func=None, settings_resolver=None, openbis_config={}, git_conf
             repository_type = Type.LINK
 
     if repository_type == Type.PHYSICAL:
-        return PhysicalDataMgmt(settings_resolver, None, None, openbis, log, data_path,
+        complete_openbis_config(openbis_config, settings_resolver)
+        return PhysicalDataMgmt(settings_resolver, openbis_config, None, openbis, log, data_path,
                                 metadata_path, invocation_path)
     else:
         complete_git_config(git_config)
@@ -229,17 +230,19 @@ class AbstractDataMgmt(metaclass=abc.ABCMeta):
         return
 
     @abc.abstractmethod
-    def search_object(self, filters, save):
+    def search_object(self, filters, recursive, save):
         """Search for objects in openBIS using filtering criteria.
         :param filters: dictionary of filter parameters
+        :param recursive: Flag indicating if search should include children recursively
         :param save: File path to save results. If missing, search results will not be saved.
         """
         return
 
     @abc.abstractmethod
-    def search_data_set(self, filters, save):
+    def search_data_set(self, filters, recursive, save):
         """Search for datasets in openBIS using filtering criteria.
         :param filters: dictionary of filter parameters
+        :param recursive: Flag indicating if search should include children recursively
         :param save: File path to save results. If missing, search results will not be saved.
         """
         return
@@ -642,12 +645,12 @@ class PhysicalDataMgmt(AbstractDataMgmt):
         cmd = Upload(self, sample_id, data_set_type, files)
         return cmd.run()
 
-    def search_object(self,filters, save):
-        cmd = Search(self, filters, save)
+    def search_object(self, filters, recursive, save):
+        cmd = Search(self, filters, recursive, save)
         return cmd.search_samples()
 
-    def search_data_set(self, filters, save):
-        cmd = Search(self, filters, save)
+    def search_data_set(self, filters, recursive, save):
+        cmd = Search(self, filters, recursive, save)
         return cmd.search_data_sets()
 
     def config(self, category, is_global, is_data_set_property, operation_type, prop=None,
diff --git a/app-openbis-command-line/src/python/obis/dm/git.py b/app-openbis-command-line/src/python/obis/dm/git.py
index c22df44daa7cfe6f7bfff37b38cc98c119c1576e..0c5ba19353d698b6ab72177ff65ea04425f344fb 100644
--- a/app-openbis-command-line/src/python/obis/dm/git.py
+++ b/app-openbis-command-line/src/python/obis/dm/git.py
@@ -17,12 +17,14 @@ import shutil
 
 from .checksum import ChecksumGeneratorCrc32, ChecksumGeneratorGitAnnex
 from .utils import run_shell
+from ..scripts.click_util import click_echo
 
 
 class GitWrapper(object):
     """A wrapper on commands to git and git annex."""
 
-    def __init__(self, git_path=None, git_annex_path=None, find_git=None, data_path=None, metadata_path=None, invocation_path=None):
+    def __init__(self, git_path=None, git_annex_path=None, find_git=None, data_path=None,
+                 metadata_path=None, invocation_path=None):
         self.git_path = git_path
         self.git_annex_path = git_annex_path
         self.data_path = data_path
@@ -39,17 +41,20 @@ class GitWrapper(object):
         cmd += params
         return run_shell(cmd, strip_leading_whitespace=strip_leading_whitespace)
 
-
     def can_run(self):
         """Return true if the perquisites are satisfied to run (git and git annex)"""
         if self.git_path is None:
+            click_echo('No git path found!')
             return False
         if self.git_annex_path is None:
+            click_echo('No git-annex path found!')
             return False
         if self._git(['help']).failure():
+            click_echo('Can not run git!')
             # git help should have a returncode of 0
             return False
         if self._git(['annex', 'help']).failure():
+            click_echo('Can not run git-annex!')
             # git help should have a returncode of 0
             return False
         result = run_shell([self.git_path, 'annex', 'version'])
@@ -60,7 +65,7 @@ class GitWrapper(object):
                 try:
                     self.annex_major_version = int(self.annex_version.split(".")[0])
                 except Exception as e:
-                    print("Invalid git-annex version line:",result.output)
+                    print("Invalid git-annex version line:", result.output)
                     return False
         return True
 
@@ -199,9 +204,11 @@ class GitRepoFileInfo(object):
     def cksum(self, files, git_annex_hash_as_checksum=False):
 
         if git_annex_hash_as_checksum == False:
-            checksum_generator = ChecksumGeneratorCrc32(self.git_wrapper.data_path, self.git_wrapper.metadata_path)
+            checksum_generator = ChecksumGeneratorCrc32(self.git_wrapper.data_path,
+                                                        self.git_wrapper.metadata_path)
         else:
-            checksum_generator = ChecksumGeneratorGitAnnex(self.git_wrapper.data_path, self.git_wrapper.metadata_path)
+            checksum_generator = ChecksumGeneratorGitAnnex(self.git_wrapper.data_path,
+                                                           self.git_wrapper.metadata_path)
 
         checksums = []
 
diff --git a/app-openbis-command-line/src/python/obis/dm/utils.py b/app-openbis-command-line/src/python/obis/dm/utils.py
index 580e7e2cbcc0bc914507a96ed9bacfe6c4140269..7ecf98a8c94bd0e5383b1b04f4f7a96f41e03e6b 100644
--- a/app-openbis-command-line/src/python/obis/dm/utils.py
+++ b/app-openbis-command-line/src/python/obis/dm/utils.py
@@ -44,12 +44,12 @@ def complete_openbis_config(config, resolver, local_only=True):
         config['verify_certificates'] = config_dict['verify_certificates']
     if config.get('token') is None:
         config['token'] = None
-    if config.get('is_physical') is None:
-        config['is_physical'] = None
+    if config.get('is_physical') is None and config_dict['is_physical'] is not None:
+        config['is_physical'] = config_dict['is_physical']
     if config.get(
             'allow_http_but_do_not_use_this_in_production_and_only_within_safe_networks') is None:
         config['allow_http_but_do_not_use_this_in_production_and_only_within_safe_networks'] = not \
-        config_dict['allow_only_https']
+            config_dict['allow_only_https']
 
 
 def complete_git_config(config):
diff --git a/app-openbis-command-line/src/python/obis/scripts/cli.py b/app-openbis-command-line/src/python/obis/scripts/cli.py
index 33cc94059f4712f7e5d8d54a4c7ca3a424ce105a..b64e610d0b34f19db84b5f657fc7844eba5dd542 100644
--- a/app-openbis-command-line/src/python/obis/scripts/cli.py
+++ b/app-openbis-command-line/src/python/obis/scripts/cli.py
@@ -217,14 +217,14 @@ def _clear(ctx, settings):
 @click.option('-g', '--is_global', default=False, is_flag=True, help='Get global or local.')
 @click.pass_context
 def settings(ctx, is_global):
-    """ Get all settings.
-    """
+    """ External Data Store: Get all settings. """
     ctx.obj['is_global'] = is_global
 
 
 @settings.command('get')
 @click.pass_context
 def settings_get(ctx):
+    """ External Data Store: Get setting. """
     runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     settings = runner.get_settings()
     settings_str = json.dumps(settings, indent=4, sort_keys=True)
@@ -237,8 +237,7 @@ def settings_get(ctx):
 @click.option('-g', '--is_global', default=False, is_flag=True, help='Set/get global or local.')
 @click.pass_context
 def repository(ctx, is_global):
-    """ Get/set settings related to the repository.
-    """
+    """ External Data Store: Get/set settings related to the repository. """
     runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
     ctx.obj['runner'] = runner
@@ -249,6 +248,7 @@ def repository(ctx, is_global):
 @click.argument('settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def repository_set(ctx, settings):
+    """ External Data Store: Set settings related to the repository. """
     return ctx.obj['runner'].run("repository_set", lambda dm: _set(ctx, settings))
 
 
@@ -256,6 +256,7 @@ def repository_set(ctx, settings):
 @click.argument('settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def repository_get(ctx, settings):
+    """ External Data Store: Get settings related to the repository. """
     return ctx.obj['runner'].run("repository_get", lambda dm: _get(ctx, settings))
 
 
@@ -263,6 +264,7 @@ def repository_get(ctx, settings):
 @click.argument('settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def repository_clear(ctx, settings):
+    """ External Data Store: Clear settings related to the repository. """
     return ctx.obj['runner'].run("repository_clear", lambda dm: _clear(ctx, settings))
 
 
@@ -275,6 +277,8 @@ _search_params = [
     click.option('-space', '--space', default=None, help='Space code'),
     click.option('-project', '--project', default=None, help='Full project identification code'),
     click.option('-experiment', '--experiment', default=None, help='Full experiment code'),
+    click.option('-object', '--object', 'object_code', default=None,
+                 help='Object identification information, it can be permId or identifier'),
     click.option('-type', '--type', 'type_code', default=None, help='Type code'),
     click.option('-property', 'property_code', default=None, help='Property code'),
     click.option('-property-value', 'property_value', default=None,
@@ -284,6 +288,8 @@ _search_params = [
     click.option('-modification-date', '--modification-date', 'modification_date', default=None,
                  help='Modification date, it can be in the format "oYYYY-MM-DD" (e.g. ">2023-01-01")'),
     click.option('-save', '--save', default=None, help='Filename to save results'),
+    click.option('-r', '--recursive', 'recursive', is_flag=True, default=False,
+                 help='Search data recursively'),
 ]
 
 
@@ -293,8 +299,7 @@ _search_params = [
               help='Configure data set property.')
 @click.pass_context
 def data_set(ctx, is_global, is_data_set_property):
-    """ Get/set settings related to the data set.
-    """
+    """ External Data Store: Get/set settings related to the data set. """
     runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
     ctx.obj['is_data_set_property'] = is_data_set_property
@@ -306,6 +311,7 @@ def data_set(ctx, is_global, is_data_set_property):
 @click.argument('data_set_settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def data_set_set(ctx, data_set_settings):
+    """ External Data Store: Set settings related to the data set. """
     return ctx.obj['runner'].run("data_set_set", lambda dm: _set(ctx, data_set_settings))
 
 
@@ -313,6 +319,7 @@ def data_set_set(ctx, data_set_settings):
 @click.argument('data_set_settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def data_set_get(ctx, data_set_settings):
+    """ External Data Store: Get settings related to the data set. """
     return ctx.obj['runner'].run("data_set_get", lambda dm: _get(ctx, data_set_settings))
 
 
@@ -320,6 +327,7 @@ def data_set_get(ctx, data_set_settings):
 @click.argument('data_set_settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def data_set_clear(ctx, data_set_settings):
+    """ External Data Store: Clear settings related to the data set. """
     return ctx.obj['runner'].run("data_set_clear", lambda dm: _clear(ctx, data_set_settings))
 
 
@@ -327,10 +335,13 @@ def data_set_clear(ctx, data_set_settings):
 @add_params(_search_params)
 @click.pass_context
 def data_set_search(ctx, type_code, space, project, experiment, registration_date,
-                    modification_date, property_code, property_value, save):
-    if all(v is None for v in
-           [type_code, space, project, experiment, registration_date, modification_date,
-            property_code, property_value]):
+                    modification_date, object_code, property_code, property_value, save, recursive):
+    """Standard Data Store: Search data sets given the filtering criteria or object identifier.
+    Results of this command can be used in `obis download`."""
+    filtering_arguments = [type_code, space, project, experiment, registration_date,
+                           modification_date,
+                           property_code, property_value]
+    if all(v is None for v in filtering_arguments + [object_code]):
         click_echo("You must provide at least one filtering criteria!")
         return -1
     if (property_code is None and property_value is not None) or (
@@ -338,12 +349,17 @@ def data_set_search(ctx, type_code, space, project, experiment, registration_dat
         click_echo("Property code and property value need to be specified!")
         return -1
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
-    filters = dict(type_code=type_code, space=space,
-                   project=project, experiment=experiment, property_code=property_code,
-                   registration_date=registration_date, modification_date=modification_date,
-                   property_value=property_value)
+    if object_code is not None:
+        if any(v is not None for v in filtering_arguments):
+            click_echo("Object parameter detected! Other filtering arguments will be omitted!")
+        filters = dict(object_code=object_code)
+    else:
+        filters = dict(type_code=type_code, space=space,
+                       project=project, experiment=experiment, property_code=property_code,
+                       registration_date=registration_date, modification_date=modification_date,
+                       property_value=property_value)
     return ctx.obj['runner'].run("data_set_search",
-                                 lambda dm: dm.search_data_set(filters, save)),
+                                 lambda dm: dm.search_data_set(filters, recursive, save))
 
 
 # # object: object_id
@@ -353,7 +369,9 @@ def data_set_search(ctx, type_code, space, project, experiment, registration_dat
 @click.option('-g', '--is_global', default=False, is_flag=True, help='Set/get global or local.')
 @click.pass_context
 def object(ctx, is_global):
-    """ Get/set settings related to the object.
+    """ External Data Store: Get/set properties related to the object.
+
+    Standard Data Store: Get/set properties of objects connected to downloaded datasets.
     """
     runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
@@ -365,6 +383,10 @@ def object(ctx, is_global):
 @click.argument('object_settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def object_set(ctx, object_settings):
+    """ External Data Store: Set properties related to the object.
+
+    Standard Data Store: Set property to all objects connected to downloaded datasets.
+    """
     return ctx.obj['runner'].run("object_set", lambda dm: _set(ctx, object_settings))
 
 
@@ -372,6 +394,10 @@ def object_set(ctx, object_settings):
 @click.argument('object_settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def object_get(ctx, object_settings):
+    """ External Data Store: Get properties related to the object.
+
+    Standard Data Store: Get given properties of all objects connected to downloaded datasets.
+    """
     return ctx.obj['runner'].run("object_get", lambda dm: _get(ctx, object_settings))
 
 
@@ -379,6 +405,7 @@ def object_get(ctx, object_settings):
 @click.argument('object_settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def object_clear(ctx, object_settings):
+    """ External Data Store: Clear properties related to the object. """
     return ctx.obj['runner'].run("object_clear", lambda dm: _clear(ctx, object_settings))
 
 
@@ -386,10 +413,11 @@ def object_clear(ctx, object_settings):
 @add_params(_search_params)
 @click.pass_context
 def object_search(ctx, type_code, space, project, experiment, registration_date,
-                  modification_date, property_code, property_value, save):
-    if all(v is None for v in
-           [type_code, space, project, experiment, registration_date, modification_date,
-            property_code, property_value]):
+                  modification_date, object_code, property_code, property_value, save, recursive):
+    """Standard Data Store: Search for objects using a filtering criteria or object identifier."""
+    filtering_arguments = [type_code, space, project, experiment, registration_date,
+                           modification_date, property_code, property_value]
+    if all(v is None for v in filtering_arguments + [object_code]):
         click_echo("You must provide at least one filtering criteria!")
         return -1
     if (property_code is None and property_value is not None) or (
@@ -397,12 +425,17 @@ def object_search(ctx, type_code, space, project, experiment, registration_date,
         click_echo("Property code and property value need to be specified!")
         return -1
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
-    filters = dict(type_code=type_code, space=space,
-                   project=project, experiment=experiment, property_code=property_code,
-                   registration_date=registration_date, modification_date=modification_date,
-                   property_value=property_value)
+    if object_code is not None:
+        if any(v is not None for v in filtering_arguments):
+            click_echo("Object parameter detected! Other filtering arguments will be omitted!")
+        filters = dict(object_code=object_code)
+    else:
+        filters = dict(type_code=type_code, space=space,
+                       project=project, experiment=experiment, property_code=property_code,
+                       registration_date=registration_date, modification_date=modification_date,
+                       property_value=property_value)
     return ctx.obj['runner'].run("object_search",
-                                 lambda dm: dm.search_object(filters, save))
+                                 lambda dm: dm.search_object(filters, recursive, save))
 
 
 # # collection: collection_id
@@ -412,7 +445,9 @@ def object_search(ctx, type_code, space, project, experiment, registration_date,
 @click.option('-g', '--is_global', default=False, is_flag=True, help='Set/get global or local.')
 @click.pass_context
 def collection(ctx, is_global):
-    """ Get/set settings related to the collection.
+    """ External Data Store: Get/set settings related to the collection.
+
+    Standard Data Store: Get/set properties of all collections connected to downloaded datasets.
     """
     runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
@@ -424,6 +459,10 @@ def collection(ctx, is_global):
 @click.argument('settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def collection_set(ctx, settings):
+    """ External Data Store: Set settings related to the collection.
+
+    Standard Data Store: Set given properties of all collections connected to downloaded datasets.
+    """
     return ctx.obj['runner'].run("collection_set", lambda dm: _set(ctx, settings))
 
 
@@ -431,6 +470,10 @@ def collection_set(ctx, settings):
 @click.argument('settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def collection_get(ctx, settings):
+    """ External Data Store: Get settings related to the collection.
+
+    Standard Data Store: Get given properties of all collections connected to downloaded datasets.
+    """
     return ctx.obj['runner'].run("collection_get", lambda dm: _get(ctx, settings))
 
 
@@ -438,6 +481,7 @@ def collection_get(ctx, settings):
 @click.argument('settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def collection_clear(ctx, settings):
+    """External Data Store: Clear settings related to the collection."""
     return ctx.obj['runner'].run("collection_clear", lambda dm: _clear(ctx, settings))
 
 
@@ -448,7 +492,9 @@ def collection_clear(ctx, settings):
 @click.option('-g', '--is_global', default=False, is_flag=True, help='Set/get global or local.')
 @click.pass_context
 def config(ctx, is_global):
-    """ Get/set configurations.
+    """External Data Store: Get/set configurations.
+
+    Standard Data Store: Get/set configurations.
     """
     if is_global is True:
         runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False, is_physical=True)
@@ -463,6 +509,10 @@ def config(ctx, is_global):
 @click.argument('settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def config_set(ctx, settings):
+    """External Data Store: Set configurations.
+
+    Standard Data Store: Set configurations.
+    """
     return ctx.obj['runner'].run("config_set", lambda dm: _set(ctx, settings))
 
 
@@ -470,6 +520,10 @@ def config_set(ctx, settings):
 @click.argument('settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def config_get(ctx, settings):
+    """External Data Store: Get configurations.
+
+    Standard Data Store: Get configurations.
+    """
     return ctx.obj['runner'].run("config_get", lambda dm: _get(ctx, settings))
 
 
@@ -477,6 +531,8 @@ def config_get(ctx, settings):
 @click.argument('settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def config_clear(ctx, settings):
+    """External Data Store: Clear configurations.
+    """
     return ctx.obj['runner'].run("config_clear", lambda dm: _clear(ctx, settings))
 
 
@@ -500,6 +556,8 @@ _commit_params = [
 @click.pass_context
 @add_params(_commit_params)
 def repository_commit(ctx, msg, auto_add, ignore_missing_parent, repository):
+    """External Data Store: Commit the repository to git and inform openBIS.
+    """
     return ctx.obj['runner'].run("commit",
                                  lambda dm: dm.commit(msg, auto_add, ignore_missing_parent),
                                  repository)
@@ -509,6 +567,8 @@ def repository_commit(ctx, msg, auto_add, ignore_missing_parent, repository):
 @click.pass_context
 @add_params(_commit_params)
 def commit(ctx, msg, auto_add, ignore_missing_parent, repository):
+    """External Data Store: Commit the repository to git and inform openBIS.
+    """
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_commit, msg=msg, auto_add=auto_add,
                ignore_missing_parent=ignore_missing_parent, repository=repository)
@@ -529,19 +589,27 @@ _init_params = [
 @click.pass_context
 @add_params(_init_params)
 def repository_init(ctx, repository_path, description):
+    """External Data Store: Initialize the folder as a data repository.
+    """
     return init_data_impl(ctx, repository_path, description)
 
 
 _init_params_physical = \
     _init_params + \
     [click.option('-p', '--physical', 'is_physical', default=False, is_flag=True,
-                  help='If parent data set is missing, ignore it.')]
+                  help='Initialize folder for Standard Data Store data handling.')]
 
 
 @cli.command(short_help="Initialize the folder as a data repository.")
 @click.pass_context
 @add_params(_init_params_physical)
 def init(ctx, repository_path, description, is_physical):
+    """External Data Store: Initialize the folder as a data repository for External Data Store
+    data handling.
+
+    Standard Data Store: Initialize the folder as a data repository for Standard Data Store
+    data handling.
+    """
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False, is_physical=is_physical)
     ctx.invoke(repository_init, repository_path=repository_path, description=description)
 
@@ -560,6 +628,7 @@ _init_analysis_params += _init_params
 @click.pass_context
 @add_params(_init_analysis_params)
 def repository_init_analysis(ctx, parent, repository_path, description):
+    """External Data Store: Initialize the folder as an analysis folder."""
     return init_analysis_impl(ctx, parent, repository_path, description)
 
 
@@ -567,6 +636,7 @@ def repository_init_analysis(ctx, parent, repository_path, description):
 @click.pass_context
 @add_params(_init_analysis_params)
 def init_analysis(ctx, parent, repository_path, description):
+    """External Data Store: Initialize the folder as an analysis folder."""
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_init_analysis, parent=parent,
                repository_path=repository_path, description=description)
@@ -585,6 +655,7 @@ _status_params = [
 @click.pass_context
 @add_params(_status_params)
 def repository_status(ctx, repository):
+    """External Data Store: Show the state of the obis repository."""
     return ctx.obj['runner'].run("repository_status", lambda dm: dm.status(), repository)
 
 
@@ -592,6 +663,7 @@ def repository_status(ctx, repository):
 @click.pass_context
 @add_params(_status_params)
 def status(ctx, repository):
+    """External Data Store: Show the state of the obis repository."""
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_status, repository=repository)
 
@@ -616,6 +688,7 @@ def _repository_sync(dm, ignore_missing_parent):
 @click.pass_context
 @add_params(_sync_params)
 def repository_sync(ctx, ignore_missing_parent, repository):
+    """External Data Store: Sync the repository with openBIS."""
     return ctx.obj['runner'].run("sync", lambda dm: _repository_sync(dm, ignore_missing_parent),
                                  repository)
 
@@ -624,6 +697,7 @@ def repository_sync(ctx, ignore_missing_parent, repository):
 @click.pass_context
 @add_params(_sync_params)
 def sync(ctx, ignore_missing_parent, repository):
+    """External Data Store: Sync the repository with openBIS."""
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_sync,
                ignore_missing_parent=ignore_missing_parent, repository=repository)
@@ -706,6 +780,7 @@ _addref_params = [
 @click.pass_context
 @add_params(_addref_params)
 def repository_addref(ctx, repository):
+    """Used for External Data Store only."""
     return ctx.obj['runner'].run("addref", lambda dm: dm.addref(), repository)
 
 
@@ -713,6 +788,7 @@ def repository_addref(ctx, repository):
 @click.pass_context
 @add_params(_addref_params)
 def addref(ctx, repository):
+    """Used for External Data Store only."""
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_addref, repository=repository)
 
@@ -733,6 +809,7 @@ _removeref_params = [
 @click.pass_context
 @add_params(_removeref_params)
 def repository_removeref(ctx, data_set_id, repository):
+    """Used for External Data Store only."""
     if data_set_id is not None and repository is not None:
         click_echo("Only provide the data_set id OR the repository.")
         return -1
@@ -744,6 +821,7 @@ def repository_removeref(ctx, data_set_id, repository):
 @click.pass_context
 @add_params(_removeref_params)
 def removeref(ctx, data_set_id, repository):
+    """Used for External Data Store only."""
     ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_removeref, data_set_id=data_set_id,
                repository=repository)
diff --git a/app-openbis-command-line/src/python/setup.py b/app-openbis-command-line/src/python/setup.py
index 2c2d235bd268f25ec910376e81e9cbddf78578d2..3b6db830cb153ea6adb36cafac6255f5c34ab7df 100644
--- a/app-openbis-command-line/src/python/setup.py
+++ b/app-openbis-command-line/src/python/setup.py
@@ -31,7 +31,7 @@ data_files = [
 
 setup(
     name="obis",
-    version="0.4.1",
+    version="0.4.2rc1",
     description="Local data management with assistance from OpenBIS.",
     long_description=long_description,
     long_description_content_type="text/markdown",
diff --git a/docs/app-openbis-command-line/README.md b/docs/app-openbis-command-line/README.md
index 5f6b6cd587e799108fb3f5a242ac78961276c29a..4505c3ef8b2989e313248987b091250d3c39f815 100644
--- a/docs/app-openbis-command-line/README.md
+++ b/docs/app-openbis-command-line/README.md
@@ -22,9 +22,12 @@ case, OpenBIS is aware of its existence and the data can be used for provenance
         1. [Settings](#521-settings)
         2. [Commands](#522-commands)
         3. [Examples](#523-examples)
-6. [Big Data Link Services](#6-big-data-link-services)
-7. [Rationale for obis](#7-rationale-for-obis)
-8. [Literature](#8-literature)
+6. [Authentication](#6-authentication)
+   1. [Login](#61-login)
+   2. [Personal Access Token](#62-personal-access-token)
+7. [Big Data Link Services](#7-big-data-link-services)
+8. [Rationale for obis](#8-rationale-for-obis)
+9. [Literature](#9-literature)
 
 ## 1. Prerequisites
 
@@ -166,7 +169,7 @@ Here is a short summary of which commands are available in given modes:
 | settings clear   |          ❌          |          ✅          |
 | status           |          ❌          |          ✅          |
 | sync             |          ❌          |          ✅          |
-| token            |          ❌          |          ✅          |
+| token            |          ✅          |          ✅          |
 | upload           |          ✅          |          ❌          |
 
 **Login**
@@ -234,6 +237,7 @@ Options:
   -space, --space TEXT            Space code
   -project, --project TEXT        Full project identification code
   -experiment, --experiment TEXT  Full experiment code
+  -object, --object TEXT          Object identification information, it can be permId or identifier
   -type, --type TEXT              Type code
   -registration-date, --registration-date TEXT
                                   Registration date, it can be in the format
@@ -244,12 +248,16 @@ Options:
   -property TEXT                  Property code
   -property-value TEXT            Property value
   -save, --save TEXT              Directory name to save results
+  -r, --recursive                 Search data recursively
 ```
 
 With `data_set search` command, obis connects to a configured OpenBIS instance and searches for all
-data sets that fulfill given filtering criteria.
-At least one filtering criteria must be specified. Search results can be downloaded by
-using `save` option.
+data sets that fulfill the given filtering criteria or match the given object identification.
+At least one search option must be specified.
+
+Search results can be downloaded into a file by using the `--save` option.
+
+The recursive option additionally searches the data sets of child samples or child data sets.
 
 *Note: Filtering by `-project` may not work when `Project Samples` are disabled in OpenBIS
 configuration.*
@@ -306,6 +314,7 @@ Options:
   -space, --space TEXT            Space code
   -project, --project TEXT        Full project identification code
   -experiment, --experiment TEXT  Full experiment 
+  -object, --object TEXT          Object identification information, it can be permId or identifier
   -registration-date, --registration-date TEXT
                                   Registration date, it can be in the format
                                   "oYYYY-MM-DD" (e.g. ">2023-01-31", "=2023-01-31", "<2023-01-31")
@@ -315,12 +324,16 @@ Options:
   -property TEXT                  Property code
   -property-value TEXT            Property value
   -save, --save TEXT              File name to save results in csv format
+  -r, --recursive                 Search data recursively
 ```
 
 With `object search` command, obis connects to a configured OpenBIS instance and searches for all
-objects/samples that fulfill given filtering criteria.
-At least one filtering criteria must be specified. Search results can be downloaded int a file by
-using `-save` option.
+objects/samples that fulfill the given filtering criteria or match the given object identification.
+At least one search option must be specified.
+
+Search results can be downloaded into a file by using the `--save` option.
+
+The recursive option additionally searches the data sets of child samples or child data sets.
 
 *Note: Filtering by `-project` may not work when `Project Samples` are disabled in OpenBIS
 configuration.*
@@ -550,6 +563,7 @@ was moved or copied without using the `move` or `copy` commands.
 
 **token**
 
+
 ```
 obis token get <session_name> [--validity-days] [--validity-weeks] [--validity-months]
 ```
@@ -602,7 +616,24 @@ echo content >> example_file
 obis commit -m 'message'
 ```
 
-## 6. Big Data Link Services
+## 6. Authentication
+
+There are two ways to perform user authentication against OpenBIS.
+
+### 6.1. Login
+Internally, obis stores a session token which is used to connect to OpenBIS. Whenever this token
+is invalidated, obis will ask the user to provide credentials to log into OpenBIS again.
+
+
+### 6.2. Personal Access Token
+A session token is short-lived, and its interactive generation makes it unfeasible for use in
+automated scripts. An alternative way to authorize is to generate a personal access token (PAT),
+which can be configured to last for long periods of time.
+
+PAT generation is explained in depth in the `token` command section.
+
+
+## 7. Big Data Link Services
 
 The Big Data Link Services can be used to download files which are contained in an obis repository.
 The services are included in the installation folder of openBIS,
@@ -610,7 +641,7 @@ under `servers/big_data_link_services`. For how to configure and run them, consu
 the [README.md](https://sissource.ethz.ch/sispub/openbis/blob/master/big_data_link_server/README.md)
 file.
 
-## 7. Rationale for obis
+## 8. Rationale for obis
 
 Data-provenance tracking tools like openBIS make it possible to understand and follow the research
 process. What was studied, what data was acquired and how, how was data analyzed to arrive at final
@@ -639,7 +670,7 @@ Using `git-annex`, even large binary artifacts can be tracked efficiently. For c
 openBIS, `obis` uses the openBIS API, which offers the power to register and track all metadata
 supported by openBIS.
 
-## 8. Literature
+## 9. Literature
 
 V. Korolev, A. Joshi, V. Korolev, M.A. Grasso, A. Joshi, M.A. Grasso, et al., "PROB: A tool for
 tracking provenance and reproducibility of big data experiments", Reproduce '14. HPCA 2014, vol. 11,