diff --git a/obis/src/python/integration_tests/integration_tests.py b/obis/src/python/integration_tests/integration_tests.py
index 7152493bb4ee80b1db9065ac5a1b8b34be694e76..a8beef3c8e3c14faf3c74ee4884fa673ca0bad02 100644
--- a/obis/src/python/integration_tests/integration_tests.py
+++ b/obis/src/python/integration_tests/integration_tests.py
@@ -8,15 +8,16 @@ import json
 import os
 import socket
 import subprocess
-from subprocess import PIPE
 from subprocess import SubprocessError
 from contextlib import contextmanager
 from random import randrange
 from pybis import Openbis
+from subprocess import PIPE
 
 
 output_buffer = ''
 
+
 def decorator_print(func):
     def wrapper(tmpdir, *args, **kwargs):
         try:
@@ -26,21 +27,13 @@ def decorator_print(func):
             raise
     return wrapper
 
-@decorator_print
-def test_obis(tmpdir):
-    global output_buffer
-
-    o = Openbis('https://obisserver:8443', verify_certificates=False)
-    o.login('admin', 'admin', save_token=True)
-    setup_masterdata(o)
 
+def init_global_settings():
+    global output_buffer
     output_buffer = '=================== 1. Global settings ===================\n'
-    if os.path.exists('~/.obis'):
-        os.rmdir('~/.obis')
-    cmd('obis config -g set openbis_url=https://obisserver:8443')
-    cmd('obis config -g set user=admin')
-    cmd('obis config -g set verify_certificates=false')
-    cmd('obis config -g set hostname=' + socket.gethostname())
+    cmd('obis config -g clear')
+    cmd('obis data_set -g clear')
+    cmd('obis config -g set openbis_url=https://obisserver:8443, user=admin, verify_certificates=false, hostname=' + socket.gethostname())
     cmd('obis data_set -g set type=UNKNOWN')
     settings = get_settings_global()
     assert settings['config']['openbis_url'] == 'https://obisserver:8443'
@@ -49,10 +42,37 @@ def test_obis(tmpdir):
     assert settings['config']['hostname'] == socket.gethostname()
     assert settings['data_set']['type'] == 'UNKNOWN'
 
+
+@decorator_print
+def test_obis(tmpdir):
+    o = get_openbis()
+    setup_masterdata(o)
+    init_global_settings()
+    run(tmpdir, o)
+
+
+@decorator_print
+def test_obis_with_metadata_folder(tmpdir):
+    o = get_openbis()
+    setup_masterdata(o)
+    init_global_settings()
+
+    obis_metadata_folder = os.path.join(tmpdir, 'obis_metadata')
+    os.makedirs(obis_metadata_folder)
+    cmd('obis config -g set obis_metadata_folder=' + obis_metadata_folder)
+    settings = get_settings_global()
+    assert settings['config']['obis_metadata_folder'] == obis_metadata_folder
+
+    run(tmpdir, o, skip=['clone', 'addref', 'removeref', 'sync'])
+
+
+def run(tmpdir, o, skip=()):
+    global output_buffer
+
     with cd(tmpdir): cmd('mkdir obis_data')
     with cd(tmpdir + '/obis_data'):
 
-        output_buffer = '=================== 2. First commit ===================\n'
+        output_buffer = '=================== 2. First commit =================== skip: ' + str(skip) + '\n'
         cmd('obis init data1')
         with cd('data1'):
             cmd('touch file')
@@ -78,7 +98,7 @@ def test_obis(tmpdir):
             assert settings['repository']['external_dms_id'] == settings_before['repository']['external_dms_id']
             assert settings['repository']['id'] == settings_before['repository']['id']
             assert "Created data set {}.".format(settings['repository']['data_set_id']) in result
-            result = cmd('git annex info big_file')
+            result = cmd_git('annex info big_file', settings, tmpdir, 'obis_data/data1')
             assert 'file: big_file' in result
             assert 'key: SHA256E-s1000000--d29751f2649b32ff572b5e0a9f541ea660a50f94ff0beedfb0b692b924cc8025' in result
             assert 'present: true' in result
@@ -149,32 +169,34 @@ def test_obis(tmpdir):
             data_set = o.get_dataset(settings['repository']['data_set_id']).data
             assert_matching(settings, data_set, tmpdir, 'obis_data/data5')
 
-        output_buffer = '=================== 8. Addref ===================\n'
-        cmd('cp -r data1 data6')
-        cmd('obis addref data6')
-        with cd('data1'): settings_data1 = get_settings()
-        with cd('data6'): settings_data6 = get_settings()
-        assert settings_data6 == settings_data1
-        result = cmd('obis addref data6')
-        assert 'DataSet already exists in the database' in result
-        result = cmd('obis addref data7')
-        assert 'Invalid value' in result
-        data_set = o.get_dataset(settings_data6['repository']['data_set_id']).data
-        with cd('data6'): assert_matching(settings_data6, data_set, tmpdir, 'obis_data/data6')
-
-        output_buffer = '=================== 9. Local clone ===================\n'
-        with cd('data2'): settings_data2 = get_settings()
-        with cd('../obis_data_b'):
-            cmd('obis clone ' + settings_data2['repository']['data_set_id'])
-            with cd('data2'):
-                settings_data2_clone = get_settings()
-                assert settings_data2_clone['repository']['external_dms_id'].startswith('ADMIN-' + socket.gethostname().upper())
-                assert settings_data2_clone['repository']['external_dms_id'] != settings_data2['repository']['external_dms_id']
-                data_set = o.get_dataset(settings_data2_clone['repository']['data_set_id']).data
-                assert_matching(settings_data2_clone, data_set, tmpdir, 'obis_data_b/data2')
-                del settings_data2['repository']['external_dms_id']
-                del settings_data2_clone['repository']['external_dms_id']
-                assert settings_data2_clone == settings_data2
+        if 'addref' not in skip:
+            output_buffer = '=================== 8. Addref ===================\n'
+            cmd('cp -r data1 data6')
+            cmd('obis addref data6')
+            with cd('data1'): settings_data1 = get_settings()
+            with cd('data6'): settings_data6 = get_settings()
+            assert settings_data6 == settings_data1
+            result = cmd('obis addref data6')
+            assert 'DataSet already exists in the database' in result
+            result = cmd('obis addref data7')
+            assert 'Invalid value' in result
+            data_set = o.get_dataset(settings_data6['repository']['data_set_id']).data
+            with cd('data6'): assert_matching(settings_data6, data_set, tmpdir, 'obis_data/data6')
+
+        if 'clone' not in skip:
+            output_buffer = '=================== 9. Local clone ===================\n'
+            with cd('data2'): settings_data2 = get_settings()
+            with cd('../obis_data_b'):
+                cmd('obis clone ' + settings_data2['repository']['data_set_id'])
+                with cd('data2'):
+                    settings_data2_clone = get_settings()
+                    assert settings_data2_clone['repository']['external_dms_id'].startswith('ADMIN-' + socket.gethostname().upper())
+                    assert settings_data2_clone['repository']['external_dms_id'] != settings_data2['repository']['external_dms_id']
+                    data_set = o.get_dataset(settings_data2_clone['repository']['data_set_id']).data
+                    assert_matching(settings_data2_clone, data_set, tmpdir, 'obis_data_b/data2')
+                    del settings_data2['repository']['external_dms_id']
+                    del settings_data2_clone['repository']['external_dms_id']
+                    assert settings_data2_clone == settings_data2
 
         output_buffer = '=================== 11. Init analysis ===================\n'
         cmd('obis init_analysis -p data1 analysis1')
@@ -206,7 +228,7 @@ def test_obis(tmpdir):
                 data_set = o.get_dataset(settings_analysis2['repository']['data_set_id']).data
                 assert_matching(settings_analysis2, data_set, tmpdir, 'obis_data/data1/analysis2')
                 assert data_set['parents'][0]['code'] == settings_data1['repository']['data_set_id']
-            result = cmd('git check-ignore analysis2')
+            result = cmd_git('check-ignore analysis2', settings_data1, tmpdir, 'obis_data/data1')
             assert 'analysis2' in result
 
         output_buffer = '=================== 12. Metadata only commit ===================\n'
@@ -226,18 +248,19 @@ def test_obis(tmpdir):
             data_set = o.get_dataset(settings['repository']['data_set_id']).data
             assert_matching(settings, data_set, tmpdir, 'obis_data/data7')
 
-        output_buffer = '=================== 13. obis sync ===================\n'
-        with cd('data7'):
-            cmd('touch file2')
-            cmd('git add file2')
-            cmd('git commit -m \'msg\'')
-            result = cmd('obis sync')
-            settings = get_settings()
-            assert "Created data set {}.".format(settings['repository']['data_set_id']) in result
-            data_set = o.get_dataset(settings['repository']['data_set_id']).data
-            assert_matching(settings, data_set, tmpdir, 'obis_data/data7')
-            result = cmd('obis sync')
-            assert 'Nothing to sync' in result
+        if 'sync' not in skip:
+            output_buffer = '=================== 13. obis sync ===================\n'
+            with cd('data7'):
+                cmd('touch file2')
+                cmd('git add file2')
+                cmd('git commit -m \'msg\'')
+                result = cmd('obis sync')
+                settings = get_settings()
+                assert "Created data set {}.".format(settings['repository']['data_set_id']) in result
+                data_set = o.get_dataset(settings['repository']['data_set_id']).data
+                assert_matching(settings, data_set, tmpdir, 'obis_data/data7')
+                result = cmd('obis sync')
+                assert 'Nothing to sync' in result
 
         output_buffer = '=================== 14. Set data set properties ===================\n'
         cmd('obis init data8')
@@ -252,22 +275,23 @@ def test_obis(tmpdir):
             result = cmd('obis data_set set properties={"a":"0","A":"1"}')
             assert 'Duplicate key after capitalizing JSON config: A' in result
 
-        output_buffer = '=================== 15. Removeref ===================\n'
-        with cd('data6'): settings = get_settings()
-        content_copies = get_data_set(o, settings)['linkedData']['contentCopies']
-        assert len(content_copies) == 2
-        cmd('obis removeref data6')
-        content_copies = get_data_set(o, settings)['linkedData']['contentCopies']
-        assert len(content_copies) == 1
-        assert content_copies[0]['path'].endswith('data1')
-        cmd('obis addref data6')
-        cmd('obis removeref data1')
-        content_copies = get_data_set(o, settings)['linkedData']['contentCopies']
-        assert len(content_copies) == 1
-        assert content_copies[0]['path'].endswith('data6')
-        result = cmd('obis removeref data1')
-        assert 'Matching content copy not fount in data set' in result
-        cmd('obis addref data1')
+        if 'removeref' not in skip:
+            output_buffer = '=================== 15. Removeref ===================\n'
+            with cd('data6'): settings = get_settings()
+            content_copies = get_data_set(o, settings)['linkedData']['contentCopies']
+            assert len(content_copies) == 2
+            cmd('obis removeref data6')
+            content_copies = get_data_set(o, settings)['linkedData']['contentCopies']
+            assert len(content_copies) == 1
+            assert content_copies[0]['path'].endswith('data1')
+            cmd('obis addref data6')
+            cmd('obis removeref data1')
+            content_copies = get_data_set(o, settings)['linkedData']['contentCopies']
+            assert len(content_copies) == 1
+            assert content_copies[0]['path'].endswith('data6')
+            result = cmd('obis removeref data1')
+            assert 'Matching content copy not fount in data set' in result
+            cmd('obis addref data1')
 
         output_buffer = '=================== 18. Use git-annex hashes as checksums ===================\n'
         cmd('obis init data10')
@@ -344,10 +368,12 @@ def assert_file_paths(files, expected_paths):
 
 
 def get_settings():
-    return json.loads(cmd('obis settings get'))
+    settings = cmd('obis settings get')
+    return json.loads(settings)
 
 def get_settings_global():
-    return json.loads(cmd('obis settings -g get'))
+    settings = cmd('obis settings -g get')
+    return json.loads(settings)
 
 def get_data_set(o, settings):
     return o.get_dataset(settings['repository']['data_set_id']).data
@@ -362,13 +388,6 @@ def cd(newdir):
     finally:
         os.chdir(prevdir)
 
-def cmd(cmd, timeout=None):
-    global output_buffer
-    output_buffer += '==== running: ' + cmd + '\n'
-    completed_process = subprocess.run(cmd.split(' '), stdout=PIPE, stderr=PIPE, timeout=timeout)
-    result = get_cmd_result(completed_process)
-    output_buffer += result + '\n'
-    return result
 
 def get_cmd_result(completed_process, tmpdir=''):
     result = ''
@@ -378,12 +397,32 @@ def get_cmd_result(completed_process, tmpdir=''):
         result += completed_process.stdout.decode('utf-8').strip()
     return result
 
+
+def cmd(cmd, timeout=None):
+    global output_buffer
+    output_buffer += '==== running: ' + cmd + '\n'
+    completed_process = subprocess.run(cmd.split(' '), stdout=PIPE, stderr=PIPE, timeout=timeout)
+    result = get_cmd_result(completed_process)
+    output_buffer += result + '\n'
+    return result
+
+
+def cmd_git(params, settings, tmpdir, path):
+    obis_metadata_folder = settings['config']['obis_metadata_folder']
+    if obis_metadata_folder is None:
+        return cmd('git ' + params)
+    else:
+        work_tree = os.path.join(tmpdir, path)
+        git_dir = os.path.join(obis_metadata_folder, work_tree[1:], '.git')
+        return cmd('git --work-tree=' + work_tree + ' --git-dir=' + git_dir + ' ' + params)
+
+
 def assert_matching(settings, data_set, tmpdir, path):
     content_copies = data_set['linkedData']['contentCopies']
     content_copy = list(filter(lambda cc: cc['path'].endswith(path) == 1, content_copies))[0]
     assert data_set['type']['code'] == settings['data_set']['type']
     assert content_copy['externalDms']['code'] == settings['repository']['external_dms_id']
-    assert content_copy['gitCommitHash'] == cmd('git rev-parse --short HEAD')
+    assert content_copy['gitCommitHash'] == cmd_git('rev-parse --short HEAD', settings, tmpdir, path)
     assert content_copy['gitRepositoryId'] == settings['repository']['id']
     if settings['object']['id'] is not None:
         assert data_set['sample']['identifier']['identifier'] == settings['object']['id']
@@ -435,8 +474,13 @@ def commit_new_change(tmpdir, o, repo_name):
         return settings
 
 
+def get_openbis():
+    o = Openbis('https://obisserver:8443', verify_certificates=False)
+    o.login('admin', 'admin', save_token=True)
+    return o
+
+
 def setup_masterdata(o):
-    spaces = o.get_spaces()
     if 'BIGDATA' not in o.get_spaces().df.code.values:
         o.new_space(code='BIGDATA').save()
     if '/DEFAULT/BIGDATA2' not in o.get_samples().df.identifier.values:
diff --git a/obis/src/python/obis/dm/checksum.py b/obis/src/python/obis/dm/checksum.py
index 2df5356dc4bd2db7adee47839de71cc6c15f8689..aaa64942fbaf16be0f1503ba795044bfb5edeec5 100644
--- a/obis/src/python/obis/dm/checksum.py
+++ b/obis/src/python/obis/dm/checksum.py
@@ -1,25 +1,26 @@
+import abc
 import hashlib
 import json
 import os
 from abc import ABC, abstractmethod
-from .utils import run_shell
+from .utils import run_shell, cd
 from .command_result import CommandResult, CommandException
 
 
-def get_checksum_generator(checksum_type, default=None):
+def get_checksum_generator(checksum_type, data_path, metadata_path, default=None):
     if checksum_type == "SHA256":
-        return ChecksumGeneratorSha256()
+        return ChecksumGeneratorSha256(data_path, metadata_path)
     elif checksum_type == "MD5":
-        return ChecksumGeneratorMd5()
+        return ChecksumGeneratorMd5(data_path, metadata_path)
     elif checksum_type == "WORM":
-        return ChecksumGeneratorWORM()
+        return ChecksumGeneratorWORM(data_path, metadata_path)
     elif default is not None:
         return default
     else:
         return None
 
 
-def validate_checksum(openbis, files, data_set_id, folder):
+def validate_checksum(openbis, files, data_set_id, data_path, metadata_path):
     invalid_files = []
     dataset_files = openbis.search_files(data_set_id)['objects']
     dataset_files_by_path = {}
@@ -27,23 +28,36 @@ def validate_checksum(openbis, files, data_set_id, folder):
         dataset_files_by_path[dataset_file['path']] = dataset_file
     for filename in files:
         dataset_file = dataset_files_by_path[filename]
-        filename_dest = os.path.join(folder, filename)
         checksum_generator = None
         if dataset_file['checksumCRC32'] is not None and dataset_file['checksumCRC32'] > 0:
-            checksum_generator = ChecksumGeneratorCrc32()
+            checksum_generator = ChecksumGeneratorCrc32(data_path, metadata_path)
             expected_checksum = dataset_file['checksumCRC32']
         elif dataset_file['checksumType'] is not None:
-            checksum_generator = get_checksum_generator(dataset_file['checksumType'])
+            checksum_generator = get_checksum_generator(dataset_file['checksumType'], data_path, metadata_path)
             expected_checksum = dataset_file['checksum']
         if checksum_generator is not None:
-            checksum = checksum_generator.get_checksum(filename_dest)['checksum']
+            checksum = checksum_generator.get_checksum(filename)['checksum']
             if checksum != expected_checksum:
                 invalid_files.append(filename)
     return invalid_files
 
 
-class ChecksumGeneratorCrc32(object):
+class ChecksumGenerator(metaclass=abc.ABCMeta):
+
+    def __init__(self, data_path, metadata_path=None):
+        self.data_path = data_path
+        self.metadata_path = metadata_path
+
     def get_checksum(self, file):
+        with cd(self.data_path):
+            return self._get_checksum(file)
+
+    @abc.abstractmethod
+    def _get_checksum(self, file):
+        return
+
+class ChecksumGeneratorCrc32(ChecksumGenerator):
+    def _get_checksum(self, file):
         result = run_shell(['cksum', file])
         if result.failure():
             raise CommandException(result)
@@ -55,15 +69,15 @@ class ChecksumGeneratorCrc32(object):
         }
 
 
-class ChecksumGeneratorHashlib(ABC):
-    @abstractmethod
+class ChecksumGeneratorHashlib(ChecksumGenerator):
+
     def hash_function(self):
         pass
-    @abstractmethod
+
     def hash_type(self):
         pass
 
-    def get_checksum(self, file):
+    def _get_checksum(self, file):
         return {
             'checksum': self._checksum(file),
             'checksumType': self.hash_type(),
@@ -93,8 +107,8 @@ class ChecksumGeneratorMd5(ChecksumGeneratorHashlib):
         return "MD5"
 
 
-class ChecksumGeneratorWORM(object):
-    def get_checksum(self, file):
+class ChecksumGeneratorWORM(ChecksumGenerator):
+    def _get_checksum(self, file):
         return {
             'checksum': self.worm(file),
             'checksumType': 'WORM',
@@ -107,21 +121,28 @@ class ChecksumGeneratorWORM(object):
         return "s{}-m{}--{}".format(size, modification_time, file)
 
 
-class ChecksumGeneratorGitAnnex(object):
+class ChecksumGeneratorGitAnnex(ChecksumGenerator):
 
-    def __init__(self):
+    def __init__(self, data_path, metadata_path):
+        self.data_path = data_path
+        self.metadata_path = metadata_path
         self.backend = self._get_annex_backend()
-        self.checksum_generator_replacement = ChecksumGeneratorCrc32() if self.backend is None else None
+        self.checksum_generator_replacement = None
+        if self.backend is None:
+            self.checksum_generator_replacement = ChecksumGeneratorCrc32(self.data_path, self.metadata_path)
         # define which generator to use for files which are not handled by annex
-        self.checksum_generator_supplement = get_checksum_generator(self.backend, default=ChecksumGeneratorCrc32())
+        self.checksum_generator_supplement = get_checksum_generator(
+            self.backend, self.data_path, self.metadata_path, 
+            default=ChecksumGeneratorCrc32(self.data_path, self.metadata_path))
 
-    def get_checksum(self, file):
+    def _get_checksum(self, file):
         if self.checksum_generator_replacement is not None:
             return self.checksum_generator_replacement.get_checksum(file)
-        return self._get_checksum(file)
+        return self.__get_checksum(file)
 
-    def _get_checksum(self, file):
-        annex_result = run_shell(['git', 'annex', 'info', '-j', file], raise_exception_on_failure=True)
+    def __get_checksum(self, file):
+        git_dir = os.path.join(self.metadata_path, '.git')
+        annex_result = run_shell(['git', '--work-tree', self.data_path, '--git-dir', git_dir, 'annex', 'info', '-j', file], raise_exception_on_failure=True)
         if 'Not a valid object name' in annex_result.output:
             return self.checksum_generator_supplement.get_checksum(file)
         annex_info = json.loads(annex_result.output)
@@ -144,11 +165,12 @@ class ChecksumGeneratorGitAnnex(object):
             raise ValueError("Git annex backend not supported: " + self.backend)
 
     def _get_annex_backend(self):
-        with open('.git/info/attributes') as gitattributes:
-            for line in gitattributes.readlines():
-                if 'annex.backend' in line:
-                    backend = line.split('=')[1].strip()
-                    if backend == 'SHA256E':
-                        backend = 'SHA256'
-                    return backend
+        with cd(self.metadata_path):
+            with open('.git/info/attributes') as gitattributes:
+                for line in gitattributes.readlines():
+                    if 'annex.backend' in line:
+                        backend = line.split('=')[1].strip()
+                        if backend == 'SHA256E':
+                            backend = 'SHA256'
+                        return backend
         return None
diff --git a/obis/src/python/obis/dm/commands/addref.py b/obis/src/python/obis/dm/commands/addref.py
index 1480ffaa2a88f0adc77d35b7fe0a39f04a9eab52..2c7ba0420e10e53ebf784bc3b54d980c6be61811 100644
--- a/obis/src/python/obis/dm/commands/addref.py
+++ b/obis/src/python/obis/dm/commands/addref.py
@@ -1,6 +1,6 @@
 import os
 from .openbis_command import OpenbisCommand
-from ..command_result import CommandResult
+from ..command_result import CommandResult, CommandException
 from ..utils import complete_openbis_config
 
 
diff --git a/obis/src/python/obis/dm/commands/clone.py b/obis/src/python/obis/dm/commands/clone.py
index e22aec7e2fe793ed57824bb324eddc7bfc3d2467..0dd0689024df527223ca0107c5cce089491ecf98 100644
--- a/obis/src/python/obis/dm/commands/clone.py
+++ b/obis/src/python/obis/dm/commands/clone.py
@@ -17,6 +17,8 @@ class Clone(OpenbisCommand):
     """
 
     def __init__(self, dm, data_set_id, ssh_user, content_copy_index, skip_integrity_check):
+        if dm.data_path != dm.metadata_path:
+            raise CommandException(CommandResult(returncode=-1, output='Clone/move not supported with obis_metadata_folder.'))
         self.data_set_id = data_set_id
         self.ssh_user = ssh_user
         self.content_copy_index = content_copy_index
@@ -59,7 +61,8 @@ class Clone(OpenbisCommand):
             return result
         data_set = self.openbis.get_dataset(self.data_set_id)
         if self.skip_integrity_check != True:
-            invalid_files = validate_checksum(self.openbis, data_set.file_list, data_set.permId, repository_folder)
+            data_path = os.path.join(self.data_mgmt.data_path, repository_folder)
+            invalid_files = validate_checksum(self.openbis, data_set.file_list, data_set.permId, data_path, self.data_mgmt.metadata_path)
             if len(invalid_files) > 0:
                 raise CommandException(CommandResult(returncode=-1, output="Invalid checksum for files {}.".format(str(invalid_files))))
         return self.add_content_copy_to_openbis(repository_folder)
@@ -73,11 +76,19 @@ class Clone(OpenbisCommand):
         """
         commit_hash = content_copy['gitCommitHash']
         repository_folder = path.split('/')[-1]
-        with cd(repository_folder):
-            return self.git_wrapper.git_checkout(commit_hash)
+        return self.git_wrapper.git_checkout(commit_hash, relative_repo_path=repository_folder)
 
 
     def add_content_copy_to_openbis(self, repository_folder):
         with cd(repository_folder):
-            data_mgmt = dm.DataMgmt(openbis_config={}, git_config={'find_git': True})
+            data_path = os.path.join(self.data_mgmt.data_path, repository_folder)
+            metadata_path = os.path.join(self.data_mgmt.metadata_path, repository_folder)
+            invocation_path = self.data_mgmt.invocation_path
+            data_mgmt = dm.DataMgmt(openbis_config={}, git_config={
+                    'find_git': True,
+                    'data_path': data_path,
+                    'metadata_path': metadata_path,
+                    'invocation_path': invocation_path
+                })
+            data_mgmt.set_property(data_mgmt.settings_resolver.config, 'hostname', None, False)
             return data_mgmt.addref()
diff --git a/obis/src/python/obis/dm/commands/download.py b/obis/src/python/obis/dm/commands/download.py
index f1af146091981d765c1b077f25b3339ea3218bee..0c3bf4e662b2b22e7bd0dc323d74f634718adbd7 100644
--- a/obis/src/python/obis/dm/commands/download.py
+++ b/obis/src/python/obis/dm/commands/download.py
@@ -3,6 +3,7 @@ import pybis
 from .openbis_command import OpenbisCommand, ContentCopySelector
 from ..command_result import CommandResult
 from ..checksum import validate_checksum
+from ..utils import cd
 
 class Download(OpenbisCommand):
     """
@@ -29,13 +30,15 @@ class Download(OpenbisCommand):
         data_set = self.openbis.get_dataset(self.data_set_id)
         content_copy_index =  ContentCopySelector(data_set, self.content_copy_index, get_index=True).select()
         files = self.files if self.files is not None else data_set.file_list
-        destination, invalid_files = data_set.download(files, linked_dataset_fileservice_url=self.fileservice_url(), content_copy_index=content_copy_index)
-        if self.skip_integrity_check != True:
-            files = [file for file in files if file not in invalid_files]
-            target_folder = os.path.join(destination, data_set.permId)
-            invalid_files += validate_checksum(self.openbis, files, data_set.permId, target_folder)
-            self.redownload_invalid_files_on_demand(invalid_files, target_folder)
-        return CommandResult(returncode=0, output="Files downloaded to: %s" % target_folder)
+
+        with cd(self.data_mgmt.invocation_path):
+            destination, invalid_files = data_set.download(files, linked_dataset_fileservice_url=self.fileservice_url(), content_copy_index=content_copy_index)
+            target_folder = os.path.join(destination, data_set.permId)
+            if self.skip_integrity_check != True:
+                files = [file for file in files if file not in invalid_files]
+                invalid_files += validate_checksum(self.openbis, files, data_set.permId, target_folder, None)
+                self.redownload_invalid_files_on_demand(invalid_files, target_folder)
+            return CommandResult(returncode=0, output="Files downloaded to: %s" % target_folder)
 
 
     def redownload_invalid_files_on_demand(self, invalid_files, target_folder):
diff --git a/obis/src/python/obis/dm/commands/move.py b/obis/src/python/obis/dm/commands/move.py
index 060f3ef830d87b643b4a5b86606becfb9fc4f4ac..95510c88b79e847a77ad9f4138397eec6c23f9ff 100644
--- a/obis/src/python/obis/dm/commands/move.py
+++ b/obis/src/python/obis/dm/commands/move.py
@@ -3,7 +3,6 @@ import os
 import pybis
 from .clone import Clone
 from .openbis_command import OpenbisCommand, ContentCopySelector
-from ..checksum import validate_checksum
 from ..command_result import CommandResult
 from ..utils import cd
 from ..utils import run_shell
diff --git a/obis/src/python/obis/dm/commands/openbis_command.py b/obis/src/python/obis/dm/commands/openbis_command.py
index 599d66ca2400e5dda99a7e35d4daca1bb4814b6d..1c54f7dabf9616f6cb47056dbd6e412daf9dab26 100644
--- a/obis/src/python/obis/dm/commands/openbis_command.py
+++ b/obis/src/python/obis/dm/commands/openbis_command.py
@@ -12,7 +12,7 @@ from ...scripts import cli
 
 class OpenbisCommand(object):
 
-    def __init__(self, dm, openbis=None):
+    def __init__(self, dm):
         self.data_mgmt = dm
         self.openbis = dm.openbis
         self.git_wrapper = dm.git_wrapper
@@ -162,13 +162,12 @@ class OpenbisCommand(object):
         result = self.git_wrapper.git_top_level_path()
         if result.failure():
             return result
-        top_level_path = result.output
         edms_path, path_name = os.path.split(result.output)
         if external_dms_id is None:
             external_dms_id = self.generate_external_data_management_system_code(user, hostname, edms_path)
         try:
             external_dms = self.openbis.get_external_data_management_system(external_dms_id.upper())
-        except ValueError as e:
+        except ValueError:
             # external dms does not exist - create it
             try:
                 external_dms = self.openbis.create_external_data_management_system(external_dms_id, external_dms_id,
@@ -187,8 +186,7 @@ class OpenbisCommand(object):
         # ask user
         hostname = self.ask_for_hostname(socket.gethostname())
         # store
-        resolver = self.data_mgmt.settings_resolver.config
-        cli.config_internal(self.data_mgmt, resolver, True, False, prop='hostname', value=hostname, set=True)
+        self.data_mgmt.config('config', True, False, prop='hostname', value=hostname, set=True)
         return hostname
 
     def ask_for_hostname(self, hostname):
diff --git a/obis/src/python/obis/dm/commands/openbis_command_test.py b/obis/src/python/obis/dm/commands/openbis_command_test.py
index 389ae1fe77955432a66de9fe9035c7fab607aba4..b53ee5594158d99115ad7f355e3664e14ac86731 100644
--- a/obis/src/python/obis/dm/commands/openbis_command_test.py
+++ b/obis/src/python/obis/dm/commands/openbis_command_test.py
@@ -10,28 +10,26 @@ from .. import data_mgmt
 
 def test_prepare_run(monkeypatch):
     # given
-    dm = data_mgmt.DataMgmt(openbis=Mock())
+    dm = data_mgmt.DataMgmt(openbis=Mock(), git_config={
+        'data_path': '',
+        'metadata_path': '',
+        'invocation_path': ''
+    })
     openbis_command = OpenbisCommand(dm)
+    define_config(openbis_command)
     monkeypatch.setattr(getpass, 'getpass', lambda s: 'password')
     dm.openbis.is_session_active.return_value = False
     # when
     openbis_command.prepare_run()
     # then
     dm.openbis.is_session_active.assert_called()
-    dm.openbis.login.assert_called_with('auser', 'password', save_token=True)
+    dm.openbis.login.assert_called_with('watney', 'password', save_token=True)
 
-# TODO
-# def test_determine_hostname():
-#     # given
-#     dm = data_mgmt.DataMgmt(openbis=Mock())
-#     openbis_command = OpenbisCommand(dm)
-#     define_config(openbis_command)
-#     # when
-#     openbis_command.determine_hostname()
-#     # then
 
 def define_config(openbis_command):
     openbis_command.config_dict = {
-        'hostname': None
+        'config': {
+            'user': 'watney'
+        }
     }
     
diff --git a/obis/src/python/obis/dm/commands/openbis_sync.py b/obis/src/python/obis/dm/commands/openbis_sync.py
index 69b87af12f6e509be95d8d370705d736c8bb8bc9..4b01c85610d2f10b81ce78c6adf4821063e014d7 100644
--- a/obis/src/python/obis/dm/commands/openbis_sync.py
+++ b/obis/src/python/obis/dm/commands/openbis_sync.py
@@ -72,29 +72,21 @@ class OpenbisSync(OpenbisCommand):
                 self.settings_resolver.object.set_value_for_parameter('id', sample_id, 'local')
                 # permId is cleared when the id is set - set it again
                 self.settings_resolver.object.set_value_for_parameter('permId', self.object_permId(), 'local')
-                self.commit_metadata_updates("object identifier changed in openBIS")
         if self.collection_permId() is not None:
             experiment_id = self.openbis.get_experiment(self.collection_permId()).identifier
             if experiment_id != self.collection_id():
                 self.settings_resolver.collection.set_value_for_parameter('id', experiment_id, 'local')
                 # permId is cleared when the id is set - set it again
                 self.settings_resolver.collection.set_value_for_parameter('permId', self.collection_permId(), 'local')
-                self.commit_metadata_updates("collection identifier changed in openBIS")
         return sample_id, experiment_id
 
     def _storePermId(self):
         if self.object_permId() is None and self.object_id() is not None:
             sample = self.openbis.get_sample(self.object_id())
             self.settings_resolver.object.set_value_for_parameter('permId', sample.permId, 'local')
-            self.commit_metadata_updates("object permId", omit_usersettings=False)
         if self.collection_permId() is None and self.collection_id() is not None:
             experiment = self.openbis.get_experiment(self.collection_id())
             self.settings_resolver.collection.set_value_for_parameter('permId', experiment.permId, 'local')
-            self.commit_metadata_updates("collection permId", omit_usersettings=False)
-
-
-    def commit_metadata_updates(self, msg_fragment=None, omit_usersettings=True):
-        return self.data_mgmt.commit_metadata_updates(msg_fragment, omit_usersettings=omit_usersettings)
 
 
     def prepare_repository_id(self):
@@ -186,14 +178,10 @@ class OpenbisSync(OpenbisCommand):
         if result.failure():
             return result
 
-        self.commit_metadata_updates()
-
         # store permId of object / collection so we can use those as a reference in the future
         self._storePermId()
 
-        # Update data set id as last commit so we can easily revert it on failure
         self.settings_resolver.repository.set_value_for_parameter('data_set_id', data_set_code, 'local')
-        self.commit_metadata_updates("data set id")
 
         # create a data set, using the existing data set as a parent, if there is one
         result, data_set = self.create_data_set(data_set_code, external_dms, repository_id, ignore_parent)
diff --git a/obis/src/python/obis/dm/config.py b/obis/src/python/obis/dm/config.py
index 512c859b635cc16121465262604ccd716875afa4..bef63ee2e28277efcdb0b9761cb471fcbc7803e8 100644
--- a/obis/src/python/obis/dm/config.py
+++ b/obis/src/python/obis/dm/config.py
@@ -31,7 +31,7 @@ class ConfigLocation(object):
 class ConfigParam(object):
     """Class for configuration parameters."""
 
-    def __init__(self, name, private, is_json=False, ignore_global=False, default_value=None):
+    def __init__(self, name, private=False, is_json=False, ignore_global=False, default_value=None):
         """
         :param name: Name of the parameter.
         :param private: Should the parameter be private to the repo or visible in the data set?
@@ -102,14 +102,15 @@ class ConfigEnv(object):
                 locations = locations[sub_desc]
 
     def initialize_params(self):
-        self.add_param(ConfigParam(name='openbis_url', private=False))
-        self.add_param(ConfigParam(name='fileservice_url', private=False))
-        self.add_param(ConfigParam(name='user', private=True))
-        self.add_param(ConfigParam(name='verify_certificates', private=True, is_json=True, default_value=True))
-        self.add_param(ConfigParam(name='allow_only_https', private=True, is_json=True, default_value=True))
-        self.add_param(ConfigParam(name='hostname', private=False))
-        self.add_param(ConfigParam(name='git_annex_hash_as_checksum', private=False, is_json=True, default_value=True))
-        self.add_param(ConfigParam(name='git_annex_backend', private=False))
+        self.add_param(ConfigParam(name='openbis_url'))
+        self.add_param(ConfigParam(name='fileservice_url'))
+        self.add_param(ConfigParam(name='user'))
+        self.add_param(ConfigParam(name='verify_certificates', is_json=True, default_value=True))
+        self.add_param(ConfigParam(name='allow_only_https', is_json=True, default_value=True))
+        self.add_param(ConfigParam(name='hostname'))
+        self.add_param(ConfigParam(name='git_annex_hash_as_checksum', is_json=True, default_value=True))
+        self.add_param(ConfigParam(name='git_annex_backend'))
+        self.add_param(ConfigParam(name='obis_metadata_folder'))
 
     def add_param(self, param):
         self.params[param.name] = param
@@ -133,8 +134,8 @@ class ConfigEnv(object):
 class CollectionEnv(ConfigEnv):
 
     def initialize_params(self):
-        self.add_param(ConfigParam(name='id', private=False, ignore_global=True))
-        self.add_param(ConfigParam(name='permId', private=False, ignore_global=True))
+        self.add_param(ConfigParam(name='id', ignore_global=True))
+        self.add_param(ConfigParam(name='permId', ignore_global=True))
 
     def initialize_rules(self):
         self.add_rule(ClearPermIdRule())
@@ -144,8 +145,8 @@ class CollectionEnv(ConfigEnv):
 class ObjectEnv(ConfigEnv):
 
     def initialize_params(self):
-        self.add_param(ConfigParam(name='id', private=False, ignore_global=True))
-        self.add_param(ConfigParam(name='permId', private=False, ignore_global=True))
+        self.add_param(ConfigParam(name='id', ignore_global=True))
+        self.add_param(ConfigParam(name='permId', ignore_global=True))
 
     def initialize_rules(self):
         self.add_rule(ClearPermIdRule())
@@ -155,17 +156,17 @@ class ObjectEnv(ConfigEnv):
 class DataSetEnv(ConfigEnv):
 
     def initialize_params(self):
-        self.add_param(ConfigParam(name='type', private=False))
-        self.add_param(ConfigParam(name='properties', private=False, is_json=True))        
+        self.add_param(ConfigParam(name='type'))
+        self.add_param(ConfigParam(name='properties', is_json=True))        
 
 
 class RepositoryEnv(ConfigEnv):
     """ These are properties which are not configured by the user but set by obis. """
 
     def initialize_params(self):
-        self.add_param(ConfigParam(name='id', private=True))
-        self.add_param(ConfigParam(name='external_dms_id', private=True))
-        self.add_param(ConfigParam(name='data_set_id', private=True))
+        self.add_param(ConfigParam(name='id'))
+        self.add_param(ConfigParam(name='external_dms_id'))
+        self.add_param(ConfigParam(name='data_set_id'))
 
     def is_usersetting(self):
         return False
@@ -257,6 +258,7 @@ class ConfigResolver(object):
         location_path = param.location_path(loc)
         location = self.env.location_at_path(location_path)
         location_dir_path = self.location_resolver.resolve_location(location)
+
         if not os.path.exists(location_dir_path):
             os.makedirs(location_dir_path)
         config_path = os.path.join(location_dir_path, self.categoty + '.json')
@@ -348,6 +350,13 @@ class SettingsResolver(object):
         self.resolvers.append(self.collection)
         self.resolvers.append(self.config)
 
+
+    def get(self, category):
+        for resolver in self.resolvers:
+            if resolver.categoty == category:
+                return resolver
+
+
     def config_dict(self, local_only=False):
         combined_dict = {}
         for resolver in self.resolvers:
diff --git a/obis/src/python/obis/dm/data_mgmt.py b/obis/src/python/obis/dm/data_mgmt.py
index 5c54b14b3ada109e90a1bed1f59e3a87a2e92481..955baa790bb7ade67c67b81d577bfc7577f3c0eb 100644
--- a/obis/src/python/obis/dm/data_mgmt.py
+++ b/obis/src/python/obis/dm/data_mgmt.py
@@ -10,6 +10,7 @@ Created by Chandrasekhar Ramakrishnan on 2017-02-01.
 Copyright (c) 2017 Chandrasekhar Ramakrishnan. All rights reserved.
 """
 import abc
+import json
 import os
 import shutil
 import traceback
@@ -17,6 +18,7 @@ import pybis
 import requests
 import signal
 import sys
+from pathlib import Path
 from . import config as dm_config
 from .commands.addref import Addref
 from .commands.removeref import Removeref
@@ -33,6 +35,7 @@ from .utils import complete_git_config
 from .utils import complete_openbis_config
 from .utils import cd
 from ..scripts import cli
+from ..scripts.click_util import click_echo, check_result
 
 
 # noinspection PyPep8Naming
@@ -41,19 +44,21 @@ def DataMgmt(echo_func=None, settings_resolver=None, openbis_config={}, git_conf
 
     echo_func = echo_func if echo_func is not None else default_echo
 
+    data_path = git_config['data_path']
+    metadata_path = git_config['metadata_path']
+    invocation_path = git_config['invocation_path']
+
     complete_git_config(git_config)
     git_wrapper = GitWrapper(**git_config)
     if not git_wrapper.can_run():
-        return NoGitDataMgmt(settings_resolver, None, git_wrapper, openbis, log)
+        return NoGitDataMgmt(settings_resolver, None, git_wrapper, openbis, log, data_path, metadata_path, invocation_path)
 
     if settings_resolver is None:
         settings_resolver = dm_config.SettingsResolver()
-        result = git_wrapper.git_top_level_path()
-        if result.success():
-            settings_resolver.set_resolver_location_roots('data_set', result.output)
+
     complete_openbis_config(openbis_config, settings_resolver)
 
-    return GitDataMgmt(settings_resolver, openbis_config, git_wrapper, openbis, log, debug)
+    return GitDataMgmt(settings_resolver, openbis_config, git_wrapper, openbis, log, data_path, metadata_path, invocation_path, debug)
 
 
 class AbstractDataMgmt(metaclass=abc.ABCMeta):
@@ -62,12 +67,15 @@ class AbstractDataMgmt(metaclass=abc.ABCMeta):
     All operations throw an exepction if they fail.
     """
 
-    def __init__(self, settings_resolver, openbis_config, git_wrapper, openbis, log, debug=False):
+    def __init__(self, settings_resolver, openbis_config, git_wrapper, openbis, log, data_path, metadata_path, invocation_path, debug=False):
         self.settings_resolver = settings_resolver
         self.openbis_config = openbis_config
         self.git_wrapper = git_wrapper
         self.openbis = openbis
         self.log = log
+        self.data_path = data_path
+        self.metadata_path = metadata_path
+        self.invocation_path = invocation_path
         self.debug = debug
 
     def error_raise(self, command, reason):
@@ -76,7 +84,12 @@ class AbstractDataMgmt(metaclass=abc.ABCMeta):
         raise ValueError(message)
 
     @abc.abstractmethod
-    def init_data(self, path, desc=None, create=True):
+    def get_settings_resolver(self):
+        """ Get the settings resolver """
+        return
+
+    @abc.abstractmethod
+    def init_data(self, desc=None, create=True):
         """Initialize a data repository at the path with the description.
         :param path: Path for the repository.
         :param desc: An optional short description of the repository (used by git-annex)
@@ -86,9 +99,9 @@ class AbstractDataMgmt(metaclass=abc.ABCMeta):
         return
 
     @abc.abstractmethod
-    def init_analysis(self, path, parent, desc=None, create=True, apply_config=False):
+    def init_analysis(self, parent_folder, desc=None, create=True, apply_config=False):
         """Initialize an analysis repository at the path.
-        :param path: Path for the repository.
+        :param parent_folder: Path for the repository.
         :param parent: (required when outside of existing repository) Path for the parent repositort
         :return: A CommandResult.
         """
@@ -173,10 +186,13 @@ class AbstractDataMgmt(metaclass=abc.ABCMeta):
 class NoGitDataMgmt(AbstractDataMgmt):
     """DataMgmt operations when git is not available -- show error messages."""
 
-    def init_data(self, path, desc=None, create=True):
+    def get_settings_resolver(self):
+        self.error_raise("get settings resolver", "No git command found.")
+
+    def init_data(self, desc=None, create=True):
         self.error_raise("init data", "No git command found.")
 
-    def init_analysis(self, path, parent, desc=None, create=True, apply_config=False):
+    def init_analysis(self, parent_folder, desc=None, create=True, apply_config=False):
         self.error_raise("init analysis", "No git command found.")
 
     def commit(self, msg, auto_add=True, ignore_missing_parent=False, sync=True):
@@ -232,11 +248,13 @@ def with_restore(f):
             result = f(self, *args)
             if result.failure():
                 self.restore()
+            self.clear_restorepoint()
             return result
         except Exception as e:
             self.restore()
             if self.debug == True:
                 raise e
+            self.clear_restorepoint()
             return CommandResult(returncode=-1, output="Error: " + str(e))
     return f_with_restore
 
@@ -244,78 +262,84 @@ def with_restore(f):
 class GitDataMgmt(AbstractDataMgmt):
     """DataMgmt operations in normal state."""
 
-    def setup_local_settings(self, all_settings, path):
-        with cd(path):
-            self.settings_resolver.set_resolver_location_roots('data_set', '.')
-            for resolver_type, settings in all_settings.items():
-                resolver = getattr(self.settings_resolver, resolver_type)
-                for key, value in settings.items():
-                    resolver.set_value_for_parameter(key, value, 'local')
+    def get_settings_resolver(self, relative_path=None):
+        if relative_path is None:
+            return self.settings_resolver
+        else:
+            settings_resolver = dm_config.SettingsResolver()
+            settings_resolver.set_resolver_location_roots('data_set', relative_path)
+            return settings_resolver
+
 
+    def setup_local_settings(self, all_settings):
+        self.settings_resolver.set_resolver_location_roots('data_set', '.')
+        for resolver_type, settings in all_settings.items():
+            resolver = getattr(self.settings_resolver, resolver_type)
+            for key, value in settings.items():
+                resolver.set_value_for_parameter(key, value, 'local')
 
-    def check_repository_state(self, path):
+
+    def check_repository_state(self):
         """Checks if the repo already exists and has uncommitted files."""
-        with cd(path):
-            git_status = self.git_wrapper.git_status()
-            if git_status.failure():
-                return ('NOT_INITIALIZED', None)
-            if git_status.output is not None and len(git_status.output) > 0:
-                return ('PENDING_CHANGES', git_status.output)
-            return ('SYNCHRONIZED', None)
-
-
-    def get_data_set_id(self, path):
-        with cd(path):
-            return self.settings_resolver.repository.config_dict().get('data_set_id')
-
-    def get_repository_id(self, path):
-        with cd(path):
-            return self.settings_resolver.repository.config_dict().get('id')
-
-    def init_data(self, path, desc=None, create=True, apply_config=False):
-        if not os.path.exists(path) and create:
-            os.mkdir(path)
-        result = self.git_wrapper.git_init(path)
+        git_status = self.git_wrapper.git_status()
+        if git_status.failure():
+            return ('NOT_INITIALIZED', None)
+        if git_status.output is not None and len(git_status.output) > 0:
+            return ('PENDING_CHANGES', git_status.output)
+        return ('SYNCHRONIZED', None)
+
+
+    def get_data_set_id(self, relative_path):
+        settings_resolver = self.get_settings_resolver(relative_path)
+        return settings_resolver.repository.config_dict().get('data_set_id')
+
+
+    def get_repository_id(self, relative_path):
+        settings_resolver = self.get_settings_resolver(relative_path)
+        return settings_resolver.repository.config_dict().get('id')
+
+
+    def init_data(self, desc=None, create=True, apply_config=False):
+        # check that analysis repository does not already exist
+        if os.path.exists('.obis'):
+            return CommandResult(returncode=-1, output="Folder is already an obis repository.")
+        result = self.git_wrapper.git_init()
         if result.failure():
             return result
         git_annex_backend = self.settings_resolver.config.config_dict().get('git_annex_backend')
-        result = self.git_wrapper.git_annex_init(path, desc, git_annex_backend)
+        result = self.git_wrapper.git_annex_init(desc, git_annex_backend)
         if result.failure():
             return result
-        with cd(path):
-            result = self.git_wrapper.initial_commit()
-            if result.failure():
-                return result
-            # Update the resolvers location
-            self.settings_resolver.set_resolver_location_roots('data_set', '.')
-            self.settings_resolver.copy_global_to_local()
-            self.commit_metadata_updates('local with global')
+        result = self.git_wrapper.initial_commit()
+        if result.failure():
+            return result
+        # Update the resolvers location
+        self.settings_resolver.set_resolver_location_roots('data_set', '.')
+        self.settings_resolver.copy_global_to_local()
         return CommandResult(returncode=0, output="")
 
 
-    def init_analysis(self, path, parent, desc=None, create=True, apply_config=False):
-
+    def init_analysis(self, parent_folder, desc=None, create=True, apply_config=False):
         # get data_set_id of parent from current folder or explicit parent argument
-        parent_folder = parent if parent is not None and len(parent) > 0 else "."
         parent_data_set_id = self.get_data_set_id(parent_folder)
         # check that parent repository has been added to openBIS
         if self.get_repository_id(parent_folder) is None:
             return CommandResult(returncode=-1, output="Parent data set must be committed to openBIS before creating an analysis data set.")
-        # check that analysis repository does not already exist
-        if os.path.exists(path):
-            return CommandResult(returncode=-1, output="Data set already exists: " + path)
         # init analysis repository
-        result = self.init_data(path, desc, create, apply_config)
+        result = self.init_data(desc, create, apply_config)
         if result.failure():
             return result
+
         # add analysis repository folder to .gitignore of parent
-        if os.path.exists('.obis'):
-            self.git_wrapper.git_ignore(path)
-        elif parent is None:
-            return CommandResult(returncode=-1, output="Not within a repository and no parent set.")
+        parent_folder_abs = os.path.join(os.getcwd(), parent_folder)
+        analysis_folder_abs = os.getcwd()
+        if Path(analysis_folder_abs) in Path(parent_folder_abs).parents:
+            analysis_folder_relative = os.path.relpath(analysis_folder_abs, parent_folder_abs)
+            with cd(parent_folder):
+                self.git_wrapper.git_ignore(analysis_folder_relative)
+
         # set data_set_id to analysis repository so it will be used as parent when committing
-        with cd(path):
-            cli.set_property(self, self.settings_resolver.repository, "data_set_id", parent_data_set_id, False, False)
+        self.set_property(self.settings_resolver.repository, "data_set_id", parent_data_set_id, False, False)
         return result
 
 
@@ -329,16 +353,8 @@ class GitDataMgmt(AbstractDataMgmt):
         return cmd.run()
 
 
-    def commit(self, msg, auto_add=True, ignore_missing_parent=False, sync=True, path=None):
-        if path is not None:
-            with cd(path):
-                return self._commit(msg, auto_add, ignore_missing_parent, sync);
-        else:
-            return self._commit(msg, auto_add, ignore_missing_parent, sync);
-
-
     @with_restore
-    def _commit(self, msg, auto_add=True, ignore_missing_parent=False, sync=True):
+    def commit(self, msg, auto_add=True, ignore_missing_parent=False, sync=True):
         if auto_add:
             result = self.git_wrapper.git_top_level_path()
             if result.failure():
@@ -368,32 +384,19 @@ class GitDataMgmt(AbstractDataMgmt):
             output += sync_status.output
         return CommandResult(returncode=0, output=output)
 
-    def commit_metadata_updates(self, msg_fragment=None, omit_usersettings=True):
-        properties_paths = self.settings_resolver.local_public_properties_paths(omit_usersettings=omit_usersettings)
-        total_status = ''
-        for properties_path in properties_paths:
-            status = self.git_wrapper.git_status(properties_path).output.strip()
-            total_status += status
-            if len(status) > 0:
-                self.git_wrapper.git_add(properties_path)
-        if len(total_status) < 1:
-            # Nothing to commit
-            return CommandResult(returncode=0, output="")
-        if msg_fragment is None:
-            msg = "OBIS: Update openBIS metadata cache."
-        else:
-            msg = "OBIS: Update {}.".format(msg_fragment)
-        return self.git_wrapper.git_commit(msg)
-
     def set_restorepoint(self):
         self.previous_git_commit_hash = self.git_wrapper.git_commit_hash().output
+        self.clear_restorepoint()
+        shutil.copytree('.obis', '.obis_restorepoint')
 
     def restore(self):
         self.git_wrapper.git_reset_to(self.previous_git_commit_hash)
-        properties_paths = self.settings_resolver.local_public_properties_paths()
-        for properties_path in properties_paths:
-            self.git_wrapper.git_checkout(properties_path)
-            self.git_wrapper.git_delete_if_untracked(properties_path)
+        shutil.rmtree('.obis')
+        shutil.copytree('.obis_restorepoint', '.obis')
+
+    def clear_restorepoint(self):
+        if os.path.exists('.obis_restorepoint'):
+            shutil.rmtree('.obis_restorepoint')
 
     def clone(self, data_set_id, ssh_user, content_copy_index, skip_integrity_check):
         cmd = Clone(self, data_set_id, ssh_user, content_copy_index, skip_integrity_check)
@@ -415,3 +418,70 @@ class GitDataMgmt(AbstractDataMgmt):
     def download(self, data_set_id, content_copy_index, file, skip_integrity_check):
         cmd = Download(self, data_set_id, content_copy_index, file, skip_integrity_check)
         return cmd.run()
+
+    #
+    # settings
+    #
+
+    def config(self, category, is_global, is_data_set_property, prop=None, value=None, set=False, get=False, clear=False):
+        resolver = self.settings_resolver.get(category)
+        if resolver is None:
+            raise ValueError('Invalid settings category: ' + category)
+        if set == True:
+            assert get == False
+            assert clear == False
+            assert prop is not None
+            assert value is not None
+        elif get == True:
+            assert set == False
+            assert clear == False
+            assert value is None
+        elif clear == True:
+            assert get == False
+            assert set == False
+            assert value is None
+
+        assert set == True or get == True or clear == True
+        if is_global:
+            resolver.set_location_search_order(['global'])
+        else:
+            resolver.set_location_search_order(['local'])
+
+        config_dict = resolver.config_dict()
+        if is_data_set_property:
+            config_dict = config_dict['properties']
+        if get == True:
+            if prop is None:
+                config_str = json.dumps(config_dict, indent=4, sort_keys=True)
+                click_echo("{}".format(config_str), with_timestamp=False)
+            else:
+                if not prop in config_dict:
+                    raise ValueError("Unknown setting {} for {}.".format(prop, resolver.categoty))
+                little_dict = {prop: config_dict[prop]}
+                config_str = json.dumps(little_dict, indent=4, sort_keys=True)
+                click_echo("{}".format(config_str), with_timestamp=False)
+        elif set == True:
+            return check_result("config", self.set_property(resolver, prop, value, is_global, is_data_set_property))
+        elif clear == True:
+            if prop is None:
+                returncode = 0
+                for prop in config_dict.keys():
+                    returncode += check_result("config", self.set_property(resolver, prop, None, is_global, is_data_set_property))
+                return returncode
+            else:
+                return check_result("config", self.set_property(resolver, prop, None, is_global, is_data_set_property))
+
+    def set_property(self, resolver, prop, value, is_global, is_data_set_property=False):
+        """Helper function to implement the property setting semantics."""
+        loc = 'global' if is_global else 'local'
+        try:
+            if is_data_set_property:
+                resolver.set_value_for_json_parameter('properties', prop, value, loc, apply_rules=True)
+            else:
+                resolver.set_value_for_parameter(prop, value, loc, apply_rules=True)
+        except ValueError as e:
+            if self.debug ==  True:
+                raise e
+            return CommandResult(returncode=-1, output="Error: " + str(e))
+        else:
+            return CommandResult(returncode=0, output="")
diff --git a/obis/src/python/obis/dm/data_mgmt_test.py b/obis/src/python/obis/dm/data_mgmt_test.py
index 4b6cf0222621b8534d1a9e2b0781531361caef0f..ff2ccf1919d99eea1dc3680ea1792a8c161bfe61 100644
--- a/obis/src/python/obis/dm/data_mgmt_test.py
+++ b/obis/src/python/obis/dm/data_mgmt_test.py
@@ -31,17 +31,21 @@ def generate_perm_id():
     return "{}-{:04d}".format(ts, sequence)
 
 
-def shared_dm():
+def shared_dm(path):
     openbis_config = {
         'allow_http_but_do_not_use_this_in_production_and_only_within_safe_networks': True
     }
-    dm = data_mgmt.DataMgmt(openbis_config=openbis_config)
+    dm = data_mgmt.DataMgmt(openbis_config=openbis_config, git_config={
+        'data_path': path,
+        'metadata_path': path,
+        'invocation_path': path
+    })
     dm.debug = True
     return dm
 
 
 def test_no_git(tmpdir):
-    git_config = {'find_git': False}
+    git_config = {'find_git': False, 'data_path': None, 'metadata_path': None, 'invocation_path': None}
     dm = data_mgmt.DataMgmt(git_config=git_config)
     try:
         dm.init_data(str(tmpdir), "")
@@ -63,7 +67,7 @@ def git_status(path=None, annex=False):
 
 def check_correct_config_semantics():
     # This how things should work
-    with open('.git/obis/repository.json') as f:
+    with open('.obis/repository.json') as f:
         config_local = json.load(f)
     assert config_local.get('data_set_id') is not None
 
@@ -76,21 +80,23 @@ def check_workaround_config_semantics():
 
 
 def test_data_use_case(tmpdir):
-    dm = shared_dm()
+    dm = shared_dm(tmpdir)
 
     tmp_dir_path = str(tmpdir)
     assert git_status(tmp_dir_path).returncode == 128  # The folder should not be a git repo at first.
 
-    result = dm.init_data(tmp_dir_path, "test")
-    assert result.returncode == 0
+    with data_mgmt.cd(tmp_dir_path):
 
-    assert git_status(tmp_dir_path).returncode == 0  # The folder should be a git repo now
-    assert git_status(tmp_dir_path, annex=True).returncode == 0  # ...and a git-annex repo as well.
+        result = dm.init_data(tmp_dir_path, "test")
+        print(result.output)
+        assert result.returncode == 0
 
-    copy_test_data(tmpdir)
+        assert git_status(tmp_dir_path).returncode == 0  # The folder should be a git repo now
+        assert git_status(tmp_dir_path, annex=True).returncode == 0  # ...and a git-annex repo as well.
 
-    with data_mgmt.cd(tmp_dir_path):
-        dm = shared_dm()
+        copy_test_data(tmpdir)
+
+        dm = shared_dm(tmpdir)
         prepare_registration_expectations(dm)
         set_registration_configuration(dm)
 
@@ -126,17 +132,18 @@ def test_data_use_case(tmpdir):
 
 
 def test_child_data_set(tmpdir):
-    dm = shared_dm()
+    dm = shared_dm(tmpdir)
 
     tmp_dir_path = str(tmpdir)
 
-    result = dm.init_data(tmp_dir_path, "test")
-    assert result.returncode == 0
+    with data_mgmt.cd(tmp_dir_path):
 
-    copy_test_data(tmpdir)
+        result = dm.init_data(tmp_dir_path, "test")
+        assert result.returncode == 0
 
-    with data_mgmt.cd(tmp_dir_path):
-        dm = shared_dm()
+        copy_test_data(tmpdir)
+
+        dm = shared_dm(tmpdir)
         prepare_registration_expectations(dm)
         set_registration_configuration(dm)
 
@@ -161,31 +168,36 @@ def test_child_data_set(tmpdir):
                                         properties, contents)
 
 
-def test_external_dms_code_and_address():
-    # given
-    dm = shared_dm()
-    prepare_registration_expectations(dm)
-    obis_sync = data_mgmt.OpenbisSync(dm)
-    set_registration_configuration(dm)
-    user = obis_sync.user()
-    hostname = socket.gethostname()
-    expected_edms_id = obis_sync.external_dms_id()
-    result = obis_sync.git_wrapper.git_top_level_path()
-    assert result.failure() == False
-    edms_path, folder = os.path.split(result.output)
-    path_hash = hashlib.sha1(edms_path.encode("utf-8")).hexdigest()[0:8]
-    if expected_edms_id is None:
-        expected_edms_id = "{}-{}-{}".format(user, hostname, path_hash).upper()
-    # when
-    result = obis_sync.get_or_create_external_data_management_system();
-    # then
-    assert result.failure() == False
-    dm.openbis.get_external_data_management_system.assert_called_with(expected_edms_id)
+def test_external_dms_code_and_address(tmpdir):
+    tmp_dir_path = str(tmpdir)
+
+    with data_mgmt.cd(tmp_dir_path):
+        # given
+        dm = shared_dm(tmp_dir_path)
+        prepare_registration_expectations(dm)
+        obis_sync = data_mgmt.OpenbisSync(dm)
+        set_registration_configuration(dm)
+        user = obis_sync.user()
+        hostname = socket.gethostname()
+        expected_edms_id = obis_sync.external_dms_id()
+        result = obis_sync.git_wrapper.git_init()
+        assert result.failure() == False
+        result = obis_sync.git_wrapper.git_top_level_path()
+        assert result.failure() == False
+        edms_path, folder = os.path.split(result.output)
+        path_hash = hashlib.sha1(edms_path.encode("utf-8")).hexdigest()[0:8]
+        if expected_edms_id is None:
+            expected_edms_id = "{}-{}-{}".format(user, hostname, path_hash).upper()
+        # when
+        result = obis_sync.get_or_create_external_data_management_system();
+        # then
+        assert result.failure() == False
+        dm.openbis.get_external_data_management_system.assert_called_with(expected_edms_id)
 
 
 def test_undo_commit_when_sync_fails(tmpdir):
     # given
-    dm = shared_dm()
+    dm = shared_dm(tmpdir)
     dm.git_wrapper = Mock()
     dm.git_wrapper.git_top_level_path = MagicMock(return_value = CommandResult(returncode=0, output=None))
     dm.git_wrapper.git_add = MagicMock(return_value = CommandResult(returncode=0, output=None))
@@ -199,18 +211,19 @@ def test_undo_commit_when_sync_fails(tmpdir):
 
 
 def test_init_analysis(tmpdir):
-    dm = shared_dm()
-
     tmp_dir_path = str(tmpdir)
 
-    result = dm.init_data(tmp_dir_path, "test")
-    assert result.returncode == 0
-
-    copy_test_data(tmpdir)
-
     with data_mgmt.cd(tmp_dir_path):
-        dm = shared_dm()
+
+        dm = shared_dm(tmp_dir_path)
         prepare_registration_expectations(dm)
+        openbis = dm.openbis
+
+        result = dm.init_data(tmp_dir_path, "test")
+        assert result.returncode == 0
+
+        copy_test_data(tmpdir)
+
         set_registration_configuration(dm)
 
         result = dm.commit("Added data.")
@@ -218,11 +231,17 @@ def test_init_analysis(tmpdir):
         parent_ds_code = dm.settings_resolver.config_dict()['repository']['data_set_id']
 
         analysis_repo = "analysis"
-        result = dm.init_analysis(analysis_repo, None)
-        assert result.returncode == 0
+        os.mkdir(analysis_repo)
 
         with data_mgmt.cd(analysis_repo):
 
+            dm = shared_dm(os.path.join(tmp_dir_path, analysis_repo))
+            dm.openbis = openbis
+            prepare_new_data_set_expectations(dm)
+
+            result = dm.init_analysis("..")
+            assert result.returncode == 0
+
             set_registration_configuration(dm)
             prepare_new_data_set_expectations(dm)
             result = dm.commit("Analysis.")
diff --git a/obis/src/python/obis/dm/git.py b/obis/src/python/obis/dm/git.py
index 32a5b670283adf2f7b984694db52fcf77084f36f..af3bb7304a80f41703268235536be3a7a4366c2f 100644
--- a/obis/src/python/obis/dm/git.py
+++ b/obis/src/python/obis/dm/git.py
@@ -1,7 +1,7 @@
 import shutil
 import os
 from pathlib import Path
-from .utils import run_shell
+from .utils import run_shell, cd
 from .command_result import CommandResult, CommandException
 from .checksum import ChecksumGeneratorCrc32, ChecksumGeneratorGitAnnex
 
@@ -9,9 +9,20 @@ from .checksum import ChecksumGeneratorCrc32, ChecksumGeneratorGitAnnex
 class GitWrapper(object):
     """A wrapper on commands to git."""
 
-    def __init__(self, git_path=None, git_annex_path=None, find_git=None):
+    def __init__(self, git_path=None, git_annex_path=None, find_git=None, data_path=None, metadata_path=None, invocation_path=None):
         self.git_path = git_path
         self.git_annex_path = git_annex_path
+        self.data_path = data_path
+        self.metadata_path = metadata_path
+
+    def _git(self, params, strip_leading_whitespace=True, relative_repo_path=''):
+        cmd = [self.git_path]
+        if self.data_path is not None and self.metadata_path is not None:
+            git_dir = os.path.join(self.metadata_path, relative_repo_path, '.git')
+            cmd += ['--work-tree', self.data_path, '--git-dir', git_dir]
+        cmd += params
+        return run_shell(cmd, strip_leading_whitespace=strip_leading_whitespace)
+
 
     def can_run(self):
         """Return true if the perquisites are satisfied to run"""
@@ -19,52 +30,55 @@ class GitWrapper(object):
             return False
         if self.git_annex_path is None:
             return False
-        if run_shell([self.git_path, 'help']).failure():
+        if self._git(['help']).failure():
             # git help should have a returncode of 0
             return False
-        if run_shell([self.git_annex_path, 'help']).failure():
+        if self._git(['annex', 'help']).failure():
             # git help should have a returncode of 0
             return False
         return True
 
-    def git_init(self, path):
-        return run_shell([self.git_path, "init", path])
+    def git_init(self):
+        result = self._git(["init"])
+        self.git_ignore('.obis')
+        self.git_ignore('.obis_restorepoint')
+        return result
 
     def git_status(self, path=None):
         if path is None:
-            return run_shell([self.git_path, "annex", "status"], strip_leading_whitespace=False)
+            return self._git(["annex", "status"], strip_leading_whitespace=False)
         else:
-            return run_shell([self.git_path, "annex", "status", path], strip_leading_whitespace=False)
+            return self._git(["annex", "status", path], strip_leading_whitespace=False)
 
-    def git_annex_init(self, path, desc, git_annex_backend=None):
-        cmd = [self.git_path, "-C", path, "annex", "init", "--version=5"]
+    def git_annex_init(self, desc, git_annex_backend=None):
+        cmd = ["annex", "init", "--version=5"]
         if desc is not None:
             cmd.append(desc)
-        result = run_shell(cmd)
+        result = self._git(cmd)
         if result.failure():
             return result
 
         # annex.thin to avoid copying big files
-        cmd = [self.git_path, "-C", path, "config", "annex.thin", "true"]
-        result = run_shell(cmd)
+        cmd = ["config", "annex.thin", "true"]
+        result = self._git(cmd)
         if result.failure():
             return result
 
         # direct mode so annex uses hard links instead of soft links
-        cmd = [self.git_path, "-C", path, "annex", "direct"]
-        result = run_shell(cmd)
+        cmd = ["annex", "direct"]
+        result = self._git(cmd)
         if result.failure():
             return result
 
         # re-enable the repository to be used with git directly
         # though we need to know what we do since annex can lead to unexpected behaviour
-        cmd = [self.git_path, "-C", path, "config", "--unset", "core.bare"]
-        result = run_shell(cmd)
+        cmd = ["config", "--unset", "core.bare"]
+        result = self._git(cmd)
         if result.failure():
             return result
 
         attributes_src = os.path.join(os.path.dirname(__file__), "git-annex-attributes")
-        attributes_dst = os.path.join(path, ".git/info/attributes")
+        attributes_dst = '.git/info/attributes'
         shutil.copyfile(attributes_src, attributes_dst)
         self._apply_git_annex_backend(attributes_dst, git_annex_backend)
 
@@ -72,16 +86,7 @@ class GitWrapper(object):
 
     def initial_commit(self):
         # initial commit is needed. we can restore to it when something fails
-        folder = '.obis'
-        file = '.gitignore'
-        path = folder + '/' + file
-        if not os.path.exists(folder):
-            os.makedirs(folder)
-        Path(path).touch()
-        result = self.git_add(path)
-        if result.failure():
-            return result
-        return self.git_commit("Initial commit.")
+        return self._git(['commit', '--allow-empty', '-m', 'Initial commit.'])
 
     def _apply_git_annex_backend(self, filename, git_annex_backend):
         if git_annex_backend is not None:
@@ -99,37 +104,35 @@ class GitWrapper(object):
 
     def git_add(self, path):
         # git annex add to avoid out of memory error when adding files bigger than RAM
-        return run_shell([self.git_path, "annex", "add", path, "--include-dotfiles"])
+        return self._git(["annex", "add", path, "--include-dotfiles"])
 
     def git_commit(self, msg):
-        return run_shell([self.git_path, "commit", '-m', msg])
+        return self._git(['commit', '--allow-empty', '-m', msg])
 
     def git_top_level_path(self):
-        return run_shell([self.git_path, 'rev-parse', '--show-toplevel'])
+        return self._git(['rev-parse', '--show-toplevel'])
 
     def git_commit_hash(self):
-        return run_shell([self.git_path, 'rev-parse', '--short', 'HEAD'])
+        return self._git(['rev-parse', '--short', 'HEAD'])
 
     def git_ls_tree(self):
-        return run_shell([self.git_path, 'ls-tree', '--full-tree', '-r', 'HEAD'])
+        return self._git(['ls-tree', '--full-tree', '-r', 'HEAD'])
 
-    def git_checkout(self, path):
-        return run_shell([self.git_path, "checkout", path])
+    def git_checkout(self, path_or_hash, relative_repo_path=''):
+        if relative_repo_path:
+            return self._git(['checkout', path_or_hash], relative_repo_path=relative_repo_path)
+        return self._git(["checkout", path_or_hash])
 
     def git_reset_to(self, commit_hash):
-        return run_shell([self.git_path, 'reset', commit_hash])
+        return self._git(['reset', commit_hash])
 
     def git_ignore(self, path):
-        result = run_shell([self.git_path, 'check-ignore', path])
+        result = self._git(['check-ignore', path])
         if result.returncode == 1:
-            with open(".gitignore", "a") as gitignore:
+            with open(".git/info/exclude", "a") as gitignore:
                 gitignore.write(path)
                 gitignore.write("\n")
 
-    def git_delete_if_untracked(self, file):
-        result = run_shell([self.git_path, 'ls-files', '--error-unmatch', file])
-        if 'did not match' in result.output:
-            run_shell(['rm', file])
 
 class GitRepoFileInfo(object):
     """Class that gathers checksums and file lengths for all files in the repo."""
@@ -153,7 +156,7 @@ class GitRepoFileInfo(object):
 
     def file_list(self):
         tree = self.git_wrapper.git_ls_tree()
-        if tree.failure():
+        if tree.failure() or len(tree.output) == 0:
             return []
         lines = tree.output.split("\n")
         files = [line.split("\t")[-1].strip() for line in lines]
@@ -162,9 +165,9 @@ class GitRepoFileInfo(object):
     def cksum(self, files, git_annex_hash_as_checksum=False):
 
         if git_annex_hash_as_checksum == False:
-            checksum_generator = ChecksumGeneratorCrc32()
+            checksum_generator = ChecksumGeneratorCrc32(self.git_wrapper.data_path, self.git_wrapper.metadata_path)
         else:
-            checksum_generator = ChecksumGeneratorGitAnnex()
+            checksum_generator = ChecksumGeneratorGitAnnex(self.git_wrapper.data_path, self.git_wrapper.metadata_path)
 
         checksums = []
 
diff --git a/obis/src/python/obis/dm/repo.py b/obis/src/python/obis/dm/repo.py
deleted file mode 100644
index 9e6599744bbc3a3c589cc5c07404679998513ee9..0000000000000000000000000000000000000000
--- a/obis/src/python/obis/dm/repo.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""
-repo.py
-
-Python interface to data management functionality. Recommended for use from Jupyter.
-
-Created by Chandrasekhar Ramakrishnan on 2017-03-03.
-Copyright (c) 2017 Chandrasekhar Ramakrishnan. All rights reserved.
-"""
-
-from . import data_mgmt
-
-
-class DataRepo(object):
-    def __init__(self, root):
-        """ Initialize with a folder as the root.
-        :param root: The root path for the repo
-        """
-        self.root = root
-        self.dm_api = data_mgmt.DataMgmt(git_config={'find_git': True})
-        self.dm_api.settings_resolver.set_resolver_location_roots('data_set', self.root)
-
-    def init(self, desc=None):
-        return self.dm_api.init_data(self.root, desc)
-
-    def commit(self, msg, auto_add=True, sync=True):
-        with data_mgmt.cd(self.root):
-            result = self.dm_api.commit(msg, auto_add, sync)
-        return result
diff --git a/obis/src/python/obis/dm/repo_test.py b/obis/src/python/obis/dm/repo_test.py
deleted file mode 100644
index 8fd7ec4c9d788094c3630f9851ded15cd03c2b44..0000000000000000000000000000000000000000
--- a/obis/src/python/obis/dm/repo_test.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""
-repo_test.py
-
-
-Created by Chandrasekhar Ramakrishnan on 2017-03-03.
-Copyright (c) 2017 Chandrasekhar Ramakrishnan. All rights reserved.
-"""
-from unittest.mock import Mock, MagicMock
-
-from unittest.mock import Mock, MagicMock, ANY
-from . import CommandResult
-from . import repo as dm_repo
-from . import data_mgmt
-from . import utils
-from .data_mgmt_test import git_status, copy_test_data, prepare_registration_expectations, \
-    set_registration_configuration
-
-
-def test_data_use_case(tmpdir):
-    tmp_dir_path = str(tmpdir)
-    assert git_status(tmp_dir_path).returncode == 128  # The folder should not be a git repo at first.
-
-    repo = dm_repo.DataRepo(tmp_dir_path)
-    prepare_registration_expectations(repo.dm_api)
-    set_registration_configuration(repo.dm_api)
-
-    # repo.dm_api.commit = MagicMock(return_value = CommandResult(returncode=0, output=None))
-    print(repo.dm_api.openbis)
-
-    result = repo.init("test")
-    assert result.returncode == 0
-
-    assert git_status(tmp_dir_path).returncode == 0  # The folder should be a git repo now
-    assert git_status(tmp_dir_path, annex=True).returncode == 0  # ...and a git-annex repo as well.
-
-    copy_test_data(tmpdir)
-
-    result = repo.commit("Added data.")
-    assert result.returncode == 0
-
-    with data_mgmt.cd(tmp_dir_path):
-        # The zip should be in the annex
-        result = utils.run_shell(['git', 'annex', 'info', 'snb-data.zip'])
-        present_p = result.output.split('\n')[-1]
-        assert present_p == 'present: true'
-
-        # The txt files should be in git normally
-        result = utils.run_shell(['git', 'annex', 'info', 'text-data.txt'])
-        assert 'Not a valid object name' in str(result)
-        result = utils.run_shell(['git', 'log', '--oneline', 'text-data.txt'])
-        present_p = " ".join(result.output.split(' ')[1:])
-        assert present_p == 'Added data.'
diff --git a/obis/src/python/obis/scripts/cli.py b/obis/src/python/obis/scripts/cli.py
index 1e551dd11bcbd32f407f110a53bfd32f562f4886..37088ebf271b4d8d6e01c8de04eebde8671befa8 100644
--- a/obis/src/python/obis/scripts/cli.py
+++ b/obis/src/python/obis/scripts/cli.py
@@ -11,21 +11,16 @@ Created by Chandrasekhar Ramakrishnan on 2017-01-27.
 Copyright (c) 2017 Chandrasekhar Ramakrishnan. All rights reserved.
 """
 import json
+import os
 import sys
 from datetime import datetime
 
 import click
 
-from .. import dm
 from ..dm.command_result import CommandResult
-from ..dm.command_result import CommandException
 from ..dm.utils import cd
-from ..dm.command_log import CommandLog
-
-
-def click_echo(message):
-    timestamp = datetime.now().strftime("%H:%M:%S")
-    click.echo("{} {}".format(timestamp, message))
+from .data_mgmt_runner import DataMgmtRunner
+from .click_util import click_echo
 
 
 def click_progress(progress_data):
@@ -46,37 +41,6 @@ def add_params(params):
     return _add_params
 
 
-def shared_data_mgmt(context={}, halt_on_error_log=True):
-    git_config = {'find_git': True}
-    openbis_config = {}
-    if context.get('verify_certificates') is not None:
-        openbis_config['verify_certificates'] = context['verify_certificates']
-    log = CommandLog()
-    if halt_on_error_log and log.any_log_exists():
-        click_echo("Error: A previous command did not finish. Please check the log ({}) and remove it when you want to continue using obis".format(log.folder_path))
-        sys.exit(-1)
-    return dm.DataMgmt(openbis_config=openbis_config, git_config=git_config, log=log, debug=context['debug'])
-
-
-def check_result(command, result):
-    if result.failure():
-        click_echo("Could not {}:\n{}".format(command, result.output))
-    elif len(result.output) > 0:
-        click_echo(result.output)
-    return result.returncode
-
-
-def run(ctx, function):
-    try:
-        return function()
-    except CommandException as e:
-        return e.command_result
-    except Exception as e:
-        if ctx.obj['debug'] == True:
-            raise e
-        return CommandResult(returncode=-1, output="Error: " + str(e))
-
-
 @click.group()
 @click.version_option(version=None)
 @click.option('-q', '--quiet', default=False, is_flag=True, help='Suppress status reporting.')
@@ -90,53 +54,29 @@ def cli(ctx, quiet, skip_verification, debug):
     ctx.obj['debug'] = debug
 
 
-def set_property(data_mgmt, resolver, prop, value, is_global, is_data_set_property=False):
-    """Helper function to implement the property setting semantics."""
-    loc = 'global' if is_global else 'local'
-    try:
-        if is_data_set_property:
-            resolver.set_value_for_json_parameter('properties', prop, value, loc, apply_rules=True)
-        else:
-            resolver.set_value_for_parameter(prop, value, loc, apply_rules=True)
-    except ValueError as e:
-        if data_mgmt.debug ==  True:
-            raise e
-        return CommandResult(returncode=-1, output="Error: " + str(e))
-    if not is_global:
-        return data_mgmt.commit_metadata_updates(prop)
-    else:
-        return CommandResult(returncode=0, output="")
-
-
-def init_data_impl(ctx, object_id, collection_id, repository, desc):
+def init_data_impl(ctx, repository, desc):
     """Shared implementation for the init_data command."""
     if repository is None:
         repository = "."
     click_echo("init_data {}".format(repository))
-    data_mgmt = shared_data_mgmt(ctx.obj)
     desc = desc if desc != "" else None
-    result = run(ctx, lambda: data_mgmt.init_data(repository, desc, create=True))
-    init_handle_cleanup(result, object_id, collection_id, repository, data_mgmt)
+    return ctx.obj['runner'].run("init_data", lambda dm: dm.init_data(desc, create=True), repository)
 
 
-def init_analysis_impl(ctx, parent, object_id, collection_id, repository, description):
+def init_analysis_impl(ctx, parent, repository, description):
     click_echo("init_analysis {}".format(repository))
-    data_mgmt = shared_data_mgmt(ctx.obj)
+    if parent is not None and os.path.isabs(parent):
+        click_echo('Error: The parent must be given as a relative path.')
+        return -1
+    if repository is not None and os.path.isabs(repository):
+        click_echo('Error: The repository must be given as a relative path.')
+        return -1
     description = description if description != "" else None
-    result = run(ctx, lambda: data_mgmt.init_analysis(repository, parent, description, create=True))
-    init_handle_cleanup(result, object_id, collection_id, repository, data_mgmt)
-
-
-def init_handle_cleanup(result, object_id, collection_id, repository, data_mgmt):
-    if (not object_id and not collection_id) or result.failure():
-        return check_result("init_data", result)
-    with dm.cd(repository):
-        if object_id:
-            resolver = data_mgmt.object
-            return check_result("init_data", set_property(data_mgmt, resolver, 'id', object_id, False, False))
-        if collection_id:
-            resolver = data_mgmt.collection
-            return check_result("init_data", set_property(data_mgmt, resolver, 'id', collection_id, False, False))
+    parent_dir = os.getcwd() if parent is None else os.path.join(os.getcwd(), parent)
+    analysis_dir = os.path.join(os.getcwd(), repository or '.')
+    parent = os.path.relpath(parent_dir, analysis_dir)
+    parent = '..' if parent is None else parent
+    return ctx.obj['runner'].run("init_analysis", lambda dm: dm.init_analysis(parent, description, create=True), repository)
 
 
 # settings commands
@@ -217,65 +157,14 @@ def _join_settings_get(setting_lists):
     return joined
 
 
-def config_internal(data_mgmt, resolver, is_global, is_data_set_property, prop=None, value=None, set=False, get=False, clear=False):
-    if set == True:
-        assert get == False
-        assert clear == False
-        assert prop is not None
-        assert value is not None
-    elif get == True:
-        assert set == False
-        assert clear == False
-        assert value is None
-    elif clear == True:
-        assert get == False
-        assert set == False
-        assert value is None
-
-    assert set == True or get == True or clear == True
-    if is_global:
-        resolver.set_location_search_order(['global'])
-    else:
-        top_level_path = data_mgmt.git_wrapper.git_top_level_path()
-        if top_level_path.success():
-            resolver.set_resolver_location_roots('data_set', top_level_path.output)
-            resolver.set_location_search_order(['local'])
-        else:
-            resolver.set_location_search_order(['global'])
-
-    config_dict = resolver.config_dict()
-    if is_data_set_property:
-        config_dict = config_dict['properties']
-    if get == True:
-        if prop is None:
-            config_str = json.dumps(config_dict, indent=4, sort_keys=True)
-            click.echo("{}".format(config_str))
-        else:
-            if not prop in config_dict:
-                raise ValueError("Unknown setting {} for {}.".format(prop, resolver.categoty))
-            little_dict = {prop: config_dict[prop]}
-            config_str = json.dumps(little_dict, indent=4, sort_keys=True)
-            click.echo("{}".format(config_str))            
-    elif set == True:
-        return check_result("config", set_property(data_mgmt, resolver, prop, value, is_global, is_data_set_property))
-    elif clear == True:
-        if prop is None:
-            returncode = 0
-            for prop in config_dict.keys():
-                returncode += check_result("config", set_property(data_mgmt, resolver, prop, None, is_global, is_data_set_property))
-            return returncode
-        else:
-            return check_result("config", set_property(data_mgmt, resolver, prop, None, is_global, is_data_set_property))
-
-
 def _access_settings(ctx, prop=None, value=None, set=False, get=False, clear=False):
     is_global = ctx.obj['is_global']
-    data_mgmt = ctx.obj['data_mgmt']
+    runner = ctx.obj['runner']
     resolver = ctx.obj['resolver']
     is_data_set_property = False
     if 'is_data_set_property' in ctx.obj:
         is_data_set_property = ctx.obj['is_data_set_property']
-    config_internal(data_mgmt, resolver, is_global, is_data_set_property, prop=prop, value=value, set=set, get=get, clear=clear)
+    runner.config(resolver, is_global, is_data_set_property, prop=prop, value=value, set=set, get=get, clear=clear)
 
 
 def _set(ctx, settings):
@@ -317,8 +206,8 @@ def settings(ctx, is_global):
 @settings.command('get')
 @click.pass_context
 def settings_get(ctx):
-    data_mgmt = shared_data_mgmt(ctx.obj, halt_on_error_log=False)
-    settings = data_mgmt.settings_resolver.config_dict()
+    runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
+    settings = runner.get_settings()
     settings_str = json.dumps(settings, indent=4, sort_keys=True)
     click.echo("{}".format(settings_str))
 
@@ -331,30 +220,31 @@ def settings_get(ctx):
 def repository(ctx, is_global):
     """ Get/set settings related to the repository.
     """
+    runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
-    ctx.obj['data_mgmt'] = shared_data_mgmt(ctx.obj, halt_on_error_log=False)
-    ctx.obj['resolver'] = ctx.obj['data_mgmt'].settings_resolver.repository
+    ctx.obj['runner'] = runner
+    ctx.obj['resolver'] = 'repository'
 
 
 @repository.command('set')
 @click.argument('settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def repository_set(ctx, settings):
-    return check_result("repository_set", run(ctx, lambda: _set(ctx, settings)))
+    return ctx.obj['runner'].run("repository_set", lambda dm: _set(ctx, settings))
 
 
 @repository.command('get')
 @click.argument('settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def repository_get(ctx, settings):
-    return check_result("repository_get", run(ctx, lambda: _get(ctx, settings)))
+    return ctx.obj['runner'].run("repository_get", lambda dm: _get(ctx, settings))
 
 
 @repository.command('clear')
 @click.argument('settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def repository_clear(ctx, settings):
-    return check_result("repository_clear", run(ctx, lambda: _clear(ctx, settings)))
+    return ctx.obj['runner'].run("repository_clear", lambda dm: _clear(ctx, settings))
 
 
 ## data_set: type, properties
@@ -367,31 +257,32 @@ def repository_clear(ctx, settings):
 def data_set(ctx, is_global, is_data_set_property):
     """ Get/set settings related to the data set.
     """
+    runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
     ctx.obj['is_data_set_property'] = is_data_set_property
-    ctx.obj['data_mgmt'] = shared_data_mgmt(ctx.obj, halt_on_error_log=False)
-    ctx.obj['resolver'] = ctx.obj['data_mgmt'].settings_resolver.data_set
+    ctx.obj['runner'] = runner
+    ctx.obj['resolver'] = 'data_set'
 
 
 @data_set.command('set')
 @click.argument('settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def data_set_set(ctx, settings):
-    return check_result("data_set_set", run(ctx, lambda: _set(ctx, settings)))
+    return ctx.obj['runner'].run("data_set_set", lambda dm: _set(ctx, settings))
 
 
 @data_set.command('get')
 @click.argument('settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def data_set_get(ctx, settings):
-    return check_result("data_set_get", run(ctx, lambda: _get(ctx, settings)))
+    return ctx.obj['runner'].run("data_set_get", lambda dm: _get(ctx, settings))
 
 
 @data_set.command('clear')
 @click.argument('settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def data_set_clear(ctx, settings):
-    return check_result("data_set_clear", run(ctx, lambda: _clear(ctx, settings)))
+    return ctx.obj['runner'].run("data_set_clear", lambda dm: _clear(ctx, settings))
 
 
 ## object: object_id
@@ -403,30 +294,31 @@ def data_set_clear(ctx, settings):
 def object(ctx, is_global):
     """ Get/set settings related to the object.
     """
+    runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
-    ctx.obj['data_mgmt'] = shared_data_mgmt(ctx.obj, halt_on_error_log=False)
-    ctx.obj['resolver'] = ctx.obj['data_mgmt'].settings_resolver.object
+    ctx.obj['runner'] = runner
+    ctx.obj['resolver'] = 'object'
 
 
 @object.command('set')
 @click.argument('settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def object_set(ctx, settings):
-    return check_result("object_set", run(ctx, lambda: _set(ctx, settings)))
+    return ctx.obj['runner'].run("object_set", lambda dm: _set(ctx, settings))
 
 
 @object.command('get')
 @click.argument('settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def object_get(ctx, settings):
-    return check_result("object_get", run(ctx, lambda: _get(ctx, settings)))
+    return ctx.obj['runner'].run("object_get", lambda dm: _get(ctx, settings))
 
 
 @object.command('clear')
 @click.argument('settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def object_clear(ctx, settings):
-    return check_result("object_clear", run(ctx, lambda: _clear(ctx, settings)))
+    return ctx.obj['runner'].run("object_clear", lambda dm: _clear(ctx, settings))
 
 
 ## collection: collection_id
@@ -438,30 +330,31 @@ def object_clear(ctx, settings):
 def collection(ctx, is_global):
     """ Get/set settings related to the collection.
     """
+    runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
-    ctx.obj['data_mgmt'] = shared_data_mgmt(ctx.obj, halt_on_error_log=False)
-    ctx.obj['resolver'] = ctx.obj['data_mgmt'].settings_resolver.collection
+    ctx.obj['runner'] = runner
+    ctx.obj['resolver'] = 'collection'
 
 
 @collection.command('set')
 @click.argument('settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def collection_set(ctx, settings):
-    return check_result("collection_set", run(ctx, lambda: _set(ctx, settings)))
+    return ctx.obj['runner'].run("collection_set", lambda dm: _set(ctx, settings))
 
 
 @collection.command('get')
 @click.argument('settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def collection_get(ctx, settings):
-    return check_result("collection_get", run(ctx, lambda: _get(ctx, settings)))
+    return ctx.obj['runner'].run("collection_get", lambda dm: _get(ctx, settings))
 
 
 @collection.command('clear')
 @click.argument('settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def collection_clear(ctx, settings):
-    return check_result("collection_clear", run(ctx, lambda: _clear(ctx, settings)))
+    return ctx.obj['runner'].run("collection_clear", lambda dm: _clear(ctx, settings))
 
 
 ## config: fileservice_url, git_annex_hash_as_checksum, hostname, openbis_url, user, verify_certificates
@@ -473,30 +366,31 @@ def collection_clear(ctx, settings):
 def config(ctx, is_global):
     """ Get/set configurations.
     """
+    runner = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.obj['is_global'] = is_global
-    ctx.obj['data_mgmt'] = shared_data_mgmt(ctx.obj, halt_on_error_log=False)
-    ctx.obj['resolver'] = ctx.obj['data_mgmt'].settings_resolver.config
+    ctx.obj['runner'] = runner
+    ctx.obj['resolver'] = 'config'
 
 
 @config.command('set')
 @click.argument('settings', type=SettingsSet(), nargs=-1)
 @click.pass_context
 def config_set(ctx, settings):
-    return check_result("config_set", run(ctx, lambda: _set(ctx, settings)))
+    return ctx.obj['runner'].run("config_set", lambda dm: _set(ctx, settings))
 
 
 @config.command('get')
 @click.argument('settings', type=SettingsGet(), nargs=-1)
 @click.pass_context
 def config_get(ctx, settings):
-    return check_result("config_get", run(ctx, lambda: _get(ctx, settings)))
+    return ctx.obj['runner'].run("config_get", lambda dm: _get(ctx, settings))
 
 
 @config.command('clear')
 @click.argument('settings', type=SettingsClear(), nargs=-1)
 @click.pass_context
 def config_clear(ctx, settings):
-    return check_result("config_clear", run(ctx, lambda: _clear(ctx, settings)))
+    return ctx.obj['runner'].run("config_clear", lambda dm: _clear(ctx, settings))
 
 
 # repository commands: status, sync, commit, init, addref, removeref, init_analysis
@@ -504,36 +398,29 @@ def config_clear(ctx, settings):
 ## commit
 
 _commit_params = [
-    click.option('-m', '--msg', prompt=True, help='A message explaining what was done.'),
+    click.option('-m', '--msg', default="obis commit", help='A message explaining what was done.'),
     click.option('-a', '--auto_add', default=True, is_flag=True, help='Automatically add all untracked files.'),
     click.option('-i', '--ignore_missing_parent', default=True, is_flag=True, help='If parent data set is missing, ignore it.'),
     click.argument('repository', type=click.Path(exists=True, file_okay=False), required=False),
 ]
 
-def _repository_commit(ctx, msg, auto_add, ignore_missing_parent):
-    data_mgmt = shared_data_mgmt(ctx.obj)
-    return check_result("commit", run(ctx, lambda: data_mgmt.commit(msg, auto_add, ignore_missing_parent)))
 
 @repository.command("commit", short_help="Commit the repository to git and inform openBIS.")
 @click.pass_context
 @add_params(_commit_params)
 def repository_commit(ctx, msg, auto_add, ignore_missing_parent, repository):
-    if repository is None:
-        return _repository_commit(ctx, msg, auto_add, ignore_missing_parent)
-    with cd(repository):
-        return _repository_commit(ctx, msg, auto_add, ignore_missing_parent)
+    return ctx.obj['runner'].run("commit", lambda dm: dm.commit(msg, auto_add, ignore_missing_parent), repository)
 
 @cli.command(short_help="Commit the repository to git and inform openBIS.")
 @click.pass_context
 @add_params(_commit_params)
 def commit(ctx, msg, auto_add, ignore_missing_parent, repository):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_commit, msg=msg, auto_add=auto_add, ignore_missing_parent=ignore_missing_parent, repository=repository)
 
 ## init
 
 _init_params = [
-    click.option('-oi', '--object_id', help='Set the id of the owning sample.'),
-    click.option('-ci', '--collection_id', help='Set the id of the owning experiment.'),
     click.argument('repository', type=click.Path(exists=False, file_okay=False), required=False),
     click.argument('description', default=""),
 ]
@@ -541,14 +428,15 @@ _init_params = [
 @repository.command("init", short_help="Initialize the folder as a data repository.")
 @click.pass_context
 @add_params(_init_params)
-def repository_init(ctx, object_id, collection_id, repository, description):
-    return init_data_impl(ctx, object_id, collection_id, repository, description)
+def repository_init(ctx, repository, description):
+    return init_data_impl(ctx, repository, description)
 
 @cli.command(short_help="Initialize the folder as a data repository.")
 @click.pass_context
 @add_params(_init_params)
-def init(ctx, object_id, collection_id, repository, description):
-    ctx.invoke(repository_init, object_id=object_id, collection_id=collection_id, repository=repository, description=description)
+def init(ctx, repository, description):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
+    ctx.invoke(repository_init, repository=repository, description=description)
 
 ## init analysis
 
@@ -560,14 +448,15 @@ _init_analysis_params += _init_params
 @repository.command("init_analysis", short_help="Initialize the folder as an analysis folder.")
 @click.pass_context
 @add_params(_init_analysis_params)
-def repository_init_analysis(ctx, parent, object_id, collection_id, repository, description):
-    return init_analysis_impl(ctx, parent, object_id, collection_id, repository, description)
+def repository_init_analysis(ctx, parent, repository, description):
+    return init_analysis_impl(ctx, parent, repository, description)
 
 @cli.command(short_help="Initialize the folder as an analysis folder.")
 @click.pass_context
 @add_params(_init_analysis_params)
-def init_analysis(ctx, parent, object_id, collection_id, repository, description):
-    ctx.invoke(repository_init_analysis, parent=parent, object_id=object_id, collection_id=collection_id, repository=repository, description=description)
+def init_analysis(ctx, parent, repository, description):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
+    ctx.invoke(repository_init_analysis, parent=parent, repository=repository, description=description)
 
 ## status
 
@@ -575,24 +464,17 @@ _status_params = [
     click.argument('repository', type=click.Path(exists=True, file_okay=False), required=False),
 ]
 
-def _repository_status(ctx):
-    data_mgmt = shared_data_mgmt(ctx.obj)
-    result = run(ctx, data_mgmt.status)
-    click.echo(result.output)    
-
 @repository.command("status", short_help="Show the state of the obis repository.")
 @click.pass_context
 @add_params(_status_params)
 def repository_status(ctx, repository):
-    if repository is None:
-        return _repository_status(ctx)
-    with cd(repository):
-        return _repository_status(ctx)        
+    return ctx.obj['runner'].run("repository_status", lambda dm: dm.status(), repository)
 
 @cli.command(short_help="Show the state of the obis repository.")
 @click.pass_context
 @add_params(_status_params)
 def status(ctx, repository):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_status, repository=repository)
 
 ## sync
@@ -602,23 +484,17 @@ _sync_params = [
     click.argument('repository', type=click.Path(exists=True, file_okay=False), required=False),
 ]
 
-def _repository_sync(ctx, ignore_missing_parent):
-    data_mgmt = shared_data_mgmt(ctx.obj)
-    return check_result("sync", run(ctx, lambda: data_mgmt.sync(ignore_missing_parent)))
-
 @repository.command("sync", short_help="Sync the repository with openBIS.")
 @click.pass_context
 @add_params(_sync_params)
 def repository_sync(ctx, ignore_missing_parent, repository):
-    if repository is None:
-        return _repository_sync(ctx, ignore_missing_parent)
-    with cd(repository):
-        return _repository_sync(ctx, ignore_missing_parent)
+    return ctx.obj['runner'].run("sync", lambda dm: dm.sync(ignore_missing_parent), repository)
 
 @cli.command(short_help="Sync the repository with openBIS.")
 @click.pass_context
 @add_params(_sync_params)
 def sync(ctx, ignore_missing_parent, repository):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_sync, ignore_missing_parent=ignore_missing_parent, repository=repository)
 
 ## addref
@@ -627,23 +503,17 @@ _addref_params = [
     click.argument('repository', type=click.Path(exists=True, file_okay=False), required=False),
 ]
 
-def _repository_addref(ctx):
-    data_mgmt = shared_data_mgmt(ctx.obj)
-    return check_result("addref", run(ctx, data_mgmt.addref))
-
 @repository.command("addref", short_help="Add the given repository as a reference to openBIS.")
 @click.pass_context
 @add_params(_addref_params)
 def repository_addref(ctx, repository):
-    if repository is None:
-        return _repository_addref(ctx)
-    with cd(repository):
-        return _repository_addref(ctx)
+    return ctx.obj['runner'].run("addref", lambda dm: dm.addref(), repository)
 
 @cli.command(short_help="Add the given repository as a reference to openBIS.")
 @click.pass_context
 @add_params(_addref_params)
 def addref(ctx, repository):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_addref, repository=repository)
 
 # removeref
@@ -653,31 +523,21 @@ _removeref_params = [
     click.argument('repository', type=click.Path(exists=True, file_okay=False), required=False),
 ]
 
-def _repository_removeref(ctx, data_set_id=None):
-    data_mgmt = shared_data_mgmt(ctx.obj)
-    return check_result("removeref", run(ctx, lambda: data_mgmt.removeref(data_set_id=data_set_id)))
-
 @repository.command("removeref", short_help="Remove the reference to the given repository from openBIS.")
 @click.pass_context
 @add_params(_removeref_params)
 def repository_removeref(ctx, data_set_id, repository):
-    if data_set_id is None:
-        if repository is None:
-            return _repository_removeref(ctx)
-        with cd(repository):
-            return _repository_removeref(ctx)
-    else:
-        if repository is not None:
-            print(repository)
-            click_echo("Only provide the data_set id OR the repository.")
-            return -1
-        return _repository_removeref(ctx, data_set_id=data_set_id)
+    if data_set_id is not None and repository is not None:
+        click_echo("Only provide the data_set id OR the repository.")
+        return -1
+    return ctx.obj['runner'].run("removeref", lambda dm: dm.removeref(data_set_id=data_set_id), repository)
 
 
 @cli.command(short_help="Remove the reference to the given repository from openBIS.")
 @click.pass_context
 @add_params(_removeref_params)
 def removeref(ctx, data_set_id, repository):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(repository_removeref, data_set_id=data_set_id, repository=repository)
 
 
@@ -696,13 +556,13 @@ _download_params = [
 @add_params(_download_params)
 @click.pass_context 
 def data_set_download(ctx, content_copy_index, file, data_set_id, skip_integrity_check):
-    data_mgmt = shared_data_mgmt(ctx.obj)
-    return check_result("download", run(ctx, lambda: data_mgmt.download(data_set_id, content_copy_index, file, skip_integrity_check)))
+    return ctx.obj['runner'].run("download", lambda dm: dm.download(data_set_id, content_copy_index, file, skip_integrity_check))
 
 @cli.command(short_help="Download files of a linked data set.")
 @add_params(_download_params)
 @click.pass_context
 def download(ctx, content_copy_index, file, data_set_id, skip_integrity_check):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(data_set_download, content_copy_index=content_copy_index, file=file, data_set_id=data_set_id, skip_integrity_check=skip_integrity_check)
 
 ## clone
@@ -718,13 +578,13 @@ _clone_move_params = [
 @click.pass_context
 @add_params(_clone_move_params)
 def data_set_clone(ctx, ssh_user, content_copy_index, data_set_id, skip_integrity_check):
-    data_mgmt = shared_data_mgmt(ctx.obj)
-    return check_result("clone", run(ctx, lambda: data_mgmt.clone(data_set_id, ssh_user, content_copy_index, skip_integrity_check)))
+    return ctx.obj['runner'].run("clone", lambda dm: dm.clone(data_set_id, ssh_user, content_copy_index, skip_integrity_check))
 
 @cli.command(short_help="Clone the repository found in the given data set id.")
 @click.pass_context
 @add_params(_clone_move_params)
 def clone(ctx, ssh_user, content_copy_index, data_set_id, skip_integrity_check):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(data_set_clone, ssh_user=ssh_user, content_copy_index=content_copy_index, data_set_id=data_set_id, skip_integrity_check=skip_integrity_check)
 
 
@@ -734,13 +594,13 @@ def clone(ctx, ssh_user, content_copy_index, data_set_id, skip_integrity_check):
 @click.pass_context
 @add_params(_clone_move_params)
 def data_set_move(ctx, ssh_user, content_copy_index, data_set_id, skip_integrity_check):
-    data_mgmt = shared_data_mgmt(ctx.obj)
-    return check_result("move", run(ctx, lambda: data_mgmt.move(data_set_id, ssh_user, content_copy_index, skip_integrity_check)))
+    return ctx.obj['runner'].run("move", lambda dm: dm.move(data_set_id, ssh_user, content_copy_index, skip_integrity_check))
 
 @cli.command(short_help="Move the repository found in the given data set id.")
 @click.pass_context
 @add_params(_clone_move_params)
 def move(ctx, ssh_user, content_copy_index, data_set_id, skip_integrity_check):
+    ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False)
     ctx.invoke(data_set_move, ssh_user=ssh_user, content_copy_index=content_copy_index, data_set_id=data_set_id, skip_integrity_check=skip_integrity_check)
 
 
diff --git a/obis/src/python/obis/scripts/click_util.py b/obis/src/python/obis/scripts/click_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..d54984ae42446806e077d3c3baba10028fa1254b
--- /dev/null
+++ b/obis/src/python/obis/scripts/click_util.py
@@ -0,0 +1,16 @@
+import click
+from datetime import datetime
+
+def click_echo(message, with_timestamp=True):
+    if with_timestamp:
+        timestamp = datetime.now().strftime("%H:%M:%S")
+        click.echo("{} {}".format(timestamp, message))
+    else:
+        click.echo(message)
+
+def check_result(command, result):
+    if result.failure():
+        click_echo("Could not {}:\n{}".format(command, result.output))
+    elif len(result.output) > 0:
+        click_echo(result.output)
+    return result.returncode
diff --git a/obis/src/python/obis/scripts/data_mgmt_runner.py b/obis/src/python/obis/scripts/data_mgmt_runner.py
new file mode 100644
index 0000000000000000000000000000000000000000..205c78499f62b05f0ba0c9dead8a779d4be4b6eb
--- /dev/null
+++ b/obis/src/python/obis/scripts/data_mgmt_runner.py
@@ -0,0 +1,114 @@
+import click
+import json
+import os
+import sys
+
+from datetime import datetime
+
+from .. import dm
+from ..dm.utils import cd
+from ..dm.command_result import CommandResult, CommandException
+from ..dm.command_log import CommandLog
+from ..dm.utils import run_shell
+from .click_util import click_echo, check_result
+
+
+class DataMgmtRunner(object):
+
+
+    def __init__(self, context, halt_on_error_log=True):
+        self.context = context
+        self.halt_on_error_log = halt_on_error_log
+        self.data_path = None
+        self.metadata_path = None
+        self.invocation_path = os.getcwd()
+
+
+    def init_paths(self, repository=None):
+        if self.data_path is not None and self.metadata_path is not None:
+            return
+        # data path
+        self.data_path = run_shell(['pwd'], raise_exception_on_failure=True).output
+        if repository is not None:
+            self.data_path = os.path.join(self.data_path, repository)
+        # metadata path
+        self.metadata_path = self.data_path
+        obis_metadata_folder = self.get_settings_resolver(do_cd=False).config.config_dict().get('obis_metadata_folder')
+        if obis_metadata_folder is not None:
+            result = self._validate_obis_metadata_folder(obis_metadata_folder)
+            if result.failure():
+                click_echo(result.output)
+            else:
+                self.metadata_path = os.path.join(obis_metadata_folder, self.data_path[1:])
+        if not os.path.exists(self.metadata_path):
+            os.makedirs(self.metadata_path)
+        if not os.path.exists(self.data_path):
+            os.makedirs(self.data_path)
+
+
+    def _validate_obis_metadata_folder(self, obis_metadata_folder):
+        if not os.path.isabs(obis_metadata_folder):
+            return CommandResult(
+                returncode=-1, 
+                output="Ignoring obis_metadata_folder. Must be absolute but is: {}".format(obis_metadata_folder))
+        if not os.path.exists(obis_metadata_folder):
+            return CommandResult(
+                returncode=-1, 
+                output="Ignoring obis_metadata_folder. Folder does not exist: {}".format(obis_metadata_folder))
+        return CommandResult(returncode=0, output="")
+
+
+    def run(self, command, function, repository=None):
+        self.init_paths(repository)
+        with cd(self.metadata_path):
+            result = self._run(function)
+        return check_result(command, result)
+
+
+    def _run(self, function):
+        try:
+            return function(self._get_dm())
+        except CommandException as e:
+            return e.command_result
+        except Exception as e:
+            if self.context['debug'] == True:
+                raise e
+            return CommandResult(returncode=-1, output="Error: " + str(e))
+
+
+    def get_settings(self, repository=None):
+        self.init_paths()
+        with cd(self.metadata_path):
+            return self.get_settings_resolver().config_dict()
+
+
+    def get_settings_resolver(self, do_cd=True):
+        if do_cd:
+            self.init_paths()
+            with cd(self.metadata_path):
+                return self._get_dm().get_settings_resolver()
+        else:
+            return self._get_dm().get_settings_resolver()
+
+
+    def config(self, resolver, is_global, is_data_set_property, prop, value, set, get, clear):
+        self.init_paths()
+        with cd(self.metadata_path):
+            self._get_dm().config(resolver, is_global, is_data_set_property, prop, value, set, get, clear)
+
+
+    def _get_dm(self):
+        git_config = {
+                'find_git': True,
+                'data_path': self.data_path,
+                'metadata_path': self.metadata_path,
+                'invocation_path': self.invocation_path
+            }
+        openbis_config = {}
+        if self.context.get('verify_certificates') is not None:
+            openbis_config['verify_certificates'] = self.context['verify_certificates']
+        log = CommandLog()
+        if self.halt_on_error_log and log.any_log_exists():
+            click_echo("Error: A previous command did not finish. Please check the log ({}) and remove it when you want to continue using obis".format(log.folder_path))
+            sys.exit(-1)
+        return dm.DataMgmt(openbis_config=openbis_config, git_config=git_config, log=log, debug=self.context['debug'])
diff --git a/obis/src/vagrant/initialize/setup_general.sh b/obis/src/vagrant/initialize/setup_general.sh
index cf0f8e8a1a6fb75ca9745fd3eaa7d0d5b47abd54..b1afa4b9677c9f2256bbd73bae0b1d32ef148309 100755
--- a/obis/src/vagrant/initialize/setup_general.sh
+++ b/obis/src/vagrant/initialize/setup_general.sh
@@ -12,3 +12,5 @@ sudo yum -y install vim
 sudo yum -y install nano
 sudo yum -y install nmap
 sudo yum -y install net-tools
+
+echo 'export PYTHONWARNINGS="ignore:Unverified HTTPS request"' | sudo tee -a /etc/profile