#!/usr/bin/env python
    # -*- coding: utf-8 -*-
    
    """
    pybis.py
    
    
    Swen Vermeul's avatar
    Swen Vermeul committed
    Work with openBIS from Python.
    
    import requests
    
    from requests.packages.urllib3.exceptions import InsecureRequestWarning
    
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    
    
    import os
    import json
    import re
    
    from urllib.parse import urlparse, urljoin, quote
    
    import zlib
    
    from texttable import Texttable
    
    from tabulate import tabulate
    
    
    from pybis.utils import parse_jackson, check_datatype, split_identifier, format_timestamp, is_identifier, is_permid, nvl
    
    from pybis.property import PropertyHolder, PropertyAssignments
    from pybis.masterdata import Vocabulary
    
    import pandas as pd
    from pandas import DataFrame, Series
    
    
    import threading
    from threading import Thread
    from queue import Queue
    
    PYBIS_PLUGIN = "dataset-uploader-api"
    
    def _definitions(entity):
        entities = {
            "Space": {
                "attrs_new": "code description".split(),
                "attrs_up": "description".split(),
    
                "attrs": "code permId description registrator registrationDate modificationDate".split(),
    
            },
            "Project": {
                "attrs_new": "code description space attachments".split(),
                "attrs_up": "description space attachments".split(),
                "attrs": "code description permId identifier space leader registrator registrationDate modifier modificationDate attachments".split(),
                "multi": "".split(),
                "identifier": "projectId",
            },
            "Experiment": {
    
                "attrs_new": "code type project tags attachments".split(),
                "attrs_up": "project tags attachments".split(),
                "attrs": "code permId identifier type project tags attachments".split(),
    
                "multi": "tags attachments".split(),
                "identifier": "experimentId",
            },
            "Sample": {
                "attrs_new": "code type parents children space experiment tags attachments".split(),
                "attrs_up": "parents children space experiment tags attachments".split(),
    
                "attrs": "code permId identifier type parents children components space experiment tags attachments".split(),
    
                    'parentIds': {'permId': {'@type': 'as.dto.sample.id.SamplePermId'}},
                    'childIds': {'permId': {'@type': 'as.dto.sample.id.SamplePermId'}},
                    'componentIds': {'permId': {'@type': 'as.dto.sample.id.SamplePermId'}},
    
                },
                "identifier": "sampleId",
                "cre_type": "as.dto.sample.create.SampleCreation",
    
                "multi": "parents children components tags attachments".split(),
    
                "attrs_new": "type experiment sample parents children components tags".split(),
                "attrs_up": "parents children experiment sample components tags".split(),
                "attrs": "code permId type experiment sample parents children components tags accessDate dataProducer dataProductionDate registrator registrationDate modifier modificationDate dataStore measured".split(),
    
                    'parentIds': {'permId': {'@type': 'as.dto.dataset.id.DataSetPermId'}},
                    'childIds': {'permId': {'@type': 'as.dto.dataset.id.DataSetPermId'}},
                    'componentIds': {'permId': {'@type': 'as.dto.dataset.id.DataSetPermId'}},
                    'containerIds': {'permId': {'@type': 'as.dto.dataset.id.DataSetPermId'}},
                },
                "multi": "parents children container".split(),
            },
            "Material": {
                "attrs_new": "code description type creation tags".split(),
    
                "attrs": "code description type creation registrator tags".split()
    
            },
            "Tag": {
                "attrs_new": "code description experiments samples dataSets materials".split(),
                "attrs": "code description experiments samples dataSets materials registrationDate".split(),
            },
            "attr2ids": {
                "space": "spaceId",
                "project": "projectId",
                "sample": "sampleId",
                "samples": "sampleIds",
                "dataSet": "dataSetId",
                "dataSets": "dataSetIds",
                "experiment": "experimentId",
    
                "experiments": "experimentIds",
    
                "material": "materialId",
                "materials": "materialIds",
                "container": "containerId",
                "component": "componentId",
                "components": "componentIds",
                "parents": "parentIds",
                "children": "childIds",
                "tags": "tagIds",
    
                'spaceId': {'permId': {'@type': 'as.dto.space.id.SpacePermId'}},
                'projectId': {'permId': {'@type': 'as.dto.project.id.ProjectPermId'}},
                'experimentId': {'permId': {'@type': 'as.dto.experiment.id.ExperimentPermId'}},
                'tagIds': {'code': {'@type': 'as.dto.tag.id.TagCode'}},
            },
        }
        return entities[entity]
    
    search_criteria = {
    
        "space": "as.dto.space.search.SpaceSearchCriteria",
        "project": "as.dto.project.search.ProjectSearchCriteria",
    
        "experiment": "as.dto.experiment.search.ExperimentSearchCriteria",
    
        "sample": "as.dto.sample.search.SampleSearchCriteria",
        "dataset": "as.dto.dataset.search.DataSetSearchCriteria",
        "code": "as.dto.common.search.CodeSearchCriteria",
        "sample_type": "as.dto.sample.search.SampleTypeSearchCriteria",
    
    Swen Vermeul's avatar
    Swen Vermeul committed
    fetch_option = {
    
        "space": {"@type": "as.dto.space.fetchoptions.SpaceFetchOptions"},
        "project": {"@type": "as.dto.project.fetchoptions.ProjectFetchOptions"},
        "experiment": {
    
            "@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions",
    
            "type": {"@type": "as.dto.experiment.fetchoptions.ExperimentTypeFetchOptions"}
    
            "@type": "as.dto.sample.fetchoptions.SampleFetchOptions",
    
            "type": {"@type": "as.dto.sample.fetchoptions.SampleTypeFetchOptions"}
    
        "samples": {"@type": "as.dto.sample.fetchoptions.SampleFetchOptions"},
        "dataSets": {
    
            "@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions",
    
            "properties": {"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"},
            "type": {"@type": "as.dto.dataset.fetchoptions.DataSetTypeFetchOptions"},
    
        "physicalData": {"@type": "as.dto.dataset.fetchoptions.PhysicalDataFetchOptions"},
    
        "linkedData": {
            "externalDms": {"@type": "as.dto.externaldms.fetchoptions.ExternalDmsFetchOptions"},
            "@type": "as.dto.dataset.fetchoptions.LinkedDataFetchOptions"
        },
    
        "properties": {"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"},
        "propertyAssignments": {
            "@type": "as.dto.property.fetchoptions.PropertyAssignmentFetchOptions",
    
                "@type": "as.dto.property.fetchoptions.PropertyTypeFetchOptions",
                "vocabulary": {
                    "@type": "as.dto.vocabulary.fetchoptions.VocabularyFetchOptions",
                }
            }
        },
        "tags": {"@type": "as.dto.tag.fetchoptions.TagFetchOptions"},
    
        "registrator": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
        "modifier": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
        "leader": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
    
        "attachments": {"@type": "as.dto.attachment.fetchoptions.AttachmentFetchOptions"},
    
        "attachmentsWithContent": {
            "@type": "as.dto.attachment.fetchoptions.AttachmentFetchOptions",
            "content": {
                "@type": "as.dto.common.fetchoptions.EmptyFetchOptions"
            },
        },
    
        "history": {"@type": "as.dto.history.fetchoptions.HistoryEntryFetchOptions"},
        "dataStore": {"@type": "as.dto.datastore.fetchoptions.DataStoreFetchOptions"},
    
    def search_request_for_identifier(ident, entity):
        if is_identifier(ident):
            search_request = {
                "identifier": ident.upper(),
                "@type": "as.dto.{}.id.{}Identifier".format(entity.lower(), entity.capitalize())
            }
        else:
            search_request = {
                "permId": ident,
                "@type": "as.dto.{}.id.{}PermId".format(entity.lower(), entity.capitalize())
            }
        return search_request
    
    def extract_code(obj):
        if not isinstance(obj, dict):
            return str(obj)
        return obj['code']
    
    def extract_deletion(obj):
        del_objs = []
        for deleted_object in obj['deletedObjects']:
            del_objs.append({
                "reason": obj['reason'],
                "permId": deleted_object["id"]["permId"],
                "type": deleted_object["id"]["@type"]
            })
        return del_objs
    
    
    def extract_identifier(ident):
    
        if not isinstance(ident, dict):
    
            return str(ident)
        return ident['identifier']
    
    
    def extract_nested_identifier(ident):
    
        if not isinstance(ident, dict):
    
            return str(ident)
        return ident['identifier']['identifier']
    
    
    def extract_permid(permid):
        if not isinstance(permid, dict):
            return str(permid)
        return permid['permId']
    
    
    def extract_nested_permid(permid):
        if not isinstance(permid, dict):
            return str(permid)
        return permid['permId']['permId']
    
    
    def extract_property_assignments(pas):
        pa_strings = []
        for pa in pas:
            if not isinstance(pa['propertyType'], dict):
                pa_strings.append(pa['propertyType'])
            else:
                pa_strings.append(pa['propertyType']['label'])
        return pa_strings
    
    
    def extract_person(person):
        if not isinstance(person, dict):
            return str(person)
        return person['userId']
    
    def crc32(fileName):
        """since Python3 the zlib module returns unsigned integers (2.7: signed int)
        """
        prev = 0
        for eachLine in open(fileName, "rb"):
            prev = zlib.crc32(eachLine, prev)
        # return as hex
        return "%x" % (prev & 0xFFFFFFFF)
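
    # Hedged usage sketch (not part of the original module): demonstrates the lowercase
    # hex string returned by crc32(). The temporary file and its content are illustrative
    # assumptions only.
    def _example_crc32_usage():
        import tempfile
        data = b"hello openBIS\n"
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            tmp.write(data)
            path = tmp.name
        checksum = crc32(path)
        # the per-line running checksum equals the checksum of the whole file content
        assert int(checksum, 16) == (zlib.crc32(data) & 0xFFFFFFFF)
        os.remove(path)
        return checksum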
    
    
    def _create_tagIds(tags=None):
        if tags is None:
            return None
    
        if not isinstance(tags, list):
            tags = [tags]
    
        tagIds = []
        for tag in tags:
    
            tagIds.append({"code": tag, "@type": "as.dto.tag.id.TagCode"})
        return tagIds

    def _tagIds_for_tags(tags=None, action='Add'):
        """creates an action item to add or remove tags. Action is either 'Add', 'Remove' or 'Set'
        """
        if tags is None:
            return
        if not isinstance(tags, list):
            tags = [tags]
    
        items = []
        for tag in tags:
            items.append({
                "code": tag,
                "@type": "as.dto.tag.id.TagCode"
            })
    
        tagIds = {
            "actions": [
                {
                    "items": items,
                    "@type": "as.dto.common.update.ListUpdateAction{}".format(action.capitalize())
                }
            ],
            "@type": "as.dto.common.update.IdListUpdateValue"
        }
    
        return tagIds
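
    # Hedged usage sketch (not part of the original module): shows the ListUpdateAction
    # structure that _tagIds_for_tags() builds; the tag code 'EXAMPLE_TAG' is an
    # illustrative assumption.
    def _example_tagIds_for_tags():
        update = _tagIds_for_tags('EXAMPLE_TAG', action='Add')
        # expected shape:
        # {
        #     "actions": [{
        #         "items": [{"code": "EXAMPLE_TAG", "@type": "as.dto.tag.id.TagCode"}],
        #         "@type": "as.dto.common.update.ListUpdateActionAdd"
        #     }],
        #     "@type": "as.dto.common.update.IdListUpdateValue"
        # }
        return update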
    
    
    def _list_update(ids=None, entity=None, action='Add'):
        """creates an action item to add, set or remove ids. 
        """
        if ids is None:
            return
        if not isinstance(ids, list):
            ids = [ids]
    
        items = []
        for id in ids:
            items.append({
                "code": id,
                "@type": "as.dto.{}.id.{}Code".format(entity.lower(), entity)
            })
    
        list_update = {
            "actions": [
                {
                    "items": items,
                    "@type": "as.dto.common.update.ListUpdateAction{}".format(action.capitalize())
                }
            ],
            "@type": "as.dto.common.update.IdListUpdateValue"
        }
        return list_update

    
    def _create_typeId(type):
        return {
            "permId": type.upper(),
            "@type": "as.dto.entitytype.id.EntityTypePermId"
        }
    
    
    def _create_projectId(ident):
        match = re.match('/', ident)
        if match:
            return {
                "identifier": ident,
                "@type": "as.dto.project.id.ProjectIdentifier"
            }
        else:
            return {
                "permId": ident,
                "@type": "as.dto.project.id.ProjectPermId"
            }
    
    
    def _create_experimentId(ident):
        return {
            "identifier": ident,
            "@type": "as.dto.experiment.id.ExperimentIdentifier"
        }
    
    
    
    def _common_search(search_type, value, comparison="StringEqualToValue"):
        sreq = {
            "@type": search_type,
            "fieldValue": {
                "value": value,
                "@type": "as.dto.common.search.{}".format(comparison)
            }
        }
        return sreq

    def _criteria_for_code(code):
        return {
            "fieldValue": {
    
                "@type": "as.dto.common.search.StringEqualToValue"
            },
            "@type": "as.dto.common.search.CodeSearchCriteria"
        }
    
    
    def _subcriteria_for_type(code, entity):
        return {
            "@type": "as.dto.{}.search.{}TypeSearchCriteria".format(entity.lower(), entity),
            "criteria": [
                {
                    "@type": "as.dto.common.search.CodeSearchCriteria",
                    "fieldValue": {
                        "value": code.upper(),
                        "@type": "as.dto.common.search.StringEqualToValue"
                    }
                }
            ]
        }

    def _subcriteria_for_status(status_value):
        status_value = status_value.upper()
        valid_status = "AVAILABLE LOCKED ARCHIVED UNARCHIVE_PENDING ARCHIVE_PENDING BACKUP_PENDING".split()
        if not status_value in valid_status:
            raise ValueError("status must be one of the following: " + ", ".join(valid_status))
    
        return {
            "@type": "as.dto.dataset.search.PhysicalDataSearchCriteria",
            "operator": "AND",
            "criteria": [{
                "@type":
    
                    "as.dto.dataset.search.StatusSearchCriteria",
                "fieldName": "status",
    
                "fieldType": "ATTRIBUTE",
    
    def _gen_search_criteria(req):
    
        sreq = {}
        for key, val in req.items():
            if key == "criteria":
                items = []
                for item in req['criteria']:
    
                    items.append(_gen_search_criteria(item))
    
                sreq['criteria'] = items
            elif key == "code":
    
                sreq["criteria"] = [_common_search(
                    "as.dto.common.search.CodeSearchCriteria", val.upper()
                )]
            elif key == "identifier":
    
                if is_identifier(val):
                    # if we have an identifier, we need to search in Space and Code separately
                    si = split_identifier(val)
                    sreq["criteria"] = []
                    if "space" in si:
                        sreq["criteria"].append(
                            _gen_search_criteria({"space": "Space", "code": si["space"]})
                        )
                    if "experiment" in si:
                        pass
    
                    if "code" in si:
                        sreq["criteria"].append(
                            _common_search(
                                "as.dto.common.search.CodeSearchCriteria", si["code"].upper()
                            )
                        )
                elif is_permid(val):
                    sreq["criteria"] = [_common_search(
                        "as.dto.common.search.PermIdSearchCriteria", val
                    )]
                else:
                    # we assume we just got a code
                    sreq["criteria"] = [_common_search(
                        "as.dto.common.search.CodeSearchCriteria", val.upper()
                )]
            elif key == "operator":
                sreq["operator"] = val
            else:
                sreq["@type"] = "as.dto.{}.search.{}SearchCriteria".format(key, val)
        return sreq
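
    # Hedged usage sketch (not part of the original module): illustrates the nested
    # search criteria that _gen_search_criteria() assembles; the space code 'my_space'
    # is an illustrative assumption.
    def _example_gen_search_criteria():
        criteria = _gen_search_criteria({
            "space": "Space",
            "operator": "AND",
            "code": "my_space"
        })
        # expected shape (roughly):
        # {
        #     "@type": "as.dto.space.search.SpaceSearchCriteria",
        #     "operator": "AND",
        #     "criteria": [{
        #         "@type": "as.dto.common.search.CodeSearchCriteria",
        #         "fieldValue": {"value": "MY_SPACE",
        #                        "@type": "as.dto.common.search.StringEqualToValue"}
        #     }]
        # }
        return criteria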
    
    
    def _subcriteria_for_tags(tags):
        if not isinstance(tags, list):
            tags = [tags]
    
        criterias = []
        for tag in tags:
            criterias.append({
                "fieldName": "code",
                "fieldType": "ATTRIBUTE",
                "fieldValue": {
                    "value": tag,
                    "@type": "as.dto.common.search.StringEqualToValue"
                },
                "@type": "as.dto.common.search.CodeSearchCriteria"
            })
    
        return {
            "@type": "as.dto.tag.search.TagSearchCriteria",
            "operator": "AND",
            "criteria": criterias
        }
    
    
    def _subcriteria_for_is_finished(is_finished):
        return {
            "@type": "as.dto.common.search.StringPropertySearchCriteria",
            "fieldName": "FINISHED_FLAG",
            "fieldType": "PROPERTY",
            "fieldValue": {
                "value": is_finished,
                "@type": "as.dto.common.search.StringEqualToValue"
            }
        }
    
    
    def _subcriteria_for_properties(prop, val):
        return {
            "@type": "as.dto.common.search.StringPropertySearchCriteria",
            "fieldName": prop.upper(),
            "fieldType": "PROPERTY",
            "fieldValue": {
                "value": val,
                "@type": "as.dto.common.search.StringEqualToValue"
            }
        }
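
    # Hedged usage sketch (not part of the original module): the property name and value
    # below are illustrative assumptions.
    def _example_subcriteria_for_properties():
        criterion = _subcriteria_for_properties('my_property', 'some value')
        # expected shape:
        # {
        #     "@type": "as.dto.common.search.StringPropertySearchCriteria",
        #     "fieldName": "MY_PROPERTY",
        #     "fieldType": "PROPERTY",
        #     "fieldValue": {"value": "some value",
        #                    "@type": "as.dto.common.search.StringEqualToValue"}
        # }
        return criterion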
    
    
    
    def _subcriteria_for_permid(permids, entity, parents_or_children=''):
    
        if not isinstance(permids, list):
            permids = [permids]
    
        criterias = []
        for permid in permids:
    
                "@type": "as.dto.common.search.PermIdSearchCriteria",
                "fieldValue": {
                    "value": permid,
                    "@type": "as.dto.common.search.StringEqualToValue"
                },
                "fieldType": "ATTRIBUTE",
                "fieldName": "code"
    
            "@type": "as.dto.{}.search.{}{}SearchCriteria".format(
    
                entity.lower(), entity, parents_or_children
    
    def _subcriteria_for_code(code, object_type):
    
        if code is not None:
            if is_permid(code):
                fieldname = "permId"
                fieldtype = "as.dto.common.search.PermIdSearchCriteria"
            else:
                fieldname = "code"
                fieldtype = "as.dto.common.search.CodeSearchCriteria"
    
            criteria = {
                "criteria": [
                    {
                        "fieldName": fieldname,
                        "fieldType": "ATTRIBUTE",
                        "fieldValue": {
                            "value": code.upper(),
                            "@type": "as.dto.common.search.StringEqualToValue"
                        },
                        "@type": fieldtype
                    }
                ],
                "@type": search_criteria[object_type.lower()],
                "operator": "AND"
            }
            return criteria
        else:
            criteria = {"@type": search_criteria[object_type.lower()]}
            return criteria

    

    class Openbis:
        """Interface for communicating with openBIS.
        A recent version of openBIS is required (minimum 16.05.2).

        For creation of datasets, dataset-uploader-api needs to be installed.
        """
        def __init__(self, url, verify_certificates=True, token=None):
            """Initialize a new connection to an openBIS server.

            :param host:
            """

            url_obj = urlparse(url)
            if not url_obj.netloc:
                raise ValueError("please provide the url in this format: https://openbis.host.ch:8443")
    
            if url_obj.hostname is None:
                raise ValueError("hostname is missing")
    
            self.url = url_obj.geturl()
            self.port = url_obj.port
    
            self.hostname = url_obj.hostname
    
            self.as_v3 = '/openbis/openbis/rmi-application-server-v3.json'
            self.as_v1 = '/openbis/openbis/rmi-general-information-v1.json'
            self.reg_v1 = '/openbis/openbis/rmi-query-v1.json'
            self.verify_certificates = verify_certificates
            self.token = token

            self.dataset_types = None
            self.sample_types = None
    
            self.token_path = None
    
            # use an existing token, if available
            if self.token is None:
    
                self.token = self._get_cached_token()
    
        def __dir__(self):
            return [
                'url', 'port', 'hostname',
                'login()', 'logout()', 'is_session_active()', 'token', 'is_token_valid("")',
                "get_dataset('permId')",
                "get_datasets()",
                "get_dataset_type('raw_data')",
                "get_dataset_types()",
                "get_datastores()",
                "get_deletions()",
                "get_experiment('permId', withAttachments=False)",
                "get_experiments()",
                "get_experiment_type('type')",
                "get_experiment_types()",
    
                "get_external_data_management_system(permId)",
    
                "get_material_type('type')",
                "get_material_types()",
                "get_project('project')",
                "get_projects(space=None, code=None)",
                "get_sample('id')",
    
                "get_object('id')", # "get_sample('id')" alias
    
                "get_objects()", # "get_samples()" alias
    
                "get_object_type(type))", # "get_sample_type(type))" alias
    
                "get_object_types()", # "get_sample_types()" alias
    
                "get_semantic_annotations()",
                "get_semantic_annotation(permId, only_data = False)",
    
                "get_spaces()",
                "get_tags()",
                "get_terms()",
                'new_space(name, description)',
    
                'new_project(space, code, description, attachments)',
    
                'new_experiment(type, code, project, props={})',
    
                'new_sample(type, space, project, experiment, parents)',
                'new_object(type, space, project, experiment, parents)', # 'new_sample(type, space, project, experiment)' alias
    
                'new_dataset(type, parent, experiment, sample, files=[], folder, props={})',
    
                'new_semantic_annotation(entityType, propertyType)',
    
                'update_sample(sampleId, space, project, experiment, parents, children, components, properties, tagIds, attachments)',
                'update_object(sampleId, space, project, experiment, parents, children, components, properties, tagIds, attachments)', # update_sample() alias
            ]

        @property
        def spaces(self):
            return self.get_spaces()
    
        @property
        def projects(self):
            return self.get_projects()
    
    
        def _get_cached_token(self):
    
            """Read the token from the cache, and set the token ivar to it, if there, otherwise None.
            If the token is not valid anymore, delete it. 
            """
            token_path = self.gen_token_path()
            if not os.path.exists(token_path):
                return None
            with open(token_path) as f:
                token = f.read()
            if not self.is_token_valid(token):
                os.remove(token_path)
                return None
            return token

        def gen_token_path(self, parent_folder=None):
            """generates a path to the token file.
            The token is usually saved in a file called
            ~/.pybis/hostname.token
            """
    
                # save token under ~/.pybis folder
                parent_folder = os.path.join(
                    os.path.expanduser("~"),
                    '.pybis'
                )
            path = os.path.join(parent_folder, self.hostname + '.token')
            return path

        def save_token(self, token=None, parent_folder=None):
            """ saves the session token to the disk, usually here: ~/.pybis/hostname.token.
            When a new Openbis instance is created, it tries to read this saved token by default.
            """
            if token is None:
                token = self.token
    
            token_path = None
            if parent_folder is None:
                token_path = self.gen_token_path()
            else:
                token_path = self.gen_token_path(parent_folder)
    
            # create the necessary directories, if they don't exist yet
    
            os.makedirs(os.path.dirname(token_path), exist_ok=True)
            with open(token_path, 'w') as f:
    
                f.write(token)
                self.token_path = token_path
    
        def delete_token(self, token_path=None):
    
            """ deletes a stored session token.
            """
    
            if token_path is None:
                token_path = self.token_path
            os.remove(token_path)
    
        def _post_request(self, resource, request):
    
            """ internal method, used to handle all post requests and serializing / deserializing
            data
            """
    
            return self._post_request_full_url(urljoin(self.url,resource), request)
    
    
        def _post_request_full_url(self, full_url, request):
    
            """ internal method, used to handle all post requests and serializing / deserializing
            data
            """
    
            if "id" not in request:
                request["id"] = "1"
            if "jsonrpc" not in request:
                request["jsonrpc"] = "2.0"
    
            if request["params"][0] is None:
                raise ValueError("Your session expired, please log in again")
    
            resp = requests.post(
                full_url,
                json.dumps(request),
                verify=self.verify_certificates
            )

            if resp.ok:
                resp = resp.json()
                if 'error' in resp:
                    print(json.dumps(request))
    
                    raise ValueError(resp['error']['message'])
    
                elif 'result' in resp:
                    return resp['result']
    
                else:
                    raise ValueError('request did not return either result nor error')
            else:
                raise ValueError('general error while performing post request')
    
    
        def logout(self):
            """ Log out of openBIS. After logout, the session token is no longer valid.
            """

            logout_request = {
    
                "method": "logout",
                "params": [self.token],
    
            resp = self._post_request(self.as_v3, logout_request)
    
            return resp
    
        def login(self, username=None, password=None, save_token=False):
            """Log into openBIS.
            Expects a username and a password and updates the token (session-ID).
            The token is then used for every request.
    
            Clients may want to store the credentials object in a credentials store after successful login.
    
            Throw a ValueError with the error message if login failed.
            """
    
            if password is None:
                import getpass
                password = getpass.getpass()
    
    
            login_request = {
    
                "method": "login",
                "params": [username, password],
    
            result = self._post_request(self.as_v3, login_request)
    
            if result is None:
                raise ValueError("login to openBIS failed")
            else:
                self.token = result
    
                if save_token:
                    self.save_token()
                return self.token
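
        # Hedged usage sketch (not part of the original module), assuming a reachable
        # openBIS server at the illustrative URL below:
        #
        #   o = Openbis('https://openbis.example.com:8443', verify_certificates=False)
        #   token = o.login('username', 'password', save_token=True)
        #   # the session token is cached under ~/.pybis/<hostname>.token and is
        #   # picked up again the next time an Openbis object is created for this host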
    
        def create_permId(self):
            # Request just 1 permId
            request = {
                "method": "createPermIdStrings",
                "params": [self.token, 1],
            }
            resp = self._post_request(self.as_v3, request)
            if resp is not None:
                return resp[0]
            else:
                raise ValueError("Could not create permId")
    
        def get_datastores(self):
    
            """ Get a list of all available datastores. Usually there is only one, but in some cases
    
            there might be multiple servers. If you upload a file, you need to specify the datastore you want
    
            the file uploaded to.
            """
    
    
            request = {
                "method": "listDataStores",
    
            }
            resp = self._post_request(self.as_v1, request)
            if resp is not None:
    
                return DataFrame(resp)[['code', 'downloadUrl', 'hostUrl']]
            else:
                raise ValueError("No datastore found!")
    
        def get_spaces(self, code=None):
    
            """ Get a list of all available spaces (DataFrame object). To create a sample or a
            dataset, you need to specify in which space it should live.
            """
    
            criteria = {}
            options = {}
    
            request = {
                "method": "searchSpaces",
    
                "params": [self.token,
                           criteria,
                           options,
                           ],
    
            }
            resp = self._post_request(self.as_v3, request)
            if resp is not None:
                spaces = DataFrame(resp['objects'])
    
                spaces['registrationDate'] = spaces['registrationDate'].map(format_timestamp)
                spaces['modificationDate'] = spaces['modificationDate'].map(format_timestamp)
    
                sp = Things(
                    self,
                    'space',
                    spaces[['code', 'description', 'registrationDate', 'modificationDate']]
                )
                return sp
            else:
                raise ValueError("No spaces found!")
    
    
        def get_space(self, code, only_data=False):
            """ Returns a Space object for a given identifier.
    
    Swen Vermeul's avatar
    Swen Vermeul committed
    
    
            fetchopts = {"@type": "as.dto.space.fetchoptions.SpaceFetchOptions"}
    
            for option in ['registrator']:
                fetchopts[option] = fetch_option[option]

            request = {
                "method": "getSpaces",
                "params": [
                    self.token,
                    [{
                        "permId": code.upper(),
                        "@type": "as.dto.space.id.SpacePermId"
                    }],
                    fetchopts
                ],
            }
            resp = self._post_request(self.as_v3, request)
    
            if len(resp) == 0:
    
                raise ValueError("No such space: %s" % code)
    
            for permid in resp:
                if only_data:
                    return resp[permid]
                else:
                    return Space(self, data=resp[permid])
    
        def get_samples(self, code=None, permId=None, space=None, project=None, experiment=None, type=None,
    
                        withParents=None, withChildren=None, tags=None, props=None, **properties):
    
            """ Get a list of all samples for a given space/project/experiment (or any combination)
            """
    
            sub_criteria = []
            if space:
    
                sub_criteria.append(_gen_search_criteria({
    
                    "space": "Space",
                    "operator": "AND",
                    "code": space
    
                exp_crit = _subcriteria_for_code(experiment, 'experiment')
                proj_crit = _subcriteria_for_code(project, 'project')
    
                exp_crit['criteria'] = []
                exp_crit['criteria'].append(proj_crit)
                sub_criteria.append(exp_crit)
            if experiment:
                sub_criteria.append(_subcriteria_for_code(experiment, 'experiment'))
    
            if properties is not None:
                for prop in properties:
                    sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))
    
            if type:
                sub_criteria.append(_subcriteria_for_code(type, 'sample_type'))
    
            if tags:
                sub_criteria.append(_subcriteria_for_tags(tags))
    
            if code:
                sub_criteria.append(_criteria_for_code(code))
            if permId:
                sub_criteria.append(_common_search("as.dto.common.search.PermIdSearchCriteria", permId))
            if withParents:
                if not isinstance(withParents, list):
                    withParents = [withParents]
                for parent in withParents:
                    sub_criteria.append(
                        _gen_search_criteria({
                            "sample": "SampleParents",
                            "identifier": parent
                        })
                    )
            if withChildren:
                if not isinstance(withChildren, list):
                    withChildren = [withChildren]
                for child in withChildren:
                    sub_criteria.append(
                        _gen_search_criteria({
                            "sample": "SampleChildren",
                            "identifier": child
                        })
                    )
    
    
            criteria = {
                "criteria": sub_criteria,
                "@type": "as.dto.sample.search.SampleSearchCriteria",
                "operator": "AND"
            }
    
            # build the various fetch options
    
            fetchopts = fetch_option['sample']
    
            for option in ['tags', 'properties', 'registrator', 'modifier', 'experiment']:
                fetchopts[option] = fetch_option[option]
    
            request = {
                "method": "searchSamples",
    
                "params": [self.token,
                           criteria,
                           fetchopts,
                           ],
    
            }
            resp = self._post_request(self.as_v3, request)
            if resp is None or len(resp['objects']) == 0:
                raise ValueError("no samples found!")

            objects = resp['objects']
            parse_jackson(objects)

            samples = DataFrame(objects)
            samples['registrationDate'] = samples['registrationDate'].map(format_timestamp)
            samples['modificationDate'] = samples['modificationDate'].map(format_timestamp)
    
            samples['registrator'] = samples['registrator'].map(extract_person)
            samples['modifier'] = samples['modifier'].map(extract_person)
            samples['identifier'] = samples['identifier'].map(extract_identifier)
    
            samples['permId'] = samples['permId'].map(extract_permid)
    
            samples['experiment'] = samples['experiment'].map(extract_nested_identifier)
            samples['sample_type'] = samples['type'].map(extract_nested_permid)
    
            attrs = ['identifier', 'permId', 'experiment', 'sample_type',
    
                     'registrator', 'registrationDate', 'modifier', 'modificationDate']
    
            if props is not None:
                for prop in props:
                    samples[prop.upper()] = samples['properties'].map(lambda x: x.get(prop.upper(), ''))
                    attrs.append(prop.upper())

            ss = samples[attrs]
            return Things(self, 'sample', ss, 'identifier')
    
        get_objects = get_samples # Alias
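
        # Hedged usage sketch (not part of the original module); the space and sample
        # type codes are illustrative assumptions:
        #
        #   samples = o.get_samples(space='MY_SPACE', type='YEAST', props=['NAME'])
        #   o.get_objects(space='MY_SPACE')  # same query via the alias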
    
    
        def get_experiments(self, code=None, type=None, space=None, project=None, tags=None, is_finished=None, props=None,
                            **properties):
    
            """ Get a list of all experiment for a given space or project (or any combination)
            """
    
            sub_criteria = []
            if space:
    
                sub_criteria.append(_subcriteria_for_code(space, 'space'))
            if project:
                sub_criteria.append(_subcriteria_for_code(project, 'project'))
    
            if code:
    
                sub_criteria.append(_criteria_for_code(code))
    
            if type:
                sub_criteria.append(_subcriteria_for_type(type, 'Experiment'))
            if tags:
                sub_criteria.append(_subcriteria_for_tags(tags))
            if is_finished is not None: