# NOTE(review): "Newer" / "Older" were git-blame UI residue from the paste,
# commented out so they cannot raise NameError at import time.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
pybis.py
"""
import base64
import json
import os
import re
import threading
import time
import zlib
from collections import namedtuple
from datetime import datetime
from queue import Queue
from threading import Thread
from urllib.parse import urlparse

import pandas as pd
import requests
from pandas import DataFrame, Series
from requests.packages.urllib3.exceptions import InsecureRequestWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
DROPBOX_PLUGIN = "jupyter-uploader-api"
# Per-entity metadata used when building create/update requests:
#   attrs      - attribute names exposed on the entity
#   ids2type   - maps *Ids fields to the @type of their id objects
#   identifier - name of the id field in API requests
#   multi      - attributes that hold lists of references
# NOTE(review): a couple of lines of the "Experiment" and "Project" entries were
# lost to git-blame residue in this chunk — confirm against upstream pybis.
entities = {
    "Sample": {
        "attrs": "code permId identifier type parents children attachments space experiment container components tags ".split(),
        "ids2type": {
            'parentIds': {'permId': {'@type': 'as.dto.sample.id.SamplePermId'}},
            'childIds': {'permId': {'@type': 'as.dto.sample.id.SamplePermId'}},
            'componentIds': {'permId': {'@type': 'as.dto.sample.id.SamplePermId'}},
        },
        "identifier": "sampleId",
        "cre_type": "as.dto.sample.create.SampleCreation",
        "multi": "parents children components tags".split(),
    },
    "Experiment": {
        "attrs": "code permId identifier type space project tags attachments".split(),
        "multi": "tags".split(),
        "identifier": "experimentId",
    },
    "Project": {
        "attrs": "code description permId identifier space attachments".split(),
        "multi": "tags".split(),
        "identifier": "projectId",
    },
    "DataSet": {
        "autoGeneratedCode": True,
        "attrs": "experiment sample parents children container components tags".split(),
        "ids2type": {
            'parentIds': {'permId': {'@type': 'as.dto.dataset.id.DataSetPermId'}},
            'childIds': {'permId': {'@type': 'as.dto.dataset.id.DataSetPermId'}},
        },
        "multi": [],
        "identifier": "dataSetId",
    },
    "attr2ids": {
        "sample": "sampleId",
        "experiment": "experimentId",
        "space": "spaceId",
        "container": "containerId",
        "component": "componentId",
        "components": "componentIds",
        "parents": "parentIds",
        "children": "childIds",
        "tags": "tagIds",
    },
    "ids2type": {
        'spaceId': {'permId': {'@type': 'as.dto.space.id.SpacePermId'}},
        'projectId': {'permId': {'@type': 'as.dto.project.id.ProjectPermId'}},
        'experimentId': {'permId': {'@type': 'as.dto.experiment.id.ExperimentPermId'}},
        'tagIds': {'code': {'@type': 'as.dto.tag.id.TagCode'}},
    },
}
# Maps entity names to their v3 API search-criteria @type strings.
# NOTE(review): the dict's assignment line was lost in this chunk; the name
# search_criteria is inferred from its use in _subcriteria_for_code — confirm.
search_criteria = {
    "space": "as.dto.space.search.SpaceSearchCriteria",
    "project": "as.dto.project.search.ProjectSearchCriteria",
    "experiment": "as.dto.experiment.search.ExperimentSearchCriteria",
    "sample": "as.dto.sample.search.SampleSearchCriteria",
    "dataset": "as.dto.dataset.search.DataSetSearchCriteria",
    "code": "as.dto.common.search.CodeSearchCriteria",
    "sample_type": "as.dto.sample.search.SampleTypeSearchCriteria",
}
# v3 API fetch options, keyed by the name used in request fetchOptions.
# NOTE(review): the dict's assignment line was lost in this chunk; the name
# fetch_option follows the pybis convention — confirm against upstream.
fetch_option = {
    "space": {"@type": "as.dto.space.fetchoptions.SpaceFetchOptions"},
    "project": {"@type": "as.dto.project.fetchoptions.ProjectFetchOptions"},
    "experiment": {"@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions"},
    "sample": {"@type": "as.dto.sample.fetchoptions.SampleFetchOptions"},
    "samples": {"@type": "as.dto.sample.fetchoptions.SampleFetchOptions"},
    "dataSets": {
        "@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions",
        "properties": {"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"},
        "type": {"@type": "as.dto.dataset.fetchoptions.DataSetTypeFetchOptions"},
    },
    "physicalData": {"@type": "as.dto.dataset.fetchoptions.PhysicalDataFetchOptions"},
    "linkedData": {"@type": "as.dto.dataset.fetchoptions.LinkedDataFetchOptions"},
    "properties": {"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"},
    "propertyAssignments": {
        "@type": "as.dto.property.fetchoptions.PropertyAssignmentFetchOptions",
        "propertyType": {
            "@type": "as.dto.property.fetchoptions.PropertyTypeFetchOptions"
        }
    },
    "tags": {"@type": "as.dto.tag.fetchoptions.TagFetchOptions"},
    "registrator": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
    "modifier": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
    "leader": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
    "attachments": {"@type": "as.dto.attachment.fetchoptions.AttachmentFetchOptions"},
    "attachmentsWithContent": {
        "@type": "as.dto.attachment.fetchoptions.AttachmentFetchOptions",
        "content": {
            "@type": "as.dto.common.fetchoptions.EmptyFetchOptions"
        },
    },
    "history": {"@type": "as.dto.history.fetchoptions.HistoryEntryFetchOptions"},
    "dataStore": {"@type": "as.dto.datastore.fetchoptions.DataStoreFetchOptions"},
}
def parse_jackson(input_json):
    """openBIS uses a library called «jackson» to automatically generate the JSON RPC output.
    Objects that are found the first time are added an attribute «@id».
    Any further findings only carry this reference id.
    This function is used to dereference the output (in place).
    """
    # NOTE(review): part of this list was lost in extraction (only git-blame
    # line-number residue survived); the entries below are the ones still
    # visible — confirm the full list against upstream pybis.
    interesting = ['tags', 'registrator', 'modifier', 'type', 'parents',
                   'children', 'containers', 'properties', 'experiment', 'sample',
                   ]
    found = {}

    def build_cache(graph):
        # first pass: remember every dict carrying an '@id' under an
        # "interesting" key, so integer references can be resolved later
        if isinstance(graph, list):
            for item in graph:
                build_cache(item)
        elif isinstance(graph, dict) and len(graph) > 0:
            for key, value in graph.items():
                if key in interesting:
                    if isinstance(value, dict):
                        if '@id' in value:
                            found[value['@id']] = value
                        build_cache(value)
                    elif isinstance(value, list):
                        for item in value:
                            if isinstance(item, dict):
                                if '@id' in item:
                                    found[item['@id']] = item
                                build_cache(item)
                elif isinstance(value, dict):
                    build_cache(value)
                elif isinstance(value, list):
                    build_cache(value)

    def deref_graph(graph):
        # second pass: replace bare integer references with the cached dicts
        if isinstance(graph, list):
            for i, list_item in enumerate(graph):
                if isinstance(list_item, int):
                    graph[i] = found[list_item]
                else:
                    deref_graph(list_item)
        elif isinstance(graph, dict) and len(graph) > 0:
            for key, value in graph.items():
                if key in interesting:
                    if isinstance(value, dict):
                        deref_graph(value)
                    elif isinstance(value, int):
                        graph[key] = found[value]
                    elif isinstance(value, list):
                        for i, list_item in enumerate(value):
                            if isinstance(list_item, int):
                                # unknown references are left as-is
                                if list_item in found:
                                    value[i] = found[list_item]
                                else:
                                    value[i] = list_item
                elif isinstance(value, dict):
                    deref_graph(value)
                elif isinstance(value, list):
                    deref_graph(value)

    build_cache(input_json)
    deref_graph(input_json)
def check_datatype(type_name, value):
    """Return True if *value* matches the openBIS property datatype *type_name*.

    Unknown datatypes are accepted (returns True).
    """
    expected = {
        'INTEGER': int,
        'BOOLEAN': bool,
        'VARCHAR': str,
    }.get(type_name)
    if expected is None:
        return True
    return isinstance(value, expected)
# (git-blame residue "Swen Vermeul / committed" removed — not Python code)
def is_identifier(ident):
    """Return True if *ident* looks like an openBIS identifier, e.g. /TEST/TEST-SAMPLE.

    Identifiers always start with a slash; permIds never do.
    """
    # re.match anchors at the start of the string, so this tests the prefix
    return re.match('/', ident) is not None
def is_permid(ident):
    """Return True if *ident* looks like an openBIS permId, e.g. 20160101-123."""
    # raw string: '\d' and '\-' are invalid escapes in a plain string literal
    # (DeprecationWarning/SyntaxWarning on modern Python)
    return re.match(r'^\d+\-\d+$', ident) is not None
def search_request_for_identifier(ident, entity):
    """Build an Identifier or PermId id-dict for *ident* of the given *entity* kind.

    Identifiers (starting with '/') are uppercased; permIds are passed through.
    """
    if is_identifier(ident):
        return {
            "identifier": ident.upper(),
            "@type": "as.dto.{}.id.{}Identifier".format(entity.lower(), entity.capitalize())
        }
    return {
        "permId": ident,
        "@type": "as.dto.{}.id.{}PermId".format(entity.lower(), entity.capitalize())
    }
def table_for_attributes(attributes):
    """Render a dict of attributes as a two-column HTML table (for Jupyter display)."""
    header = '<table border="1" class="dataframe"><thead><tr style="text-align: right;"> <th>attribute</th> <th>value</th> </tr> </thead><tbody>'
    rows = ''.join(
        '<tr><th>{}</th><td>{}</td></tr>'.format(key, val)
        for key, val in attributes.items()
    )
    return header + rows + '</tbody></table>'
def format_timestamp(ts):
return datetime.fromtimestamp(round(ts/1000)).strftime('%Y-%m-%d %H:%M:%S')
if not isinstance(obj, dict):
return str(obj)
def extract_deletion(obj):
    """Flatten a deletion result into a list of {reason, permId, type} dicts."""
    reason = obj['reason']
    return [
        {
            "reason": reason,
            "permId": deleted["id"]["permId"],
            "type": deleted["id"]["@type"],
        }
        for deleted in obj['deletedObjects']
    ]
def extract_identifier(ident):
    """Return the 'identifier' value of an identifier dict, or str() of anything else."""
    if isinstance(ident, dict):
        return ident['identifier']
    return str(ident)
def extract_nested_identifier(ident):
    """Return the doubly-nested identifier string, or str() of a non-dict input."""
    if isinstance(ident, dict):
        return ident['identifier']['identifier']
    return str(ident)
def extract_permid(permid):
    """Return the 'permId' value of a permId dict, or str() of anything else."""
    if isinstance(permid, dict):
        return permid['permId']
    return str(permid)
def extract_nested_permid(permid):
    """Return the doubly-nested permId string, or str() of a non-dict input."""
    if isinstance(permid, dict):
        return permid['permId']['permId']
    return str(permid)
def extract_property_assignments(pas):
    """Return the property labels (or raw values) from a list of property assignments."""
    labels = []
    for assignment in pas:
        prop = assignment['propertyType']
        labels.append(prop['label'] if isinstance(prop, dict) else prop)
    return labels
def extract_person(person):
    """Format a person dict as 'First Last <email>' (email omitted when absent or empty)."""
    # use != rather than the original identity check `is not ''` — comparing
    # string *values* with `is` relies on CPython interning and raises a
    # SyntaxWarning on modern Python
    if 'email' in person and person['email'] != '':
        return "%s %s <%s>" % (person['firstName'], person['lastName'], person['email'])
    return "%s %s" % (person['firstName'], person['lastName'])
def extract_properties(prop):
    """Join a properties dict into a single 'key: value; key: value' string.

    Non-dict input yields None (implicit), matching the original behaviour.
    """
    if isinstance(prop, dict):
        return "; ".join("%s: %s" % (key, value) for key, value in prop.items())
def extract_tags(tags):
    """Return the tag codes from a single tag dict or a list of tag dicts."""
    if isinstance(tags, dict):
        tags = [tags]
    return [tag["code"] for tag in tags]
def extract_attachments(attachments):
    """Return the fileName of every attachment dict in *attachments*."""
    return [attachment['fileName'] for attachment in attachments]
def signed_to_unsigned(sig_int):
    """openBIS delivers crc32 checksums as signed integers.
    If the number is negative, we just have to add 2**32
    We display the hex number to match with the classic UI
    """
    unsigned = sig_int + 2 ** 32 if sig_int < 0 else sig_int
    return format(unsigned & 0xFFFFFFFF, 'x')
def crc32(fileName):
    """since Python3 the zlib module returns unsigned integers (2.7: signed int)
    """
    # NOTE(review): the original `def` line was lost in this chunk; the name
    # crc32(fileName) is inferred from the body — confirm against upstream pybis.
    prev = 0
    # running checksum: feed each line through zlib.crc32 with the previous value
    for eachLine in open(fileName, "rb"):
        prev = zlib.crc32(eachLine, prev)
    # return as hex
    return "%x" % (prev & 0xFFFFFFFF)
def _create_tagIds(tags=None):
if tags is None:
return None
tagIds = []
for tag in tags:
tagIds.append({ "code": tag, "@type": "as.dto.tag.id.TagCode" })
return tagIds
def _tagIds_for_tags(tags=None, action='Add'):
"""creates an action item to add or remove tags. Action is either 'Add', 'Remove' or 'Set'
"""
if tags is None:
return
if not isinstance(tags, list):
tags = [tags]
items = []
for tag in tags:
items.append({
"code": tag,
"@type": "as.dto.tag.id.TagCode"
})
tagIds = {
"actions": [
{
"items": items,
"@type": "as.dto.common.update.ListUpdateAction{}".format(action.capitalize())
}
],
"@type": "as.dto.common.update.IdListUpdateValue"
}
def _list_update(ids=None, entity=None, action='Add'):
"""creates an action item to add, set or remove ids.
"""
if ids is None:
return
if not isinstance(ids, list):
ids = [ids]
items = []
for ids in ids:
items.append({
"code": ids,
"@type": "as.dto.{}.id.{}Code".format(entity.lower(), entity)
})
list_update = {
"actions": [
{
"items": items,
"@type": "as.dto.common.update.ListUpdateAction{}".format(action.capitalize())
}
],
"@type": "as.dto.common.update.IdListUpdateValue"
}
return list_update
def _create_typeId(type):
return {
"permId": type.upper(),
"@type": "as.dto.entitytype.id.EntityTypePermId"
}
def _create_projectId(ident):
match = re.match('/', ident)
if match:
return {
"identifier": ident,
"@type": "as.dto.project.id.ProjectIdentifier"
}
else:
return {
"permId": ident,
"@type": "as.dto.project.id.ProjectPermId"
}
def _common_search(search_type, value, comparison="StringEqualToValue"):
sreq = {
"@type": search_type,
"fieldValue": {
"value": value,
"@type": "as.dto.common.search.{}".format(comparison)
}
}
return sreq
def _criteria_for_code(code):
return {
"fieldValue": {
"value": code.upper(),
"@type": "as.dto.common.search.StringEqualToValue"
},
"@type": "as.dto.common.search.CodeSearchCriteria"
}
def _subcriteria_for_type(code, entity):
return {
"@type": "as.dto.{}.search.{}TypeSearchCriteria".format(entity.lower(), entity),
"criteria": [
{
"@type": "as.dto.common.search.CodeSearchCriteria",
"fieldValue": {
"value": code.upper(),
"@type": "as.dto.common.search.StringEqualToValue"
}
}
]
}
def _split_identifier(ident):
bla = []
bla=ident.upper().split("/")
results = {}
try:
if bla[0] == '':
bla.pop(0)
if bla[-1] == '':
bla.pop(-1)
results["space"] = bla.pop(0)
results["code"] = bla.pop(-1)
results["experiment"] = bla.pop(0)
except Exception:
pass
return results
def _gen_search_request(req):
    """Recursively translate a shorthand request dict into a v3 SearchCriteria tree.

    Recognized keys in *req*:
      - "criteria":   list of nested shorthand dicts, translated recursively
      - "code"/"permid": becomes a one-element criteria list
      - "identifier": split into space + code subcriteria
      - "operator":   logical operator, uppercased
      - anything else: treated as (entity, Name) and becomes the @type string
    Keys are processed in insertion order; later keys can overwrite "criteria".
    """
    sreq = {}
    for key, val in req.items():
        if key == "criteria":
            items = []
            for item in req['criteria']:
                items.append(_gen_search_request(item))
            sreq['criteria'] = items
        elif key == "code":
            # openBIS codes are stored uppercase
            sreq["criteria"] = [_common_search(
                "as.dto.common.search.CodeSearchCriteria", val.upper()
            )]
        elif key == "permid":
            sreq["criteria"] = [_common_search(
                "as.dto.common.search.PermIdSearchCriteria", val
            )]
        elif key == "identifier":
            # break a /SPACE/CODE identifier into separate space + code criteria
            si = _split_identifier(val)
            sreq["criteria"] = []
            if "space" in si:
                sreq["criteria"].append(
                    _gen_search_request({ "space": "Space", "code": si["space"] })
                )
            if "experiment" in si:
                # NOTE(review): experiment part deliberately ignored here —
                # reason not evident from this chunk; confirm upstream.
                pass
            if "code" in si:
                sreq["criteria"].append(
                    _common_search(
                        "as.dto.common.search.CodeSearchCriteria", si["code"].upper()
                    )
                )
        elif key == "operator":
            sreq["operator"] = val.upper()
        else:
            # e.g. {"space": "Space"} -> "as.dto.space.search.SpaceSearchCriteria"
            sreq["@type"] = "as.dto.{}.search.{}SearchCriteria".format(key, val)
    return sreq
# NOTE(review): lines 518-561 of the original file were lost in extraction;
# only git-blame line-number residue remained in this span and was removed.
def _subcriteria_for_tags(tags):
if not isinstance(tags, list):
tags = [tags]
criterias = []
for tag in tags:
criterias.append({
"fieldName": "code",
"fieldType": "ATTRIBUTE",
"fieldValue": {
"value": tag,
"@type": "as.dto.common.search.StringEqualToValue"
},
"@type": "as.dto.common.search.CodeSearchCriteria"
})
return {
"@type": "as.dto.tag.search.TagSearchCriteria",
"operator": "AND",
"criteria": criterias
}
def _subcriteria_for_is_finished(is_finished):
return {
"@type": "as.dto.common.search.StringPropertySearchCriteria",
"fieldName": "FINISHED_FLAG",
"fieldType": "PROPERTY",
"fieldValue": {
"value": is_finished,
"@type": "as.dto.common.search.StringEqualToValue"
}
}
def _subcriteria_for_properties(prop, val):
return {
"@type": "as.dto.common.search.StringPropertySearchCriteria",
"fieldName": prop.upper(),
"fieldType": "PROPERTY",
"fieldValue": {
"value": val,
"@type": "as.dto.common.search.StringEqualToValue"
}
}
def _subcriteria_for_permid(permids, entity, parents_or_children=''):
if not isinstance(permids, list):
permids = [permids]
criterias = []
for permid in permids:
criterias.append( {
"@type": "as.dto.common.search.PermIdSearchCriteria",
"fieldValue": {
"value": permid,
"@type": "as.dto.common.search.StringEqualToValue"
},
"fieldType": "ATTRIBUTE",
"fieldName": "code"
} )
criteria = {
"criteria": criterias,
"@type": "as.dto.{}.search.{}{}SearchCriteria".format(
entity.lower(), entity, parents_or_children
),
"operator": "OR"
}
return criteria
def _subcriteria_for_code(code, object_type):
    """Build a search subcriteria dict matching *code* (permId or code) for *object_type*.

    NOTE(review): this function was partially garbled in this chunk; the
    criteria assembly and return statement are reconstructed from the surviving
    fragments (fieldname/fieldtype selection, code.upper(), search_criteria
    lookup) — confirm against upstream pybis.
    """
    if is_permid(code):
        fieldname = "permId"
        fieldtype = "as.dto.common.search.PermIdSearchCriteria"
    else:
        fieldname = "code"
        fieldtype = "as.dto.common.search.CodeSearchCriteria"
    criteria = {
        "criteria": [
            {
                "fieldName": fieldname,
                "fieldType": "ATTRIBUTE",
                "fieldValue": {
                    "value": code.upper(),
                    "@type": "as.dto.common.search.StringEqualToValue"
                },
                "@type": fieldtype
            }
        ],
        "@type": search_criteria[object_type.lower()],
        "operator": "AND"
    }
    return criteria
class Openbis:
    """Interface for communicating with openBIS. A current version of openBIS is needed.
    (minimum version 16.05).
    """
def __init__(self, url='https://localhost:8443', verify_certificates=True, token=None):
"""Initialize a new connection to an openBIS server.
"""
url_obj = urlparse(url)
if url_obj.netloc is None:
raise ValueError("please provide the url in this format: https://openbis.host.ch:8443")
self.url_obj = url_obj
self.url = url_obj.geturl()
self.port = url_obj.port
self.hostname = url_obj.hostname
self.as_v3 = '/openbis/openbis/rmi-application-server-v3.json'
self.as_v1 = '/openbis/openbis/rmi-general-information-v1.json'
self.reg_v1 = '/openbis/openbis/rmi-query-v1.json'
Chandrasekhar Ramakrishnan
committed
self.verify_certificates = verify_certificates
Swen Vermeul
committed
self.token = token
self.dataset_types = None
self.sample_types = None
Swen Vermeul
committed
self.files_in_wsp = []
Swen Vermeul
committed
self.token_path = None
# use an existing token, if available
if self.token is None:
@property
def spaces(self):
    """All spaces visible to the current session (shortcut for get_spaces())."""
    return self.get_spaces()
@property
def projects(self):
    """All projects visible to the current session (shortcut for get_projects())."""
    return self.get_projects()
def _get_cached_token(self):
    """Read the token from the cache, and set the token ivar to it, if there, otherwise None.
    If the token is not valid anymore, delete it.
    """
    # NOTE(review): the `def` line was lost in this chunk; the name
    # _get_cached_token is inferred from the docstring and login flow — confirm.
    token_path = self.gen_token_path()
    if not os.path.exists(token_path):
        return None
    try:
        with open(token_path) as f:
            token = f.read()
            if not self.is_token_valid(token):
                # stale token: remove the cache file so it is not retried
                os.remove(token_path)
                return None
            else:
                return token
    except FileNotFoundError:
        return None
# (git-blame residue removed)
def gen_token_path(self, parent_folder=None):
    """generates a path to the token file.
    The token is usually saved in a file called
    ~/.pybis/hostname.token
    """
    if parent_folder is None:
        # save token under ~/.pybis folder
        parent_folder = os.path.join(
            os.path.expanduser("~"),
            '.pybis'
        )
    path = os.path.join(parent_folder, self.hostname + '.token')
    return path
# (git-blame residue removed)
def save_token(self, token=None, parent_folder=None):
    """ saves the session token to the disk, usually here: ~/.pybis/hostname.token. When a new Openbis instance is created, it tries to read this saved token by default.
    """
    if token is None:
        token = self.token

    if parent_folder is None:
        token_path = self.gen_token_path()
    else:
        token_path = self.gen_token_path(parent_folder)
    # create the necessary directories, if they don't exist yet
    os.makedirs(os.path.dirname(token_path), exist_ok=True)
    with open(token_path, 'w') as f:
        f.write(token)
    # remember where we stored it so delete_token() can find it later
    self.token_path = token_path
def delete_token(self, token_path=None):
    """Remove the cached token file from disk (defaults to self.token_path)."""
    if token_path is None:
        token_path = self.token_path
    os.remove(token_path)
def _post_request(self, resource, data):
    """ internal method, used to handle all post requests and serializing / deserializing
    data
    """
    # every JSON-RPC request needs an id and a version field
    if "id" not in data:
        data["id"] = "1"
    if "jsonrpc" not in data:
        data["jsonrpc"] = "2.0"

    resp = requests.post(
        self.url + resource,
        json.dumps(data),
        verify=self.verify_certificates
    )
    # NOTE(review): the `if resp.ok:` guard around the body below was lost in
    # this chunk; reconstructed so the trailing else is reachable — confirm.
    if resp.ok:
        data = resp.json()
        if 'error' in data:
            raise ValueError('an error has occured: ' + data['error']['message'])
        elif 'result' in data:
            return data['result']
        else:
            raise ValueError('request did not return either result nor error')
    else:
        raise ValueError('general error while performing post request')
def logout(self):
    """ Log out of openBIS. After logout, the session token is no longer valid.
    """
    # NOTE(review): the `def` line and closing docstring quotes were lost in
    # this chunk; reconstructed from the surviving body — confirm upstream.
    if self.token is None:
        return

    logout_request = {
        "method": "logout",
        "params": [self.token],
    }
    resp = self._post_request(self.as_v3, logout_request)
    # invalidate the local session state regardless of the server response
    self.token = None
    self.token_path = None
# (git-blame residue removed)
def login(self, username=None, password=None, save_token=False):
    """Log into openBIS.
    Expects a username and a password and updates the token (session-ID).
    The token is then used for every request.
    Clients may want to store the credentials object in a credentials store after successful login.
    Throw a ValueError with the error message if login failed.
    """
    login_request = {
        "method": "login",
        "params": [username, password],
    }
    result = self._post_request(self.as_v3, login_request)
    if result is None:
        raise ValueError("login to openBIS failed")
    else:
        self.token = result
        if save_token:
            # persist the token so later Openbis() instances can reuse it
            self.save_token()
        return self.token
def get_datastores(self):
    """ Get a list of all available datastores. Usually there is only one, but in some cases
    there might be more. If you upload a file, you need to specifiy the datastore you want
    the file uploaded to.
    """
    # NOTE(review): self.datastores is not initialized in the visible __init__ —
    # confirm it is set elsewhere, otherwise len() below raises AttributeError.
    if len(self.datastores) == 0:
        request = {
            "method": "listDataStores",
            "params": [ self.token ],
        }
        # datastore listing still goes through the v1 API
        resp = self._post_request(self.as_v1, request)
        if resp is not None:
            # cache the result; only these three columns are of interest
            self.datastores = DataFrame(resp)[['code','downloadUrl', 'hostUrl']]
            return self.datastores
        else:
            raise ValueError("No datastore found!")
    else:
        # already fetched earlier — return the cached DataFrame
        return self.datastores
def get_spaces(self, code=None):
    """ Get a list of all available spaces (DataFrame object). To create a sample or a
    dataset, you need to specify in which space it should live.
    """
    # NOTE(review): the `code` parameter is unused in the surviving body and
    # the opening of the request dict was lost in this chunk; the method name
    # "searchSpaces" is reconstructed from the v3 API — confirm upstream.
    criteria = {}
    options = {}
    request = {
        "method": "searchSpaces",
        "params": [ self.token,
                    criteria,
                    options,
                    ],
    }
    resp = self._post_request(self.as_v3, request)
    if resp is not None:
        spaces = DataFrame(resp['objects'])
        spaces['registrationDate'] = spaces['registrationDate'].map(format_timestamp)
        spaces['modificationDate'] = spaces['modificationDate'].map(format_timestamp)
        sp = Things(
            self,
            'space',
            spaces[['code', 'description', 'registrationDate', 'modificationDate']]
        )
        return sp
def get_space(self, spaceId):
    """ Returns a Space object for a given identifier (spaceId).
    """
    request = {
        "method": "getSpaces",
        "params": [
            self.token,
            # list of space ids to fetch (here: exactly one permId)
            [{
                "@id": 0,
                "permId": spaceId,
                "@type": "as.dto.space.id.SpacePermId"
            }],
            # fetch options: registrator/samples/projects are requested empty
            # NOTE(review): both objects carry "@id": 0 — confirm this is
            # intended jackson reference numbering.
            {
                "@id": 0,
                "@type": "as.dto.space.fetchoptions.SpaceFetchOptions",
                "registrator": None,
                "samples": None,
                "projects": None,
                "sort": None
            }
        ],
    }
    resp = self._post_request(self.as_v3, request)
    if len(resp) == 0:
        raise ValueError("No such space: %s" % spaceId)
    # getSpaces returns a map keyed by the requested id
    return Space(self, resp[spaceId])
# (git-blame residue removed)
def get_samples(self, code=None, permId=None, space=None, project=None, experiment=None, type=None,
                withParents=None, withChildren=None, tags=None, **properties):
    """ Get a list of all samples for a given space/project/experiment (or any combination)
    """
    sub_criteria = []
    if space:
        sub_criteria.append(_gen_search_request({
            "space": "Space",
            "operator": "AND",
            "code": space
        }))
    # NOTE(review): the `if project:` / `if experiment:` guards around the two
    # blocks below were lost in this chunk; reconstructed — confirm upstream.
    if project:
        # nest the project criteria inside an experiment criteria
        exp_crit = _subcriteria_for_code(experiment, 'experiment')
        proj_crit = _subcriteria_for_code(project, 'project')
        exp_crit['criteria'] = []
        exp_crit['criteria'].append(proj_crit)
        sub_criteria.append(exp_crit)
    if experiment:
        sub_criteria.append(_subcriteria_for_code(experiment, 'experiment'))
    if properties is not None:
        for prop in properties:
            sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))
    if type:
        sub_criteria.append(_subcriteria_for_code(type, 'sample_type'))
    if tags:
        sub_criteria.append(_subcriteria_for_tags(tags))
    if code:
        sub_criteria.append(_criteria_for_code(code))
    if permId:
        sub_criteria.append(_common_search("as.dto.common.search.PermIdSearchCriteria", permId))
    if withParents:
        if not isinstance(withParents, list):
            withParents = [withParents]
        for parent in withParents:
            sub_criteria.append(
                _gen_search_request({
                    "sample": "SampleParents",
                    "identifier": parent
                })
            )
    if withChildren:
        if not isinstance(withChildren, list):
            withChildren = [withChildren]
        for child in withChildren:
            sub_criteria.append(
                _gen_search_request({
                    "sample": "SampleChildren",
                    "identifier": child
                })
            )

    criteria = {
        "criteria": sub_criteria,
        "@type": "as.dto.sample.search.SampleSearchCriteria",
        "operator": "AND"
    }

    # NOTE(review): the opening of this options dict was lost — reconstructed.
    options = {
        "properties": { "@type": "as.dto.property.fetchoptions.PropertyFetchOptions" },
        "tags": { "@type": "as.dto.tag.fetchoptions.TagFetchOptions" },
        "registrator": { "@type": "as.dto.person.fetchoptions.PersonFetchOptions" },
        "modifier": { "@type": "as.dto.person.fetchoptions.PersonFetchOptions" },
        "experiment": { "@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions" },
        "type": { "@type": "as.dto.sample.fetchoptions.SampleTypeFetchOptions" },
        "@type": "as.dto.sample.fetchoptions.SampleFetchOptions",
    }

    request = {
        "method": "searchSamples",
        "params": [ self.token,
                    criteria,
                    options,
                    ],
    }
    resp = self._post_request(self.as_v3, request)
    if resp is not None:
        objects = resp['objects']
        parse_jackson(objects)
        samples = DataFrame(objects)
        # use == rather than `is` for the int comparison — `is 0` only worked
        # by way of CPython small-int interning (SyntaxWarning on 3.8+)
        if len(samples) == 0:
            raise ValueError("No samples found!")
        samples['registrationDate'] = samples['registrationDate'].map(format_timestamp)
        samples['modificationDate'] = samples['modificationDate'].map(format_timestamp)
        samples['registrator'] = samples['registrator'].map(extract_person)
        samples['modifier'] = samples['modifier'].map(extract_person)
        samples['identifier'] = samples['identifier'].map(extract_identifier)
        samples['permId'] = samples['permId'].map(extract_permid)
        samples['experiment'] = samples['experiment'].map(extract_nested_identifier)
        samples['sample_type'] = samples['type'].map(extract_nested_permid)
        ss = samples[['identifier', 'permId', 'experiment', 'sample_type', 'registrator', 'registrationDate', 'modifier', 'modificationDate']]
        return Things(self, 'sample', ss, 'identifier')
    else:
        raise ValueError("No samples found!")
def get_experiments(self, code=None, type=None, space=None, project=None, tags=None, is_finished=None, **properties):
""" Get a list of all experiment for a given space or project (or any combination)
"""
sub_criteria = []
if space:
sub_criteria.append(_subcriteria_for_code(space, 'space'))
sub_criteria.append(_subcriteria_for_code(project, 'project'))
sub_criteria.append(_criteria_for_code(code))
if type:
sub_criteria.append(_subcriteria_for_type(type, 'Experiment'))
if tags:
sub_criteria.append(_subcriteria_for_tags(tags))
if is_finished is not None:
sub_criteria.append(_subcriteria_for_is_finished(is_finished))
if properties is not None:
for prop in properties:
sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))
criteria = {
"criteria": sub_criteria,
"@type": "as.dto.experiment.search.ExperimentSearchCriteria",
"operator": "AND"
}
options = {
"properties": { "@type": "as.dto.property.fetchoptions.PropertyFetchOptions" },
"tags": { "@type": "as.dto.tag.fetchoptions.TagFetchOptions" },
"registrator": { "@type": "as.dto.person.fetchoptions.PersonFetchOptions" },
"modifier": { "@type": "as.dto.person.fetchoptions.PersonFetchOptions" },
"project": { "@type": "as.dto.project.fetchoptions.ProjectFetchOptions" },
"type": { "@type": "as.dto.experiment.fetchoptions.ExperimentTypeFetchOptions" },
"@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions"