# (removed web-scrape navigation artifacts "Newer" / "Older")
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
pybis.py
"""
import json
import os
import re
import threading
import time
import zlib
from collections import namedtuple
from datetime import datetime
from queue import Queue
from threading import Thread
from urllib.parse import urlparse

import pandas as pd
import requests
from pandas import DataFrame, Series
from requests.packages.urllib3.exceptions import InsecureRequestWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
DROPBOX_PLUGIN = "jupyter-uploader-api"

# Maps an entity name to the @type of its v3 search criteria.
# NOTE(review): in the scraped source this dict was fused with the
# fetch-option dict below (duplicate keys silently overwrote each other and
# `fetch_option`, referenced by get_datasets/get_experiment, was never
# defined).  Reconstructed as two separate dicts.
search_for_type = {
    "space": "as.dto.space.search.SpaceSearchCriteria",
    "project": "as.dto.project.search.ProjectSearchCriteria",
    "experiment": "as.dto.experiment.search.ExperimentSearchCriteria",
    "code": "as.dto.common.search.CodeSearchCriteria",
    "sample_type": "as.dto.sample.search.SampleTypeSearchCriteria",
}

# Maps an attribute name to the fetch options needed to retrieve it.
fetch_option = {
    "space": {"@type": "as.dto.space.fetchoptions.SpaceFetchOptions"},
    "project": {"@type": "as.dto.project.fetchoptions.ProjectFetchOptions"},
    "experiment": {"@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions"},
    "sample": {"@type": "as.dto.sample.fetchoptions.SampleFetchOptions"},
    "dataset": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
    "physicalData": {"@type": "as.dto.dataset.fetchoptions.PhysicalDataFetchOptions"},
    "linkedData": {"@type": "as.dto.dataset.fetchoptions.LinkedDataFetchOptions"},
    "properties": {"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"},
    "tags": {"@type": "as.dto.tag.fetchoptions.TagFetchOptions"},
    "registrator": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
    "modifier": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
    "leader": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
    "attachments": {"@type": "as.dto.attachment.fetchoptions.AttachmentFetchOptions"},
    "history": {"@type": "as.dto.history.fetchoptions.HistoryEntryFetchOptions"},
    "dataStore": {"@type": "as.dto.datastore.fetchoptions.DataStoreFetchOptions"},
}
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
def parse_jackson(input_json):
    """openBIS uses a library called «jackson» to automatically generate the JSON RPC output.
    Objects that are found the first time are added an attribute «@id».
    Any further findings only carry this reference id.
    This function is used to dereference the output (in place).
    """
    interesting = ['tags', 'registrator', 'modifier', 'type', 'parents',
                   'children', 'containers', 'properties', 'experiment', 'sample',
                   'project', 'space', 'propertyType']
    objects_by_id = {}

    def _collect(node):
        # pass 1: remember every object that carries an «@id»
        if isinstance(node, list):
            for element in node:
                _collect(element)
        elif isinstance(node, dict) and node:
            for key, value in node.items():
                if key in interesting:
                    if isinstance(value, dict):
                        if '@id' in value:
                            objects_by_id[value['@id']] = value
                        _collect(value)
                    elif isinstance(value, list):
                        for element in value:
                            if isinstance(element, dict):
                                if '@id' in element:
                                    objects_by_id[element['@id']] = element
                                _collect(element)
                elif isinstance(value, dict):
                    _collect(value)
                elif isinstance(value, list):
                    _collect(value)

    def _resolve(node):
        # pass 2: replace bare integer references by the cached objects
        if isinstance(node, list):
            for element in node:
                _resolve(element)
        elif isinstance(node, dict) and node:
            for key, value in node.items():
                if key in interesting:
                    if isinstance(value, dict):
                        _resolve(value)
                    elif isinstance(value, int):
                        node[key] = objects_by_id[value]
                    elif isinstance(value, list):
                        for index, element in enumerate(value):
                            if isinstance(element, int):
                                value[index] = objects_by_id[element]
                elif isinstance(value, dict):
                    _resolve(value)
                elif isinstance(value, list):
                    _resolve(value)

    _collect(input_json)
    _resolve(input_json)
def check_datatype(type_name, value):
    """Validate that *value* matches the given openBIS property datatype.

    Only INTEGER, BOOLEAN and VARCHAR are checked; any other datatype
    is accepted unchanged.
    """
    if type_name == 'INTEGER':
        # bool is a subclass of int, but a boolean is not a valid INTEGER
        return isinstance(value, int) and not isinstance(value, bool)
    if type_name == 'BOOLEAN':
        return isinstance(value, bool)
    if type_name == 'VARCHAR':
        return isinstance(value, str)
    return True
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
def search_request_for_identifier(identifier, entity_type):
    """Build a v3 API id object for *identifier*.

    Identifiers (e.g. /TEST/TEST-SAMPLE) start with a slash and are
    upper-cased; anything else is treated as a permId.
    """
    entity = entity_type.capitalize()
    # str.startswith replaces the original anchored regex re.match('/', ...);
    # same behavior, no regex machinery needed
    if identifier.startswith('/'):
        return {
            "identifier": identifier.upper(),
            "@type": "as.dto.{}.id.{}Identifier".format(entity_type.lower(), entity)
        }
    return {
        "permId": identifier,
        "@type": "as.dto.{}.id.{}PermId".format(entity_type.lower(), entity)
    }
def table_for_attributes(attributes):
    """Render an attributes dict as a two-column (attribute / value) HTML table."""
    rows = ['<tr><th>{}</th><td>{}</td></tr>'.format(key, value)
            for key, value in attributes.items()]
    header = '<table border="1" class="dataframe"><thead><tr style="text-align: right;"> <th>attribute</th> <th>value</th> </tr> </thead><tbody>'
    return header + ''.join(rows) + '</tbody></table>'
def format_timestamp(ts):
    """Convert an openBIS timestamp (milliseconds since epoch) to 'YYYY-MM-DD HH:MM:SS' (local time)."""
    seconds = round(ts / 1000)
    return datetime.fromtimestamp(seconds).strftime('%Y-%m-%d %H:%M:%S')
def extract_code(obj):
    """Return the 'code' attribute of an openBIS object dict."""
    return obj["code"]
def extract_deletion(obj):
    """Flatten a deletion record into one dict per deleted object."""
    reason = obj['reason']
    return [
        {
            "reason": reason,
            "permId": deleted["id"]["permId"],
            "type": deleted["id"]["@type"],
        }
        for deleted in obj['deletedObjects']
    ]
def extract_identifier(ident):
    """Return ident['identifier']; non-dict values are stringified."""
    if isinstance(ident, dict):
        return ident['identifier']
    return str(ident)
def extract_nested_identifier(ident):
    """Return ident['identifier']['identifier']; non-dict values are stringified."""
    if isinstance(ident, dict):
        return ident['identifier']['identifier']
    return str(ident)
def extract_permid(permid):
    """Return permid['permId']; non-dict values are stringified."""
    if isinstance(permid, dict):
        return permid['permId']
    return str(permid)
def extract_nested_permid(permid):
    """Return permid['permId']['permId']; non-dict values are stringified."""
    if isinstance(permid, dict):
        return permid['permId']['permId']
    return str(permid)
def extract_property_assignments(pas):
    """Return each property assignment's propertyType label (or the raw value if not a dict)."""
    labels = []
    for assignment in pas:
        ptype = assignment['propertyType']
        labels.append(ptype['label'] if isinstance(ptype, dict) else ptype)
    return labels
def extract_person(person):
    """Format a person dict as 'First Last <email>'; the email part is
    omitted when missing, empty or None.

    Fix: the original used ``person['email'] is not ''`` — an identity
    comparison that only worked by accident of CPython string interning,
    and which formatted a literal "None" when email was None.
    """
    name = "%s %s" % (person['firstName'], person['lastName'])
    if person.get('email'):
        return "%s <%s>" % (name, person['email'])
    return name
def extract_properties(prop):
    """Serialize a properties dict as 'key: value; key: value'.

    Non-dict input yields None (mirrors the original implicit return).
    """
    if not isinstance(prop, dict):
        return None
    return "; ".join("%s: %s" % (key, value) for key, value in prop.items())
def extract_tags(tags):
    """Return the tag codes; accepts a single tag dict or a list of them."""
    tag_list = [tags] if isinstance(tags, dict) else tags
    return [tag["code"] for tag in tag_list]
def extract_attachments(attachments):
    """Return the fileName of every attachment."""
    return [attachment['fileName'] for attachment in attachments]
def signed_to_unsigned(sig_int):
    """openBIS delivers crc32 checksums as signed 32-bit integers.
    We display the hex number to match the classic UI.

    Masking with 0xFFFFFFFF already maps a negative value onto its unsigned
    two's-complement representation in Python, so the original explicit
    ``+ 2**32`` correction was redundant and has been removed.
    """
    return "%x" % (sig_int & 0xFFFFFFFF)
"""since Python3 the zlib module returns unsigned integers (2.7: signed int)
"""
prev = 0
for eachLine in open(fileName,"rb"):
prev = zlib.crc32(eachLine, prev)
# return as hex
return "%x"%(prev & 0xFFFFFFFF)
def _create_tagIds(tags=None):
if tags is None:
return None
tagIds = []
for tag in tags:
tagIds.append({ "code": tag, "@type": "as.dto.tag.id.TagCode" })
return tagIds
def _tagIds_for_tags(tags=None, action='Add'):
"""creates an action item to add or remove tags. Action is either 'Add', 'Remove' or 'Set'
"""
if tags is None:
return
if not isinstance(tags, list):
tags = [tags]
items = []
for tag in tags:
items.append({
"code": tag,
"@type": "as.dto.tag.id.TagCode"
})
tagIds = {
"actions": [
{
"items": items,
"@type": "as.dto.common.update.ListUpdateAction{}".format(action.capitalize())
}
],
"@type": "as.dto.common.update.IdListUpdateValue"
}
return tagIds
def _create_typeId(type):
return {
"permId": type.upper(),
"@type": "as.dto.entitytype.id.EntityTypePermId"
}
def _create_projectId(ident):
match = re.match('/', ident)
if match:
return {
"identifier": ident,
"@type": "as.dto.project.id.ProjectIdentifier"
}
else:
return {
"permId": ident,
"@type": "as.dto.project.id.ProjectPermId"
}
def _criteria_for_code(code):
return {
"fieldValue": {
"value": code.upper(),
"@type": "as.dto.common.search.StringEqualToValue"
},
"@type": "as.dto.common.search.CodeSearchCriteria"
}
def _subcriteria_for_type(code, entity_type):
return {
"@type": "as.dto.{}.search.{}TypeSearchCriteria".format(entity_type.lower(), entity_type),
"criteria": [
{
"@type": "as.dto.common.search.CodeSearchCriteria",
"fieldValue": {
"value": code.upper(),
"@type": "as.dto.common.search.StringEqualToValue"
}
}
]
}
def _gen_search_request(req):
sreq = {}
for key, val in req.items():
if key == "criteria":
items = []
for item in req['criteria']:
items.append(_gen_search_request(item))
sreq['criteria'] = items
elif key == "code":
sreq["criteria"] = [{
"@type": "as.dto.common.search.CodeSearchCriteria",
"fieldName": "code",
"fieldType": "ATTRIBUTE",
"fieldValue": {
"value": val.upper(),
"@type": "as.dto.common.search.StringEqualToValue"
}
}]
elif key == "operator":
sreq["operator"] = val.upper()
else:
sreq["@type"] = "as.dto.{}.search.{}SearchCriteria".format(key, val)
return sreq
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
def _subcriteria_for_tags(tags):
if not isinstance(tags, list):
tags = [tags]
criterias = []
for tag in tags:
criterias.append({
"fieldName": "code",
"fieldType": "ATTRIBUTE",
"fieldValue": {
"value": tag,
"@type": "as.dto.common.search.StringEqualToValue"
},
"@type": "as.dto.common.search.CodeSearchCriteria"
})
return {
"@type": "as.dto.tag.search.TagSearchCriteria",
"operator": "AND",
"criteria": criterias
}
def _subcriteria_for_is_finished(is_finished):
return {
"@type": "as.dto.common.search.StringPropertySearchCriteria",
"fieldName": "FINISHED_FLAG",
"fieldType": "PROPERTY",
"fieldValue": {
"value": is_finished,
"@type": "as.dto.common.search.StringEqualToValue"
}
}
def _subcriteria_for_properties(prop, val):
return {
"@type": "as.dto.common.search.StringPropertySearchCriteria",
"fieldName": prop.upper(),
"fieldType": "PROPERTY",
"fieldValue": {
"value": val,
"@type": "as.dto.common.search.StringEqualToValue"
}
}
def _subcriteria_for_permid(permids, entity_type, parents_or_children='Parents'):
if not isinstance(permids, list):
permids = [permids]
criterias = []
for permid in permids:
criterias.append( {
"@type": "as.dto.common.search.PermIdSearchCriteria",
"fieldValue": {
"value": permid,
"@type": "as.dto.common.search.StringEqualToValue"
},
"fieldType": "ATTRIBUTE",
"fieldName": "code"
} )
criteria = {
"criteria": criterias,
"@type": "as.dto.sample.search.{}{}SearchCriteria".format(
entity_type, parents_or_children
),
"operator": "OR"
}
return criteria
def _subcriteria_for_code(code, object_type):
    """AND-criterion matching an object of *object_type* by its (upper-cased) code."""
    code_criterion = {
        "fieldName": "code",
        "fieldType": "ATTRIBUTE",
        "fieldValue": {
            "value": code.upper(),
            "@type": "as.dto.common.search.StringEqualToValue",
        },
        "@type": "as.dto.common.search.CodeSearchCriteria",
    }
    return {
        "criteria": [code_criterion],
        "@type": search_for_type[object_type],
        "operator": "AND",
    }
class Openbis:
    """Interface for communicating with openBIS. A current version of openBIS is needed.
    (minimum version 16.05).
    """
def __init__(self, url='https://localhost:8443', verify_certificates=True, token=None):
    """Initialize a new connection to an openBIS server.

    :param url: server URL, e.g. https://openbis.host.ch:8443
    :param verify_certificates: set False to accept self-signed certificates
    :param token: reuse an existing session token instead of logging in
    """
    url_obj = urlparse(url)
    # urlparse returns '' (not None) for a missing netloc, so test for falsy
    if not url_obj.netloc:
        raise ValueError("please provide the url in this format: https://openbis.host.ch:8443")

    self.url_obj = url_obj
    self.url = url_obj.geturl()
    self.port = url_obj.port
    self.hostname = url_obj.hostname
    # JSON-RPC endpoints of the openBIS application server
    self.as_v3 = '/openbis/openbis/rmi-application-server-v3.json'
    self.as_v1 = '/openbis/openbis/rmi-general-information-v1.json'
    self.reg_v1 = '/openbis/openbis/rmi-query-v1.json'
    self.verify_certificates = verify_certificates
    self.token = token
    self.dataset_types = None
    self.sample_types = None
    # NOTE(review): get_datastores() reads self.datastores; initialized here
    # as an empty list so len() works before the first fetch
    self.datastores = []
    self.files_in_wsp = []
    self.token_path = None

    # some default settings for working with samples etc.
    self.default_space = None
    self.default_project = None
    self.default_experiment = None
    self.default_sample_type = None

    # use an existing (cached) token, if available
    # NOTE(review): the scraped original was truncated after this `if`;
    # reading the cached token is the documented intent of _get_cached_token
    if self.token is None:
        self.token = self._get_cached_token()
@property
def spaces(self):
    """Shortcut property for get_spaces(): all spaces on this openBIS instance."""
    return self.get_spaces()
@property
def projects(self):
    """Shortcut property for get_projects(): all projects on this openBIS instance."""
    return self.get_projects()
def _get_cached_token(self):
    """Read the token from the cache, and set the token ivar to it, if there, otherwise None.
    If the token is not valid anymore, delete it.

    NOTE(review): the `def` line was lost in the scraped source; the name
    follows from the docstring and the call intended in __init__.
    """
    token_path = self.gen_token_path()
    if not os.path.exists(token_path):
        return None
    try:
        with open(token_path) as f:
            token = f.read()
            if not self.is_token_valid(token):
                # stale token: remove the cache file and start fresh
                os.remove(token_path)
                return None
            else:
                return token
    except FileNotFoundError:
        return None
Swen Vermeul
committed
def gen_token_path(self, parent_folder=None):
    """generates a path to the token file.
    The token is usually saved in a file called
    ~/.pybis/hostname.token
    """
    if parent_folder is None:
        # save token under ~/.pybis folder
        parent_folder = os.path.join(
            os.path.expanduser("~"),
            '.pybis'
        )
    path = os.path.join(parent_folder, self.hostname + '.token')
    return path
Swen Vermeul
committed
def save_token(self, token=None, parent_folder=None):
    """saves the session token to the disk, usually here: ~/.pybis/hostname.token.
    When a new Openbis instance is created, it tries to read this saved token by default.

    (Fix: the original docstring was unterminated and a dead
    ``token_path = None;`` pre-initialization was removed.)
    """
    if token is None:
        token = self.token
    if parent_folder is None:
        token_path = self.gen_token_path()
    else:
        token_path = self.gen_token_path(parent_folder)
    # create the necessary directories, if they don't exist yet
    os.makedirs(os.path.dirname(token_path), exist_ok=True)
    with open(token_path, 'w') as f:
        f.write(token)
    self.token_path = token_path
def delete_token(self, token_path=None):
    """Remove the cached token file from disk (defaults to the last saved path)."""
    if token_path is None:
        token_path = self.token_path
    os.remove(token_path)
def _post_request(self, resource, data):
    """ internal method, used to handle all post requests and serializing / deserializing
    data

    Raises ValueError if the HTTP request fails or openBIS returns an error.
    """
    if "id" not in data:
        data["id"] = "1"
    if "jsonrpc" not in data:
        data["jsonrpc"] = "2.0"

    resp = requests.post(
        self.url + resource,
        json.dumps(data),
        verify=self.verify_certificates
    )
    # NOTE(review): the scraped original contained a dangling second `else`,
    # most plausibly the remains of a response-status check; reconstructed here.
    if resp.ok:
        data = resp.json()
        if 'error' in data:
            raise ValueError('an error has occured: ' + data['error']['message'])
        elif 'result' in data:
            return data['result']
        else:
            raise ValueError('request did not return either result nor error')
    else:
        raise ValueError('general error while performing post request')
""" Log out of openBIS. After logout, the session token is no longer valid.
if self.token is None:
return
logout_request = {
"method":"logout",
"params":[self.token],
}
resp = self._post_request(self.as_v3, logout_request)
Swen Vermeul
committed
self.token = None
self.token_path = None
Swen Vermeul
committed
def login(self, username=None, password=None, save_token=False):
    """Log into openBIS.
    Expects a username and a password and updates the token (session-ID).
    The token is then used for every request.
    Clients may want to store the credentials object in a credentials store after successful login.
    Throw a ValueError with the error message if login failed.
    """
    login_request = {
        "method": "login",
        "params": [username, password],
    }
    result = self._post_request(self.as_v3, login_request)
    if result is None:
        raise ValueError("login to openBIS failed")
    self.token = result
    if save_token:
        # persist the session token under ~/.pybis for later reuse
        self.save_token()
    return self.token
def get_datastores(self):
    """ Get a list of all available datastores. Usually there is only one, but in some cases
    there might be more. If you upload a file, you need to specifiy the datastore you want
    the file uploaded to.
    """
    # serve the cached DataFrame if we already fetched the datastores
    if len(self.datastores) > 0:
        return self.datastores

    request = {
        "method": "listDataStores",
        "params": [self.token],
    }
    resp = self._post_request(self.as_v1, request)
    if resp is None:
        raise ValueError("No datastore found!")
    self.datastores = DataFrame(resp)[['code', 'downloadUrl', 'hostUrl']]
    return self.datastores
def get_spaces(self, code=None):
    """ Get a list of all available spaces (DataFrame object). To create a sample or a
    dataset, you need to specify in which space it should live.
    """
    criteria = {}
    options = {}
    # NOTE(review): the request header was lost in the scraped source;
    # "searchSpaces" matches the v3 API naming of the other search* calls.
    request = {
        "method": "searchSpaces",
        "params": [
            self.token,
            criteria,
            options,
        ],
    }
    resp = self._post_request(self.as_v3, request)
    if resp is not None:
        spaces = DataFrame(resp['objects'])
        spaces['registrationDate'] = spaces['registrationDate'].map(format_timestamp)
        spaces['modificationDate'] = spaces['modificationDate'].map(format_timestamp)
        return Things(
            self,
            'space',
            spaces[['code', 'description', 'registrationDate', 'modificationDate']]
        )
def get_space(self, spaceId):
    """ Returns a Space object for a given identifier (spaceId).
    """
    space_id = {
        "@id": 0,
        "permId": spaceId,
        "@type": "as.dto.space.id.SpacePermId"
    }
    fetchopts = {
        "@id": 0,
        "@type": "as.dto.space.fetchoptions.SpaceFetchOptions",
        "registrator": None,
        "samples": None,
        "projects": None,
        "sort": None
    }
    request = {
        "method": "getSpaces",
        "params": [
            self.token,
            [space_id],
            fetchopts,
        ],
    }
    resp = self._post_request(self.as_v3, request)
    if len(resp) == 0:
        raise ValueError("No such space: %s" % spaceId)
    return Space(self, resp[spaceId])
Chandrasekhar Ramakrishnan
committed
def get_samples(self, code=None, space=None, project=None, experiment=None, type=None,
                withParents=None, withChildren=None, **properties):
    """ Get a list of all samples for a given space/project/experiment (or any combination)

    NOTE(review): the criteria-building section was scrambled in the scraped
    source (defaults applied after use, a duplicated experiment criterion);
    reconstructed as: defaults first, then project nested under experiment.
    Also fixes the ``len(samples) is 0`` identity comparison.
    """
    if space is None:
        space = self.default_space
    if project is None:
        project = self.default_project
    if experiment is None:
        experiment = self.default_experiment

    sub_criteria = []
    if space:
        sub_criteria.append(_subcriteria_for_code(space, 'space'))
    if project:
        # a project is searched as a sub-criterion of its experiment
        exp_crit = _subcriteria_for_code(experiment, 'experiment')
        proj_crit = _subcriteria_for_code(project, 'project')
        exp_crit['criteria'] = [proj_crit]
        sub_criteria.append(exp_crit)
    elif experiment:
        sub_criteria.append(_subcriteria_for_code(experiment, 'experiment'))
    if properties is not None:
        for prop in properties:
            sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))
    if type:
        sub_criteria.append(_subcriteria_for_code(type, 'sample_type'))
    if code:
        sub_criteria.append(_criteria_for_code(code))
    if withParents:
        sub_criteria.append(_subcriteria_for_permid(withParents, 'Sample', 'Parents'))
    if withChildren:
        sub_criteria.append(_subcriteria_for_permid(withChildren, 'Sample', 'Children'))

    criteria = {
        "criteria": sub_criteria,
        "@type": "as.dto.sample.search.SampleSearchCriteria",
        "operator": "AND"
    }
    options = {
        "properties": {"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"},
        "tags": {"@type": "as.dto.tag.fetchoptions.TagFetchOptions"},
        "registrator": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
        "modifier": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
        "experiment": {"@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions"},
        "type": {"@type": "as.dto.sample.fetchoptions.SampleTypeFetchOptions"},
        "@type": "as.dto.sample.fetchoptions.SampleFetchOptions",
    }
    request = {
        "method": "searchSamples",
        "params": [self.token, criteria, options],
    }
    resp = self._post_request(self.as_v3, request)
    if resp is None:
        raise ValueError("No samples found!")

    objects = resp['objects']
    parse_jackson(objects)
    samples = DataFrame(objects)
    if len(samples) == 0:
        raise ValueError("No samples found!")
    samples['registrationDate'] = samples['registrationDate'].map(format_timestamp)
    samples['modificationDate'] = samples['modificationDate'].map(format_timestamp)
    samples['registrator'] = samples['registrator'].map(extract_person)
    samples['modifier'] = samples['modifier'].map(extract_person)
    samples['identifier'] = samples['identifier'].map(extract_identifier)
    samples['experiment'] = samples['experiment'].map(extract_nested_identifier)
    samples['sample_type'] = samples['type'].map(extract_nested_permid)
    ss = samples[['code', 'identifier', 'experiment', 'sample_type', 'registrator',
                  'registrationDate', 'modifier', 'modificationDate']]
    return Things(self, 'sample', ss, 'identifier')
def get_experiments(self, code=None, type=None, space=None, project=None, tags=None, is_finished=None, **properties):
    """ Get a list of all experiment for a given space or project (or any combination)

    NOTE(review): the scraped source appended project/code criteria
    unconditionally; guards reconstructed to match every other filter here.
    """
    if space is None:
        space = self.default_space
    if project is None:
        project = self.default_project

    sub_criteria = []
    if space:
        sub_criteria.append(_subcriteria_for_code(space, 'space'))
    if project:
        sub_criteria.append(_subcriteria_for_code(project, 'project'))
    if code:
        sub_criteria.append(_criteria_for_code(code))
    if type:
        sub_criteria.append(_subcriteria_for_type(type, 'Experiment'))
    if tags:
        sub_criteria.append(_subcriteria_for_tags(tags))
    if is_finished is not None:
        sub_criteria.append(_subcriteria_for_is_finished(is_finished))
    if properties is not None:
        for prop in properties:
            sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))

    criteria = {
        "criteria": sub_criteria,
        "@type": "as.dto.experiment.search.ExperimentSearchCriteria",
        "operator": "AND"
    }
    options = {
        "properties": {"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"},
        "tags": {"@type": "as.dto.tag.fetchoptions.TagFetchOptions"},
        "registrator": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
        "modifier": {"@type": "as.dto.person.fetchoptions.PersonFetchOptions"},
        "project": {"@type": "as.dto.project.fetchoptions.ProjectFetchOptions"},
        "type": {"@type": "as.dto.experiment.fetchoptions.ExperimentTypeFetchOptions"},
        "@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions"
    }
    request = {
        "method": "searchExperiments",
        "params": [self.token, criteria, options],
    }
    resp = self._post_request(self.as_v3, request)
    if len(resp['objects']) == 0:
        raise ValueError("No experiments found!")

    objects = resp['objects']
    parse_jackson(objects)
    experiments = DataFrame(objects)
    experiments['registrationDate'] = experiments['registrationDate'].map(format_timestamp)
    experiments['modificationDate'] = experiments['modificationDate'].map(format_timestamp)
    experiments['project'] = experiments['project'].map(extract_code)
    experiments['registrator'] = experiments['registrator'].map(extract_person)
    experiments['modifier'] = experiments['modifier'].map(extract_person)
    experiments['identifier'] = experiments['identifier'].map(extract_identifier)
    experiments['type'] = experiments['type'].map(extract_code)
    exps = experiments[['code', 'identifier', 'project', 'type', 'registrator',
                        'registrationDate', 'modifier', 'modificationDate']]
    return Things(self, 'experiment', exps, 'identifier')
def get_datasets(self, code=None, type=None, withParents=None, withChildren=None):
    """Search datasets by code, type and/or parent-child relations; returns a Things object."""
    sub_criteria = []
    if code:
        sub_criteria.append(_criteria_for_code(code))
    if type:
        sub_criteria.append(_subcriteria_for_type(type, 'DataSet'))
    if withParents:
        sub_criteria.append(_subcriteria_for_permid(withParents, 'DataSet', 'Parents'))
    if withChildren:
        sub_criteria.append(_subcriteria_for_permid(withChildren, 'DataSet', 'Children'))

    criteria = {
        "criteria": sub_criteria,
        "@type": "as.dto.dataset.search.DataSetSearchCriteria",
        "operator": "AND"
    }
    fetchopts = {
        # "parents": { "@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions" },
        # "children": { "@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions" },
        "containers": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
        "type": {"@type": "as.dto.dataset.fetchoptions.DataSetTypeFetchOptions"},
    }
    for option in ['tags', 'properties', 'sample']:
        fetchopts[option] = fetch_option[option]

    request = {
        "method": "searchDataSets",
        "params": [self.token, criteria, fetchopts],
    }
    resp = self._post_request(self.as_v3, request)
    if resp is None:
        return None

    objects = resp['objects']
    parse_jackson(objects)
    datasets = DataFrame(objects)
    datasets['registrationDate'] = datasets['registrationDate'].map(format_timestamp)
    datasets['modificationDate'] = datasets['modificationDate'].map(format_timestamp)
    datasets['sample'] = datasets['sample'].map(extract_nested_identifier)
    datasets['type'] = datasets['type'].map(extract_code)
    return Things(
        self,
        'dataset',
        datasets[['code', 'properties', 'type', 'sample', 'registrationDate', 'modificationDate']]
    )
def get_experiment(self, expId):
    """ Returns an experiment object for a given identifier (expId).
    """
    fetchopts = {
        "@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions"
    }
    search_request = search_request_for_identifier(expId, 'experiment')
    for option in ['tags', 'properties', 'attachments', 'project']:
        fetchopts[option] = fetch_option[option]

    request = {
        "method": "getExperiments",
        "params": [
            self.token,
            [search_request],
            fetchopts,
        ],
    }
    resp = self._post_request(self.as_v3, request)
    if len(resp) == 0:
        raise ValueError("No such experiment: %s" % expId)
    return Experiment(self, resp[expId])
def new_experiment(self, project_ident, code, type, properties=None, attachments=None, tags=None):
    """Create a new experiment on the server and return it as an Experiment object."""
    tagIds = _create_tagIds(tags)
    typeId = _create_typeId(type)
    projectId = _create_projectId(project_ident)
    if properties is None:
        properties = {}

    request = {
        "method": "createExperiments",
        "params": [
            self.token,
            [
                {
                    "properties": properties,
                    "code": code,
                    "typeId": typeId,
                    "projectId": projectId,
                    "tagIds": tagIds,
                    "attachments": attachments,
                    "@type": "as.dto.experiment.create.ExperimentCreation",
                }
            ]
        ],
    }
    resp = self._post_request(self.as_v3, request)
    # the server answers with the permId of the freshly created experiment
    return self.get_experiment(resp[0]['permId'])
def update_experiment(self, experimentId, properties=None, tagIds=None, attachments=None):
    """Update properties, tags and/or attachments of an existing experiment (by permId)."""
    update = {
        "experimentId": {
            "permId": experimentId,
            "@type": "as.dto.experiment.id.ExperimentPermId"
        },
        "@type": "as.dto.experiment.update.ExperimentUpdate"
    }
    # only include the update fields that were actually supplied
    for field, value in (("properties", properties),
                         ("tagIds", tagIds),
                         ("attachments", attachments)):
        if value is not None:
            update[field] = value

    request = {
        "method": "updateExperiments",
        "params": [
            self.token,
            [update]
        ]
    }
    self._post_request(self.as_v3, request)
def create_sample(self, space_ident, code, type,
project_ident=None, experiment_ident=None, properties=None, attachments=None, tags=None):
tagIds = _create_tagIds(tags)
typeId = _create_typeId(type)
projectId = _create_projectId(project_ident)
experimentId = _create_experimentId(experiment_ident)
if properties is None:
properties = {}
request = {
"method": "createSamples",
"params": [
self.token,
[
{
"properties": properties,
"code": code,
"typeId" : typeId,