there might be multiple servers. If you upload a file, you need to specify the datastore you want
the file to be uploaded to.
"""
request = {
"method": "listDataStores",
"params": [self.token],
}
resp = self._post_request(self.as_v1, request)
if resp is not None:
return DataFrame(resp)[['code', 'downloadUrl', 'hostUrl']]
raise ValueError("No datastore found!")
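# Usage sketch for get_datastores (illustrative; `o` stands for an authenticated Openbis instance):
#   datastores = o.get_datastores()   # DataFrame with the columns code, downloadUrl, hostUrl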
def new_person(self, userId, space=None):
""" creates an openBIS person
"""
try:
person = self.get_person(userId=userId)
except Exception:
return Person(self, userId=userId, space=space)
raise ValueError(
"There already exists a user with userId={}".format(userId)
)
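# Usage sketch for new_person (illustrative; the userId and space values are made up):
#   person = o.new_person(userId='jdoe', space='MY_SPACE')
#   # raises ValueError if a user with that userId already exists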
def new_group(self, code, description=None, users=None):
""" creates an openBIS person
"""
return Group(self, code=code, description=description, users=users)
def get_group(self, groupId, only_data=False):
""" Get an openBIS AuthorizationGroup. Returns a Group object.
ids = [{
"@type": "as.dto.authorizationgroup.id.AuthorizationGroupPermId",
"permId": groupId
}]
fetchopts = {}
for option in ['roleAssignments', 'users', 'registrator']:
fetchopts[option] = fetch_option[option]
request = {
"method": "getAuthorizationGroups",
"params": [
self.token,
ids,
fetchopts
]
}
resp = self._post_request(self.as_v3, request)
if len(resp) == 0:
raise ValueError("No group found!")
for permid in resp:
group = resp[permid]
parse_jackson(group)
if only_data:
return group
else:
return Group(self, data=group)
def get_assigned_roles(self, **search_args):
""" Get the assigned roles for a given group, person or space
"""
search_criteria = get_search_type_for_entity('roleAssignment', 'AND')
allowed_search_attrs = ['role', 'roleLevel', 'user', 'group', 'space']
sub_crit = []
for attr in search_args:
if attr in allowed_search_attrs:
if attr == 'space':
sub_crit.append(
_subcriteria_for_code(search_args[attr], 'space')
)
elif attr == 'user':
userId = ''
if isinstance(search_args[attr], str):
userId = search_args[attr]
else:
userId = search_args[attr].userId
sub_crit.append(
_subcriteria_for_userId(userId)
)
elif attr == 'group':
sub_crit.append(
_subcriteria_for_permid(search_args[attr], 'AuthorizationGroup')
)
elif attr == 'role':
# TODO
raise ValueError("not yet implemented")
elif attr == 'roleLevel':
# TODO
raise ValueError("not yet implemented")
else:
pass
else:
raise ValueError("unknown search argument {}".format(search_arg))
search_criteria['criteria'] = sub_crit
fetchopts = {}
for option in ['roleAssignments', 'space', 'user', 'authorizationGroup','registrator']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchRoleAssignments",
"params": [
self.token,
search_criteria,
fetchopts
]
}
resp = self._post_request(self.as_v3, request)
if len(resp['objects']) == 0:
raise ValueError("No assigned roles found!")
objects = resp['objects']
parse_jackson(objects)
roles = DataFrame(objects)
roles['id'] = roles['id'].map(extract_id)
roles['user'] = roles['user'].map(extract_userId)
roles['group'] = roles['authorizationGroup'].map(extract_code)
roles['space'] = roles['space'].map(extract_code)
p = Things(
self, entity='role',
df=roles[['id', 'role', 'roleLevel', 'user', 'group', 'space']],
identifier_name='permId'
)
return p
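# Usage sketch for get_assigned_roles (illustrative; the search values are made up):
#   roles = o.get_assigned_roles(space='MY_SPACE')   # roles assigned on a space
#   roles = o.get_assigned_roles(user='jdoe')        # accepts a userId string or a Person object
#   roles.df                                         # DataFrame view of the Things object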
def get_groups(self, **search_args):
""" Get openBIS AuthorizationGroups. Returns a «Things» object.
Usage::
groups = e.get.groups()
groups[0] # select first group
groups['GROUP_NAME'] # select group with this code
for group in groups:
... # a Group object
groups.df # get a DataFrame object of the group list
print(groups) # print a nice ASCII table (e.g. in IPython)
groups # HTML table (in a Jupyter notebook)
"""
criteria = []
for search_arg in ['code', 'description', 'registrator']:
if search_arg in search_args:
pass
#sub_crit = get_search_type_for_entity(search_arg)
#criteria.append(sub_crit)
search_criteria = get_search_type_for_entity('authorizationGroup')
search_criteria['criteria'] = criteria
fetchopts = fetch_option['authorizationGroup']
for option in ['roleAssignments', 'registrator', 'users']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchAuthorizationGroups",
"params": [
self.token,
search_criteria,
fetchopts
],
}
resp = self._post_request(self.as_v3, request)
if len(resp['objects']) == 0:
raise ValueError("No groups found!")
objects = resp['objects']
parse_jackson(objects)
groups = DataFrame(objects)
groups['permId'] = groups['permId'].map(extract_permid)
groups['registrator'] = groups['registrator'].map(extract_person)
groups['users'] = groups['users'].map(extract_userId)
groups['registrationDate'] = groups['registrationDate'].map(format_timestamp)
groups['modificationDate'] = groups['modificationDate'].map(format_timestamp)
p = Things(
self, entity='group',
df=groups[['permId', 'code', 'description', 'users', 'registrator', 'registrationDate', 'modificationDate']],
identifier_name='permId'
)
return p
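# Usage sketch for get_groups (illustrative):
#   groups = o.get_groups()    # all AuthorizationGroups as a Things object
#   groups['GROUP_NAME']       # select a single group by its code, as shown in the docstring above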
def get_persons(self, **search_args):
""" Get openBIS users
"""
search_criteria = get_search_criteria('person', **search_args)
fetchopts = {}
for option in ['space']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchPersons",
"params": [
self.token,
search_criteria,
fetchopts
],
}
resp = self._post_request(self.as_v3, request)
if len(resp['objects']) == 0:
raise ValueError("No persons found!")
objects = resp['objects']
parse_jackson(objects)
persons = DataFrame(resp['objects'])
persons['permId'] = persons['permId'].map(extract_permid)
persons['registrationDate'] = persons['registrationDate'].map(format_timestamp)
persons['space'] = persons['space'].map(extract_nested_permid)
p = Things(
self, entity='person',
df=persons[['permId', 'userId', 'firstName', 'lastName', 'email', 'space', 'registrationDate', 'active']],
identifier_name='permId'
)
return p
def get_person(self, userId, only_data=False):
""" Get a person (user)
"""
ids = [{
"@type": "as.dto.person.id.PersonPermId",
"permId": userId
}]
fetchopts = {}
for option in ['space', 'roleAssignments', 'project']:
fetchopts[option] = fetch_option[option]
request = {
"method": "getPersons",
"params": [
self.token,
ids,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
if len(resp) == 0:
raise ValueError("No person found!")
#persons['roleAssignments'] = persons['roleAssignments'].map(extract_role_assignments)
for permid in resp:
person = resp[permid]
parse_jackson(person)
if only_data:
return person
else:
return Person(self, data=person)
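# Usage sketch for get_person (illustrative; the userId is made up):
#   person = o.get_person(userId='jdoe')                # Person object
#   raw = o.get_person(userId='jdoe', only_data=True)   # parsed server response as a dict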
def get_spaces(self, code=None):
""" Get a list of all available spaces (DataFrame object). To create a sample or a
dataset, you need to specify in which space it should live.
"""
search_criteria = _subcriteria_for_code(code, 'space')
fetchopts = {}
"params": [self.token,
search_criteria,
fetchopts,
}
resp = self._post_request(self.as_v3, request)
if resp is not None:
spaces = DataFrame(resp['objects'])
spaces['registrationDate'] = spaces['registrationDate'].map(format_timestamp)
spaces['modificationDate'] = spaces['modificationDate'].map(format_timestamp)
sp = Things(
self,
'space',
spaces[['code', 'description', 'registrationDate', 'modificationDate']]
)
return sp
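# Usage sketch for get_spaces (illustrative):
#   spaces = o.get_spaces()                  # all spaces as a Things object
#   spaces = o.get_spaces(code='MY_SPACE')   # narrow the search down to a single code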
def get_space(self, code, only_data=False):
""" Returns a Space object for a given identifier.
code = str(code).upper()
fetchopts = {"@type": "as.dto.space.fetchoptions.SpaceFetchOptions"}
for option in ['registrator']:
fetchopts[option] = fetch_option[option]
"method": "getSpaces",
"params": [
self.token,
[{
"permId": code,
"@type": "as.dto.space.id.SpacePermId"
}],
fetchopts
],
}
resp = self._post_request(self.as_v3, request)
if len(resp) == 0:
raise ValueError("No such space: %s" % code)
for permid in resp:
if only_data:
return resp[permid]
else:
return Space(self, data=resp[permid])
def get_samples(self, code=None, permId=None, space=None, project=None, experiment=None, type=None,
withParents=None, withChildren=None, tags=None, props=None, **properties):
""" Get a list of all samples for a given space/project/experiment (or any combination)
"""
sub_criteria = []
if space:
sub_criteria.append(_gen_search_criteria({
"space": "Space",
"operator": "AND",
"code": space
})
)
if project:
exp_crit = _subcriteria_for_code(experiment, 'experiment')
proj_crit = _subcriteria_for_code(project, 'project')
exp_crit['criteria'] = []
exp_crit['criteria'].append(proj_crit)
sub_criteria.append(exp_crit)
if experiment:
sub_criteria.append(_subcriteria_for_code(experiment, 'experiment'))
if properties is not None:
for prop in properties:
sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))
if type:
sub_criteria.append(_subcriteria_for_code(type, 'sample_type'))
if tags:
sub_criteria.append(_subcriteria_for_tags(tags))
if code:
sub_criteria.append(_criteria_for_code(code))
if permId:
sub_criteria.append(_common_search("as.dto.common.search.PermIdSearchCriteria", permId))
if withParents:
if not isinstance(withParents, list):
withParents = [withParents]
for parent in withParents:
sub_criteria.append(
_gen_search_criteria({
"sample": "SampleParents",
"identifier": parent
})
)
if withChildren:
if not isinstance(withChildren, list):
withChildren = [withChildren]
for child in withChildren:
sub_criteria.append(
_gen_search_criteria({
"sample": "SampleChildren",
"identifier": child
})
)
criteria = {
"criteria": sub_criteria,
"@type": "as.dto.sample.search.SampleSearchCriteria",
"operator": "AND"
}
# build the various fetch options
fetchopts = fetch_option['sample']
for option in ['tags', 'properties', 'registrator', 'modifier', 'experiment']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchSamples",
"params": [self.token,
criteria,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
if len(resp['objects']) == 0:
raise ValueError("no samples found!")
objects = resp['objects']
parse_jackson(objects)
samples = DataFrame(objects)
samples['registrationDate'] = samples['registrationDate'].map(format_timestamp)
samples['modificationDate'] = samples['modificationDate'].map(format_timestamp)
samples['registrator'] = samples['registrator'].map(extract_person)
samples['modifier'] = samples['modifier'].map(extract_person)
samples['identifier'] = samples['identifier'].map(extract_identifier)
samples['permId'] = samples['permId'].map(extract_permid)
samples['experiment'] = samples['experiment'].map(extract_nested_identifier)
samples['sample_type'] = samples['type'].map(extract_nested_permid)
attrs = ['identifier', 'permId', 'experiment', 'sample_type',
'registrator', 'registrationDate', 'modifier', 'modificationDate']
if props is not None:
for prop in props:
samples[prop.upper()] = samples['properties'].map(lambda x: x.get(prop.upper(), ''))
attrs.append(prop.upper())
ss = samples[attrs]
return Things(self, 'sample', ss, 'identifier')
get_objects = get_samples # Alias
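# Usage sketch for get_samples / get_objects (illustrative; space, experiment and property names are made up):
#   samples = o.get_samples(space='MY_SPACE', type='YEAST', props=['NAME'])
#   samples = o.get_samples(experiment='/MY_SPACE/MY_PROJECT/MY_EXPERIMENT')
#   samples.df                              # DataFrame view of the result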
def get_experiments(self, code=None, type=None, space=None, project=None, tags=None, is_finished=None, props=None, **properties):
""" Searches for all experiment which match the search criteria. Returns a
«Things» object which can be used in many different situations.
Usage::
experiments = get_experiments(project='PROJECT_NAME', props=['NAME','FINISHED_FLAG'])
experiments[0] # returns first experiment
experiments['/MATERIALS/REAGENTS/ANTIBODY_COLLECTION']
for experiment in experiments:
# handle every experiment
...
experiments.df # returns DataFrame object of the experiment list
print(experiments) # prints a nice ASCII table
"""
sub_criteria = []
if space:
sub_criteria.append(_subcriteria_for_code(space, 'space'))
if project:
sub_criteria.append(_subcriteria_for_code(project, 'project'))
if code:
sub_criteria.append(_criteria_for_code(code))
if type:
sub_criteria.append(_subcriteria_for_type(type, 'Experiment'))
if tags:
sub_criteria.append(_subcriteria_for_tags(tags))
if is_finished is not None:
sub_criteria.append(_subcriteria_for_is_finished(is_finished))
if properties is not None:
for prop in properties:
sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))
search_criteria = get_search_type_for_entity('experiment')
search_criteria['criteria'] = sub_criteria
search_criteria['operator'] = 'AND'
fetchopts = fetch_option['experiment']
for option in ['tags', 'properties', 'registrator', 'modifier', 'project']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchExperiments",
"params": [
self.token,
search_criteria,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
if len(resp['objects']) == 0:
raise ValueError("No experiments found!")
objects = resp['objects']
parse_jackson(objects)
experiments = DataFrame(objects)
experiments['registrationDate'] = experiments['registrationDate'].map(format_timestamp)
experiments['modificationDate'] = experiments['modificationDate'].map(format_timestamp)
experiments['project'] = experiments['project'].map(extract_code)
experiments['registrator'] = experiments['registrator'].map(extract_person)
experiments['modifier'] = experiments['modifier'].map(extract_person)
experiments['identifier'] = experiments['identifier'].map(extract_identifier)
experiments['permId'] = experiments['permId'].map(extract_permid)
experiments['type'] = experiments['type'].map(extract_code)
attrs = ['identifier', 'permId', 'project', 'type',
'registrator', 'registrationDate', 'modifier', 'modificationDate']
if props is not None:
for prop in props:
experiments[prop.upper()] = experiments['properties'].map(lambda x: x.get(prop.upper(), ''))
attrs.append(prop.upper())
exps = experiments[attrs]
return Things(self, 'experiment', exps, 'identifier')
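# Usage sketch for get_experiments (illustrative; the project and property names are made up):
#   experiments = o.get_experiments(project='MY_PROJECT', props=['NAME'])
#   experiments[0]                          # first experiment, as shown in the docstring above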
def get_datasets(self,
code=None, type=None, withParents=None, withChildren=None, status=None,
sample=None, experiment=None, project=None, tags=None, props=None, **properties):
sub_criteria = []
if code:
sub_criteria.append(_criteria_for_code(code))
if type:
sub_criteria.append(_subcriteria_for_type(type, 'DataSet'))
if withParents:
sub_criteria.append(_subcriteria_for_permid(withParents, 'DataSet', 'Parents'))
if withChildren:
sub_criteria.append(_subcriteria_for_permid(withChildren, 'DataSet', 'Children'))
if sample:
sub_criteria.append(_subcriteria_for_code(sample, 'Sample'))
if experiment:
sub_criteria.append(_subcriteria_for_code(experiment, 'Experiment'))
if project:
exp_crit = _subcriteria_for_code(experiment, 'Experiment')
proj_crit = _subcriteria_for_code(project, 'Project')
exp_crit['criteria'] = []
exp_crit['criteria'].append(proj_crit)
sub_criteria.append(exp_crit)
if tags:
sub_criteria.append(_subcriteria_for_tags(tags))
if status:
sub_criteria.append(_subcriteria_for_status(status))
if properties is not None:
for prop in properties:
sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))
search_criteria = get_search_type_for_entity('dataset')
search_criteria['criteria'] = sub_criteria
search_criteria['operator'] = 'AND'
fetchopts = {
"containers": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
"type": {"@type": "as.dto.dataset.fetchoptions.DataSetTypeFetchOptions"}
}
for option in ['tags', 'properties', 'sample', 'experiment', 'physicalData']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchDataSets",
"params": [self.token,
search_criteria,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
objects = resp['objects']
parse_jackson(objects)
datasets = DataFrame(objects)
datasets['registrationDate'] = datasets['registrationDate'].map(format_timestamp)
datasets['modificationDate'] = datasets['modificationDate'].map(format_timestamp)
datasets['experiment'] = datasets['experiment'].map(extract_nested_identifier)
datasets['sample'] = datasets['sample'].map(extract_nested_identifier)
datasets['type'] = datasets['type'].map(extract_code)
datasets['permId'] = datasets['code']
datasets['location'] = datasets['physicalData'].map(lambda x: x.get('location') if x else '')
attrs = ['permId', 'properties', 'type', 'experiment', 'sample', 'registrationDate', 'modificationDate',
'location']
if props is not None:
for prop in props:
datasets[prop.upper()] = datasets['properties'].map(lambda x: x.get(prop.upper(), ''))
attrs.append(prop.upper())
return Things(self, 'dataset', datasets[attrs], 'permId')
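# Usage sketch for get_datasets (illustrative; the filter values are made up):
#   datasets = o.get_datasets(sample='/MY_SPACE/MY_SAMPLE')
#   datasets = o.get_datasets(type='RAW_DATA', tags=['mytag'])
#   datasets.df[['permId', 'type', 'location']]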
def get_experiment(self, expId, withAttachments=False, only_data=False):
""" Returns an experiment object for a given identifier (expId).
"""
fetchopts = {
"@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions",
"type": {
"@type": "as.dto.experiment.fetchoptions.ExperimentTypeFetchOptions",
},
}
search_request = search_request_for_identifier(expId, 'experiment')
for option in ['tags', 'properties', 'attachments', 'project', 'samples']:
fetchopts[option] = fetch_option[option]
if withAttachments:
fetchopts['attachments'] = fetch_option['attachmentsWithContent']
"method": "getExperiments",
"params": [
self.token,
[search_request],
fetchopts
],
}
resp = self._post_request(self.as_v3, request)
if len(resp) == 0:
raise ValueError("No such experiment: %s" % expId)
for id in resp:
if only_data:
return resp[id]
else:
return Experiment(
openbis_obj = self,
type = self.get_experiment_type(resp[id]["type"]["code"]),
data = resp[id]
)
def new_experiment(self, type, code, project, props=None, **kwargs):
""" Creates a new experiment of a given experiment type.
"""
return Experiment(
openbis_obj = self,
type = self.get_experiment_type(type),
data = None,
props = props,
code = code,
**kwargs
)
def update_experiment(self, experimentId, properties=None, tagIds=None, attachments=None):
params = {
"experimentId": {
"permId": experimentId,
"@type": "as.dto.experiment.id.ExperimentPermId"
},
"@type": "as.dto.experiment.update.ExperimentUpdate"
}
if properties is not None:
params["properties"] = properties
if tagIds is not None:
params["tagIds"] = tagIds
if attachments is not None:
params["attachments"] = attachments
request = {
"method": "updateExperiments",
"params": [
self.token,
[params]
]
}
self._post_request(self.as_v3, request)
def create_sample(self, space_ident, code, type,
project_ident=None, experiment_ident=None, properties=None, attachments=None, tags=None):
tagIds = _create_tagIds(tags)
typeId = _create_typeId(type)
projectId = _create_projectId(project_ident)
experimentId = _create_experimentId(experiment_ident)
if properties is None:
properties = {}
request = {
"method": "createSamples",
"params": [
self.token,
[
{
"properties": properties,
"code": code,
"typeId": typeId,
"projectId": projectId,
"experimentId": experimentId,
"tagIds": tagIds,
"attachments": attachments,
"@type": "as.dto.sample.create.SampleCreation",
}
]
],
}
resp = self._post_request(self.as_v3, request)
return self.get_sample(resp[0]['permId'])
create_object = create_sample # Alias
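# Usage sketch for create_sample / create_object (illustrative; identifiers and property names are made up):
#   sample = o.create_sample('MY_SPACE', 'MY_SAMPLE_CODE', 'YEAST',
#                            experiment_ident='/MY_SPACE/MY_PROJECT/MY_EXPERIMENT',
#                            properties={'NAME': 'my sample'})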
def create_external_data_management_system(self, code, label, address, address_type='FILE_SYSTEM'):
"""Create an external DMS.
:param code: An openBIS code for the external DMS.
:param label: A human-readable label.
:param address: The address for accessing the external DMS. E.g., a URL.
:param address_type: One of OPENBIS, URL, or FILE_SYSTEM
:return:
"""
request = {
"method": "createExternalDataManagementSystems",
"params": [
self.token,
[
{
"code": code,
"label": label,
"addressType": address_type,
"address": address,
"@type": "as.dto.externaldms.create.ExternalDmsCreation",
}
]
],
}
resp = self._post_request(self.as_v3, request)
return self.get_external_data_management_system(resp[0]['permId'])
def update_sample(self, sampleId, space=None, project=None, experiment=None,
parents=None, children=None, components=None, properties=None, tagIds=None, attachments=None):
params = {
"sampleId": {
"permId": sampleId,
"@type": "as.dto.sample.id.SamplePermId"
},
"@type": "as.dto.sample.update.SampleUpdate"
}
if space is not None:
params['spaceId'] = space
if project is not None:
params['projectId'] = project
if properties is not None:
params["properties"] = properties
if tagIds is not None:
params["tagIds"] = tagIds
if attachments is not None:
params["attachments"] = attachments
request = {
"method": "updateSamples",
"params": [
self.token,
[params]
]
}
self._post_request(self.as_v3, request)
update_object = update_sample # Alias
def delete_entity(self, entity, permid, reason, capitalize=True):
"""Deletes Spaces, Projects, Experiments, Samples and DataSets
"""
if capitalize:
entity_capitalized = entity.capitalize()
else:
entity_capitalized = entity
entity_type = "as.dto.{}.id.{}PermId".format(entity.lower(), entity_capitalized)
request = {
"method": "delete" + entity_capitalized + 's',
"params": [
self.token,
[
{
"permId": permid,
"@type": entity_type
}
],
{
"reason": reason,
"@type": "as.dto.{}.delete.{}DeletionOptions".format(entity.lower(), entity_capitalized)
}
]
}
self._post_request(self.as_v3, request)
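# Usage sketch for delete_entity (illustrative; the permId and reason are made up):
#   o.delete_entity('sample', '20170101000000000-1', reason='created by mistake')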
def get_deletions(self):
request = {
"method": "searchDeletions",
"params": [
self.token,
{},
{
"deletedObjects": {
"@type": "as.dto.deletion.fetchoptions.DeletedObjectFetchOptions"
}
}
]
}
resp = self._post_request(self.as_v3, request)
objects = resp['objects']
parse_jackson(objects)
new_objs = []
for value in objects:
del_objs = extract_deletion(value)
if len(del_objs) > 0:
new_objs.append(*del_objs)
return DataFrame(new_objs)
def new_project(self, space, code, description=None, **kwargs):
return Project(self, None, space=space, code=code, description=description, **kwargs)
def _gen_fetchoptions(self, options):
fo = {}
for option in options:
fo[option] = fetch_option[option]
return fo
def get_project(self, projectId, only_data=False):
options = ['space', 'registrator', 'modifier', 'attachments']
if is_identifier(projectId) or is_permid(projectId):
request = self._create_get_request(
'getProjects', 'project', projectId, options
)
resp = self._post_request(self.as_v3, request)
if only_data:
return resp[projectId]
return Project(self, resp[projectId])
else:
search_criteria = _gen_search_criteria({
'project': 'Project',
'operator': 'AND',
'code': projectId
})
fo = self._gen_fetchoptions(options)
request = {
"method": "searchProjects",
"params": [self.token, search_criteria, fo]
}
resp = self._post_request(self.as_v3, request)
if len(resp['objects']) == 0:
raise ValueError("No such project: %s" % projectId)
if only_data:
return resp['objects'][0]
return Project(self, resp['objects'][0])
def get_projects(self, space=None, code=None):
""" Get a list of all available projects (DataFrame object).
"""
sub_criteria = []
if space:
sub_criteria.append(_subcriteria_for_code(space, 'space'))
if code:
sub_criteria.append(_criteria_for_code(code))
criteria = {
"criteria": sub_criteria,
"@type": "as.dto.project.search.ProjectSearchCriteria",
"operator": "AND"
}
fetchopts = {"@type": "as.dto.project.fetchoptions.ProjectFetchOptions"}
for option in ['registrator', 'modifier', 'leader']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchProjects",
"params": [self.token,
criteria,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
objects = resp['objects']
if len(objects) == 0:
raise ValueError("No projects found!")
parse_jackson(objects)
projects = DataFrame(objects)
if len(projects) == 0:
raise ValueError("No projects found!")
projects['registrationDate'] = projects['registrationDate'].map(format_timestamp)
projects['modificationDate'] = projects['modificationDate'].map(format_timestamp)
projects['leader'] = projects['leader'].map(extract_person)
projects['registrator'] = projects['registrator'].map(extract_person)
projects['modifier'] = projects['modifier'].map(extract_person)
projects['permId'] = projects['permId'].map(extract_permid)
projects['identifier'] = projects['identifier'].map(extract_identifier)
pros = projects[['identifier', 'permId', 'leader', 'registrator', 'registrationDate',
'modifier', 'modificationDate']]
return Things(self, 'project', pros, 'identifier')
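# Usage sketch for get_projects (illustrative; the space code is made up):
#   projects = o.get_projects(space='MY_SPACE')   # Things object, indexed by identifier
#   projects.df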
def _create_get_request(self, method_name, entity, permids, options):
if not isinstance(permids, list):
permids = [permids]
type = "as.dto.{}.id.{}".format(entity.lower(), entity.capitalize())
search_params = []
for permid in permids:
# decide if we got a permId or an identifier
match = re.match('/', permid)
if match:
search_params.append(
{"identifier": permid, "@type": type + 'Identifier'}
else:
{"permId": permid, "@type": type + 'PermId'}
)
fo = {}
for option in options:
fo[option] = fetch_option[option]
request = {
"method": method_name,
"params": [
self.token,
search_params,
fo
],
}
return request
def get_terms(self, vocabulary=None):
""" Returns information about vocabulary, including its controlled vocabulary
search_request = {}
if vocabulary is not None:
search_request = _gen_search_criteria({
"vocabulary": "VocabularyTerm",
"criteria": [{
"vocabulary": "Vocabulary",
"code": vocabulary
}]
})
fetch_options = {
"vocabulary": {"@type": "as.dto.vocabulary.fetchoptions.VocabularyFetchOptions"},
"@type": "as.dto.vocabulary.fetchoptions.VocabularyTermFetchOptions"
}
request = {
"method": "searchVocabularyTerms",
"params": [self.token, search_request, fetch_options]
}
resp = self._post_request(self.as_v3, request)
parse_jackson(resp)
return Vocabulary(resp)
def get_tags(self):
""" Returns a DataFrame of all
"""
request = {
"method": "searchTags",
"params": [self.token, {}, {}]
}
resp = self._post_request(self.as_v3, request)
parse_jackson(resp)
objects = DataFrame(resp['objects'])
objects['registrationDate'] = objects['registrationDate'].map(format_timestamp)
return objects[['code', 'registrationDate']]
def _search_semantic_annotations(self, criteria):
fetch_options = {
"@type": "as.dto.semanticannotation.fetchoptions.SemanticAnnotationFetchOptions",
"entityType": {"@type": "as.dto.entitytype.fetchoptions.EntityTypeFetchOptions"},
"propertyType": {"@type": "as.dto.property.fetchoptions.PropertyTypeFetchOptions"},
"propertyAssignment": {
"@type": "as.dto.property.fetchoptions.PropertyAssignmentFetchOptions",
"entityType" : {
"@type" : "as.dto.entitytype.fetchoptions.EntityTypeFetchOptions"
},
"propertyType" : {
"@type" : "as.dto.property.fetchoptions.PropertyTypeFetchOptions"
}
}
}
request = {
"method": "searchSemanticAnnotations",
"params": [self.token, criteria, fetch_options]
}
resp = self._post_request(self.as_v3, request)
if resp is not None:
objects = resp['objects']
if len(objects) == 0:
raise ValueError("No semantic annotations found!")