    def _search_semantic_annotations(self, criteria):
        fetch_options = {
            "@type": "as.dto.semanticannotation.fetchoptions.SemanticAnnotationFetchOptions",
            "entityType": {"@type": "as.dto.entitytype.fetchoptions.EntityTypeFetchOptions"},
            "propertyType": {"@type": "as.dto.property.fetchoptions.PropertyTypeFetchOptions"},
            "propertyAssignment": {
                "@type": "as.dto.property.fetchoptions.PropertyAssignmentFetchOptions",
                "entityType": {
                    "@type": "as.dto.entitytype.fetchoptions.EntityTypeFetchOptions"
                },
                "propertyType": {
                    "@type": "as.dto.property.fetchoptions.PropertyTypeFetchOptions"
                }
            }
        }
        request = {
            "method": "searchSemanticAnnotations",
            "params": [self.token, criteria, fetch_options]
        }
        resp = self._post_request(self.as_v3, request)

        if resp is not None:
            objects = resp['objects']
            if len(objects) == 0:
                raise ValueError("No semantic annotations found!")

            parse_jackson(objects)
            for object in objects:
                object['permId'] = object['permId']['permId']
                if object.get('entityType') is not None:
                    object['entityType'] = object['entityType']['code']
                elif object.get('propertyType') is not None:
                    object['propertyType'] = object['propertyType']['code']
                elif object.get('propertyAssignment') is not None:
                    object['entityType'] = object['propertyAssignment']['entityType']['code']
                    object['propertyType'] = object['propertyAssignment']['propertyType']['code']
                object['creationDate'] = format_timestamp(object['creationDate'])
            return objects
        else:
            raise ValueError("No semantic annotations found!")
    def get_semantic_annotations(self):
        """ Get a list of all available semantic annotations (DataFrame object).
        """
        objects = self._search_semantic_annotations({})
        attrs = ['permId', 'entityType', 'propertyType',
                 'predicateOntologyId', 'predicateOntologyVersion', 'predicateAccessionId',
                 'descriptorOntologyId', 'descriptorOntologyVersion', 'descriptorAccessionId',
                 'creationDate']
        annotations = DataFrame(objects)
        return Things(self, 'semantic_annotation', annotations[attrs], 'permId')
    def get_semantic_annotation(self, permId, only_data=False):
        criteria = {
            "@type": "as.dto.semanticannotation.search.SemanticAnnotationSearchCriteria",
            "criteria": [{
                "@type": "as.dto.common.search.PermIdSearchCriteria",
                "fieldValue": {
                    "@type": "as.dto.common.search.StringEqualToValue",
                    "value": permId
                }
            }]
        }
        objects = self._search_semantic_annotations(criteria)
        object = objects[0]
        if only_data:
            return object
        else:
            return SemanticAnnotation(self, isNew=False, **object)
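    # Illustrative usage sketch (not part of the API surface): listing and fetching semantic
    # annotations from an authenticated Openbis instance `o`; the permId value is hypothetical.
    #
    #   annotations = o.get_semantic_annotations()            # Things object, renders as a DataFrame
    #   ann = o.get_semantic_annotation('20210101123456789-42')
    #   print(ann.predicateOntologyId, ann.predicateAccessionId)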
    def get_plugins(self):
        criteria = []
        search_criteria = get_search_type_for_entity('plugin', 'AND')
        search_criteria['criteria'] = criteria

        fetchopts = fetch_option['plugin']
        for option in ['registrator']:
            fetchopts[option] = fetch_option[option]

        request = {
            "method": "searchPlugins",
            "params": [
                self.token,
                search_criteria,
                fetchopts,
            ],
        }
        resp = self._post_request(self.as_v3, request)
        attrs = ['name', 'description', 'pluginType', 'pluginKind',
                 'entityKinds', 'registrator', 'registrationDate', 'permId']

        if len(resp['objects']) == 0:
            plugins = DataFrame(columns=attrs)
        else:
            objects = resp['objects']
            parse_jackson(objects)
            plugins = DataFrame(objects)
            plugins['permId'] = plugins['permId'].map(extract_permid)
            plugins['registrator'] = plugins['registrator'].map(extract_person)
            plugins['registrationDate'] = plugins['registrationDate'].map(format_timestamp)
            plugins['description'] = plugins['description'].map(lambda x: '' if x is None else x)
            plugins['entityKinds'] = plugins['entityKinds'].map(lambda x: '' if x is None else x)
        return Things(self, 'plugin', plugins[attrs], 'name')
    def get_plugin(self, permId, only_data=False, with_script=True):
        search_request = search_request_for_identifier(permId, 'plugin')
        fetchopts = fetch_option['plugin']
        options = ['registrator']
        if with_script:
            options.append('script')

        for option in options:
            fetchopts[option] = fetch_option[option]

        request = {
            "method": "getPlugins",
            "params": [
                self.token,
                [search_request],
                fetchopts
            ],
        }
        resp = self._post_request(self.as_v3, request)
        parse_jackson(resp)

        if resp is None or len(resp) == 0:
            raise ValueError('no such plugin found: ' + permId)
        else:
            for permId in resp:
                if only_data:
                    return resp[permId]
                else:
                    return Plugin(self, data=resp[permId])
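    # Illustrative usage sketch (assumes an authenticated Openbis instance `o`; the plugin
    # name is hypothetical):
    #
    #   plugins = o.get_plugins()                              # all plugins as a Things/DataFrame object
    #   plugin = o.get_plugin('my-validation-script', with_script=True)
    #   print(plugin.script)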
    def new_plugin(self, name, pluginType="MANAGED_PROPERTY", pluginKind="JYTHON", **kwargs):
        """ Creates a new Plugin in openBIS. The attribute pluginType must be one of
        the following: DYNAMIC_PROPERTY, MANAGED_PROPERTY, ENTITY_VALIDATION.

        Usage::

            o.new_plugin(
                name        = 'name of plugin',
                description = '...',
                pluginType  = "ENTITY_VALIDATION",
                script      = "def a():\n pass",
                available   = True,
                entityKind  = None
            )
        """
        if pluginType not in [
            'DYNAMIC_PROPERTY', 'MANAGED_PROPERTY', 'ENTITY_VALIDATION'
        ]:
            raise ValueError(
                "pluginType must be one of the following: DYNAMIC_PROPERTY, MANAGED_PROPERTY, ENTITY_VALIDATION")
        return Plugin(self, pluginType=pluginType, pluginKind=pluginKind, **kwargs)
    def get_sample_types(self, type=None):
        """ Returns a list of all available sample types
        """
        return self._get_types_of(
            "searchSampleTypes",
            "Sample",
            type,
            ["generatedCodePrefix"]
        )

    get_object_types = get_sample_types  # Alias
    def get_sample_type(self, type):
        try:
            return self._get_types_of(
                "searchSampleTypes",
                "Sample",
                type,
                ["generatedCodePrefix"]
            )
        except Exception:
            raise ValueError("no such sample type: {}".format(type))

    get_object_type = get_sample_type  # Alias
    def get_experiment_types(self, type=None):
        """ Returns a list of all available experiment types
        """
        return self._get_types_of(
            "searchExperimentTypes",
            "Experiment",
            type
        )

    get_collection_types = get_experiment_types  # Alias
    def get_experiment_type(self, type):
        try:
            return self._get_types_of(
                "searchExperimentTypes",
                "Experiment",
                type
            )
        except Exception:
            raise ValueError("No such experiment type: {}".format(type))

    get_collection_type = get_experiment_type  # Alias
    def get_material_types(self, type=None):
        """ Returns a list of all available material types
        """
        return self._get_types_of("searchMaterialTypes", "Material", type)

    def get_material_type(self, type):
        try:
            return self._get_types_of("searchMaterialTypes", "Material", type)
        except Exception:
            raise ValueError("No such material type: {}".format(type))
    def get_dataset_types(self, type=None):
        """ Returns a list (DataFrame object) of all currently available dataset types
        """
        return self._get_types_of("searchDataSetTypes", "DataSet", type, optional_attributes=['kind'])

    def get_dataset_type(self, type):
        try:
            return self._get_types_of("searchDataSetTypes", "DataSet", type, optional_attributes=['kind'])
        except Exception:
            raise ValueError("No such dataSet type: {}".format(type))
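    # Illustrative usage sketch (assumes an authenticated Openbis instance `o`; the type
    # codes are hypothetical):
    #
    #   sample_types = o.get_sample_types()          # all sample/object types as a DataFrame-like object
    #   st = o.get_sample_type('YEAST')              # a single type with its property assignments
    #   dataset_types = o.get_dataset_types()        # includes the optional 'kind' column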
    def _get_types_of(self, method_name, entity, type_name=None, additional_attributes=[], optional_attributes=[]):
        """ Returns a list of all available types of an entity.
        If the name of the entity-type is given, it returns a PropertyAssignments object.
        """
        search_request = {}
        fetch_options = {
            "@type": "as.dto.{}.fetchoptions.{}TypeFetchOptions".format(
                entity.lower(), entity
            )
        }
        if type_name is not None:
            search_request = _gen_search_criteria({
                entity.lower(): entity + "Type",
                "operator": "AND",
                "code": type_name
            })
            fetch_options['propertyAssignments'] = fetch_option['propertyAssignments']
            fetch_options['validationPlugin'] = fetch_option['plugin']

        request = {
            "method": method_name,
            "params": [self.token, search_request, fetch_options],
        }
        resp = self._post_request(self.as_v3, request)
        parse_jackson(resp)

        if type_name is not None and len(resp['objects']) == 1:
            return PropertyAssignments(self, resp['objects'][0])
        if len(resp['objects']) >= 1:
            types = DataFrame(resp['objects'])
            types['modificationDate'] = types['modificationDate'].map(format_timestamp)
            attributes = self._get_attributes(type_name, types, additional_attributes, optional_attributes)
            return Things(self, entity.lower() + '_type', types[attributes])
    def _get_attributes(self, type_name, types, additional_attributes, optional_attributes):
        attributes = ['code', 'description'] + additional_attributes
        attributes += [attribute for attribute in optional_attributes if attribute in types]
        attributes += ['modificationDate']
        if type_name is not None:
            attributes += ['propertyAssignments']
        return attributes
    def is_session_active(self):
        """ Checks whether a session is still active. Returns True or False.
        """
        return self.is_token_valid(self.token)

    def is_token_valid(self, token=None):
        """Check if the connection to openBIS is valid.
        This method is useful to check if a token is still valid or if it has timed out,
        requiring the user to login again.

        :return: True if the token is valid, False if it is not valid.
        """
        if token is None:
            token = self.token
        if token is None:
            return False

        request = {
            "method": "isSessionActive",
            "params": [token],
        }
        try:
            resp = self._post_request(self.as_v1, request)
        except Exception:
            return False
        return resp
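    # Illustrative usage sketch (assumes an Openbis instance `o`; credentials are placeholders):
    #
    #   if not o.is_session_active():
    #       o.login('username', 'password')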
    def get_dataset(self, permid, only_data=False):
        """fetch a dataset and some metadata attached to it:
        - properties
        - sample
        - parents
        - children
        - containers
        - dataStore
        - physicalData
        - linkedData
        :return: a DataSet object
        """
        criteria = [{
            "permId": permid,
            "@type": "as.dto.dataset.id.DataSetPermId"
        }]

        fetchopts = {
            "parents": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
            "children": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
            "containers": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
            "type": {"@type": "as.dto.dataset.fetchoptions.DataSetTypeFetchOptions"},
        }
        for option in ['tags', 'properties', 'dataStore', 'physicalData', 'linkedData',
                       'experiment', 'sample']:
            fetchopts[option] = fetch_option[option]

        request = {
            "method": "getDataSets",
            "params": [
                self.token,
                criteria,
                fetchopts,
            ],
        }
        resp = self._post_request(self.as_v3, request)
        if resp is None or len(resp) == 0:
            raise ValueError('no such dataset found: ' + permid)

        parse_jackson(resp)
        for permid in resp:
            if only_data:
                return resp[permid]
            else:
                return DataSet(
                    self,
                    type=self.get_dataset_type(resp[permid]["type"]["code"]),
                    data=resp[permid]
                )
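    # Illustrative usage sketch (assumes an authenticated Openbis instance `o`; the permId
    # is hypothetical):
    #
    #   ds = o.get_dataset('20210101123456789-100')                    # DataSet object
    #   data = o.get_dataset('20210101123456789-100', only_data=True)  # raw dict instead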
    def get_sample(self, sample_ident, only_data=False, withAttachments=False):
        """Retrieve metadata for the sample.
        Get metadata for the sample and any directly connected parents of the sample to allow access
        to the same information visible in the ELN UI.

        :param sample_ident: A sample identifier or permId of the sample to retrieve.
        """
        search_request = search_request_for_identifier(sample_ident, 'sample')

        fetchopts = {"type": {"@type": "as.dto.sample.fetchoptions.SampleTypeFetchOptions"}}
        for option in ['tags', 'properties', 'attachments', 'space', 'experiment', 'registrator', 'dataSets']:
            fetchopts[option] = fetch_option[option]

        if withAttachments:
            fetchopts['attachments'] = fetch_option['attachmentsWithContent']

        for key in ['parents', 'children', 'container', 'components']:
            fetchopts[key] = {"@type": "as.dto.sample.fetchoptions.SampleFetchOptions"}

        request = {
            "method": "getSamples",
            "params": [
                self.token,
                [search_request],
                fetchopts
            ],
        }
        resp = self._post_request(self.as_v3, request)
        parse_jackson(resp)
        if resp is None or len(resp) == 0:
            raise ValueError('no such sample found: ' + sample_ident)

        for sample_ident in resp:
            if only_data:
                return resp[sample_ident]
            else:
                return Sample(self, self.get_sample_type(resp[sample_ident]["type"]["code"]), resp[sample_ident])

    get_object = get_sample  # Alias
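    # Illustrative usage sketch (assumes an authenticated Openbis instance `o`; the sample
    # identifier and permId are hypothetical):
    #
    #   sample = o.get_sample('/MY_SPACE/MY_SAMPLE')          # by identifier ...
    #   sample = o.get_sample('20210101123456789-200',        # ... or by permId
    #                         withAttachments=True)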
    def get_external_data_management_system(self, permId, only_data=False):
        """Retrieve metadata for the external data management system.

        :param permId: A permId for an external DMS.
        :param only_data: Return the result data as a hash-map, not an object.
        """
        request = {
            "method": "getExternalDataManagementSystems",
            "params": [
                self.token,
                [{
                    "@type": "as.dto.externaldms.id.ExternalDmsPermId",
                    "permId": permId
                }],
                {},
            ],
        }
        resp = self._post_request(self.as_v3, request)
        parse_jackson(resp)
        if resp is None or len(resp) == 0:
            raise ValueError('no such external DMS found: ' + permId)
        else:
            for ident in resp:
                if only_data:
                    return resp[ident]
                else:
                    return ExternalDMS(self, resp[ident])
    def new_space(self, **kwargs):
        return Space(self, None, **kwargs)
    def new_git_data_set(self, data_set_type, path, commit_id, repository_id, dms, sample=None, experiment=None,
                         properties={}, dss_code=None, parents=None, data_set_code=None, contents=[]):
        """ Create a link data set.

        :param data_set_type: The type of the data set
        :param path: The path to the git repository
        :param commit_id: The git commit id
        :param repository_id: The git repository id - same for copies
        :param dms: An external data management system object or external_dms_id
        :param sample: A sample object or sample id.
        :param experiment: An experiment object or experiment id.
        :param dss_code: Code for the DSS -- defaults to the first dss if none is supplied.
        :param properties: Properties for the data set.
        :param parents: Parents for the data set.
        :param data_set_code: A data set code -- used if provided, otherwise generated on the server
        :param contents: A list of dicts that describe the contents:
            {'file_length': [file length],
             'crc32': [crc32 checksum],
             'directory': [is path a directory?],
             'path': [the relative path string]}
        :return: A DataSet object
        """
        return pbds.GitDataSetCreation(self, data_set_type, path, commit_id, repository_id, dms, sample, experiment,
                                       properties, dss_code, parents, data_set_code, contents).new_git_data_set()
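    # Illustrative usage sketch (assumes an authenticated Openbis instance `o`; all values,
    # including the type code and DMS code, are placeholders):
    #
    #   dms = o.get_external_data_management_system('MY_DMS')
    #   ds = o.new_git_data_set(
    #       data_set_type = 'GIT_REPO',
    #       path          = '/local/path/to/repo',
    #       commit_id     = 'abc123',
    #       repository_id = 'repo-uuid',
    #       dms           = dms,
    #       sample        = '/MY_SPACE/MY_SAMPLE',
    #   )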
    def new_content_copy(self, path, commit_id, repository_id, edms_id, data_set_id):
        """
        Create a content copy in an existing link data set.

        :param path: path of the new content copy
        :param commit_id: commit id of the new content copy
        :param repository_id: repository id of the content copy
        :param edms_id: Id of the external data management system of the content copy
        :param data_set_id: Id of the data set to which the new content copy belongs
        """
        return pbds.GitDataSetUpdate(self, path, commit_id, repository_id, edms_id, data_set_id).new_content_copy()
    @staticmethod
    def sample_to_sample_id(sample):
        """Take sample which may be a string or object and return an identifier for it."""
        return Openbis._object_to_object_id(sample, "as.dto.sample.id.SampleIdentifier", "as.dto.sample.id.SamplePermId")

    @staticmethod
    def experiment_to_experiment_id(experiment):
        """Take experiment which may be a string or object and return an identifier for it."""
        return Openbis._object_to_object_id(experiment, "as.dto.experiment.id.ExperimentIdentifier", "as.dto.experiment.id.ExperimentPermId")
    @staticmethod
    def _object_to_object_id(obj, identifierType, permIdType):
        object_id = None
        if isinstance(obj, str):
            if is_identifier(obj):
                object_id = {
                    "identifier": obj,
                    "@type": identifierType
                }
            else:
                object_id = {
                    "permId": obj,
                    "@type": permIdType
                }
        else:
            object_id = {
                "identifier": obj.identifier,
                "@type": identifierType
            }
        return object_id
    @staticmethod
    def data_set_to_data_set_id(data_set):
        if isinstance(data_set, str):
            code = data_set
        else:
            code = data_set.permId
        return {
            "permId": code,
            "@type": "as.dto.dataset.id.DataSetPermId"
        }
    def external_data_managment_system_to_dms_id(self, dms):
        if isinstance(dms, str):
            dms_id = {
                "permId": dms,
                "@type": "as.dto.externaldms.id.ExternalDmsPermId"
            }
        else:
            dms_id = {
                "permId": dms.code,
                "@type": "as.dto.externaldms.id.ExternalDmsPermId"
            }
        return dms_id
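    # Illustrative sketch of how the id-converter helpers behave (values are placeholders):
    #
    #   Openbis.sample_to_sample_id('/MY_SPACE/MY_SAMPLE')
    #       # -> {"identifier": "/MY_SPACE/MY_SAMPLE", "@type": "as.dto.sample.id.SampleIdentifier"}
    #   Openbis.data_set_to_data_set_id('20210101123456789-300')
    #       # -> {"permId": "20210101123456789-300", "@type": "as.dto.dataset.id.DataSetPermId"}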
    def new_sample(self, type, props=None, **kwargs):
        """ Creates a new sample of a given sample type.
        """
        return Sample(self, self.get_sample_type(type), None, props, **kwargs)

    new_object = new_sample  # Alias

    def new_dataset(self, type=None, files=None, props=None, folder=None, **kwargs):
        """ Creates a new dataset of a given dataset type.
        """
        if files is None:
            raise ValueError('please provide at least one file')
        elif isinstance(files, str):
            files = [files]

        type_obj = self.get_dataset_type(type.upper())
        return DataSet(self, type=type_obj, files=files, folder=folder, props=props, **kwargs)
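    # Illustrative usage sketch (assumes an authenticated Openbis instance `o`; type codes,
    # space and file names are placeholders):
    #
    #   sample = o.new_sample(type='YEAST', space='MY_SPACE', props={'name': 'my sample'})
    #   sample.save()
    #   ds = o.new_dataset(type='RAW_DATA', sample=sample, files=['measurement.txt'])
    #   ds.save()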
    def new_semantic_annotation(self, entityType=None, propertyType=None, **kwargs):
        return SemanticAnnotation(
            openbis_obj=self, isNew=True,
            entityType=entityType, propertyType=propertyType, **kwargs
        )
    def new_vocabulary(self, code, terms, managedInternally=False, internalNameSpace=False, chosenFromList=True, **kwargs):
        """ Creates a new vocabulary
        Usage::

            new_vocabulary(
                code = 'vocabulary_code',
                description = '',
                terms = [
                    { "code": "term1", "label": "label1", "description": "description1" },
                    { "code": "term2", "label": "label2", "description": "description2" },
                ]
            )
        """
        kwargs['code'] = code
        kwargs['managedInternally'] = managedInternally
        kwargs['internalNameSpace'] = internalNameSpace
        kwargs['chosenFromList'] = chosenFromList
        return Vocabulary(self, data=None, terms=terms, **kwargs)
    def _get_dss_url(self, dss_code=None):
        """ internal method to get the downloadURL of a datastore.
        """
        dss = self.get_datastores()
        if dss_code is None:
            return dss['downloadUrl'][0]
        else:
            return dss[dss['code'] == dss_code]['downloadUrl'][0]
class LinkedData():
    def __init__(self, data=None):
        self.data = data if data is not None else []
        self.attrs = ['externalCode', 'contentCopies']

    def __dir__(self):
        return self.attrs

    def __getattr__(self, name):
        if name in self.attrs:
            if name in self.data:
                return self.data[name]
            else:
                return ''
class PhysicalData():
    def __init__(self, data=None):
        if data is None:
            data = []
        self.data = data
        self.attrs = ['speedHint', 'complete', 'shareId', 'size',
                      'fileFormatType', 'storageFormat', 'location', 'presentInArchive',
                      'storageConfirmation', 'locatorType', 'status']
    def __dir__(self):
        return self.attrs

    def __getattr__(self, name):
        if name in self.attrs:
            if name in self.data:
                return self.data[name]
            else:
                return ''
    def _repr_html_(self):
        html = """
            <table border="1" class="dataframe">
            <thead>
                <tr style="text-align: right;">
                <th>attribute</th>
                <th>value</th>
                </tr>
            </thead>
            <tbody>
        """
        for attr in self.attrs:
            html += "<tr> <td>{}</td> <td>{}</td> </tr>".format(
                attr, getattr(self, attr, '')
            )
        html += """
            </tbody>
            </table>
        """
        return html
    def __repr__(self):
        headers = ['attribute', 'value']
        lines = []
        for attr in self.attrs:
            lines.append([
                attr,
                getattr(self, attr, '')
            ])
        return tabulate(lines, headers=headers)
class ExternalDMS():
    """ managing openBIS external data management systems
    """

    def __init__(self, openbis_obj, data=None, **kwargs):
        self.__dict__['openbis'] = openbis_obj
        self.__dict__['data'] = data
        if kwargs is not None:
            for key in kwargs:
                setattr(self, key, kwargs[key])

    def __getattr__(self, name):
        return self.__dict__['data'].get(name)

    def __dir__(self):
        """all the available methods and attributes that should be displayed
        when using the autocompletion feature (TAB) in Jupyter
        """
        return ['code', 'label', 'urlTemplate', 'address', 'addressType', 'openbis']

    def __str__(self):
        return self.data.get('code', None)