request = {
"method": "searchExperiments",
"params": [self.token,
criteria,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
if len(resp['objects']) == 0:
raise ValueError("No experiments found!")
objects = resp['objects']
parse_jackson(objects)
experiments = DataFrame(objects)
experiments['registrationDate'] = experiments['registrationDate'].map(format_timestamp)
experiments['modificationDate'] = experiments['modificationDate'].map(format_timestamp)
experiments['project'] = experiments['project'].map(extract_code)
experiments['registrator'] = experiments['registrator'].map(extract_person)
experiments['modifier'] = experiments['modifier'].map(extract_person)
experiments['identifier'] = experiments['identifier'].map(extract_identifier)
experiments['permId'] = experiments['permId'].map(extract_permid)
experiments['type'] = experiments['type'].map(extract_code)
attrs = ['identifier', 'permId', 'project', 'type',
'registrator', 'registrationDate', 'modifier', 'modificationDate']
if props is not None:
for prop in props:
experiments[prop.upper()] = experiments['properties'].map(lambda x: x.get(prop.upper(), ''))
attrs.append(prop.upper())
exps = experiments[attrs]
return Things(self, 'experiment', exps, 'identifier')
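# Usage sketch for the experiment search above (hypothetical names; assumes an
# authenticated Openbis instance `o` and an experiment property called NAME):
#   experiments = o.get_experiments(project='MY_PROJECT', props=['name'])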
def get_datasets(self,
code=None, type=None, withParents=None, withChildren=None, status=None,
sample=None, experiment=None, project=None, tags=None, props=None, **properties
):
""" Returns a DataFrame object of all dataSets matching the given search criteria.
"""
sub_criteria = []
if code:
sub_criteria.append(_criteria_for_code(code))
if type:
sub_criteria.append(_subcriteria_for_type(type, 'DataSet'))
if withParents:
sub_criteria.append(_subcriteria_for_permid(withParents, 'DataSet', 'Parents'))
if withChildren:
sub_criteria.append(_subcriteria_for_permid(withChildren, 'DataSet', 'Children'))
if sample:
sub_criteria.append(_subcriteria_for_code(sample, 'Sample'))
if experiment:
sub_criteria.append(_subcriteria_for_code(experiment, 'Experiment'))
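# searching by project only works through the experiment: the project
# criteria are nested inside an experiment sub-criteria below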
if project:
exp_crit = _subcriteria_for_code(experiment, 'Experiment')
proj_crit = _subcriteria_for_code(project, 'Project')
exp_crit['criteria'] = []
exp_crit['criteria'].append(proj_crit)
sub_criteria.append(exp_crit)
if tags:
sub_criteria.append(_subcriteria_for_tags(tags))
if status:
sub_criteria.append(_subcriteria_for_status(status))
if properties is not None:
for prop in properties:
sub_criteria.append(_subcriteria_for_properties(prop, properties[prop]))
criteria = {
"criteria": sub_criteria,
"@type": "as.dto.dataset.search.DataSetSearchCriteria",
"operator": "AND"
}
fetchopts = {
"containers": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
"type": {"@type": "as.dto.dataset.fetchoptions.DataSetTypeFetchOptions"}
}
for option in ['tags', 'properties', 'sample', 'experiment', 'physicalData']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchDataSets",
"params": [self.token,
criteria,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
objects = resp['objects']
parse_jackson(objects)
datasets = DataFrame(objects)
datasets['registrationDate'] = datasets['registrationDate'].map(format_timestamp)
datasets['modificationDate'] = datasets['modificationDate'].map(format_timestamp)
datasets['experiment'] = datasets['experiment'].map(extract_nested_identifier)
datasets['sample'] = datasets['sample'].map(extract_nested_identifier)
datasets['type'] = datasets['type'].map(extract_code)
datasets['permId'] = datasets['code']
datasets['location'] = datasets['physicalData'].map(lambda x: x.get('location') if x else '')
attrs = ['permId', 'properties', 'type', 'experiment', 'sample', 'registrationDate', 'modificationDate',
'location']
if props is not None:
for prop in props:
datasets[prop.upper()] = datasets['properties'].map(lambda x: x.get(prop.upper(), ''))
attrs.append(prop.upper())
return Things(self, 'dataset', datasets[attrs], 'permId')
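# Usage sketch (hypothetical codes; assumes an authenticated Openbis instance `o`):
#   datasets = o.get_datasets(sample='/MY_SPACE/MY_SAMPLE', type='RAW_DATA')
#   datasets = o.get_datasets(experiment='MY_EXPERIMENT', props=['name'])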
def get_experiment(self, expId, withAttachments=False, only_data=False):
""" Returns an experiment object for a given identifier (expId).
"""
fetchopts = {
"@type": "as.dto.experiment.fetchoptions.ExperimentFetchOptions",
"type": {
"@type": "as.dto.experiment.fetchoptions.ExperimentTypeFetchOptions",
},
}
search_request = search_request_for_identifier(expId, 'experiment')
for option in ['tags', 'properties', 'attachments', 'project', 'samples']:
fetchopts[option] = fetch_option[option]
if withAttachments:
fetchopts['attachments'] = fetch_option['attachmentsWithContent']
"method": "getExperiments",
"params": [
self.token,
[search_request],
fetchopts
resp = self._post_request(self.as_v3, request)
if len(resp) == 0:
raise ValueError("No such experiment: %s" % expId)
for ident in resp:
if only_data:
return resp[ident]
else:
return Experiment(
openbis_obj = self,
type = self.get_experiment_type(resp[ident]["type"]["code"]),
data = resp[ident]
)
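# Usage sketch (hypothetical identifier; assumes an authenticated Openbis instance `o`):
#   exp = o.get_experiment('/MY_SPACE/MY_PROJECT/MY_EXPERIMENT')
#   raw = o.get_experiment('/MY_SPACE/MY_PROJECT/MY_EXPERIMENT', only_data=True)  # plain dict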
def new_experiment(self, type, code, props=None, **kwargs):
""" Creates a new experiment of a given experiment type.
"""
return Experiment(
openbis_obj = self,
type = self.get_experiment_type(type),
data = None,
props = props,
code = code,
**kwargs
)
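# Usage sketch (hypothetical type and code; the object is created locally and
# still has to be saved to the server, assuming Experiment offers save()):
#   exp = o.new_experiment(type='DEFAULT_EXPERIMENT', code='MY_NEW_EXPERIMENT')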
def update_experiment(self, experimentId, properties=None, tagIds=None, attachments=None):
params = {
"experimentId": {
"permId": experimentId,
"@type": "as.dto.experiment.id.ExperimentPermId"
},
"@type": "as.dto.experiment.update.ExperimentUpdate"
}
if properties is not None:
params["properties"] = properties
if tagIds is not None:
params["tagIds"] = tagIds
if attachments is not None:
params["attachments"] = attachments
request = {
"method": "updateExperiments",
"params": [
self.token,
[params]
]
}
self._post_request(self.as_v3, request)
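# Usage sketch (hypothetical permId; properties is a plain dict of
# property code -> value):
#   o.update_experiment('20170101000000000-1', properties={'DESCRIPTION': 'updated'})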
def create_sample(self, space_ident, code, type,
project_ident=None, experiment_ident=None, properties=None, attachments=None, tags=None):
tagIds = _create_tagIds(tags)
typeId = _create_typeId(type)
projectId = _create_projectId(project_ident)
experimentId = _create_experimentId(experiment_ident)
if properties is None:
properties = {}
request = {
"method": "createSamples",
"params": [
self.token,
[
{
"properties": properties,
"code": code,
"typeId": typeId,
"projectId": projectId,
"experimentId": experimentId,
"tagIds": tagIds,
"attachments": attachments,
"@type": "as.dto.sample.create.SampleCreation",
}
]
],
}
resp = self._post_request(self.as_v3, request)
return self.get_sample(resp[0]['permId'])
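# Usage sketch (hypothetical space, code and sample type):
#   sample = o.create_sample('MY_SPACE', 'MY_SAMPLE', 'UNKNOWN',
#       properties={'DESCRIPTION': 'created via pybis'})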
def create_external_data_management_system(self, code, label, address, address_type='FILE_SYSTEM'):
"""Create an external DMS.
:param code: An openBIS code for the external DMS.
:param label: A human-readable label.
:param address: The address for accessing the external DMS. E.g., a URL.
Chandrasekhar Ramakrishnan
committed
:param address_type: One of OPENBIS, URL, or FILE_SYSTEM
Chandrasekhar Ramakrishnan
committed
:return:
"""
request = {
"method": "createExternalDataManagementSystems",
"params": [
self.token,
[
{
"code": code,
"label": label,
"addressType": address_type,
"address": address,
"@type": "as.dto.externaldms.create.ExternalDmsCreation",
}
]
],
}
resp = self._post_request(self.as_v3, request)
return self.get_external_data_management_system(resp[0]['permId'])
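# Usage sketch (hypothetical values):
#   dms = o.create_external_data_management_system(
#       'MY_DMS', 'my file server', 'host:/path/to/data', address_type='FILE_SYSTEM')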
def update_sample(self, sampleId, space=None, project=None, experiment=None,
parents=None, children=None, components=None, properties=None, tagIds=None, attachments=None):
params = {
"sampleId": {
"permId": sampleId,
"@type": "as.dto.sample.id.SamplePermId"
},
"@type": "as.dto.sample.update.SampleUpdate"
}
if space is not None:
params['spaceId'] = space
if project is not None:
params['projectId'] = project
if properties is not None:
params["properties"] = properties
if tagIds is not None:
params["tagIds"] = tagIds
if attachments is not None:
params["attachments"] = attachments
request = {
"method": "updateSamples",
"params": [
self.token,
[params]
]
}
self._post_request(self.as_v3, request)
def delete_entity(self, entity, permid, reason, capitalize=True):
"""Deletes Spaces, Projects, Experiments, Samples and DataSets
"""
if capitalize:
entity_capitalized = entity.capitalize()
else:
entity_capitalized = entity
entity_type = "as.dto.{}.id.{}PermId".format(entity.lower(), entity_capitalized)
request = {
"method": "delete" + entity_capitalized + 's',
"params": [
self.token,
[
{
"permId": permid,
"@type": entity_type
}
],
{
"reason": reason,
"@type": "as.dto.{}.delete.{}DeletionOptions".format(entity.lower(), entity_capitalized)
}
]
}
self._post_request(self.as_v3, request)
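# Usage sketch (hypothetical permIds). capitalize=False is for entities whose
# camel-case name differs from str.capitalize(), e.g. 'DataSet' vs. 'Dataset':
#   o.delete_entity('sample', '20170101000000000-1', 'created by mistake')
#   o.delete_entity('DataSet', '20170101000000000-2', 'duplicate', capitalize=False)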
def get_deletions(self):
request = {
"method": "searchDeletions",
"params": [
self.token,
{},
{
"deletedObjects": {
"@type": "as.dto.deletion.fetchoptions.DeletedObjectFetchOptions"
}
}
]
}
resp = self._post_request(self.as_v3, request)
objects = resp['objects']
parse_jackson(objects)
new_objs = []
for value in objects:
del_objs = extract_deletion(value)
if len(del_objs) > 0:
new_objs.extend(del_objs)
return DataFrame(new_objs)
def new_project(self, space, code, description=None, **kwargs):
return Project(self, None, space=space, code=code, description=description, **kwargs)
def _gen_fetchoptions(self, options):
fo = {}
for option in options:
fo[option] = fetch_option[option]
return fo
def get_project(self, projectId):
options = ['space', 'registrator', 'modifier', 'attachments']
if is_identifier(projectId):
request = self._create_get_request(
'getProjects', 'project', projectId, options
)
resp = self._post_request(self.as_v3, request)
return Project(self, resp[projectId])
else:
search_criteria = _gen_search_criteria({
'project': 'Project',
'operator': 'AND',
'code': projectId
})
fo = self._gen_fetchoptions(options)
request = {
"method": "searchProjects",
"params": [self.token, search_criteria, fo]
}
resp = self._post_request(self.as_v3, request)
if len(resp['objects']) == 0:
raise ValueError("No such project: %s" % projectId)
return Project(self, resp['objects'][0])
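# Both lookup styles work (hypothetical names): a full identifier takes the
# direct getProjects route, a bare code goes through searchProjects:
#   project = o.get_project('/MY_SPACE/MY_PROJECT')
#   project = o.get_project('MY_PROJECT')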
def get_projects(self, space=None, code=None):
""" Get a list of all available projects (DataFrame object).
"""
sub_criteria = []
if space:
sub_criteria.append(_subcriteria_for_code(space, 'space'))
if code:
sub_criteria.append(_criteria_for_code(code))
criteria = {
"criteria": sub_criteria,
"@type": "as.dto.project.search.ProjectSearchCriteria",
"operator": "AND"
}
fetchopts = {"@type": "as.dto.project.fetchoptions.ProjectFetchOptions"}
for option in ['registrator', 'modifier', 'leader']:
fetchopts[option] = fetch_option[option]
request = {
"method": "searchProjects",
"params": [self.token,
criteria,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
if resp is not None:
objects = resp['objects']
parse_jackson(objects)
projects = DataFrame(objects)
if len(projects) == 0:
raise ValueError("No projects found!")
projects['registrationDate'] = projects['registrationDate'].map(format_timestamp)
projects['modificationDate'] = projects['modificationDate'].map(format_timestamp)
projects['leader'] = projects['leader'].map(extract_person)
projects['registrator'] = projects['registrator'].map(extract_person)
projects['modifier'] = projects['modifier'].map(extract_person)
projects['permId'] = projects['permId'].map(extract_permid)
projects['identifier'] = projects['identifier'].map(extract_identifier)
pros = projects[['identifier', 'permId', 'leader', 'registrator', 'registrationDate',
'modifier', 'modificationDate']]
return Things(self, 'project', pros, 'identifier')
else:
raise ValueError("No projects found!")
def _create_get_request(self, method_name, entity, permids, options):
if not isinstance(permids, list):
permids = [permids]
type = "as.dto.{}.id.{}".format(entity.lower(), entity.capitalize())
search_params = []
for permid in permids:
# decide if we got a permId or an identifier
match = re.match('/', permid)
if match:
search_params.append(
{"identifier": permid, "@type": type + 'Identifier'}
)
else:
search_params.append(
{"permId": permid, "@type": type + 'PermId'}
)
fo = {}
for option in options:
fo[option] = fetch_option[option]
request = {
"method": method_name,
"params": [
self.token,
search_params,
fo
],
}
return request
def get_terms(self, vocabulary=None):
""" Returns information about vocabulary, including its controlled vocabulary
search_request = {}
if vocabulary is not None:
search_request = _gen_search_criteria({
"vocabulary": "VocabularyTerm",
"criteria": [{
"vocabulary": "Vocabulary",
"code": vocabulary
}]
})
fetch_options = {
"vocabulary": {"@type": "as.dto.vocabulary.fetchoptions.VocabularyFetchOptions"},
"@type": "as.dto.vocabulary.fetchoptions.VocabularyTermFetchOptions"
}
request = {
"method": "searchVocabularyTerms",
"params": [self.token, search_request, fetch_options]
}
resp = self._post_request(self.as_v3, request)
parse_jackson(resp)
return Vocabulary(resp)
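# Usage sketch (hypothetical vocabulary code):
#   terms = o.get_terms()                 # all vocabulary terms
#   terms = o.get_terms('MY_VOCABULARY')  # terms of a single vocabulary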
def get_tags(self):
""" Returns a DataFrame of all
"""
request = {
"method": "searchTags",
"params": [self.token, {}, {}]
}
resp = self._post_request(self.as_v3, request)
parse_jackson(resp)
objects = DataFrame(resp['objects'])
objects['registrationDate'] = objects['registrationDate'].map(format_timestamp)
return objects[['code', 'registrationDate']]
def get_sample_types(self, type=None):
""" Returns a list of all available sample types
"""
return self._get_types_of(
"searchSampleTypes",
"Sample",
type,
["generatedCodePrefix"]
)
def get_sample_type(self, type):
try:
return self._get_types_of(
"searchSampleTypes",
"Sample",
type,
["generatedCodePrefix"]
)
except Exception:
raise ValueError("no such sample type: {}".format(type))
def get_experiment_types(self, type=None):
""" Returns a list of all available experiment types
"""
return self._get_types_of(
"searchExperimentTypes",
"Experiment",
type
)
def get_experiment_type(self, type):
try:
return self._get_types_of(
"searchExperimentTypes",
"Experiment",
type
)
except Exception:
raise ValueError("No such experiment type: {}".format(type))
def get_material_types(self, type=None):
""" Returns a list of all available material types
"""
return self._get_types_of("searchMaterialTypes", "Material", type)
def get_material_type(self, type):
try:
return self._get_types_of("searchMaterialTypes", "Material", type)
except Exception:
raise ValueError("No such material type: {}".format(type))
def get_dataset_types(self, type=None):
""" Returns a list (DataFrame object) of all currently available dataset types
"""
return self._get_types_of("searchDataSetTypes", "DataSet", type, optional_attributes=['kind'])
def get_dataset_type(self, type):
try:
return self._get_types_of("searchDataSetTypes", "DataSet", type, optional_attributes=['kind'])
except Exception:
raise ValueError("No such dataSet type: {}".format(type))
def _get_types_of(self, method_name, entity, type_name=None, additional_attributes=[], optional_attributes=[]):
""" Returns a list of all available types of an entity.
If the name of the entity-type is given, it returns a PropertyAssignments object.
"""
search_request = {}
fetch_options = {}
if type_name is not None:
search_request = _gen_search_criteria({
entity.lower(): entity + "Type",
"operator": "AND",
"code": type_name
})
fetch_options = {
"@type": "as.dto.{}.fetchoptions.{}TypeFetchOptions".format(
entity.lower(), entity
)
}
fetch_options['propertyAssignments'] = fetch_option['propertyAssignments']
request = {
"method": method_name,
"params": [self.token, search_request, fetch_options],
}
resp = self._post_request(self.as_v3, request)
parse_jackson(resp)
if type_name is not None and len(resp['objects']) == 1:
return PropertyAssignments(self, resp['objects'][0])
if len(resp['objects']) >= 1:
types = DataFrame(resp['objects'])
types['modificationDate'] = types['modificationDate'].map(format_timestamp)
attributes = self._get_attributes(type_name, types, additional_attributes, optional_attributes)
return Things(self, entity.lower() + '_type', types[attributes])
def _get_attributes(self, type_name, types, additional_attributes, optional_attributes):
attributes = ['code', 'description'] + additional_attributes
attributes += [attribute for attribute in optional_attributes if attribute in types]
attributes += ['modificationDate']
if type_name is not None:
attributes += ['propertyAssignments']
return attributes
def is_session_active(self):
""" checks whether a session is still active. Returns true or false.
"""
return self.is_token_valid(self.token)
def is_token_valid(self, token=None):
"""Check if the connection to openBIS is valid.
This method is useful to check if a token is still valid or if it has timed out,
requiring the user to login again.
:return: Return True if the token is valid, False if it is not valid.
"""
if token is None:
token = self.token
if token is None:
return False
request = {
"method": "isSessionActive",
"params": [token],
}
resp = self._post_request(self.as_v1, request)
return resp
def get_dataset(self, permid, only_data=False):
"""fetch a dataset and some metadata attached to it:
- properties
- sample
- parents
- children
- containers
- dataStore
- physicalData
- linkedData
:return: a DataSet object
"""
criteria = [{
"permId": permid,
"@type": "as.dto.dataset.id.DataSetPermId"
}]
fetchopts = {
"parents": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
"children": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
"containers": {"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"},
"type": {"@type": "as.dto.dataset.fetchoptions.DataSetTypeFetchOptions"},
}
for option in ['tags', 'properties', 'dataStore', 'physicalData', 'linkedData',
'experiment', 'sample']:
fetchopts[option] = fetch_option[option]
request = {
"method": "getDataSets",
"params": [self.token,
criteria,
fetchopts,
],
}
resp = self._post_request(self.as_v3, request)
if resp is None or len(resp) == 0:
raise ValueError('no such dataset found: ' + permid)
for permid in resp:
if only_data:
return resp[permid]
else:
return DataSet(
self,
type=self.get_dataset_type(resp[permid]["type"]["code"]),
data=resp[permid]
)
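# Usage sketch (hypothetical permId):
#   ds = o.get_dataset('20170101000000000-3')
#   raw = o.get_dataset('20170101000000000-3', only_data=True)  # plain dict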
def get_sample(self, sample_ident, only_data=False, withAttachments=False):
"""Retrieve metadata for the sample.
Get metadata for the sample and any directly connected parents of the sample to allow access
to the same information visible in the ELN UI. The metadata will be on the file system.
:param sample_identifiers: A list of sample identifiers to retrieve.
"""
search_request = search_request_for_identifier(sample_ident, 'sample')
fetchopts = {"type": {"@type": "as.dto.sample.fetchoptions.SampleTypeFetchOptions"}}
for option in ['tags', 'properties', 'attachments', 'space', 'experiment', 'registrator', 'dataSets']:
fetchopts[option] = fetch_option[option]
if withAttachments:
fetchopts['attachments'] = fetch_option['attachmentsWithContent']
for key in ['parents','children','container','components']:
fetchopts[key] = {"@type": "as.dto.sample.fetchoptions.SampleFetchOptions"}
sample_request = {
"method": "getSamples",
"params": [
self.token,
[search_request],
fetchopts
],
}
resp = self._post_request(self.as_v3, sample_request)
parse_jackson(resp)
if resp is None or len(resp) == 0:
raise ValueError('no such sample found: ' + sample_ident)
for sample_ident in resp:
if only_data:
return resp[sample_ident]
else:
return Sample(self, self.get_sample_type(resp[sample_ident]["type"]["code"]), resp[sample_ident])
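# Usage sketch (hypothetical identifier/permId):
#   sample = o.get_sample('/MY_SPACE/MY_SAMPLE')
#   sample = o.get_sample('20170101000000000-4', withAttachments=True)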
def get_external_data_management_system(self, permId, only_data=False):
"""Retrieve metadata for the external data management system.
:param permId: A permId for an external DMS.
:param only_data: Return the result data as a hash-map, not an object.
"""
request = {
"method": "getExternalDataManagementSystems",
"params": [
self.token,
[{
"@type": "as.dto.externaldms.id.ExternalDmsPermId",
"permId": permId
}],
{},
],
}
resp = self._post_request(self.as_v3, request)
parse_jackson(resp)
if resp is None or len(resp) == 0:
raise ValueError('no such external DMS found: ' + permId)
else:
for ident in resp:
if only_data:
return resp[ident]
else:
return ExternalDMS(self, resp[ident])
def new_space(self, **kwargs):
return Space(self, None, **kwargs)
def new_analysis(self, name, description=None, sample=None, dss_code=None, result_files=None,
notebook_files=None, parents=None):
""" An analysis contains the Jupyter notebook file(s) and some result files.
Technically this method involves uploading files to the session workspace
and activating the dropbox aka dataset ingestion service "jupyter-uploader-api"
"""
if dss_code is None:
dss_code = self.get_datastores()['code'][0]
datastore_url = self._get_dss_url(dss_code)
# if a sample identifier was given, use it as a string.
# if a sample object was given, take its identifier
sampleId = self.sample_to_sample_id(sample)
parentIds = []
if parents is not None:
if not isinstance(parents, list):
parents = [parents]
for parent in parents:
parentIds.append(parent.permId)
folder = time.strftime('%Y-%m-%d_%H-%M-%S')
data_sets = []
if notebook_files is not None:
notebooks_folder = os.path.join(folder, 'notebook_files')
self.upload_files(
datastore_url=datastore_url,
files=notebook_files,
folder=notebooks_folder,
wait_until_finished=True
)
data_sets.append({
"dataSetType": "JUPYTER_NOTEBOOk",
"sessionWorkspaceFolder": notebooks_folder,
"fileNames": notebook_files,
"properties": {}
})
if result_files is not None:
results_folder = os.path.join(folder, 'result_files')
self.upload_files(
datastore_url=datastore_url,
files=result_files,
folder=results_folder,
wait_until_finished=True
)
data_sets.append({
"dataSetType": "JUPYTER_RESULT",
"sessionWorkspaceFolder": results_folder,
"fileNames": result_files,
"properties": {}
})
"method": "createReportFromAggregationService",
"params": [
self.token,
dss_code,
{
"sampleId": sampleId,
"parentIds": parentIds,
"containers": [{
"dataSetType": "JUPYTER_CONTAINER",
"properties": {
"NAME": name,
"DESCRIPTION": description
}
}],
"dataSets": data_sets,
}
],
}
resp = self._post_request(self.reg_v1, request)
try:
if resp['rows'][0][0]['value'] == 'OK':
return resp['rows'][0][1]['value']
except Exception:
return resp
def new_git_data_set(self, data_set_type, path, commit_id, repository_id, dms, sample=None, properties={},
dss_code=None, parents=None, data_set_code=None, contents=[]):
""" Create a link data set.
:param data_set_type: The type of the data set
:param path: The path to the git repository
:param commit_id: The git commit id
:param repository_id: The git repository id - same for copies
:param dms: An external data management system object or external_dms_id
:param sample: A sample object or sample id.
:param dss_code: Code for the DSS -- defaults to the first dss if none is supplied.
:param properties: Properties for the data set.
:param parents: Parents for the data set.
:param data_set_code: A data set code -- used if provided, otherwise generated on the server
:param contents: A list of dicts that describe the contents:
{'file_length': [file length],
'crc32': [crc32 checksum],
'directory': [is path a directory?]
'path': [the relative path string]}
:return: A DataSet object
"""
return pbds.GitDataSetCreation(self, data_set_type, path, commit_id, repository_id, dms, sample,
properties, dss_code, parents, data_set_code, contents).new_git_data_set()
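# Usage sketch (hypothetical values; `dms` as returned by
# create_external_data_management_system above):
#   contents = [{'file_length': 1234, 'crc32': 0x1234abcd,
#                'directory': False, 'path': 'data/measurements.csv'}]
#   ds = o.new_git_data_set('GIT_REPO', '/path/to/repo', 'abc123', 'repo-1', dms,
#                           contents=contents)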
@staticmethod
def sample_to_sample_id(sample):
"""Take sample which may be a string or object and return an identifier for it."""
sample_id = None
if sample is not None:
if isinstance(sample, str):
if is_identifier(sample):
sample_id = {
"identifier": sample,
"@type": "as.dto.sample.id.SampleIdentifier"
}
else:
sample_id = {
"permId": sample,
"@type": "as.dto.sample.id.SamplePermId"
}
else:
sample_id = {
"identifier": sample.identifier,
"@type": "as.dto.sample.id.SampleIdentifier"
}
return sample_id
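# The three possible shapes (hypothetical values):
#   '/MY_SPACE/MY_SAMPLE'  -> SampleIdentifier
#   '20170101000000000-5'  -> SamplePermId
#   a Sample object        -> SampleIdentifier from its .identifier attribute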
@staticmethod
def data_set_to_data_set_id(data_set):
if isinstance(data_set, str):
code = data_set
else:
code = data_set.permId
return {
"permId": code,
"@type": "as.dto.dataset.id.DataSetPermId"
}
def external_data_managment_system_to_dms_id(self, dms):
if isinstance(dms, str):
dms_id = {
"permId": dms,
"@type": "as.dto.externaldms.id.ExternalDmsPermId"
}
else:
dms_id = {
"permId": dms.code,
"@type": "as.dto.externaldms.id.ExternalDmsPermId"
}
return dms_id
def new_sample(self, type, props=None, **kwargs):
""" Creates a new sample of a given sample type.
Swen Vermeul
committed
return Sample(self, self.get_sample_type(type), None, props, **kwargs)
def new_dataset(self, type=None, files=None, props=None, folder=None, **kwargs):
""" Creates a new dataset of a given sample type.
"""
if files is None:
raise ValueError('please provide at least one file')
elif isinstance(files, str):
files = [files]
type_obj = self.get_dataset_type(type.upper())
return DataSet(self, type=type_obj, files=files, folder=folder, props=props, **kwargs)
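# Usage sketch (hypothetical type and file; the dataset is created locally and
# still has to be saved/uploaded, assuming DataSet offers save()):
#   ds = o.new_dataset(type='RAW_DATA', files=['measurements.csv'],
#                      props={'description': 'my data'})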
def _get_dss_url(self, dss_code=None):
""" internal method to get the downloadURL of a datastore.
"""
dss = self.get_datastores()
if dss_code is None:
return dss['downloadUrl'][0]
else:
return dss[dss['code'] == dss_code]['downloadUrl'][0]
def upload_files(self, datastore_url=None, files=None, folder=None, wait_until_finished=False):
if datastore_url is None:
datastore_url = self._get_dss_url()
if files is None:
raise ValueError("Please provide a filename.")
if folder is None:
# create a unique foldername
folder = time.strftime('%Y-%m-%d_%H-%M-%S')
if isinstance(files, str):
files = [files]
self.files = files
self.startByte = 0
self.endByte = 0
# define a queue to handle the upload threads
queue = DataSetUploadQueue()
real_files = []
for filename in files:
if os.path.isdir(filename):
real_files.extend(
[os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(filename)) for f in fn])
else:
real_files.append(os.path.join(filename))
# compose the upload-URL and put URL and filename in the upload queue
for filename in real_files:
file_in_wsp = os.path.join(folder, filename)
self.files_in_wsp.append(file_in_wsp)
upload_url = (
datastore_url + '/session_workspace_file_upload'
+ '?filename=' + os.path.join(folder, filename)
+ '&id=1'
+ '&startByte=0&endByte=0'
+ '&sessionID=' + self.token
)
queue.put([upload_url, filename, self.verify_certificates])
# wait until all files have uploaded
if wait_until_finished:
queue.join()
# return files with full path in session workspace
return self.files_in_wsp
class DataSetUploadQueue():
def __init__(self, workers=20):
# maximum files to be uploaded at once
self.upload_queue = Queue()
# define number of threads and start them
for t in range(workers):
t = Thread(target=self.upload_file)
t.daemon = True
t.start()
def put(self, things):
""" expects a list [url, filename] which is put into the upload queue
"""
self.upload_queue.put(things)