#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
pybis.py
A class with methods for interacting with openBIS.
Created by Chandrasekhar Ramakrishnan on 2016-05-10.
Copyright (c) 2016 ETH Zuerich. All rights reserved.
"""
import os
import re
import json
from urllib.parse import urlparse

import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
import time
from datetime import datetime
import pandas as pd
from pandas import DataFrame, Series
import threading
from threading import Thread
from queue import Queue
DROPBOX_PLUGIN = "jupyter-uploader-api"
def format_timestamp(ts):
return datetime.fromtimestamp(round(ts/1000)).strftime('%Y-%m-%d %H:%M:%S')
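# Example (a sketch): openBIS returns timestamps as milliseconds since the epoch;
# format_timestamp(1463055600000) yields a local-time string of the form 'YYYY-MM-DD HH:MM:SS'.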
class Openbis:
"""Interface for communicating with openBIS. A current version of openBIS is needed (at
least version 16.05).
"""
def __init__(self, url='https://localhost:8443', verify_certificates=True, token=None):
"""Initialize an interface to openBIS with information necessary to connect to the server.
url_obj = urlparse(url)
if url_obj.netloc is None:
raise ValueError("please provide the url in this format: https://openbis.host.ch:8443")
self.url_obj = url_obj
self.url = url_obj.geturl()
self.port = url_obj.port
self.hostname = url_obj.hostname
self.as_v3 = '/openbis/openbis/rmi-application-server-v3.json'
self.as_v1 = '/openbis/openbis/rmi-general-information-v1.json'
self.reg_v1 = '/openbis/openbis/rmi-query-v1.json'
self.verify_certificates = verify_certificates
self.token = token
        self.spaces = None
        self.dataset_types = None
        self.sample_types = None
        self.datastores = []
self.files_in_wsp = []
self.token_path = None
        # use an existing token, if available
        if self.token is None:
            self.token = self._get_cached_token()

    def _get_cached_token(self):
        """Read the token from the cache and return it if it is there, otherwise return None.
        If the token is not valid anymore, delete it.
        """
        token_path = self.gen_token_path()
        if not os.path.exists(token_path):
            return None

        try:
            with open(token_path) as f:
                token = f.read()
                if not self.is_token_valid(token):
                    os.remove(token_path)
                    return None
                else:
                    return token
        except FileNotFoundError:
            return None
def gen_token_path(self, parent_folder=None):
"""generates a path to the token file.
The token is usually saved in a file called
~/.pybis/hostname.token
"""
if parent_folder is None:
# save token under ~/.pybis folder
parent_folder = os.path.join(
os.path.expanduser("~"),
'.pybis'
)
path = os.path.join(parent_folder, self.hostname + '.token')
return path
def save_token(self, token=None, parent_folder=None):
""" saves the session token to the disk, usually here: ~/.pybis/hostname.token
"""
        if token is None:
            token = self.token

        if parent_folder is None:
            token_path = self.gen_token_path()
        else:
            token_path = self.gen_token_path(parent_folder)
# create the necessary directories, if they don't exist yet
os.makedirs(os.path.dirname(token_path), exist_ok=True)
with open(token_path, 'w') as f:
f.write(token)
self.token_path = token_path
def delete_token(self, token_path=None):
if token_path is None:
token_path = self.token_path
os.remove(token_path)
def _post_request(self, resource, data):
""" internal method, used to handle all post requests and serializing / deserializing
data
"""
resp = requests.post(
self.url + resource,
json.dumps(data),
verify=self.verify_certificates
)
        if resp.ok:
            data = resp.json()
            if 'error' in data:
                raise ValueError('an error has occurred: ' + data['error']['message'])
            elif 'result' in data:
                return data['result']
            else:
                raise ValueError('request did not return either result nor error')
        else:
            raise ValueError('general error while performing post request')
""" Log out of openBIS. After that, the session token is no longer valid.
"""
if self.token is None:
return
logout_request = {
"method":"logout",
"params":[self.token],
"id":"1",
"jsonrpc":"2.0"
}
resp = self._post_request(self.as_v3, logout_request)
self.token = None
self.token_path = None
def login(self, username=None, password=None, save_token=False):
"""Log into openBIS.
Expects a username and a password and updates the token (session-ID).
The token is then used for every request.
        Clients may want to store the session token in a credentials store after a successful login.
        Raises a ValueError with the error message if the login failed.
"""
login_request = {
"method":"login",
"params":[username, password],
"id":"1",
"jsonrpc":"2.0"
}
result = self._post_request(self.as_v3, login_request)
if result is None:
raise ValueError("login to openBIS failed")
else:
self.token = result
if save_token:
self.save_token()
return self.token
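    # Example usage (a minimal sketch; the URL and the credentials are placeholders,
    # not part of this module):
    #
    #   o = Openbis('https://openbis.example.com:8443')
    #   token = o.login('username', 'password', save_token=True)
    #   ...
    #   o.logout()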
def get_datastores(self):
""" Get a list of all available datastores. Usually there is only one, but in some cases
        there might be more. If you upload a file, you need to specify the datastore you want
the file uploaded to.
"""
if len(self.datastores) == 0:
request = {
"method": "listDataStores",
"params": [ self.token ],
"id": "1",
"jsonrpc": "2.0"
}
resp = self._post_request(self.as_v1, request)
if resp is not None:
self.datastores = DataFrame(resp)[['code','downloadUrl', 'hostUrl']]
return self.datastores
else:
raise ValueError("No datastore found!")
else:
return self.datastores
def get_spaces(self, refresh=None):
""" Get a list of all available spaces (DataFrame object). To create a sample or a
dataset, you need to specify in which space it should live.
"""
if self.spaces is None or refresh is not None:
request = {
"method": "searchSpaces",
"params": [ self.token, {}, {} ],
"id": "1",
"jsonrpc": "2.0"
}
            resp = self._post_request(self.as_v3, request)
            if resp is not None:
                spaces = DataFrame(resp['objects'])
                spaces['registrationDate'] = spaces['registrationDate'].map(format_timestamp)
                spaces['modificationDate'] = spaces['modificationDate'].map(format_timestamp)
                self.spaces = spaces[['code', 'description', 'registrationDate', 'modificationDate']]
                return self.spaces
            else:
                raise ValueError("No spaces found!")
        else:
            return self.spaces
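    # Example (a sketch, assuming 'o' is a logged-in Openbis instance): get_spaces()
    # returns a pandas DataFrame, so the usual DataFrame operations apply, e.g.
    #
    #   spaces = o.get_spaces()
    #   print(spaces[['code', 'description']])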
def get_space(self, spaceId):
""" Returns a Space object for a given identifier (spaceId).
"""
request = {
"method": "getSpaces",
"params": [
self.token,
[{
"@id": 0,
"permId": spaceId,
"@type": "as.dto.space.id.SpacePermId"
}],
{
"@id": 0,
"@type": "as.dto.space.fetchoptions.SpaceFetchOptions",
"registrator": None,
"samples": None,
"projects": None,
"sort": None
}
],
"id": "1",
"jsonrpc": "2.0"
}
resp = self._post_request(self.as_v3, request)
return Space(self, resp[spaceId])
def get_sample_types(self, refresh=None):
""" Returns a list of all available sample types as a DataFrame object.
"""
if self.sample_types is None or refresh is not None:
request = {
"method": "searchSampleTypes",
"params": [ self.token, {}, {} ],
"id": "1",
"jsonrpc": "2.0"
}
resp = self._post_request(self.as_v3, request)
if resp is not None:
sample_types = DataFrame(resp['objects'])
sample_types['modificationDate'] = sample_types['modificationDate'].map(format_timestamp)
self.sample_types = sample_types[['code', 'description', 'modificationDate']]
return self.sample_types
return DataFrame()
else:
return self.sample_types
def get_dataset_types(self, refresh=None):
""" Returns a list (DataFrame object) of all currently available dataset types
"""
if self.dataset_types is None or refresh is not None:
request = {
"method": "searchDataSetTypes",
"params": [ self.token, {}, {} ],
"id": "1",
"jsonrpc": "2.0"
}
resp = self._post_request(self.as_v3, request)
if resp is not None:
dataset_types = DataFrame(resp['objects'])
dataset_types['modificationDate']= dataset_types['modificationDate'].map(format_timestamp)
self.dataset_types = dataset_types[['code', 'description', 'modificationDate']]
return self.dataset_types
else:
raise ValueError("No dataset types found!")
else:
return self.dataset_types
def is_session_active(self):
""" checks whether a session is still active. Returns true or false.
"""
return self.is_token_valid(self.token)
def is_token_valid(self, token=None):
"""Check if the connection to openBIS is valid.
This method is useful to check if a token is still valid or if it has timed out,
requiring the user to login again.
:return: Return True if the token is valid, False if it is not valid.
"""
if token is None:
token = self.token
if token is None:
return False
"params": [ token ],
resp = self._post_request(self.as_v1, request)
"""fetch a dataset and some metadata attached to it:
- properties
- sample
- parents
- children
- containers
- dataStore
- physicalData
- linkedData
:return: a DataSet object
"""
dataset_request = {
"method": "getDataSets",
"params": [
self.token,
[
{
"permId": permid,
"@type": "as.dto.dataset.id.DataSetPermId"
}
],
{
"parents": {
"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"
},
"children": {
"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"
},
"containers": {
"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"
},
"physicalData": {
"@type": "as.dto.dataset.fetchoptions.PhysicalDataFetchOptions"
},
"linkedData": {
"@type": "as.dto.dataset.fetchoptions.LinkedDataFetchOptions",
},
"dataStore": {
"@type": "as.dto.datastore.fetchoptions.DataStoreFetchOptions",
},
"sample": {
"@type": "as.dto.sample.fetchoptions.SampleFetchOptions"
},
"properties": {
"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"
},
"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions"
}
],
"id": permid,
resp = self._post_request(self.as_v3, dataset_request)
if resp is not None:
for permid in resp:
return DataSet(self, permid, resp[permid])
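    # Example (a sketch; the permId is a placeholder): fetch a dataset by its permId
    # and inspect the files it contains:
    #
    #   ds = o.get_dataset('20160101120000000-42')
    #   print(ds.file_list())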
def get_sample(self, sample_ident):
"""Retrieve metadata for the sample.
Get metadata for the sample and any directly connected parents of the sample to allow access
to the same information visible in the ELN UI. The metadata will be on the file system.
:param sample_identifiers: A list of sample identifiers to retrieve.
"""
if self.token is None:
raise ValueError("Please login first")
search_request = None
# assume we got a sample identifier e.g. /TEST/TEST-SAMPLE
match = re.match('/', sample_ident)
if match:
search_request = {
"identifier": sample_ident,
"@type": "as.dto.sample.id.SampleIdentifier"
}
else:
# look if we got a PermID eg. 234567654345-123
            match = re.match(r'\d+\-\d+', sample_ident)
            if match:
                search_request = {
                    "permId": sample_ident,
                    "@type": "as.dto.sample.id.SamplePermId"
                }
            else:
                raise ValueError(
                    '"' + sample_ident + '" is neither a Sample Identifier nor a PermID'
                )
"type": {
"@type": "as.dto.sample.fetchoptions.SampleTypeFetchOptions"
},
"properties": {
"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"
},
"parents": {
"@type": "as.dto.sample.fetchoptions.SampleFetchOptions",
"properties": {
"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"
}
},
"children": {
"@type": "as.dto.sample.fetchoptions.SampleFetchOptions",
"properties": {
"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"
}
},
"dataSets": {
"@type": "as.dto.dataset.fetchoptions.DataSetFetchOptions",
"properties": {
"@type": "as.dto.property.fetchoptions.PropertyFetchOptions"
                }
            },
"registrator": {
"@type": "as.dto.person.fetchoptions.PersonFetchOptions",
},
"tags": {
"@type": "as.dto.tag.fetchoptions.TagFetchOptions",
            },
        }
sample_request = {
"method": "getSamples",
"params": [
self.token,
[
search_request,
],
                fetch_options
            ],
            "id": sample_ident,
            "jsonrpc": "2.0"
        }
resp = self._post_request(self.as_v3, sample_request)
if resp is not None:
for sample_ident in resp:
return Sample(self, resp[sample_ident])
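    # Example (a sketch; identifiers are placeholders): a sample can be fetched either
    # by its identifier or by its permId:
    #
    #   s = o.get_sample('/MY_SPACE/MY_SAMPLE')
    #   s = o.get_sample('20160101120000000-123')
    #   print(s.get_datasets())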
def delete_sample(self, permid, reason):
sample_delete_request = {
"method": "deleteSamples",
"params": [
self.token,
[
{
"permId": permid,
"@type": "as.dto.sample.id.SamplePermId"
}
],
{
"reason": reason,
"@type": "as.dto.sample.delete.SampleDeletionOptions"
}
],
"id": "1",
"jsonrpc": "2.0"
}
resp = self._post_request(self.as_v3, sample_delete_request)
return
def new_space(self, name, description=None):
""" Creates a new space in the openBIS instance. Returns a list of all spaces
"""
request = {
"method": "createSpaces",
"params": [
self.token,
[ {
"@id": 0,
"code": name,
"description": description,
"@type": "as.dto.space.create.SpaceCreation"
} ]
],
"id": "1",
"jsonrpc": "2.0"
}
resp = self._post_request(self.as_v3, request)
return self.get_spaces(refresh=True)
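    # Example (a sketch; the space name is a placeholder):
    #
    #   o.new_space('MY_NEW_SPACE', description='space for test data')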
def new_analysis(self, name, description=None, sample=None, dss_code=None, result_files=None,
notebook_files=None, parents=[]):
""" An analysis contains the Jupyter notebook file(s) and some result files.
Technically this method involves uploading files to the session workspace
and activating the dropbox aka dataset ingestion service "jupyter-uploader-api"
"""
if dss_code is None:
dss_code = self.get_datastores()['code'][0]
# if a sample identifier was given, use it as a string.
# if a sample object was given, take its identifier
# TODO: handle permId's
sample_identifier = None
if isinstance(sample, str):
sample_identifier = sample
else:
sample_identifier = sample.ident
folder = time.strftime('%Y-%m-%d_%H-%M-%S')
        datastore_url = self._get_dss_url(dss_code)
data_sets = []
if notebook_files is not None:
notebooks_folder = os.path.join(folder, 'notebook_files')
self.upload_files(
datastore_url = datastore_url,
files=notebook_files,
folder= notebooks_folder,
wait_until_finished=True
)
data_sets.append({
"dataSetType" : "JUPYTER_NOTEBOOk",
"sessionWorkspaceFolder": notebooks_folder,
"fileNames" : notebook_files,
"properties" : {}
})
if result_files is not None:
results_folder = os.path.join(folder, 'result_files')
self.upload_files(
datastore_url = datastore_url,
files=result_files,
folder=results_folder,
wait_until_finished=True
)
data_sets.append({
"dataSetType" : "JUPYTER_RESULT",
"sessionWorkspaceFolder" : results_folder,
"fileNames" : result_files,
"properties" : {}
})
request = {
"method": "createReportFromAggregationService",
"params": [
self.token,
dss_code,
                DROPBOX_PLUGIN,
                {
                    "identifier" : sample_identifier,
                    "containers" : [
                        {
                            "dataSetType" : "JUPYTER_CONTAINER",
                            "properties" : {
                                "NAME" : name,
                                "DESCRIPTION" : description
                            }
                        }
                    ],
                    "dataSets" : data_sets,
                    "parents" : parents,
                }
],
"id": "1",
"jsonrpc": "2.0"
}
resp = self._post_request(self.reg_v1, request)
return resp
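    # Example (a sketch; file names and the sample identifier are placeholders):
    #
    #   o.new_analysis(
    #       name='my analysis',
    #       description='analysis of experiment X',
    #       sample='/MY_SPACE/MY_SAMPLE',
    #       notebook_files=['analysis.ipynb'],
    #       result_files=['results.csv'],
    #   )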
def new_sample(self, sample_name, space_name, sample_type="UNKNOWN", tags=[]):
""" Creates a new sample of a given sample type ('UNKNOWN' is the default).
"""
if isinstance(tags, str):
tags = [tags]
tag_ids = []
for tag in tags:
tag_dict = {
"code":tag,
"@type":"as.dto.tag.id.TagCode"
}
tag_ids.append(tag_dict)
sample_create_request = {
"method":"createSamples",
"params":[
                self.token,
                [ {
                    "properties":{},
"typeId":{
"permId": sample_type,
"@type":"as.dto.entitytype.id.EntityTypePermId"
},
"code": sample_name,
"spaceId":{
"permId": space_name,
"@type":"as.dto.space.id.SpacePermId"
},
"tagIds":tag_ids,
"@type":"as.dto.sample.create.SampleCreation",
"experimentId":None,
"containerId":None,
"componentIds":None,
"parentIds":None,
"childIds":None,
"attachments":None,
"creationId":None,
"autoGeneratedCode":None
],
"id":"1",
"jsonrpc":"2.0"
}
resp = self._post_request(self.as_v3, sample_create_request)
if 'permId' in resp[0]:
return self.get_sample(resp[0]['permId'])
else:
raise ValueError("error while trying to fetch sample from server: " + str(resp))
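    # Example (a sketch; names are placeholders): create a sample of the default
    # type 'UNKNOWN' in an existing space and attach a tag:
    #
    #   s = o.new_sample('MY_SAMPLE', 'MY_SPACE', tags=['my-tag'])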
def _get_dss_url(self, dss_code=None):
""" internal method to get the downloadURL of a datastore.
"""
dss = self.get_datastores()
if dss_code is None:
return dss['downloadUrl'][0]
else:
return dss[dss['code'] == dss_code]['downloadUrl'][0]
def upload_files(self, datastore_url=None, files=None, folder=None, wait_until_finished=False):
        if datastore_url is None:
            datastore_url = self._get_dss_url()

        if files is None:
            raise ValueError("Please provide a filename.")
if folder is None:
# create a unique foldername
folder = time.strftime('%Y-%m-%d_%H-%M-%S')
if isinstance(files, str):
files = [files]
self.files = files
self.startByte = 0
self.endByte = 0
# define a queue to handle the upload threads
queue = DataSetUploadQueue()
real_files = []
for filename in files:
if os.path.isdir(filename):
real_files.extend([os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(filename)) for f in fn])
else:
real_files.append(os.path.join(filename))
# compose the upload-URL and put URL and filename in the upload queue
for filename in real_files:
file_in_wsp = os.path.join(folder, filename)
self.files_in_wsp.append(file_in_wsp)
upload_url = (
datastore_url + '/session_workspace_file_upload'
+ '?filename=' + os.path.join(folder,filename)
+ '&id=1'
+ '&startByte=0&endByte=0'
+ '&sessionID=' + self.token
)
queue.put([upload_url, filename, self.verify_certificates])
# wait until all files have uploaded
if wait_until_finished:
queue.join()
# return files with full path in session workspace
return self.files_in_wsp
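    # Example (a sketch; file names are placeholders): upload files into a unique
    # folder of the DSS session workspace and wait for the uploads to finish:
    #
    #   o.upload_files(files=['results.csv', 'notebooks/'], wait_until_finished=True)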
class DataSetUploadQueue:
def __init__(self, workers=20):
# maximum files to be uploaded at once
self.upload_queue = Queue()
# define number of threads and start them
for t in range(workers):
t = Thread(target=self.upload_file)
t.daemon = True
t.start()
def put(self, things):
""" expects a list [url, filename] which is put into the upload queue
"""
self.upload_queue.put(things)
def join(self):
""" needs to be called if you want to wait for all uploads to be finished
"""
self.upload_queue.join()
def upload_file(self):
while True:
# get the next item in the queue
upload_url, filename, verify_certificates = self.upload_queue.get()
# upload the file to our DSS session workspace
with open(filename, 'rb') as f:
resp = requests.post(upload_url, data=f, verify=verify_certificates)
resp.raise_for_status()
# Tell the queue that we are done
self.upload_queue.task_done()
class DataSetDownloadQueue:
def __init__(self, workers=20):
# maximum files to be downloaded at once
self.download_queue = Queue()
# define number of threads
for t in range(workers):
t = Thread(target=self.download_file)
t.daemon = True
t.start()
def put(self, things):
""" expects a list [url, filename] which is put into the download queue
"""
self.download_queue.put(things)
def join(self):
""" needs to be called if you want to wait for all downloads to be finished
"""
self.download_queue.join()
def download_file(self):
while True:
url, filename, verify_certificates = self.download_queue.get()
# create the necessary directory structure if they don't exist yet
os.makedirs(os.path.dirname(filename), exist_ok=True)
# request the file in streaming mode
r = requests.get(url, stream=True, verify=verify_certificates)
with open(filename, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
self.download_queue.task_done()
class DataSet():
""" DataSet are openBIS objects that contain the actual files.
"""
def __init__(self, openbis_obj, permid, data):
self.openbis = openbis_obj
self.permid = permid
self.data = data
self.v1_ds = '/datastore_server/rmi-dss-api-v1.json'
self.downloadUrl = self.data['dataStore']['downloadUrl']
def download(self, files=None, wait_until_finished=False, workers=10):
""" download the actual files and put them by default in the following folder:
__current_dir__/hostname/dataset_permid/
If no files are specified, all files of a given dataset are downloaded.
Files are usually downloaded in parallel, using 10 workers by default. If you want to wait until
all the files are downloaded, set the wait_until_finished option to True.
"""
        if files is None:
files = self.file_list()
elif isinstance(files, str):
files = [files]
base_url = self.downloadUrl + '/datastore_server/' + self.permid + '/'
queue = DataSetDownloadQueue(workers=workers)
# get file list and start download
for filename in files:
download_url = base_url + filename + '?sessionID=' + self.openbis.token
filename = os.path.join(self.openbis.hostname, self.permid, filename)
queue.put([download_url, filename, self.openbis.verify_certificates])
# wait until all files have downloaded
if wait_until_finished:
queue.join()
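    # Example (a sketch): download all files of a dataset into
    # ./<hostname>/<dataset permId>/ and block until everything has arrived:
    #
    #   ds.download(wait_until_finished=True)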
def get_parents(self):
""" Returns an array of the parents of the given dataset. Returns an empty array if no
parents were found.
"""
parents = []
for item in self.data['parents']:
parent = self.openbis.get_dataset(item['code'])
if parent is not None:
parents.append(parent)
return parents
def get_children(self):
""" Returns an array of the children of the given dataset. Returns an empty array if no
children were found.
"""
children = []
for item in self.data['children']:
child = self.openbis.get_dataset(item['code'])
if child is not None:
children.append(child)
return children
def file_list(self):
files = []
for file in self.get_file_list(recursive=True):
if file['isDirectory']:
pass
else:
files.append(file['pathInDataSet'])
return files
def get_file_list(self, recursive=True, start_folder="/"):
""" Lists all files of a given dataset. You can specifiy a start_folder other than "/".
By default, all directories and their containing files are listed recursively. You can
turn off this option by setting recursive=False.
"""
request = {
"method" : "listFilesForDataSet",
"params" : [
                self.openbis.token,
                self.permid,
                start_folder,
                recursive,
            ],
"id":"1"
}
resp = requests.post(
self.downloadUrl + self.v1_ds,
json.dumps(request),
verify=self.openbis.verify_certificates
)
        if resp.ok:
            data = resp.json()
            if 'error' in data:
                raise ValueError('Error from openBIS: ' + str(data['error']))
            elif 'result' in data:
                return data['result']
            else:
                raise ValueError('request to openBIS did not return either result nor error')
        else:
            raise ValueError('internal error while performing post request')
class Sample(dict):
""" A Sample is one of the most commonly used objects in openBIS.
"""
def __init__(self, openbis_obj, *args, **kwargs):
super(Sample, self).__init__(*args, **kwargs)
self.__dict__ = self
self.openbis = openbis_obj
self.permid = self.permId['permId']
self.ident = self.identifier['identifier']
def delete(self, permid, reason):
self.openbis.delete_sample(permid, reason)
def get_datasets(self):
datasets = []
for item in self.dataSets:
datasets.append(self.openbis.get_dataset(item['permId']['permId']))
return datasets
def get_parents(self):
parents = []
for item in self.parents:
parent = self.openbis.get_sample(item['permId']['permId'])
if parent is not None:
parents.append(parent)
return parents
def get_children(self):
children = []
for item in self.children:
child = self.openbis.get_sample(item['permId']['permId'])
if child is not None:
children.append(child)
return children
class Space(dict):
""" managing openBIS spaces
"""
def __init__(self, openbis_obj, *args, **kwargs):
super(Space, self).__init__(*args, **kwargs)
self.__dict__ = self
self.openbis = openbis_obj
self.code = self.code
def get_samples(self):
""" Lists all samples in a given space. A pandas DataFrame object is returned.
"""
fields = ['spaceCode','permId', 'identifier','experimentIdentifierOrNull']
request = {
"method": "searchForSamples",
"params": [
self.openbis.token,
{
"matchClauses": [
{
"@type": "AttributeMatchClause",
"fieldType": "ATTRIBUTE",
"attribute": "SPACE",
"desiredValue": self.code,
}
],
"subCriterias": [],
"operator": "MATCH_ALL_CLAUSES"
},
[
"PROPERTIES",
"PARENTS"
]
],
"id": "1",
"jsonrpc": "2.0"
}
resp = self.openbis._post_request(self.openbis.as_v1, request)