Skip to content
Snippets Groups Projects
Commit 51bab3a3 authored by Swen Vermeul's avatar Swen Vermeul
Browse files

Merge branch 'release/pybis-1.10.4'

parents e1fb0ed7 7f268053
No related branches found
No related tags found
No related merge requests found
## Changes with pybis-1.10.4
* better error messages when downloading files from datastore server
## Changes with pybis-1.10.3
* print warning message when downloaded file-size does not match with promised file-size. Do not die.
......
name = 'pybis' name = 'pybis'
__author__ = 'Swen Vermeul' __author__ = 'Swen Vermeul'
__email__ = 'swen@ethz.ch' __email__ = 'swen@ethz.ch'
__version__ = '1.10.3' __version__ = '1.10.4'
from . import pybis from . import pybis
from .pybis import Openbis from .pybis import Openbis
......
...@@ -18,6 +18,7 @@ import time ...@@ -18,6 +18,7 @@ import time
# needed for Data upload # needed for Data upload
PYBIS_PLUGIN = "dataset-uploader-api" PYBIS_PLUGIN = "dataset-uploader-api"
dataset_definitions = openbis_definitions('dataSet') dataset_definitions = openbis_definitions('dataSet')
dss_endpoint = '/datastore_server/rmi-data-store-server-v3.json'
class DataSet( class DataSet(
...@@ -202,6 +203,33 @@ class DataSet( ...@@ -202,6 +203,33 @@ class DataSet(
set_props = set_properties set_props = set_properties
    def get_dataset_files(self, **properties):
        """Search the datastore server (DSS) for files belonging to a dataset.

        Issues a JSON-RPC ``searchFiles`` request against the DSS v3
        endpoint and wraps the response via ``_dataset_list_for_response``.

        NOTE(review): as written this method cannot run -- it references
        several names that are not defined in this scope: ``sub_criteria``,
        ``fetchopts``, ``datastore``, ``props``, ``start_with`` and
        ``count``. Presumably they should be derived from ``**properties``
        (which is currently unused) or added as parameters; confirm against
        the openBIS v3 API before relying on this method.
        """
        search_criteria = get_search_type_for_entity('datasetFiles')
        # NOTE(review): `sub_criteria` is undefined here -- likely meant to
        # be built from **properties. TODO confirm.
        search_criteria['criteria'] = sub_criteria
        search_criteria['operator'] = 'AND'
        # JSON-RPC payload for the DSS `searchFiles` method.
        request = {
            "method": "searchFiles",
            "params": [
                self.token,
                search_criteria,
                fetchopts,  # NOTE(review): undefined in this scope
            ],
        }
        # `datastore.url` + module-level `dss_endpoint` select the DSS;
        # NOTE(review): `datastore` is undefined in this scope.
        resp = self._post_request(datastore.url, dss_endpoint, request)
        return self._dataset_list_for_response(
            response=resp['objects'],
            props=props,            # NOTE(review): undefined in this scope
            start_with=start_with,  # NOTE(review): undefined in this scope
            count=count,            # NOTE(review): undefined in this scope
            totalCount=resp['totalCount'],
        )
def download(self, files=None, destination=None, wait_until_finished=True, workers=10, def download(self, files=None, destination=None, wait_until_finished=True, workers=10,
linked_dataset_fileservice_url=None, content_copy_index=0): linked_dataset_fileservice_url=None, content_copy_index=0):
""" download the actual files and put them by default in the following folder: """ download the actual files and put them by default in the following folder:
...@@ -247,6 +275,7 @@ class DataSet( ...@@ -247,6 +275,7 @@ class DataSet(
file_info = self.get_file_list(start_folder=filename) file_info = self.get_file_list(start_folder=filename)
file_size = file_info[0]['fileSize'] file_size = file_info[0]['fileSize']
download_url = base_url + filename + '?sessionID=' + self.openbis.token download_url = base_url + filename + '?sessionID=' + self.openbis.token
#print(download_url)
filename_dest = os.path.join(destination, self.permId, filename) filename_dest = os.path.join(destination, self.permId, filename)
queue.put([download_url, filename, filename_dest, file_size, self.openbis.verify_certificates, 'wb']) queue.put([download_url, filename, filename_dest, file_size, self.openbis.verify_certificates, 'wb'])
...@@ -771,11 +800,18 @@ class DataSetDownloadQueue(): ...@@ -771,11 +800,18 @@ class DataSetDownloadQueue():
if r.ok == False: if r.ok == False:
raise ValueError("Could not download from {}: HTTP {}. Reason: {}".format(url, r.status_code, r.reason)) raise ValueError("Could not download from {}: HTTP {}. Reason: {}".format(url, r.status_code, r.reason))
with open(filename_dest, write_mode) as f: with open(filename_dest, write_mode) as fh:
for chunk in r.iter_content(chunk_size=1024): for chunk in r.iter_content(chunk_size=1024*1024):
#size += len(chunk)
#print("WRITE ", datetime.now(), len(chunk))
if chunk: # filter out keep-alive new chunks if chunk: # filter out keep-alive new chunks
f.write(chunk) fh.write(chunk)
#print("DONE WRITE", datetime.now())
#print("DONE", datetime.now())
r.raise_for_status()
#print("{} bytes written".format(size))
actual_file_size = os.path.getsize(filename_dest) actual_file_size = os.path.getsize(filename_dest)
if actual_file_size != int(file_size): if actual_file_size != int(file_size):
if self.collect_files_with_wrong_length: if self.collect_files_with_wrong_length:
...@@ -785,6 +821,12 @@ class DataSetDownloadQueue(): ...@@ -785,6 +821,12 @@ class DataSetDownloadQueue():
"WARNING! File {} has the wrong length: Expected: {} Actual size: {}".format( "WARNING! File {} has the wrong length: Expected: {} Actual size: {}".format(
filename_dest, int(file_size), actual_file_size) filename_dest, int(file_size), actual_file_size)
) )
print (
"REASON: The connection has been silently dropped upstreams.",
"Please check the http timeout settings of the openBIS datastore server"
)
except Exception as err:
print("ERROR while writing file {}: {}".format(filename_dest, err))
finally: finally:
self.download_queue.task_done() self.download_queue.task_done()
......
...@@ -35,6 +35,10 @@ class Experiment( ...@@ -35,6 +35,10 @@ class Experiment(
'save()' 'save()'
] + super().__dir__() ] + super().__dir__()
@property
def props(self):
return self.__dict__['p']
@property @property
def type(self): def type(self):
return self.__dict__['type'] return self.__dict__['type']
......
...@@ -24,6 +24,10 @@ class Project( ...@@ -24,6 +24,10 @@ class Project(
'save()', 'delete()' 'save()', 'delete()'
] + super().__dir__() ] + super().__dir__()
@property
def props(self):
return self.__dict__['p']
def get_samples(self, **kwargs): def get_samples(self, **kwargs):
return self.openbis.get_samples(project=self.permId, **kwargs) return self.openbis.get_samples(project=self.permId, **kwargs)
get_objects = get_samples # Alias get_objects = get_samples # Alias
......
...@@ -11,7 +11,7 @@ with open("README.md", "r", encoding="utf-8") as fh: ...@@ -11,7 +11,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
setup( setup(
name='PyBIS', name='PyBIS',
version= '1.10.3', version= '1.10.4',
author='Swen Vermeul • ID SIS • ETH Zürich', author='Swen Vermeul • ID SIS • ETH Zürich',
author_email='swen@ethz.ch', author_email='swen@ethz.ch',
description='openBIS connection and interaction, optimized for using with Jupyter', description='openBIS connection and interaction, optimized for using with Jupyter',
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment