diff --git a/docs/.readthedocs.yaml b/.readthedocs.yaml similarity index 100% rename from docs/.readthedocs.yaml rename to .readthedocs.yaml diff --git a/README.md b/README.md index eca0fb375efdbea63b7e3524b4c5d6fb3c2f635a..0136fd8b19a12a58943fe98973daf763ff664ae9 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,3 @@ -  # Repository organization @@ -158,7 +157,7 @@ The project does not uses modules yet. Add '--add-opens' statements manually whe ## Development of NG UI 1. Generate openBIS JS bundle by running in command line - 1. cd /<OPENBIS_PROJECT_ROOT>/core-plugin-openbis + 1. cd /<OPENBIS_PROJECT_ROOT>/api-openbis-javascript 2. ./gradlew :bundleOpenbisStaticResources 2. Start openBIS in your chosen IDE (NG UI assumes it will run at: http://localhost:8888/openbis-test/): @@ -168,9 +167,9 @@ The project does not uses modules yet. Add '--add-opens' statements manually whe 1. cd /<OPENBIS_PROJECT_ROOT>/ui-admin 2. npm install 3. npm run dev -4. Open in your chosen browser a url, by default: http://localhost:8124/ng-ui-path +4. Open in your chosen browser a url, by default: http://localhost:9999/admin ## Setting up IntelliJ Idea 1. Under "IntelliJ IDEA" -> "Preferences" -> "Languages and Frameworks" -> Javascript, set the - language version to ECMAScript 6. \ No newline at end of file + language version to ECMAScript 6. diff --git a/api-openbis-matlab/OpenBis.m b/api-openbis-matlab/OpenBis.m index 0369de6d951172461da45976a29e7afbfae5a3bb..012d55fb1234b13d983e44f460007b103999767b 100644 --- a/api-openbis-matlab/OpenBis.m +++ b/api-openbis-matlab/OpenBis.m @@ -25,29 +25,44 @@ classdef OpenBis methods %% Constructor method - function obj = OpenBis(varargin) + function obj = OpenBis(url) % OpenBis Constructor method for class OpenBis - % Creates the Python Openbis object and logs into the server - % Optional positional input arguments: - % url ... URL of the openBIS server (incl. port) - % user ... user name for openBIS - % pw ... password for openBIS - % Usage: - % obi = OpenBis() --> opens UI to enter URL, user name and password - % obi = OpenBis('server_url', 'user_name', 'user_password') - - if nargin > 0 - url = varargin{1}; - user = varargin{2}; - pw = varargin{3}; - else - [url, user, pw] = user_url_pw_inputdlg; - end - + % Creates the Python Openbis object for openBIS server + % Usage: + % url ... URL of the openBIS server + % Example + % obi = OpenBis('server_url') o = py.pybis.Openbis(url, pyargs('verify_certificates', 0)); - o.login(user, pw, pyargs('save_token', 1)); obj.pybis = o; end + + function login(obj) + %login + % Login to openBIS with username and password + [user, pw] = user_pass_input; + obj.pybis.login(user, pw, pyargs('save_token', 1)) + end + + function set_token(obj, token) + %set_token + % Login to openBIS with a session token + obj.pybis.set_token(token) + end + + function get_or_create_personal_access_token(obj, sessionName) + %get_or_create_personal_access_token + % Creates a new personal access token (PAT). If a PAT with the + % given sessionName already exists and its expiry date (validToDate) + % is not within the warning period, the existing PAT is returned instead. 
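+            % Example (the server URL and session name below are placeholders):
+            %   obi = OpenBis('https://openbis.example.org');
+            %   obi.get_or_create_personal_access_token('matlab_session');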
+ pat = obj.pybis.get_or_create_personal_access_token(pyargs('sessionName', sessionName)); + obj.pybis.set_token(pat.permId, pyargs('save_token', 1)) + end + + function get_personal_access_tokens(obj) + %get_personal_access_tokens + % Return registered PATs + obj.pybis.get_personal_access_tokens() + end function logout(obj) %logout @@ -56,7 +71,6 @@ classdef OpenBis % obi.logout() % % After logout, the session token is no longer valid. - obj.pybis.logout(); end diff --git a/api-openbis-matlab/openbis_example.mlx b/api-openbis-matlab/openbis_example.mlx index 47de9a7094746be7c177cad7a149dc1b3221da1b..9bf03cf79416ff294f42277d5cfafec29358a5a9 100644 Binary files a/api-openbis-matlab/openbis_example.mlx and b/api-openbis-matlab/openbis_example.mlx differ diff --git a/api-openbis-matlab/passcode.m b/api-openbis-matlab/passcode.m deleted file mode 100644 index 13abf634d7b5e35d0cc8cf6740cc9a3fff66eeba..0000000000000000000000000000000000000000 --- a/api-openbis-matlab/passcode.m +++ /dev/null @@ -1,74 +0,0 @@ -function pass = passcode -%PASSCODE password input dialog box. -% passcode creates a modal dialog box that returns user password input. -% Given characters are substituted with '*'-Signs like in usual password dialogs. -% -% usage: -% password = PASSCODE -% -% Adapted from https://www.mathworks.com/matlabcentral/fileexchange/6590-passcode -% Version: v1.2 (03-Mar-2008) -% Author: Elmar Tarajan [MCommander@gmx.de] - - -ScreenSize = get(0,'ScreenSize'); -hfig = figure('Menubar','none', ... - 'Units','Pixels', ... - 'Resize','off', ... - 'NumberTitle','off', ... - 'Name',['Enter openBIS password'], ... - 'Position',[ (ScreenSize(3:4)-[300 75])/2 300 75], ... - 'Color',[0.8 0.8 0.8], ... - 'WindowStyle','modal'); -hedit = uicontrol('Parent',hfig, ... - 'Style','Edit', ... - 'Enable','inactive', ... - 'Units','Pixels','Position',[49 28 202 22], ... - 'FontSize',15, ... - 'String',[], ... - 'BackGroundColor',[0.7 0.7 0.7]); -hpass = uicontrol('Parent',hfig, ... - 'Style','Text', ... - 'Tag','password', ... - 'Units','Pixels','Position',[51 30 198 18], ... - 'FontSize',15, ... - 'BackGroundColor',[1 1 1]); -hwarn = uicontrol('Parent',hfig, ... - 'Style','Text', ... - 'Tag','error', ... - 'Units','Pixels','Position',[50 2 200 20], ... - 'FontSize',8, ... - 'String','character not allowed',... - 'Visible','off',... - 'ForeGroundColor',[1 0 0], ... - 'BackGroundColor',[0.8 0.8 0.8]); - -set(hfig,'KeyPressFcn',{@keypress_Callback,hedit,hpass,hwarn}, ... - 'CloseRequestFcn','uiresume') - -uiwait -pass = get(hpass,'userdata'); -delete(hfig) - - -function keypress_Callback(~,data,~,hpass,~) - -pass = get(hpass,'userdata'); - -switch data.Key - case 'backspace' - pass = pass(1:end-1); - case 'return' - uiresume - return - otherwise - try - pass = [pass data.Character]; - catch - disp('Some error occured during password entry!') - end -end - -set(hpass,'userdata',pass) -set(hpass,'String',char('*'*sign(pass))) - diff --git a/api-openbis-matlab/passwordEntryDialog.m b/api-openbis-matlab/passwordEntryDialog.m deleted file mode 100644 index d6d9ee860795b54dd82be1358261c353d84215af..0000000000000000000000000000000000000000 --- a/api-openbis-matlab/passwordEntryDialog.m +++ /dev/null @@ -1,406 +0,0 @@ -function [Password, UserName] = passwordEntryDialog(varargin) -% PASSWORDENTRYDIALOG -% [Password, UserName] = passwordEntryDialog(varargin) -% -% Create a password entry dialog for entering a password that is visibly -% hidden. Java must be enabled for this function to work properly. 
-% -% It has only been tested on the Windows platform in R2008a. It should -% work in R2007a or later. -% -% The password box is created using the Java Swing component -% JPasswordField. -% -% Optional Input Arguments -% ------------------------ -% -% 'enterUserName' DEFAULT: false -% Display the user name entry box. The user name entered must be at least -% one character or an error dialog is displayed. -% -% 'DefaultUserName' DEFAULT: '' -% String value of user name to populate in User Name box upon creation. -% -% 'ValidatePassword' DEFAULT: false -% Display dialog box to reenter password for validation purposes. -% -% 'CheckPasswordLength' DEFAULT: true -% Check the password length to ensure it meets the specified criteria. -% -% 'PasswordLengthMin' DEFAULT: 2 -% Minimum password length allowed. -% -% 'PasswordLengthMax' DEFAULT: 8 -% Maximum password length allowed. -% -% 'WindowName' DEFAULT: 'Login' -% Title of the password entry window. -% -% Examples -% -------- -% -% Create a password dialog box with the default options. -% ----------------------------------------------------------------------- -% [Password] = passwordEntryDialog; -% -% Create a user name and password entry dialog box without password -% verification. -% ----------------------------------------------------------------------- -% [Password, UserName] = passwordEntryDialog('enterUserName', true) -% -% Create a user name and password entry dialog box without password -% verification. Set the user name to 'jdoe' upon startup. -% ----------------------------------------------------------------------- -% [Password, UserName] = passwordEntryDialog('enterUserName', true,... -% 'DefaultUserName', 'jdoe') -% -% Create a password dialog box with password validation -% ----------------------------------------------------------------------- -% [Password] = passwordEntryDialog('ValidatePassword', true); -% -% Create a user name and password entry dialog box with password -% verification. -% ----------------------------------------------------------------------- -% [Password, UserName] = passwordEntryDialog('enterUserName', true,... -% 'ValidatePassword', true) -% -% Check the length of the password to be between 5 and 8 characters -% ----------------------------------------------------------------------- -% [Password, UserName] = passwordEntryDialog('CheckPasswordLength', true,... -% 'PasswordLengthMin', 5,... -% 'PasswordLengthMax', 8) -% -% ----------------------------------------------------------------------- -% Copyright (C) 2007-2008, Jesse B. Lai -% -% This program is free software: you can redistribute it and/or modify -% it under the terms of the GNU Lesser General Public License as published -% by the Free Software Foundation, either version 3 of the License, or -% (at your option) any later version. -% -% This program is distributed in the hope that it will be useful, -% but WITHOUT ANY WARRANTY; without even the implied warranty of -% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -% GNU Lesser General Public License for more details. -% -% You should have received a copy of the GNU Lesser General Public License -% along with this program. If not, see <http://www.gnu.org/licenses/> - -%% History -% The history of this program is outlined in this section. -% -% 20080616 - JBL - 0.0.2 -% Started 20080612 -% -% Updated to remove requirement for uicomponent function written by Yair -% Altman. Now, the Java components are created manually and the -% undocumented feature javacomponent is used. 
-% -% A focus issue was worked out by using the drawnow command in a couple of -% places to allow the objects to be focused properly upon startup. -% -% 20080427 - JBL - 0.0.1 -% Started 20080425. -% -% Initial version. Uses Java to create the password entry box. An edit -% box was initially attemped with the Java frame, but was occasionally -% getting Java exceptions when trying to query the 'SelectionStart' and -% 'SelectionEnd' properties. -% -% Basic options of entering user name and password with options for -% password validation. -% -% ToDo: Maybe add valid string input argument to only allow certain -% characters. - -%% Program Information - -% ProgramName = 'passwordEntryDialog'; -% ProgramVersion = '0.0.2'; -% svnRevision = '$Revision: 184 $'; -% svnRevision = getSVNRevision(svnRevision); -% ProgramVersion = [ProgramVersion, '.' ,svnRevision]; -% -% LastChangedDate = '$LastChangedDate: 2008-06-16 09:08:17 -0600 (Mon, 16 Jun 2008) $'; -% ProgramDate = getSVNDate(LastChangedDate); - -%% Check for Existance of Java -if ~usejava('swing') - error('passwordEntryDialog: Java is required for this program to run.'); -end - -%% Parse Input Arguments -% Input arguments are parsed with the MATLAB inputParser class. - -% Create input parser object -ProgramOptionsParser = inputParser; -ProgramOptionsParser.KeepUnmatched = true; - -ProgramOptionsParser.addParamValue('enterUserName', false, @(x) islogical(x) || isnumeric(x)); -ProgramOptionsParser.addParamValue('DefaultUserName', '', @ischar); -ProgramOptionsParser.addParamValue('ValidatePassword', false, @(x) islogical(x) || isnumeric(x)); -ProgramOptionsParser.addParamValue('CheckPasswordLength', true, @(x) islogical(x) || isnumeric(x)); -ProgramOptionsParser.addParamValue('PasswordLengthMin', 2, @isnumeric); -ProgramOptionsParser.addParamValue('PasswordLengthMax', 8, @isnumeric); -ProgramOptionsParser.addParamValue('WindowName', 'Login', @ischar); - -% Parse Input Arguments -try - ProgramOptionsParser.parse(varargin{:}); -catch Error - ProgramOptionsParser.parse; - if strcmpi(Error.identifier, 'MATLAB:InputParser:ArgumentFailedValidation') - error(Error.identifier, Error.message); - end; -end; - -ProgramOptions = ProgramOptionsParser.Results; - -% Validate password length options -if ProgramOptions.CheckPasswordLength - if ProgramOptions.PasswordLengthMax < ProgramOptions.PasswordLengthMin - error('MATLAB:InputParser:ArgumentFailedValidation', 'PasswordLengthMax must be greater than PasswordLengthMin'); - end; -end; - -%% Determine GUI Size and Position -% Center the GUI on the screen. - -set(0,'Units','pixels') -Screen = get(0,'screensize'); - -PositionGUI = [0 0 300 50]; -if ProgramOptions.enterUserName - PositionGUI = PositionGUI + [0 0 0 50]; -end; -if ProgramOptions.ValidatePassword - PositionGUI = PositionGUI + [0 0 0 50]; - OffsetBottom = 43; -else - OffsetBottom = 0; -end; - -PositionGUI = [Screen(3:4)/2-PositionGUI(3:4)/2 PositionGUI(3:4)]; -PositionLeft = 10; -BoxWidth = 200; - -%% Create the GUI - -BackgroundColor = get(0,'DefaultUicontrolBackgroundcolor'); -handles.figure1 = figure('Menubar','none', ... - 'Units','Pixels', ... - 'Resize','off', ... - 'NumberTitle','off', ... - 'Name',ProgramOptions.WindowName, ... - 'Position',PositionGUI, ... - 'Color', BackgroundColor, ... 
- 'WindowStyle','modal'); - -% Create Password Validation Entry Box -if ProgramOptions.ValidatePassword - handles.java_PasswordValidate = javax.swing.JPasswordField(); - handles.java_PasswordValidate.setFocusable(true); - [handles.java_PasswordValidate, handles.edit_PasswordValidate] = javacomponent(handles.java_PasswordValidate, [], handles.figure1); - - set(handles.edit_PasswordValidate, ... - 'Parent', handles.figure1, ... - 'Tag', 'edit_PasswordValidate', ... - 'Units', 'Pixels', ... - 'Position',[PositionLeft 10 BoxWidth 23]); - - handles.text_LabelPasswordValidate = uicontrol('Parent',handles.figure1, ... - 'Tag', 'text_LabelPassword', ... - 'Style','Text', ... - 'Units','Pixels',... - 'Position',[PositionLeft 33 BoxWidth 16], ... - 'FontSize',10, ... - 'String','Reenter password:',... - 'HorizontalAlignment', 'Left'); -end; - -% Create Password Entry Box -handles.java_Password = javax.swing.JPasswordField(); -[handles.java_Password, handles.edit_Password] = javacomponent(handles.java_Password, [PositionLeft 10+OffsetBottom BoxWidth 23], handles.figure1); -handles.java_Password.setFocusable(true); - -set(handles.edit_Password, ... - 'Parent', handles.figure1, ... - 'Tag', 'edit_Password', ... - 'Units', 'Pixels', ... - 'Position',[PositionLeft 10+OffsetBottom BoxWidth 23]); -drawnow; % This drawnow is required to allow the focus to work - -handles.text_LabelPassword = uicontrol('Parent',handles.figure1, ... - 'Tag', 'text_LabelPassword', ... - 'Style','Text', ... - 'Units','Pixels',... - 'Position',[PositionLeft 33+OffsetBottom BoxWidth 16], ... - 'FontSize',10, ... - 'String','Key:',... - 'HorizontalAlignment', 'Left'); - -% Create OK Pushbutton -handles.pushbutton_OK = uicontrol('Parent',handles.figure1, ... - 'Tag', 'pushbutton_OK', ... - 'Style','Pushbutton', ... - 'Units','Pixels',... - 'Position',[PositionLeft+BoxWidth+5 10 30 23], ... - 'FontSize',10, ... - 'String','OK',... - 'HorizontalAlignment', 'Center'); - -% Create Cancel Pushbutton -handles.pushbutton_Cancel = uicontrol('Parent',handles.figure1, ... - 'Tag', 'pushbutton_Cancel', ... - 'Style','Pushbutton', ... - 'Units','Pixels',... - 'Position',[PositionLeft+BoxWidth+30+7 10 50 23], ... - 'FontSize',10, ... - 'String','Cancel',... - 'HorizontalAlignment', 'Center'); - -% Create User Name Edit Box -if ProgramOptions.enterUserName - handles.java_UserName = javax.swing.JTextField(); - handles.java_UserName.setFocusable(true); - [handles.java_UserName, handles.edit_UserName] = javacomponent(handles.java_UserName, [], handles.figure1); - - set(handles.edit_UserName, ... - 'Parent', handles.figure1, ... - 'Tag', 'edit_UserName', ... - 'Units', 'Pixels', ... - 'Position',[PositionLeft 53+OffsetBottom 200 23]); - set(handles.java_UserName, 'Text', ProgramOptions.DefaultUserName); - drawnow; % This drawnow is required to allow the focus to work - - handles.text_LabelUserName = uicontrol('Parent',handles.figure1, ... - 'Tag', 'text_LabelUserName', ... - 'Style','Text', ... - 'Units','Pixels',... - 'Position',[PositionLeft 76+OffsetBottom 200 16], ... - 'FontSize',10, ... - 'String','User name:',... 
- 'HorizontalAlignment', 'Left'); - - %uicontrol(handles.edit_UserName); - %set(handles.figure1,'CurrentObject',handles.java_UserName) - handles.java_UserName.requestFocus; % Get focus - drawnow; -else - handles.java_Password.requestFocus; % Get focus - drawnow; -end; - -%% Setup Callbacks for Objects -% Adds the callback functions for the objects in the GUI - -set(handles.pushbutton_OK, 'Callback', {@pushbutton_OK_Callback, handles, ProgramOptions}, 'KeyPressFcn', {@pushbutton_KeyPressFcn, handles, ProgramOptions}); -set(handles.pushbutton_Cancel, 'Callback', {@pushbutton_Cancel_Callback, handles, ProgramOptions}, 'KeyPressFcn', {@pushbutton_KeyPressFcn, handles, ProgramOptions}); -set(handles.java_Password, 'ActionPerformedCallback', {@pushbutton_OK_Callback, handles, ProgramOptions}); - -if ProgramOptions.ValidatePassword - if ProgramOptions.enterUserName - ObjectNext = handles.java_UserName; - else - ObjectNext = handles.java_Password; - end; - set(handles.java_PasswordValidate, 'ActionPerformedCallback', {@pushbutton_OK_Callback, handles, ProgramOptions}, 'NextFocusableComponent', ObjectNext); - set(handles.java_Password, 'NextFocusableComponent', handles.java_PasswordValidate); -elseif ProgramOptions.enterUserName - set(handles.java_Password, 'NextFocusableComponent', handles.java_UserName); -end; - -if ProgramOptions.enterUserName - set(handles.java_UserName, 'ActionPerformedCallback', {@pushbutton_OK_Callback, handles, ProgramOptions}, 'NextFocusableComponent', handles.java_Password); -end; - -setappdata(handles.figure1, 'isCanceled', false); - -%% Wait for Completion - -% Wait for the user to complete entry. -drawnow; -uiwait(handles.figure1); - -%% Get current information -% User either closed the window or pressed cancel or OK. - -isCanceled = ~ishandle(handles.figure1) || getappdata(handles.figure1, 'isCanceled'); -if isCanceled - if ishandle(handles.figure1) - delete(handles.figure1); - end; - Password = -1; - UserName = ''; - return; -end; - -Password = handles.java_Password.Password'; -if ProgramOptions.enterUserName - UserName = strtrim(get(handles.java_UserName, 'Text')); -else - UserName = ''; -end; -delete(handles.figure1); - -%% DEFINE FUNCTIONS -% The subfunctions required by this program are in the following section. - -function pushbutton_KeyPressFcn(hObject, eventdata, handles, ProgramOptions) - -switch eventdata.Key - case 'return' - Callback = get(hObject, 'Callback'); - feval(Callback{1}, hObject, '', Callback{2:end}); -end; - -function pushbutton_OK_Callback(hObject, eventdata, handles, ProgramOptions) -if ProgramOptions.enterUserName - % Check if username is blank - UserName = strtrim(get(handles.java_UserName, 'Text')); - if isempty(UserName) - strMessage = 'Username is blank'; - %disp(strMessage) - hError = errordlg(strMessage, 'passwordEntryDialog'); - uiwait(hError); - return; - end; -end; - -if ProgramOptions.CheckPasswordLength - %Password = handles.edit_Password.Password'; - Password = handles.java_Password.Password'; - if length(Password) < ProgramOptions.PasswordLengthMin || length(Password) > ProgramOptions.PasswordLengthMax - strMessage = sprintf('Password must be between %d and %d characters', ... - ProgramOptions.PasswordLengthMin, ... 
- ProgramOptions.PasswordLengthMax); - %disp(strMessage); - hError = errordlg(strMessage, 'passwordEntryDialog'); - uiwait(hError); - if ProgramOptions.ValidatePassword - set(handles.java_PasswordValidate,'Text', ''); - end; - handles.java_Password.requestFocus - return; - end; -end; - -if ProgramOptions.ValidatePassword - % Check if passwords match - if ~isequal(handles.java_Password.Password, handles.java_PasswordValidate.Password) - strMessage = 'Passwords do not match. Please try again'; - %disp(strMessage); - hError=errordlg(strMessage, 'passwordEntryDialog','modal'); - uiwait(hError); - set(handles.java_Password,'Text', ''); - set(handles.java_PasswordValidate,'Text', ''); - - handles.java_Password.requestFocus - return; - end; -end; -uiresume(handles.figure1); - -function pushbutton_Cancel_Callback(hObject, eventdata, handles, ProgramOptions) -setappdata(handles.figure1, 'isCanceled', true); -uiresume(handles.figure1); \ No newline at end of file diff --git a/api-openbis-matlab/user_pass_input.m b/api-openbis-matlab/user_pass_input.m new file mode 100644 index 0000000000000000000000000000000000000000..e8f61f9037c1051608554ddba38faef0a0e7d635 --- /dev/null +++ b/api-openbis-matlab/user_pass_input.m @@ -0,0 +1,87 @@ +function [user, pw] = user_pass_input +%user_pw_input +% UI window to obtain user name and pwword for openBIS + +% check if Java is available (Matlab not started with -nojvm flag) +if ~usejava('awt') + error('This function requires Java. Start Matlab with Java enabled.') +end + +% default values +user = ''; pw = ''; + +% Setup figure for UI window +sz = get(0, 'ScreenSize'); +dlgName = 'openBIS Credentials'; + +% setup figure window +hFig = figure(WindowStyle='modal', Position=[(sz(3:4)-[350 100])/2 350 100], Name=dlgName, ... + Resize='off', NumberTitle='off', Menubar='none', Color=[0.9 0.9 0.9], CloseRequestFcn=@(~,~)uiresume); + +% setup text field for user name +hUser = uicontrol(hFig, Style='edit', Position=[80 70 250 20], KeyPressFcn=@userKeyPress, ... + FontSize=10, BackGroundColor='w', String=user); + +% setup text field for password +hPw = uicontrol(hFig, Style='edit', Position=[80 40 250 20], KeyPressFcn=@pwKeyPress, ... + FontSize=10, BackGroundColor='w', String=''); + +% labels for text fields +annotation(hFig, 'textbox', Units='pixels', Position=[00 70 80 20], String='Username', ... + EdgeColor='n', VerticalAlignment='middle', HorizontalAlignment='right') +annotation(hFig, 'textbox', Units='pixels', Position=[00 40 80 20], String='Password', ... 
+ EdgeColor='n', VerticalAlignment='middle', HorizontalAlignment='right') + +% OK / cancel buttons +hOK = uicontrol(hFig, Style="pushbutton", Position=[140 7 50 20], Callback=@okClick, String='OK'); +hCancel = uicontrol(hFig, Style="pushbutton", Position=[215 7 50 20], Callback=@cancelClick, String='Cancel'); + +uicontrol(hUser) % give focuse to username field +uiwait % wait for uiresume command +drawnow + +user = hUser.String; +delete(hFig) + + + function userKeyPress(~, event) + if event.Key == "return" + uiresume, return %done + elseif event.Key == "escape" + hUser.String = ''; pw = ''; + uiresume, return %abort + end + end + + function pwKeyPress(~, event) + if event.Key == "backspace" + pw = pw(1:end-1); %shorten pwword + elseif event.Key == "return" + uiresume, return %done + elseif event.Key == "escape" + hUser.String = ''; pw = ''; + uiresume, return %abort + elseif contains(event.Character,num2cell(char(32:126))) + pw(end+1) = event.Character; % append key to password + end + redrawPassField(pw) + end + + function redrawPassField(pw) + % redraw the entire password text field with the entered value + % hidden + hPw = uicontrol(hFig, Style='edit', Position=[80 40 250 20], KeyPressFcn=@pwKeyPress, ... + FontSize=10, BackGroundColor='w', String=repmat(char(8226),size(pw))); + end + + function okClick(source, event) + uiresume, return + end + + function cancelClick(source, event) + % default values + hUser.String = ''; pw = ''; + uiresume, return + end + +end \ No newline at end of file diff --git a/api-openbis-matlab/user_url_pw_input_dialog.m b/api-openbis-matlab/user_url_pw_input_dialog.m deleted file mode 100644 index 970f750aa528640de0380f6b8cdba72f7258df02..0000000000000000000000000000000000000000 --- a/api-openbis-matlab/user_url_pw_input_dialog.m +++ /dev/null @@ -1,93 +0,0 @@ -function [url, user, pass] = user_url_pw_input_dialog -%user_url_pw_input -% Return the URL, user name and password for the openBIS server - -url = 'https://XYZ.ethz.ch/openbis:8443'; -user = ''; -pass = ''; - - -ScreenSize = get(0,'ScreenSize'); -fig = uifigure('Name', 'Enter openBIS credentials', 'Position',[(ScreenSize(3:4)-[300 75])/2 400 150]); -fig.CloseRequestFcn = @(fig,event)my_closereq(fig); - -% URL label and text field -lbl_url = uilabel(fig, 'Text', 'URL:', ... - 'Position',[10 120 80 20]); - -txt_url = uieditfield(fig,... - 'Position',[70 120 280 20], ... - 'Value', url, ... - 'Tag', 'url_textfield'); - -% User label and text field -lbl_user = uilabel(fig, 'Text', 'User:', ... - 'Position',[10 90 80 20]); - -txt_user = uieditfield(fig,... - 'Position',[70 90 280 20], ... - 'Value', user, ... - 'Tag', 'user_textfield'); - -% Password label and text field -lbl_pass = uilabel(fig, 'Text', 'Password:', ... - 'Position',[10 60 80 20]); - -txt_pass = uieditfield(fig,... - 'Position',[70 60 280 20], ... - 'Tag', 'pass_textfield', ... - 'ValueChangingFcn', @textChanging, ... - 'UserData', ''); - -% Push button to accept entries -btn = uibutton(fig,'push', ... - 'Position',[150 10 100 40], ... - 'Text', 'Connect', ... - 'FontWeight', 'bold', ... 
- 'ButtonPushedFcn', @(btn,event) buttonPushed(btn, fig)); - -uiwait(fig) - - % run this when figure closes - function my_closereq(fig,selection) - - url = get(txt_url, 'Value'); - user = get(txt_user, 'Value'); - pass = get(txt_pass,'UserData'); - - delete(fig) - - end - -end - -% Callback functions -function textChanging(txt, event) -% replace typed text with stars -% Todo: handle delete / backspace - -% disp(event.Value); - -if isempty(txt.UserData) - txt.UserData = event.Value; -else - txt.UserData = append(txt.UserData, event.Value(end)); -end - -val = event.Value; -if ~isempty(val) - val(1:length(val)) = '*'; -else - val = '*'; -end -txt.Value = val; - -end - -function buttonPushed(btn, fig) -% close the figure, call CloseRequestFcn before - - close(fig) -end - - diff --git a/api-openbis-matlab/user_url_pw_inputdlg.m b/api-openbis-matlab/user_url_pw_inputdlg.m deleted file mode 100644 index d982a5b09440d9d6d1c5de183197b4742754f739..0000000000000000000000000000000000000000 --- a/api-openbis-matlab/user_url_pw_inputdlg.m +++ /dev/null @@ -1,16 +0,0 @@ -function [url, user, pw] = user_url_pw_inputdlg -%user_url_pw_inputdlg -% Return the URL, user name and password for the openBIS server - -prompt = {'openBIS URL:', 'openBIS user:'}; -title = 'openBIS connection details'; -definput = {'https://XYZ.ethz.ch/openbis:8443', ''}; -answer = inputdlg(prompt, title, 1, definput); - -url = answer{1}; -user = answer{2}; - -% pw = passwordEntryDialog('CheckPasswordLength',0); -pw = passcode; - -end \ No newline at end of file diff --git a/api-openbis-python3-pybis/src/python/CHANGELOG.md b/api-openbis-python3-pybis/src/python/CHANGELOG.md index a9f158e7d26ac87d28b3ea5b93a41f223894b4a5..3e9f63a035a14f8cca43c9d181bf33e7bda31a0f 100644 --- a/api-openbis-python3-pybis/src/python/CHANGELOG.md +++ b/api-openbis-python3-pybis/src/python/CHANGELOG.md @@ -1,3 +1,24 @@ +## Changes with pybis-1.36.0 + +- Reverted breaking changes to dataset upload functionality +- Performance improvements to get_sample and get_samples methods + +## Changes with pybis-1.35.11 + +- Improvements to dataset upload performance + +## Changes with pybis-1.35.10 + +- Fixed issue with changing properties for linked datasets + +## Changes with pybis-1.35.9 + +- Changed get_samples method to also include dataset ids depending on the params + +## Changes with pybis-1.35.8 + +- Fixed a typo in the set attribute method + ## Changes with pybis-1.35.7 - Improvements to fast download scheme diff --git a/api-openbis-python3-pybis/src/python/pybis/__init__.py b/api-openbis-python3-pybis/src/python/pybis/__init__.py index 541a460f3a332849550b70a18d33ce5152c30a04..8a25c785035679ee38ca0e9d7803ac6971e2af90 100644 --- a/api-openbis-python3-pybis/src/python/pybis/__init__.py +++ b/api-openbis-python3-pybis/src/python/pybis/__init__.py @@ -15,7 +15,7 @@ name = "pybis" __author__ = "ID SIS • ETH Zürich" __email__ = "openbis-support@id.ethz.ch" -__version__ = "1.35.7rc3" +__version__ = "1.36.0" from . 
import pybis from .pybis import DataSet diff --git a/api-openbis-python3-pybis/src/python/pybis/openbis_object.py b/api-openbis-python3-pybis/src/python/pybis/openbis_object.py index d351da7485764e7616f76691ce64798b8cd1b7bb..d19d90b8663914cd324e08e4130962df82702553 100644 --- a/api-openbis-python3-pybis/src/python/pybis/openbis_object.py +++ b/api-openbis-python3-pybis/src/python/pybis/openbis_object.py @@ -71,9 +71,10 @@ class OpenBisObject: # put the properties in the self.p namespace (without checking them) if "properties" in data: for key, value in data["properties"].items(): - data_type = self.p._property_names[key.lower()]['dataType'] - if data_type in ("ARRAY_INTEGER", "ARRAY_REAL", "ARRAY_STRING", "ARRAY_TIMESTAMP"): - value = self.formatter.to_array(data_type, value) + if self.p.type: + data_type = self.p._property_names[key.lower()]['dataType'] + if data_type in ("ARRAY_INTEGER", "ARRAY_REAL", "ARRAY_STRING", "ARRAY_TIMESTAMP"): + value = self.formatter.to_array(data_type, value) self.p.__dict__[key.lower()] = value # object is already saved to openBIS, so it is not new anymore diff --git a/api-openbis-python3-pybis/src/python/pybis/property.py b/api-openbis-python3-pybis/src/python/pybis/property.py index 54b8577de72f50f76309aa0d3e8baa7af5f56698..751ccf2fccb1a91285efc82aba633b3dd43f9ae7 100644 --- a/api-openbis-python3-pybis/src/python/pybis/property.py +++ b/api-openbis-python3-pybis/src/python/pybis/property.py @@ -137,11 +137,20 @@ class PropertyHolder: data_type = property_type["dataType"] if data_type == "CONTROLLEDVOCABULARY": terms = property_type["terms"] - value = str(value).upper() - if value not in terms.df["code"].values: - raise ValueError( - f"Value for attribute «{name}» must be one of these terms: {', '.join(terms.df['code'].values)}" - ) + if "multiValue" in property_type and property_type["multiValue"] is True: + if type(value) != list: + value = [value] + for single_value in value: + if str(single_value).upper() not in terms.df["code"].values: + raise ValueError( + f"Value for attribute «{name}» must be one of these terms: {', '.join(terms.df['code'].values)}" + ) + else: + value = str(value).upper() + if value not in terms.df["code"].values: + raise ValueError( + f"Value for attribute «{name}» must be one of these terms: {', '.join(terms.df['code'].values)}" + ) elif data_type in ("INTEGER", "BOOLEAN", "VARCHAR", "ARRAY_INTEGER", "ARRAY_REAL", "ARRAY_STRING", "ARRAY_TIMESTAMP"): if not check_datatype(data_type, value): raise ValueError(f"Value must be of type {data_type}") diff --git a/api-openbis-python3-pybis/src/python/pybis/pybis.py b/api-openbis-python3-pybis/src/python/pybis/pybis.py index c37bcd9f607589ddf4b2d9d0c2c09c5abe05b1f2..12e50318f8104cb40fa126f44931065fedcd5919 100644 --- a/api-openbis-python3-pybis/src/python/pybis/pybis.py +++ b/api-openbis-python3-pybis/src/python/pybis/pybis.py @@ -1267,6 +1267,7 @@ class Openbis: """internal method, used to handle all post requests and serializing / deserializing data """ + if "id" not in request: request["id"] = "2" if "jsonrpc" not in request: @@ -1276,7 +1277,6 @@ class Openbis: if DEBUG_LEVEL >= LOG_DEBUG: print(json.dumps(request)) - try: resp = requests.post( full_url, json.dumps(request), verify=self.verify_certificates @@ -2380,6 +2380,7 @@ class Openbis: attrs=None, props=None, where=None, + raw_response=False, **properties, ): """Returns a DataFrame of all samples for a given space/project/experiment (or any combination). 
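A minimal pybis sketch of the sample-fetching options added in the surrounding hunks (the server URL and credentials follow the test defaults in conftest.py; the space and sample identifiers are placeholders):

    from pybis import Openbis

    o = Openbis("https://localhost:8443", verify_certificates=False)
    o.login("admin", "admin", save_token=True)

    # ask the server to also fetch the connected data set ids for each sample
    samples = o.get_samples(space="MY_SPACE", attrs=["dataSets"])

    # raw_response=True returns the parsed server response instead of a DataFrame
    raw = o.get_samples(space="MY_SPACE", raw_response=True)

    # the analogous switches on get_sample
    sample = o.get_sample("/MY_SPACE/SAMPLE_CODE", withDataSetIds=True)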
@@ -2497,6 +2498,9 @@ class Openbis: if props is not None: fetchopts["properties"] = get_fetchoption_for_entity("properties") + if "dataSets" in attrs: + fetchopts["dataSets"] = get_fetchoptions("dataSets") + request = { "method": "searchSamples", "params": [ @@ -2509,6 +2513,8 @@ class Openbis: resp = self._post_request(self.as_v3, request) parse_jackson(resp) + if raw_response: + return resp response = resp["objects"] @@ -4660,7 +4666,8 @@ class Openbis: ) def get_sample( - self, sample_ident, only_data=False, withAttachments=False, props=None, **kvals + self, sample_ident, only_data=False, withAttachments=False, props=None, + withDataSetIds=False, raw_response=False, **kvals ): """Retrieve metadata for the sample. Get metadata for the sample and any directly connected parents of the sample to allow access @@ -4701,6 +4708,9 @@ class Openbis: for key in ["parents", "children", "container", "components"]: fetchopts[key] = {"@type": "as.dto.sample.fetchoptions.SampleFetchOptions"} + if withDataSetIds: + fetchopts["dataSets"] = get_fetchoptions("dataSets") + request = { "method": "getSamples", "params": [self.token, identifiers, fetchopts], @@ -4723,6 +4733,9 @@ class Openbis: data=resp[sample_ident], ) else: + if raw_response: + parse_jackson(resp) + return resp return self._sample_list_for_response( response=list(resp.values()), props=props, parsed=False ) @@ -4801,7 +4814,7 @@ class Openbis: samples["container"] = samples["container"].map( extract_nested_identifier ) - for column in ["parents", "children", "components"]: + for column in ["parents", "children", "components", "dataSets"]: if column in samples: samples[column] = samples[column].map(extract_identifiers) samples["permId"] = samples["permId"].map(extract_permid) diff --git a/api-openbis-python3-pybis/src/python/setup.cfg b/api-openbis-python3-pybis/src/python/setup.cfg index 6feadb21ce8091dac2e2c46abc221ca37211f407..ae2a809cf65da2ecd34e1c0ed7f7865c04536297 100644 --- a/api-openbis-python3-pybis/src/python/setup.cfg +++ b/api-openbis-python3-pybis/src/python/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = PyBIS -version = 1.35.7rc3 +version = 1.36.0 author = ID SIS • ETH Zürich author_email = openbis-support@id.ethz.ch license = Apache Software License Version 2.0 diff --git a/api-openbis-python3-pybis/src/python/setup.py b/api-openbis-python3-pybis/src/python/setup.py index 7c625170c0351777a72bcbbaef0fee8b94ada21d..0efb031a3d7b0748014abb8fdf203c667cadeef5 100644 --- a/api-openbis-python3-pybis/src/python/setup.py +++ b/api-openbis-python3-pybis/src/python/setup.py @@ -26,7 +26,7 @@ with open("README.md", "r", encoding="utf-8") as fh: setup( name="PyBIS", - version="1.35.7rc3", + version="1.36.0", author="ID SIS • ETH Zürich", author_email="openbis-support@id.ethz.ch", description="openBIS connection and interaction, optimized for using with Jupyter", diff --git a/api-openbis-python3-pybis/src/python/tests/conftest.py b/api-openbis-python3-pybis/src/python/tests/conftest.py index 718e1d0483461163ee170a4ae83da3435bd08029..3be9356183298c95e7a326a9d8f66d543069bbd7 100644 --- a/api-openbis-python3-pybis/src/python/tests/conftest.py +++ b/api-openbis-python3-pybis/src/python/tests/conftest.py @@ -20,7 +20,7 @@ from pybis import Openbis openbis_url = "https://localhost:8443" admin_username = "admin" -admin_password = "changeit" +admin_password = "admin" @pytest.fixture(scope="module") def openbis_instance(): diff --git a/api-openbis-python3-pybis/src/python/tests/jenkinstest/__init__.py 
b/api-openbis-python3-pybis/src/python/tests/jenkinstest/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6b5d1ce929b60094a402f1ff6e10dc47f67f1e8d --- /dev/null +++ b/api-openbis-python3-pybis/src/python/tests/jenkinstest/__init__.py @@ -0,0 +1,14 @@ +# Copyright ETH 2007 - 2023 Zürich, Scientific IT Services +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/api-openbis-python3-pybis/src/python/tests/jenkinstest/artifactrepository.py b/api-openbis-python3-pybis/src/python/tests/jenkinstest/artifactrepository.py new file mode 100644 index 0000000000000000000000000000000000000000..23b787cb909a05a8013b2df6c03e429d37b86bf0 --- /dev/null +++ b/api-openbis-python3-pybis/src/python/tests/jenkinstest/artifactrepository.py @@ -0,0 +1,142 @@ +# Copyright ETH 2013 - 2023 Zürich, Scientific IT Services +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os.path +import re +import xml.dom.minidom +from urllib.request import urlopen + +from util import printAndFlush + + +class ArtifactRepository: + """ + Abstract artifact repository which keeps artifacts in a local repository folder. + The main method is getPathToArtifact() which returns the path to the requested artifact in the repository. + Concrete subclasses have to implement downloadArtifact(). + """ + + def __init__(self, localRepositoryFolder): + """ + Creates a new instance for the specified folder. The folder will be created if it does not exist. + """ + self.localRepositoryFolder = localRepositoryFolder + if not os.path.exists(localRepositoryFolder): + os.makedirs(localRepositoryFolder) + printAndFlush("Artifact repository: %s" % localRepositoryFolder) + + def clear(self): + """ + Removes all artifacts in the local repository folder. + """ + for f in os.listdir(self.localRepositoryFolder): + path = "%s/%s" % (self.localRepositoryFolder, f) + if os.path.isfile(path): + os.remove(path) + printAndFlush("Artifact repository cleared.") + + def getPathToArtifact(self, project, pattern='.*'): + """ + Returns the path to artifact requested by the specified pattern and project. + The pattern is a regular expression which has to match the beginning of the artifact file name. + The project specifies the project on CI server to download the artifact. + + An Exception is raised if non or more than one artifact matches the pattern. 
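+
+        Example (arguments mirror the installer lookup in testcase.py below):
+            path = repository.getPathToArtifact('app-openbis-installer', 'openBIS-installation')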
+ """ + files = [f for f in os.listdir(self.localRepositoryFolder) if re.match(pattern, f)] + if len(files) > 1: + raise Exception("More than one artifact in '%s' matches the pattern '%s': %s" + % (self.localRepositoryFolder, pattern, files)) + if len(files) == 0: + f = self.downloadArtifact(project, pattern) + else: + f = files[0] + return "%s/%s" % (self.localRepositoryFolder, f) + + def downloadArtifact(self, project, pattern): + """ + Abstract method which needs to be implemented by subclasses. + """ + pass + + def _download(self, readHandle, fileName): + filePath = "%s/%s" % (self.localRepositoryFolder, fileName) + writeHandle = open(filePath, 'wb') + try: + blockSize = 8192 + while True: + dataBlock = readHandle.read(blockSize) + if not dataBlock: + break + writeHandle.write(dataBlock) + finally: + writeHandle.close() + + +class JenkinsArtifactRepository(ArtifactRepository): + """ + Artifact repository for a CI server based on Jenkins. + """ + + def __init__(self, baseUrl, localRepositoryFolder): + """ + Creates a new instance for the specified server URL and local repository. + """ + ArtifactRepository.__init__(self, localRepositoryFolder) + self.baseUrl = baseUrl + + def downloadArtifact(self, project, pattern): + """ + Downloads the requested artifact from Jenkins. It uses the Jenkins API. + """ + projectUrl = "%s/job/%s" % (self.baseUrl, project) + apiUrl = "%s/lastSuccessfulBuild/api/xml?xpath=//artifact&wrapper=bag" % projectUrl + printAndFlush("Get artifact info from %s" % apiUrl) + handle = urlopen(apiUrl) # urllib.urlopen(apiUrl) + url = None + fileName = None + dom = xml.dom.minidom.parseString(handle.read()) + for element in dom.getElementsByTagName('artifact'): + elementFileName = element.getElementsByTagName('fileName')[0].firstChild.nodeValue + if re.match(pattern, elementFileName): + if fileName != None: + raise Exception( + "Pattern '%s' matches at least two artifacts in project '%s': %s and %s" + % (pattern, project, fileName, elementFileName)) + fileName = elementFileName + relativePath = element.getElementsByTagName('relativePath')[0].firstChild.nodeValue + url = "%s/lastSuccessfulBuild/artifact/%s" % (projectUrl, relativePath) + if url == None: + raise Exception( + "For pattern '%s' no artifact found in project '%s'." % (pattern, project)) + printAndFlush("Download %s to %s." % (url, self.localRepositoryFolder)) + self._download(urlopen(url), fileName) + return fileName + + +class GitArtifactRepository(ArtifactRepository): + """ + Artifact repository for a git projects. + """ + + def __init__(self, localRepositoryFolder, host='github.com'): + ArtifactRepository.__init__(self, localRepositoryFolder) + self.host = host + + def downloadArtifact(self, project, pattern): + url = "https://%s/%s/archive/%s" % (self.host, project, pattern) + printAndFlush("Download %s to %s." % (url, self.localRepositoryFolder)) + self._download(urlopen(url), pattern) + return pattern diff --git a/api-openbis-python3-pybis/src/python/tests/jenkinstest/settings.py b/api-openbis-python3-pybis/src/python/tests/jenkinstest/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..431a8a584306fb7a6a1a0efd879b4e28bdcb7884 --- /dev/null +++ b/api-openbis-python3-pybis/src/python/tests/jenkinstest/settings.py @@ -0,0 +1,55 @@ +# Copyright ETH 2013 - 2023 Zürich, Scientific IT Services +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" +Setup infrastructure common for all tests. +""" +import os.path +import sys + +# Default base URL of the CI server which hosts the artifacts. +ci_base_url = 'http://localhost:8080' + +reuseRepository = False +devMode = False +cmd = sys.argv[0] +if len(sys.argv) > 1: + firstArgument = sys.argv[1] + if firstArgument == '-r': + reuseRepository = True + elif firstArgument == '-dr' or firstArgument == '-rd': + reuseRepository = True + devMode = True + elif firstArgument == '-d': + devMode = True + elif firstArgument == '-s': + ci_base_url = sys.argv[2] + elif firstArgument == '-h': + print(("Usage: %s [-h|-r|-d|-rd|-s <ci server>]\n-h: prints this help\n-r: reuses artifact repository\n" + + "-d: developing mode\n-rd: both options\n" + + "-s <ci server>: option for CI server base URL") % os.path.basename(cmd)) + exit(1) + else: + print("Unknown option: %s. Use option '-h' to see usage." % firstArgument) + exit(1) + +dirname = os.path.dirname(os.path.abspath(__file__)) +sys.path.append("%s/source" % dirname) +sys.path.append("%s/sourceTest" % dirname) + +from artifactrepository import JenkinsArtifactRepository + +REPOSITORY = JenkinsArtifactRepository(ci_base_url, "%s/targets/artifact-repository" % dirname) +if not reuseRepository: + REPOSITORY.clear() diff --git a/api-openbis-python3-pybis/src/python/tests/jenkinstest/test.py b/api-openbis-python3-pybis/src/python/tests/jenkinstest/test.py new file mode 100644 index 0000000000000000000000000000000000000000..f1700ac09ccd83dc62c0033822df4c78daa4073c --- /dev/null +++ b/api-openbis-python3-pybis/src/python/tests/jenkinstest/test.py @@ -0,0 +1,34 @@ +# Copyright ETH 2023 Zürich, Scientific IT Services +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + +import settings +import testcase +import util as util + + +class TestCase(testcase.TestCase): + + def execute(self): + self.installOpenbis(technologies=['eln-lims', 'eln-lims-life-sciences', 'flow']) + # pybis should be installed on the jenkins job configuration level + self.openbisController = self.createOpenbisController() + self.openbisController.allUp() + # run tests + util.executeCommand(['pytest', '--verbose', '--junitxml=test_results_pybis.xml', + 'api-openbis-python3-pybis/src/python/tests']) + + +TestCase(settings, __file__).runTest() diff --git a/api-openbis-python3-pybis/src/python/tests/jenkinstest/testcase.py b/api-openbis-python3-pybis/src/python/tests/jenkinstest/testcase.py new file mode 100644 index 0000000000000000000000000000000000000000..65de13fa27cc8def80a489a31ee05e62ef856383 --- /dev/null +++ b/api-openbis-python3-pybis/src/python/tests/jenkinstest/testcase.py @@ -0,0 +1,872 @@ +# Copyright ETH 2013 - 2023 Zürich, Scientific IT Services +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import difflib +import os +import os.path +import re +import shutil +import time +import traceback + +import util as util + +INSTALLER_PROJECT = 'app-openbis-installer' +OPENBIS_STANDARD_TECHNOLOGIES_PROJECT = 'core-plugin-openbis' +DATAMOVER_PROJECT = 'datamover' + +PSQL_EXE = 'psql' + +PLAYGROUND = 'targets/playground' +TEMPLATES = 'templates' +TEST_DATA = 'testData' + +DEFAULT_TIME_OUT_IN_MINUTES = 5 + + +class TestCase(object): + """ + Abstract superclass of a test case. + Subclasses have to override execute() and optionally executeInDevMode(). + The test case is run by invoking runTest(). + Here is a skeleton of a test case: + + #!/usr/bin/python + import settings + import jenkinstest.testcase + + class TestCase(jenkinstest.testcase.TestCase): + + def execute(self): + .... + + def executeInDevMode(self): + .... + + TestCase(settings, __file__).runTest() + + There are two execution modes (controlled by command line option -d and -rd): + + Normal mode: + 1. Cleans up playground: Kills running servers and deletes playground folder of this test case. + 2. Invokes execute() method. + 3. Release resources: Shuts down running servers. + + Developing mode: + 1. Invokes executeInDevMode() method. + + The developing mode allows to reuse already installed servers. + Servers might be restarted. This mode leads to fast development + of test code by doing incremental development. Working code + can be moved from executeInDevMode() to execute(). + """ + + def __init__(self, settings, filePath): + self.artifactRepository = settings.REPOSITORY + self.project = None + fileName = os.path.basename(filePath) + self.name = fileName[0:fileName.rfind('.')] + self.playgroundFolder = "%s/%s" % (PLAYGROUND, self.name) + self.numberOfFailures = 0 + self.devMode = settings.devMode + self.runningInstances = [] + + def runTest(self): + """ + Runs this test case. This is a final method. It should not be overwritten. 
+ """ + startTime = time.time() + util.printAndFlush("\n/''''''''''''''''''' %s started at %s %s ''''''''''" + % (self.name, time.strftime('%Y-%m-%d %H:%M:%S'), + 'in DEV MODE' if self.devMode else '')) + try: + if not self.devMode: + if os.path.exists(self.playgroundFolder): + self._cleanUpPlayground() + os.makedirs(self.playgroundFolder) + self.execute() + else: + self.executeInDevMode() + success = self.numberOfFailures == 0 + except: + util.printAndFlush(traceback.format_exc()) + success = False + finally: + duration = util.renderDuration(time.time() - startTime) + if not self.devMode: + self.releaseResources() + if success: + util.printAndFlush( + "\...........SUCCESS: %s executed in %s .........." % (self.name, duration)) + else: + util.printAndFlush( + "\............FAILED: %s executed in %s .........." % (self.name, duration)) + raise Exception("%s failed" % self.name) + + def execute(self): + """ + Executes this test case in normal mode. + This is an abstract method which has to be overwritten in subclasses. + """ + pass + + def executeInDevMode(self): + """ + Executes this test case in developing mode. + This method can be overwritten in subclasses. + """ + pass + + def releaseResources(self): + """ + Releases resources. It shuts down all running servers. + This method can be overwritten in subclasses. + Note, this method can be invoked in subclasses as follows: + + super(type(self), self).releaseResources() + + """ + self._shutdownSevers() + + def assertPatternInLog(self, log, pattern): + if not re.search(pattern, log): + self.fail("Pattern doesn't match: %s" % pattern) + + def assertSmaller(self, itemName, expectedUpperLimit, actualValue, verbose=True): + """ + Asserts that actualValue <= expectedUpperLimit. If not the test will be continued but counted as failed. + Returns False if assertion fails otherwise True. + """ + if actualValue > expectedUpperLimit: + self.fail("%s\n actual value <%s> exceeds the expected upper limit <%s>" % ( + itemName, actualValue, expectedUpperLimit)) + return False + elif verbose: + util.printAndFlush("%s actual value <%s> is below the expected upper limit <%s>" % ( + itemName, actualValue, expectedUpperLimit)) + return True + + def assertEquals(self, itemName, expected, actual, verbose=True): + """ + Asserts that expected == actual. If not the test will be continued but counted as failed. + Returns False if assertion fails otherwise True. 
+ """ + rendered_expected = self._render(expected) + if expected != actual: + rendered_actual = self._render(actual) + diff = difflib.ndiff(rendered_expected.splitlines(), rendered_actual.splitlines()) + self.fail("%s\n Differences:\n%s" % (itemName, '\n'.join(diff))) + return False + elif verbose: + util.printAndFlush("%s as expected: <%s>" % (itemName, rendered_expected)) + return True + + def assertType(self, variableName, expectedType, variable): + self.assertEquals("Type of %s" % variableName, expectedType, type(variable)) + + def assertIn(self, itemsName, items, item): + if item not in items: + self.fail("Item %s not in %s" % (item, itemsName)) + util.printAndFlush("%s as expected: contains <%s>" % (itemsName, item)) + + def assertNone(self, itemName, item): + self.assertEquals(itemName, None, item) + + def assertNotNone(self, itemName, item): + if item is None: + self.fail("Item %s is None" % itemName) + util.printAndFlush("%s as expected: not None" % itemName) + + def assertTrue(self, itemName, item): + self.assertEquals(itemName, True, item) + + def assertFalse(self, itemName, item): + self.assertEquals(itemName, False, item) + + def assertLength(self, itemsName, length, items): + self.assertEquals("Length of %s" % itemsName, length, len(items)) + + def assertEmpty(self, itemsName, items): + self.assertLength(itemsName, 0, items) + + def assertNotEmpty(self, itemsName, items): + if len(items) == 0: + self.fail("%s is empty" % itemsName) + util.printAndFlush("%s as expected: not empty" % itemsName) + + def _render(self, item): + if not isinstance(item, list): + return str(item) + result = "" + for e in item: + if len(result) > 0: + result += "\n" + result += str(e) + return result + + def fail(self, errorMessage): + """ + Prints specified error message and mark test case as failed. + """ + self.numberOfFailures += 1 + util.printWhoAmI(levels=10, template="ERROR found (caller chain: %s)") + util.printAndFlush("ERROR causing test failure: %s" % errorMessage) + + def installScriptBasedServer(self, templateName, instanceName, + startCommand=['./start.sh'], stopCommand=['./stop.sh']): + installPath = self._getInstallPath(instanceName) + if os.path.exists(installPath): + shutil.rmtree(installPath) + shutil.copytree("%s/%s" % (self.getTemplatesFolder(), templateName), installPath) + return ScriptBasedServerController(self, self.name, installPath, instanceName, startCommand, + stopCommand) + + def createScriptBasedServerController(self, instanceName, startCommand=['./start.sh'], + stopCommand=['./stop.sh']): + return ScriptBasedServerController(self, self.name, self._getInstallPath(instanceName), + instanceName, + startCommand, stopCommand) + + def installDatamover(self, instanceName='datamover'): + zipFile = self.artifactRepository.getPathToArtifact(DATAMOVER_PROJECT, 'datamover') + installPath = self._getInstallPath(instanceName) + util.unzip(zipFile, self.playgroundFolder) + os.rename("%s/datamover" % (self.playgroundFolder), installPath) + return DatamoverController(self, self.name, installPath, instanceName) + + def createDatamoverController(self, instanceName='datamover'): + return DatamoverController(self, self.name, self._getInstallPath(instanceName), + instanceName) + + def installOpenbis(self, instanceName='openbis', technologies=[]): + """ + Installs openBIS from the installer. + + The instanceName specifies the subfolder in the playground folder + where the instance will be installed. + In addition it is also part of the database names. 
+ The technologies are an array of enabled technologies. + """ + installerPath = self.artifactRepository.getPathToArtifact(INSTALLER_PROJECT, + 'openBIS-installation') + installerFileName = os.path.basename(installerPath).rpartition('.tar')[0] + util.executeCommand(['tar', '-zxf', installerPath, '-C', self.playgroundFolder], + "Couldn't untar openBIS installer.") + consolePropertiesFile = "%s/%s/console.properties" % ( + self.playgroundFolder, installerFileName) + consoleProperties = util.readProperties(consolePropertiesFile) + installPath = self._getInstallPath(instanceName) + consoleProperties['INSTALL_PATH'] = installPath + consoleProperties['DSS_ROOT_DIR'] = "%s/data" % installPath + for technology in technologies: + consoleProperties[technology.upper()] = True + print(f"CONSOLE_PROPERTIES:{consoleProperties}") + util.writeProperties(consolePropertiesFile, consoleProperties) + util.executeCommand("%s/%s/run-console.sh" % (self.playgroundFolder, installerFileName), + "Couldn't install openBIS", consoleInput='admin\nadmin') + shutil.rmtree("%s/%s" % (self.playgroundFolder, installerFileName)) + + def cloneOpenbisInstance(self, nameOfInstanceToBeCloned, nameOfNewInstance, + dataStoreServerOnly=False): + """ Clones an openBIS instance. """ + + oldInstanceInstallPath = "%s/%s" % (self.playgroundFolder, nameOfInstanceToBeCloned) + newInstanceInstallPath = "%s/%s" % (self.playgroundFolder, nameOfNewInstance) + paths = ['bin', 'data', 'servers/core-plugins', 'servers/datastore_server'] + if not dataStoreServerOnly: + paths.append('servers/openBIS-server') + for path in paths: + util.copyFromTo(oldInstanceInstallPath, newInstanceInstallPath, path) + dssPropsFile = "%s/servers/datastore_server/etc/service.properties" % newInstanceInstallPath + dssProps = util.readProperties(dssPropsFile) + dssProps['root-dir'] = dssProps['root-dir'].replace(nameOfInstanceToBeCloned, + nameOfNewInstance) + util.writeProperties(dssPropsFile, dssProps) + + def createOpenbisController(self, instanceName='openbis', port='8443', dropDatabases=True, + databasesToDrop=[]): + """ + Creates an openBIS controller object assuming that an openBIS instance for the specified name is installed. + """ + return OpenbisController(self, self.name, self._getInstallPath(instanceName), instanceName, + port, + dropDatabases, databasesToDrop) + + def installScreeningTestClient(self): + """ Installs the screening test client and returns an instance of ScreeningTestClient. 
""" + zipFile = self.artifactRepository.getPathToArtifact(OPENBIS_STANDARD_TECHNOLOGIES_PROJECT, + 'openBIS-screening-API') + installPath = "%s/screeningAPI" % self.playgroundFolder + util.unzip(zipFile, installPath) + return ScreeningTestClient(self, installPath) + + def installPybis(self): + # install the local pybis in editable-mode (-e) + util.executeCommand(['pip3', 'install', '-e', '../api-openbis-python3-pybis/src/python'], + "Installation of pybis failed.") + + def installObis(self): + # install the local obis in editable-mode (-e) + util.executeCommand(['pip3', 'install', '-e', '../app-openbis-command-line/src/python'], + "Installation of obis failed.") + + def getTemplatesFolder(self): + return "%s/%s" % (TEMPLATES, self.name) + + def _getInstallPath(self, instanceName): + return os.path.abspath("%s/%s" % (self.playgroundFolder, instanceName)) + + def _cleanUpPlayground(self): + for f in os.listdir(self.playgroundFolder): + path = "%s/%s" % (self.playgroundFolder, f) + if not os.path.isdir(path): + continue + util.printAndFlush("clean up %s" % path) + util.killProcess("%s/servers/datastore_server/datastore_server.pid" % path) + util.killProcess("%s/servers/openBIS-server/jetty/openbis.pid" % path) + util.killProcess("%s/datamover.pid" % path) + util.deleteFolder(self.playgroundFolder) + + def _addToRunningInstances(self, controller): + self.runningInstances.append(controller) + + def _removeFromRunningInstances(self, controller): + if controller in self.runningInstances: + self.runningInstances.remove(controller) + + def _shutdownSevers(self): + for instance in reversed(self.runningInstances): + instance.stop() + + +class _Controller(object): + def __init__(self, testCase, testName, installPath, instanceName): + self.testCase = testCase + self.testName = testName + self.instanceName = instanceName + self.installPath = installPath + util.printAndFlush("Controller created for instance '%s'. Installation path: %s" % ( + instanceName, installPath)) + + def createFolder(self, folderPath): + """ + Creates a folder with specified path relative to installation directory. + """ + path = "%s/%s" % (self.installPath, folderPath) + os.makedirs(path) + + def assertEmptyFolder(self, pathRelativeToInstallPath): + """ + Asserts that the specified path (relative to the installation path) is an empty folder. + """ + relativePath = "%s/%s" % (self.installPath, pathRelativeToInstallPath) + files = self._getFiles(relativePath) + if len(files) == 0: + util.printAndFlush("Empty folder as expected: %s" % relativePath) + else: + self.testCase.fail( + "%s isn't empty. It contains the following files:\n %s" % (relativePath, files)) + + def assertFiles(self, folderPathRelativeToInstallPath, expectedFiles): + """ + Asserts that the specified path (relative to the installation path) contains the specified files. 
+ """ + relativePath = "%s/%s" % (self.installPath, folderPathRelativeToInstallPath) + files = self._getFiles(relativePath) + self.testCase.assertEquals("Files in %s" % relativePath, expectedFiles, sorted(files)) + + def _getFiles(self, relativePath): + if not os.path.isdir(relativePath): + self.testCase.fail("Doesn't exist or isn't a folder: %s" % relativePath) + files = os.listdir(relativePath) + return files + + +class ScriptBasedServerController(_Controller): + def __init__(self, testCase, testName, installPath, instanceName, startCommand, stopCommand): + super(ScriptBasedServerController, self).__init__(testCase, testName, installPath, + instanceName) + self.startCommand = startCommand + self.stopCommand = stopCommand + + def start(self): + self.testCase._addToRunningInstances(self) + util.executeCommand(self.startCommand, "Couldn't start server '%s'" % self.instanceName, + workingDir=self.installPath) + + def stop(self): + self.testCase._removeFromRunningInstances(self) + util.executeCommand(self.stopCommand, "Couldn't stop server '%s'" % self.instanceName, + workingDir=self.installPath) + + +class DatamoverController(_Controller): + def __init__(self, testCase, testName, installPath, instanceName): + super(DatamoverController, self).__init__(testCase, testName, installPath, instanceName) + self.servicePropertiesFile = "%s/etc/service.properties" % self.installPath + self.serviceProperties = util.readProperties(self.servicePropertiesFile) + self.serviceProperties['check-interval'] = 2 + self.serviceProperties['quiet-period'] = 5 + self.serviceProperties['inactivity-period'] = 15 + dataCompletedScript = "%s/%s/data-completed.sh" % ( + testCase.getTemplatesFolder(), instanceName) + if os.path.exists(dataCompletedScript): + self.serviceProperties['data-completed-script'] = "../../../../%s" % dataCompletedScript + + def setPrefixForIncoming(self, prefix): + """ Set service property 'prefix-for-incoming'. """ + self.serviceProperties['prefix-for-incoming'] = prefix + + def setTreatIncomingAsRemote(self, flag): + """ Set service property 'treat-incoming-as-remote'. """ + self.serviceProperties['treat-incoming-as-remote'] = flag + + def setOutgoingTarget(self, path): + """ + Set service property 'outgoing-target'. + This has to be a path relative to installation path of the datamover. + """ + self.serviceProperties['outgoing-target'] = path + + def setExtraCopyDir(self, path): + """ + Set service property 'extra-copy-dir'. + This has to be a path relative to installation path of the datamover. + """ + self.serviceProperties['extra-copy-dir'] = path + + def start(self): + """ Starts up datamover server. """ + util.writeProperties(self.servicePropertiesFile, self.serviceProperties) + self.testCase._addToRunningInstances(self) + output = util.executeCommand(["%s/datamover.sh" % self.installPath, 'start'], + suppressStdOut=True) + joinedOutput = '\n'.join(output) + if 'FAILED' in joinedOutput: + util.printAndFlush( + "Start up of datamover %s failed:\n%s" % (self.instanceName, joinedOutput)) + raise Exception("Couldn't start up datamover '%s'." % self.instanceName) + + def stop(self): + """ Stops datamover server. """ + self.testCase._removeFromRunningInstances(self) + util.executeCommand(["%s/datamover.sh" % self.installPath, 'stop'], + "Couldn't shut down datamover '%s'." % self.instanceName) + + def drop(self, testDataSetName): + """ Drops the specified test data set into incoming folder. 
""" + util.copyFromTo("%s/%s" % (TEST_DATA, self.testName), "%s/data/incoming" % self.installPath, + testDataSetName) + + +class ScreeningTestClient(object): + """ + Class representing the screeing test client. + """ + + def __init__(self, testCase, installPath): + self.testCase = testCase + self.installPath = installPath + + def run(self): + """ Runs the test client and returns the console output as a list of strings. """ + output = util.executeCommand(['java', + '-Djavax.net.ssl.trustStore=../openbis/servers/openBIS-server/jetty/etc/openBIS.keystore', + '-jar', 'openbis_screening_api.jar', 'admin', 'admin', + 'https://localhost:8443'], suppressStdOut=True, + workingDir=self.installPath) + with open("%s/log.txt" % self.installPath, 'w') as log: + for line in output: + log.write("%s\n" % line) + return output + + +class DataSet(object): + def __init__(self, resultSetRow): + self.id = resultSetRow[0] + self.dataStore = resultSetRow[1] + self.experimentCode = resultSetRow[2] + self.code = resultSetRow[3] + self.type = resultSetRow[4] + self.location = resultSetRow[5] + self.status = resultSetRow[6] + self.presentInArchive = resultSetRow[7] + self.producer = resultSetRow[8] + self.productionTimeStamp = resultSetRow[9] + self.parents = [] + self.children = [] + + def __str__(self): + parents = [d.id for d in self.parents] + children = [d.id for d in self.children] + return "%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s" % ( + self.id, self.dataStore, self.code, self.type, + self.location, self.status, self.presentInArchive, + parents, children, self.experimentCode, + self.producer, self.productionTimeStamp) + + +class OpenbisController(_Controller): + """ + Class to control AS and DSS of an installed openBIS instance. + """ + + def __init__(self, testCase, testName, installPath, instanceName, port='8443', + dropDatabases=True, databasesToDrop=[]): + """ + Creates a new instance for specifies test case with specified test and instance name, installation path. 
+ """ + super(OpenbisController, self).__init__(testCase, testName, installPath, instanceName) + self.templatesFolder = testCase.getTemplatesFolder() + self.binFolder = "%s/bin" % installPath + self.bisUpScript = "%s/bisup.sh" % self.binFolder + self.bisDownScript = "%s/bisdown.sh" % self.binFolder + self.dssUpScript = "%s/dssup.sh" % self.binFolder + self.dssDownScript = "%s/dssdown.sh" % self.binFolder + self.databaseKind = "%s_%s" % (testName, instanceName) + self.asServicePropertiesFile = "%s/servers/openBIS-server/jetty/etc/service.properties" % installPath + self.asProperties = None + if os.path.exists(self.asServicePropertiesFile): + self.asProperties = util.readProperties(self.asServicePropertiesFile) + self.asProperties['database.kind'] = self.databaseKind + self.asPropertiesModified = True + self.dssServicePropertiesFile = "%s/servers/datastore_server/etc/service.properties" % installPath + self.dssProperties = util.readProperties(self.dssServicePropertiesFile) + self.dssProperties['path-info-db.databaseKind'] = self.databaseKind + self.dssProperties['imaging-database.kind'] = self.databaseKind + self.dssPropertiesModified = True + self.passwdScript = "%s/servers/openBIS-server/jetty/bin/passwd.sh" % installPath + if port != '8443': + self.sslIniFile = "%s/servers/openBIS-server/jetty/start.d/ssl.ini" % installPath + if os.path.exists(self.sslIniFile): + self.sslIni = util.readProperties(self.sslIniFile) + self.sslIni['jetty.ssl.port'] = port + util.writeProperties(self.sslIniFile, self.sslIni) + if dropDatabases: + util.dropDatabase(PSQL_EXE, "openbis_%s" % self.databaseKind) + util.dropDatabase(PSQL_EXE, "pathinfo_%s" % self.databaseKind) + util.dropDatabase(PSQL_EXE, "imaging_%s" % self.databaseKind) + self._setUpStore() + self._setUpFileServer() + for databaseToDrop in databasesToDrop: + util.dropDatabase(PSQL_EXE, "%s_%s" % (databaseToDrop, self.databaseKind)) + self._applyCorePlugins() + + def setDummyAuthentication(self): + """ Disables authentication. """ + self.asProperties['authentication-service'] = 'dummy-authentication-service' + + def setOpenbisPortDataStoreServer(self, port): + as_url = self.dssProperties['server-url'] + util.printAndFlush('as_url' + as_url) + parts = as_url.split(':') + s = "" + for idx, part in enumerate(parts): + if (idx < len(parts) - 1): + s = s + part + ":" + self.dssProperties['server-url'] = s + port + + def setDataStoreServerCode(self, code): + """ Sets the code of the Data Store Server. """ + self.dssProperties['data-store-server-code'] = code + + def getDataStoreServerCode(self): + return self.dssProperties['data-store-server-code'] + + def setDataStoreServerPort(self, port): + """ Sets the port of the Data Store Server. """ + self.dssProperties['port'] = port + + def setDataStoreServerUsername(self, username): + """ Sets the username of the Data Store Server. 
""" + self.dssProperties['username'] = username + + def setDataStoreServerProperty(self, prop, val): + """ Can be used to set the value of any property in DSS service.properties """ + self.dssProperties[prop] = val + + def setAsMaxHeapSize(self, maxHeapSize): + self._setMaxHeapSize("openBIS-server/jetty/etc/openbis.conf", maxHeapSize) + + def setDssMaxHeapSize(self, maxHeapSize): + self._setMaxHeapSize("datastore_server/etc/datastore_server.conf", maxHeapSize) + + def enableProjectSamples(self): + self.asProperties['project-samples-enabled'] = "true" + + def assertFileExist(self, pathRelativeToInstallPath): + """ + Asserts that the specified path (relative to the installation path) exists. + """ + relativePath = "%s/%s" % (self.installPath, pathRelativeToInstallPath) + if os.path.exists(relativePath): + util.printAndFlush("Path exists as expected: %s" % relativePath) + else: + self.testCase.fail("Path doesn't exist: %s" % relativePath) + + def assertDataSetContent(self, pathToOriginal, dataSet): + path = "%s/data/store/1/%s/original" % (self.installPath, dataSet.location) + path = "%s/%s" % (path, os.listdir(path)[0]) + numberOfDifferences = util.getNumberOfDifferences(pathToOriginal, path) + if numberOfDifferences > 0: + self.testCase.fail("%s differences found." % numberOfDifferences) + + def assertNumberOfDataSets(self, expectedNumberOfDataSets, dataSets): + """ + Asserts that the specified number of data sets from the specified list of DataSet instances + are in the data store. + """ + count = 0 + for dataSet in dataSets: + if dataSet.dataStore != self.getDataStoreServerCode() or dataSet.location == '': + continue + count += 1 + self.assertFileExist("data/store/1/%s" % dataSet.location) + self.testCase.assertEquals( + "Number of data sets in data store %s" % self.getDataStoreServerCode(), + expectedNumberOfDataSets, count) + + def storeDirectory(self): + """ + Return the path to the data/store directory + """ + return "data/store" + + def getDataSets(self): + """ + Returns all data sets as a list (ordered by data set ids) of instances of class DataSet. + """ + resultSet = self.queryDatabase('openbis', + "select data.id,ds.code,e.code,data.code,t.code,location,status,present_in_archive," + + " data.data_producer_code,data.production_timestamp from data" + + " left join external_data as ed on ed.id = data.id" + + " join data_set_types as t on data.dsty_id = t.id" + + " join experiments as e on data.expe_id = e.id" + + " join data_stores as ds on data.dast_id = ds.id order by data.id") + dataSets = [] + dataSetsById = {} + for row in resultSet: + dataSet = DataSet(row) + dataSets.append(dataSet) + dataSetsById[dataSet.id] = dataSet + relationships = self.queryDatabase('openbis', + "select data_id_parent, data_id_child from data_set_relationships" + + " order by data_id_parent, data_id_child") + for parent_id, child_id in relationships: + parent = dataSetsById[parent_id] + child = dataSetsById[child_id] + parent.children.append(child) + child.parents.append(parent) + util.printAndFlush( + "All data sets:\nid,dataStore,code,type,location,status,presentInArchive,parents,children,experiment,producer,productionTimeStamp") + for dataSet in dataSets: + util.printAndFlush(dataSet) + return dataSets + + def createTestDatabase(self, databaseType): + """ + Creates a test database for the specified database type. 
+ """ + database = "%s_%s" % (databaseType, self.databaseKind) + scriptPath = "%s/%s.sql" % (self.templatesFolder, database) + util.createDatabase(PSQL_EXE, database, scriptPath) + + def dropDatabase(self, databaseType): + """ + Drops the database for the specified database type. + """ + util.dropDatabase(PSQL_EXE, "%s_%s" % (databaseType, self.databaseKind)) + + def queryDatabase(self, databaseType, queryStatement, showHeaders=False): + """ + Executes the specified SQL statement for the specified database type. Result set is returned + as a list of lists. + """ + database = "%s_%s" % (databaseType, self.databaseKind) + return util.queryDatabase(PSQL_EXE, database, queryStatement, showHeaders) + + def allUp(self): + """ Starts up AS and DSS if not running. """ + if not util.isAlive("%s/servers/openBIS-server/jetty/openbis.pid" % self.installPath, + "openBIS.keystore"): + self._saveAsPropertiesIfModified() + util.executeCommand([self.bisUpScript], + "Starting up openBIS AS '%s' failed." % self.instanceName) + self.dssUp() + + def stop(self): + self.allDown() + + def allDown(self): + """ Shuts down AS and DSS. """ + self.testCase._removeFromRunningInstances(self) + util.executeCommand([self.dssDownScript], + "Shutting down openBIS DSS '%s' failed." % self.instanceName) + if self.asProperties: + util.executeCommand([self.bisDownScript], + "Shutting down openBIS AS '%s' failed." % self.instanceName) + + def dssUp(self): + """ Starts up DSS if not running. """ + if not util.isAlive("%s/servers/datastore_server/datastore_server.pid" % self.installPath, + "openBIS.keystore"): + self._saveDssPropertiesIfModified() + self.testCase._addToRunningInstances(self) + util.executeCommand([self.dssUpScript], + "Starting up openBIS DSS '%s' failed." % self.instanceName) + + def dssDown(self): + """ Shuts down DSS. """ + self.testCase._removeFromRunningInstances(self) + util.executeCommand([self.dssDownScript], + "Shutting down openBIS DSS '%s' failed." % self.instanceName) + + def dropAndWait(self, dataName, dropBoxName, numberOfDataSets=1, + timeOutInMinutes=DEFAULT_TIME_OUT_IN_MINUTES): + """ + Drops the specified data into the specified drop box. The data is either a folder or a ZIP file + in TEST_DATA/<test name>. A ZIP file will be unpacked in the drop box. After dropping the method waits + until the specified number of data sets have been registered. + """ + self.drop(dataName, dropBoxName) + self.waitUntilDataSetRegistrationFinished(numberOfDataSets=numberOfDataSets, + timeOutInMinutes=timeOutInMinutes) + + def dataFile(self, dataName): + """ + Returns the path to the given test data + """ + return "%s/%s/%s" % (TEST_DATA, self.testName, dataName) + + def drop(self, dataName, dropBoxName): + """ + Drops the specified test data into the specified drop box. The test data is either a folder or a ZIP file + in TEST_DATA/<test name>. A ZIP file will be unpacked in the drop box. + """ + destination = "%s/data/%s" % (self.installPath, dropBoxName) + self.dropIntoDestination(dataName, destination) + + def dropIntoDestination(self, dataName, destination): + """ + Drops the specified test data into the destination. The test data is either a folder or a ZIP file + in TEST_DATA/<test name>. A ZIP file will be unpacked in the drop box. 
+ """ + testDataFolder = "%s/%s" % (TEST_DATA, self.testName) + if dataName.endswith('.zip'): + util.unzip("%s/%s" % (testDataFolder, dataName), destination) + else: + util.copyFromTo(testDataFolder, destination, dataName) + + def waitUntilDataSetRegistrationFinished(self, numberOfDataSets=1, + timeOutInMinutes=DEFAULT_TIME_OUT_IN_MINUTES): + """ Waits until the specified number of data sets have been registrated. """ + monitor = self.createLogMonior(timeOutInMinutes) + monitor.addNotificationCondition(util.RegexCondition('Incoming Data Monitor')) + monitor.addNotificationCondition(util.RegexCondition('post-registration')) + numberOfRegisteredDataSets = 0 + while numberOfRegisteredDataSets < numberOfDataSets: + condition1 = util.RegexCondition('Post registration of (\\d*). of \\1 data sets') + condition2 = util.RegexCondition( + 'Paths inside data set .* successfully added to database') + elements = monitor.waitUntilEvent(util.ConditionSequence([condition1, condition2])) + numberOfRegisteredDataSets += int(elements[0][0]) + util.printAndFlush( + "%d of %d data sets registered" % (numberOfRegisteredDataSets, numberOfDataSets)) + + def waitUntilDataSetRegistrationFailed(self, timeOutInMinutes=DEFAULT_TIME_OUT_IN_MINUTES): + """ Waits until data set registration failed. """ + self.waitUntilConditionMatched(util.EventTypeCondition('ERROR'), timeOutInMinutes) + util.printAndFlush("Data set registration failed as expected.") + + def waitUntilConditionMatched(self, condition, timeOutInMinutes=DEFAULT_TIME_OUT_IN_MINUTES): + """ + Waits until specified condition has been detected in DSS log. + """ + monitor = self.createLogMonior(timeOutInMinutes) + monitor.addNotificationCondition(util.RegexCondition('Incoming Data Monitor')) + monitor.addNotificationCondition(util.RegexCondition('post-registration')) + monitor.waitUntilEvent(condition) + + def createLogMonior(self, timeOutInMinutes=DEFAULT_TIME_OUT_IN_MINUTES): + logFilePath = "%s/servers/datastore_server/log/datastore_server_log.txt" % self.installPath + return util.LogMonitor("%s.DSS" % self.instanceName, logFilePath, timeOutInMinutes) + + def assertFeatureVectorLabel(self, featureCode, expectedFeatureLabel): + data = self.queryDatabase('imaging', + "select distinct label from feature_defs where code = '%s'" % featureCode) + self.testCase.assertEquals("label of feature %s" % featureCode, [[expectedFeatureLabel]], + data) + + def _applyCorePlugins(self): + source = "%s/core-plugins/%s" % (self.templatesFolder, self.instanceName) + if os.path.exists(source): + corePluginsFolder = "%s/servers/core-plugins" % self.installPath + destination = "%s/%s" % (corePluginsFolder, self.instanceName) + shutil.rmtree(destination, ignore_errors=True) + shutil.copytree(source, destination) + self.enableCorePlugin(self.instanceName) + + def enableCorePlugin(self, pluginName): + corePluginsFolder = "%s/servers/core-plugins" % self.installPath + corePluginsPropertiesFile = "%s/core-plugins.properties" % corePluginsFolder + corePluginsProperties = util.readProperties(corePluginsPropertiesFile) + enabledModules = corePluginsProperties['enabled-modules'] + enabledModules = "%s, %s" % (enabledModules, pluginName) if len( + enabledModules) > 0 else pluginName + corePluginsProperties['enabled-modules'] = enabledModules + util.writeProperties(corePluginsPropertiesFile, corePluginsProperties) + + def addUser(self, name, password): + util.executeCommand([self.passwdScript, 'add', name, '-p', password], + "Could not add user '%s' to instance '%s'." 
% (name, self.instanceName)) + + def _setUpStore(self): + templateStore = "%s/stores/%s" % (self.templatesFolder, self.instanceName) + if os.path.isdir(templateStore): + storeFolder = "%s/data/store" % self.installPath + util.printAndFlush("Set up initial data store by copying content of %s to %s" % ( + templateStore, storeFolder)) + shutil.rmtree(storeFolder, ignore_errors=True) + shutil.copytree(templateStore, storeFolder) + + def _setUpFileServer(self): + templateFileServer = "%s/file-servers/%s" % (self.templatesFolder, self.instanceName) + if os.path.isdir(templateFileServer): + fileServiceFolder = "%s/data/file-server" % self.installPath + util.printAndFlush("Set up initial file server by copying content of %s to %s" % ( + templateFileServer, fileServiceFolder)) + shutil.rmtree(fileServiceFolder, ignore_errors=True) + shutil.copytree(templateFileServer, fileServiceFolder) + + def _saveAsPropertiesIfModified(self): + if self.asPropertiesModified: + util.writeProperties(self.asServicePropertiesFile, self.asProperties) + self.asPropertiesModified = False + + def _saveDssPropertiesIfModified(self): + if self.dssPropertiesModified: + util.writeProperties(self.dssServicePropertiesFile, self.dssProperties) + self.dssPropertiesModified = False + + def _setMaxHeapSize(self, configFile, maxHeapSize): + path = "%s/servers/%s" % (self.installPath, configFile) + lines = [] + for line in util.getContent(path): + if line.strip().startswith('JAVA_MEM_OPTS'): + line = re.sub(r'(.*)-Xmx[^ ]+(.*)', r"\1-Xmx%s\2" % maxHeapSize, line) + lines.append(line) + with open(path, "w") as f: + for line in lines: + f.write("%s\n" % line) diff --git a/api-openbis-python3-pybis/src/python/tests/jenkinstest/util.py b/api-openbis-python3-pybis/src/python/tests/jenkinstest/util.py new file mode 100644 index 0000000000000000000000000000000000000000..d1e0c4305ceea13a839f109be27c2251a9d94278 --- /dev/null +++ b/api-openbis-python3-pybis/src/python/tests/jenkinstest/util.py @@ -0,0 +1,412 @@ +# Copyright ETH 2013 - 2023 Zürich, Scientific IT Services +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import filecmp +import inspect +import os +import os.path +import re +import shutil +import subprocess +import sys +import time +import zipfile + +USER=os.environ['USER'] +DEFAULT_WHO_AM_I_TEMPLATE=""" + +/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\ +\\/\\/\/ %s +/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\ + +""" + +def printWhoAmI(levels = 1, template = DEFAULT_WHO_AM_I_TEMPLATE): + """ + Prints the names of the functions in the caller chain of this function up to the specified number of levels. 
+ """ + stack = inspect.stack() + chain = '' + for i in range(1, min(levels + 1, len(stack))): + stack_entry = stack[i] + location = "%s:%s" % (stack_entry[3], stack_entry[2]) + chain = "%s > %s" % (location, chain) if chain != '' else location + printAndFlush(template % chain) + +def printAndFlush(data): + """ + Prints argument onto the standard console and flushes output. + This is necessary to get Python output and bash output in sync on CI server. + """ + print(data) + sys.stdout.flush() + + +def readProperties(propertiesFile): + """ + Reads a Java properties file and returns the key-value pairs as a dictionary. + """ + with open(propertiesFile, "r") as f: + result = {} + for line in f.readlines(): + trimmedLine = line.strip() + if len(trimmedLine) > 0 and not trimmedLine.startswith('#'): + splittedLine = line.split('=', 1) + key = splittedLine[0].strip() + value = splittedLine[1].strip() + result[key] = value + return result + +def writeProperties(propertiesFile, dictionary): + """ + Saves the specified dictionary as a Java properties file. + """ + with open(propertiesFile, "w") as f: + for key in sorted(dictionary): + f.write("%s=%s\n" % (key, dictionary[key])) + +def executeCommand(commandWithArguments, failingMessage = None, consoleInput = None, suppressStdOut = False, + workingDir = None): + """ + Executes specified command with arguments. + If the exit value of the command is not zero and a failing message has been specified + an exception with the failing message will be thrown. + Optionally a string for console input can be specified. + If flag suppressStdOut is set standard output will be suppressed but returned as a list of output lines. + If workingDir is specified a change to workingDir is done for execution. + """ + printAndFlush("\n------- START: %s" % commandWithArguments) + currentDir = None + if workingDir != None: + printAndFlush("change to working directory '%s'" % workingDir) + currentDir = os.getcwd() + os.chdir(workingDir) + try: + processIn = subprocess.PIPE if consoleInput != None else None + processOut = subprocess.PIPE if suppressStdOut else None + # Setting the time zone is needed for sprint server otherwise Java log files have wrong time zone + os.environ['TZ'] = time.tzname[0] + p = subprocess.Popen(commandWithArguments, stdin = processIn, stdout = processOut, encoding='utf8') + if consoleInput != None: + p.communicate(consoleInput) + lines = [] + if suppressStdOut: + for line in iter(p.stdout.readline,''): + lines.append(line.strip()) + exitValue = p.wait() + if currentDir != None: + printAndFlush("change back to previous working directory '%s'" % currentDir) + if exitValue != 0 and failingMessage != None: + printAndFlush("---- FAILED %d: %s" % (exitValue, commandWithArguments)) + raise Exception(failingMessage) + printAndFlush("---- FINISHED: %s" % commandWithArguments) + return lines + finally: + if currentDir != None: + os.chdir(currentDir) + + + +def killProcess(pidFile): + """ + Kills the process in specified PID file. Does nothing if PID file doesn't exist. + """ + pid = getPid(pidFile) + if pid is None: + return + executeCommand(['kill', pid]) + +def isAlive(pidFile, pattern): + """ + Checks if the process with PID in specified file is alive. The specified regex + is used to check that the process of expected PID is the process expected. 
+ """ + pid = getPid(pidFile) + if pid is None: + return False + lines = executeCommand(['ps', '-p', pid], suppressStdOut=True) + if len(lines) < 2: + return False + return re.compile(pattern).search(lines[1]) is not None + + +def getPid(pidFile): + if not os.path.exists(pidFile): + return None + return readFirstLine(pidFile) + +def readFirstLine(textFile): + """ + Returns the first line of the specified textFile. + """ + with open(textFile, 'r') as handle: + return handle.readline().rstrip() + +def unzip(zipFile, destination): + """ + Unzips specified ZIP file at specified destination. + """ + executeCommand(['unzip', '-q', '-o', zipFile, '-d', destination], "Couldn't unzip %s at %s" % (zipFile, destination)) + +def unzipSubfolder(zipFile, subfolder, destination): + """ + Unzips the specified subtree from the specified ZIP file into the specified destination + """ + zf = zipfile.ZipFile(zipFile) + parent, name = os.path.split(subfolder) + if name == '': + parent = os.path.dirname(parent) + for entry in zf.namelist(): + if entry.startswith(subfolder): + newPath = entry.replace(parent, destination) + newPathParent = os.path.dirname(newPath) + if not os.path.exists(newPathParent): + os.makedirs(newPathParent) + if not newPath.endswith('/'): + data = zf.read(entry) + with open(newPath, 'wb') as out: + out.write(data) + +def deleteFolder(folderPath): + """ + Deletes the specified folder. + Raises an exception in case of error. + """ + printAndFlush("Delete '%s'" % folderPath) + def errorHandler(*args): + _, path, _ = args + raise Exception("Couldn't delete '%s'" % path) + shutil.rmtree(folderPath, onerror = errorHandler) + +def copyFromTo(sourceFolder, destinationFolder, relativePathInSourceFolder): + source = "%s/%s" % (sourceFolder, relativePathInSourceFolder) + destination = "%s/%s" % (destinationFolder, relativePathInSourceFolder) + if os.path.isfile(source): + shutil.copyfile(source, destination) + else: + shutil.copytree(source, + destination, ignore = shutil.ignore_patterns(".*")) + printAndFlush("'%s' copied from '%s' to '%s'" % (relativePathInSourceFolder, sourceFolder, destinationFolder)) + +def getDatabaseHost(): + host = os.environ.get('FORCE_OPENBIS_POSTGRES_HOST') + if (host is None): + host = "localhost" + return host + +def dropDatabase(psqlExe, database): + """ + Drops the specified database by using the specified path to psql. + """ + executeCommand([psqlExe, + '-h', getDatabaseHost(), + '-U', 'postgres', '-c' , "drop database if exists %s" % database], + "Couldn't drop database %s" % database) + +def createDatabase(psqlExe, database, scriptPath = None): + """ + Creates specified database and run (if defined) the specified SQL script. + """ + executeCommand([psqlExe, + '-h', getDatabaseHost(), + '-U', 'postgres', '-c' , "create database %s with owner %s" % (database, USER)], + "Couldn't create database %s" % database) + if scriptPath == None: + return + executeCommand([psqlExe, + '-h', getDatabaseHost(), + '-q', '-U', USER, '-d', database, '-f', scriptPath], suppressStdOut=True, + failingMessage="Couldn't execute script %s for database %s" % (scriptPath, database)) + +def queryDatabase(psqlExe, database, queryStatement, showHeaders = False): + """ + Queries specified database by applying specified SQL statement and returns the result set as a list + where each row is a list, too. 
+ """ + printingOption = '-A' if showHeaders else '-tA' + lines = executeCommand([psqlExe, + '-h', getDatabaseHost(), + '-U', 'postgres', printingOption, '-d', database, '-c', queryStatement], + "Couldn't execute query: %s" % queryStatement, suppressStdOut = True) + result = [] + for line in lines: + result.append(line.split('|')) + return result + +def printResultSet(resultSet): + """ + Prints the specified result set. + """ + for row in resultSet: + printAndFlush(row) + +def getNumberOfDifferences(fileOrFolder1, fileOrFolder2): + """ + Gets and reports differences in file system structures between both arguments. + """ + result = filecmp.dircmp(fileOrFolder1, fileOrFolder2, ignore=['.svn']) + result.report() + return len(result.left_only) + len(result.right_only) + len(result.diff_files) + +def getContent(path): + """ + Returns the content at specified path as an array of lines. Trailing white spaces (including new line) + has been stripped off. + """ + with open(path, "r") as f: + return [ l.rstrip() for l in f.readlines()] + +def renderDuration(duration): + renderedDuration = renderNumber(duration, 'second') + if duration > 80: + minutes = duration / 60 + seconds = duration % 60 + if seconds > 0: + renderedDuration = "%s and %s" % (renderNumber(minutes, 'minute'), renderNumber(seconds, 'second')) + else: + renderedDuration = renderNumber(minutes, 'minute') + return renderedDuration + +def renderNumber(number, unit): + return ("1 %s" % unit) if number == 1 else ("%d %ss" % (number, unit)) + + +class LogMonitor(): + """ + Monitor of a log file. Conditions can be specified for printing a notification and waiting. + + A condition has to be a class with method 'match' which has two string arguments: + Event type and log message. It returns 'None' in case of no match and + a tuple with zero or more matching elements found in log message. + """ + def __init__(self, logName, logFilePath, timeOutInMinutes = 5): + """ + Creates an instance with specified log name (used in notification), log file, and time out. + """ + self.logName = logName + self.logFilePath = logFilePath + self.timeOutInMinutes = timeOutInMinutes + self.conditions = [] + self.timeProvider = time + class SystemPrinter: + def printMsg(self, msg): + printAndFlush(msg) + self.printer = SystemPrinter() + + def addNotificationCondition(self, condition): + """ + Adds a notification condition + """ + self.conditions.append(condition) + + def getFormattedTime(self, timeSec): + if timeSec is None: + self.printer.printMsg("Error: Provided time is None!") + return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timeSec)) + + + def waitUntilEvent(self, condition, startTime = None, delay = 0): + """ + Waits until an event matches the specified condition. + Returns tuple with zero or more elements of matching log message. + """ + startTime = self.timeProvider.time() if startTime is None else startTime + self.conditions.append(condition) + renderedStartTime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(startTime)) + self.printer.printMsg("\n>>>>> Start monitoring %s log at %s >>>>>>>>>>>>>>>>>>>>" + % (self.logName, renderedStartTime)) + finalTime = startTime + self.timeOutInMinutes * 60 + if delay > 0: + time.sleep(delay) + try: + alreadyPrintedLines = set() + while True: + log = open(self.logFilePath, 'r') + while True: + actualTime = self.timeProvider.time() + if actualTime > finalTime: + self.printer.printMsg(f"Time out detected! 
start time: {renderedStartTime}, calculated end time: {self.getFormattedTime(finalTime)}, current time: {self.getFormattedTime(actualTime)}, timeout (min): {self.timeOutInMinutes}") + raise Exception("Time out after %d minutes for monitoring %s log." + % (self.timeOutInMinutes, self.logName)) + line = log.readline() + if line == '': + break + match = re.match('(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}),(\d{3}) (.{6})(.*)', line) + if match == None: + continue + timestamp = match.group(1) + milliseconds = int(match.group(2)) + eventType = match.group(3).strip() + message = match.group(4) + eventTime = time.mktime(time.strptime(timestamp, '%Y-%m-%d %H:%M:%S')) + 0.001 * milliseconds + if eventTime < startTime: + continue + for c in self.conditions: + if c.match(eventType, message) != None and not line in alreadyPrintedLines: + alreadyPrintedLines.add(line) + self.printer.printMsg(">> %s" % line.strip()) + break + elements = condition.match(eventType, message) + if elements != None: + return elements + log.seek(0, os.SEEK_CUR) + time.sleep(2) + finally: + self.printer.printMsg(">>>>> Finished monitoring %s log >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" + % self.logName) + +class EventTypeCondition(): + """ A condition which matches in case of specified event type. """ + def __init__(self, eventType): + self.eventType = eventType + + def match(self, eventType, message): + return () if self.eventType == eventType else None + +class StartsWithCondition(): + """ + A condition which matches if the message starts with a specified string. + """ + def __init__(self, startsWithString): + self.startsWithString = startsWithString + + def match(self, eventType, message): + return () if message.startswith(self.startsWithString) else None + +class RegexCondition(): + """ + A condition which matches if the message matches a specified regular expression. 
+ """ + def __init__(self, regex): + self.regex = regex + + def match(self, eventType, message): + match = re.search(self.regex, message) + return match.groups() if match else None + +class ConditionSequence(): + def __init__(self, conditions): + self.conditions = conditions + self.matches = [] + + def match(self, eventType, message): + match_count = len(self.matches) + if match_count == len(self.conditions): + return self.matches + result = self.conditions[match_count].match(eventType, message) + if result is not None: + self.matches.append(result) + if len(self.matches) == len(self.conditions): + return self.matches + return None \ No newline at end of file diff --git a/api-openbis-python3-pybis/src/python/tests/test_dataset.py b/api-openbis-python3-pybis/src/python/tests/test_dataset.py index baefba6ae121fcc89a9ab4fac17d7cc7b59d5846..f53f8376247b0dd4b6482329947cfaf3824e540d 100644 --- a/api-openbis-python3-pybis/src/python/tests/test_dataset.py +++ b/api-openbis-python3-pybis/src/python/tests/test_dataset.py @@ -23,18 +23,68 @@ import pytest from pybis.things import Things -def test_get_datasets(space): - # test paging +def test_get_datasets_count(space): o = space.openbis - current_datasets = o.get_datasets(start_with=1, count=1) - assert current_datasets is not None - assert len(current_datasets) == 1 + testfile_path = os.path.join(os.path.dirname(__file__), "testdir/testfile") + dataset = o.new_dataset( + type="RAW_DATA", + experiment="/DEFAULT/DEFAULT/DEFAULT", + files=[testfile_path], + props={"$name": "some good name"}, + ) + dataset.save() + + try: + current_datasets = o.get_datasets(count=1) + assert current_datasets is not None + assert len(current_datasets) == 1 + finally: + dataset.delete("test_get_datasets_count", True) + + +def test_get_datasets_paging(space): + o = space.openbis + testfile_path = os.path.join(os.path.dirname(__file__), "testdir/testfile") + dataset1 = o.new_dataset( + type="RAW_DATA", + experiment="/DEFAULT/DEFAULT/DEFAULT", + files=[testfile_path], + props={"$name": "some good name"}, + ) + dataset1.save() + + dataset2 = o.new_dataset( + type="RAW_DATA", + experiment="/DEFAULT/DEFAULT/DEFAULT", + files=[testfile_path], + props={"$name": "some good name"}, + ) + dataset2.save() + + try: + current_datasets = o.get_datasets(start_with=1, count=1) + assert current_datasets is not None + assert len(current_datasets) == 1 + finally: + dataset1.delete("test_get_datasets_paging", True) + dataset2.delete("test_get_datasets_paging", True) + + +def test_create_datasets_no_file(space): + o = space.openbis + with pytest.raises(Exception) as exc: + o.new_dataset( + type="RAW_DATA", + experiment="/DEFAULT/DEFAULT/DEFAULT", + props={"$name": "some good name"}, + ) + assert str(exc.value) == "please provide at least one file" def test_create_delete_dataset(space): timestamp = time.strftime("%a_%y%m%d_%H%M%S").upper() o = space.openbis - testfile_path = os.path.join(os.path.dirname(__file__), "testfile") + testfile_path = os.path.join(os.path.dirname(__file__), "testdir/testfile") dataset = o.new_dataset( type="RAW_DATA", @@ -210,7 +260,7 @@ def test_create_new_dataset_v3_directory(space): dataset.save() assert dataset.permId is not None - assert dataset.file_list == ["testdir/testfile"] + assert dataset.file_list == ["original/DEFAULT/testdir/testfile"] def test_dataset_property_in_isoformat_date(space): @@ -303,12 +353,11 @@ def create_array_properties(openbis, code_prefix): def test_dataset_array_properties(space): - create_array_properties(space.openbis, "DATASET") 
dataset_code = 'TEST_ARRAY_DATASET' dataset_type = space.openbis.new_dataset_type( - code = dataset_code + code=dataset_code ) dataset_type.save() @@ -321,10 +370,10 @@ def test_dataset_array_properties(space): testfile_path = os.path.join(os.path.dirname(__file__), "testdir/testfile") dataset = space.openbis.new_dataset( - type = dataset_code, + type=dataset_code, sample="/DEFAULT/DEFAULT/DEFAULT", files=[testfile_path], - props = { 'dataset_array_integer': [1, 2, 3]} + props={'dataset_array_integer': [1, 2, 3]} ) dataset.save() @@ -332,10 +381,14 @@ def test_dataset_array_properties(space): dataset.props['dataset_array_real'] = [3.1, 2.2, 1.3] dataset.props['dataset_array_string'] = ["aa", "bb", "cc"] dataset.props['dataset_array_timestamp'] = ['2023-05-18 11:17:03', '2023-05-18 11:17:04', - '2023-05-18 11:17:05'] + '2023-05-18 11:17:05'] dataset.props['dataset_json'] = "{ \"key\": [1, 1, 1] }" dataset.save() - assert dataset.props['sample_array_integer'] == [3, 2, 1] - assert dataset.props['sample_array_real'] == [3.1, 2.2, 1.3] - + assert dataset.props['dataset_array_integer'] == [3, 2, 1] + assert dataset.props['dataset_array_real'] == [3.1, 2.2, 1.3] + assert dataset.props['dataset_array_string'] == ["aa", "bb", "cc"] + assert dataset.props['dataset_json'] == "{ \"key\": [1, 1, 1] }" + assert dataset.props['dataset_array_timestamp'] == ['2023-05-18 11:17:03', + '2023-05-18 11:17:04', + '2023-05-18 11:17:05'] diff --git a/api-openbis-python3-pybis/src/python/tests/test_openbis.py b/api-openbis-python3-pybis/src/python/tests/test_openbis.py index 3c59ee469e83071e5f6e84ec86e1ad2e595682f0..1263ac709a3ba00e7481951a8321e6bb62b4966e 100644 --- a/api-openbis-python3-pybis/src/python/tests/test_openbis.py +++ b/api-openbis-python3-pybis/src/python/tests/test_openbis.py @@ -16,6 +16,7 @@ import re import time import pytest + from pybis import Openbis @@ -25,7 +26,7 @@ def test_token(openbis_instance): assert openbis_instance.is_session_active() is True -def test_http_only(openbis_instance): +def http_only(): with pytest.raises(Exception): new_instance = Openbis("http://localhost") assert new_instance is None diff --git a/api-openbis-python3-pybis/src/python/tests/test_project.py b/api-openbis-python3-pybis/src/python/tests/test_project.py index b6878e478e807bcf745fbe23544b1dae275fb834..82a67c1582beae5456f25a6249af963ed258bf96 100644 --- a/api-openbis-python3-pybis/src/python/tests/test_project.py +++ b/api-openbis-python3-pybis/src/python/tests/test_project.py @@ -12,15 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import json +import os import random -import re +import time import pytest -import time -import os -from pybis import DataSet -from pybis import Openbis def test_create_delete_project(space): @@ -55,7 +51,7 @@ def test_create_project_with_attachment(space): timestamp = time.strftime("%a_%y%m%d_%H%M%S").upper() project_name = "project_" + timestamp + "_" + str(random.randint(0, 1000)) - filename = os.path.join(os.path.dirname(__file__), "testfile") + filename = os.path.join(os.path.dirname(__file__), "testdir/testfile") if not os.path.exists(filename): raise ValueError("File not found: {}".format(filename)) diff --git a/app-openbis-command-line/src/python/CHANGELOG.md b/app-openbis-command-line/src/python/CHANGELOG.md index 675e0d034e01550c52bb0b7648fa85cd6d4969c0..fd5eb265bfae4dd38f57ff8a2f83984e67c2e81a 100644 --- a/app-openbis-command-line/src/python/CHANGELOG.md +++ b/app-openbis-command-line/src/python/CHANGELOG.md @@ -3,6 +3,8 @@ * Added filtering by object in object and data_set search commands * Added recursive search to object and data_set search commands * Updated documentation regarding authentication +* Added dataset ids to sample search results +* changed pybis dependency to version == 1.36.0 # New in version 0.4.1 diff --git a/app-openbis-command-line/src/python/obis/__init__.py b/app-openbis-command-line/src/python/obis/__init__.py index 33413492eec9df159c4674eb6f327d628b6d7717..6d545ea634c5102d20c88e11a3896288072380fb 100644 --- a/app-openbis-command-line/src/python/obis/__init__.py +++ b/app-openbis-command-line/src/python/obis/__init__.py @@ -14,6 +14,6 @@ # __author__ = "ID SIS • ETH Zürich" __email__ = "openbis-support@id.ethz.ch" -__version__ = "0.4.2rc2" +__version__ = "0.4.2rc7" from .dm import * diff --git a/app-openbis-command-line/src/python/obis/dm/commands/search.py b/app-openbis-command-line/src/python/obis/dm/commands/search.py index 9f47b7c503138f4a5fbeba9432bdbdb31eb68364..3198164bd1675c6c1b73c3b52afd55527ffe772b 100644 --- a/app-openbis-command-line/src/python/obis/dm/commands/search.py +++ b/app-openbis-command-line/src/python/obis/dm/commands/search.py @@ -13,10 +13,11 @@ # limitations under the License. 
# -# from queue import Queue -# from threading import Thread import concurrent.futures +import pandas as pd + +from pybis.property_reformatter import is_of_openbis_supported_date_format from .openbis_command import OpenbisCommand from ..command_result import CommandResult from ..utils import cd @@ -25,11 +26,11 @@ from ...scripts.click_util import click_echo def _dfs(objects, prop, func, func_specific): """Helper function that perform DFS search over children graph of objects""" + # TODO: improve performance of this - make it similar to _dfs_samples with concurrent.futures.ThreadPoolExecutor( max_workers=5) as pool_simple, concurrent.futures.ThreadPoolExecutor( - max_workers=20) as pool_full: - stack = [getattr(openbis_obj, prop) for openbis_obj in - objects] # datasets and samples provide children in different formats + max_workers=20) as pool_full: + stack = [openbis_obj[prop] for openbis_obj in objects] # datasets and samples provide children in different formats visited = set() stack.reverse() output = [] @@ -53,6 +54,90 @@ def _dfs(objects, prop, func, func_specific): return output +def _dfs_samples(data_base, prop, func): + """Helper function that perform DFS search over children graph of objects""" + output = data_base + ids = [x['children'] for x in data_base if x['children']] + ids = [x[prop][prop] for x in flatten(ids)] + visited = set([x[prop][prop] for x in data_base]) + while ids: + data = func(ids) + data = list(data.values()) + output += data + ids = [] + children = [] + for obj in data: + key = obj[prop][prop] + children += [x[prop][prop] for x in obj['children']] + if key not in visited: + visited.add(key) + for child in children: + if child not in visited: + ids += [child] + return output + + +def flatten(matrix): + flat_list = [] + for row in matrix: + flat_list += row + return flat_list + + +def _check_date(sign, date1, date2): + if is_of_openbis_supported_date_format(date1) and is_of_openbis_supported_date_format(date2): + timestamp1 = pd.to_datetime(date1) + timestamp2 = pd.to_datetime(date2) + if sign == "=": + return timestamp2 == timestamp1 + elif sign == ">": + return timestamp2 > timestamp1 + elif sign == "<": + return timestamp2 < timestamp1 + raise ValueError(f"Unknown sign {sign}") + else: + raise ValueError("Dates are not in a supported formats!") + + +def _filter_dataset(dataset, filters): + if filters.get("space", None) is not None: + space = filters["space"] + if dataset.sample is not None and dataset.sample.space.code != space: + return False + if dataset.experiment is not None and dataset.experiment.project.space.code != space: + return False + if filters.get("type_code", None) is not None: + if dataset.type.code != filters["type_code"]: + return False + if filters.get("project", None) is not None: + project = filters["project"] + if dataset.sample is not None and dataset.sample.project.code != project: + return False + if dataset.experiment is not None and dataset.experiment.project.code != project: + return False + if filters.get("experiment", None) is not None: + if dataset.experiment is not None and dataset.experiment.code != filters["experiment"]: + return False + if filters.get("property_code", None) is not None: + prop_code = filters["property_code"] + prop_value = filters["property_value"] + if dataset.props is not None and dataset.props[prop_code.lower()] != prop_value: + return False + if filters.get("registration_date", None) is not None: + registration_date = filters["registration_date"] + sign = "=" + if registration_date[0] in [">", "<", 
"="]: + sign, registration_date = registration_date[0], registration_date[1:] + return _check_date(sign, registration_date, dataset.registrationDate) + if filters.get("modification_date", None) is not None: + modification_date = filters["modification_date"] + sign = "=" + if modification_date[0] in [">", "<", "="]: + sign, modification_date = modification_date[0], modification_date[1:] + return _check_date(sign, modification_date, dataset.modificationDate) + return True + + class Search(OpenbisCommand): """ Command to search samples or datasets in openBIS. @@ -70,11 +155,16 @@ class Search(OpenbisCommand): self.save_path = save_path self.load_global_config(dm) self.props = "*" - self.attrs = ["parents", "children"] super(Search, self).__init__(dm) def search_samples(self): - search_results = self._search_samples() + search_results = self._search_samples(raw_response=True) + + search_results = self.openbis._sample_list_for_response(props=self.props, + response=search_results, + attrs=["parents", "children", + "dataSets"], + parsed=True) click_echo(f"Objects found: {len(search_results)}") if self.save_path is not None: @@ -87,29 +177,40 @@ class Search(OpenbisCommand): return CommandResult(returncode=0, output="Search completed.") def _get_samples_children(self, identifier): - return self.openbis.get_samples(identifier, attrs=["children"]) + return self.openbis.get_samples(identifier, attrs=["children", "dataSets"]) - def _search_samples(self): + def _get_sample_with_datasets(self, identifier): + return self.openbis.get_sample(identifier, withDataSetIds=True) + + def _get_sample_with_datasets2(self, identifier): + return self.openbis.get_sample(identifier, withDataSetIds=True, raw_response=True) + + def _search_samples(self, raw_response=False): """Helper method to search samples""" + if self.recursive: + raw_response = True + if "object_code" in self.filters: results = self.openbis.get_samples(identifier=self.filters['object_code'], - attrs=self.attrs, props=self.props) + attrs=["parents", "children", "dataSets"], + raw_response=raw_response, + props=self.props) else: - args = self._get_filtering_args(self.props) + args = self._get_filtering_args(self.props, ["parents", "children", "dataSets"]) + args["raw_response"] = raw_response results = self.openbis.get_samples(**args) if self.recursive: click_echo(f"Recursive search enabled. It may take time to produce results.") - output = _dfs(results.objects, 'identifier', - self._get_samples_children, - self.openbis.get_sample) # samples provide identifiers as children - search_results = self.openbis._sample_list_for_response(props=self.props, - response=[sample.data for sample - in output], - parsed=True) + output2 = _dfs_samples(results['objects'], 'identifier', self._get_sample_with_datasets2) + + search_results = output2 else: - search_results = results + if raw_response: + search_results = results['objects'] + else: + search_results = results return search_results def _get_datasets_children(self, permId): @@ -120,28 +221,56 @@ class Search(OpenbisCommand): return CommandResult(returncode=-1, output="Configuration fileservice_url needs to be set for download.") - if self.recursive: - click_echo(f"Recursive search enabled. 
It may take time to produce results.") - search_results = self._search_samples() # Look for samples recursively - o = [] - for sample in search_results.objects: # get datasets - o += sample.get_datasets( - attrs=self.attrs, props=self.props) - output = _dfs(o, 'permId', # datasets provide permIds as children - self._get_datasets_children, - self.openbis.get_dataset) # look for child datasets + main_filters = self.filters.copy() + + object_filters = {k[7:]: v for (k, v) in main_filters.items() if k.startswith('object_')} + dataset_filters = {k: v for (k, v) in main_filters.items() if not k.startswith('object_')} + if object_filters: + if 'id' in object_filters: + if object_filters['id'] is not None: + object_filters['object_code'] = object_filters['id'] + del object_filters['id'] + self.filters = object_filters + search_results = self._search_samples(raw_response=True) + click_echo(f"Samples found: {len(search_results)}") + + datasets = [x["dataSets"] for x in search_results] + datasets = flatten(datasets) + datasets = [x['permId']['permId'] for x in datasets] + datasets = self.openbis.get_dataset(permIds=datasets) + + filtered_datasets = [] + for dataset in datasets: + if _filter_dataset(dataset, dataset_filters): + filtered_datasets += [dataset] + datasets = self.openbis._dataset_list_for_response(props=self.props, - response=[dataset.data for dataset - in output], + response=[x.data for x in + filtered_datasets], parsed=True) else: - if "object_code" in self.filters: - results = self.openbis.get_sample(self.filters['object_code']).get_datasets( - attrs=self.attrs, props=self.props) + if self.recursive: + search_results = self._search_samples() # Look for samples recursively + o = [] + for sample in search_results.objects: # get datasets + o += sample.get_datasets( + attrs=["parents", "children"], props=self.props) + output = _dfs(o, 'permId', # datasets provide permIds as children + self._get_datasets_children, + self.openbis.get_dataset) # look for child datasets + datasets = self.openbis._dataset_list_for_response(props=self.props, + response=[dataset.data for + dataset + in output], + parsed=True) else: - args = self._get_filtering_args(self.props) - results = self.openbis.get_datasets(**args) - datasets = results + if "dataset_id" in self.filters: + results = self.openbis.get_sample(self.filters['dataset_id']).get_datasets( + attrs=["parents", "children"], props=self.props) + else: + args = self._get_filtering_args(self.props, ["parents", "children"]) + results = self.openbis.get_datasets(**args) + datasets = results click_echo(f"Data sets found: {len(datasets)}") if self.save_path is not None: @@ -153,7 +282,7 @@ class Search(OpenbisCommand): return CommandResult(returncode=0, output="Search completed.") - def _get_filtering_args(self, props): + def _get_filtering_args(self, props, attrs): where = None if self.filters['property_code'] is not None and self.filters['property_value'] is not None: where = { @@ -163,10 +292,10 @@ class Search(OpenbisCommand): args = dict(space=self.filters['space'], project=self.filters['project'], # Not Supported with Project Samples disabled - experiment=self.filters['experiment'], + experiment=self.filters['collection'], type=self.filters['type_code'], where=where, - attrs=self.attrs, + attrs=attrs, props=props) if self.filters['registration_date'] is not None: diff --git a/app-openbis-command-line/src/python/obis/scripts/cli.py b/app-openbis-command-line/src/python/obis/scripts/cli.py index 
b64e610d0b34f19db84b5f657fc7844eba5dd542..efd84086e5ec4b7688241efdc8cd0bd597466d7e 100644 --- a/app-openbis-command-line/src/python/obis/scripts/cli.py +++ b/app-openbis-command-line/src/python/obis/scripts/cli.py @@ -27,9 +27,9 @@ from datetime import datetime import click from dateutil.relativedelta import relativedelta -from pybis import Openbis from requests import ConnectionError +from pybis import Openbis from .click_util import click_echo from .data_mgmt_runner import DataMgmtRunner from ..dm.command_result import CommandResult @@ -270,19 +270,15 @@ def repository_clear(ctx, settings): # data_set: type, properties - -_search_params = [ - click.option('-object_type', '--object_type', 'type_code', default=None, - help='Object type code to filter by'), +_dataset_search_params = [ click.option('-space', '--space', default=None, help='Space code'), - click.option('-project', '--project', default=None, help='Full project identification code'), - click.option('-experiment', '--experiment', default=None, help='Full experiment code'), - click.option('-object', '--object', 'object_code', default=None, - help='Object identification information, it can be permId or identifier'), - click.option('-type', '--type', 'type_code', default=None, help='Type code'), + click.option('-project', '--project', default=None, help='Project identification code'), + click.option('-collection', '--collection', default=None, help='Collection code'), + click.option('-id', '--id', 'dataset_id', default=None, + help='Dataset identification information, it can be permId or identifier'), + click.option('-type', '--type', 'type_code', default=None, help='Dataset type code'), click.option('-property', 'property_code', default=None, help='Property code'), - click.option('-property-value', 'property_value', default=None, - help='Property value'), + click.option('-property-value', 'property_value', default=None, help='Property value'), click.option('-registration-date', '--registration-date', 'registration_date', default=None, help='Registration date, it can be in the format "oYYYY-MM-DD" (e.g. ">2023-01-01")'), click.option('-modification-date', '--modification-date', 'modification_date', default=None, @@ -292,6 +288,29 @@ _search_params = [ help='Search data recursively'), ] +_search_by_sample_params = [ + click.option('-object-type', '--object-type', 'object_type_code', default=None, + help='Object type code to filter by'), + click.option('-object-space', '--object-space', 'object_space', default=None, + help='Object space code'), + click.option('-object-project', '--object-project', 'object_project', default=None, + help='Full object project identification code'), + click.option('-object-collection', '--object-collection', 'object_collection', default=None, + help='Full object collection code'), + click.option('-object-id', '--object-id', 'object_id', default=None, + help='Object identification information, it can be permId or identifier'), + click.option('-object-property', 'object_property_code', default=None, + help='Object property code'), + click.option('-object-property-value', 'object_property_value', default=None, + help='Object property value'), + click.option('-object-registration-date', '--object-registration-date', + 'object_registration_date', default=None, + help='Registration date, it can be in the format "oYYYY-MM-DD" (e.g. 
">2023-01-01")'), + click.option('-object-modification-date', '--object-modification-date', + 'object_modification_date', default=None, + help='Modification date, it can be in the format "oYYYY-MM-DD" (e.g. ">2023-01-01")'), +] + @cli.group('data_set') @click.option('-g', '--is_global', default=False, is_flag=True, help='Set/get global or local.') @@ -331,33 +350,48 @@ def data_set_clear(ctx, data_set_settings): return ctx.obj['runner'].run("data_set_clear", lambda dm: _clear(ctx, data_set_settings)) +def _pair_is_not_set(param1, param2): + return (param1 is None and param2 is not None) or (param1 is not None and param2 is None) + + @data_set.command('search', short_help="Search for datasets using a filtering criteria.") -@add_params(_search_params) +@add_params(_dataset_search_params + _search_by_sample_params) @click.pass_context -def data_set_search(ctx, type_code, space, project, experiment, registration_date, - modification_date, object_code, property_code, property_value, save, recursive): +def data_set_search(ctx, type_code, space, project, collection, registration_date, + modification_date, dataset_id, property_code, property_value, save, recursive, + object_type_code, object_space, object_project, object_collection, object_id, + object_property_code, object_property_value, object_registration_date, + object_modification_date): """Standard Data Store: Search data sets given the filtering criteria or object identifier. Results of this command can be used in `obis download`.""" - filtering_arguments = [type_code, space, project, experiment, registration_date, - modification_date, - property_code, property_value] - if all(v is None for v in filtering_arguments + [object_code]): + filtering_arguments = [type_code, space, project, collection, registration_date, + modification_date, property_code, property_value, + object_type_code, object_space, object_project, object_collection, + object_id, object_property_code, object_property_value, + object_registration_date, object_modification_date] + if all(v is None for v in filtering_arguments + [dataset_id]): click_echo("You must provide at least one filtering criteria!") return -1 - if (property_code is None and property_value is not None) or ( - property_code is not None and property_value is None): - click_echo("Property code and property value need to be specified!") + if _pair_is_not_set(property_code, property_value) or _pair_is_not_set(object_property_code, + object_property_value): + click_echo("Property code and property value pair needs to be specified!") return -1 ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False) - if object_code is not None: + if dataset_id is not None: if any(v is not None for v in filtering_arguments): - click_echo("Object parameter detected! Other filtering arguments will be omitted!") - filters = dict(object_code=object_code) + click_echo("Dataset id parameter detected! 
Other filtering arguments will be omitted!") + filters = dict(dataset_id=dataset_id) else: filters = dict(type_code=type_code, space=space, - project=project, experiment=experiment, property_code=property_code, + project=project, experiment=collection, property_code=property_code, registration_date=registration_date, modification_date=modification_date, - property_value=property_value) + property_value=property_value, object_type_code=object_type_code, + object_space=object_space, object_project=object_project, + object_collection=object_collection, object_id=object_id, + object_property_code=object_property_code, + object_property_value=object_property_value, + object_registration_date=object_registration_date, + object_modification_date=object_modification_date) return ctx.obj['runner'].run("data_set_search", lambda dm: dm.search_data_set(filters, recursive, save)), @@ -409,15 +443,35 @@ def object_clear(ctx, object_settings): return ctx.obj['runner'].run("object_clear", lambda dm: _clear(ctx, object_settings)) -@object.command('search', short_help="Search for samples using a filtering criteria.") -@add_params(_search_params) +_object_search_params = [ + click.option('-space', '--space', default=None, help='Space code'), + click.option('-project', '--project', default=None, help='Full project identification code'), + click.option('-collection', '--collection', default=None, help='Full collection code'), + click.option('-object', '--object', 'object_id', default=None, + help='Object identification information, it can be permId or identifier'), + click.option('-type', '--type', 'type_code', default=None, help='Type code'), + click.option('-property', 'property_code', default=None, help='Property code'), + click.option('-property-value', 'property_value', default=None, + help='Property value'), + click.option('-registration-date', '--registration-date', 'registration_date', default=None, + help='Registration date, it can be in the format "oYYYY-MM-DD" (e.g. ">2023-01-01")'), + click.option('-modification-date', '--modification-date', 'modification_date', default=None, + help='Modification date, it can be in the format "oYYYY-MM-DD" (e.g. 
">2023-01-01")'), + click.option('-save', '--save', default=None, help='Filename to save results'), + click.option('-r', '--recursive', 'recursive', is_flag=True, default=False, + help='Search data recursively'), +] + + +@object.command('search', short_help="Search for objects using a filtering criteria.") +@add_params(_object_search_params) @click.pass_context -def object_search(ctx, type_code, space, project, experiment, registration_date, - modification_date, object_code, property_code, property_value, save, recursive): +def object_search(ctx, type_code, space, project, collection, registration_date, + modification_date, object_id, property_code, property_value, save, recursive): """Standard Data Store: Search for objects using a filtering criteria or object identifier.""" - filtering_arguments = [type_code, space, project, experiment, registration_date, + filtering_arguments = [type_code, space, project, collection, registration_date, modification_date, property_code, property_value] - if all(v is None for v in filtering_arguments + [object_code]): + if all(v is None for v in filtering_arguments + [object_id]): click_echo("You must provide at least one filtering criteria!") return -1 if (property_code is None and property_value is not None) or ( @@ -425,13 +479,13 @@ def object_search(ctx, type_code, space, project, experiment, registration_date, click_echo("Property code and property value need to be specified!") return -1 ctx.obj['runner'] = DataMgmtRunner(ctx.obj, halt_on_error_log=False) - if object_code is not None: + if object_id is not None: if any(v is not None for v in filtering_arguments): click_echo("Object parameter detected! Other filtering arguments will be omitted!") - filters = dict(object_code=object_code) + filters = dict(object_code=object_id) else: filters = dict(type_code=type_code, space=space, - project=project, experiment=experiment, property_code=property_code, + project=project, collection=collection, property_code=property_code, registration_date=registration_date, modification_date=modification_date, property_value=property_value) return ctx.obj['runner'].run("object_search", diff --git a/app-openbis-command-line/src/python/setup.py b/app-openbis-command-line/src/python/setup.py index c08dc96fcccd831e8808bfafcb9b155779582667..993559aaf6330ec95493a046ac3cc10e94c2ef62 100644 --- a/app-openbis-command-line/src/python/setup.py +++ b/app-openbis-command-line/src/python/setup.py @@ -31,7 +31,7 @@ data_files = [ setup( name="obis", - version="0.4.2rc2", + version="0.4.2rc7", description="Local data management with assistance from OpenBIS.", long_description=long_description, long_description_content_type="text/markdown", @@ -42,7 +42,7 @@ setup( packages=["obis", "obis.dm", "obis.dm.commands", "obis.scripts"], data_files=data_files, package_data={"obis": ["dm/git-annex-attributes"]}, - install_requires=["pyOpenSSL", "pytest", "pybis==1.33.2", "click"], + install_requires=["pyOpenSSL", "pytest", "pybis==1.36.0", "click"], entry_points={"console_scripts": ["obis=obis.scripts.cli:main"]}, zip_safe=False, python_requires=">=3.3", diff --git a/docs/app-openbis-command-line/README.md b/docs/app-openbis-command-line/README.md index 4505c3ef8b2989e313248987b091250d3c39f815..f194d05afbbbb16abe15cccfdbde87aae18b552a 100644 --- a/docs/app-openbis-command-line/README.md +++ b/docs/app-openbis-command-line/README.md @@ -232,23 +232,44 @@ it comes to integration with other tools. 
obis data_set search [OPTIONS] Options: - -object_type, --object_type TEXT - Object type code to filter by -space, --space TEXT Space code - -project, --project TEXT Full project identification code - -experiment, --experiment TEXT Full experiment code - -object, --object TEXT Object identification information, it can be permId or identifier - -type, --type TEXT Type code + -project, --project TEXT Project identification code + -collection, --collection TEXT Collection code + -id, --id TEXT Dataset identification information, it can + be permId or identifier + -type, --type TEXT Dataset type code + -property TEXT Property code + -property-value TEXT Property value -registration-date, --registration-date TEXT Registration date, it can be in the format - "oYYYY-MM-DD" (e.g. ">2023-01-31", "=2023-01-31", "<2023-01-31") + "oYYYY-MM-DD" (e.g. ">2023-01-01") -modification-date, --modification-date TEXT Modification date, it can be in the format - "oYYYY-MM-DD" (e.g. ">2023-01-31", "=2023-01-31", "<2023-01-31") - -property TEXT Property code - -property-value TEXT Property value - -save, --save TEXT Directory name to save results + "oYYYY-MM-DD" (e.g. ">2023-01-01") + -save, --save TEXT Filename to save results -r, --recursive Search data recursively + +Search by sample object parameters: + -object-type, --object-type TEXT + Object type code to filter by + -object-space, --object-space TEXT + Object space code + -object-project, --object-project TEXT + Full object project identification code + -object-collection, --object-collection TEXT + Full object collection code + -object-id, --object-id TEXT Object identification information, it can be + permId or identifier + -object-property TEXT Object property code + -object-property-value TEXT Object property value + -object-registration-date, --object-registration-date TEXT + Registration date, it can be in the format + "oYYYY-MM-DD" (e.g. ">2023-01-01") + -object-modification-date, --object-modification-date TEXT + Modification date, it can be in the format + "oYYYY-MM-DD" (e.g. ">2023-01-01") + --help Show this message and exit. + ``` With `data_set search` command, obis connects to a configured OpenBIS instance and searches for all @@ -259,6 +280,10 @@ Search results can be downloaded into a file by using `save` option. Recursive option enables searching for datasets of children samples or datasets +The `-object*` filtering parameters allow searching for datasets owned by objects that match these criteria, +i.e. obis first finds the matching objects (as if it were an `object search` command) and then +returns the datasets attached to them. + *Note: Filtering by `-project` may not work when `Project Samples` are disabled in OpenBIS configuration.* @@ -310,21 +335,23 @@ data set is connected directly to an object - gets or sets given properties to i obis object search [OPTIONS] Options: - -type, --type TEXT Type code to filter by -space, --space TEXT Space code -project, --project TEXT Full project identification code - -experiment, --experiment TEXT Full experiment - -object, --object TEXT Object identification information, it can be permId or identifier + -collection, --collection TEXT Full collection code + -object, --object TEXT Object identification information, it can be + permId or identifier + -type, --type TEXT Type code + -property TEXT Property code + -property-value TEXT Property value -registration-date, --registration-date TEXT Registration date, it can be in the format - "oYYYY-MM-DD" (e.g.
">2023-01-31", "=2023-01-31", "<2023-01-31") + "oYYYY-MM-DD" (e.g. ">2023-01-01") -modification-date, --modification-date TEXT Modification date, it can be in the format - "oYYYY-MM-DD" (e.g. ">2023-01-31", "=2023-01-31", "<2023-01-31") - -property TEXT Property code - -property-value TEXT Property value - -save, --save TEXT File name to save results in csv format + "oYYYY-MM-DD" (e.g. ">2023-01-01") + -save, --save TEXT Filename to save results -r, --recursive Search data recursively + ``` With `object search` command, obis connects to a configured OpenBIS instance and searches for all diff --git a/docs/software-developer-documentation/apis/java-javascript-v3-api.md b/docs/software-developer-documentation/apis/java-javascript-v3-api.md index 132d3e709075077ef58c7752a130b7f47e8b42da..663df3215eee1251ff4355b968d841ec4e3c8c21 100644 --- a/docs/software-developer-documentation/apis/java-javascript-v3-api.md +++ b/docs/software-developer-documentation/apis/java-javascript-v3-api.md @@ -34,14 +34,14 @@ The Java V3 API consists of two interfaces: Please check our JavaDoc for more details: <https://openbis.ch/javadoc/20.10.x/javadoc-api-v3/index.html> -All V3 API jars are packed in openBIS-API-V3-<VERSION>.zip which -is part of openBIS-clients-and-APIs-<VERSION>.zip (the latest -version can be downloaded at [Sprint Releases](#) > Clients and APIs) +All V3 API jars are packed in openBIS-API-V3-<VERSION>.zip which +is part of openBIS-clients-and-APIs-<VERSION>.zip (the latest +version can be downloaded at [Sprint Releases](#) > Clients and APIs) ### The Javascript API The Javascript V3 API consists of a module hosted at -<OPENBIS\_URL>/resources/api/v3/openbis.js, for instance +<OPENBIS\_URL>/resources/api/v3/openbis.js, for instance <http://localhost/openbis>/ resources/api/v3/openbis.js. Please check the openbis.js file itself for more details. @@ -113,12 +113,15 @@ property [javax.net](http://javax.net).ssl.trustStore. Example: **Using openBIS trust store in Java clients** +```bash java -Djavax.net.ssl.trustStore=/home/openbis/openbis/servers/openBIS-server/jetty/etc/openBIS.keystore -jar the-client.jar +``` Connecting in Java **V3ConnectionExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.space.Space; @@ -151,6 +154,7 @@ Connecting in Java } +``` ### Connecting in Javascript @@ -165,6 +169,7 @@ be conceptually consistent. **V3ConnectionExample.html** +```html <!DOCTYPE html> <html> <head> @@ -209,7 +214,7 @@ be conceptually consistent. </script> </body> </html> - +``` ## IV. AS Methods @@ -257,6 +262,7 @@ was incorrect the login methods return null. **V3LoginExample.java** +```java public class V3LoginExample { public static void main(String[] args) @@ -276,9 +282,12 @@ was incorrect the login methods return null. System.out.println(sessionToken); } } +``` + **V3LoginExample.html** +```html <script>  // we assume here that v3 object has been already created (please check "Accessing the API" section for more details) @@ -298,6 +307,7 @@ was incorrect the login methods return null. }); }); </script> +``` ### Personal Access Tokens @@ -309,6 +319,7 @@ Tokens](/pages/viewpage.action?pageId=122140993). 
Example of how to create and use a PAT: +```java import java.util.Arrays; import java.util.Date; import java.util.List; @@ -344,7 +355,7 @@ Example of how to create and use a PAT: v3api.searchSpaces(pat.getHash(), new SpaceSearchCriteria(), new SpaceFetchOptions()); } } - +``` ### Session Information OpenBIS provides a method to obtain the session information for an @@ -352,8 +363,10 @@ already log in user: #### Example + **V3CreationExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.session.SessionInformation; public class V3SessionInformationExample @@ -368,6 +381,7 @@ already log in user: System.out.println("Creator Person: " + sessionInformation.getCreatorPerson()); } } +``` ### Creating entities @@ -387,6 +401,7 @@ until V3 version is out. **V3CreationExample.java** +```java import java.util.List; import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.id.EntityTypePermId; @@ -411,9 +426,11 @@ until V3 version is out. System.out.println("Perm ids: " + permIds);  } } +``` **V3CreationExample.html** +```html <script> require([ "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier" ], function(SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier) { @@ -432,11 +449,13 @@ until V3 version is out. }); }); </script> +``` #### Properties example **V3CreationWithPropertiesExample.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.id.EntityTypePermId; import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.id.ExperimentIdentifier; @@ -466,9 +485,12 @@ until V3 version is out. v3.createSamples(sessionToken, Arrays.asList(sample)); } } +``` + **V3CreationWithPropertiesExample.html** +```html <script> require([ "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier" ], function(SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier) { @@ -495,11 +517,14 @@ until V3 version is out. }); }); </script> +``` #### Different ids example + **V3CreationWithDifferentIdsExample.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.id.EntityTypePermId; import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.id.ExperimentIdentifier; @@ -526,9 +551,11 @@ until V3 version is out. v3.createSamples(sessionToken, Arrays.asList(sample)); } } +``` **V3CreationWithDifferentIdsExample.html** +```html <script> require([ "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier", "as/dto/experiment/id/ExperimentPermId" ], function(SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier, ExperimentPermId) { @@ -551,7 +578,7 @@ until V3 version is out. 
}); }); </script> - +``` #### Parent child example The following example creates parent and child samples for a sample type @@ -559,6 +586,7 @@ which allow automatic code generation: **V3CreationParentAndChildExample** +```java import java.util.Arrays; import java.util.List; import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.id.CreationId; @@ -587,9 +615,11 @@ which allow automatic code generation: System.out.println("Perm ids: " + permIds); } } +``` **V3CreationParentAndChildExample.html** +```html <script> require([ "openbis", "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/common/id/CreationId" ], function(openbis, SampleCreation, EntityTypePermId, SpacePermId, CreationId) { @@ -609,6 +639,7 @@ which allow automatic code generation: }); }); </script> +``` ### Updating entities @@ -629,6 +660,7 @@ after creation. **V3UpdateExample.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.id.ExperimentIdentifier; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.id.SampleIdentifier; @@ -650,9 +682,11 @@ after creation. System.out.println("Updated"); } } +``` **V3UpdateExample.html** +```html <script> require([ "as/dto/sample/update/SampleUpdate", "as/dto/sample/id/SampleIdentifier", "as/dto/experiment/id/ExperimentIdentifier" ], function(SampleUpdate, SampleIdentifier, ExperimentIdentifier) { @@ -670,11 +704,13 @@ after creation. }); }); </script> +``` #### Properties example **V3UpdateWithPropertiesExample.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.id.SampleIdentifier; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.update.SampleUpdate; @@ -701,9 +737,11 @@ after creation. System.out.println("Updated"); } } +``` **V3UpdateWithPropertiesExample.html** +```html <script> require([ "as/dto/sample/update/SampleUpdate", "as/dto/sample/id/SampleIdentifier" ], function(SampleUpdate, SampleIdentifier) { @@ -725,11 +763,13 @@ after creation. }); }); </script> +``` #### Parents example **V3UpdateWithParentsExample.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.id.SampleIdentifier; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.update.SampleUpdate; @@ -763,9 +803,11 @@ after creation. System.out.println("Updated"); } } +``` **V3UpdateWithParentsExample.html** +```html <script> require([ "as/dto/sample/update/SampleUpdate", "as/dto/sample/id/SampleIdentifier" ], function(SampleUpdate, SampleIdentifier) { @@ -795,7 +837,7 @@ after creation. }); }); </script> - +``` ### Getting authorization rights for entities If the user isn't allowed to create or update an entity an exception is @@ -940,6 +982,7 @@ identifier). A code example on how to use sorting is presented below. **V3SearchExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.Sample; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.fetchoptions.SampleFetchOptions; @@ -969,9 +1012,11 @@ identifier). A code example on how to use sorting is presented below. } } } +``` **V3SearchExample.html** +```html <script> require([ "as/dto/sample/search/SampleSearchCriteria", "as/dto/sample/fetchoptions/SampleFetchOptions" ], function(SampleSearchCriteria, SampleFetchOptions) { @@ -995,11 +1040,13 @@ identifier). A code example on how to use sorting is presented below. 
}); }); </script> +``` #### Example with pagination and sorting **V3SearchWithPaginationAndSortingExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.Sample; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.fetchoptions.SampleFetchOptions; @@ -1031,9 +1078,11 @@ identifier). A code example on how to use sorting is presented below. System.out.println(result.getTotalCount()); } } +``` **V3SearchWithPaginationAndSortingExample.html** +```html <script> require([ "as/dto/sample/search/SampleSearchCriteria", "as/dto/sample/fetchoptions/SampleFetchOptions" ], function(SampleSearchCriteria, SampleFetchOptions) { @@ -1060,6 +1109,7 @@ identifier). A code example on how to use sorting is presented below. }); }); </script> +``` #### Example with OR operator @@ -1069,6 +1119,7 @@ the following example: **V3SearchWithOrOperatorExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.Sample; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.fetchoptions.SampleFetchOptions; @@ -1098,9 +1149,11 @@ the following example: } } } +``` **V3SearchWithOrOperatorExample.html** +```html <script> require([ "as/dto/sample/search/SampleSearchCriteria", "as/dto/sample/fetchoptions/SampleFetchOptions" ], function(SampleSearchCriteria, SampleFetchOptions) { @@ -1124,6 +1177,7 @@ the following example: }); }); </script> +``` #### Example with nested logical operators @@ -1133,6 +1187,7 @@ experiment OR of type whose code starts with "MASTER"). **V3SearchWithNestedLogicalOperatorsExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.Sample; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.fetchoptions.SampleFetchOptions; @@ -1164,9 +1219,11 @@ experiment OR of type whose code starts with "MASTER"). } } } +``` **V3SearchWithNestedLogicalOperatorsExample.html** +```html <script> require([ "as/dto/sample/search/SampleSearchCriteria", "as/dto/sample/fetchoptions/SampleFetchOptions" ], function(SampleSearchCriteria, SampleFetchOptions) { @@ -1193,6 +1250,7 @@ experiment OR of type whose code starts with "MASTER"). }); }); </script> +``` #### Example with recursive fetch options @@ -1203,6 +1261,7 @@ example: **V3SearchWithRecursiveFetchOptionsExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.Sample; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.fetchoptions.SampleFetchOptions; @@ -1247,9 +1306,11 @@ example: return sample.getCode() + " -> (" + builder.toString() + ")"; } } +``` **V3SearchWithRecursiveFetchOptionsExample.html** +```html <script> require([ "as/dto/sample/search/SampleSearchCriteria", "as/dto/sample/fetchoptions/SampleFetchOptions" ], function(SampleSearchCriteria, SampleFetchOptions) { @@ -1285,6 +1346,7 @@ example: } }); </script> +``` #### Global search @@ -1304,6 +1366,7 @@ of meta data (entity attribute or property). Example: **V3GlobalSearchExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.global.GlobalSearchObject; import ch.ethz.sis.openbis.generic.asapi.v3.dto.global.fetchoptions.GlobalSearchObjectFetchOptions; @@ -1335,11 +1398,12 @@ of meta data (entity attribute or property). 
Example: } } } - +``` **V3GlobalSearchExample.html** +```html <script> require([ "as/dto/global/search/GlobalSearchCriteria", "as/dto/global/search/GlobalSearchObjectKind", "as/dto/global/fetchoptions/GlobalSearchObjectFetchOptions" ], function(GlobalSearchCriteria, GlobalSearchObjectKind, GlobalSearchObjectFetchOptions) { @@ -1364,7 +1428,7 @@ of meta data (entity attribute or property). Example: }); }); </script> - +``` ### Getting entities @@ -1383,6 +1447,7 @@ returned map. **V3GetExample.java** +```java import java.util.Arrays; import java.util.Map; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.Sample; @@ -1415,9 +1480,11 @@ returned map. map.get(id5); // returns null } } +``` **V3GetExample.html** +```html <script> require([ "as/dto/sample/id/SampleIdentifier", "as/dto/sample/id/SamplePermId", "as/dto/sample/fetchoptions/SampleFetchOptions" ], function(SampleIdentifier, SamplePermId, SampleFetchOptions) { @@ -1443,6 +1510,7 @@ returned map. }); }); </script> +``` ### Deleting entities @@ -1462,6 +1530,7 @@ can. **V3DeleteExample.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.dto.deletion.id.IDeletionId; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.delete.SampleDeletionOptions; @@ -1490,9 +1559,11 @@ can. v3.revertDeletions(sessionToken, Arrays.asList(deletionId)); } } +``` **V3DeleteExample.html** +```html <script> require([ "as/dto/sample/id/SampleIdentifier", "as/dto/sample/delete/SampleDeletionOptions" ], function(SampleIdentifier, SampleDeletionOptions) { @@ -1516,6 +1587,7 @@ can. }); }); </script> +``` ### Searching entity types @@ -1527,6 +1599,7 @@ sample types and assigned property types: **V3SearchTypesExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.property.PropertyAssignment; import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.SampleType; @@ -1555,9 +1628,11 @@ sample types and assigned property types: } } } +``` **V3SearchTypesExample.html** +```html <script> require([ "as/dto/sample/search/SampleTypeSearchCriteria", "as/dto/sample/fetchoptions/SampleTypeFetchOptions" ], function(SampleTypeSearchCriteria, SampleTypeFetchOptions) { @@ -1581,6 +1656,7 @@ sample types and assigned property types: }); }); </script> +``` ### Modifications @@ -1593,6 +1669,7 @@ project and sample update: **V3SearchObjectKindModificationsExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.objectkindmodification.ObjectKind; import ch.ethz.sis.openbis.generic.asapi.v3.dto.objectkindmodification.ObjectKindModification; @@ -1621,9 +1698,11 @@ project and sample update: } } } +``` **V3SearchObjectKindModificationsExample.html** +```html <script> require([ "as/dto/objectkindmodification/search/ObjectKindModificationSearchCriteria", "as/dto/objectkindmodification/ObjectKind", "as/dto/objectkindmodification/OperationKind", @@ -1647,6 +1726,7 @@ project and sample update: }); }); </script> +``` ### Custom AS Services @@ -1666,6 +1746,7 @@ available custom AS services. **V3SearchCustomASServicesExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.service.CustomASService; import ch.ethz.sis.openbis.generic.asapi.v3.dto.service.fetchoptions.CustomASServiceFetchOptions; @@ -1686,9 +1767,11 @@ available custom AS services. 
} } } +``` **V3SearchCustomASServicesExample.html** +```html <script> require([ "as/dto/service/search/CustomASServiceSearchCriteria", "as/dto/service/fetchoptions/CustomASServiceFetchOptions" ], function(CustomASServiceSearchCriteria, CustomASServiceFetchOptions) { @@ -1703,6 +1786,7 @@ available custom AS services. }); }); </script> +``` #### Execute a custom service @@ -1721,6 +1805,7 @@ further processing. **V3ExecuteCustomASServiceExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.service.CustomASServiceExecutionOptions; import ch.ethz.sis.openbis.generic.asapi.v3.dto.service.id.CustomASServiceCode; @@ -1737,9 +1822,11 @@ further processing. System.out.println("Result: " + result); } } +``` **V3ExecuteCustomASServiceExample.html** +```html <script> require([ "as/dto/service/id/CustomASServiceCode", "as/dto/service/CustomASServiceExecutionOptions" ], function(CustomASServiceCode, CustomASServiceExecutionOptions) { @@ -1751,6 +1838,7 @@ further processing. }); }); </script> +``` ### Archiving / unarchiving data sets @@ -1767,6 +1855,7 @@ in the archive/store yet. **V3ArchiveDataSetsExample.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.archive.DataSetArchiveOptions; @@ -1796,9 +1885,11 @@ in the archive/store yet. System.out.println("Archiving scheduled"); } } +``` **V3ArchiveDataSetsExample.html** +```html <script> require([ "openbis", "as/dto/dataset/id/DataSetPermId", "as/dto/dataset/archive/DataSetArchiveOptions" ], function(openbis, DataSetPermId, DataSetArchiveOptions) { @@ -1822,6 +1913,7 @@ in the archive/store yet. }); }); </script> +``` #### Unarchiving data sets @@ -1829,6 +1921,7 @@ in the archive/store yet. **V3UnarchiveDataSetsExample.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.id.DataSetPermId; @@ -1853,9 +1946,11 @@ in the archive/store yet. System.out.println("Unarchiving scheduled"); } } +``` **V3UnarchiveDataSetsExample.html** +```html <script> require([ "openbis", "as/dto/dataset/id/DataSetPermId", "as/dto/dataset/unarchive/DataSetUnarchiveOptions" ], function(openbis, DataSetPermId, DataSetUnarchiveOptions) { @@ -1874,6 +1969,7 @@ in the archive/store yet. 
}); }); </script> +``` ### Executing Operations @@ -1999,6 +2095,7 @@ states: **V3ExecuteOperationsAsynchronous.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.id.EntityTypePermId; @@ -2029,9 +2126,11 @@ states: System.out.println("Execution id: " + results.getExecutionId()); } } +``` **V3ExecuteOperationsAsynchronous.html** +```html <script> require([ "openbis", "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier", "as/dto/sample/create/CreateSamplesOperation", "as/dto/operation/AsynchronousOperationExecutionOptions" ], function(openbis, SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier, CreateSamplesOperation, AsynchronousOperationExecutionOptions) { @@ -2052,6 +2151,7 @@ states: }); }); </script> +``` ##### **Synchronous operation execution** @@ -2083,6 +2183,7 @@ states: **V3ExecuteOperationsSynchronous.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.id.EntityTypePermId; @@ -2116,9 +2217,11 @@ states: System.out.println("Sample id: " + result.getObjectIds()); } } +``` **V3ExecuteOperationsSynchronous.html** +```html <script> require([ "openbis", "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier", "as/dto/sample/create/CreateSamplesOperation", "as/dto/operation/SynchronousOperationExecutionOptions" ], function(openbis, SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier, CreateSamplesOperation, SynchronousOperationExecutionOptions) { @@ -2140,6 +2243,7 @@ states: }); }); </script> +``` ##### **Notifications** @@ -2164,6 +2268,7 @@ For failed executions an email contains: **V3ExecuteOperationsEmailNotification.java** +```java import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.id.EntityTypePermId; @@ -2198,9 +2303,11 @@ For failed executions an email contains: System.out.println("Execution id: " + results.getExecutionId()); } } +``` **V3ExecuteOperationsEmailNotification.html** +```html <script> require([ "openbis", "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier", "as/dto/sample/create/CreateSamplesOperation", "as/dto/operation/AsynchronousOperationExecutionOptions", "as/dto/operation/OperationExecutionEmailNotification" ], function(openbis, SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier, CreateSamplesOperation, AsynchronousOperationExecutionOptions, OperationExecutionEmailNotification) { @@ -2224,6 +2331,7 @@ For failed executions an email contains: }); }); </script> +``` #### Method getOperationExecutions / searchOperationExecutions @@ -2281,6 +2389,7 @@ related information are done with two separate V3 maintenance tasks **V3GetOperationExecutionsAsynchronous.java** +```java import java.util.Arrays; import java.util.Map; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; @@ -2368,9 +2477,11 @@ related information are done with two separate V3 maintenance tasks System.out.println("Details.error: " + execution.getSummary().getError()); } } +``` 
**V3GetOperationExecutionsAsynchronous.html** +```html <script> require([ "openbis", "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier", "as/dto/sample/create/CreateSamplesOperation", "as/dto/operation/AsynchronousOperationExecutionOptions", "as/dto/operation/fetchoptions/OperationExecutionFetchOptions", "as/dto/operation/id/OperationExecutionPermId" ], function(openbis, SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier, CreateSamplesOperation, AsynchronousOperationExecutionOptions, OperationExecutionFetchOptions, OperationExecutionPermId) { @@ -2445,13 +2556,11 @@ related information are done with two separate V3 maintenance tasks }); }); </script> - - - - +``` **V3GetOperationExecutionsSynchronous.java** +```java import java.util.Arrays; import java.util.Map; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; @@ -2536,9 +2645,11 @@ related information are done with two separate V3 maintenance tasks System.out.println("Details.error: " + execution.getSummary().getError()); } } +``` **V3GetOperationExecutionsSynchronous.html** +```html <script> require([ "openbis", "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier", "as/dto/sample/create/CreateSamplesOperation", "as/dto/operation/SynchronousOperationExecutionOptions", "as/dto/operation/fetchoptions/OperationExecutionFetchOptions", "as/dto/operation/id/OperationExecutionPermId" ], function(openbis, SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier, CreateSamplesOperation, SynchronousOperationExecutionOptions, OperationExecutionFetchOptions, OperationExecutionPermId) { @@ -2611,6 +2722,7 @@ related information are done with two separate V3 maintenance tasks }); }); </script> +``` #### Method updateOperationExecutions / deleteOperationExecutions @@ -2621,6 +2733,7 @@ availability time expires. **V3UpdateOperationExecutions.java** +```java import java.util.Arrays; import java.util.Map; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; @@ -2684,9 +2797,11 @@ availability time expires. System.out.println("Details.availability: " + execution.getDetailsAvailability()); } } +``` **V3UpdateOperationExecutions.html** +```html <script> require([ "openbis", "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier", "as/dto/sample/create/CreateSamplesOperation", "as/dto/operation/AsynchronousOperationExecutionOptions", "as/dto/operation/update/OperationExecutionUpdate", "as/dto/operation/fetchoptions/OperationExecutionFetchOptions" ], function(openbis, SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier, CreateSamplesOperation, AsynchronousOperationExecutionOptions, OperationExecutionUpdate, OperationExecutionFetchOptions) { @@ -2736,12 +2851,11 @@ availability time expires. }); }); </script> - - - +``` **V3DeleteOperationExecutions.java** +```java import java.util.Arrays; import java.util.Map; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; @@ -2798,9 +2912,11 @@ availability time expires. System.out.println("Availability: " + (execution != null ? 
execution.getAvailability() : null)); } } +``` **V3DeleteOperationExecutions.html** +```html <script> require([ "openbis", "as/dto/sample/create/SampleCreation", "as/dto/entitytype/id/EntityTypePermId", "as/dto/space/id/SpacePermId", "as/dto/experiment/id/ExperimentIdentifier", "as/dto/sample/create/CreateSamplesOperation", "as/dto/operation/AsynchronousOperationExecutionOptions", "as/dto/operation/delete/OperationExecutionDeletionOptions", "as/dto/operation/fetchoptions/OperationExecutionFetchOptions" ], function(openbis, SampleCreation, EntityTypePermId, SpacePermId, ExperimentIdentifier, CreateSamplesOperation, AsynchronousOperationExecutionOptions, OperationExecutionDeletionOptions, OperationExecutionFetchOptions) { @@ -2843,6 +2959,7 @@ availability time expires. }); }); </script> +``` #### Configuration @@ -2911,6 +3028,7 @@ user or by an instance admin. **WebAppSettingsExample.java** +```java import java.util.Arrays; import java.util.Map; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; @@ -2978,9 +3096,11 @@ user or by an instance admin. System.out.println(settings2); } } +``` **WebAppSettingsExample.html** +```html <script> require([ "jquery", "openbis", "as/dto/person/update/PersonUpdate", "as/dto/person/id/Me", "as/dto/webapp/create/WebAppSettingCreation", "as/dto/person/fetchoptions/PersonFetchOptions" ], function($, openbis, PersonUpdate, Me, WebAppSettingCreation, PersonFetchOptions) { @@ -3039,6 +3159,7 @@ user or by an instance admin. }); }); </script> +``` ### Imports @@ -3137,15 +3258,12 @@ message like "Registration of 1 sample(s) is complete." while the asynchronous version could return a message like "When the import is complete the confirmation or failure report will be sent by email.". - - An example webapp to upload a file with samples and a custom AS service to import that file is presented below. - - **ImportSamplesWebAppExample.html** +```html <!DOCTYPE html> <html> <head> @@ -3214,14 +3332,17 @@ to import that file is presented below. </body> </html> +``` **ImportSamplesServiceExample.py** +```python def process(context, parameters): sampleType = parameters.get("sampleType") return context.getImportService().createSamples(context.getSessionToken(), "importWebappUploadKey", sampleType, None, None, None, False, False, None); ### Generate identifiers +``` V3 API provides 2 methods for generating unique identifiers: @@ -3233,10 +3354,9 @@ V3 API provides 2 methods for generating unique identifiers: (e.g. "MY-PREFIX-147"); this method uses a dedicated sequence for each entity kind. - - **GenerateIdentifiersExample.java** +```java import java.util.List; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.EntityKind; @@ -3253,9 +3373,11 @@ V3 API provides 2 methods for generating unique identifiers: System.out.println(codes); // example output: [MY-PREFIX-782, MY-PREFIX-783, MY-PREFIX-784] } } +``` **GenerateIdentifiersExample.html** +```html <script> require([ "jquery", "openbis", "as/dto/entitytype/EntityKind" ], function($, openbis, EntityKind) { $(document).ready(function() { @@ -3271,6 +3393,7 @@ V3 API provides 2 methods for generating unique identifiers: }); }); </script> +``` ## V. DSS Methods @@ -3292,6 +3415,7 @@ result object as if it was returned by only one data store. 
**V3SearchDataSetFilesExample.java** +```java import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult; import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.search.DataSetSearchCriteria; import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.DataSetFile; @@ -3321,11 +3445,12 @@ result object as if it was returned by only one data store. } } } - +```  **V3SearchDataSetFilesAtAllDataStoresExample.html** +```html <script> require([ "openbis", "dss/dto/datasetfile/search/DataSetFileSearchCriteria", "dss/dto/datasetfile/fetchoptions/DataSetFileFetchOptions" ], function(DataSetFileSearchCriteria, DataSetFileFetchOptions) { @@ -3350,9 +3475,11 @@ result object as if it was returned by only one data store. }); }); </script> +``` **V3SearchDataSetFilesAtChosenDataStoresExample.html** +```html <script> require([ "openbis", "dss/dto/datasetfile/search/DataSetFileSearchCriteria", "dss/dto/datasetfile/fetchoptions/DataSetFileFetchOptions" ], function(DataSetFileSearchCriteria, DataSetFileFetchOptions) { @@ -3378,6 +3505,7 @@ result object as if it was returned by only one data store. }); }); </script> +``` ### Downloading files, folders, and datasets @@ -3428,6 +3556,7 @@ the directory. **Download a single file** +```java import java.io.InputStream; import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; @@ -3492,6 +3621,7 @@ the directory. } } } +``` #### Download a folder located inside a dataset @@ -3507,6 +3637,7 @@ the directory object. **Download a folder** +```java import java.io.InputStream; import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; @@ -3568,6 +3699,7 @@ the directory object. } } } +``` #### Search for a dataset and download all its contents, file by file @@ -3589,6 +3721,7 @@ this example. **Search & download a whole dataset, file by file** +```java import java.io.InputStream; import java.util.LinkedList; import java.util.List; @@ -3669,6 +3802,7 @@ this example. } } } +``` #### Download a whole dataset recursively @@ -3679,6 +3813,7 @@ DataSetFileDownloadOptions object. **Download a whole dataset recursively** +```java import java.io.InputStream; import java.util.Arrays; import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi; @@ -3733,6 +3868,7 @@ DataSetFileDownloadOptions object. } } } +``` #### Search and list all the files inside a data store @@ -3747,6 +3883,7 @@ the whole data store. **Search and list all files inside a data store** +```java import java.io.InputStream; import java.util.LinkedList; import java.util.List; @@ -3820,7 +3957,8 @@ the whole data store. System.out.println(file.getInputStream()); } } - } + } +``` ### Fast Downloading @@ -3843,13 +3981,14 @@ library. Downloading is done in two steps: new FastDownloader(downloadSession).downloadTo(destinationFolder); - The files are stored in the destination folder in <data set - code>/<relative file path as in the data store on openBIS>. + The files are stored in the destination folder in <data set + code>/<relative file path as in the data store on openBIS>. Here is a complete example: **Search and list all files inside a data store** +```java import java.io.File; import java.nio.file.Path; import java.util.ArrayList; @@ -3946,6 +4085,7 @@ Here is a complete example: v3.logout(sessionToken); } } +``` #### What happens under the hood? @@ -3999,6 +4139,7 @@ following examples as a template. 
**Register Data Set** +```java import java.util.UUID; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.api.Request; @@ -4047,12 +4188,14 @@ following examples as a template. openbisV3.logout(); } } +``` ** Example (Javascript)** **Register Data Set** +```html <!DOCTYPE html> <html> <head> @@ -4106,6 +4249,7 @@ Example (Javascript)** </script> </body> </html> +``` ## VI. Web application context @@ -4127,6 +4271,7 @@ web application will do nothing. **WebAppContextExample.html** +```html <script> require(['openbis'], function(openbis) { var openbisV3 = new openbis(); @@ -4144,4 +4289,5 @@ web application will do nothing. console.log(sessionInfo.getUserName()); }); }); - </script> \ No newline at end of file + </script> +``` diff --git a/docs/software-developer-documentation/apis/personal-access-tokens.md b/docs/software-developer-documentation/apis/personal-access-tokens.md index f4668de5741f46f0dc30e026dd0ff216d0a662f5..9ee7177ff08406ef981d4748702281590de04473 100644 --- a/docs/software-developer-documentation/apis/personal-access-tokens.md +++ b/docs/software-developer-documentation/apis/personal-access-tokens.md @@ -115,10 +115,10 @@ Instead, each PAT should have a well defined validity period after which it should be replaced with a new PAT with a different hash. To make this transition as smooth as possible please use the following guide: -- create PAT\_1 with sessionName = <MY\_SESSION> and use it in +- create PAT\_1 with sessionName = <MY\_SESSION> and use it in your integration - when PAT\_1 is soon to be expired, create PAT\_2 with the same - sessionName = <MY\_SESSION> (both PAT\_1 and PAT\_2 will work + sessionName = <MY\_SESSION> (both PAT\_1 and PAT\_2 will work at this point and will refer to the same openBIS session) - replace PAT\_1 with PAT\_2 in your integration @@ -163,6 +163,7 @@ as the recommend way to manage getting the most up to date personal access token for an application and user. Including creation and renewal management. +```java private static final String URL = "https://openbis-sis-ci-sprint.ethz.ch/openbis/openbis" + IApplicationServerApi.SERVICE_URL; private static final int TIMEOUT = 10000; @@ -273,9 +274,10 @@ management. } } +``` ## V3 API Code examples for personal access tokens can be found in the main V3 API documentation: [openBIS V3 -API\#PersonalAccessTokens](/pages/viewpage.action?pageId=80699415) \ No newline at end of file +API\#PersonalAccessTokens](/pages/viewpage.action?pageId=80699415) diff --git a/docs/software-developer-documentation/apis/python-v3-api.md b/docs/software-developer-documentation/apis/python-v3-api.md index ec6ce3d0203215ec23323c2b1f34666d7a7c0af9..b402188ad11e20fc3108b94690cf1f23a98d3471 100644 --- a/docs/software-developer-documentation/apis/python-v3-api.md +++ b/docs/software-developer-documentation/apis/python-v3-api.md @@ -11,7 +11,7 @@ pyBIS is a Python module for interacting with openBIS. pyBIS is designed to be m ## Installation -``` +```python pip install --upgrade pybis ``` @@ -19,7 +19,7 @@ That command will download install pyBIS and all its dependencies. If pyBIS is a If you haven't done yet, install Jupyter and/or Jupyter Lab (the next Generation of Jupyter): -``` +```python pip install jupyter pip install jupyterlab ``` @@ -190,7 +190,7 @@ https://osxfuse.github.io **Unix Cent OS 7** -``` +```bash $ sudo yum install epel-release $ sudo yum --enablerepo=epel -y install fuse-sshfs $ user="$(whoami)" @@ -203,7 +203,7 @@ After the installation, an `sshfs` command should be available. 
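To double-check that step before moving on to mounting, here is a minimal verification sketch (assuming `sshfs` was installed into a directory on your `PATH`):

```bash
# confirm the sshfs binary is found and print its version
which sshfs
sshfs --version
```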
Because the mount/unmount procedure differs from platform to platform, pyBIS offers two simple methods: -``` +```python o.mount() o.mount(username, password, hostname, mountpoint, volname) o.is_mounted() @@ -232,7 +232,7 @@ OpenBIS stores quite a lot of meta-data along with your dataSets. The collection ### browse masterdata -``` +```python sample_types = o.get_sample_types() # get a list of sample types sample_types.df # DataFrame object st = o.get_sample_types()[3] # get 4th element of that list @@ -269,7 +269,7 @@ o.get_tags() The first step in creating a new entity type is to create a so called **property type**: -``` +```python pt_text = o.new_property_type( code = 'MY_NEW_PROPERTY_TYPE', label = 'yet another property type', @@ -340,7 +340,7 @@ The second step (after creating a property type, see above) is to create the **s - `new_sample_type()` == `new_object_type()` -``` +```python sample_type = o.new_sample_type( code = 'my_own_sample_type', # mandatory generatedCodePrefix = 'S', # mandatory @@ -358,7 +358,7 @@ sample_type.save() When `autoGeneratedCode` attribute is set to `True`, then you don't need to provide a value for `code` when you create a new sample. You can get the next autoGeneratedCode like this: -``` +```python sample_type.get_next_sequence() # eg. 67 sample_type.get_next_code() # e.g. FLY77 ``` @@ -369,7 +369,7 @@ From pyBIS 1.31.0 onwards, you can provide a `code` even for samples where its s The third step, after saving the sample type, is to **assign or revoke properties** to the newly created sample type. This assignment procedure applies to all entity types (dataset type, experiment type). -``` +```python sample_type.assign_property( prop = 'diff_time', # mandatory section = '', @@ -387,7 +387,7 @@ sample_type.get_property_assignments() The second step (after creating a **property type**, see above) is to create the **dataset type**. The third step is to **assign or revoke the properties** to the newly created dataset type. -``` +```python dataset_type = o.new_dataset_type( code = 'my_dataset_type', # mandatory description = None, @@ -410,7 +410,7 @@ The new name for **experiment** is **collection**. You can use both methods inte - `new_experiment_type()` == `new_collection_type()` -``` +```python experiment_type = o.new_experiment_type( code, description = None, @@ -426,7 +426,7 @@ experiment_type.get_property_assignments() Materials and material types are deprecated in newer versions of openBIS. -``` +```python material_type = o.new_material_type( code, description=None, @@ -443,7 +443,7 @@ material_type.get_property_assignments() Plugins are Jython scripts that can accomplish more complex data-checks than ordinary types and vocabularies can achieve. They are assigned to entity types (dataset type, sample type etc). [Documentation and examples can be found here](https://wiki-bsse.ethz.ch/display/openBISDoc/Properties+Handled+By+Scripts) -``` +```python pl = o.new_plugin( name ='my_new_entry_validation_plugin', pluginType ='ENTITY_VALIDATION', # or 'DYNAMIC_PROPERTY' or 'MANAGED_PROPERTY', @@ -463,7 +463,7 @@ Users can only login into the openBIS system when: - the user is already added to the openBIS user list (see below) - the user is assigned a role which allows a login, either directly assigned or indirectly assigned via a group membership -``` +```python o.get_groups() group = o.new_group(code='group_name', description='...') group = o.get_group('group_name') @@ -506,7 +506,7 @@ Spaces are fundamental way in openBIS to divide access between groups. 
Within a - sample / object - dataset -``` +```python space = o.new_space(code='space_name', description='') space.save() o.get_spaces( @@ -543,7 +543,7 @@ Projects live within spaces and usually contain experiments (aka collections): - sample / object - dataset -``` +```python project = o.new_project( space = space, code = 'project_name', @@ -607,7 +607,7 @@ The new name for **experiment** is **collection**. You can use boths names inter #### create a new experiment -``` +```python exp = o.new_experiment code='MY_NEW_EXPERIMENT', type='DEFAULT_EXPERIMENT', @@ -619,7 +619,7 @@ exp.save() #### search for experiments -``` +```python experiments = o.get_experiments( project = 'YEASTS', space = 'MY_SPACE', @@ -661,7 +661,7 @@ experiments = o.get_experiments( #### Experiment attributes -``` +```python exp.attrs.all() # returns all attributes as a dict exp.attrs.tags = ['some', 'tags'] @@ -686,7 +686,7 @@ exp.save() # needed to save/update the changed attribute **Getting properties** -``` +```python experiment.props == ds.p # you can use either .props or .p to access the properties experiment.p # in Jupyter: show all properties in a nice table experiment.p() # get all properties as a dict @@ -698,7 +698,7 @@ experiment.p['property'] # get the value of a property **Setting properties** -``` +```python experiment.experiment = 'first_exp' # assign sample to an experiment experiment.project = 'my_project' # assign sample to a project @@ -731,7 +731,7 @@ The new name for **sample** is **object**. You can use boths names interchangeab etc. -``` +```python sample = o.new_sample( type = 'YEAST', space = 'MY_SPACE', @@ -784,7 +784,7 @@ Creating a single sample takes some time. If you need to create many samples, yo **create many samples in one transaction** -``` +```python trans = o.new_transaction() for i in range (0, 100): sample = o.new_sample(...) @@ -795,7 +795,7 @@ trans.commit() **update many samples in one transaction** -``` +```python trans = o.new_transaction() for sample in o.get_samples(count=100): sample.prop.some_property = 'different value' @@ -806,7 +806,7 @@ trans.commit() **delete many samples in one transaction** -``` +```python trans = o.new_transaction() for sample in o.get_samples(count=100): sample.mark_to_be_deleted() @@ -820,7 +820,7 @@ trans.commit() #### parents, children, components and container -``` +```python sample.get_parents() sample.set_parents(['/MY_SPACE/PARENT_SAMPLE_NAME') sample.add_parents('/MY_SPACE/PARENT_SAMPLE_NAME') @@ -850,7 +850,7 @@ sample.del_components('/MY_SPACE/COMPONENT_NAME') #### sample tags -``` +```python sample.get_tags() sample.set_tags('tag1') sample.add_tags(['tag2','tag3']) @@ -861,7 +861,7 @@ sample.del_tags('tag1') **Getting properties** -``` +```python sample.attrs.all() # returns all attributes as a dict sample.attribute_name # return the attribute value @@ -876,7 +876,7 @@ sample.p['property'] # get the value of a property **Setting properties** -``` +```python sample.experiment = 'first_exp' # assign sample to an experiment sample.project = 'my_project' # assign sample to a project @@ -896,7 +896,7 @@ sample.save() # needed to save/update the attributes and The result of a search is always list, even when no items are found. The `.df` attribute returns the Pandas dataFrame of the results. 
-``` +```python samples = o.get_samples( space ='MY_SPACE', type ='YEAST', @@ -960,7 +960,7 @@ experiments = o.get_samples( #### freezing samples -``` +```python sample.freeze = True sample.freezeForComponents = True sample.freezeForChildren = True @@ -989,7 +989,7 @@ This example does the following - print the list of all files in this dataset - download the dataset -``` +```python datasets = sample.get_datasets(type='SCANS', start_with=0, count=10) for dataset in datasets: print(dataset.props()) @@ -1020,7 +1020,7 @@ experiments = o.get_datasets( **More dataset functions:** -``` +```python ds = o.get_dataset('20160719143426517-259') ds.get_parents() ds.get_children() @@ -1048,7 +1048,7 @@ ds.download_attachments(<path or cwd>) # Deprecated, as attachments are not com #### download dataSets -``` +```python o.download_prefix # used for download() and symlink() method. # Is set to data/hostname by default, but can be changed. ds.get_files(start_folder="/") # get file list as Pandas dataFrame @@ -1070,7 +1070,7 @@ ds.is_physical() # TRUE if dataset is physically Instead of downloading a dataSet, you can create a symbolic link to a dataSet in the openBIS dataStore. To do that, the openBIS dataStore needs to be mounted first (see mount method above). **Note:** Symbolic links and the mount() feature currently do not work with Windows. -``` +```python o.download_prefix # used for download() and symlink() method. # Is set to data/hostname by default, but can be changed. ds.symlink() # creates a symlink for this dataset: data/hostname/permId @@ -1087,7 +1087,7 @@ ds.is_symlink() **Getting properties** -``` +```python ds.attrs.all() # returns all attributes as a dict ds.attribute_name # return the attribute value @@ -1102,7 +1102,7 @@ ds.p['property'] # get the value of a property **Setting properties** -``` +```python ds.experiment = 'first_exp' # assign dataset to an experiment ds.sample = 'my_sample' # assign dataset to a sample @@ -1120,7 +1120,7 @@ ds.set_props({ key: value }) # set the values of some properties - The result of a search is always list, even when no items are found - The `.df` attribute returns the Pandas dataFrame of the results -``` +```python datasets = o.get_datasets( type ='MY_DATASET_TYPE', **{ "SOME.WEIRD:PROP": "value"}, # property name contains a dot or a @@ -1155,7 +1155,7 @@ df = datasets.df # returns a Pandas dataFrame object of the In some cases, you might want to retrieve precisely certain datasets. This can be achieved by methods chaining (but be aware, it might not be very performant): -``` +```python datasets = o.get_experiments(project='YEASTS')\ .get_samples(type='FLY')\ .get_datasets( @@ -1167,7 +1167,7 @@ datasets = o.get_experiments(project='YEASTS')\ - another example: -``` +```python datasets = o.get_experiment('/MY_NEW_SPACE/MY_PROJECT/MY_EXPERIMENT4')\ .get_samples(type='UNKNOWN')\ .get_parents()\ @@ -1179,7 +1179,7 @@ datasets = o.get_experiment('/MY_NEW_SPACE/MY_PROJECT/MY_EXPERIMENT4')\ - once a dataSet has been frozen, it cannot be changed by anyone anymore - so be careful! 
-``` +```python ds.freeze = True ds.freezeForChildren = True ds.freezeForParents = True @@ -1190,7 +1190,7 @@ ds.save() #### create a new dataSet -``` +```python ds_new = o.new_dataset( type = 'ANALYZED_DATA', experiment = '/SPACE/PROJECT/EXP1', @@ -1234,7 +1234,7 @@ ds_new.save() | `../../myData/` | `myData/` | | `some/experiment/results/` | `results/` | -``` +```python ds_new = o.new_dataset( type = 'RAW_DATA', sample = '/SPACE/SAMP1', @@ -1247,7 +1247,7 @@ ds_new.save() A DataSet of kind=CONTAINER contains other DataSets, but no files: -``` +```python ds_new = o.new_dataset( type = 'ANALYZED_DATA', experiment = '/SPACE/PROJECT/EXP1', @@ -1260,7 +1260,7 @@ ds_new.save() #### get, set, add and remove parent datasets -``` +```python dataset.get_parents() dataset.set_parents(['20170115220259155-412']) dataset.add_parents(['20170115220259155-412']) @@ -1269,7 +1269,7 @@ dataset.del_parents(['20170115220259155-412']) #### get, set, add and remove child datasets -``` +```python dataset.get_children() dataset.set_children(['20170115220259155-412']) dataset.add_children(['20170115220259155-412']) @@ -1282,7 +1282,7 @@ dataset.del_children(['20170115220259155-412']) - As opposed to Samples, DataSets may belong (contained) to more than one DataSet-container - caveat: containers are NOT compatible with ELN-LIMS -``` +```python dataset.get_containers() dataset.set_containers(['20170115220259155-412']) dataset.add_containers(['20170115220259155-412']) @@ -1294,7 +1294,7 @@ dataset.del_containers(['20170115220259155-412']) - you may also use the xxx_contained() functions, which are just aliases. - caveat: components are NOT compatible with ELN-LIMS -``` +```python dataset.get_components() dataset.set_components(['20170115220259155-412']) dataset.add_components(['20170115220259155-412']) @@ -1305,7 +1305,7 @@ dataset.del_components(['20170115220259155-412']) create semantic annotation for sample type 'UNKNOWN': -``` +```python sa = o.new_semantic_annotation( entityType = 'UNKNOWN', @@ -1321,14 +1321,14 @@ sa.save() Create semantic annotation for property type (predicate and descriptor values omitted for brevity) -``` +```python sa = o.new_semantic_annotation(propertyType = 'DESCRIPTION', ...) sa.save() ``` **Create** semantic annotation for sample property assignment (predicate and descriptor values omitted for brevity) -``` +```python sa = o.new_semantic_annotation( entityType = 'UNKNOWN', propertyType = 'DESCRIPTION', @@ -1339,26 +1339,26 @@ sa.save() **Create** a semantic annotation directly from a sample type. Will also create sample property assignment annotations when propertyType is given: -``` +```python st = o.get_sample_type("ORDER") st.new_semantic_annotation(...) 
``` **Get all** semantic annotations -``` +```python o.get_semantic_annotations() ``` **Get** semantic annotation by perm id -``` +```python sa = o.get_semantic_annotation("20171015135637955-30") ``` **Update** semantic annotation -``` +```python sa.predicateOntologyId = 'new_po_id' sa.descriptorOntologyId = 'new_do_id' sa.save() @@ -1366,13 +1366,13 @@ sa.save() **Delete** semantic annotation -``` +```python sa.delete('reason') ``` ### Tags -``` +```python new_tag = o.new_tag( code = 'my_tag', description = 'some descriptive text' @@ -1410,7 +1410,7 @@ So for example, you want to add a property called **Animal** to a Sample and you **create new Vocabulary with three VocabularyTerms** -``` +```python voc = o.new_vocabulary( code = 'BBB', description = 'description of vocabulary aaa', @@ -1430,7 +1430,7 @@ voc.save() # update **create additional VocabularyTerms** -``` +```python term = o.new_term( code='TERM_CODE_XXX', vocabularyCode='BBB', @@ -1444,7 +1444,7 @@ term.save() To change the ordinal of a term, it has to be moved either to the top with the `.move_to_top()` method or after another term using the `.move_after_term('TERM_BEFORE')` method. -``` +```python voc = o.get_vocabulary('STORAGE') term = voc.get_terms()['RT'] term.label = "Room Temperature" @@ -1540,4 +1540,4 @@ pt.save() Currently, the value of the `custom_widget` key can be set to either - `Spreadsheet` (for tabular, Excel-like data) -- `Word Processor` (for rich text data) \ No newline at end of file +- `Word Processor` (for rich text data) diff --git a/docs/software-developer-documentation/client-side-extensions/eln-lims-web-ui-extensions.md b/docs/software-developer-documentation/client-side-extensions/eln-lims-web-ui-extensions.md index 5aad91e7cc31544fa48f19e80c7ebcc29cff4c05..73266064fefbeb8eff422aaf98c6dbafb8dec1b8 100644 --- a/docs/software-developer-documentation/client-side-extensions/eln-lims-web-ui-extensions.md +++ b/docs/software-developer-documentation/client-side-extensions/eln-lims-web-ui-extensions.md @@ -29,9 +29,11 @@ Each extension currently contains a single file with name "plugin.js". Contains a section called  PLUGINS\_CONFIGURATION indicating the plugins to be loaded from the plugins folder. +```js var PLUGINS_CONFIGURATION = { extraPlugins : ["life-sciences", "flow", "microscopy"] } +``` ### plugin.js file @@ -83,7 +85,7 @@ Pattern](https://en.wikipedia.org/wiki/Interceptor_pattern) - beforeViewPaint - afterViewPaint - + - Template methods are only needed to add custom components to from views. Best examples of how to use these can be found in @@ -105,6 +107,7 @@ Pattern](https://en.wikipedia.org/wiki/Interceptor_pattern) An example with only type configurations extensions is show below. +```js function MyTechnology() { this.init(); } @@ -153,12 +156,14 @@ An example with only type configurations extensions is show below. 
}); profile.plugins.push(new MyTechnology()); +``` ### Toolbar Extensions An example with only toolbar extensions is shown below, variables with a dollar sign '$' indicate they are jquery components: +```js function MyTechnology() { this.init(); } @@ -198,8 +203,9 @@ dollar sign '$' indicate they are jquery components: }); profile.plugins.push(new MyTechnology()); +``` ### Extra Views as Utilities Please check the provided example: -<https://sissource.ethz.ch/sispub/openbis/-/blob/master/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/plugins/template-extra-utilities/plugin.js> \ No newline at end of file +<https://sissource.ethz.ch/sispub/openbis/-/blob/master/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/plugins/template-extra-utilities/plugin.js> diff --git a/docs/software-developer-documentation/client-side-extensions/openbis-webapps.md b/docs/software-developer-documentation/client-side-extensions/openbis-webapps.md index 3889340c5384be77b8237872176ffdb62fd963af..52262359597457093fc655360e6eaa4e8b5b051d 100644 --- a/docs/software-developer-documentation/client-side-extensions/openbis-webapps.md +++ b/docs/software-developer-documentation/client-side-extensions/openbis-webapps.md @@ -246,7 +246,7 @@ Notes about subtab identifiers: webapp core-plugin folder, i.e. \[technology\]/\[version\]/as/webapps/\[WEBAPP\_CODE\]) -Cross communication openBIS > DSS +Cross communication openBIS > DSS ------------------------------------ ### Background diff --git a/docs/software-developer-documentation/development-environment/architectural-overview.md b/docs/software-developer-documentation/development-environment/architectural-overview.md index dc65bec12a58b485e18016f359e669698718410b..db3844f079929e10c5cc8128377161ef3b874673 100644 --- a/docs/software-developer-documentation/development-environment/architectural-overview.md +++ b/docs/software-developer-documentation/development-environment/architectural-overview.md @@ -1,4 +1,4 @@ -Architectural Overview -====================== - +Architectural Overview +====================== + hello world \ No newline at end of file diff --git a/docs/software-developer-documentation/development-environment/installation-and configuration-guide.md b/docs/software-developer-documentation/development-environment/installation-and configuration-guide.md index 6cfd30cd4b087f93740f2c4134a6e47bd72564d1..8cfc3063ec0ec2c1d1990cbe1e5235f0abc8d1f8 100644 --- a/docs/software-developer-documentation/development-environment/installation-and configuration-guide.md +++ b/docs/software-developer-documentation/development-environment/installation-and configuration-guide.md @@ -1,4 +1,4 @@ -Installation And Configuration Guide -==================================== - +Installation And Configuration Guide +==================================== + hello world \ No newline at end of file diff --git a/docs/software-developer-documentation/development-environment/system-requirements.md b/docs/software-developer-documentation/development-environment/system-requirements.md index a1deff5b1a8c9c36173da30ec70e70c9f91c0fb9..50b27cd6a3c2bb87a10a9371d8e5a3c6db161264 100644 --- a/docs/software-developer-documentation/development-environment/system-requirements.md +++ b/docs/software-developer-documentation/development-environment/system-requirements.md @@ -1,4 +1,4 @@ -System Requirements -=================== - +System Requirements +=================== + hello world \ No newline at end of file diff --git 
a/docs/software-developer-documentation/server-side-extensions/as-api-listener.md b/docs/software-developer-documentation/server-side-extensions/as-api-listener.md index 458067128624e15d35fa2e8b24ca9bfd20b59cfb..0c66bc7154ad51f94ca2e57e3ebe97cb6954a8b6 100644 --- a/docs/software-developer-documentation/server-side-extensions/as-api-listener.md +++ b/docs/software-developer-documentation/server-side-extensions/as-api-listener.md @@ -32,7 +32,7 @@ It is required to provide an 'operation-listener.class' indicating the class name of the listener that will be loaded. Additionally any number of properties following the -pattern 'operation-listener.<your-custom-name>' can be provided. +pattern 'operation-listener.<your-custom-name>' can be provided. Custom properties are provided to help maintainability, they give an opportunity to the integrator to only need to compile the listener once and configure it differently for different instances. diff --git a/docs/software-developer-documentation/server-side-extensions/core-plugins.md b/docs/software-developer-documentation/server-side-extensions/core-plugins.md index 6ca96076a1e20d203fde1b80dad49520eb835031..0928e3da9d23af8f8d970575ca9038871fcef2cf 100644 --- a/docs/software-developer-documentation/server-side-extensions/core-plugins.md +++ b/docs/software-developer-documentation/server-side-extensions/core-plugins.md @@ -283,7 +283,7 @@ rules: ## Using Java libraries in Core Plugins OpenBIS allows you to include Java libraries in core plugin folders. The -\*.jar files have to be stored in "<code plugin folder>/lib" +\*.jar files have to be stored in "<code plugin folder>/lib" folder. For instance, in order to use "my-lib.jar" in "my-dropbox" a following file structure is needed: diff --git a/docs/system-admin-documentation/advanced-features/authentication-systems.md b/docs/system-admin-documentation/advanced-features/authentication-systems.md index e60b24d6e1d65d206cacc5c106fd77621bb060c8..25fd581ed9cce4d5fb7877f77256fd6ed8fb8abd 100644 --- a/docs/system-admin-documentation/advanced-features/authentication-systems.md +++ b/docs/system-admin-documentation/advanced-features/authentication-systems.md @@ -1,4 +1,4 @@ -Authentication Systems -====================== - +Authentication Systems +====================== + To be written \ No newline at end of file diff --git a/docs/system-admin-documentation/advanced-features/maintenance-tasks.md b/docs/system-admin-documentation/advanced-features/maintenance-tasks.md index 5cb3691c072c33f40fe735bc234e48977ef206ea..ca453c8d6e6fe91ee3d15d31dae5ce904b7415a4 100644 --- a/docs/system-admin-documentation/advanced-features/maintenance-tasks.md +++ b/docs/system-admin-documentation/advanced-features/maintenance-tasks.md @@ -34,7 +34,7 @@ The following properties are common for all maintenance tasks: | start | A time at which the task should be executed the first time. Format: HH:mm. where HH is a two-digit hour (in 24h notation) and mm is a two-digit minute. By default the task is execute at server startup. | | run-schedule | Scheduling plan for task execution. Properties execute-only-once, interval, and start will be ignored if specified. Crontab syntax: -cron: <second> <minute> <hour> <day> <month> <weekday> +cron: <second> <minute> <hour> <day> <month> <weekday> Examples: cron: 0 0 * * * *: the top of every hour of every day. cron: */10 * * * * *: every ten seconds. @@ -45,15 +45,15 @@ cron: 0 0 9-17 * * MON-FRI: on the hour nine-to-five weekdays. cron: 0 0 0 25 12 ?: every Christmas Day at midnight. 
Non-crontab syntax: Comma-separated list of definitions with following syntax: -[[<counter>.]<week day>] [<month day>[.<month>]] <hour>[:<minute>] -where <counter> counts the specified week day of the month. <week day> is MO, MON, TU, TUE, WE, WED, TH, THU, FR, FRI, SA, SAT, SU, or SUN (ignoring case). <month> is either the month number (followed by an optionl '.') or JAN, FEB, MAR, APR, MAY, JUN, JUL, AUG, SEP, OCT, NOV, or DEC (ignoring case). +[[<counter>.]<week day>] [<month day>[.<month>]] <hour>[:<minute>] +where <counter> counts the specified week day of the month. <week day> is MO, MON, TU, TUE, WE, WED, TH, THU, FR, FRI, SA, SAT, SU, or SUN (ignoring case). <month> is either the month number (followed by an optionl '.') or JAN, FEB, MAR, APR, MAY, JUN, JUL, AUG, SEP, OCT, NOV, or DEC (ignoring case). Examples: 6, 18: every day at 6 AM and 6 PM. 3.FR 22:15: every third friday of a month at 22:15. 1. 15:50: every first day of a month at 3:50 PM. SAT 1:30: every saturday at 1:30 AM. 1.Jan 5:15, 1.4. 5:15, 1.7 5:15, 1. OCT 5:15: every first day of a quarter at 5:15 AM. | -| run-schedule-file | File where the timestamp for next execution is stored. It is used if run-schedule is specified. Default: <installation folder>/<plugin name>_<class name> | +| run-schedule-file | File where the timestamp for next execution is stored. It is used if run-schedule is specified. Default: <installation folder>/<plugin name>_<class name> | | retry-intervals-after-failure | Optional comma-separated list of time intervals (format as for interval) after which a failed execution will be retried. Note, that a maintenance task will be execute always when the next scheduled timepoint occurs. This feature allows to execute a task much earlier in case of temporary errors (e.g. temporary unavailibity of another server). | ## Feature @@ -180,13 +180,13 @@ properties need to scanned they should be added to the plugin.properties |----------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | dataset-types | Comma-separated list of regular expressions of data set types. All FASTA and FASTQ files from those data sets are handled. All data sets of types not matching at least one of the regular expression are not handled. | | entity-sequence-properties | Comma-separated list of descriptions of entity properties with sequences. A description is of the form -<entity kind>+<entity type code>+<property type code> -where <entity kind> is either EXPERIMENT, SAMPLE or DATA_SET (Materials are not supported). | +<entity kind>+<entity type code>+<property type code> +where <entity kind> is either EXPERIMENT, SAMPLE or DATA_SET (Materials are not supported). | | file-types | Space separated list of file types. Data set files of those file types have to be FASTA or FASTQ files. Default: .fasta .fa .fsa .fastq | | blast-tools-directory | Path in the file system where all BLAST tools are located. If it is not specified or empty the tools directory has to be in the PATH environment variable. | -| blast-databases-folder | Path to the folder where all BLAST databases are stored. Default: <data store root>/blast-databases | -| blast-temp-folder | Path to the folder where temporary FASTA files are stored. 
Default: <blast-databases-folder>/tmp | -| last-seen-data-set-file | Path to the file which stores the id of the last seen data set. Default: <data store root>/last-seen-data-set-for-BLAST-database-creation | +| blast-databases-folder | Path to the folder where all BLAST databases are stored. Default: <data store root>/blast-databases | +| blast-temp-folder | Path to the folder where temporary FASTA files are stored. Default: <blast-databases-folder>/tmp | +| last-seen-data-set-file | Path to the file which stores the id of the last seen data set. Default: <data store root>/last-seen-data-set-for-BLAST-database-creation | **Example**: @@ -325,7 +325,7 @@ some criteria. This tasks needs the archive plugin to be configured in | Property Key | Description | |-----------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | excluded-data-set-types | Comma-separated list of data set types. Data sets of such types are not archived. Default: No data set type is excluded. | -| estimated-data-set-size-in-KB.<data set type> | Specifies for the data set type <data set type> the average size in KB. If <data set type> is DEFAULT it will be used for all data set types with unspecified estimated size. | +| estimated-data-set-size-in-KB.<data set type> | Specifies for the data set type <data set type> the average size in KB. If <data set type> is DEFAULT it will be used for all data set types with unspecified estimated size. | | free-space-provider.class | Fully qualified class name of the free space provider (implementing ch.systemsx.cisd.common.filesystem.IFreeSpaceProvider). Depending on the free space provider additional properties, all starting with prefix free-space-provider., might be needed. Default: ch.systemsx.cisd.common.filesystem.SimpleFreeSpaceProvider | | monitored-dir | Path to the directory to be monitored by the free space provider. | | minimum-free-space-in-MB | Minimum free space in MB. If the free space is below this limit the task archives data sets. Default: 1 GB | @@ -381,8 +381,8 @@ are organized hierachical in accordance to their experiment and samples | storeroot-dir | Path to the root directory of the store. Used if storeroot-dir-link-path is not specified. | | hierarchy-root-dir | Path to the root directory of mirrored store. | | link-naming-strategy.class | Fully qualified class name of the strategy to generate the hierarchy (implementing ch.systemsx.cisd.etlserver.plugins.IHierarchicalStorageLinkNamingStrategy). Depending on the actual strategy additional properties, all starting with prefix link-naming-strategy., mighty be needed. Default: ch.systemsx.cisd.etlserver.plugins.TemplateBasedLinkNamingStrategy | -| link-source-subpath.<data set type> | Link source subpath for the specified data set type. Only files and folder in this relative path inside a data set will be mirrored. Default: The complete data set folder will be mirroed. | -| link-from-first-child.<data set type> | Flag which specifies whether only the first child of or the complete folder (either the data set or the one specified by link-source-subpath.<data set type>). Default: False | +| link-source-subpath.<data set type> | Link source subpath for the specified data set type. 
Only files and folder in this relative path inside a data set will be mirrored. Default: The complete data set folder will be mirroed. | +| link-from-first-child.<data set type> | Flag which specifies whether only the first child of or the complete folder (either the data set or the one specified by link-source-subpath.<data set type>). Default: False | | with-meta-data | Flag, which specifies whether directories with meta-data.tsv and a link should be created or only links. The default behavior is to create links-only. Default: false | | link-naming-strategy.template | The exact form of link paths produced by TemplateBasedLinkNamingStrategy is defined by this template. The variables dataSet, dataSetType, sample, experiment, project and space will be recognized and replaced in the actual link path. @@ -522,9 +522,9 @@ data set is the starting point when the task is executed next time. | Property Key | Description | |----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | compute-checksum | If true the CRC32 checksum (and optionally a checksum of the type specified by checksum-type) of all files will be calculated and stored in pathinfo database. Default value: false | -| checksum-type | Optional checksum type. If specified and compute-checksum = true two checksums are calculated: CRC32 checksum and the checksum of specified type. The type and the checksum are stored in the pathinfo database. An allowed type has to be supported by MessageDigest.getInstance(<checksum type>). For more details see http://docs.oracle.com/javase/8/docs/api/java/security/MessageDigest.html#getInstance-java.lang.String-. | +| checksum-type | Optional checksum type. If specified and compute-checksum = true two checksums are calculated: CRC32 checksum and the checksum of specified type. The type and the checksum are stored in the pathinfo database. An allowed type has to be supported by MessageDigest.getInstance(<checksum type>). For more details see http://docs.oracle.com/javase/8/docs/api/java/security/MessageDigest.html#getInstance-java.lang.String-. | | data-set-chunk-size | Number of data sets requested from AS in one chunk if it is used as a maintenance task. Default: 1000 | -| max-number-of-chunks | Maximum number of chunks of size data-set-chunk-size are processed if it is used as a maintenance task. If it is <= 0 and time-limit isn't defined all data sets are processed. Default: 0 | +| max-number-of-chunks | Maximum number of chunks of size data-set-chunk-size are processed if it is used as a maintenance task. If it is <= 0 and time-limit isn't defined all data sets are processed. Default: 0 | | time-limit | Limit of execution time of this task if it is used as a maintenance task. The task is stopped before reading next chunk if the time has been used up. If it is specified it is an alternative way to limit the number of data sets to be processed instead of specifying max-number-of-chunks. This parameter can be specified with one of the following time units: ms, msec, s, sec, m, min, h, hours, d, days. Default time unit is sec. 
| **Example**: @@ -685,7 +685,7 @@ When specified this task stops checking after the specified pausing time point a After all data sets have been checked the task checks again all data sets started by the oldest one specified by checking-time-interval. | | continuing-time-point | Time point where checking continous. Format: HH:mm. where HH is a two-digit hour (in 24h notation) and mm is a two-digit minute. Ignored when pausing-time-point isn't specified. Default value: Time when the task is executed. | | chunk-size | Maximum number of data sets retrieved from AS. Ignored when pausing-time-point isn't specified. Default value: 1000 | -| state-file | File to store registration time stamp and code of last considered data set. This is only used when pausing-time-point has been specified. Default: <store root>/DataSetAndPathInfoDBConsistencyCheckTask-state.txt | +| state-file | File to store registration time stamp and code of last considered data set. This is only used when pausing-time-point has been specified. Default: <store root>/DataSetAndPathInfoDBConsistencyCheckTask-state.txt | **Example**: The following example checks all data sets of the last ten years. It does the check only during the night and continues next night. @@ -759,21 +759,21 @@ makes several assumptions on the database schema: The general format of the mapping file is as follows: -\[<Material Type Code>: <table Name>, <code column -name>\] +\[<Material Type Code>: <table Name>, <code column +name>\] -<Property Type Code>: <column name> +<Property Type Code>: <column name> -<Property Type Code>: <column name> +<Property Type Code>: <column name> ... -\[<Material Type Code>: <table Name>, <code column -name>\] +\[<Material Type Code>: <table Name>, <code column +name>\] -<Property Type Code>: <column name> +<Property Type Code>: <column name> -<Property Type Code>: <column name> +<Property Type Code>: <column name> ... @@ -1045,7 +1045,7 @@ data source for key 'path-info-db'. | Property Key | Description | |---------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| checksum-type | Optional checksum type. If specified two checksums are calculated: CRC32 checksum and the checksum of specified type. The type and the checksum are stored in the pathinfo database. An allowed type has to be supported by MessageDigest.getInstance(<checksum type>). For more details see http://docs.oracle.com/javase/8/docs/api/java/security/MessageDigest.html#getInstance-java.lang.String-. | +| checksum-type | Optional checksum type. If specified two checksums are calculated: CRC32 checksum and the checksum of specified type. The type and the checksum are stored in the pathinfo database. An allowed type has to be supported by MessageDigest.getInstance(<checksum type>). For more details see http://docs.oracle.com/javase/8/docs/api/java/security/MessageDigest.html#getInstance-java.lang.String-. | **Example**: @@ -1083,12 +1083,12 @@ the pathinfo database. 
| Property Key | Description | |---------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| time-stamp-of-youngest-data-set | Time stamp of the youngest data set to be considered. The format has to be <4 digit year>-<month>-<day> <hour>:<minute>:<second>. | +| time-stamp-of-youngest-data-set | Time stamp of the youngest data set to be considered. The format has to be <4 digit year>-<month>-<day> <hour>:<minute>:<second>. | | compute-checksum | If true the CRC32 checksum (and optionally a checksum of the type specified by checksum-type) of all files will be calculated and stored in pathinfo database. Default value: true | -| checksum-type | Optional checksum type. If specified and compute-checksum = true two checksums are calculated: CRC32 checksum and the checksum of specified type. The type and the checksum are stored in the pathinfo database. An allowed type has to be supported by MessageDigest.getInstance(<checksum type>). For more details see http://docs.oracle.com/javase/8/docs/api/java/security/MessageDigest.html#getInstance-java.lang.String-. | +| checksum-type | Optional checksum type. If specified and compute-checksum = true two checksums are calculated: CRC32 checksum and the checksum of specified type. The type and the checksum are stored in the pathinfo database. An allowed type has to be supported by MessageDigest.getInstance(<checksum type>). For more details see http://docs.oracle.com/javase/8/docs/api/java/security/MessageDigest.html#getInstance-java.lang.String-. | | chunk-size | Number of data sets requested from AS in one chunk. Default: 1000 | | data-set-type | Optional data set type. If specified, only data sets of the specified type are considered. Default: All data set types. | -| state-file | File to store registration time stamp and code of last considered data set. Default: <store root>/PathInfoDatabaseRefreshingTask-state.txt | +| state-file | File to store registration time stamp and code of last considered data set. 
Default: <store root>/PathInfoDatabaseRefreshingTask-state.txt | **Example**: diff --git a/docs/system-admin-documentation/advanced-features/share-ids.md b/docs/system-admin-documentation/advanced-features/share-ids.md index 2eaf5ad0ea9a6db391525576e32fdc2901a4a635..7f228a00ef6e990d58c12cebd365e74c8e91c866 100644 --- a/docs/system-admin-documentation/advanced-features/share-ids.md +++ b/docs/system-admin-documentation/advanced-features/share-ids.md @@ -1,4 +1,4 @@ -Share IDs -========= - +Share IDs +========= + To be written \ No newline at end of file diff --git a/docs/system-admin-documentation/docker-installation/docker-installation-and-configuration.md b/docs/system-admin-documentation/docker-installation/docker-installation-and-configuration.md index 1b5697bdad4e303de5190fb6b625e64d820d02fd..d75b8295aab33e30bbede935188a85f8b92872be 100644 --- a/docs/system-admin-documentation/docker-installation/docker-installation-and-configuration.md +++ b/docs/system-admin-documentation/docker-installation/docker-installation-and-configuration.md @@ -1,4 +1,4 @@ -Docker Installation And Configuration -===================================== - +Docker Installation And Configuration +===================================== + To be written \ No newline at end of file diff --git a/docs/system-admin-documentation/installation/architectural-overview.md b/docs/system-admin-documentation/installation/architectural-overview.md index e4cc8172296063063338b14975833a0cd2c09dd4..b919163d7a78a33cce1650e76b22d0626f1b7c1f 100644 --- a/docs/system-admin-documentation/installation/architectural-overview.md +++ b/docs/system-admin-documentation/installation/architectural-overview.md @@ -1,4 +1,4 @@ -Architectural Overview -====================== - +Architectural Overview +====================== + To be written \ No newline at end of file diff --git a/docs/system-admin-documentation/installation/installation-and-configuration-guide.md b/docs/system-admin-documentation/installation/installation-and-configuration-guide.md index a6d2b8646fc72c618a0aaf15d5e73d25db11f8af..ac90a776a2e098a97ebeeef7d9b128b91631e984 100644 --- a/docs/system-admin-documentation/installation/installation-and-configuration-guide.md +++ b/docs/system-admin-documentation/installation/installation-and-configuration-guide.md @@ -481,7 +481,7 @@ openBIS database. They are all mandatory. | `database.create-from-scratch` | If true the database will be dropped and an empty database will be created. In productive use always set this value to false . | | `database.script-single-step-mode` | If true all SQL scripts are executed in single step mode. Useful for localizing errors in SQL scripts. Should be always false in productive mode. | | `database.url-host-part` | Part of JDBC URL denoting the host of the database server. If openBIS Application Server and database server are running on the same machine this property should be an empty string. | -| `database.kind` | Part of the name of the database. The full name reads openbis_< kind >. | +| `database.kind` | Part of the name of the database. The full name reads openbis_< kind >. | | `database.admin-user` | ID of the user on database server with admin rights, like creation of tables. Should be an empty string if default admin user should be used. In case of PostgreSQL the default admin user is assumed to be postgres. | | database.admin-password | Password for admin user. Usual an empty string. | | `database.owner` | ID of the user owning the data. This should generally be openbis. 
The openbis role and password need to be created. In case of an empty string it is the same user who started up openBIS Application Server. | @@ -1314,8 +1314,8 @@ configured: | Property | Description | |---------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| <database>.data-space | To which data-space this database belongs to (optional, i.e. a query database can be configured not to belong to one data space by leaving this configuration value empty). | -| <database>.creator-minimal-role | What role is required to be allowed to create / edit queries on this database (optional, default: INSTANCE_OBSERVER if data-space is not set, POWER_USER otherwise). | +| <database>.data-space | To which data-space this database belongs to (optional, i.e. a query database can be configured not to belong to one data space by leaving this configuration value empty). | +| <database>.creator-minimal-role | What role is required to be allowed to create / edit queries on this database (optional, default: INSTANCE_OBSERVER if data-space is not set, POWER_USER otherwise). | The given parameters data-space and creator-minimal-role are used by openBIS to enforce proper authorization. @@ -1406,17 +1406,17 @@ The table below describes the possible commands and their arguments. | Command | Argument(s) | Default Value | Description | |--------------------------------------|--------------------------------------------------------|---------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | log-service-calls | 'on', 'off' | 'off' | Turns on / off detailed service call logging. -When this feature is enabled, openBIS will log about start and end of every service call it executes to file <installation directory>/servers/openBIS-server/jetty/log/openbis_service_calls.txt | +When this feature is enabled, openBIS will log about start and end of every service call it executes to file <installation directory>/servers/openBIS-server/jetty/log/openbis_service_calls.txt | | log-long-running-invocations | 'on', 'off' | 'on' | Turns on / off logging of long running invocations. -When this feature is enabled, openBIS will periodically create a report of all service calls that have been in execution more than 15 seconds to file <installation directory>/servers/openBIS-server/jetty/log/openbis_long_running_threads.txt. | +When this feature is enabled, openBIS will periodically create a report of all service calls that have been in execution more than 15 seconds to file <installation directory>/servers/openBIS-server/jetty/log/openbis_long_running_threads.txt. | | debug-db-connections | 'on', 'off' | 'off' | Turns on / off logging about database connection pool activity. 
-When this feature is enabled, information about every borrow and return to database connection pool is logged to openBIS main log in file <installation directory>/servers/openBIS-server/jetty/log/openbis_log.txt | -| log-db-connections | no argument / minimum connection age (in milliseconds) | 5000 | When this command is executed without an argument, information about every database connection that has been borrowed from the connection pool is written into openBIS main log in file <installation directory>/servers/openBIS-server/jetty/log/openbis_log.txt +When this feature is enabled, information about every borrow and return to database connection pool is logged to openBIS main log in file <installation directory>/servers/openBIS-server/jetty/log/openbis_log.txt | +| log-db-connections | no argument / minimum connection age (in milliseconds) | 5000 | When this command is executed without an argument, information about every database connection that has been borrowed from the connection pool is written into openBIS main log in file <installation directory>/servers/openBIS-server/jetty/log/openbis_log.txt If the "minimum connection age" argument is specified, only connections that have been out of the pool longer than the specified time are logged. The minimum connection age value is given in milliseconds. | | record-stacktrace-db-connections | 'on', 'off' | 'off' | Turns on / off logging of stacktraces. When this feature is enabled AND debug-db-connections is enabled, the full stack trace of the borrowing thread will be recorded with the connection pool activity logs. | | log-db-connections-separate-log-file | 'on', 'off' | 'off' | Turns on / off database connection pool logging to separate file. -When this feature is disabled, the database connection pool activity logging is done only to openBIS main log. When this feature is enabled, the activity logging is done ALSO to file <installation directory>/servers/openBIS-server/jetty/log/openbis_db_connections.txt. | +When this feature is disabled, the database connection pool activity logging is done only to openBIS main log. When this feature is enabled, the activity logging is done ALSO to file <installation directory>/servers/openBIS-server/jetty/log/openbis_db_connections.txt. 
|  diff --git a/docs/system-admin-documentation/installation/optional-application-server-configuration.md b/docs/system-admin-documentation/installation/optional-application-server-configuration.md index 104437f36d295cc99d603a2303034eeabaf9d24e..871c0661e4f908405755a0276ba2ecf1bf26dc1e 100644 --- a/docs/system-admin-documentation/installation/optional-application-server-configuration.md +++ b/docs/system-admin-documentation/installation/optional-application-server-configuration.md @@ -1,4 +1,4 @@ -Optional Application Server Configuration -========================================= - +Optional Application Server Configuration +========================================= + To be written \ No newline at end of file diff --git a/docs/system-admin-documentation/installation/optional-datastore-server-configuration.md b/docs/system-admin-documentation/installation/optional-datastore-server-configuration.md index 4cbbee2e0b822cb8b96e73673d9a33c38ef2a883..be53e40b748426170f3be1e1ad484481c017d125 100644 --- a/docs/system-admin-documentation/installation/optional-datastore-server-configuration.md +++ b/docs/system-admin-documentation/installation/optional-datastore-server-configuration.md @@ -1,4 +1,4 @@ -Optional Datastore Server Configuration -======================================= - +Optional Datastore Server Configuration +======================================= + To be written \ No newline at end of file diff --git a/docs/system-admin-documentation/installation/system-requirements.md b/docs/system-admin-documentation/installation/system-requirements.md index 62f9a29fe06623538b2ad88cd0f6edc1364dde9e..96f449d14f88c9a4bc6258ac5bdc3f6cc33cc482 100644 --- a/docs/system-admin-documentation/installation/system-requirements.md +++ b/docs/system-admin-documentation/installation/system-requirements.md @@ -1,4 +1,4 @@ -System Requirements -=================== - +System Requirements +=================== + To be written \ No newline at end of file diff --git a/docs/user-documentation/advance-features/command-line-tool.md b/docs/user-documentation/advance-features/command-line-tool.md index dac2b9a10aec5356acd1071e770b3de6df22b7ed..d84df1ff1d3b932e33553e87def6421a39bb0e1d 100644 --- a/docs/user-documentation/advance-features/command-line-tool.md +++ b/docs/user-documentation/advance-features/command-line-tool.md @@ -16,7 +16,7 @@ case, OpenBIS is aware of its existence and the data can be used for provenance ## 2. Installation -``` +```bash pip3 install obis ``` @@ -25,13 +25,15 @@ Since `obis` is based on `pybis`, the pip command will also install pybis and al ## 3. Quick start guide **Configure your openBIS Instance** -``` + +```bash # global settings to be use for all obis repositories obis config -g set openbis_url=https://localhost:8888 obis config -g set user=admin ``` **Download Physical Dataset** -``` + +```bash # create a physical (-p) obis repository with a folder name obis init -p data1 cd data1 @@ -40,8 +42,10 @@ obis config get is_physical # download dataset giving a single permId obis download 20230228091119011-58 ``` + **Upload Physical Dataset** -``` + +```bash # create a physical (-p) obis repository with a folder name obis init -p data1 cd data1 @@ -55,9 +59,9 @@ obis upload 20230228133001314-59 RAW_DATA -f your_file_a -f your_file_b ### 4.1 Help is your friend! +```bash $ obis --help -``` Usage: obis [OPTIONS] COMMAND [ARGS]... 
Options: @@ -90,7 +94,7 @@ Commands: To show detailed help for a specific command, type `obis <command> --help` : -``` +```bash $ obis commit --help Usage: obis commit [OPTIONS] [REPOSITORY] @@ -168,7 +172,7 @@ and get/set properties of objects/collections represented by datasets in current **collection** -``` +```bash obis collection get [key1] [key2] ... obis collection set [key1]=[value1], [key2]=[value2] ... ``` @@ -181,7 +185,7 @@ data set is connected directly to a collection - gets or sets given properties t **config** -``` +```bash obis config get [key] obis config set [key]=[value] ``` @@ -195,7 +199,7 @@ it comes to integration with other tools. **Example `.obis/config.json`** -``` +```bash { "fileservice_url": null, "git_annex_hash_as_checksum": true, @@ -207,7 +211,7 @@ it comes to integration with other tools. **data_set** -``` +```bash obis data_set search [OPTIONS] Options: @@ -243,7 +247,7 @@ configuration.* **download** -``` +```bash obis download [options] [data_set_id] Options: @@ -262,7 +266,7 @@ and the `fileservice_url` needs to be configured. **init** -``` +```bash obis init -p [folder] ``` @@ -272,7 +276,7 @@ If not, it will use the current folder. **object get / set** -``` +```bash obis collection get [key1] [key2] ... obis collection set [key1]=[value1], [key2]=[value2] ... ``` @@ -285,7 +289,7 @@ data set is connected directly to an object - gets or sets given properties to i **object search** -``` +```bash obis object search [OPTIONS] Options: @@ -319,7 +323,7 @@ configuration.* **upload** -``` +```bash obis upload [sample_id] [data_set_type] [OPTIONS] ``` @@ -331,7 +335,7 @@ data set. **Create an obis repository to work in Standard Data Store mode** -``` +```bash # global settings to be use for all obis repositories obis config -g set openbis_url=https://localhost:8888 obis config -g set user=admin @@ -351,7 +355,7 @@ obis upload 20230228133001314-59 RAW_DATA -f results.csv -f results_space.csv **download datasets of an object and check properties** -``` +```bash # assuming we are in a configured obis repository obis download 20230228091119011-58 # set object name to XYZ @@ -380,25 +384,25 @@ openBIS. With `get` you retrieve one or more settings. If the `key` is omitted, you retrieve all settings of the `type`: -``` +```bash obis [type] [options] get [key] ``` With `set` you set one or more settings: -``` +```bash obis [type] [options] set [key1]=[value1], [key2]=[value2], ... ``` With `clear` you unset one or more settings: -``` +```bash obis [type] [options] clear [key1] ``` With the type `settings` you can get all settings at once: -``` +```bash obis settings [options] get ``` @@ -433,7 +437,7 @@ it comes to integration with other tools. **Example `.obis/config.json`** -``` +```bash { "fileservice_url": null, "git_annex_hash_as_checksum": true, @@ -444,7 +448,7 @@ it comes to integration with other tools. **Example `.obis/data_set.json`** -``` +```bash { "properties": { "K1": "v1", @@ -458,7 +462,7 @@ it comes to integration with other tools. **init** -``` +```bash obis init [folder] ``` @@ -467,7 +471,7 @@ If not, it will use the current folder. **init_analysis** -``` +```bash obis init_analysis [options] [folder] ``` @@ -477,7 +481,7 @@ with the `-p` option. **commit** -``` +```bash obis commit [options] ``` @@ -486,7 +490,7 @@ define a commit message, the user will be asked to provide one. **sync** -``` +```bash obis sync ``` @@ -496,7 +500,7 @@ applicable, e.g. use "git annex add" instead of "git add". 
**status** -``` +```bash obis status [folder] ``` @@ -505,7 +509,7 @@ parameter. It shows file changes and whether the repository needs to be synchron **clone** -``` +```bash obis clone [options] [data_set_id] ``` @@ -522,7 +526,7 @@ _Note_: This command does not work when `obis_metadata_folder` is set. **move** -``` +```bash obis move [options] [data_set_id] ``` @@ -532,7 +536,7 @@ Note: This command does not work when `obis_metadata_folder` is set. **addref / removeref** -``` +```bash obis addref obis removeref ``` @@ -542,8 +546,7 @@ was moved or copied without using the `move` or `copy` commands. **token** - -``` +```bash obis token get <session_name> [--validity-days] [--validity-weeks] [--validity-months] ``` @@ -558,7 +561,7 @@ obis configuration and used for every subsequent request. **Create an obis repository and commit to openBIS** -``` +```bash # global settings to be use for all obis repositories obis config -g set openbis_url=https://localhost:8888 obis config -g set user=admin @@ -575,7 +578,7 @@ obis commit -m 'message' **Commit to git and sync manually** -``` +```bash # assuming we are in a configured obis repository echo content >> example_file git annex add example_file @@ -585,7 +588,7 @@ obis sync **Create an analysis repository** -``` +```bash # assuming we have a repository 'data1' obis init_analysis -p data1 analysis1 cd analysis1 @@ -611,7 +614,6 @@ configured to last for a long periods of time. PAT generation is explained in depth in `token` command section. - ## 7. Big Data Link Services The Big Data Link Services can be used to download files which are contained in an obis repository. diff --git a/docs/user-documentation/advance-features/excel-import-service.md b/docs/user-documentation/advance-features/excel-import-service.md index 83414821908a5db6a5c2906b42dfa90964f82b85..41c3d44b9ba21732948399beea64289c7316297f 100644 --- a/docs/user-documentation/advance-features/excel-import-service.md +++ b/docs/user-documentation/advance-features/excel-import-service.md @@ -1,11 +1,5 @@ # Excel Import Service -- Created by [Fuentes Serna Juan Mariano - (ID)](%20%20%20%20/display/~juanf%0A), last modified on [Dec 05, - 2022](/pages/diffpagesbyversion.action?pageId=53745981&selectedPageVersions=7&selectedPageVersions=8 "Show changes") - - - ## Introduction The Excel import service reads xls definitions for both types and @@ -327,18 +321,21 @@ version. For every TYPE found in the Excel sheet the next algorithm is performed: - IF ENTITY OR (TYPE.Version > STORED_VERSION) OR (TYPE.Version == FORCE): // If is a new version - IF ITEM NOT EXISTS in openBIS: - CREATE ITEM               - ELSE: // Doesn't exist branch - IF FAIL_IF_EXISTS: - THROW EXCEPTION - IF UPDATE_IF_EXISTS: - UPDATE ITEM - ELSE IF IGNORE_EXISTING: -    PASS // Ignore as requested - ELSE: - PASS // Ignore object that have not been updated +```py +IF ENTITY OR (TYPE.Version > STORED_VERSION) OR (TYPE.Version == FORCE): // If is a new version + IF ITEM NOT EXISTS in openBIS: + CREATE ITEM               + ELSE: // Doesn't exist branch + IF FAIL_IF_EXISTS: + THROW EXCEPTION + IF UPDATE_IF_EXISTS: + UPDATE ITEM + ELSE IF IGNORE_EXISTING: +   PASS // Ignore as requested +ELSE: + PASS // Ignore object that have not been updated +``` + @@ -510,22 +507,22 @@ be contained in ***scripts* directory** under master-data. 
Contents of initialize-master-data.py: - from ch.ethz.sis.openbis.generic.server.asapi.v3 import ApplicationServerApi - from ch.systemsx.cisd.openbis.generic.server import CommonServiceProvider - from ch.ethz.sis.openbis.generic.asapi.v3.dto.service.id import CustomASServiceCode - from ch.ethz.sis.openbis.generic.asapi.v3.dto.service import CustomASServiceExecutionOptions - from ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl import MasterDataRegistrationHelper - import sys - - helper = MasterDataRegistrationHelper(sys.path) - api = CommonServiceProvider.getApplicationContext().getBean(ApplicationServerApi.INTERNAL_SERVICE_NAME) - sessionToken = api.loginAsSystem() - props = CustomASServiceExecutionOptions().withParameter('xls', helper.listXlsByteArrays()) \ - .withParameter('xls_name', 'ELN-LIMS-LIFE-SCIENCES').withParameter('update_mode', 'UPDATE_IF_EXISTS') \ - .withParameter('scripts', helper.getAllScripts()) - result = api.executeCustomASService(sessionToken, CustomASServiceCode("xls-import-api"), props) - - +```python +from ch.ethz.sis.openbis.generic.server.asapi.v3 import ApplicationServerApi +from ch.systemsx.cisd.openbis.generic.server import CommonServiceProvider +from ch.ethz.sis.openbis.generic.asapi.v3.dto.service.id import CustomASServiceCode +from ch.ethz.sis.openbis.generic.asapi.v3.dto.service import CustomASServiceExecutionOptions +from ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl import MasterDataRegistrationHelper +import sys + +helper = MasterDataRegistrationHelper(sys.path) +api = CommonServiceProvider.getApplicationContext().getBean(ApplicationServerApi.INTERNAL_SERVICE_NAME) +sessionToken = api.loginAsSystem() +props = CustomASServiceExecutionOptions().withParameter('xls', helper.listXlsByteArrays()) \ + .withParameter('xls_name', 'ELN-LIMS-LIFE-SCIENCES').withParameter('update_mode', 'UPDATE_IF_EXISTS') \ + .withParameter('scripts', helper.getAllScripts()) +result = api.executeCustomASService(sessionToken, CustomASServiceCode("xls-import-api"), props) +``` There are following parameters to fill (Easiest is to use MasterDataRegistrationHelper to evaluate parameter values): diff --git a/docs/user-documentation/advance-features/jupiterhub-for-openbis.md b/docs/user-documentation/advance-features/jupiterhub-for-openbis.md index 41e4d0f9f141125d84cd6e9859dceace8c869303..f6a2c99a154e0d2ee1ea26cf13c754fe69b5a1c7 100644 --- a/docs/user-documentation/advance-features/jupiterhub-for-openbis.md +++ b/docs/user-documentation/advance-features/jupiterhub-for-openbis.md @@ -54,29 +54,25 @@ execute the images. locally like any other Docker Hub image. 3. **openBIS installation** (optional). - - How to run the official JupyterHub for openBIS image in your local machine -------------------------------------------------------------------------- - - -1\. After downloading the jupyterhub-openbis, find the id of your image. +1. After downloading the jupyterhub-openbis, find the id of your image. - $ docker images - REPOSITORY TAG IMAGE ID CREATED SIZE - openbis/jupyterhub-openbis-sis-20180405 latest 585a9adf333b 23 hours ago 4.75GB +```shell +$ docker images +REPOSITORY TAG IMAGE ID CREATED SIZE +openbis/jupyterhub-openbis-sis-20180405 latest 585a9adf333b 23 hours ago 4.75GB +``` - +2. Run the image with one of the two following commands: -2\. Run the image with one of the two following commands: - -a\. if you want to connect to your productive openBIS instance (e.g. +a. if you want to connect to your productive openBIS instance (e.g. 
https://openbis-elnlims.ch), use the following command: docker run -e OPENBIS_URL=https://openbis-elnlims.ch -e JUPYTERHUB_INTEGRATION_SERVICE_PORT=8002 -e JUPYTERHUB_PORT=8000 -e CERTIFICATE_KEY=/vagrant/config/certificates/default.key -e CERTIFICATE_CRT=/vagrant/config/certificates/default.crt -p 8000:8000 -p 8081:8081 -p 8001:8001 -p 8002:8002 585a9adf333b ./vagrant/initialize/start_jupyterhub.sh -b\. if you have a local openBIS installation for testing, you can run +b. if you have a local openBIS installation for testing, you can run the following command: docker run -v /Users/juanf/jupyterhub-local/home:/home -v /Users/juanf/jupyterhub-local/config/certificates:/vagrant/config/certificates -e OPENBIS_URL=https://129.132.228.42:8443 -e JUPYTERHUB_INTEGRATION_SERVICE_PORT=8002 -e JUPYTERHUB_PORT=8000 -e CERTIFICATE_KEY=/vagrant/config/certificates/default.key -e CERTIFICATE_CRT=/vagrant/config/certificates/default.crt -p 8000:8000 -p 8081:8081 -p 8001:8001 -p 8002:8002 585a9adf333b ./vagrant/initialize/start_jupyterhub.sh @@ -114,7 +110,9 @@ It can probably be done but we are currently not supporting it. ### Check Available Octave Libraries - pkg list +```shell +pkg list +```  @@ -124,15 +122,15 @@ It can probably be done but we are currently not supporting it. ### Check Available Python 3 Libraries - pip freeze - - +```shell +pip freeze +```  ### Add Python 3 Library -1\. Use pip install as you would normally do. The Python 3 kernel often +1. Use pip install as you would normally do. The Python 3 kernel often doesn't need to be restarted to pick up new libraries, but is recommended to do so. @@ -140,14 +138,16 @@ recommended to do so. ### Check Available R Libraries - my_packages <- library()$results - head(my_packages, 1000000) +```python +my_packages <- library()$results +head(my_packages, 1000000) +```  ### Add R Library -1\. Use the install command as you would normally do. The R kernel needs +1. Use the install command as you would normally do. The R kernel needs to be restarted to pick up new libraries.  @@ -155,28 +155,34 @@ to be restarted to pick up new libraries. Modify a currently running container - From Console (for admins) ---------------------------------------------------------------- -1\. Find the container id of the image currently running. +1. Find the container id of the image currently running. - $ docker ps - CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES - a2b76d1dd204 jupyterhub-openbis-sis-20180405 "./vagrant/initial..." 4 seconds ago Up 2 seconds 0.0.0.0:8000-8002->8000-8002/tcp, 0.0.0.0:8081->8081/tcp nervous_leakey +```shell +$ docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +a2b76d1dd204 jupyterhub-openbis-sis-20180405 "./vagrant/initial..." 4 seconds ago Up 2 seconds 0.0.0.0:8000-8002->8000-8002/tcp, 0.0.0.0:8081->8081/tcp nervous_leakey +``` -2\. Log into the container. +2. Log into the container. - $ docker exec -it a2b76d1dd204 bash +```shell +$ docker exec -it a2b76d1dd204 shell +``` ### Add Python Library Add a new library to Python 3 - # First we should move to the environment used by JupyterHub - [root@a2b76d1dd204 /]# export PATH=/vagrant_installation/miniconda3/bin:$PATH - [root@a2b76d1dd204 /]# export LC_ALL=en_US.utf8 - [root@a2b76d1dd204 /]# export LANG=en_US.utf8 - # Install a new python lib using pip - [root@a2b76d1dd204 /]# python --version - Python 3.6.4 :: Anaconda, Inc. 
- [root@a2b76d1dd204 /]# pip install prettytable +```shell +# First we should move to the environment used by JupyterHub +[root@a2b76d1dd204 /]# export PATH=/vagrant_installation/miniconda3/bin:$PATH +[root@a2b76d1dd204 /]# export LC_ALL=en_US.utf8 +[root@a2b76d1dd204 /]# export LANG=en_US.utf8 +# Install a new python lib using pip +[root@a2b76d1dd204 /]# python --version +Python 3.6.4 :: Anaconda, Inc. +[root@a2b76d1dd204 /]# pip install prettytable +``` This type of changes can be validated straightaway in JupyterHub, by just starting a Python 3 notebook. Other changes could require to reboot @@ -193,13 +199,15 @@ lost. Add a new library to R - # First we should move to the environment used by JupyterHub - [root@a2b76d1dd204 /]# export PATH=/vagrant_installation/miniconda3/bin:$PATH - [root@a2b76d1dd204 /]# export LC_ALL=en_US.utf8 - [root@a2b76d1dd204 /]# export LANG=en_US.utf8 - # Install a new r lib using conda - [root@a2b76d1dd204 /]# sudo conda list r- - [root@a2b76d1dd204 /]# sudo conda install -c r -y r-base64enc +```shell +# First we should move to the environment used by JupyterHub +[root@a2b76d1dd204 /]# export PATH=/vagrant_installation/miniconda3/bin:$PATH +[root@a2b76d1dd204 /]# export LC_ALL=en_US.utf8 +[root@a2b76d1dd204 /]# export LANG=en_US.utf8 +# Install a new r lib using conda +[root@a2b76d1dd204 /]# sudo conda list r- +[root@a2b76d1dd204 /]# sudo conda install -c r -y r-base64enc +``` This type of changes can be validated straightaway in JupyterHub, by just starting a R notebook. Other changes could require to reboot @@ -209,26 +217,24 @@ JupyterHub. ### Save the state of a running container as a new image - - If you know that you have made significant changes that you want to keep until you build a new docker recipe, you have the option to save the running container as a new image. - - bs-mbpr28:jupyterhub_reference_installation juanf$ docker ps - CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES - a2b76d1dd204 jupyterhub-openbis-sis-20180405 "./vagrant/initial..." 37 minutes ago Up 37 minutes 0.0.0.0:8000-8002->8000-8002/tcp, 0.0.0.0:8081->8081/tcp lucid_stonebraker +```shell +bs-mbpr28:jupyterhub_reference_installation juanf$ docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +a2b76d1dd204 jupyterhub-openbis-sis-20180405 "./vagrant/initial..." 37 minutes ago Up 37 minutes 0.0.0.0:8000-8002->8000-8002/tcp, 0.0.0.0:8081->8081/tcp lucid_stonebraker - $ docker commit a2b76d1dd204 jupyterhub-openbis-sis-juanextensions-20180406 - sha256:5dd0036664c75a21d6a62b80bf5780e70fcad345bb12a7ad248d01e29a3caa99 - $ docker images - REPOSITORY TAG IMAGE ID CREATED SIZE - jupyterhub-openbis-sis-juanextensions-20180406 latest 5dd0036664c7 4 seconds ago 4.75GB - jupyterhub-openbis-sis-20180405 latest 585a9adf333b 23 hours ago 4.75GB +$ docker commit a2b76d1dd204 jupyterhub-openbis-sis-juanextensions-20180406 +sha256:5dd0036664c75a21d6a62b80bf5780e70fcad345bb12a7ad248d01e29a3caa99 +$ docker images +REPOSITORY TAG IMAGE ID CREATED SIZE +jupyterhub-openbis-sis-juanextensions-20180406 latest 5dd0036664c7 4 seconds ago 4.75GB +jupyterhub-openbis-sis-20180405 latest 585a9adf333b 23 hours ago 4.75GB +``` - Extend a docker image using a docker recipe (for maintenance) ------------------------------------------------------------- @@ -239,13 +245,15 @@ latest official docker image distributed by SIS. Using our last example, let's create a file called "Dockerfile" and with the content shown below. 
- # vim:set ft=dockerfile: - FROM openbis/jupyterhub-openbis-sis-20180405 - ## Adding Python 3 library - RUN export PATH=/vagrant_installation/miniconda3/bin:$PATH && \ - export LC_ALL=en_US.utf8 && \ - export LANG=en_US.utf8 && \ - pip install prettytable +```shell +# vim:set ft=dockerfile: +FROM openbis/jupyterhub-openbis-sis-20180405 +## Adding Python 3 library +RUN export PATH=/vagrant_installation/miniconda3/bin:$PATH && \ + export LC_ALL=en_US.utf8 && \ + export LANG=en_US.utf8 && \ + pip install prettytable +``` Please change the name of the image in the file to the one you are using. @@ -255,51 +263,62 @@ the official repository. > :warning: **It is best practice to include both the name of the user and the creation date in the image name. This will help when dealing with many versions created by different users at different times**. - $ docker build -t jupyterhub-openbis-sis-juanextensions-recipe-20180406 . - Sending build context to Docker daemon 4.957GB - Step 1/2 : FROM openbis/jupyterhub-openbis-sis-20180405 - .... - Step 2/2 : RUN export PATH=/vagrant_installation/miniconda3/bin:$PATH && export LC_ALL=en_US.utf8 && export LANG=en_US.utf8 && pip install prettytable - .... - Successfully tagged jupyterhub-openbis-sis-juanextensions-recipe-20180406:latest -  +```shell +$ docker build -t jupyterhub-openbis-sis-juanextensions-recipe-20180406 . +Sending build context to Docker daemon 4.957GB +Step 1/2 : FROM openbis/jupyterhub-openbis-sis-20180405 +.... +Step 2/2 : RUN export PATH=/vagrant_installation/miniconda3/bin:$PATH && export LC_ALL=en_US.utf8 && export LANG=en_US.utf8 && pip install prettytable +.... +Successfully tagged jupyterhub-openbis-sis-juanextensions-recipe-20180406:latest + The new image is now available and can be started as described above. - $ docker images - REPOSITORY TAG IMAGE ID CREATED SIZE - jupyterhub-openbis-sis-juanextensions-recipe-20180406 latest a0106501b223 3 minutes ago 4.75GB - openbis/jupyterhub-openbis-sis-20180405 latest 585a9adf333b 23 hours ago 4.75GB +$ docker images +REPOSITORY TAG IMAGE ID CREATED SIZE +jupyterhub-openbis-sis-juanextensions-recipe-20180406 latest a0106501b223 3 minutes ago 4.75GB +openbis/jupyterhub-openbis-sis-20180405 latest 585a9adf333b 23 hours ago 4.75GB +``` + How to start a jupyterhub-openbis docker image on a productive JupyterHub server -------------------------------------------------------------------------------- - - > :warning: **You can only have **ONE** jupyterhub-openbis image running on a server at one given time, since JupyterHub makes use of certain ports on the machine that are also configured in openBIS**. 1. Find the jupyterhub-openbis-start.sh file in your server (please ask your admin). -2\. Find the container id of the image that is currently running. +2. Find the container id of the image that is currently running. - $ docker ps - CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES - a2b76d1dd204 jupyterhub-openbis-sis-20180405 "./vagrant/initial..." 4 seconds ago Up 2 seconds 0.0.0.0:8000-8002->8000-8002/tcp, 0.0.0.0:8081->8081/tcp nervous_leakey +```shell +$ docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +a2b76d1dd204 jupyterhub-openbis-sis-20180405 "./vagrant/initial..." 4 seconds ago Up 2 seconds 0.0.0.0:8000-8002->8000-8002/tcp, 0.0.0.0:8081->8081/tcp nervous_leakey +``` -3\. Stop the current container. - $ docker kill a2b76d1dd204 - a2b76d1dd204 +3. Stop the current container. -4\. 
Edit the  jupyterhub-openbis-start.sh file in your server and update +```shell +$ docker kill a2b76d1dd204 +a2b76d1dd204 +``` + + +4. Edit the  jupyterhub-openbis-start.sh file in your server and update the name of the image it runs to the one of your choice - docker run -v /Users/juanf/Documents/programming/git/jupyter-openbis-integration/jupyterhub_reference_installation/home:/home -v /Users/juanf/Documents/programming/git/jupyter-openbis-integration/jupyterhub_reference_installation/vagrant/config/certificates:/vagrant/config/certificates -e OPENBIS_URL=https://129.132.229.37:8443 -e JUPYTERHUB_INTEGRATION_SERVICE_PORT=8002 -e JUPYTERHUB_PORT=8000 -e CERTIFICATE_KEY=/vagrant/config/certificates/default.key -e CERTIFICATE_CRT=/vagrant/config/certificates/default.crt -p 8000:8000 -p 8081:8081 -p 8001:8001 -p 8002:8002 jupyterhub-openbis-sis-20180405 ./vagrant/initialize/start_jupyterhub.sh +```shell +docker run -v /Users/juanf/Documents/programming/git/jupyter-openbis-integration/jupyterhub_reference_installation/home:/home -v /Users/juanf/Documents/programming/git/jupyter-openbis-integration/jupyterhub_reference_installation/vagrant/config/certificates:/vagrant/config/certificates -e OPENBIS_URL=https://129.132.229.37:8443 -e JUPYTERHUB_INTEGRATION_SERVICE_PORT=8002 -e JUPYTERHUB_PORT=8000 -e CERTIFICATE_KEY=/vagrant/config/certificates/default.key -e CERTIFICATE_CRT=/vagrant/config/certificates/default.crt -p 8000:8000 -p 8081:8081 -p 8001:8001 -p 8002:8002 jupyterhub-openbis-sis-20180405 ./vagrant/initialize/start_jupyterhub.sh +``` -5\. Start the new image. +5. Start the new image. - $ ./jupyterhub-openbis-start.sh +```shell +$ ./jupyterhub-openbis-start.sh +``` Other useful Docker commands ---------------------------- @@ -308,31 +327,39 @@ Other useful Docker commands > :warning: **It is best practice to include both the name of the user and the creation date in the image name. This will help when dealing with many versions created by different users at different times**. 
- $ docker save jupyterhub-openbis-sis-20180405 > jupyterhub-openbis-sis-20180405.tar - $ ls -lah - total 9681080 - -rw-r--r-- 1 juanf 1029 4.6G Apr 5 15:38 jupyterhub-openbis-sis-20180405.tar +```shell +$ docker save jupyterhub-openbis-sis-20180405 > jupyterhub-openbis-sis-20180405.tar +$ ls -lah +total 9681080 +-rw-r--r-- 1 juanf 1029 4.6G Apr 5 15:38 jupyterhub-openbis-sis-20180405.tar +``` ### Load an image from a tar file - $ docker load < jupyterhub-openbis-sis-20180405.tar - 8feeda13d3ce: Loading layer [==================================================>] 27.65kB/27.65kB - 622cd2c170f3: Loading layer [==================================================>] 152MB/152MB - 633fa40a6caa: Loading layer [==================================================>] 2.048kB/2.048kB - 7219a9159e4f: Loading layer [==================================================>] 223.9MB/223.9MB - 678b55e862c7: Loading layer [==================================================>] 4.377GB/4.377GB - Loaded image: jupyterhub-openbis-sis-20180405:latest - $ docker images - REPOSITORY TAG IMAGE ID CREATED SIZE - jupyterhub-openbis-sis-20180405 latest 585a9adf333b 24 hours ago 4.75GB +```shell +$ docker load < jupyterhub-openbis-sis-20180405.tar +8feeda13d3ce: Loading layer [==================================================>] 27.65kB/27.65kB +622cd2c170f3: Loading layer [==================================================>] 152MB/152MB +633fa40a6caa: Loading layer [==================================================>] 2.048kB/2.048kB +7219a9159e4f: Loading layer [==================================================>] 223.9MB/223.9MB +678b55e862c7: Loading layer [==================================================>] 4.377GB/4.377GB +Loaded image: jupyterhub-openbis-sis-20180405:latest +$ docker images +REPOSITORY TAG IMAGE ID CREATED SIZE +jupyterhub-openbis-sis-20180405 latest 585a9adf333b 24 hours ago 4.75GB +``` ### Remove an image - $ docker rmi jupyterhub-openbis-sis-juanextensions-recipe-20180406 +```shell +$ docker rmi jupyterhub-openbis-sis-juanextensions-recipe-20180406 +``` ### Remove all stopped containers - $ docker rm $(docker ps -aq) +```shell +$ docker rm $(docker ps -aq) +``` openBIS ELN Integration Configuration ------------------------------------- @@ -341,32 +368,32 @@ On the openBIS end, what needs to be done is to append the following lines into your ELN instance profile: servers/core-plugins/eln-lims/1/as/webapps/eln-lims/html/etc/InstanceProfile.js - # Ansible yml syntax, replace the variables in the double curly braces by the appropriate values: - this.jupyterIntegrationServerEndpoint = "https://{{ openbis_jupyterhub_hostname }}:{{ openbis_jupyterhub_communication_port }}"; - this.jupyterEndpoint = "https://{{ openbis_jupyterhub_hostname }}/"; +```js +# Ansible yml syntax, replace the variables in the double curly braces by the appropriate values: +this.jupyterIntegrationServerEndpoint = "https://{{ openbis_jupyterhub_hostname }}:{{ openbis_jupyterhub_communication_port }}"; +this.jupyterEndpoint = "https://{{ openbis_jupyterhub_hostname }}/"; - # Example: - this.jupyterIntegrationServerEndpoint = "https://jupyterhub-demo.labnotebook.ch:80"; - this.jupyterEndpoint = "https://jupyterhub-demo.labnotebook.ch/"; +# Example: +this.jupyterIntegrationServerEndpoint = "https://jupyterhub-demo.labnotebook.ch:80"; +this.jupyterEndpoint = "https://jupyterhub-demo.labnotebook.ch/"; On the jupyterhub end, the docker command would then look as follows: - docker run -e OPENBIS_URL=https://{{ openbis_public_hostname }} -e 
JUPYTERHUB_INTEGRATION_SERVICE_PORT=8002 -e JUPYTERHUB_PORT=8000 -e CERTIFICATE_KEY=/vagrant/config/certificates/default.key -e CERTIFICATE_CRT=/vagrant/config/certificates/default.crt -p 8000:8000 -p 8081:8081 -p 8001:8001 -p {{ openbis_jupyterhub_communication_port }}:8002 585a9adf333b ./vagrant/initialize/start_jupyterhub.sh +docker run -e OPENBIS_URL=https://{{ openbis_public_hostname }} -e JUPYTERHUB_INTEGRATION_SERVICE_PORT=8002 -e JUPYTERHUB_PORT=8000 -e CERTIFICATE_KEY=/vagrant/config/certificates/default.key -e CERTIFICATE_CRT=/vagrant/config/certificates/default.crt -p 8000:8000 -p 8081:8081 -p 8001:8001 -p {{ openbis_jupyterhub_communication_port }}:8002 585a9adf333b ./vagrant/initialize/start_jupyterhub.sh - # Example: - openbis_public_hostname: openbis-test.ethz.ch - openbis_jupyterhub_hostname: jupyterhub-test.ethz.ch - openbis_jupyterhub_communication_port: 80 +# Example: +openbis_public_hostname: openbis-test.ethz.ch +openbis_jupyterhub_hostname: jupyterhub-test.ethz.ch +openbis_jupyterhub_communication_port: 80 The only port you need to open on your jupyterhub instance is the one matching {{ openbis\_jupyterhub\_communication\_port }}. Using firewall-cmd this would look as follows: - firewall-cmd --permanent --zone=public --add-rich-rule='rule family="ipv4" source address="{{ openbis_jupyterhub_openbis_hostname }}" port protocol="tcp" port="{{ openbis_jupyterhub_communication_port }}" accept' - - +firewall-cmd --permanent --zone=public --add-rich-rule='rule family="ipv4" source address="{{ openbis_jupyterhub_openbis_hostname }}" port protocol="tcp" port="{{ openbis_jupyterhub_communication_port }}" accept' +``` Troubleshooting Connectivity to openBIS --------------------------------------- @@ -400,7 +427,6 @@ just log in to JupyterHub there is a new session available that needs to be handed over to the Jupyter server. For that just stop and start it again. - Step 1 : Go to your control panel clicking on the button of the top right corner. diff --git a/docs/user-documentation/general-admin-users/admins-documentation/associate-file-types-to-dataset-types.md b/docs/user-documentation/general-admin-users/admins-documentation/associate-file-types-to-dataset-types.md index e4d1511f698a8bf6c586964a839a39cee9d586e9..7086f2bfb85b48e410c5f7d6dd0bf3ce11c1d213 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/associate-file-types-to-dataset-types.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/associate-file-types-to-dataset-types.md @@ -1,6 +1,6 @@ # Associate File Types to Dataset Types -[](# "Print this article") + It is possible to associate given file types to given *Dataset* *types*. diff --git a/docs/user-documentation/general-admin-users/admins-documentation/create-templates-for-objects.md b/docs/user-documentation/general-admin-users/admins-documentation/create-templates-for-objects.md index 898e0360769cbec50562e13bdb08497b2cb7421e..c27498b8eb58640710c46c1af7b011472473fd61 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/create-templates-for-objects.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/create-templates-for-objects.md @@ -1,6 +1,6 @@ # Create Templates for Objects -[](# "Print this article") + It is possible to create templates for *Objects*. 
Templates are useful @@ -40,8 +40,6 @@ as shown below  -See [Use template for Experimental -Steps](https://openbis.ch/index.php/docs/user-documentation/lab-notebook/use-templates-for-experimental-steps/) -for more info on how to use templates. +See [Use template for Experimental Steps](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/lab-notebook.html#use-templates-for-experimental-steps) for more info on how to use templates. Updated on April 26, 2023 diff --git a/docs/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-materials-and-samples.md b/docs/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-materials-and-samples.md index ed5949732eef17294c399791e539b0ab488b6a55..fb268770b053cf56b706e99ebc1c2d27118f2ef9 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-materials-and-samples.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-materials-and-samples.md @@ -3,7 +3,7 @@ Customise Inventory Of Materials And Samples ## Create Collections of Materials -[](# "Print this article") +  @@ -84,7 +84,7 @@ Updated on April 26, 2023 ## Delete Collections -[](# "Print this article") +  @@ -110,7 +110,7 @@ Updated on February 6, 2023 ## Enable Storage Widget on Sample Forms -[](# "Print this article") + When a new *Object type* is created by an *Instance admin (*see [New @@ -149,7 +149,7 @@ Updated on April 26, 2023 ## Configure Lab Storage -[](# "Print this article") + Fridges and freezers can be configured in the **Settings**, under @@ -197,7 +197,7 @@ Updated on April 26, 2023 ## Add metadata to Storage Positions -[](# "Print this article") +  diff --git a/docs/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-protocols.md b/docs/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-protocols.md index 957bb53e43893285d80bc6cb3460f0ab670f0882..7f04ebd0aba2b0170f52a31269446be108b24d76 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-protocols.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-protocols.md @@ -3,7 +3,7 @@ Customise Inventory Of Protocols ## Create Collections of Protocols -[](# "Print this article") + *Collections* are folders used to organise *Objects* in the **Methods** @@ -43,7 +43,7 @@ Updated on April 26, 2023 ## Enable Protocols in Settings -[](# "Print this article") + If a new *Object type* for a protocol is created by an *Instance admin* diff --git a/docs/user-documentation/general-admin-users/admins-documentation/customise-parents-and-children-sections-in-object-forms.md b/docs/user-documentation/general-admin-users/admins-documentation/customise-parents-and-children-sections-in-object-forms.md index 404f6ee533c925b095ed1ab0535a55fb3091b43d..81c893e9aca02f226e56a127eeb1e58c1c97f96f 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/customise-parents-and-children-sections-in-object-forms.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/customise-parents-and-children-sections-in-object-forms.md @@ -1,11 +1,5 @@ # Customise Parents and Children Sections in Object Forms -[](# "Print this article") - - - - - The **Parents** and **Children** sections are automatically created in all *Object* forms. 
It is possible to customise or remove these sections, from the **Settings**, under **Utilities**. @@ -18,7 +12,7 @@ Protocol to the form, we use the **Search** or **Paste** options next to General Protocol. If we want to add another parent, for example a **Sample**, we need to use the **Search Any** or **Paste Any** next to Parents. See also [Add parents and children to Experimental -Steps.](https://openbis.ch/index.php/docs/user-documentation-20-10-3/lab-notebook/add-parents-and-children-to-experimental-steps/) +Steps.](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/lab-notebook.html#add-parents-and-children-to-experimental-steps)  diff --git a/docs/user-documentation/general-admin-users/admins-documentation/customise-the-main-menu.md b/docs/user-documentation/general-admin-users/admins-documentation/customise-the-main-menu.md index 9d0a796995f29271987ae591e0e52cc5e69aed94..e242e4c231fa32fd50a65cdfcafe406f7a6ae824 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/customise-the-main-menu.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/customise-the-main-menu.md @@ -1,6 +1,6 @@ # Customise the Main Menu -[](# "Print this article") +  @@ -30,39 +30,39 @@ The main menu can be customised from the **Settings**, under 1. **showLabNotebook**: if unselected, the **Lab Notebook** section of the main menu ([Lab - Notebook)](https://openbis.ch/index.php/docs/user-documentation-20-10-3/lab-notebook/) + Notebook)](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/lab-notebook.html) will be hidden. 2. **showInventory**: if unselected, the **Inventory** section of the main menu ([Inventory of Materials and - Methods](https://openbis.ch/index.php/docs/user-documentation-20-10-3/inventory-of-materials-and-methods/)) + Methods](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/inventory-of-materials-and-methods.html)) will be hidden. 3. **showStock**: if unselected, the **Stock** section of the main menu ([Managing Lab Stocks and - Orders](https://openbis.ch/index.php/docs/user-documentation-20-10-3/managing-lab-stocks-and-orders-2/)) + Orders](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/managing-lab-stocks-and-orders-2.html)) will be hidden. 4. **showObjectBrowser**: if unselected, the **Object Browser** under **Utilities** in the main menu ([Object - Browser)](https://openbis.ch/index.php/docs/user-documentation-20-10-3/additional-functionalities/browse-entries-by-type/) + Browser)](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/additional-functionalities.html#browse-entries-by-type) will be hidden. 5. **showStorageManager**: if unselected, the **Storage Manager** under **Utilities** in the main menu [(Storage - manager](https://openbis.ch/index.php/docs/user-documentation-20-10-3/inventory-of-materials-and-methods/overview-of-lab-storages/)) + manager](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/managing-lab-stocks-and-orders-2.html#)) will be hidden. 6. **showAdvancedSearch**: if unselected, the **Advanced Search** under **Utilities** in the main menu ([Advanced - Search)](https://openbis.ch/index.php/docs/user-documentation-20-10-3/additional-functionalities/search/) + Search)](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/search.html) be hidden. 7. 
**showUnarchivingHelper**: if unselected, the **Unarchiving Helper** and **Archiving Helper** under **Utilities** in the main menu ([Data - archiving)](https://openbis.ch/index.php/docs/user-documentation-20-10-3/data-archiving/) + archiving)](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/data-archiving.html) will be hidden. 8. **showTrashcan**: if unselected, the **Traschcan** under **Utilities** in the main menu - ([Trashcan](https://openbis.ch/index.php/docs/user-documentation-20-10-3/additional-functionalities/trashcan/)) + ([Trashcan](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/additional-functionalities.html#trashcan)) will be hidden. 9. **showVocabularyViewer:** if unselected, the **Vocabulary Browser** under **Utilities** in the main menu ([Vocabulary - browser](https://openbis.ch/index.php/docs/user-documentation-20-10-3/additional-functionalities/vocabulary-browser/)) + browser](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/additional-functionalities.html#vocabulary-browser)) will be hidden. 10. **showUserManager**: if unselected, the **User Manager** under **Utilities** in the main menu ([User @@ -70,24 +70,20 @@ The main menu can be customised from the **Settings**, under will be hidden. 11. **showUserProfile**: if unselected, the **User Profile** under **Utilities** in the main menu ([User - Profile](https://openbis.ch/index.php/docs/admin-documentation-20-10-3/user-registration/user-profile/)) + Profile](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/user-registration.html#user-profile)) will be hidden. 12. **showZenodoExportBuilder**: if unselected, the **Zenodo - Export** under **Utilities -> Exports** in the main menu + Export** under **Utilities -> Exports** in the main menu ([Export to - Zenodo](https://openbis.ch/index.php/docs/user-documentation-20-10-3/data-export/export-to-zenodo/)) + Zenodo](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/data-export.html#export-to-zenodo)) will be hidden. 13. **showBarcodes**: if unselected, the **Barcodes Generator** under **Utilities** in the main menu - ([Barcodes)](https://openbis.ch/index.php/docs/user-documentation-20-10-3/inventory-of-materials-and-methods/barcodes/) + ([Barcodes)](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/barcodes.html) will be hidden. - - ## Lab Notebook menu - - It is also possible to customise which entities should be shown under *Experiments/Collections* in the main menu under the **Lab Notebook** section. 
diff --git a/docs/user-documentation/general-admin-users/admins-documentation/database-navigation-in-admin-ui.md b/docs/user-documentation/general-admin-users/admins-documentation/database-navigation-in-admin-ui.md index 760e3b3f8a5521a98e72b2f42f3e24524bd70941..0b7d2f9480d1df1d800177c8fa4ddaf4dab08981 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/database-navigation-in-admin-ui.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/database-navigation-in-admin-ui.md @@ -1,6 +1,6 @@ # Database navigation in admin UI -[](# "Print this article") +  diff --git a/docs/user-documentation/general-admin-users/admins-documentation/enable-archiving-to-long-term-storage.md b/docs/user-documentation/general-admin-users/admins-documentation/enable-archiving-to-long-term-storage.md index ccd47900aa2ab6a173cb6aaa945df0e13b729cf9..2a72785688a369075f4a32035164ba7c5eaf5778 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/enable-archiving-to-long-term-storage.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/enable-archiving-to-long-term-storage.md @@ -1,6 +1,6 @@ # Enable archiving to Long Term Storage -[](# "Print this article") +  @@ -9,7 +9,7 @@ openBIS supports archiving of datasets to Strongbox and StronLink ([https://www.strongboxdata.com/](https://www.strongboxdata.com/)) as described in [Datasets -Archiving](https://unlimited.ethz.ch/display/openBISDoc2010/Archiving+Datasets) +Archiving](https://openbis.readthedocs.io/en/latest/system-admin-documentation/advanced-features/archive-datasets.html)  @@ -35,6 +35,6 @@ ELN Settings: More information on archiving and unarchiving datasets can be found here: [Data -archiving](https://openbis.ch/index.php/docs/user-documentation-20-10-3/data-archiving/) +archiving](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/data-archiving.html) Updated on April 26, 2023 diff --git a/docs/user-documentation/general-admin-users/admins-documentation/enable-barcodes.md b/docs/user-documentation/general-admin-users/admins-documentation/enable-barcodes.md index cc9489915bf28ad4062ca688156314e04a7c9b3e..136d887a6c735d0f312759e6eb0f746e292b737f 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/enable-barcodes.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/enable-barcodes.md @@ -1,6 +1,6 @@ # Enable Barcodes -[](# "Print this article") + In order to be able to add custom barcodes to *Objects*, an *Instance Admin* needs to add the $BARCODE property to the object type for which @@ -35,6 +35,6 @@ barcode icon will be added above the menu. 
Information on how to use the Barcode functionality in openBIS can be found -here: [Barcodes](https://openbis.ch/index.php/docs/user-documentation-20-10-3/inventory-of-materials-and-methods/barcodes/) +here: [Barcodes](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/barcodes.html) Updated on April 26, 2023 diff --git a/docs/user-documentation/general-admin-users/admins-documentation/enable-transfer-to-data-repositories.md b/docs/user-documentation/general-admin-users/admins-documentation/enable-transfer-to-data-repositories.md index fdd40df6bcd8ddc3eff1087af7a9fa9bfb04a4b3..342ad38ef2ea5544526d86b221ba050999b4159c 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/enable-transfer-to-data-repositories.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/enable-transfer-to-data-repositories.md @@ -1,6 +1,6 @@ # Enable Transfer to Data Repositories -[](# "Print this article") + Currently openBIS offers an integration with the **Zenodo** data @@ -10,7 +10,7 @@ This enables data direct data transfer from openBIS to Zenodo. This feature needs to be configured by a *system admin* as explained here: [openBIS DSS configuration -file](https://unlimited.ethz.ch/display/openBISDoc2010/Installation+and+Administrators+Guide+of+the+openBIS+Data+Store+Server#InstallationandAdministratorsGuideoftheopenBISDataStoreServer-Configurationfile). +file](https://openbis.readthedocs.io/en/latest/system-admin-documentation/installation/installation-and-configuration-guide.html#installation-steps). If this is done, the Zenodo Export needs to be made visible in the ELN UI by a lab manager, who has should have admin rights for the diff --git a/docs/user-documentation/general-admin-users/admins-documentation/history-overview.md b/docs/user-documentation/general-admin-users/admins-documentation/history-overview.md index 50239f8d998261bcef099e7255e86fda5cf24523..367d3eaef9ff903fa1da09b46c0de2b6def282a4 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/history-overview.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/history-overview.md @@ -3,7 +3,7 @@ History Overview ## History of deletions -[](# "Print this article") +  @@ -51,13 +51,13 @@ Updated on October 9, 2022 ## History of freezing -[](# "Print this article") +  In the admin UI it is possible to have an overview of all frozen entries in openBIS. Frozen entries can no longer be modified (see [Freeze -Entities](https://openbis.ch/index.php/docs/user-documentation-20-10-3/additional-functionalities/freeze-entities/)). +Entities](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/additional-functionalities.html#freeze-entities)). The table showing the history of freezing can be found under the **Tools** section in the admin UI, as shown below. diff --git a/docs/user-documentation/general-admin-users/admins-documentation/inventory-overview.md b/docs/user-documentation/general-admin-users/admins-documentation/inventory-overview.md index feb9932c66255ac084ac9080331b3d8294186956..1d6f1f71b3f3a658203844b300d7ec1f45b5b0f7 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/inventory-overview.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/inventory-overview.md @@ -1,6 +1,6 @@ # Inventory overview -[](# "Print this article") + The default Inventory contains two folders: **Materials** and **Methods**. 
@@ -48,7 +48,7 @@ created by the *Instance admin*, based on the needs of the lab. It is possible to add additional folders in the Inventory, for example for **Equipment** (see **[Create new Inventory -Spaces](https://openbis.ch/index.php/docs/admin-documentation-20-10-3/space-management/create-new-inventory-spaces/))**. +Spaces](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/space-management.html))**.  diff --git a/docs/user-documentation/general-admin-users/admins-documentation/masterdata-exports-and-imports.md b/docs/user-documentation/general-admin-users/admins-documentation/masterdata-exports-and-imports.md index d2b417916fa8555b94aaadc271350e3458ec0357..4613f97e14b067863765a899eadcffa83870beee 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/masterdata-exports-and-imports.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/masterdata-exports-and-imports.md @@ -1,6 +1,6 @@ # Masterdata exports and imports -[](# "Print this article") +  @@ -60,7 +60,7 @@ explained above:  -1. Go to the **Tools** section and select **Import -> All** from the +1. Go to the **Tools** section and select **Import -> All** from the menu. 2. Upload the file you exported before using the **CHOOSE FILE** button. diff --git a/docs/user-documentation/general-admin-users/admins-documentation/move-collections-to-a-different-project.md b/docs/user-documentation/general-admin-users/admins-documentation/move-collections-to-a-different-project.md index fd82b9158bdb2bab0fd3f3bb3f76e8a645bc585b..bab3dcd964e15b4d9de4353e2180599fa95af254 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/move-collections-to-a-different-project.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/move-collections-to-a-different-project.md @@ -1,6 +1,6 @@ # Move Collections to a different Project -[](# "Print this article") +  diff --git a/docs/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.md b/docs/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.md index 7b6989f437d6b2e11029e963aea45893d56bb0f7..7a25591abc1ed8732775025ab46ede5bd6c54254 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.md @@ -3,36 +3,20 @@ Multi Group Set Up ## General ELN Settings -[](# "Print this article") - - - In a multi-group instance an *Instance admin* can customise the General ELN Settings. - - The Settings can be access from the main menu, under **Utilities**. - -  - - The General ELN Settings are Settings that are not specific to any of the defined groups group , as shown below. - -  - - The General ELN Settings consist of two parts: - - 1. **Instance Settings**. These settings affect the whole instance, it is not possible to customise them on a group level. 2. **Group Settings**. These settings affect all general *Spaces* that @@ -41,9 +25,8 @@ The General ELN Settings consist of two parts: instances](https://unlimited.ethz.ch/display/openBISDoc2010/User+Group+Management+for+Multi-groups+openBIS+Instances)). This is the case, for example, if *Spaces* are manually created and they do not belong to any group (see [Create new ELN - Spaces](https://openbis.ch/index.php/docs/admin-documentation/space-management/create-new-eln-spaces/)). 
+ Spaces](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/space-management.html#create-new-eln-spaces)). -  @@ -55,20 +38,10 @@ Inventory.  - - - - and **Horizon**, **Snf** do not belong to any group in the Lab notebook. - -  - - - - ### Instance Settings  @@ -76,64 +49,58 @@ and **Horizon**, **Snf** do not belong to any group in the Lab notebook. 1. **Custom widget**s. This section allows to enable the Rich Text Editor or Spreadsheet component for a given field, as described in [Enable Rich Text Editor or Spreadsheet - Widgets;](https://openbis.ch/index.php/docs/admin-documentation/new-entity-type-registration/enable-rich-text-editor-or-spreadsheet-widgets/) + Widgets;](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/new-entity-type-registration.html#enable-rich-text-editor-or-spreadsheet-widgets) 2. **Forced Monospace Font**. This section allows to force the use of monospace font (i.e. fixed width) for selected MULTILINE\_VARCHAR properties. This is useful for example for plasmid sequences. 3. **Dataset types for filenames**. This section allows to associate files with a given extension to a specific dataset type, as described in [Associate File Types to Dataset - Types](https://openbis.ch/index.php/docs/admin-documentation/associate-file-types-to-dataset-types/). - - - - + Types](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/associate-file-types-to-dataset-types.html). ### Group Settings - - 1. **Storages**. In this section the storages for samples to be used in *Spaces* not belonging to any predefined group (see above), can be created, as described in [Configure Lab - Storage;](https://openbis.ch/index.php/docs/admin-documentation/customise-inventory-of-materials-and-samples/configure-lab-storage/) + Storage;](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-materials-and-samples.html#configure-lab-storage) 2. **Templates**. In this section, the templates for a given *Object type* to be used in *Spaces* not belonging to any predefined group (see above) can be created, as described in [Create Templates for - Objects](https://openbis.ch/index.php/docs/admin-documentation/create-templates-for-objects/); + Objects](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/create-templates-for-objects.html); 3. **Object types definition extension**. In this section, it is possible to: 1. Define if one *Object type* is a protocol. If an *Object type* is defined as a protocol, it is possible to create a local copy of it under an Experiment, when linking to it as a parent, as described in [Enable Protocols in - Settings;](https://openbis.ch/index.php/docs/admin-documentation/customise-inventory-of-protocols/enable-protocols-in-settings/) + Settings;](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-protocols.html#enable-protocols-in-settings) 2. 
Enable the storage widget for an *Object type,* as described in [Enable Storage Widget on Sample - Forms](https://openbis.ch/index.php/docs/admin-documentation/customise-inventory-of-materials-and-samples/enable-storage-widget-on-sample-forms/) + Forms](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-materials-and-samples.html#enable-storage-widget-on-sample-forms) 3. Define if the *Object type* should be shown in drop downs, as described in [Enable Objects in - dropdowns](https://openbis.ch/index.php/docs/admin-documentation/new-entity-type-registration/enable-objects-in-dropdowns/); + dropdowns](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/new-entity-type-registration.html#enable-objects-in-dropdowns); 4. Define if the *Object type* should be shown in the main menu under the Lab notebook section. By default objects are not shown in the main menu in the Inventory section. 5. Customise the *Parents* and *Children* sections for an *Object type* as described in [Customise Parents and Children Sections in Object - Forms](https://openbis.ch/index.php/docs/admin-documentation/customise-parents-and-children-sections-in-object-forms/); + Forms](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-parents-and-children-sections-in-object-forms.html); 4. **Inventory Spaces**. It is possible to move *Spaces* from the Inventory section to the Lab notebook section and vice-versa as described in [Move Spaces between Lab Notebook and - Inventory](https://openbis.ch/index.php/docs/admin-documentation/space-management/move-space-between-lab-notebook-and-inventory/) + Inventory](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/space-management.html#move-space-between-lab-notebook-and-inventory) 5. **Main menu**. The main menu for the *Spaces* that do not belong to any predefined group (see above) can be customised here, as described in [Customise the Main - Menu;](https://openbis.ch/index.php/docs/admin-documentation/customise-the-main-menu/) + Menu;](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-the-main-menu.html) 6. **Miscellaneous**. In this section it is possible to: 1. Show the dataset archiving buttons in *Spaces* that do not belong to any predefined group. Please note that this is not available by default, but the infrastructure for [archiving to - tapes](https://openbis.ch/index.php/docs/user-documentation/data-archiving/) + tapes](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/data-archiving.html) (StrongBox/StrongLink) needs to be put in place by a *system admin ([Multi data set archiving](https://unlimited.ethz.ch/display/openBISDoc2010/Multi+data+set+archiving))*. @@ -152,7 +119,7 @@ Updated on April 26, 2023 ## Group ELN Settings -[](# "Print this article") +  @@ -187,42 +154,42 @@ In the group settings the following is configurable: 1. **Storages**. In this section the group storages for samples can be created, as described in [Configure Lab - Storage;](https://openbis.ch/index.php/docs/admin-documentation/customise-inventory-of-materials-and-samples/configure-lab-storage/) + Storage;](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-materials-and-samples.html#configure-lab-storage) 2. 
**Templates**. In this section, the templates for a given *Object type* can be created, as described in [Create Templates for - Objects](https://openbis.ch/index.php/docs/admin-documentation/create-templates-for-objects/); + Objects](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/create-templates-for-objects.html); 3. **Object types definition extension**. In this section, it is possible to: 1. Define if one *Object type* is a protocol. If an *Object type* is defined as a protocol, it is possible to create a local copy of it under an Experiment, when linking to it as a parent, as described in [Enable Protocols in - Settings;](https://openbis.ch/index.php/docs/admin-documentation/customise-inventory-of-protocols/enable-protocols-in-settings/) + Settings;](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-protocols.html#enable-protocols-in-settings) 2. Enable the storage widget for an *Object type,* as described in [Enable Storage Widget on Sample - Forms](https://openbis.ch/index.php/docs/admin-documentation/customise-inventory-of-materials-and-samples/enable-storage-widget-on-sample-forms/) + Forms](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-inventory-of-materials-and-samples.html#enable-storage-widget-on-sample-forms) 3. Define if the *Object type* should be shown in drop downs, as described in [Enable Objects in - dropdowns](https://openbis.ch/index.php/docs/admin-documentation/new-entity-type-registration/enable-objects-in-dropdowns/); + dropdowns](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/new-entity-type-registration.html#enable-objects-in-dropdowns); 4. Define if the *Object type* should be shown in the main menu under the Lab notebook section. By default objects are not shown in the main menu in the Inventory section. 5. Customise the Parents and Children sections for an *Object type* as described in [Customise Parents and Children Sections in Object - Forms](https://openbis.ch/index.php/docs/admin-documentation/customise-parents-and-children-sections-in-object-forms/); + Forms](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-parents-and-children-sections-in-object-forms.html); 4. **Inventory Spaces**. It is possible to move Spaces from the Inventory section to the Lab notebook section and vice-versa as described in [Move Spaces between Lab Notebook and - Inventory](https://openbis.ch/index.php/docs/admin-documentation/space-management/move-space-between-lab-notebook-and-inventory/) + Inventory](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/space-management.html#move-space-between-lab-notebook-and-inventory) 5. **Main menu**. The main menu for the group can be customised here, as described in [Customise the Main - Menu;](https://openbis.ch/index.php/docs/admin-documentation/customise-the-main-menu/) + Menu;](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/customise-the-main-menu.html) 6. **Miscellaneous**. In this section it is possible to: 1. Show the dataset archiving buttons for the group. 
Please note that this is not available by default, but the infrastructure for [archiving to - tapes](https://openbis.ch/index.php/docs/user-documentation/data-archiving/) + tapes](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/data-archiving.html) (StrongBox/StrongLink) needs to be put in place by a *system admin ([Multi data set archiving](https://unlimited.ethz.ch/display/openBISDoc2010/Multi+data+set+archiving))*. diff --git a/docs/user-documentation/general-admin-users/admins-documentation/new-entity-type-registration.md b/docs/user-documentation/general-admin-users/admins-documentation/new-entity-type-registration.md index 251cbd307538a9323416218e0032813b195bd673..c1dfed7ead98bf6b04da9cfd0974ce3d645aa3be 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/new-entity-type-registration.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/new-entity-type-registration.md @@ -3,7 +3,7 @@ New Entity Type Registration ## Enable Rich Text Editor or Spreadsheet Widgets -[](# "Print this article") + For certain fields, it is possible to enable the use of a Rich Text @@ -37,7 +37,7 @@ Updated on October 19, 2022 ## Enable Objects in dropdowns -[](# "Print this article") +  @@ -59,7 +59,7 @@ Updated on October 19, 2022 ## Register masterdata via Excel -[](# "Print this article") +  @@ -123,13 +123,13 @@ masterdata. An example template file for this can be found here: More extensive documentation on the XLS format for masterdata and metadata registration can be found -[here](https://unlimited.ethz.ch/display/openBISDoc2010/Excel+Import+Service). +[here](https://openbis.readthedocs.io/en/latest/user-documentation/advance-features/excel-import-service.html). Updated on January 13, 2023 ## Properties overview -[](# "Print this article") +  @@ -145,7 +145,7 @@ Updated on March 1, 2022 ## Internal properties and vocabularies -[](# "Print this article") +  diff --git a/docs/user-documentation/general-admin-users/admins-documentation/space-management.md b/docs/user-documentation/general-admin-users/admins-documentation/space-management.md index f4b3ec2130dcea7086303bd38869aab8b2f2cec7..1b76ea68470c503fe183083f0e4061a66e831b85 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/space-management.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/space-management.md @@ -2,17 +2,12 @@ Space Management ==== ## Create new Inventory Spaces - -[](# "Print this article") - The default Inventory contains two folders: **Materials** and **Methods**. These are openBIS *Spaces*. Additional *Spaces* can be created by an *Instance admin*. - - ### Create a new Inventory Space from the ELN UI  @@ -55,11 +50,11 @@ create a new *Space*: 1. **no group**. The new *Space* will have no prefix and the Settings defined in General Settings will apply (see [General ELN - Settings](https://openbis.ch/index.php/docs/admin-documentation/multi-group-set-up/general-eln-settings/)). + Settings](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.html#general-eln-settings)). 2. **in one of the existing groups**. The new *Space* will have the group prefix and the Settings of that group will apply (see [Group ELN - Settings](https://openbis.ch/index.php/docs/admin-documentation/multi-group-set-up/group-eln-settings/)). 
+ Settings](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.html#group-eln-settings)).  @@ -79,7 +74,7 @@ In the core UI:  -1. Select **Admin -> Spaces** +1. Select **Admin -> Spaces** 2. Click **Add Space** at the bottom of the page 3. Enter the *Space* **Code**, e.g. **EQUIPMENT** 4. **Save** @@ -120,7 +115,7 @@ Updated on April 26, 2023 ## Create new ELN Spaces -[](# "Print this article") + ### Create a new Lab Notebook Space from the ELN UI @@ -162,11 +157,11 @@ create a new *Space*: 1. **no group**. The new *Space* will have no prefix and the Settings defined in General Settings will apply (see [General ELN - Settings](https://openbis.ch/index.php/docs/admin-documentation/multi-group-set-up/general-eln-settings/)). + Settings](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.html#general-eln-settings)). 2. **in one of the existing groups**. The new *Space* will have the group prefix and the Settings of that group will apply (see [Group ELN - Settings](https://openbis.ch/index.php/docs/admin-documentation/multi-group-set-up/group-eln-settings/)). + Settings](https://openbis.readthedocs.io/en/latest/user-documentation/general-admin-users/admins-documentation/multi-group-set-up.html#group-eln-settings)).  @@ -204,7 +199,7 @@ In the core UI:   -1. Select **Admin -> Spaces** +1. Select **Admin -> Spaces** 2. Click **Add Space** at the bottom of the page 3. Enter the Space **Code**, e.g. **EQUIPMENT** 4. **Save** @@ -222,7 +217,7 @@ Updated on April 26, 2023 ## Delete Spaces -[](# "Print this article") +  @@ -265,7 +260,7 @@ Updated on April 26, 2023 ## Move Spaces between Lab Notebook and Inventory -[](# "Print this article") +  diff --git a/docs/user-documentation/general-admin-users/admins-documentation/user-registration.md b/docs/user-documentation/general-admin-users/admins-documentation/user-registration.md index e9bafc3bba1aac2c6b66c94885bbd9c1f8c06820..cfc512c7c2dbb34e54adebf10925fa46993141c3 100644 --- a/docs/user-documentation/general-admin-users/admins-documentation/user-registration.md +++ b/docs/user-documentation/general-admin-users/admins-documentation/user-registration.md @@ -3,7 +3,7 @@ User Registration ## openBIS roles -[](# "Print this article") +  @@ -117,7 +117,7 @@ Updated on April 26, 2023 ## User Profile -[](# "Print this article") + In the User Profile, a user who is logged in into openBIS can find the following information: @@ -129,7 +129,7 @@ following information: 3. **Email** 4. **openBIS session token** 5. **Zenodo API Token** ([Export to - Zenodo](https://openbis.ch/index.php/docs/user-documentation-20-10-3/data-export/export-to-zenodo/)) + Zenodo](https://openbis.readthedocs.io/en/latest/user-documentation/general-users/data-export.html#export-to-zenodo))  @@ -156,7 +156,7 @@ Updated on June 28, 2022 ## Assign home space to a user -[](# "Print this article") +  diff --git a/docs/user-documentation/general-admin-users/custom-database-queries.md b/docs/user-documentation/general-admin-users/custom-database-queries.md index d79a599df4ffe907e16c947678194c9e8902d1c7..52e8829329a735e63c10d895d59d9519ae13e072 100644 --- a/docs/user-documentation/general-admin-users/custom-database-queries.md +++ b/docs/user-documentation/general-admin-users/custom-database-queries.md @@ -105,7 +105,7 @@ Server](#) for an explanation on how to do this. Running a Parametrized Query ---------------------------- -1. 
Choose menu item **Queries -> Run Predefined Query**. The tab +1. Choose menu item **Queries -> Run Predefined Query**. The tab *Predefined Query* opens. 2. Choose a query using the query combo box. Queries specified for all configured databases are selected transparently using the same combo @@ -130,7 +130,7 @@ Running a SELECT statement This feature is only for users with *creator role*. It is useful for exploring the database by ad hoc queries. -1. Choose menu item **Queries -> Run Custom SQL Query**. The tab +1. Choose menu item **Queries -> Run Custom SQL Query**. The tab *Custom SQL Query* opens. 2. Enter a SELECT statement in the text area, select database and click on the **Execute** button. The result appears below in tabular form. @@ -142,7 +142,7 @@ This feature is only for users with *creator role*. ### Define a Query -1. Choose menu item **Queries -> Browse Query Definitions**. The tab +1. Choose menu item **Queries -> Browse Query Definitions**. The tab *Query Definitions* opens. It shows all definitions where the user has access rights. 2. Click on **Add Query Definition** for defining a new parametrized @@ -170,14 +170,20 @@ A SQL query can have parameters which are defined later by the user running the query. A parameter is of the form `${<parameter name>`}. Example: - select * from my_table where code = ${my table code} +```sql +select * from my_table where code = ${my table code} +``` The parameter name will appear in the text field when running the query. Optionally, you can provide key-value pairs which are "metadata" for the parameter name and separated by '::' from the name. These metadata keys are defined: -[TABLE] +|Metadata key|Explanation|Example| +|--- |--- |--- | +|type|Sets the data type of this parameter. Valid values are VARCHAR (or STRING), CHAR, INTEGER, BIGINT, FLOAT, DOUBLE, BOOLEAN, TIME, DATE or TIMESTAMP.|${code::type=VARCHAR}| +|list|Coma-separated list of allowed values for the parameter.|${color::list=red,green,blue}| +|query|A SQL query which is run to determine the allowed values for the parameter. The query is expected to return exactly one column. You should specify only fast queries here with a reasonably small number of returned rows as the UI will block until this query has returned.|${name::query=select last_name from users}| It is possible to combine multiple keys like this: `${estimate::type=integer::list=1,3,7,12`}. @@ -232,15 +238,16 @@ the SQL statement should be one of the following **magic** words: - `data_set_key` They should denote a perm ID of specified type. Example: + ```sql + select id, perm_id as data_set_key from data_sets + ``` - select id, perm_id as data_set_key from data_sets - -Be careful with this feature: The table is shown with the hyperlinks -even if the value isn't a perm ID of specified type. +> :warning: +> **Be careful with this feature**: The table is shown with the hyperlinks even if the value isn't a perm ID of specified type. ### Edit a Query -1. Choose menu item **Queries -> Browse Query Definitions**. The tab +1. Choose menu item **Queries -> Browse Query Definitions**. The tab *Query Definitions* opens. 2. Select a query and click on button **Edit**. The same dialog as for defining a query pops up. @@ -265,7 +272,7 @@ experiment of type `EXP`). 
### How to create/edit entity custom queries Entity custom queries can be created and edited in the same way as -`Generic` queries (**Queries -> Browse Query Definitions**), but the +`Generic` queries (**Queries -> Browse Query Definitions**), but the value of **`Query Type`** field should be set to Experiment, Sample, Data Set or Material. @@ -289,9 +296,5 @@ code).  -Legacy Syntax - -Older versions of openBIS required to put string parameters in ticks, -like '${param}'. Current versions of openBIS don't need this anymore, so -you can use ${param} without the ticks. However, the syntax with ticks -is still accept for backward compatibility. \ No newline at end of file +> :warning: **Legacy Syntax:** +> Older versions of openBIS required to put string parameters in ticks, like '${param}'. Current versions of openBIS don't need this anymore, so you can use ${param} without the ticks. However, the syntax with ticks is still accept for backward compatibility. diff --git a/docs/user-documentation/general-admin-users/properties-handled-by-scripts.md b/docs/user-documentation/general-admin-users/properties-handled-by-scripts.md index 72ed940a8a389c8f8083f533d8aa72622a217aea..427572c7c233bd8c9fe7ef4b9323b7d516c74107 100644 --- a/docs/user-documentation/general-admin-users/properties-handled-by-scripts.md +++ b/docs/user-documentation/general-admin-users/properties-handled-by-scripts.md @@ -35,7 +35,7 @@ and one script type to perform validations on entities: 2. **Managed Property Handler** (for properties referred to as *Managed Properties*) - + 1. - for properties that will be **indirectly modified by users**, - the script alters default handling of a property by openBIS by @@ -44,7 +44,7 @@ and one script type to perform validations on entities: view (e.g. as a table), - **input fields** for modifying the property, - +  - - **translation** and/or **validation** of user input. @@ -59,36 +59,26 @@ To create a property that should be handled by a script perform the following steps. 1. Define a property type with appropriate name and data type - (Administration->Property Types->New). + (Administration->Property Types->New). 2. Define a script that will handle the property - (Administration->Scripts) or deploy a Java plugin. For details + (Administration->Scripts) or deploy a Java plugin. For details and examples of usage go to pages: - [Dynamic Properties](/display/openBISDoc2010/Dynamic+Properties) - [Managed Properties](/display/openBISDoc2010/Managed+Properties) - [Entity validation scripts](/display/openBISDoc2010/Entity+validation+scripts) 3. Assign the created property type to chosen entity type using the - created script (e.g. for samples: Administration->Property - Types->Assign to Sample Type): + created script (e.g. for samples: Administration->Property + Types->Assign to Sample Type): - select Handled By Script checkbox, - select the appropriate Script Type - choose the Script 4. The validation scripts are assigned to the type in the "Edit Type" - section. (e.g Admin->Types->Samples. Select sample and click + section. (e.g Admin->Types->Samples. Select sample and click edit.)  - - - - - - - - - - - No labels Dynamic Properties @@ -163,26 +153,23 @@ You can test your script on selected entities 1. Show a value of a Sample property which is named 'Multiplicity' - - +```java entity.propertyValue('Multiplicity') - +``` -2\. Takes an existing property and multiplies the value by 1.5 +2. 
Takes an existing property and multiplies the value by 1.5 +```java float(entity.propertyValue('CONCENTRATION_ORIGINAL_ILLUMINA'))*1.5 - - - - +``` #### Advanced Examples 1. Show all entity properties as one dynamic property: - +```java def get_properties(e): """Automatically creates entity description""" properties = e.properties() @@ -196,14 +183,12 @@ You can test your script on selected entities def calculate(): """Main script function. The result will be used as the value of appropriate dynamic property.""" - return get_properties(entity) - - + return get_properties(entity) +``` -2\. Calculate a new float value based some other values - - +2. Calculate a new float value based some other values +```java import java.lang.String as String def calculateValue(): @@ -223,13 +208,11 @@ You can test your script on selected entities def calculate(): """Main script function. The result will be used as the value of appropriate dynamic property.""" return calculateValue() +``` - - -3\. Calculate a time difference between two time stamps: - - +3. Calculate a time difference between two time stamps: +```java from datetime import datetime def dateTimeSplitter(openbisDate): dateAndTime, tz = openbisDate.rsplit(" ", 1) @@ -246,12 +229,13 @@ You can test your script on selected entities return str(diffTime) except: return "N/A" +``` -4\. Illumina NGS Low Plexity Pooling Checker: checks if the complexity -of a pooled sample is good enough for a successful run: +4. Illumina NGS Low Plexity Pooling Checker: checks if the complexity of a pooled sample is good enough for a successful run: +```java def checkBarcodes(): ''' 'parents' are a HashSet of SamplePropertyPE @@ -304,11 +288,8 @@ of a pooled sample is good enough for a successful run: return returnString def calculate(): """Main script function. The result will be used as the value of appropriate dynamic property.""" - return checkBarcodes() - - - - + return checkBarcodes() +``` #### Data Types @@ -371,7 +352,7 @@ procedure: Jython scripts and Java plugins. ### Defining a Jython validation script -1. Go to Admin -> Plugins -> Add Plugin. +1. Go to Admin -> Plugins -> Add Plugin. 2. Select "Entity Validator" as the plugin type 3. Choose name, entity kind, and description. 4. Prepare a script (see paragraph "Script specification" below) @@ -425,8 +406,8 @@ does not have any properties defined: To make the validation active per entity type you have to select the validation script for each type: -- Admin -> Types -> <Entity Kind> you selected also in the - script definition -> +- Admin -> Types -> <Entity Kind> you selected also in the + script definition -> - Select a Sample Type and edit it - You find a property which is called 'Validation Script' (see screen shot below). Just select your defined Script and hit save. @@ -523,7 +504,7 @@ To create a Managed Property: ### Creating scripts To browse and edit existing scripts or add new ones, select -Administration->Scripts from the top menu. +Administration->Scripts from the top menu. The scripts should be written in standard Jython syntax. The following functions are invoked by openBIS, some of them are mandatory: @@ -535,7 +516,9 @@ access to a variable named `property` which holds an object of type ` IManagedProperty `. Methods of this class are explained below. To access the property object from the script, use the following syntax: +```java property.<requested method> +``` #### Predefined Functions @@ -546,14 +529,14 @@ script: table model builder. 
It will be used in `configureUI` to create tabular data to be shown in openBIS GUI. - + - `ValidationException ValidationException(String message)`: Creates a Validation Exception with specified message which should be raised in functions `updateFromUI` and `updateFromBatchInput` in case of invalid input. - + - ` IManagedInputWidgetDescriptionFactory inputWidgetFactory()`: returns a factory that can be used to create descriptions of input @@ -561,7 +544,7 @@ script: [IManagedInputWidgetDescription](http://svnsis.ethz.ch/doc/openbis/current/ch/systemsx/cisd/openbis/generic/shared/basic/dto/api/IManagedInputWidgetDescription.html) and [example](#ManagedProperties-Example3)). - + - ` IElementFactory elementFactory()`: returns a factory that can be used to create @@ -569,7 +552,7 @@ script: See [\#Storing structured content in managed properties](#ManagedProperties-Storingstructuredcontentinmanagedproperties). - + - ` IStructuredPropertyConverter xmlPropertyConverter()`: returns a converter that can translate @@ -608,6 +591,7 @@ This example shows how to configure a fixed table (without using value stored in the property at all) that will be shown in detail view of an entity. +```java def configureUI(): """create table builder and add 3 columns""" tableBuilder = createTableBuilder() @@ -633,6 +617,7 @@ entity. property.setOwnTab(True) uiDesc = property.getUiDescription() uiDesc.useTableOutput(tableBuilder.getTableModel()) +``` Let's assume, that a property type with label *Fixed Table* was assigned to sample type CELL\_PLATE as a managed property using the example @@ -652,6 +637,7 @@ openBIS entities (see [Linking to openBIS entities](#ManagedProperties-LinkingtoopenBISentities) for more details): +```java def configureUI(): """create table builder with 4 columns (any column names can be used)""" tableBuilder = createTableBuilder() @@ -677,6 +663,7 @@ details): property.setOwnTab(True) uiDesc = property.getUiDescription() uiDesc.useTableOutput(tableBuilder.getTableModel()) +``` If linked entity doesn't exist in the database the perm id ((`code (type)` for materials) will be shown as plain text (not @@ -692,6 +679,7 @@ Otherwise clickable links will be displayed with link text equal to: This example shows how to configure a table representation of a property value holding a CSV document (many lines with comma separated values): +```java def configureUI(): """get the property value as String and split it using newline character""" value = property.getValue() @@ -713,6 +701,7 @@ value holding a CSV document (many lines with comma separated values): property.setOwnTab(True) uiDesc = property.getUiDescription() uiDesc.useTableOutput(tableBuilder.getTableModel()) +``` Let's assume, that: @@ -739,37 +728,40 @@ Managed property value will be visible as text in the left panel This is an extension of the previous example showing how to specify user input for actions like add, edit and delete: - def configureUI(): - """code from previous example is not repeated here""" +```java +def configureUI(): + """code from previous example is not repeated here""" + + factory = inputWidgetFactory() + + if len(lines) > 0: + header = lines[0].split(",") + + """define an action labelled 'Add' for adding a new row to the table""" + addAction = uiDesc.addTableAction('Add').setDescription('Add new row to the table') + """for every header column add a text input field with the same label as column title""" + widgets = [] + for i in range(0, len(header)): + widgets.append(factory.createTextInputField(header[i])) + 
addAction.addInputWidgets(widgets) + + """define an action labelled 'Edit' for editing a selected row of the table""" + editAction = uiDesc.addTableAction('Edit').setDescription('Edit selected table row') + editAction.setRowSelectionRequiredSingle() + """for every header column add a text input field that is bounded with a column""" + widgets = [] + for i in range(0, len(header)): + columnName = header[i] + widgets.append(factory.createTextInputField(columnName)) + editAction.addBinding(columnName, columnName) + editAction.addInputWidgets(widgets) + + """define an action labelled "Delete" for deleting selected rows from the table - no input fields are needed""" + deleteAction = uiDesc.addTableAction('Delete')\ + .setDescription('Are you sure you want to delete selected rows from the table?') + deleteAction.setRowSelectionRequired() - factory = inputWidgetFactory() - - if len(lines) > 0: - header = lines[0].split(",") - - """define an action labelled 'Add' for adding a new row to the table""" - addAction = uiDesc.addTableAction('Add').setDescription('Add new row to the table') - """for every header column add a text input field with the same label as column title""" - widgets = [] - for i in range(0, len(header)): - widgets.append(factory.createTextInputField(header[i])) - addAction.addInputWidgets(widgets) - - """define an action labelled 'Edit' for editing a selected row of the table""" - editAction = uiDesc.addTableAction('Edit').setDescription('Edit selected table row') - editAction.setRowSelectionRequiredSingle() - """for every header column add a text input field that is bounded with a column""" - widgets = [] - for i in range(0, len(header)): - columnName = header[i] - widgets.append(factory.createTextInputField(columnName)) - editAction.addBinding(columnName, columnName) - editAction.addInputWidgets(widgets) - - """define an action labelled "Delete" for deleting selected rows from the table - no input fields are needed""" - deleteAction = uiDesc.addTableAction('Delete')\ - .setDescription('Are you sure you want to delete selected rows from the table?') - deleteAction.setRowSelectionRequired() +``` The picture below shows updated detail view of sample S1. For every action defined in the script there is a button in bottom toolbar of the @@ -795,53 +787,55 @@ response to user's action. This is an extension of the previous example showing how to specify behaviour of actions defined in `configureUI()` function: - def configureUI(): - """code from previous example is not repeated here""" - - def updateFromUI(action): - - """get the property value as String and split it using newline character""" - value = property.getValue() - lines = [] - if value != None: - lines = value.split("\n") - - """for 'Add' action add a new line with values from input fields""" - if action.getName() == 'Add': - newLine = extractNewLineFromActionInput(action) - lines.append(newLine) - elif action.getName() == 'Edit': - """ - For 'Edit' action find the line corresponding to selected row - and replace it with a line with values from input fields. - NOTE: line index is one bigger than selected row index because of header. - """ - lineIndex = action.getSelectedRows()[0] + 1 - lines.pop(lineIndex) - newLine = extractNewLineFromActionInput(action) - lines.insert(lineIndex, newLine) - elif action.getName() == 'Delete': - """ - For 'Delete' action delete the lines corresponding to selected rows. 
- NOTE: deletion of rows is implemented here in reversed order - """ - rowIds = list(action.getSelectedRows()) - rowIds.reverse() - for rowId in rowIds: - lines.pop(rowId + 1) - - """in the end update the property value concatenating all the lines""" - value = "\n".join(lines) - property.setValue(value) - - def extractNewLineFromActionInput(action): - inputValues = [] - for input in action.getInputWidgetDescriptions(): - inputValue = "" - if input.getValue(): - inputValue = input.getValue() - inputValues.append(inputValue) - return ",".join(inputValues) +```java +def configureUI(): + """code from previous example is not repeated here""" + +def updateFromUI(action): + + """get the property value as String and split it using newline character""" + value = property.getValue() + lines = [] + if value != None: + lines = value.split("\n") + + """for 'Add' action add a new line with values from input fields""" + if action.getName() == 'Add': + newLine = extractNewLineFromActionInput(action) + lines.append(newLine) + elif action.getName() == 'Edit': + """ + For 'Edit' action find the line corresponding to selected row + and replace it with a line with values from input fields. + NOTE: line index is one bigger than selected row index because of header. + """ + lineIndex = action.getSelectedRows()[0] + 1 + lines.pop(lineIndex) + newLine = extractNewLineFromActionInput(action) + lines.insert(lineIndex, newLine) + elif action.getName() == 'Delete': + """ + For 'Delete' action delete the lines corresponding to selected rows. + NOTE: deletion of rows is implemented here in reversed order + """ + rowIds = list(action.getSelectedRows()) + rowIds.reverse() + for rowId in rowIds: + lines.pop(rowId + 1) + + """in the end update the property value concatenating all the lines""" + value = "\n".join(lines) + property.setValue(value) + +def extractNewLineFromActionInput(action): + inputValues = [] + for input in action.getInputWidgetDescriptions(): + inputValue = "" + if input.getValue(): + inputValue = input.getValue() + inputValues.append(inputValue) + return ",".join(inputValues) +``` ##### updateFromBatchInput(), batchColumNames() and inputWidgets() @@ -852,6 +846,7 @@ entity type which has managed properties. This example assumes one column in the file for the managed property. +```java def updateFromBatchInput(bindings): property.setValue('hello ' + bindings.get('')) @@ -861,6 +856,7 @@ This example assumes one column in the file for the managed property. row = builder.addRow() row.setCell('Greetings', property.getValue()) property.getUiDescription().useTableOutput(builder.getTableModel()) +``` The following input file for a batch upload for samples of a type where property `MANGED-TEXT` @@ -878,6 +874,7 @@ would create in sample detailed view This example takes two columns from the batch input file for creation of one managed property. +```java def batchColumnNames(): return ['Unit', 'Value'] @@ -889,6 +886,7 @@ one managed property. builder.addHeader('Value') builder.addRow().setCell('Value', property.getValue()) property.getUiDescription().useTableOutput(builder.getTableModel()) +``` Assuming a sample type is assigned to the property `MANAGED-TEXT` with this script. On the batch upload form a click on *Download file @@ -927,13 +925,13 @@ are all non-mandatory single-line fields with labels specified by the batch column names. 
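The binding keys passed to `updateFromBatchInput` are expected to correspond to the batch column names, so for the two-column case a matching update function could look like the sketch below. This is an illustrative assumption rather than a prescribed implementation: the keys `'Unit'` and `'Value'` are taken from `batchColumnNames()` above, and the way the two values are combined into the stored property value is arbitrary.

```python
def updateFromBatchInput(bindings):
    """Hypothetical sketch: combine the two batch columns into one stored value.
    The binding keys are assumed to equal the names returned by batchColumnNames()."""
    unit = bindings.get('Unit')
    value = bindings.get('Value')
    property.setValue(value + ' ' + unit)
```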
More is possible if the function `batchColumnsNames` is replaced by function `inputWidgets` as in the following example: +```java def inputWidgets(): factory = inputWidgetFactory() unit = factory.createComboBoxInputField('Unit', ['cm', 'mm']).setMandatory(True) value = factory.createTextInputField('Value').setMandatory(True) return [unit, value] - - +``` The field 'Managed Text' in the registration form will be as shown in the following screen shot: @@ -950,8 +948,10 @@ Here is a (overly) simple example: ###### Example 8 +```java def configureUI(): property.getUiDescription().useHtmlOutput("<p>hello<br>foo</p>") +``` ##### Accessing information about a person that performs an update operation @@ -962,8 +962,10 @@ update operation can be access in a managed property script. The information is stored in the 'person' variable that is available in both 'updateFromUI' and 'updateFromBatchInput' functions. +```java def updateFromBatchInput(bindings): property.setValue('userId: ' + person.getUserId() + ', userName: ' + person.getUserName()) +``` #### Storing structured content in managed properties @@ -1031,10 +1033,10 @@ the example code below. It is extracted from a Jython script and demonstrates the basics of constructing and serializing structured content within a managed property. +```java factory = elementFactory() converter = xmlPropertyConverter() - def initialCreationOfPropertyValue(): """ @@ -1073,22 +1075,25 @@ content within a managed property. # update the property value to reflect the modified data structure property.value = converter.convertToString(elements) +``` At the end of the function `initialCreationOfPropertyValue()`, the variable `property.value` will contain an XML representation of the created data structure, which will look like +```xml <root> <Sample permId="samplePermId"/> <Material permId="type (typeCode)"/> <testElement key1="value1" key2="value2"/> </root> +``` The function `updateDataStructure()` assumes that the `initialCreationOfPropertyValue()` has already been called and modifies -the data structure to what would translate to the following XML snippet -: +the data structure to what would translate to the following XML snippet: +```xml <root> <Sample permId="modifiedLink"/> <Material permId="type (typeCode)"> @@ -1096,6 +1101,7 @@ the data structure to what would translate to the following XML snippet </Material> <testElement key1="value1" key2="modifiedvalue"/> </root> +``` #### Unofficial API @@ -1111,6 +1117,7 @@ The following example shows a complete implementation of a managed property script for handling list of log entries. The property value is stored as an XML document. +```java from java.util import Date """ @@ -1249,6 +1256,7 @@ stored as an XML document. 
"""Update value of the managed property to XML string created from modified list of elements.""" property.value = converter.convertToString(elements) +``` ### Creating and Deploying Java Plugins diff --git a/docs/user-documentation/general-users/additional-functionalities.md b/docs/user-documentation/general-users/additional-functionalities.md index 688314de3d2eff64674c202794a1330f116c4c18..19397fd86ccc6c841be43371c96bb0664fd9c3f7 100644 --- a/docs/user-documentation/general-users/additional-functionalities.md +++ b/docs/user-documentation/general-users/additional-functionalities.md @@ -3,7 +3,7 @@ Additional Functionalities ## Visualise Relationships -[](# "Print this article") + Parent-child relationships between *Objects* can be visualised as trees @@ -32,7 +32,7 @@ Updated on March 4, 2022 ## Tables -[](# "Print this article") + @@ -229,7 +229,7 @@ Updated on April 26, 2023 ## Browse Entries by Type -[](# "Print this article") + The **Object Browser** under the **Utilities** main menu allows to see @@ -262,7 +262,7 @@ Updated on April 25, 2023 ## Trashcan -[](# "Print this article") + When *Experiments*, *Objects* and *Datasets* are deleted, they are moved @@ -304,7 +304,7 @@ Updated on October 9, 2022 ## Vocabulary Browser -[](# "Print this article") + The **Vocabulary browser** is accessible from the **Utilities** main @@ -328,7 +328,7 @@ Updated on April 25, 2023 ## Freeze Entities -[](# "Print this article") + Each level of the openBIS hierarchy (Space, Project, @@ -391,63 +391,93 @@ password and save. 1. **Freeze Space only** -[TABLE] - - - - +||Allowed|Not allowed| +|--- |--- |--- | +|Create new Project||x| +|Create new Experiment/Collection|x|| +|Create new Object||x| +|Create new Dataset in existing Experiment/Collection|x|| +|Create new Dataset in existing Object|x|| +|Edit existing Project|x|| +|Edit existing Experiment/Collection|x|| +|Edit existing Object|x|| +|Edit existing Dataset|x|| +|Delete Space||x| +|Delete Project||x| +|Delete Experiment/Collection|x|| +|Delete Object|x|| +|Delete Dataset|x|| +|Move Experiment/Collection|x|| +|Move Object|x|| +|Copy Object||x| +|Export|x|| 1. **Freeze Project only** - - - - -[TABLE] - - - - +||Allowed|Not allowed| +|--- |--- |--- | +|Create new Experiment/Collection||x| +|Create new Object||x| +|Create new Dataset in existing Experiment/Collection|x|| +|Create new Dataset in existing Object|x|| +|Edit Project||x| +|Edit existing Experiment/Collection|x|| +|Edit existing Object|x|| +|Edit existing Dataset|x|| +|Delete Project||x| +|Delete Experiment/Collection||x| +|Delete Object||x| +|Delete Dataset||x| +|Move Experiment/Collection||x| +|Move Object||x| +|Copy Object||x| +|Export|x|| **3. Freeze Experiment/Collection only** - - - - -[TABLE] - - - - +||Allowed|Not allowed| +|--- |--- |--- | +|Create new Object||x| +|Create new Dataset in existing Experiment/Collection||x| +|Create new Dataset in existing Object||x| +|Edit existing Experiment/Collection||x| +|Edit existing Object|x|| +|Edit existing Dataset|x|| +|Delete Experiment/Collection||x| +|Delete Object||x| +|Delete Dataset||x| +|Move Experiment/Collection||x| +|Move Object||x| +|Copy Object||x| +|Export|x|| **4. Freeze Object only** - - - - -[TABLE] - - - - - - +||Allowed|Not allowed| +|--- |--- |--- | +|Create new Dataset in existing Object||x| +|Edit existing Object||x| +|Edit existing Dataset in Object|x|| +|Delete Object||x| +|Delete Dataset||x| +|Move Object||x| +|Copy Object|x (only if the Experiment is not frozen)|| +|Export|x|| **5. 
Freeze Dataset only ** - - - - -[TABLE] +||Allowed|Not allowed| +|--- |--- |--- | +|Edit existing Dataset||x| +|Delete Dataset||x| +|Move Dataset||x| +|Export|x|| Updated on April 25, 2023 ## Navigation menu -[](# "Print this article") +  @@ -549,7 +579,7 @@ Updated on April 26, 2023 ## Custom Imports -[](# "Print this article") +  @@ -608,7 +638,7 @@ Updated on April 26, 2023 ## Entity history -[](# "Print this article") +  diff --git a/docs/user-documentation/general-users/data-archiving.md b/docs/user-documentation/general-users/data-archiving.md index 5e402ebe577dab3737054169aa646c19243a5a91..18ce05e1b0548ced715e26c3556a018d99c95d41 100644 --- a/docs/user-documentation/general-users/data-archiving.md +++ b/docs/user-documentation/general-users/data-archiving.md @@ -1,6 +1,6 @@ # Data archiving -[](# "Print this article") +  diff --git a/docs/user-documentation/general-users/data-export.md b/docs/user-documentation/general-users/data-export.md index cbad0587068d7e5a4776b55ed88b9db4997bc02a..51fd0b0ec785163e661029dc469c565fa3e9b943 100644 --- a/docs/user-documentation/general-users/data-export.md +++ b/docs/user-documentation/general-users/data-export.md @@ -3,7 +3,7 @@ Data Export ## Export to File -[](# "Print this article") + ### Export Lab Notebooks @@ -65,7 +65,7 @@ Updated on April 25, 2023 ## Export to Zenodo -[](# "Print this article") + Currently openBIS offers an integration with the **Zenodo** data @@ -112,7 +112,7 @@ stored in openBIS, with the following procedure: To export data to Zenodo: -1. Go to **Exports** -> **Export to Zenodo** under **Utilities** in +1. Go to **Exports** -> **Export to Zenodo** under **Utilities** in the main menu. 2. Select the data you want to export from the menu. 3. enter a **Submission** **Title.** @@ -143,7 +143,7 @@ Updated on April 25, 2023 ## Export to ETH Research Collection -[](# "Print this article") +  @@ -162,7 +162,7 @@ To export data to the ETH Research Collection:  -1. Go to **Utilities** -> **Exports** -> **Export to Research +1. Go to **Utilities** -> **Exports** -> **Export to Research Collection**. 2. Select what to export from the tree. 3. Select the **Submission Type** from the available list: *Data diff --git a/docs/user-documentation/general-users/data-upload.md b/docs/user-documentation/general-users/data-upload.md index 1532b00aed0ec3c61448b17b494ec2ec1d775513..dce0bae2671db65264ccf343d893a536deca1ec0 100644 --- a/docs/user-documentation/general-users/data-upload.md +++ b/docs/user-documentation/general-users/data-upload.md @@ -3,7 +3,7 @@ Data Upload ## Data upload via web UI -[](# "Print this article") +  @@ -47,7 +47,7 @@ Updated on March 23, 2023 ## Data upload via dropbox -[](# "Print this article") +  @@ -120,13 +120,13 @@ on the eln-lims-dropbox folder.  -In case of uploads of data >100GB we recommend to configure the +In case of uploads of data >100GB we recommend to configure the **eln-lims-dropbox-marker**. The set up and configuration need to be done by a *system admin*. The process of data preparation is the same as described above, however in this case the data move to the openBIS final storage only starts when a markerfile is placed in the eln-lims-dropbox-marker folder. The marker file is an empty file with -this name:  **.MARKER\_is\_finished\_<folder-to-upload-name>. +this name:  **.MARKER\_is\_finished\_<folder-to-upload-name>. **Please note the “.†at the start of the name, which indicates that this is a hidden file. This file should also not have any extension. 
For example, if the folder to be uploaded has the following name: @@ -185,7 +185,7 @@ other text editor will also work. Shift + . (period)**. 5. The file you saved before has an extension, that needs to be removed. If the extension is not shown in your Finder, go to Finder - > Preferences menu, select the Advanced tab, and check the “Show + > Preferences menu, select the Advanced tab, and check the “Show all filename extensions†box. 6. Remove the extension from the file. diff --git a/docs/user-documentation/general-users/general-overview.md b/docs/user-documentation/general-users/general-overview.md index aad2540bc301c3af705110782853ea85ea6fab49..a63c3ca1a299dcc7e56632e0254151422d3bd294 100644 --- a/docs/user-documentation/general-users/general-overview.md +++ b/docs/user-documentation/general-users/general-overview.md @@ -1,6 +1,6 @@ # General Overview -[](# "Print this article") + The openBIS platform has three primary functionalities: diff --git a/docs/user-documentation/general-users/inventory-of-materials-and-methods.md b/docs/user-documentation/general-users/inventory-of-materials-and-methods.md index 803df57a6f3e7fb3faaefa253d0019612c2164ac..c6094f747f45a98833fc94d8ec2042a461f89cc4 100644 --- a/docs/user-documentation/general-users/inventory-of-materials-and-methods.md +++ b/docs/user-documentation/general-users/inventory-of-materials-and-methods.md @@ -50,7 +50,7 @@ Updated on April 25, 2023 ## Register single entries in a Collection -[](# "Print this article") + In this example, we will see how to register one *Object* of type **Sample** in the **Raw Samples** *Collection.* The same procedure @@ -84,7 +84,7 @@ Updated on April 25, 2023 ## Batch register entries in a Collection -[](# "Print this article") + It is possible to register several samples at once via file upload. Two methods are currently available: @@ -136,7 +136,7 @@ Excel file. Please note that codes are not case-sensitive, but labels are. Codes and labels of vocabulary terms can be seen under -**Utilities -> Vocabulary Browser**. +**Utilities -> Vocabulary Browser**. #### Assign parents @@ -205,7 +205,7 @@ together, as shown in the template provided above: completely remove the **identifier** column from the file. 2. **Lists**. In fields that have lists to choose from (called **Controlled Vocabularies**), the code of the term needs to be - entered. Term codes can be seen under **Utilities -> Vocabulary + entered. Term codes can be seen under **Utilities -> Vocabulary Browser**. 3. **Parents**. Use the following syntax to enter parents: **identifier1, identifier2, identifier3.** @@ -236,7 +236,7 @@ Updated on April 25, 2023 ## Batch register entries in several Collections -[](# "Print this article") + It is possible to batch register *Objects* that belong to different *Collections*. @@ -291,7 +291,7 @@ Updated on April 25, 2023 ## Batch update entries in a Collection -[](# "Print this article") + It is possible to modify the values of one or more fields in several objects simultaneously via batch update. This can be done in two ways: @@ -375,7 +375,7 @@ Updated on April 25, 2023 ## Batch update entries in several Collections -[](# "Print this article") + It is possible to batch update *Objects* that belong to different *Collections*. @@ -431,7 +431,7 @@ Updated on April 25, 2023 ## Copy entries -[](# "Print this article") +  To create a copy of an existing entry, select **Copy** from the **More..** drop down menu in the *Collection* page. 
@@ -450,7 +450,7 @@ Updated on July 27, 2022 ## Move entries to a different Collection -[](# "Print this article") + You can move entries to a different *Collection* either from the e*ntry* form or from a *Collection* table. diff --git a/docs/user-documentation/general-users/lab-notebook.md b/docs/user-documentation/general-users/lab-notebook.md index 2adfe778e0cc8ab9aab44f9040bf0a6a221cfe58..c3893e516520dd778e33957b70915bd5286fd60f 100644 --- a/docs/user-documentation/general-users/lab-notebook.md +++ b/docs/user-documentation/general-users/lab-notebook.md @@ -3,7 +3,7 @@ Lab Notebook ## Register Projects -[](# "Print this article") + In a personal folder, users can register one or more *Projects* they @@ -36,7 +36,7 @@ Updated on April 25, 2023 ## Register Experiments -[](# "Print this article") + Inside one *Project*, a user can register several *Experiments*, which @@ -66,7 +66,7 @@ Updated on April 25, 2023 ## Register Experimental Steps -[](# "Print this article") + As mentioned above, the various steps executed when performing an @@ -302,7 +302,7 @@ Updated on April 26, 2023 ## Move Experimental Steps -[](# "Print this article") +  @@ -329,7 +329,7 @@ Updated on April 25, 2023 ## Copy Experimental Steps -[](# "Print this article") +  @@ -352,7 +352,7 @@ Updated on April 25, 2023 ## Use templates for Experimental Steps -[](# "Print this article") +  @@ -386,7 +386,7 @@ This table shows the metadata of the datasets. The content of the datasets can b ## Data Access -[](# "Print this article") + *Datasets* are displayed on the left hand-side of the @@ -475,7 +475,7 @@ you want to access. Note: if you encounter the error message “*SSH connection failed: Could not find a part of the path*.†you can fix this by disabling the cache -(Drives -> Advanced -> Enable Caching), and disabling log files. +(Drives -> Advanced -> Enable Caching), and disabling log files. The error is caused by an attempt to create files in a folder not available to Windows. @@ -549,7 +549,7 @@ Updated on May 5, 2023 ## Move Datasets -[](# "Print this article") +  @@ -583,7 +583,7 @@ Updated on April 25, 2023 ## Move one Experiment to a different Project -[](# "Print this article") +  @@ -651,7 +651,7 @@ Updated on July 5, 2023 ## Edit and Delete Projects, Experiments, Experimental Steps -[](# "Print this article") +  @@ -686,7 +686,7 @@ Updated on April 25, 2023 ## Share Lab Notebooks and Projects -[](# "Print this article") + It is possible to share either a complete lab notebook or single diff --git a/docs/user-documentation/general-users/managing-lab-stocks-and-orders-2.md b/docs/user-documentation/general-users/managing-lab-stocks-and-orders-2.md index c7977cc5f41419ce676aa4668f5241a5093c46ad..10ea709ce7589ebec654335f892b297edd625bfe 100644 --- a/docs/user-documentation/general-users/managing-lab-stocks-and-orders-2.md +++ b/docs/user-documentation/general-users/managing-lab-stocks-and-orders-2.md @@ -1,6 +1,6 @@ # Managing Lab Stocks and Orders -[](# "Print this article") + It is possible to use openBIS to manage stocks of products and create @@ -45,8 +45,8 @@ Catalog**. To build the catalog of all suppliers used for purchasing products by the lab: -> 1. Go to the **Supplier Collection** folder under **Stock** *->* -> **Stock Catalog***->* **Suppliers** in the main menu. +> 1. Go to the **Supplier Collection** folder under **Stock** *->* +> **Stock Catalog***->* **Suppliers** in the main menu. > 2. Click on the **+ New Supplier** button in the *Collection* page. > 3. 
Follow the steps explained in the [Register > Entries](https://openbis.ch/index.php/docs/user-documentation-20-10-3/inventory-of-materials-and-methods/register-single-entries-in-a-collection/) @@ -66,8 +66,8 @@ Collection.](https://openbis.ch/index.php/docs/user-documentation-20-10-3/invent To build the catalog of all products purchased in the lab: -> 1. Go to the **Product Collection** folder under **Stock** *->* -> **Stock Catalog***->* **Products** in the main menu. +> 1. Go to the **Product Collection** folder under **Stock** *->* +> **Stock Catalog***->* **Products** in the main menu. > 2. Click the **+ New Product** button in the *Collection* page. @@ -100,8 +100,8 @@ Collection.](https://openbis.ch/index.php/docs/user-documentation-20-10-3/invent Every lab member can create requests for products that need to be ordered: -> 1. Go to the **Request Collection** folder under **Stock** *->* -> **Stock Catalog***->* **Requests** in the main menu. +> 1. Go to the **Request Collection** folder under **Stock** *->* +> **Stock Catalog***->* **Requests** in the main menu. > 2. Click the **+ New Request** button in the *Collection* page.  @@ -159,8 +159,8 @@ based on the requests created in the Stock Catalog by every lab member. To create orders of products from requests created in the Stock Catalog: -> 1. Go to the **Order Collection** folder under **Stock** *->* -> **Stock Orders***->* **Orders** in the main menu. +> 1. Go to the **Order Collection** folder under **Stock** *->* +> **Stock Orders***->* **Orders** in the main menu. > 2. Click the **+ New Order** button in the *Collection* page.  diff --git a/docs/user-documentation/general-users/managing-storage-of-samples.md b/docs/user-documentation/general-users/managing-storage-of-samples.md index c33d88ca25c5e26805797b24c8252cbd59622901..92e21bcae64a5be887b6c015f1e5c0b640333b3a 100644 --- a/docs/user-documentation/general-users/managing-storage-of-samples.md +++ b/docs/user-documentation/general-users/managing-storage-of-samples.md @@ -3,7 +3,7 @@ Managing Storage Of Samples ## Allocate storage positions to samples -[](# "Print this article") + If we want to track the storage position of samples, openBIS provides a @@ -110,7 +110,7 @@ Updated on April 26, 2023 ## Batch update storage positions -[](# "Print this article") +  @@ -143,7 +143,7 @@ Updated on April 25, 2023 ## Delete storage positions -[](# "Print this article") +  diff --git a/docs/user-documentation/general-users/tools-for-analysis-of-data-stored-in-openbis.md b/docs/user-documentation/general-users/tools-for-analysis-of-data-stored-in-openbis.md index 7d112e93906671791c9777e0dd0d9ddd4cb5234a..c2b22cb9ec86e3c7a6f356875743c4dbe9650ea5 100644 --- a/docs/user-documentation/general-users/tools-for-analysis-of-data-stored-in-openbis.md +++ b/docs/user-documentation/general-users/tools-for-analysis-of-data-stored-in-openbis.md @@ -3,7 +3,7 @@ Tools For Analysis Of Data Stored In Openbis ## Jupyter Notebooks -[](# "Print this article") + Jupyter notebooks are web applications that combine text, code and @@ -40,7 +40,7 @@ Jupyter notebooks can be opened at every level of the openBIS hierarchy If you get a similar error as the one shown below when you try to launch a notebook from an entity, you need to start the JupyterHub server by -going to the main menu **Utilities** -> **Jupyter Workspace**. This +going to the main menu **Utilities** -> **Jupyter Workspace**. This error appears when the JupyterHub server is restarted (e.g. 
after an upgrade), because the user profile needs to be recreated. @@ -213,11 +213,11 @@ Updated on April 25, 2023 ## MATLAB toolbox -[](# "Print this article") + The MATLAB toolbox for openBIS allows to access data stored in openBIS directly from MATALB. Full documentation can be found here: [MATLAB -API](https://sissource.ethz.ch/sispub/openbis/-/tree/master/api-openbis-matlab) +API](https://openbis.readthedocs.io/en/latest/software-developer-documentation/apis/matlab-v3-api.html) Updated on April 17, 2023 diff --git a/docs/user-documentation/legacy-advance-features/openbis-kinme-nodes.md b/docs/user-documentation/legacy-advance-features/openbis-kinme-nodes.md index 7961fa667c2b9ced9f480d17490de3d8d6fb5b07..5618b6f9cb5234590797834942393396c2bcb175 100644 --- a/docs/user-documentation/legacy-advance-features/openbis-kinme-nodes.md +++ b/docs/user-documentation/legacy-advance-features/openbis-kinme-nodes.md @@ -31,7 +31,7 @@ Usage ----- All openBIS KNIME nodes can be found in Node Repository under Community -Nodes -> openBIS: +Nodes -> openBIS:  @@ -100,7 +100,7 @@ user will be asked for the passwords after loading a workflow.  If user ID and password are entered directly in the node setting dialog -the KNIME master key on the preferences page **KNIME -> Master Key** +the KNIME master key on the preferences page **KNIME -> Master Key** should be activated. Otherwise passwords will be stored unencrypted! ### openBIS Query Reader @@ -284,8 +284,8 @@ with `knime-`. The specifications of such services are the following: exception with stack trace will be created and thrown in KNIME. It will appear in KNIME log. For each row either the first cell isn't empty or the five other cells are not empty. In the first case the - value of the first column is of the form <exception - class>:<exception message>. If the first column is empty + value of the first column is of the form <exception + class>:<exception message>. If the first column is empty the row represents a stack trace entry where the other columns are interpreted as class name, method name, file name, and line number. 
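As an orientation for writing such services, the sketch below shows a minimal aggregation service that the Aggregated Data Reader node could call. It assumes the standard openBIS aggregation-service entry point `aggregate(parameters, tableBuilder)` together with the `ISimpleTableModelBuilder` methods (`addHeader`, `addRow`, `setCell`) used elsewhere in this documentation; the service name `knime-example` and the parameter key `threshold` are invented for illustration, only the `knime-` name prefix is a requirement stated above.

```python
# script.py of a hypothetical aggregation service registered as 'knime-example'
def aggregate(parameters, tableBuilder):
    """Return a small two-column table to the KNIME node.
    'parameters' holds the key-value pairs configured in the node dialog."""
    tableBuilder.addHeader('CODE')
    tableBuilder.addHeader('VALUE')
    row = tableBuilder.addRow()
    row.setCell('CODE', 'EXAMPLE')
    row.setCell('VALUE', str(parameters.get('threshold')))
```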
diff --git a/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagementMaintenanceTask.java b/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagementMaintenanceTask.java index 1d88eb5337335a626f2390c5ab9be78c46ee6fff..184c6af62c41654adc04693552b76ebeb6d395db 100644 --- a/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagementMaintenanceTask.java +++ b/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagementMaintenanceTask.java @@ -17,6 +17,7 @@ package ch.systemsx.cisd.openbis.generic.server.task; import java.io.File; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -24,6 +25,7 @@ import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeMap; +import java.util.TreeSet; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Level; @@ -151,17 +153,25 @@ public class UserManagementMaintenanceTask extends AbstractGroupMaintenanceTask Log4jSimpleLogger logger = new Log4jSimpleLogger(operationLog); Set<String> knownUsers = new HashSet<>(); UserManager userManager = createUserManager(config, logger, report); + Set<String> usersToBeIgnored = getUsersToBeIgnored(config); for (UserGroup group : config.getGroups()) { - addGroup(userManager, group); + addGroup(userManager, group, usersToBeIgnored); addAllTo(knownUsers, group.getUsers()); addAllTo(knownUsers, config.getInstanceAdmins()); } - userManager.manage(knownUsers); + knownUsers.removeAll(usersToBeIgnored); + userManager.manage(knownUsers, usersToBeIgnored); handleReport(report); operationLog.info("finished"); } + private Set<String> getUsersToBeIgnored(UserManagerConfig config) + { + List<String> usersToBeIgnored = config.getUsersToBeIgnored(); + return usersToBeIgnored != null ? 
new TreeSet<String>(usersToBeIgnored) : Collections.emptySet(); + } + private static void addAllTo(Collection<String> set, Collection<String> setToBeAddedOrNull) { if (setToBeAddedOrNull != null) @@ -170,7 +180,7 @@ public class UserManagementMaintenanceTask extends AbstractGroupMaintenanceTask } } - private void addGroup(UserManager userManager, UserGroup group) + private void addGroup(UserManager userManager, UserGroup group, Set<String> usersToBeIgnored) { String key = group.getKey(); if (shareIdsMappingFile != null) @@ -188,7 +198,7 @@ public class UserManagementMaintenanceTask extends AbstractGroupMaintenanceTask { for (String user : users) { - principalsByUserId.put(user, new Principal(user, "", "", "")); + addPrincipal(principalsByUserId, new Principal(user, "", "", ""), usersToBeIgnored); } } List<String> ldapGroupKeys = group.getLdapGroupKeys(); @@ -210,7 +220,7 @@ public class UserManagementMaintenanceTask extends AbstractGroupMaintenanceTask } for (Principal principal : principals) { - principalsByUserId.put(principal.getUserId(), principal); + addPrincipal(principalsByUserId, principal, usersToBeIgnored); } } catch (Throwable e) { @@ -228,6 +238,14 @@ public class UserManagementMaintenanceTask extends AbstractGroupMaintenanceTask } } + private void addPrincipal(Map<String, Principal> principalsByUserId, Principal principal, Set<String> usersToBeIgnored) + { + if (usersToBeIgnored.contains(principal.getUserId()) == false) + { + principalsByUserId.put(principal.getUserId(), principal); + } + } + private void handleReport(UserManagerReport report) { String errorReport = report.getErrorReport(); diff --git a/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManager.java b/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManager.java index 9b7ee1a8a886d5af5a7de832ab94d0fa28c5e309..20da322ca1d110410d9d3414f17c65619e332841 100644 --- a/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManager.java +++ b/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManager.java @@ -251,7 +251,7 @@ public class UserManager logger.log(LogLevel.INFO, principalsByUserId.size() + " users for " + (group.isEnabled() ? 
"" : "disabled ") + "group " + groupCode); } - public void manage(Set<String> knownUsers) + public void manage(Set<String> knownUsers, Set<String> usersToBeIgnored) { String sessionToken = null; try @@ -272,7 +272,7 @@ public class UserManager { String groupCode = entry.getKey(); Map<String, Principal> users = entry.getValue(); - manageGroup(sessionToken, groupCode, users, currentState, report); + manageGroup(sessionToken, groupCode, users, usersToBeIgnored, currentState, report); } updateHomeSpaces(sessionToken, currentState, report); removeUsersFromGlobalGroup(sessionToken, currentState, report); @@ -650,17 +650,17 @@ public class UserManager } private void manageGroup(String sessionToken, String groupCode, Map<String, Principal> groupUsers, - CurrentState currentState, UserManagerReport report) + Set<String> usersToBeIgnored, CurrentState currentState, UserManagerReport report) { try { Context context = new Context(sessionToken, service, currentState, report); if (currentState.groupExists(groupCode)) { - manageKnownGroup(context, groupCode, groupUsers); + manageKnownGroup(context, groupCode, groupUsers, usersToBeIgnored); } else { - manageNewGroup(context, groupCode, groupUsers); + manageNewGroup(context, groupCode, groupUsers, usersToBeIgnored); } createSamples(context, groupCode); createExperiments(context, groupCode); @@ -794,13 +794,13 @@ public class UserManager } } - private void manageKnownGroup(Context context, String groupCode, Map<String, Principal> groupUsers) + private void manageKnownGroup(Context context, String groupCode, Map<String, Principal> groupUsers, Set<String> usersToBeIgnored) { createCommonSpaces(context, groupCode); - manageUsers(context, groupCode, groupUsers); + manageUsers(context, groupCode, groupUsers, usersToBeIgnored); } - private void manageNewGroup(Context context, String groupCode, Map<String, Principal> groupUsers) + private void manageNewGroup(Context context, String groupCode, Map<String, Principal> groupUsers, Set<String> usersToBeIgnored) { String adminGroupCode = createAdminGroupCode(groupCode); @@ -809,7 +809,7 @@ public class UserManager createCommonSpaces(context, groupCode); - manageUsers(context, groupCode, groupUsers); + manageUsers(context, groupCode, groupUsers, usersToBeIgnored); } private void createCommonSpaces(Context context, String groupCode) @@ -829,11 +829,12 @@ public class UserManager } } - private void manageUsers(Context context, String groupCode, Map<String, Principal> groupUsers) + private void manageUsers(Context context, String groupCode, Map<String, Principal> groupUsers, Set<String> usersToBeIgnored) { UserGroup group = groupsByCode.get(groupCode); Map<String, Person> currentUsersOfGroup = context.getCurrentState().getCurrentUsersOfGroup(groupCode); Set<String> usersToBeRemoved = new TreeSet<>(currentUsersOfGroup.keySet()); + usersToBeRemoved.removeAll(usersToBeIgnored); AuthorizationGroup globalGroup = context.getCurrentState().getGlobalGroup(); String adminGroupCode = createAdminGroupCode(groupCode); boolean createUserSpace = group == null || group.isCreateUserSpace(); diff --git a/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagerConfig.java b/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagerConfig.java index fd26889514813c38570f33e9c58c581b7420eb61..591a19d8f3acf3d4cb7188db895b802bf34fe08f 100644 --- a/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagerConfig.java +++ 
b/server-application-server/source/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagerConfig.java @@ -38,6 +38,8 @@ class UserManagerConfig private List<String> instanceAdmins; + private List<String> usersToBeIgnored; + public List<String> getGlobalSpaces() { return globalSpaces; @@ -98,6 +100,16 @@ class UserManagerConfig this.instanceAdmins = instanceAdmins; } + public List<String> getUsersToBeIgnored() + { + return usersToBeIgnored; + } + + public void setUsersToBeIgnored(List<String> usersToBeIgnored) + { + this.usersToBeIgnored = usersToBeIgnored; + } + public boolean getReuseHomeSpace() { return reuseHomeSpace; diff --git a/server-application-server/sourceTest/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagementMaintenanceTaskTest.java b/server-application-server/sourceTest/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagementMaintenanceTaskTest.java index 6bf9ae71e4b9b1c42c9be020d5a258c67c607ac3..98dc378fe67149dac4703913691864b58e5872d1 100644 --- a/server-application-server/sourceTest/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagementMaintenanceTaskTest.java +++ b/server-application-server/sourceTest/java/ch/systemsx/cisd/openbis/generic/server/task/UserManagementMaintenanceTaskTest.java @@ -255,6 +255,35 @@ public class UserManagementMaintenanceTaskTest extends AbstractFileSystemTestCas logRecorder.getLogContent()); } + @Test + public void testExecuteWithTwoUsersOneIgnored() + { + // Given + UserManagementMaintenanceTaskWithMocks task = new UserManagementMaintenanceTaskWithMocks() + .withUserManagerReport(new UserManagerReport(new MockTimeProvider(0, 1000))); + FileUtilities.writeToFile(configFile, ""); + task.setUp("", properties); + FileUtilities.writeToFile(configFile, "{\"usersToBeIgnored\":[\"beta\"], " + + "\"groups\": [{\"key\":\"ABC\", \"users\":[\"alpha\", \"beta\"]}]}"); + + // When + task.execute(); + + // Then + assertEquals("INFO OPERATION.UserManagementMaintenanceTaskWithMocks - Setup plugin \n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - Plugin '' initialized. 
Configuration file: " + + configFile.getAbsolutePath() + "\n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - manage 1 groups\n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - Global spaces: []\n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - Common spaces: {}\n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - Common samples: {}\n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - Common experiments: []\n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - Add group ABC[name:null, enabled:true, ldapGroupKeys:null, users:[alpha, beta], admins:null] with users [alpha=alpha]\n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - 1 users for group ABC\n" + + "INFO OPERATION.UserManagementMaintenanceTaskWithMocks - finished", + logRecorder.getLogContent()); + } + @Test public void testExecuteEmptyLdapGroupKeys() { @@ -679,7 +708,7 @@ public class UserManagementMaintenanceTaskTest extends AbstractFileSystemTestCas } @Override - public void manage(Set<String> knownUsers) + public void manage(Set<String> knownUsers, Set<String> usersToBeIgnored) { report.addGroup("dummy group, known users: " + knownUsers); } diff --git a/server-application-server/sourceTest/java/ch/systemsx/cisd/openbis/systemtest/task/UserManagerTest.java b/server-application-server/sourceTest/java/ch/systemsx/cisd/openbis/systemtest/task/UserManagerTest.java index 3f7f94a15f8dbe7c3226856ffc24824d39653ed3..172740e4887be56eb9815681d9dbc0918a870c5a 100644 --- a/server-application-server/sourceTest/java/ch/systemsx/cisd/openbis/systemtest/task/UserManagerTest.java +++ b/server-application-server/sourceTest/java/ch/systemsx/cisd/openbis/systemtest/task/UserManagerTest.java @@ -770,6 +770,45 @@ public class UserManagerTest extends AbstractTest builder.assertExpectations(); } + @Test + public void testRemoveUserToBeIgnoredFromAGroup() + { + // Given + // 1. create group G2 with users U1 (admin), U2 and U3 + MockLogger logger = new MockLogger(); + Map<Role, List<String>> commonSpaces = commonSpaces(); + UserManager userManager = new UserManagerBuilder(v3api, logger, report()).commonSpaces(commonSpaces).get(); + List<String> globalSpaces = Arrays.asList("A", "B"); + userManager.setGlobalSpaces(globalSpaces); + userManager.addGroup(new UserGroupAsBuilder("G2").admins(U1), users(U1, U2, U3)); + assertEquals(manage(userManager).getErrorReport(), ""); + // 2. 
remove U2 from group G2 + userManager = new UserManagerBuilder(v3api, logger, report()).commonSpaces(commonSpaces).get(); + userManager.setGlobalSpaces(globalSpaces); + userManager.addGroup(new UserGroupAsBuilder("G2").admins(U1), users(U1, U3)); + + // When + UserManagerReport report = manage(userManager, Collections.singleton(U2.getUserId())); + + // Then + assertEquals(report.getErrorReport(), ""); + assertEquals(report.getAuditLog(), ""); + UserManagerExpectationsBuilder builder = createBuilder(); + builder.groups("G2").commonSpaces(commonSpaces).users(U1, U2, U3); + builder.space("A").observer(U1).observer(U2).observer(U3); + builder.space("B").observer(U1).observer(U2).observer(U3); + builder.space("G2_ALPHA").admin(U1).user(U2, U3); + builder.space("G2_BETA").admin(U1).user(U2, U3); + builder.space("G2_GAMMA").admin(U1).observer(U2, U3); + builder.space("G2_U1").admin(U1).non(U2, U3); + builder.space("G2_U2").admin(U1).admin(U2).non(U3); + builder.space("G2_U3").admin(U1).non(U2).admin(U3); + builder.homeSpace(U1, "G2_U1"); + builder.homeSpace(U2, "G2_U2"); + builder.homeSpace(U3, "G2_U3"); + builder.assertExpectations(); + } + @Test public void testMoveUserToAnotherGroup() { @@ -2124,11 +2163,16 @@ public class UserManagerTest extends AbstractTest private UserManagerReport manage(UserManager userManager, String... knownUsers) { - userManager.manage(new TreeSet<>(Arrays.asList(knownUsers))); + return manage(userManager, Collections.emptySet(), knownUsers); + } + + private UserManagerReport manage(UserManager userManager, Set<String> usersToBeIgnored, String... knownUsers) + { + userManager.manage(new TreeSet<>(Arrays.asList(knownUsers)), usersToBeIgnored); daoFactory.getSessionFactory().getCurrentSession().flush(); return report; } - + private Map<String, Principal> users(Principal... 
principals) { Map<String, Principal> map = new TreeMap<>(); diff --git a/server-data-store/src/main/java/ch/ethz/sis/afsserver/server/APIServer.java b/server-data-store/src/main/java/ch/ethz/sis/afsserver/server/APIServer.java index 27eadbeb0bf2ef1e3321d4a5bc17859b15811441..a00b0637ac766a317632d54365fe4561f6a6a9b2 100644 --- a/server-data-store/src/main/java/ch/ethz/sis/afsserver/server/APIServer.java +++ b/server-data-store/src/main/java/ch/ethz/sis/afsserver/server/APIServer.java @@ -37,7 +37,7 @@ import java.util.concurrent.ConcurrentHashMap; import static ch.ethz.sis.afsserver.server.APIServerErrorType.IncorrectParameters; import static ch.ethz.sis.afsserver.server.APIServerErrorType.MethodNotFound; -/* +/** * This class should be used as delegate by specific server transport classes * * The API Server allows the following modes of operation: diff --git a/ui-admin/src/js/common/messages.js b/ui-admin/src/js/common/messages.js index bf195f89fd1101957dd4be8a9c535a67720d9381..5cdcb16583f13115831eb7e131268ebf4c1ee7e5 100644 --- a/ui-admin/src/js/common/messages.js +++ b/ui-admin/src/js/common/messages.js @@ -278,6 +278,8 @@ const keys = { UPDATE_MODE: "UPDATE_MODE", UPLOAD: "UPLOAD", URL_TEMPLATE: 'URL_TEMPLATE', + URL_TEMPLATE_HINT: 'URL_TEMPLATE_HINT', + URL_TEMPLATE_PATTERN: 'URL_TEMPLATE_PATTERN', USAGES: 'USAGES', USER: 'USER', USERS: 'USERS', @@ -590,6 +592,8 @@ const messages_en = { [keys.UPDATE_MODE]: 'Update Mode', [keys.UPLOAD]: 'Upload', [keys.URL_TEMPLATE]: 'URL Template', + [keys.URL_TEMPLATE_HINT]: 'For example:\nhttp://www.ebi.ac.uk/QuickGO/GTerm?id=${term}', + [keys.URL_TEMPLATE_PATTERN]: "URL template must contain '${term}', which will be substituted with appropriate term automatically.", [keys.USAGES]: 'Usages', [keys.USERS]: 'Users', [keys.USERS_WHO_REGISTERED_SOME_DATA_CANNOT_BE_REMOVED]: 'Users who have already registered some data cannot be removed.', diff --git a/ui-admin/src/js/components/common/form/FormFieldView.jsx b/ui-admin/src/js/components/common/form/FormFieldView.jsx index 18d067315e1119cf105d322bf9e96ec7aedc9556..678968a61e6a7b9b993444819ffac3cbffaa947e 100644 --- a/ui-admin/src/js/components/common/form/FormFieldView.jsx +++ b/ui-admin/src/js/components/common/form/FormFieldView.jsx @@ -1,8 +1,19 @@ +import _ from 'lodash' import React from 'react' import { withStyles } from '@material-ui/core/styles' import Typography from '@material-ui/core/Typography' +import InfoIcon from '@material-ui/icons/Info' +import Tooltip from '@src/js/components/common/form/Tooltip.jsx' const styles = theme => ({ + container: { + display: 'flex', + flexDirection: 'row', + alignItems: 'center' + }, + control: { + flex: '1 1 auto' + }, label: { fontSize: theme.typography.label.fontSize, color: theme.typography.label.color @@ -13,20 +24,35 @@ const styles = theme => ({ borderBottomStyle: 'solid', borderBottomColor: theme.palette.border.secondary }, - container: {} + description: { + flex: '0 0 auto', + '& svg': { + color: theme.palette.hint.main + }, + cursor: 'pointer' + } }) class FormFieldView extends React.PureComponent { render() { - const { label, value, classes } = this.props + const { label, value, description, classes } = this.props return ( <div className={classes.container}> - <Typography variant='body2' component='div' className={classes.label}> - {label} - </Typography> - <Typography variant='body2' component='div' className={classes.value}> - {value ? 
value : <span> </span>} - </Typography> + <div className={classes.control}> + <Typography variant='body2' component='div' className={classes.label}> + {label} + </Typography> + <Typography variant='body2' component='div' className={classes.value}> + {value ? value : <span> </span>} + </Typography> + </div> + {!_.isNil(description) && ( + <div className={classes.description}> + <Tooltip title={description}> + <InfoIcon fontSize='small' /> + </Tooltip> + </div> + )} </div> ) } diff --git a/ui-admin/src/js/components/common/form/TextField.jsx b/ui-admin/src/js/components/common/form/TextField.jsx index 33bceabf18fc975b90e5ff4b21375399bd956119..68c4dfb94825c63cdd13c7071fd2a6549e4320ee 100644 --- a/ui-admin/src/js/components/common/form/TextField.jsx +++ b/ui-admin/src/js/components/common/form/TextField.jsx @@ -50,8 +50,8 @@ class TextFormField extends React.PureComponent { } renderView() { - const { label, value } = this.props - return <FormFieldView label={label} value={value} /> + const { label, value, description } = this.props + return <FormFieldView label={label} value={value} description={description} /> } renderEdit() { diff --git a/ui-admin/src/js/components/types/form/vocabularytype/VocabularyTypeFormControllerValidate.js b/ui-admin/src/js/components/types/form/vocabularytype/VocabularyTypeFormControllerValidate.js index d21b7c1da49756c7db7f141869c3e7694721ba31..2052499d3954b48e26094d6d090ab843e3b5ea3f 100644 --- a/ui-admin/src/js/components/types/form/vocabularytype/VocabularyTypeFormControllerValidate.js +++ b/ui-admin/src/js/components/types/form/vocabularytype/VocabularyTypeFormControllerValidate.js @@ -2,6 +2,8 @@ import PageControllerValidate from '@src/js/components/common/page/PageConroller import VocabularyTypeFormSelectionType from '@src/js/components/types/form/vocabularytype/VocabularyTypeFormSelectionType.js' import messages from '@src/js/common/messages.js' +const URL_TEMPLATE_PATTERN = /^.*\$\{term\}.*$/ + export default class VocabularyTypeFormControllerValidate extends PageControllerValidate { validate(validator) { const { vocabulary, terms } = this.context.getState() @@ -53,6 +55,8 @@ export default class VocabularyTypeFormControllerValidate extends PageController validator.validateCode(vocabulary, 'code', messages.get(messages.CODE)) } + validator.validatePattern(vocabulary, 'urlTemplate', messages.get(messages.URL_TEMPLATE_PATTERN), URL_TEMPLATE_PATTERN) + return validator.withErrors(vocabulary) } diff --git a/ui-admin/src/js/components/types/form/vocabularytype/VocabularyTypeFormParametersVocabulary.jsx b/ui-admin/src/js/components/types/form/vocabularytype/VocabularyTypeFormParametersVocabulary.jsx index d189ab492fcd43122e26124a66e74535c34f5f8f..86ffd487ad2fb50697c220adeedcf372af0b7832 100644 --- a/ui-admin/src/js/components/types/form/vocabularytype/VocabularyTypeFormParametersVocabulary.jsx +++ b/ui-admin/src/js/components/types/form/vocabularytype/VocabularyTypeFormParametersVocabulary.jsx @@ -222,6 +222,7 @@ class VocabularyTypeFormParametersVocabulary extends React.PureComponent { <TextField reference={this.references.urlTemplate} label={messages.get(messages.URL_TEMPLATE)} + description={messages.get(messages.URL_TEMPLATE_HINT)} name='urlTemplate' error={error} disabled={!enabled} diff --git a/ui-admin/srcTest/js/common/fixture.js b/ui-admin/srcTest/js/common/fixture.js index 795d9341de03326c7a57cf9dd9de748e151fe650..272ea021073517de81770557b86d812fc03c8e7e 100644 --- a/ui-admin/srcTest/js/common/fixture.js +++ b/ui-admin/srcTest/js/common/fixture.js @@ 
-165,7 +165,7 @@ TEST_TERM_6_DTO.setOfficial(false)
 const TEST_VOCABULARY_DTO = new openbis.Vocabulary()
 TEST_VOCABULARY_DTO.setCode('TEST_VOCABULARY')
 TEST_VOCABULARY_DTO.setDescription('TEST_DESCRIPTION')
-TEST_VOCABULARY_DTO.setUrlTemplate('TEST_URL_TEMPLATE')
+TEST_VOCABULARY_DTO.setUrlTemplate('http://test-url-template/${term}')
 TEST_VOCABULARY_DTO.setTerms([
   TEST_TERM_1_DTO,
   TEST_TERM_2_DTO,
diff --git a/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/DataSetForm/widgets/DatasetViewerController.js b/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/DataSetForm/widgets/DatasetViewerController.js
index 6c8c3d7f32f905bce46e20027ba5bccc370cfe46..64cd3782a7383f0220ffefb1593cca8b1385db0f 100644
--- a/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/DataSetForm/widgets/DatasetViewerController.js
+++ b/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/DataSetForm/widgets/DatasetViewerController.js
@@ -75,13 +75,25 @@ function DataSetViewerController(containerId, profile, entity, serverFacade, dat
 			}
 		}
 	}
+
+	this._getDataSetType = function(dataset) {
+		var type = dataset.dataSetTypeCode;
+		if(type) {
+			return type;
+		}
+		type = dataset.type;
+		if(type) {
+			return dataset.type.code;
+		}
+		return type;
+	}
 
 	this.updateDatasets = function(datasets) {
 		var _this = this;
 		var datasetPermIds = [];
 		for(var i = 0; i < datasets.length; i++) { //DataSets for entity
-			var type = datasets[i].dataSetTypeCode;
+			var type = this._getDataSetType(datasets[i]);
 			if (type && (profile.showDataset(type) || profile.showDatasetOnNav(type))) {
 				datasetPermIds.push(datasets[i].code);
 			}
diff --git a/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/DataSetForm/widgets/DatasetViewerView.js b/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/DataSetForm/widgets/DatasetViewerView.js
index 4fee4dce89264fb251b200dc2ce36f87853548da..0e16b3476e4e2198011334449181c08146c3c0bc 100644
--- a/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/DataSetForm/widgets/DatasetViewerView.js
+++ b/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/DataSetForm/widgets/DatasetViewerView.js
@@ -327,6 +327,18 @@ function DataSetViewerView(dataSetViewerController, dataSetViewerModel) {
 			node.visit(function(n) {_this._expandDeep(n);});
 		})
 	}
+
+	this._getDataSetType = function(dataset) {
+		var type = dataset.dataSetTypeCode;
+		if(type) {
+			return type;
+		}
+		type = dataset.type;
+		if(type) {
+			return dataset.type.code;
+		}
+		return type;
+	}
 
 	this.repaintFilesAsTree = function($container) {
 		$container.empty();
@@ -347,7 +359,7 @@ function DataSetViewerView(dataSetViewerController, dataSetViewerModel) {
 			var dataset = this._dataSetViewerModel.entityDataSets[datasetCode];
 			var onClick = "mainController.changeView('showViewDataSetPageFromPermId', '" + datasetCode + "');";
 			var dataSetTitle = "<span id=\"dataSetPosInTree-" + dataSetPosInTree + "\" onclick=\"" + onClick + "\">"
-					+ dataset.dataSetTypeCode + " : " + displayName + "</span>";
+					+ this._getDataSetType(dataset) + " : " + displayName + "</span>";
 			treeModel.push({ title : dataSetTitle, key : "/", folder : true, lazy : true, datasetCode : datasetCode });
 			dataSetPosInTree += 1;
 		}
diff --git a/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/SideMenu/SideMenuWidgetBrowserController.js b/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/SideMenu/SideMenuWidgetBrowserController.js
index aad47f9908e7886f9a13791691dd6a0f4c0955df..b51422bef4a21ab8685669ce61451a881131df64 100644
--- a/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/SideMenu/SideMenuWidgetBrowserController.js
+++ b/ui-eln-lims/src/core-plugins/eln-lims/1/as/webapps/eln-lims/html/js/views/SideMenu/SideMenuWidgetBrowserController.js
@@ -1854,7 +1854,7 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
     var loadSamplesPromise = this._loadNodesExperimentSamples({
       node: samplesFolderNode,
       offset: 0,
-      limit: 0,
+      limit: this.LOAD_LIMIT,
     })
 
     var dataSetsFolderNode = this._createExperimentDataSetsNode()
@@ -1871,7 +1871,7 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
       loadDataSetsPromise = this._loadNodesExperimentDataSets({
         node: dataSetsFolderNode,
         offset: 0,
-        limit: 0,
+        limit: this.LOAD_LIMIT,
       })
     } else {
       loadDataSetsPromise = Promise.resolve({
@@ -1883,7 +1883,7 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
 
     var results = {
       nodes: [],
-      totalCount: 0
+      totalCount: 0,
     }
 
     if (samplesResults.totalCount > 0) {
@@ -1961,11 +1961,11 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
       },
       sampleFetchOptions,
       (searchResult) => {
-        var results = { nodes: [], totalCount: searchResult.totalCount }
-
-        searchResult.objects.forEach((sample) => {
+        var results = this._filterResultsByFunction(params, searchResult, (sample) => {
           if (this._isExperimentSample(sample)) {
-            results.nodes.push(this._createSampleNode(sample))
+            return this._createSampleNode(sample)
+          } else {
+            return null
           }
         })
 
@@ -1978,7 +1978,7 @@ async _loadNodesExperimentDataSets(params) {
     var datasetRules = {
      [Util.guid()]: { type: "Experiment", name: "ATTR.PERM_ID", value: params.node.experimentPermId },
-      [Util.guid()]: { type: "Sample", name: "NULL.NULL", value : "NULL" }
+      [Util.guid()]: { type: "Sample", name: "NULL.NULL", value: "NULL" },
     }
 
     var datasetSubcriteria = []
@@ -2037,11 +2037,11 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
       },
       datasetFetchOptions,
       (searchResult) => {
-        var results = { nodes: [], totalCount: searchResult.totalCount }
-
-        searchResult.objects.forEach((dataSet) => {
+        var results = this._filterResultsByFunction(params, searchResult, (dataSet) => {
           if (this._isExperimentDataSet(dataSet)) {
-            results.nodes.push(this._createDataSetNode(dataSet))
+            return this._createDataSetNode(dataSet)
+          } else {
+            return null
           }
         })
 
@@ -2058,7 +2058,7 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
     var loadChildrenPromise = this._loadNodesSampleChildren({
       node: childrenFolderNode,
       offset: 0,
-      limit: 0,
+      limit: this.LOAD_LIMIT,
     })
 
     var dataSetsFolderNode = this._createSampleDataSetsNode()
@@ -2075,7 +2075,7 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
       loadDataSetsPromise = this._loadNodesSampleDataSets({
        node: dataSetsFolderNode,
         offset: 0,
-        limit: 0,
+        limit: this.LOAD_LIMIT,
       })
     } else {
       loadDataSetsPromise = Promise.resolve({
@@ -2087,7 +2087,7 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
 
     var results = {
       nodes: [],
-      totalCount: 0
+      totalCount: 0,
     }
 
     if (childrenResults.totalCount > 0) {
@@ -2199,11 +2199,11 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
       },
       sampleFetchOptions,
       (searchResult) => {
-        var results = { nodes: [], totalCount: searchResult.totalCount }
-
-        searchResult.objects.forEach((sample) => {
+        var results = this._filterResultsByFunction(params, searchResult, (sample) => {
           if (this._isChildSample(sample)) {
-            results.nodes.push(this._createSampleNode(sample))
+            return this._createSampleNode(sample)
+          } else {
+            return null
           }
         })
 
@@ -2271,11 +2271,11 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
       },
       datasetFetchOptions,
       (searchResult) => {
-        var results = { nodes: [], totalCount: searchResult.totalCount }
-
-        searchResult.objects.forEach((dataSet) => {
+        var results = this._filterResultsByFunction(params, searchResult, (dataSet) => {
           if (this._isSampleDataSet(dataSet)) {
-            results.nodes.push(this._createDataSetNode(dataSet))
+            return this._createDataSetNode(dataSet)
+          } else {
+            return null
           }
         })
 
@@ -3043,6 +3043,28 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
     }
   }
+
+  _filterResultsByFunction(params, results, filterFunction) {
+    var filteredResults = { nodes: [] }
+
+    results.objects.forEach((object) => {
+      var node = filterFunction(object)
+
+      if (node) {
+        filteredResults.nodes.push(node)
+      }
+    })
+
+    if (params.offset === 0 && params.limit >= results.totalCount) {
+      // all available results have been loaded from the server, so use the number of results that passed the client-side filtering as the total count (that's more accurate)
+      filteredResults.totalCount = filteredResults.nodes.length
+    } else {
+      // otherwise we have to use the total count from the server, as we cannot tell how many of them would pass the client-side filtering without loading them all
+      filteredResults.totalCount = results.totalCount
+    }
+
+    return filteredResults
+  }
+
   _isLabNotebookSpace(space) {
     var showLabNotebook = SettingsManagerUtils.isEnabledForGroup(
       space.getCode(),
@@ -3090,13 +3112,12 @@ class SideMenuWidgetBrowserController extends window.NgComponents.default.Browse
   }
 
   _isExperimentWithChildren(experiment) {
-    var experimentIdentifier = experiment.getIdentifier().getIdentifier();
-    var experimentSpaceCode = IdentifierUtil.getSpaceCodeFromIdentifier(experimentIdentifier);
-    var isInventorySpace = profile.isInventorySpace(experimentSpaceCode);
+    var experimentIdentifier = experiment.getIdentifier().getIdentifier()
+    var experimentSpaceCode = IdentifierUtil.getSpaceCodeFromIdentifier(experimentIdentifier)
+    var isInventorySpace = profile.isInventorySpace(experimentSpaceCode)
 
-    var isInventoryCollectionExperiment = experiment.getType().getCode() === "COLLECTION" ||
-      isInventorySpace;
-    return !isInventoryCollectionExperiment;
+    var isInventoryCollectionExperiment = experiment.getType().getCode() === "COLLECTION" || isInventorySpace
+    return !isInventoryCollectionExperiment
   }
 
   _isExperimentSample(sample) {
diff --git a/ui-eln-lims/src/core-plugins/eln-lims/1/dss/drop-boxes/eln-lims-dropbox/eln-lims-dropbox.py b/ui-eln-lims/src/core-plugins/eln-lims/1/dss/drop-boxes/eln-lims-dropbox/eln-lims-dropbox.py
index 129057a1740f1375b96666b234de8c991b7bf399..45db4f0ff5db961980b3a70fd37636dc52650cf1 100644
--- a/ui-eln-lims/src/core-plugins/eln-lims/1/dss/drop-boxes/eln-lims-dropbox/eln-lims-dropbox.py
+++ b/ui-eln-lims/src/core-plugins/eln-lims/1/dss/drop-boxes/eln-lims-dropbox/eln-lims-dropbox.py
@@ -231,7 +231,7 @@ def getFilesWithIllegalCharacters(folder):
     for f in files:
         result.extend(getFilesWithIllegalCharacters(f))
-    return False;
+    return result
 
 
 def getHiddenFiles(folder):
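
Note on the SideMenuWidgetBrowserController hunks above: the new _filterResultsByFunction helper post-filters a server search result on the client and then decides which total count to report, so that filtered-out objects do not inflate the node counts shown in the side menu. The standalone JavaScript sketch below restates that rule with mock inputs so both branches are easy to trace; everything outside the diff (the top-level function, mockSearchResult, mockPage, keepSamples) is illustrative only and not part of the openBIS code base.

    // Standalone restatement of the counting rule added in _filterResultsByFunction.
    // params  ... { offset, limit } that were sent to the server for this page
    // results ... { objects, totalCount } as returned by the server search
    function filterResultsByFunction(params, results, filterFunction) {
      var filteredResults = { nodes: [] }

      results.objects.forEach(function (object) {
        // filterFunction returns a node for objects to keep, or null to drop them
        var node = filterFunction(object)
        if (node) {
          filteredResults.nodes.push(node)
        }
      })

      if (params.offset === 0 && params.limit >= results.totalCount) {
        // the whole result set fit into one page, so the filtered length is the exact total
        filteredResults.totalCount = filteredResults.nodes.length
      } else {
        // only one page was loaded; keep the server total, since the filtered total is unknown
        filteredResults.totalCount = results.totalCount
      }

      return filteredResults
    }

    // Illustrative usage with mock data (not real openBIS DTOs):
    var keepSamples = function (object) {
      return object.kind === "SAMPLE" ? { text: object.code } : null
    }

    var mockSearchResult = {
      objects: [{ kind: "SAMPLE", code: "S1" }, { kind: "DATASET", code: "D1" }, { kind: "SAMPLE", code: "S2" }],
      totalCount: 3,
    }
    // Everything was loaded, so the reported total is the filtered length: 2
    console.log(filterResultsByFunction({ offset: 0, limit: 50 }, mockSearchResult, keepSamples).totalCount)

    var mockPage = {
      objects: [{ kind: "SAMPLE", code: "S1" }, { kind: "DATASET", code: "D1" }],
      totalCount: 10,
    }
    // Only a partial page was loaded, so the server total is kept: 10
    console.log(filterResultsByFunction({ offset: 0, limit: 2 }, mockPage, keepSamples).totalCount)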