Commit 312502c7 authored by tpylak
SE-133 YeastX: integration tests (step 1)

SVN: 12710
parent 758ff4dc
# author: Tomasz Pylak, 2007-09-27
# Integration tests functions
# ----------------------------- global constants
TRUE=1
FALSE=0
# all paths are relative to the template directory
TEMPLATE=templates
TARGETS=targets
TEST_DATA=testData
WORK=$TARGETS/playground
INSTALL=$TARGETS/install
LOCAL_PROJECTS=..
OPENBIS_SERVER_NAME=openBIS-server
OPENBIS_SERVER=$WORK/$OPENBIS_SERVER_NAME
ERR_LOG=$WORK/all_err_log.txt
# ----------------------------- global state
TEST_FAILED=false # working variable, if true then some tests failed
# ----------------------------- assertions to check the tests results
function init_log {
rm -fr $ERR_LOG
}
function report_error {
local msg=$@
echo [ERROR] $msg | tee -a $ERR_LOG >&2
TEST_FAILED="true"
}
function exit_if_assertion_failed {
if [ "$TEST_FAILED" = "true" ]; then
report_error Test failed.
exit 1;
else
echo [OK] Test was successful!
fi
}
function assert_file_exists {
local file=$1
if [ ! -f "$file" ]; then
report_error File $file does not exist!
else
echo [OK] File $file exists
fi
}
function assert_file_not_exists {
local file=$1
if [ -f "$file" ]; then
report_error File $file does exist although it should not!
else
echo [OK] File $file does not exist
fi
}
function assert_same_inode {
local file1=$1
local file2=$2
if [ $file1 -ef $file2 ]; then
echo [OK] $file1 and $file2 have the same inode number.
else
report_error "$file1 and $file2 do not have the same inode number."
fi
}
function assert_dir_exists {
local DIR=$1
if [ ! -d "$DIR" ]; then
report_error Directory \"$DIR\" does not exist!
else
echo [OK] Directory \"$DIR\" exists
fi
}
function fatal_error {
local MSG=$@
report_error $MSG
exit_if_assertion_failed
}
# remember to pass the parameter in quote marks
function assert_file_exists_or_die {
local F="$1"
local files_num=`ls -1 $F 2> /dev/null | wc -l`
if [ $files_num -gt 1 ]; then
fatal_error "One file expected for pattern $F, but more found: " $F
else
if [ ! -f $F ]; then
fatal_error "No file matching pattern $F exists"
fi
fi
}
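# Usage example (as in build_zips below); the quotes prevent the shell from
# expanding the pattern before the function receives it:
#   assert_file_exists_or_die "$INSTALL/openBIS*.zip"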
function assert_dir_exists_or_die {
local DIR=$1
if [ ! -d $DIR ]; then
fatal_error "Directory $DIR does not exist!"
fi
}
function assert_dir_empty {
dir=$1
is_empty_dir $dir
empty=$?
if [ $empty == 0 ]; then
report_error Directory \'$dir\' should be empty!
fi
}
function assert_same_content {
local expected_file=$1
local actual_file=$2
cmd="diff --exclude=\.svn -r $expected_file $actual_file"
supress=`eval $cmd`
is_different=$?
if [ $is_different == 1 ]; then
report_error "Different content in $expected_file (marked by '<') and $actual_file (marked by '>')"
eval $cmd
else
echo "[OK] Same content in $expected_file and $actual_file"
fi
}
function assert_equals {
local message=$1
local expected_text=$2
local actual_text=$3
if [ "$expected_text" != "$actual_text" ]; then
report_error "$message: expected: <$expected_text> but was: <$actual_text>"
fi
}
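# Usage example (as in assert_files_number below):
#   assert_equals "Wrong number of files in $dir directory" $expected_files_count $files_count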
function assert_equals_as_in_file {
local expected_text=$1
local file_with_actual_text=$2
assert_file_exists $file_with_actual_text
assert_equals "Content of file $file_with_actual_text" "$expected_text" "`cat $file_with_actual_text`"
}
function assert_pattern_present {
local file=$1
local occurrences=$2
local pattern=$3
assert_file_exists $file
echo Matched lines:
cat $file | grep "$pattern"
local lines=`cat $file | grep "$pattern" | wc -l`
if [ $lines != $occurrences ]; then
report_error $lines instead of $occurrences occurrences of pattern $pattern found!
else
echo [OK] $occurrences occurrences of pattern $pattern found
fi
}
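# Usage example (as in assert_dss_registration below): expects exactly one line
# containing 'getVersion' in the given log file:
#   assert_pattern_present $WORK/$dss/log/datastore_server_log.txt 1 getVersion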
function assert_files_number {
local dir=$1
local expected_files_count=$2
local files_count=`ls -1 $dir | wc -l`
assert_equals "Wrong number of files in $dir directory" $expected_files_count $files_count
}
# -----------------------------
# Scripts to build and install components needed in integration tests.
#
# Implementation assumptions:
# - the current directory after calling a function does not change
# -----------------------------
# ----------------------------- configuration
BIN_PATHS="/opt/local/bin /usr/bin /usr/sbin"
USER=`whoami`
DATABASE=openbis_integration_test
# --------------------------- build distributions from sources
# Replaces the ':' in $PATH with ' '.
function get_env_path {
echo $PATH | tr ":" " "
}
# Looks for a specified executable in environment paths and
# paths given as a parameter (space separated).
function locate_file {
local file=$1
shift
local additional_paths=$@
for dir in `get_env_path` $additional_paths; do
local full_path=$dir/$file
if [ -x $full_path ]; then
echo $full_path;
return
fi
done
}
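# Usage sketch (the variable name is only illustrative): looks for 'svn' first
# in $PATH and then in $BIN_PATHS, as run_svn below does:
#   svn_exe=`locate_file svn $BIN_PATHS`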
function run_svn {
`locate_file svn $BIN_PATHS` $@
}
function run_lsof {
`locate_file lsof $BIN_PATHS` $@
}
# Tries to find PostgreSQL executable and returns its absolute path.
# If not found, then exits the script with an appropriate error message.
function run_psql {
for prg in psql psql84 psql83; do
exe=`locate_file $prg $BIN_PATHS`
if [ $exe ]; then
echo $exe
return
fi
done
echo "Cannot find PostgreSQL"
echo "This executable is needed to run the integration tests"
exit 1
}
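# Usage sketch (the query is only illustrative): capture the printed path and
# invoke psql through it, as install_openbis_server below does:
#   psql_cmd=`run_psql`
#   $psql_cmd -U postgres -c "select version()"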
function build_zips {
build_dss=$1
build_dmv=$2
build_openbis=$3
use_local_source=$4
if [ $build_dss == "true" -o $build_dmv == "true" -o $build_openbis == "true" ]; then
mkdir -p $INSTALL
if [ "$use_local_source" = "true" ]; then
build_zips_from_local $build_dss $build_dmv $build_openbis
else
build_zips_from_svn $build_dss $build_dmv $build_openbis
fi
else
echo "No components to build were specified (--help explains how to do this)."
echo "Build process skipped."
fi
assert_file_exists_or_die "$INSTALL/openBIS*.zip"
assert_file_exists_or_die "$INSTALL/datastore_server*.zip"
assert_file_exists_or_die "$INSTALL/datamover*.zip"
}
function build_zips_from_local {
build_dss=$1
build_dmv=$2
build_openbis=$3
build_components build_local $build_dss $build_dmv $build_openbis
}
function build_local {
local PROJECT_NAME=$1
$LOCAL_PROJECTS/$PROJECT_NAME/build/antrun.sh
local dir=$LOCAL_PROJECTS/$PROJECT_NAME/targets/dist/
mv $dir/*.zip $INSTALL
mv $dir/*.jar $INSTALL
}
function build_components {
build_cmd=$1
build_dss=$2
build_dmv=$3
build_openbis=$4
if [ $build_dss == "true" ]; then
rm -f $INSTALL/datastore_server*.zip
rm -f $INSTALL/datastore_server*.jar
$build_cmd datastore_server
$build_cmd rtd_yeastx
fi
if [ $build_dmv == "true" ]; then
rm -f $INSTALL/datamover*.zip
$build_cmd datamover
fi
if [ $build_openbis == "true" ]; then
rm -f $INSTALL/openBIS*.zip
$build_cmd openbis
fi
}
function build_remote {
local RSC=$1
local PROJECT_NAME=$2
cd $RSC
./build.sh $PROJECT_NAME
cd ..
}
function build_zips_from_svn {
build_dss=$1
build_dmv=$2
build_openbis=$3
RSC=build_resources
rm -fr $RSC
run_svn checkout svn+ssh://svncisd.ethz.ch/repos/cisd/build_resources/trunk $RSC
build_components "build_remote $RSC" $build_dss $build_dmv $build_openbis
mv $RSC/*.zip $INSTALL
rm -fr $RSC
}
# -------------------------- installation
# Recursively removes '.svn' directory in passed directory.
function clean_svn {
local DIR=$1
for file in `find $DIR -name ".svn"`; do
rm -fr $file;
done
}
function copy_templates {
local template_dir=$1
cp -fR $TEMPLATE/$template_dir $WORK
clean_svn $WORK/$template_dir
}
function prepare {
src=$1
dest=$2
rm -fr $WORK/$dest
cp -R $WORK/$src $WORK/$dest
copy_templates $dest
}
function unpack { # unpacks the matching zip from $INSTALL into $WORK
local file_pattern=$1
unzip -d $WORK $INSTALL/$file_pattern*.zip
}
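# Usage example (see install_dsss below): unpacks $INSTALL/datastore_server*.zip
# into $WORK:
#   unpack datastore_server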
function remove_unpacked {
rm -fR $WORK/$1
}
function check_server_port {
run_lsof -i -n -P | grep 8443
}
function wait_for_server {
echo -n "Server starting"
i=0;
while [ "`check_server_port`" == "" -a $i -lt 20 ]; do
sleep 2;
echo -n ".";
let i=$i+1;
done
if [ "`check_server_port`" == "" ]; then
report_error "Server could not be started!"
exit 1
else
echo "...[Done]"
fi
}
function install_openbis_server {
local install_openbis=$1
psql_cmd=`run_psql`
$psql_cmd -U postgres -c "drop database $DATABASE"
$psql_cmd -U postgres -c "create database $DATABASE with owner $USER template = template0 encoding = 'UNICODE'"
$psql_cmd -U $USER -d $DATABASE -f $TEMPLATE/$OPENBIS_SERVER_NAME/test_database.sql
if [ $install_openbis == "true" ]; then
rm -fr $OPENBIS_SERVER
copy_templates $OPENBIS_SERVER_NAME
unzip -d $OPENBIS_SERVER $INSTALL/openBIS*.zip
$OPENBIS_SERVER/openBIS-server/install.sh $PWD/$OPENBIS_SERVER $OPENBIS_SERVER/service.properties $OPENBIS_SERVER/openbis.conf
wait_for_server
else
copy_templates $OPENBIS_SERVER_NAME
restart_openbis
fi
}
function startup_openbis_server {
call_in_dir bin/startup.sh $OPENBIS_SERVER/apache-tomcat
wait_for_server
}
function shutdown_openbis_server {
if [ "`check_server_port`" != "" ]; then
$OPENBIS_SERVER/apache-tomcat/bin/shutdown.sh
fi
}
# unpack everything, override the default configuration with the test configuration
function install_dsss {
local install_dss=$1
local dss_dirs="datastore_server1 datastore_server2 datastore_server_yeastx"
if [ $install_dss == "true" ]; then
unpack datastore_server
for dss_dir in $dss_dirs; do
prepare datastore_server $dss_dir
done
remove_unpacked datastore_server
else
for dss_dir in $dss_dirs; do
copy_templates $dss_dir
done
fi
}
function install_datamovers {
local install_dmv=$1
if [ $install_dmv == "true" ]; then
unpack datamover
prepare datamover datamover-raw
prepare datamover datamover-analysis
remove_unpacked datamover
cp -fR $TEMPLATE/dummy-img-analyser $WORK
copy_templates datamover-raw
copy_templates datamover-analysis
else
copy_templates datamover-raw
copy_templates datamover-analysis
fi
}
function restart_openbis {
assert_dir_exists_or_die $OPENBIS_SERVER
shutdown_openbis_server
sleep 1
startup_openbis_server
sleep 4
}
function install {
local install_dss=$1
local install_dmv=$2
local install_openbis=$3
local reinstall_all=$4
mkdir -p $WORK
if [ $reinstall_all == "true" ];then
install_dsss "true"
install_datamovers "true"
install_openbis_server "true"
else
install_dsss $install_dss
install_datamovers $install_dmv
install_openbis_server $install_openbis
fi
}
# ----------------------------- general
# calls $cmd script, changing directory to $dir
function call_in_dir {
cmd=$1
dir=$2
prev=$PWD
cd $dir
sh $cmd
cd $prev
}
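# Usage example (as in startup_openbis_server above): runs bin/startup.sh with
# the Tomcat directory as the working directory, then returns to the caller's directory:
#   call_in_dir bin/startup.sh $OPENBIS_SERVER/apache-tomcat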
# Returns 1 if the directory is empty, 0 otherwise (note: inverted with respect
# to the usual shell convention; assert_dir_empty relies on this).
function is_empty_dir {
dir=$1
if [ "`ls $dir`" = "" ]; then
return 1;
else
return 0;
fi
}
# ----------------------- Launching
function chmod_exec {
for file in $@; do
if [ -f $file ]; then
chmod u+x $file
fi
done
}
function switch_sth {
switch_on=$1 # on/off
dir=$WORK/$2
cmd_start=$3
cmd_stop=$4
report_errors=$5 # $TRUE/$FALSE: whether ERROR entries found in the logs should be reported
assert_dir_exists_or_die $dir
chmod_exec $dir/$cmd_start
chmod_exec $dir/$cmd_stop
if [ "$switch_on" == "on" ]; then
echo "Launching $dir..."
rm -fr $dir/log/*
call_in_dir "$cmd_start" $dir
else
echo "Stopping $dir, displaying errors from the log"
if [ "`cat $dir/log/* | grep ERROR | tee -a $ERR_LOG`" != "" ]; then
if [ $report_errors -eq $TRUE ]; then
report_error $dir reported errors.
cat $dir/log/* | grep ERROR >&2
fi
fi
call_in_dir "$cmd_stop" $dir
fi
}
function switch_dss {
switch_sth $1 $2 "datastore_server.sh start" "datastore_server.sh stop" $FALSE
}
function switch_dmv {
switch_sth $1 $2 "datamover.sh start" "datamover.sh stop" $TRUE
}
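# Usage sketch (directory names as prepared by install_dsss/install_datamovers;
# these calls are only illustrative, the actual test driver lives outside this file):
#   switch_dss "on" datastore_server1    # launch one of the DSS instances
#   switch_dmv "off" datamover-raw       # stop the raw datamover, reporting ERROR log entries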
function assert_dss_registration {
local dss=$1
echo ==== assert registration of DSS $dss ====
assert_pattern_present $WORK/$dss/log/datastore_server_log.txt 1 getVersion
}
function build_and_install {
install_dss=$1
install_dmv=$2
install_openbis=$3
use_local_source=$4
reinstall_all=$5
init_log
# NOTE: Comment this line if you want to use different libraries.
#build_zips $install_dss $install_dmv $install_openbis $use_local_source
# Prepare empty incoming data
DATA=$WORK/data
rm -fr $DATA
mkdir -p $DATA
cp -R $TEMPLATE/data $WORK
clean_svn $DATA
install $install_dss $install_dmv $install_openbis $reinstall_all
}
function clean_after_tests {
echo "Cleaning $INSTALL..."
rm -fr $INSTALL
echo "Cleaning $WORK..."
rm -fr $WORK
}
Author: Tomasz Pylak, 2007-09-26
Basil Neff, 2008-06-03 (section "integration test in branches with the datamover" created)
The integration test scenario
-----------------------------
Assumption: postgres is running on the local machine.
- lims server is launched
- lims client registers some cell plates
- two pairs of etl server and datamover are launched: one pair for raw data and one for image analysis data
- some data are generated for each cell plate
- 'raw' datamover moves the data, creating an additional copy
- 'raw' etl server registers raw data
- a dummy script does the image analysis and moves the data for the 'analysis' datamover
- 'analysis' datamover moves the data
- 'analysis' etl server registers analysis data
Directories
-----------------
Launching run.sh again will redo the tests without rebuilding or reinstalling anything.
To reinstall everything without rebuilding the binaries, delete the playground directory.
If you want to start the tests from scratch, launch run.sh with the --force-rebuild option.
Integration test in branches with the datamover
---------------------------------------------
Because the datamover is not part of our branch, we need to be able to run the
integration test against an existing datamover distribution.
To do this, create the directory 'install' in the targets directory and copy into it
a distribution of the CISD datamover which matches the pattern 'datamover-*.zip'
(see the example below).
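For example (run from the integration tests directory; the zip file name is only
an illustration):
  mkdir -p targets/install
  cp /path/to/datamover-SNAPSHOT.zip targets/install/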
If you checked out the whole branch, you can run the integration test script with the following parameters:
./run.sh --etl --lims --local-source --reinstall-all
faulty-duplicated-mapping --------------------
ERROR: No datasets could be processed, because there is an error in the mapping file index.tsv: the file 'file.mzxml' appears more than once.
faulty-experiment-code --------------------
ERROR: file1.mzXML - cannot upload the file: It was expected that there is exactly one experiment in the 'TEST/TEST_PROJECT' project with property 'name' set to 'unknown', but 0 were found!
faulty-mapped-file-does-not-exist --------------------
ERROR: There are following files mentioned in the mapping file which do not exist:
[file1.mzxml]
Browse the mapping file and check if you have not misspelled some file names.
faulty-no-email-specified --------------------
ERROR: No datasets could be processed, because there is an error in the mapping file index.tsv: There should be a '#' character followed by an email address in the first line of the file. The email is needed to send messages about errors.
faulty-no-mapping --------------------
ERROR: file1.mzXML - no mapping could be found for this dataset
faulty-non-unique-mapping --------------------
ERROR: file1.mzXML - cannot upload the file: there is no sample which matches the criteria <Samples from the group 'TEST' with property 'samplename' set to 'any' in any experiment>
ERROR: file2.mzXML - cannot upload the file: there is no sample which matches the criteria <Samples from the group 'TEST' with property 'samplename' set to 'any' in project 'TEST_PROJECT', experiment name = 'Praktikum Metabolic Networks 2009'>
ERROR: file3.mzXML - no mapping could be found for this dataset
faulty-to-many-mapping-files --------------------
ERROR: No datasets from the directory 'faulty-to-many-mapping-files' can be processed because there is more than one file with extension '[tsv]'.
faulty-unknow-property --------------------
ERROR: file1.mzXML - cannot upload the file: Property type with code 'unknow-property' does not exist!
faulty-unknown-mapping --------------------
ERROR: file1.mzXML - cannot upload the file: there is no sample which matches the criteria <Samples from the group 'TEST' with property 'samplename' set to 'unknown' in any experiment>
ERROR: file2.mzXML - cannot upload the file: No project 'UNKNOWN' could be found in the 'TEST' group!
ERROR: file3.mzXML - cannot upload the file: experiment and project columns should be both empty or should be both filled.
ERROR: file4.mzXML - cannot upload the file: experiment and project columns should be both empty or should be both filled.
ERROR: file5.mzXML - cannot upload the file: error when checking if sample '/unknown/x' belongs to an experiment: No group could be found for identifier 'CISD:/UNKNOWN'.
ERROR: file5.mzXML - cannot upload the file: No group could be found for identifier 'CISD:/UNKNOWN'.
faulty-wrong-conversion --------------------
ERROR: file1.mzXML - cannot upload the file: conversion column cannot be empty for this type of file.
ERROR: file2.eicML - cannot upload the file: conversion column must be empty for this type of file.
ERROR: file2.fiaML - cannot upload the file: conversion column must be empty for this type of file.
ignore-empty-dir --------------------
ignore-no-index --------------------
# openbis-user@mailinator.pl
file_name sample group
file.pdf s32 TEST
file.zip s32 TEST
file.mat s32 TEST
file.any s32 TEST
any content
matlab content
pdf content
zip content
content
content
content
# openbis-user@mailinator.pl
file_name sample group experiment project conversion datasetcomments
file1.mzXML glucose 1 TEST eicML xxx1
file2.mzXML glucose 2 TEST Praktikum Metabolic Networks 2009 TEST_PROJECT eicML xxx2
file3.mzXML S32 TEST eicML xxx3
file_name sample experiment project conversion comment
unknownFile x x x x
file.mzXML unknown x
file3.mzXML 3VCP3 xxx3
# openbis-user@mailinator.pl
file_name sample group experiment project conversion datasetcomments
file.mzXML x TEST eicML x
file.mzXML x TEST eicML x
# openbis-user@mailinator.pl
file_name sample group experiment project conversion
file1.mzXML glucose 2 TEST unknown TEST_PROJECT eicML