sispub / datapool · Commits · 854a4802

Commit 854a4802 authored Aug 21, 2018 by schmittu
Parent: 1875c80b

    ran black

Changes: 74 files
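The commit reformats the whole package with the Black code formatter. The exact command line is not recorded in the commit; a minimal sketch of how such a run is typically verified afterwards (assuming Black is installed) is:

    # Sketch only: "black --check" exits with a non-zero code if any file would be reformatted.
    import subprocess

    result = subprocess.run(["black", "--check", "datapool"])
    print("already black-formatted" if result.returncode == 0 else "would be reformatted")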
datapool/__init__.py

 # -*- coding: utf-8 -*-

-__author__ = 'Uwe Schmitt'
-__email__ = 'uwe.schmitt@id.ethz.ch'
-__version__ = '0.4.2'
+__author__ = "Uwe Schmitt"
+__email__ = "uwe.schmitt@id.ethz.ch"
+__version__ = "0.4.2"

 from ruamel import yaml
 import warnings

-warnings.simplefilter('ignore', yaml.error.UnsafeLoaderWarning)
+warnings.simplefilter("ignore", yaml.error.UnsafeLoaderWarning)
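For context (a sketch, assuming the 2018-era ruamel.yaml API used here): the module-level filter silences the UnsafeLoaderWarning that ruamel's legacy yaml.load emits when no explicit Loader is given.

    # Sketch only, using ruamel.yaml's legacy top-level load().
    import warnings
    from ruamel import yaml

    warnings.simplefilter("ignore", yaml.error.UnsafeLoaderWarning)
    data = yaml.load("a: 1")  # would emit UnsafeLoaderWarning without the filter above
    print(data)  # {'a': 1}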
datapool/commands/add.py

@@ -4,7 +4,6 @@ from ..templates import scripts
 def add(lz_folder, what, settings, print_ok, print_err, input_=input, scripts=scripts):
     if what not in scripts:
         print_err("- argument '{}' invalid. allowed values are:".format(what))
         for available_what in sorted(scripts):

@@ -33,7 +32,11 @@ def _setup_presets(what, script, settings, print_err):
     invalid_variable = False
     for setting in settings:
         if setting.count("=") != 1:
-            print_err("- setting '{}' invalid, required syntax is VARIABLE=VALUE".format(setting))
+            print_err(
+                "- setting '{}' invalid, required syntax is VARIABLE=VALUE".format(
+                    setting
+                )
+            )
             ok = False
         else:
             variable, __, value = setting.partition("=")
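The VARIABLE=VALUE validation above combines str.count and str.partition; a standalone sketch (the example setting is made up):

    # Sketch only; "LOCATION=basement" is an invented example setting.
    setting = "LOCATION=basement"
    if setting.count("=") != 1:
        raise ValueError("required syntax is VARIABLE=VALUE")
    variable, __, value = setting.partition("=")
    print(variable, value)  # LOCATION basement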
datapool/commands/check.py

@@ -12,9 +12,16 @@ from ..database import setup_db, check_if_tables_exist, copy_db, connect_to_db
 from datapool.instance.domain_objects_checker import DomainObjectsChecker
 from ..errors import InvalidLandingZone, DataPoolException, InvalidOperationError
 from ..landing_zone import LandingZone
-from datapool.instance.landing_zone_structure import source_yaml_path_for_script, required_folders
+from datapool.instance.landing_zone_structure import (
+    source_yaml_path_for_script,
+    required_folders,
+)
 from ..logger import get_cmdline_logger
-from datapool.instance.signal_model import check_signals_uniqueness, check_signals_against_db, check_fields
+from datapool.instance.signal_model import (
+    check_signals_uniqueness,
+    check_signals_against_db,
+    check_fields,
+)
 from datapool.instance.uniform_file_format import to_signals
 from ..utils import enumerate_filename, print_signals
 from datapool.instance.yaml_parsers import parse_source

@@ -44,7 +51,11 @@ def check(landing_zone, result_folder, verbose, print_ok, print_err, run_twice=T
     if config is None or lz is None:
         return 1

-    print_ok("- check names and places of changed files at landing zone {}".format(landing_zone))
+    print_ok(
+        "- check names and places of changed files at landing zone {}".format(
+            landing_zone
+        )
+    )
     all_changed_files = set(lz.list_new_and_changed_files())
     __, unknown_files = lz.separate_allowed_files(all_changed_files)

@@ -57,7 +68,9 @@ def check(landing_zone, result_folder, verbose, print_ok, print_err, run_twice=T
     print_ok("- check yaml files in landing zone at {}".format(landing_zone))

     with get_cmdline_logger(verbose):
-        with _setup_test_db(landing_zone, config, verbose, print_ok, print_err) as engine:
+        with _setup_test_db(
+            landing_zone, config, verbose, print_ok, print_err
+        ) as engine:
             ok = _run_folder_checks(lz, print_ok, print_err)
             if not ok:

@@ -71,8 +84,16 @@ def check(landing_zone, result_folder, verbose, print_ok, print_err, run_twice=T
             print_ok("- all yaml files checked")

             print_ok("- check scripts landing zone at {}".format(landing_zone))
-            found_errors = _run_script_checks(lz, engine, config, result_folder, verbose, run_twice, print_ok, print_err)
+            found_errors = _run_script_checks(
+                lz,
+                engine,
+                config,
+                result_folder,
+                verbose,
+                run_twice,
+                print_ok,
+                print_err,
+            )
             if found_errors:
                 print_err("+ checks failed. please fix this.")

@@ -83,7 +104,9 @@ def check(landing_zone, result_folder, verbose, print_ok, print_err, run_twice=T
     return 0


-def _run_script_checks(lz, engine, config, result_folder, verbose, run_twice, print_ok, print_err):
+def _run_script_checks(
+    lz, engine, config, result_folder, verbose, run_twice, print_ok, print_err
+):
     runner = ConversionRunner(config)
     result_folder = _setup_result_folder(result_folder, print_ok, print_err)

@@ -96,13 +119,20 @@ def _run_script_checks(lz, engine, config, result_folder, verbose, run_twice, pr
     for script_path, data_path in lz.conversion_scripts_and_data():
         if script_path not in changed_files:
-            print_ok("- skip conversion of {} by {}. script is unchanged".format(data_path, script_path))
+            print_ok(
+                "- skip conversion of {} by {}. script is unchanged".format(
+                    data_path, script_path
+                )
+            )
             continue
         if data_path is None:
             found_errors = True
-            print_err("- conversion script changed but no raw data file for {}".format(script_path))
+            print_err(
+                "- conversion script changed but no raw data file for {}".format(
+                    script_path
+                )
+            )
             continue
         print_ok("- check {} on {}".format(script_path, data_path))

@@ -112,24 +142,48 @@ def _run_script_checks(lz, engine, config, result_folder, verbose, run_twice, pr
             found_errors = True
             continue

-        ok, needed_conv, signals = _check_conversion(runner, lz, script_path, data_path, verbose, result_folder, source, True, print_ok, print_err)
+        ok, needed_conv, signals = _check_conversion(
+            runner,
+            lz,
+            script_path,
+            data_path,
+            verbose,
+            result_folder,
+            source,
+            True,
+            print_ok,
+            print_err,
+        )
         if signals is not None:
             all_signals.extend(signals)
         if not ok:
             found_errors = True
         else:
-            print_ok(" - first conversion needed {:.0f} msec".format(needed_conv * 1000))
+            print_ok(
+                " - first conversion needed {:.0f} msec".format(needed_conv * 1000)
+            )

         if run_twice:
-            ok, needed_conv, __ = _check_conversion(runner, lz, script_path, data_path, verbose, result_folder, source, False, print_ok, print_err)
+            ok, needed_conv, __ = _check_conversion(
+                runner,
+                lz,
+                script_path,
+                data_path,
+                verbose,
+                result_folder,
+                source,
+                False,
+                print_ok,
+                print_err,
+            )
             if not ok:
                 found_errors = True
             else:
-                print_ok(" - second conversion needed {:.0f} msec".format(needed_conv * 1000))
+                print_ok(
+                    " - second conversion needed {:.0f} msec".format(
+                        needed_conv * 1000
+                    )
+                )

     if found_errors:
         return True

@@ -155,8 +209,18 @@ def _load_source(lz, script_path, print_err):
     return source


-def _check_conversion(runner, lz, script_path, data_path, verbose, result_folder, source, backup_results, print_ok, print_err):
+def _check_conversion(
+    runner,
+    lz,
+    script_path,
+    data_path,
+    verbose,
+    result_folder,
+    source,
+    backup_results,
+    print_ok,
+    print_err,
+):
     signals = None

@@ -193,7 +257,11 @@ def _report(check_iter, print_err):
             error_count += 1
             has_errors = True
             if error_count > MAX_ERR:
-                print_err(" - too many errors, skipped {} errors.".format(len(exceptions) - MAX_ERR))
+                print_err(
+                    " - too many errors, skipped {} errors.".format(
+                        len(exceptions) - MAX_ERR
+                    )
+                )
                 break
     return has_errors

@@ -205,7 +273,9 @@ def _setup_result_folder(result_folder, print_ok, print_err):
     else:
         if os.path.exists(result_folder):
             if not os.path.isdir(result_folder):
-                print_err("+ given path {} exists but is not a folder".format(result_folder))
+                print_err(
+                    "+ given path {} exists but is not a folder".format(result_folder)
+                )
                 return 1
         else:
             os.makedirs(result_folder)

@@ -241,14 +311,20 @@ def _setup_test_db(landing_zone, config, verbose, print_ok, print_err):
     except InvalidOperationError:
         print_err("- could not connect to productive db.")

     if not ok:
-        print_err("- setup fresh development db. productive does not exist or is empty.")
+        print_err(
+            "- setup fresh development db. productive does not exist or is empty."
+        )
         setup_db(config_develop_db, verbose=verbose)
     else:
         print_ok("- copy meta data from productive db")
-        for table_name in copy_db(config.db, config_develop_db, delete_existing=True, copy_signals=False, verbose=verbose):
+        for table_name in copy_db(
+            config.db,
+            config_develop_db,
+            delete_existing=True,
+            copy_signals=False,
+            verbose=verbose,
+        ):
             print_ok(" - copy table {}".format(table_name))

     engine = connect_to_db(config_develop_db)
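The check command reports conversion times as "needed {:.0f} msec".format(needed_conv * 1000), which implies needed_conv is measured in seconds; how it is measured is not shown in this diff. A generic sketch of that reporting pattern (the timed call here is arbitrary):

    # Sketch only: time an arbitrary call in seconds and report it in milliseconds.
    import time

    started = time.time()
    total = sum(range(1_000_000))  # stand-in for running a conversion script
    needed_conv = time.time() - started
    print(" - conversion needed {:.0f} msec".format(needed_conv * 1000))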
datapool/commands/create_example.py

@@ -20,7 +20,9 @@ def create_example(development_landing_zone, reset, print_ok, print_err):
         try:
             shutil.rmtree(development_landing_zone)
         except Exception as e:
-            print_err(" - could not delete folder {}".format(development_landing_zone))
+            print_err(
+                " - could not delete folder {}".format(development_landing_zone)
+            )
             print_err(" - error message is: {}".format(e))
             return 1
datapool/commands/delete.py

@@ -4,12 +4,26 @@ import os
 from sqlalchemy.orm import sessionmaker
 from datapool.instance.config_handling import read_config
-from ..database import (connect_to_db, filters_to_sqlalchemy_expression, InvalidOperationError, check_if_tables_exist)
-from datapool.instance.db_objects import SiteDbo, ParameterDbo, SourceDbo, SignalDbo, SourceTypeDbo
+from ..database import (
+    connect_to_db,
+    filters_to_sqlalchemy_expression,
+    InvalidOperationError,
+    check_if_tables_exist,
+)
+from datapool.instance.db_objects import (
+    SiteDbo,
+    ParameterDbo,
+    SourceDbo,
+    SignalDbo,
+    SourceTypeDbo,
+)
 from ..landing_zone import write_lock
-from datapool.instance.landing_zone_structure import (find_site_yaml, find_source_yaml, find_source_type_yaml, find_parameters_yaml)
+from datapool.instance.landing_zone_structure import (
+    find_site_yaml,
+    find_source_yaml,
+    find_source_type_yaml,
+    find_parameters_yaml,
+)
 from ..logger import get_cmdline_logger
 from datapool.instance.pretty_printers import pretty_str
 from ..utils import format_table

@@ -20,7 +34,6 @@ class FailedError(Exception):
 def convert_failed_error_to_return_code(function):
     @functools.wraps(function)
     def wrapped(*a, **kw):
         try:

@@ -28,6 +41,7 @@ def convert_failed_error_to_return_code(function):
             return 0 if return_code is None else return_code
         except FailedError:
             return 1

     return wrapped
@@ -43,13 +57,14 @@ def delete_signals(do_delete, max_rows, filters, print_ok, print_err):
         print_err("- {}".format(message))
         return 1

-    q = (session.query(SignalDbo)
-         .outerjoin(SignalDbo.site)  # site might be null !
-         .join(SignalDbo.source)
-         .join(SignalDbo.parameter)
-         .filter(filter_expression)
-         .order_by(SignalDbo.timestamp))
+    q = (
+        session.query(SignalDbo)
+        .outerjoin(SignalDbo.site)  # site might be null !
+        .join(SignalDbo.source)
+        .join(SignalDbo.parameter)
+        .filter(filter_expression)
+        .order_by(SignalDbo.timestamp)
+    )

     if max_rows:
         _print_signals(q, max_rows, print_ok)

@@ -57,12 +72,13 @@ def delete_signals(do_delete, max_rows, filters, print_ok, print_err):
     if not do_delete:
         return 0

-    q = (session.query(SignalDbo.signal_id)
-         .outerjoin(SignalDbo.site)  # site might be null !
-         .join(SignalDbo.source)
-         .join(SignalDbo.parameter)
-         .filter(filter_expression))
+    q = (
+        session.query(SignalDbo.signal_id)
+        .outerjoin(SignalDbo.site)  # site might be null !
+        .join(SignalDbo.source)
+        .join(SignalDbo.parameter)
+        .filter(filter_expression)
+    )
     del_q = session.query(SignalDbo).filter(SignalDbo.signal_id.in_(q.subquery()))
     count = del_q.delete(synchronize_session="fetch")
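delete_signals selects the matching signal_id values first and then deletes the rows whose id is IN that subquery, with synchronize_session="fetch" so the session stays consistent. A self-contained sketch of the same pattern against a throwaway model, not datapool's SignalDbo (assumes the SQLAlchemy 1.x Query API used in this file):

    # Sketch only: "Item" is an invented in-memory model standing in for SignalDbo.
    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()


    class Item(Base):
        __tablename__ = "item"
        item_id = Column(Integer, primary_key=True)
        name = Column(String)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add_all([Item(name="keep"), Item(name="drop"), Item(name="drop")])
    session.commit()

    # Select the ids to remove, then delete via IN(subquery), as delete_signals does:
    q = session.query(Item.item_id).filter(Item.name == "drop")
    del_q = session.query(Item).filter(Item.item_id.in_(q.subquery()))
    count = del_q.delete(synchronize_session="fetch")
    session.commit()
    print(count)  # 2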
@@ -80,7 +96,7 @@ def _setup_session(print_err):
     try:
         engine = connect_to_db(config.db)
     except InvalidOperationError as e:
-        print_err('- {}'.format(e))
+        print_err("- {}".format(e))
         raise FailedError()

     if not check_if_tables_exist(config.db):

@@ -107,7 +123,11 @@ def _print_signals(q, max_rows, print_ok):
         n_start = max_rows // 2
         n_end = max_rows - n_start
         n_skipped = num_signals - max_rows + 1
-        rows = rows[:n_start] + ["... skipped {} signals ...".format(n_skipped)] + rows[-n_end:]
+        rows = (
+            rows[:n_start]
+            + ["... skipped {} signals ...".format(n_skipped)]
+            + rows[-n_end:]
+        )

     for row in rows:
         print_ok(" " + row)
     print_ok("")

@@ -122,45 +142,70 @@ def delete_meta(do_delete, max_rows, what, name, print_ok, print_err):
     session, config = _setup_session(print_err)
     landing_zone_root = config.landing_zone.folder

-    function = {'site': _delete_site,
-                'source': _delete_source,
-                'source_type': _delete_source_type,
-                'parameter': _delete_parameter}[what]
+    function = {
+        "site": _delete_site,
+        "source": _delete_source,
+        "source_type": _delete_source_type,
+        "parameter": _delete_parameter,
+    }[what]

     signals_before = _count_signals(session)

     with write_lock(landing_zone_root) as got_lock:
         if not got_lock:
-            print_err('+ {} is locked. maybe somebody else works on it simultaneously ?'.format(landing_zone_root))
+            print_err(
+                "+ {} is locked. maybe somebody else works on it simultaneously ?".format(
+                    landing_zone_root
+                )
+            )
             return 1
-        exit_code = function(session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err)
+        exit_code = function(
+            session,
+            name,
+            do_delete,
+            max_rows,
+            landing_zone_root,
+            print_ok,
+            print_err,
+        )

     if not exit_code:
         session = sessionmaker(bind=session.bind)()
         signals_after = _count_signals(session)
-        print_ok("- this operation deleted {} signals".format(signals_before - signals_after))
+        print_ok(
+            "- this operation deleted {} signals".format(signals_before - signals_after)
+        )
     return exit_code


-def _delete_source(session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err):
-    source = _check_one_exists("source", session, SourceDbo, SourceDbo.name, name, print_err)
+def _delete_source(
+    session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err
+):
+    source = _check_one_exists(
+        "source", session, SourceDbo, SourceDbo.name, name, print_err
+    )
     print_ok("- following source found in database:")
     print_ok(pretty_str(source, indent=" "))

-    yaml_path = _find_unique(landing_zone_root, find_source_yaml, _match_name(name), print_err)
-    _list_matching_signals(session, SourceDbo, SourceDbo.name == name, max_rows, print_ok)
+    yaml_path = _find_unique(
+        landing_zone_root, find_source_yaml, _match_name(name), print_err
+    )
+    _list_matching_signals(
+        session, SourceDbo, SourceDbo.name == name, max_rows, print_ok
+    )
     _list_affected_files(yaml_path, print_ok, print_err)

     if not do_delete:
         return 0

-    _delete_signals(session, SourceDbo, SourceDbo.source_id == source.source_id, print_ok)
+    _delete_signals(
+        session, SourceDbo, SourceDbo.source_id == source.source_id, print_ok
+    )
     for averaging in source.averaging:
         session.delete(averaging)
     session.delete(source)

@@ -169,21 +214,27 @@ def _delete_source(session, name, do_delete, max_rows, landing_zone_root, print_
     return 0


-def _delete_source_type(session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err):
-    source_type = _check_one_exists("source_type", session, SourceTypeDbo, SourceTypeDbo.name, name, print_err)
+def _delete_source_type(
+    session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err
+):
+    source_type = _check_one_exists(
+        "source_type", session, SourceTypeDbo, SourceTypeDbo.name, name, print_err
+    )
     print_ok("- following source type found in database:")
     print_ok(pretty_str(source_type, indent=" "))

     if source_type.sources:
-        print_err("can not delete this source type, please delete the following sources first:")
+        print_err(
+            "can not delete this source type, please delete the following sources first:"
+        )
         for source in source_type.sources:
             print_err(" " + source.name)
         return 1

-    yaml_path = _find_unique(landing_zone_root, find_source_type_yaml, _match_name(name), print_err)
+    yaml_path = _find_unique(
+        landing_zone_root, find_source_type_yaml, _match_name(name), print_err
+    )
     _list_affected_files(yaml_path, print_ok, print_err)

     if not do_delete:

@@ -195,15 +246,18 @@ def _delete_source_type(session, name, do_delete, max_rows, landing_zone_root, p
     return 0


-def _delete_site(session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err):
+def _delete_site(
+    session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err
+):
     site = _check_one_exists("site", session, SiteDbo, SiteDbo.name, name, print_err)
     print_ok("- following site found in database:")
     print_ok(pretty_str(site, indent=" "))

-    yaml_path = _find_unique(landing_zone_root, find_site_yaml, _match_name(name), print_err)
+    yaml_path = _find_unique(
+        landing_zone_root, find_site_yaml, _match_name(name), print_err
+    )
     _list_matching_signals(session, SiteDbo, SiteDbo.name == name, max_rows, print_ok)
     _list_affected_files(yaml_path, print_ok, print_err)

@@ -219,30 +273,43 @@ def _delete_site(session, name, do_delete, max_rows, landing_zone_root, print_ok
     return 0


-def _delete_parameter(session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err):
-    parameter = _check_one_exists("parameter", session, ParameterDbo, ParameterDbo.name, name, print_err)
+def _delete_parameter(
+    session, name, do_delete, max_rows, landing_zone_root, print_ok, print_err
+):
+    parameter = _check_one_exists(
+        "parameter", session, ParameterDbo, ParameterDbo.name, name, print_err
+    )
     print_ok("- following parameter found in database:")
     print_ok(pretty_str(parameter, indent=" "))

     if parameter.averaging:
-        print_err("- the following source(s) have averaging setttings refering to '{}'".format(name))
+        print_err(
+            "- the following source(s) have averaging setttings refering to '{}'".format(
+                name
+            )
+        )
         for averaging in parameter.averaging:
-            print_err(' - {}'.format(averaging.source.name))
+            print_err(" - {}".format(averaging.source.name))
         return 1

-    yaml_path = _find_unique(landing_zone_root, find_parameters_yaml, lambda obj: True, print_err)
-    _list_matching_signals(session, ParameterDbo, ParameterDbo.name == name, max_rows, print_ok)
+    yaml_path = _find_unique(
+        landing_zone_root, find_parameters_yaml, lambda obj: True, print_err
+    )
+    _list_matching_signals(
+        session, ParameterDbo, ParameterDbo.name == name, max_rows, print_ok
+    )

     if not do_delete:
         return 0

-    _delete_signals(session, ParameterDbo, ParameterDbo.parameter_id == parameter.parameter_id, print_ok)
+    _delete_signals(
+        session,
+        ParameterDbo,
+        ParameterDbo.parameter_id == parameter.parameter_id,
+        print_ok,
+    )
     session.delete(parameter)
     _rewrite_yaml_with_parameter_removed(name, yaml_path)
     session.commit()
@@ -253,11 +320,12 @@ def _rewrite_yaml_with_parameter_removed(name, yaml_path):
     # ruamel.yaml preserves layout of input file when dumping, see
     # https://stackoverflow.com/questions/20805418/pyyaml-dump-format#answer-36760452
     from ruamel import yaml

     parameters = yaml.load(open(yaml_path).read(), Loader=yaml.RoundTripLoader)
-    parameters = [p for p in parameters if p['name'] != name]
+    parameters = [p for p in parameters if p["name"] != name]

     with open(yaml_path, "w") as fh:
-        print(yaml.dump(parameters, Dumper=yaml.RoundTripDumper), end='', file=fh)
+        print(yaml.dump(parameters, Dumper=yaml.RoundTripDumper), end="", file=fh)


 def _check_one_exists(what, session, dbo, field, name, print_err):
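For context (a sketch, assuming the legacy ruamel.yaml load/dump API this file uses): RoundTripLoader and RoundTripDumper keep the input's layout, so removing one entry and dumping leaves the remaining entries formatted as before.

    # Sketch only; the YAML content is made up, the API is ruamel's legacy interface.
    from ruamel import yaml

    text = "- name: temperature\n- name: humidity\n"
    parameters = yaml.load(text, Loader=yaml.RoundTripLoader)
    parameters = [p for p in parameters if p["name"] != "temperature"]
    print(yaml.dump(parameters, Dumper=yaml.RoundTripDumper), end="")  # "- name: humidity"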
@@ -313,9 +381,7 @@ def _iter_yaml_including_folders(yaml_path):
 def _remove_yaml_including_folders(yaml_path, print_ok, print_err):
-    handlers = {"file": os.remove,
-                "folder": os.rmdir}
+    handlers = {"file": os.remove, "folder": os.rmdir}

     for type_, path in _iter_yaml_including_folders(yaml_path):
         try:

@@ -333,10 +399,7 @@ def _list_affected_files(yaml_path, print_ok, print_err):
 def _delete_signals(session, dbo, filter_, print_ok):
-    q = (session.query(SignalDbo.signal_id)
-         .join(dbo)
-         .filter(filter_))
+    q = session.query(SignalDbo.signal_id).join(dbo).filter(filter_)
     del_q = session.query(SignalDbo).filter(SignalDbo.signal_id.in_(q.subquery()))
     count = del_q.delete(synchronize_session="fetch")
datapool/commands/init_config.py

@@ -2,11 +2,11 @@ from os.path import exists, abspath
 from ..logger import get_cmdline_logger


 def init_config(landing_zone, sqlite_db, reset, print_ok, print_err, verbose=False):
     """setup minimal landing zone and create default configuration """

     from datapool.instance.config_handling import init_config as init_config_

     with get_cmdline_logger(verbose):