diff --git a/das/deploy b/das/deploy
index 9182fb9c011a..4c165392d6aa 100644
--- a/das/deploy
+++ b/das/deploy
@@ -46,11 +46,22 @@ deploy_das_post()
   # initialize keylearning db
   rm -f $mongodb_stage/{clean,update}_keylearning_db.js
+  rm -f $mongodb_stage/keylearning_db-schema-stamp
+  rm -f $mongodb_stage/inputvals_*-schema-stamp
   (echo 'keylearning = db.getSisterDB("keylearning");'
-   echo 'keylearning.dropDatabase();'
-   echo 'inputvals = db.getSisterDB("inputvals");'
+   echo 'keylearning.dropDatabase();') > \
+     $mongodb_stage/clean_keylearning_db.js
+  # it's better to clean DBs separately; datatype_name is the first col to be updated, so it will do the clean...
+  (echo 'inputvals = db.getSisterDB("inputvals");'
    echo 'inputvals.dropDatabase();') > \
-     $mongodb_stage/clean_keylearning_db.js
+     $mongodb_stage/clean_inputvals_datatype_name.js
+  # clean each collection (currently update_db requires clean to be present)
+  for col in group_name primary_dataset_name release_name site_name status_name tier_name
+  do
+    (echo 'inputvals = db.getSisterDB("inputvals");'
+     echo "inputvals.$col.drop();") > \
+       $mongodb_stage/clean_inputvals_$col.js
+  done

   cp $das_root/kws_data/db_dumps/*.js $mongodb_stage/

@@ -58,6 +69,4 @@ deploy_das_post()
   rm -Rf $root/state/das/kws_index
   mkdir -p $root/state/das/kws_index
   cp $das_root/kws_data/kws_index/* $root/state/das/kws_index/
-
-
 }
diff --git a/mongodb/manage b/mongodb/manage
index 2e2d05ecf831..f6fee50b0f07 100755
--- a/mongodb/manage
+++ b/mongodb/manage
@@ -48,12 +48,15 @@ cd $STATEDIR
 clean_db()
 {
   set -e
-  if [ -f $STATEDIR/db/${1%_db}.ns ]; then
+  # db name is the part before the first underscore
+  db=$(echo $1 | cut -f1 -d_)
+  if [ -f $STATEDIR/db/${db}.ns ]; then
     echo "Clean $1"
     mongo --port 8230 stagingarea/clean_$1.js
   fi
   set +e
 }
+
 update_db()
 {
   stamp=$(cat stagingarea/update_$1.js | md5sum)
@@ -106,15 +109,15 @@ start()
   if $started; then
     echo "$ME is ready"
     # keylearning is prerequisite for KWS to function
-    clean_db "keylearning_db"
     update_db "keylearning_db"
     # inputvals is less important, but currently it's not updated live...
     for col in datatype_name group_name primary_dataset_name release_name site_name status_name tier_name
    do
-      clean_db "inputvals_$col"
       update_db "inputvals_$col"
     done
     # das
+    # TODO: clean_db shall NEVER be called in here, as it does not check for stamps!
+    # TODO: check if this could also cause some strange behaviour in das?!
     clean_db "das_db"
     update_db "mapping_db"
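
Note on the TODOs above: clean_db only checks for the database's .ns file, so its clean script re-runs on every start while the database files exist. A minimal sketch of a stamp-guarded clean_db, mirroring the md5 stamp check that update_db already uses, could look like the following; the $1-clean-stamp file name and the guard logic are assumptions for illustration, not part of this patch:

clean_db()
{
  set -e
  # db name is the part before the first underscore
  db=$(echo $1 | cut -f1 -d_)
  # fingerprint of the current clean script, same style as update_db
  stamp=$(cat stagingarea/clean_$1.js | md5sum)
  # only clean when the db exists AND the clean script changed since the last run
  if [ -f $STATEDIR/db/${db}.ns ] &&
     [ "$stamp" != "$(cat stagingarea/$1-clean-stamp 2>/dev/null)" ]; then
    echo "Clean $1"
    mongo --port 8230 stagingarea/clean_$1.js
    # remember which clean script was applied, so restarts become no-ops
    echo "$stamp" > stagingarea/$1-clean-stamp
  fi
  set +e
}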