diff --git a/rucio-dataset-monitoring/spark/cron4rucio_hdfs2mongo.sh b/rucio-dataset-monitoring/spark/cron4rucio_hdfs2mongo.sh
index 6b0984f0..6564c6f5 100755
--- a/rucio-dataset-monitoring/spark/cron4rucio_hdfs2mongo.sh
+++ b/rucio-dataset-monitoring/spark/cron4rucio_hdfs2mongo.sh
@@ -83,7 +83,7 @@ function run_mongo_import() {
     hadoop fs -getmerge "$hdfs_out_dir"/part-*.json "$local_json_merge_file"
     mongoimport --drop --type=json \
-        --host "$ARG_MONGOHOST" --port "$ARG_MONGOPORT" --username "$ARG_MONGOUSER" --password "$ARG_MONGOPASS" \
+        --host "$ARG_MONGOHOST" --username "$ARG_MONGOUSER" --password "$ARG_MONGOPASS" \
         --authenticationDatabase "$ARG_MONGOAUTHDB" --db "$ARG_MONGOWRITEDB" \
         --collection "$collection" --file "$local_json_merge_file"
     util4logi "Mongoimport finished. ${hdfs_out_dir} imported to collection: ${collection}"
@@ -105,7 +105,7 @@ run_mongo_import "${HDFS_PATH}/${hdfs_datasets_in_tape_and_disk}" "$col_datasets
 # Write current date to json file and import it to MongoDB "source_timestamp" collection for Go Web Page.
 echo "{\"createdAt\": \"$(date +%Y-%m-%d)\"}" >source_timestamp.json
 mongoimport --drop --type=json \
-    --host "$ARG_MONGOHOST" --port "$ARG_MONGOPORT" --username "$ARG_MONGOUSER" --password "$ARG_MONGOPASS" \
+    --host "$ARG_MONGOHOST" --username "$ARG_MONGOUSER" --password "$ARG_MONGOPASS" \
     --authenticationDatabase "$ARG_MONGOAUTHDB" --db "$ARG_MONGOWRITEDB" \
     --collection "source_timestamp" --file source_timestamp.json
@@ -115,7 +115,7 @@ util4logi "source_timestamp collection is updated with current date"
 # Modify JS script
 sed -i "s/_MONGOWRITEDB_/$ARG_MONGOWRITEDB/g" "$script_dir"/createindexes.js
-mongosh --host "$ARG_MONGOHOST" --port "$ARG_MONGOPORT" --username "$ARG_MONGOUSER" --password "$ARG_MONGOPASS" \
+mongosh --host "$ARG_MONGOHOST" --username "$ARG_MONGOUSER" --password "$ARG_MONGOPASS" \
     --authenticationDatabase "$ARG_MONGOAUTHDB" <"$script_dir"/createindexes.js
 util4logi "MongoDB indexes are created for datasets and detailed_datasets collections"
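
Note on the change: the --port "$ARG_MONGOPORT" flag is removed from every mongoimport and mongosh invocation, which presumably means $ARG_MONGOHOST is now expected to carry the full host specification by itself. mongoimport documents --host as accepting <hostname>:<port> or a replica-set seed list, in which case a separate --port flag is redundant; the same convention is assumed for mongosh here. A minimal sketch of the assumed calling convention follows (the host value is a hypothetical placeholder, not the production endpoint, and in the real script ARG_MONGOHOST is supplied by argument parsing):

    # Hypothetical value for illustration only: port(s) embedded in the host string,
    # either as host:port or as a replica-set seed list, so --port is not needed.
    ARG_MONGOHOST="rucio-mongo-rs/mongo-1.example.com:27017,mongo-2.example.com:27017"

    mongoimport --drop --type=json \
        --host "$ARG_MONGOHOST" --username "$ARG_MONGOUSER" --password "$ARG_MONGOPASS" \
        --authenticationDatabase "$ARG_MONGOAUTHDB" --db "$ARG_MONGOWRITEDB" \
        --collection "source_timestamp" --file source_timestamp.json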