@@ -46,12 +46,8 @@ process_data() {
4646 # wikidata/update.sh $DATADIR
4747 chronology/update.sh $DATADIR
4848 ./update_all.sh $DATADIR
49-     mv $DATADIR/*.db $DATADIR/
50-     mv $DATADIR/*/*.db $DATADIR/
51-     # if AWS_S3_BUCKET is set upload data
52-     if ! aws s3 ls "s3://$AWS_S3_BUCKET/taginfo/$ENVIRONMENT" 2>&1 | grep -q 'An error occurred'; then
53-         aws s3 sync $DATADIR/ s3://$AWS_S3_BUCKET/taginfo/$ENVIRONMENT/ --exclude "*" --include "*.db"
54-     fi
49+ # Move database files from subdirectories to main data directory
50+     aws s3 sync $DATADIR/ s3://$AWS_S3_BUCKET/taginfo/$ENVIRONMENT/ --exclude "*" --include "*.db"
5551}
5652
5753# Compress files to download
@@ -63,24 +59,60 @@ compress_files() {
6359}
6460
6561download_db_files () {
66- if ! aws s3 ls " s3://$AWS_S3_BUCKET /$ENVIRONMENT " 2>&1 | grep -q ' An error occurred' ; then
67- aws s3 sync " s3://$AWS_S3_BUCKET /$ENVIRONMENT /" " $DATADIR /"
68- mv $DATADIR /* .db $DATADIR /
69- mv $DATADIR /* /* .db $DATADIR /
70- compress_files
62+ local base_url=$1
63+
64+ if [ -z " $base_url " ]; then
65+ echo " Error: URL base is required for download_db_files"
66+ return 1
67+ fi
68+
69+ # Ensure base_url ends with /
70+ if [[ ! " $base_url " =~ /$ ]]; then
71+ base_url=" ${base_url} /"
7172 fi
73+
74+ # List of SQLite database files to download
75+ local db_files=(
76+ " projects-cache.db"
77+ " selection.db"
78+ " taginfo-chronology.db"
79+ " taginfo-db.db"
80+ " taginfo-history.db"
81+ " taginfo-languages.db"
82+ " taginfo-master.db"
83+ " taginfo-projects.db"
84+ " taginfo-wiki.db"
85+ " taginfo-wikidata.db"
86+ )
87+
88+ echo " Downloading SQLite database files from: $base_url "
89+
90+ for db_file in " ${db_files[@]} " ; do
91+ local file_url=" ${base_url}${db_file} "
92+ local output_path=" ${DATADIR} /${db_file} "
93+
94+ echo " Downloading: $db_file "
95+ if wget -q --show-progress -O " $output_path " --no-check-certificate " $file_url " ; then
96+ echo " Successfully downloaded: $db_file "
97+ else
98+ echo " Warning: Failed to download $db_file from $file_url "
99+ # Continue with other files even if one fails
100+ fi
101+ done
102+
103+ echo " Database files download completed"
72104}
73105
74106sync_latest_db_version () {
75107 while true ; do
76108 sleep " $INTERVAL_DOWNLOAD_DATA "
77- download_db_files
109+ download_db_files " $TAGINFO_DB_BASE_URL "
78110 done
79111}
80112
81113start_web () {
82114 echo " Start...Taginfo web service"
83- download_db_files
115+ download_db_files " $TAGINFO_DB_BASE_URL "
84116 cd $WORKDIR /taginfo/web && ./taginfo.rb & sync_latest_db_version
85117}
86118
0 commit comments