@@ -63,24 +63,60 @@ compress_files() {
 }
 
 download_db_files () {
-    if ! aws s3 ls "s3://$AWS_S3_BUCKET/$ENVIRONMENT" 2>&1 | grep -q 'An error occurred'; then
-        aws s3 sync "s3://$AWS_S3_BUCKET/$ENVIRONMENT/" "$DATADIR/"
-        mv $DATADIR/*.db $DATADIR/
-        mv $DATADIR/*/*.db $DATADIR/
-        compress_files
+    local base_url="$1"
+
+    if [ -z "$base_url" ]; then
+        echo "Error: a base URL is required for download_db_files" >&2
+        return 1
     fi
+
+    # Ensure base_url ends with /
+    if [[ ! "$base_url" =~ /$ ]]; then
+        base_url="${base_url}/"
+    fi
+
+    # List of SQLite database files to download
+    local db_files=(
+        "projects-cache.db"
+        "selection.db"
+        "taginfo-chronology.db"
+        "taginfo-db.db"
+        "taginfo-history.db"
+        "taginfo-languages.db"
+        "taginfo-master.db"
+        "taginfo-projects.db"
+        "taginfo-wiki.db"
+        "taginfo-wikidata.db"
+    )
+
+    echo "Downloading SQLite database files from: $base_url"
+
+    for db_file in "${db_files[@]}"; do
+        local file_url="${base_url}${db_file}"
+        local output_path="${DATADIR}/${db_file}"
+
+        echo "Downloading: $db_file"
+        if wget -q --show-progress -O "$output_path" --no-check-certificate "$file_url"; then
+            echo "Successfully downloaded: $db_file"
+        else
+            echo "Warning: Failed to download $db_file from $file_url"
+            # Continue with other files even if one fails
+        fi
+    done
+
+    echo "Database files download completed"
 }
 
 sync_latest_db_version () {
     while true; do
         sleep "$INTERVAL_DOWNLOAD_DATA"
-        download_db_files
+        download_db_files "$TAGINFO_DB_BASE_URL"
     done
 }
 
 start_web () {
     echo "Start...Taginfo web service"
-    download_db_files
+    download_db_files "$TAGINFO_DB_BASE_URL"
     cd $WORKDIR/taginfo/web && ./taginfo.rb & sync_latest_db_version
 }
 
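Taken together, this change replaces the S3-based sync with plain HTTPS downloads driven by `TAGINFO_DB_BASE_URL`. A minimal way to exercise the rewritten function outside the container might look like the sketch below; the script name, base URL, and data directory are illustrative assumptions, not values taken from this commit:

```bash
#!/usr/bin/env bash
# Smoke test for download_db_files (sketch; concrete values are assumptions).
export DATADIR="/tmp/taginfo-data"
export TAGINFO_DB_BASE_URL="https://download.example.org/taginfo"  # hypothetical mirror

mkdir -p "$DATADIR"

# "start.sh" is a placeholder for whichever entrypoint script defines the function.
source ./start.sh

# The function appends the trailing slash itself, so a bare base URL is fine.
download_db_files "$TAGINFO_DB_BASE_URL"
ls -lh "$DATADIR"/*.db
```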
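Because the download loop deliberately continues after a failure, a truncated file or an HTML error page could be left behind with a `.db` name. Every SQLite database file begins with the 16-byte header `SQLite format 3\0`, which allows a cheap post-download sanity check. The helper below is a sketch that is not part of the commit; it only assumes `$DATADIR` holds the downloaded dumps:

```bash
# Sketch: validate downloads by checking the SQLite file header (not in the commit).
verify_db_files() {
    local f
    for f in "$DATADIR"/*.db; do
        # The first 15 bytes of a valid SQLite database read "SQLite format 3".
        if [ "$(head -c 15 "$f")" = "SQLite format 3" ]; then
            echo "OK: $f"
        else
            echo "Corrupt or truncated download: $f" >&2
        fi
    done
}

verify_db_files
```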