@@ -46,15 +46,12 @@ process_data() {
     # wikidata/update.sh $DATADIR
     chronology/update.sh $DATADIR
     ./update_all.sh $DATADIR
-    # Create results directory and copy all .db files from subdirectories and main directory
-    RESULTS_DIR="$DATADIR/results"
-    mkdir -p "$RESULTS_DIR"
-    # Copy all .db files from subdirectories to results
-    find $DATADIR -mindepth 2 -name "*.db" -type f -exec cp {} "$RESULTS_DIR/" \; 2>/dev/null || true
-    # Copy .db files from main directory to results
-    find $DATADIR -maxdepth 1 -name "*.db" -type f -exec cp {} "$RESULTS_DIR/" \; 2>/dev/null || true
-    # Sync results directory to S3 bucket
-    aws s3 sync "$RESULTS_DIR/" "s3://$AWS_S3_BUCKET/taginfo/$ENVIRONMENT/" --exclude "*" --include "*.db"
+    mv $DATADIR/*.db $DATADIR/
+    mv $DATADIR/*/*.db $DATADIR/
+    # If AWS_S3_BUCKET points to an accessible bucket, upload the data
+    if ! aws s3 ls "s3://$AWS_S3_BUCKET/$ENVIRONMENT" 2>&1 | grep -q 'An error occurred'; then
+        aws s3 sync $DATADIR/ s3://$AWS_S3_BUCKET/$ENVIRONMENT/ --exclude "*" --include "*.db"
+    fi
 }
 
 # Compress files to download
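
The new upload guard keys off the AWS CLI's error text: "aws s3 ls" prints "An error occurred ..." when the bucket is missing, misnamed, or the credentials are wrong, so a clean listing is taken as permission to sync. A minimal sketch for exercising the check on its own, with placeholder bucket and environment values (not taken from the repo):

    # Hypothetical values for a local dry run
    AWS_S3_BUCKET=example-taginfo-bucket
    ENVIRONMENT=staging
    # aws s3 ls reports failures as "An error occurred (...)"; 2>&1 folds
    # stderr into the pipe so grep can see the message
    if ! aws s3 ls "s3://$AWS_S3_BUCKET/$ENVIRONMENT" 2>&1 | grep -q 'An error occurred'; then
        echo "bucket reachable: sync would run"
    else
        echo "bucket unreachable or not configured: sync skipped"
    fi
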
@@ -119,8 +116,13 @@ sync_latest_db_version() {
 
 start_web () {
     echo "Start...Taginfo web service"
-    download_db_files "$TAGINFO_DB_BASE_URL"
-    cd $WORKDIR/taginfo/web && ./taginfo.rb & sync_latest_db_version
+    # FETCH_DB_FILES defaults to true if not set
+    FETCH_DB_FILES=${FETCH_DB_FILES:-true}
+    if [ "${FETCH_DB_FILES}" = "true" ]; then
+        echo "Downloading initial database files..."
+        download_db_files "$TAGINFO_DB_BASE_URL"
+    fi
+    cd $WORKDIR/taginfo/web && ./taginfo.rb
 }
 
 ACTION=$1
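
With FETCH_DB_FILES in place, the web service no longer has to re-download the databases on every start, and the flag defaults to true so existing deployments keep the old behavior. A usage sketch, assuming the script is named start.sh (hypothetical) and is invoked with an action argument, as the ACTION=$1 line suggests (the exact keyword is not visible in this diff):

    # Default start: FETCH_DB_FILES is unset, so databases are downloaded first
    ./start.sh <web-action>

    # Reuse .db files already present on the data volume
    FETCH_DB_FILES=false ./start.sh <web-action>
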