#!/usr/bin/ksh
#
# wget_antarctic_gmag.ksh
#
# Author: Lydia Philpott (lphilpott@igpp.ucla.edu)
#
# Based heavily on wget_nrsq_gmag.ksh and wget_maccs_gmag.ksh by Tim Quinn (teq@ssl.berkeley.edu)
# and wget_green_gmag.ksh by Pat Cruce (pcruce@igpp.ucla.edu) & wget_intermagnet_gmag
#
# This script uses wget to download data from magnetometer sites in Antarctica
# (provided by Virginia Tech).
#
# This script is initially being set up to download data from PG1 and PG2. This
# data is provided in CDF form intended to be compatible with TDAS without any
# reprocessing required.
#
# Data provided is currently 1 sec raw data. For info on the data contact
# Daniel Weimer (dweimer@vt.edu).
#
# Arguments:
#   $1 = site   (e.g. pg1, pg2 — used in the remote URL and local archive path)
#   $2 = year   (used in the remote URL; per-file year is re-parsed from filenames)
#   $3 = month  (accepted for interface compatibility; not used below)
#
# Environment (from the sourced .config files):
#   LOGDIR, WEBSITE, L2MAGDIR, GMAGWORKDIR (read); KSH_SCRIPTS, PHP_SCRIPTS,
#   SOC_EMAIL referenced only in commented-out steps.

#
## Set environment variables
#
if [[ -z $THMSOC ]]
then
    THMSOC=/disks/socware/thmsoc_dp_current
    export THMSOC
fi

. /home/thmsoc/lphilpotts_stuff/antarctic_test/soc_it_to_me_test.config
. /home/thmsoc/lphilpotts_stuff/antarctic_test/wget_antarctic_gmag.config

#
## Handle input arguments
#
site=${1}
year=${2}
month=${3}   # NOTE(review): currently unused — kept for call-site compatibility

#
## Set logfiles
#
processdate=$(date '+%y%m%d')
LOGFILE=${LOGDIR}/webdownloads/antarctic_gmag_log_${processdate}
WGETLOG=${LOGDIR}/webdownloads/wget_output_antarctic_gmag_${processdate}_${site}_$$
echo ":$$:antarctic_gmag_log:" >> "$LOGFILE"
echo ":$$:antarctic_gmag_log:Starting script wget_antarctic_gmag at $(date)" >> "$LOGFILE"

#
## Run wget to download data from Virginia Tech ftp site for given site
#
# SECURITY NOTE(review): credentials on the command line are visible in `ps`;
# consider moving them to a ~/.netrc or wget --config file.
wgetTime=$(date '+%Y-%m-%d %T')
url=${WEBSITE}/${site}/${year}/
/usr/bin/wget -r -nH --cut-dirs=1 -N -o "$WGETLOG" --user=THEMIS --password=Angelopoulos -P "$L2MAGDIR" "$url"
echo ":$$:antarctic_gmag_log: Download Complete: $site" >> "$LOGFILE"

#
## Parse through wget output to see what was downloaded
#
# wget's "saved" lines carry the local path in field 6 (quoted) and the size
# in field 8 (bracketed); directory-listing fetches are filtered out.
grep saved "$WGETLOG" | grep -v listing | awk '{print $6 " " $8}' > /tmp/wget_antarctic_gmag$$
while read -r line
do
    echo "$$:antarctic_gmag_log:" >> "$LOGFILE"
    # BUGFIX: was ">>LOGFILE" (literal filename in CWD), now appends to $LOGFILE
    echo "$$:antarctic_gmag_log: Checking through wget log..." >> "$LOGFILE"
    path=$(echo "$line" | awk '{print $1}')
    size=$(echo "$line" | awk '{print $2}')
    fileName=$(basename "$path")
    fileName=${fileName%*\'}   # strip the trailing quote wget puts around the path
    #
    # Parse date information from filename (field 5 of the _-separated name is YYYYMMDD)
    year=$(echo "$fileName" | awk -F_ '{print substr($5,1,4)}')
    mon=$(echo "$fileName" | awk -F_ '{print substr($5,5,2)}')
    day=$(echo "$fileName" | awk -F_ '{print substr($5,7,2)}')
    fileTime="${year}-${mon}-${day} 00:00:00"   # NOTE(review): currently unused
    #
    # Verify file exists (-e: portable existence test; was deprecated -a)
    if [[ ! -e ${L2MAGDIR}/${site}/${year}/${fileName} ]]
    then
        echo "$$:antarctic_gmag_log:" >> "$LOGFILE"
        echo "$$:antarctic_gmag_log: ${L2MAGDIR}/${site}/${year}/${fileName} not found. " >> "$LOGFILE"
        echo "$$:antarctic_gmag_log: continuing..................." >> "$LOGFILE"
        #
        #printf "%s\n" "script: $0" \
        #
        #"Warning: ${L2MAGDIR}/${site}/${year}/${fileName} not found." \
        #
        #"Check $LOGFILE" |
        #
        #mailx -s "Antarctic GMAG Download Discrepancy" $SOC_EMAIL
    else
        # Strip the surrounding [brackets] from wget's size field
        fileSize=${size%\]}
        fileSize=${fileSize#\[}
        cdf_archive_dir=${L2MAGDIR}/${site}/${year}

        ## Update checksum file
        #
        #temp ${KSH_SCRIPTS}/fileChkSum.ksh ${cdf_archive_dir}/${fileName}

        dataTime=$(echo "$year" "$mon" "$day" | awk '{printf("%s-%02s-%02s 00:00:00",$1,$2,$3)}')
        mysql_cdftable_name=gbo_${site}_cdffiles
        #temp ${PHP_SCRIPTS}/gmag_cdf.php $mysql_cdftable_name $dataTime $fileName $wgetTime $fileSize
        echo "$$:antarctic_gmag_log: Writing to database $mysql_cdftable_name $dataTime $fileName $wgetTime $fileSize" >> "$LOGFILE"

        # Create a file used for processing to ascii (essentially keep a note
        # that the file needs to be processed)
        cp "${L2MAGDIR}/${site}/${year}/${fileName}" "${GMAGWORKDIR}/$fileName"
    fi
done < /tmp/wget_antarctic_gmag$$

#
## Cleanup
#
rm -f /tmp/wget_antarctic_gmag$$

exit 0