#!/usr/bin/ksh
#
# wget_intermagnet_gmag.ksh
#
# Author: Lydia Philpott (lphilpott@igpp.ucla.edu)
#
# Based heavily on wget_nrsq_gmag.ksh and wget_maccs_gmag.ksh by Tim Quinn (teq@ssl.berkeley.edu)
# and wget_green_gmag.ksh by Pat Cruce (pcruce@igpp.ucla.edu)
#
# This script uses wget to download data from magnetometer sites through INTERMAGNET
#
# This script is initially being set up to download data from ABK (Abisko). With the intention
# that it be extendable to other sites in the future (in particular NRCan sites FCC and YKC).
# NB: the path to the data on the ftp site will vary depending on whether the particular
# site has provided minute or second data, definitive or variational etc.
#
# Arguments:
#   $1 = site
#   $2 = year
#   $3 = month
#   $4 = resolution (this is minute or second)
#   $5 = datatype (this is variation, definitive, provisional, quasi-definitive)
#
# Required env (from sourced config): LOGDIR, WEBSITE, LOCALWEBMIRROR, GMAGWORKDIR
#
## Set environment variables
#
if [[ -z "$THMSOC" ]]
then
    THMSOC=/disks/socware/thmsoc_dp_current
    export THMSOC
fi

#. $THMSOC/src/config/soc_it_to_me.config
#. $THMSOC/src/config/wget_intermagnet_gmag.config
. /home/thmsoc/lphilpotts_stuff/intermagnet_test/soc_it_to_me_test.config
. /home/thmsoc/lphilpotts_stuff/intermagnet_test/wget_intermagnet_gmag_test.config

#
## Handle input arguments
#
site=${1}
year=${2}
month=${3}
resolution=${4}
datatype=${5}

#
## Set logfiles
#
processdate=$(date '+%y%m%d')
LOGFILE=${LOGDIR}/webdownloads/intermagnet_gmag_log_${processdate}
WGETLOG=${LOGDIR}/webdownloads/wget_output_intermagnet_gmag_${processdate}_${site}_$$

echo ":$$:intermagnet_gmag_log:" >> "$LOGFILE"
echo ":$$:intermagnet_gmag_log:Starting script wget_intermagnet_gmag at $(date)" >> "$LOGFILE"

#
## Run wget to download data from INTERMAGNET for given site
#
wgetTime=$(date '+%Y-%m-%d %T')
url=${WEBSITE}/${resolution}/${datatype}/IAGA2002/${year}/${month}/
#wildcard="*"
sitelist="$site*"
#echo $site
#echo $sitelist

# NOTE(review): credentials passed on the command line are visible in 'ps'
# output; consider wget --password-file or a .netrc entry instead.
/usr/bin/wget -r -nH --cut-dirs=5 -N -o "$WGETLOG" \
    --user=imagthemis --password=data4th3M \
    -P "${LOCALWEBMIRROR}/${site}/${year}/${month}" -A "$sitelist" "$url"

echo ":$$:intermagnet_gmag_log: Download Complete: $site" >> "$LOGFILE"

#
## Parse through wget output to see what was downloaded
#
# wget log "saved" lines look like:  ... 'path/to/file' saved [12345/12345]
# field 6 is the quoted path, field 8 the bracketed size.
downloadList=/tmp/wget_intermagnet_gmag$$
grep saved "$WGETLOG" | grep -v listing | awk '{print $6 " " $8}' > "$downloadList"

while read -r line
do
    path=$(echo "$line" | awk '{print $1}')
    size=$(echo "$line" | awk '{print $2}')
    fileName=$(basename "$path")
    fileName=${fileName%*\'}    # strip trailing quote left over from the wget log format

    # Parse date information from filename.
    # Use fileYear/fileMon/fileDay so the 'year' script argument (used to
    # build the mirror path below, matching the wget -P destination) is
    # not clobbered by the value parsed out of the filename.
    fileYear=$(echo "$fileName" | awk -F_ '{print substr($1,4,4)}')
    fileMon=$(echo "$fileName" | awk -F_ '{print substr($1,8,2)}')
    fileDay=$(echo "$fileName" | awk -F_ '{print substr($1,10,2)}')
    fileTime="${fileYear}-${fileMon}-${fileDay} 00:00:00"

    # Verify file exists
    if [[ ! -e "${LOCALWEBMIRROR}/${site}/${year}/${month}/${fileName}" ]]
    then
        echo "$$:intermagnet_gmag_log:" >> "$LOGFILE"
        echo "$$:intermagnet_gmag_log: ${LOCALWEBMIRROR}/${site}/${year}/${month}/${fileName} not found. " >> "$LOGFILE"
        echo "$$:intermagnet_gmag_log: continuing..................." >> "$LOGFILE"
        #printf "%s\n" "script: $0" \
        #"Warning: ${LOCALWEBMIRROR}/${site}/${year}/${fileName} not found." \
        #"Check $LOGFILE" |
        #mailx -s "MACCS Download Discrepancy" $SOC_EMAIL
    fi

    # Size arrives as "[NNN]" or "[NNN/NNN]"; strip the brackets.
    fileSize=${size%\]}
    fileSize=${fileSize#\[}

    # Update database with stats
    #${PHP_SCRIPTS}/wget_intermagnet_gmag.php $fileName $wgetTime $fileTime $fileSize

    # Create a file used for CDF processing
    cp "${LOCALWEBMIRROR}/${site}/${year}/${month}/${fileName}" "${GMAGWORKDIR}/${fileName}"
done < "$downloadList"

#
## Cleanup
#
rm -f "$downloadList"

exit 0