1. CMD
right_cmd_command_regit.bat
@echo off reg add "HKCR*shellms-dos" /ve /d DOS_COMMAND /f reg add "HKCR*shellms-doscommand" /ve /d "cmd.exe /k cd %%1" /f reg add "HKCRFoldershellms-dos" /ve /d DOS_COMMAND /f reg add "HKCRFoldershellms-doscommand" /ve /d "cmd.exe /k cd %%1" /f
2. Shell
2.1 pmdownload
#!/bin/sh
# -------------------------------------------------------------------------------
# pmdownload -
# This file contains a sample PM download script.
#
# Note: The output is written to file "pmdownload.out"
#
# FTP parameters
#
# Example:
#   hostName=192.168.1.1
#   userName=user1
#   password=123
#   remoteDir=/home/user1/pmfiles
hostName=ftp_hostname
userName=ftp_username
password=ftp_password
remoteDir=ftp_remote_directory
csvSeparator=","

# -------------------------------------------------------------------------------
# Get the CMS bin directory
pmcmd=$0
cmsbin=`dirname $pmcmd`
if [ $pmcmd = $cmsbin ]; then
    cmsbin="."
fi
cd $cmsbin

# -------------------------------------------------------------------------------
# Get yesterday's date string
year=`date +%Y`
month=`date +%m`
day=`date +%d`
day=`expr "$day" - 1`
if [ $day -eq 0 ]; then
    month=`expr "$month" - 1`
    month="0${month}"
    if [ $month -eq 0 ]; then
        month=12
        year=`expr "$year" - 1`
    fi
    day=`cal $month $year | grep . | fmt -1 | tail -1`
fi
if [ $day -lt 10 ]; then
    day="0${day}"
fi
year=`echo ${year} | sed 's/^20//'`

# -------------------------------------------------------------------------------
# Get the day-before-yesterday date string
year2=`date +%Y`
month2=`date +%m`
day2=`date +%d`
day2=`expr "$day2" - 2`
if [ $day2 -eq 0 ]; then
    month2=`expr "$month2" - 1`
    month2="0${month2}"
    if [ $month2 -eq 0 ]; then
        month2=12
        year2=`expr "$year2" - 1`
    fi
    day2=`cal $month2 $year2 | grep . | fmt -1 | tail -1`
fi
if [ $day2 -lt 10 ]; then
    day2="0${day2}"
fi
year2=`echo ${year2} | sed 's/^20//'`

# --------------------------------------
# yesterdayDate format -> MM/DD/YY
yesterdayDate=${month}/${day}/${year}
# --------------------------------------
# todayDate format -> MM/DD/YY
todayDate=`date +'%m/%d/%y'`
# --------------------------------------
# beforeYesterdayDate format -> MM/DD/YY
beforeYesterdayDate=${month2}/${day2}/${year2}
# --------------------------------------
# 15-MIN start and end time for the getpm.sh script
startTime15min="${yesterdayDate} 00:00:00"
endTime15min="${yesterdayDate} 23:59:59"
# --------------------------------------
# 1-DAY start and end time for the getpm.sh script
startTime1day="${beforeYesterdayDate} 00:00:00"
endTime1day="${beforeYesterdayDate} 23:59:59"
# --------------------------------------
# 15-MIN fileDate format -> MMDDYY
fileDate=${month}${day}${year}
# --------------------------------------
# 1-DAY fileDate format -> MMDDYY
fileDate2=${month2}${day2}${year2}
# --------------------------------------
# Hostname
file_hostname=`hostname`

# -------------------------------------------------------------------------------
# Rotate the output files once pmdownload.out exceeds 300 KB
outputFiles()
{
    filesize=0
    if [ -f pmdownload.out ]; then
        filesize=`du -k pmdownload.out | awk '{print $1}'`
        if [ "$filesize" -gt 300 ]; then
            if [ -f pmdownload.out4 ]; then
                cp pmdownload.out4 pmdownload.out5
            fi
            if [ -f pmdownload.out3 ]; then
                cp pmdownload.out3 pmdownload.out4
            fi
            if [ -f pmdownload.out2 ]; then
                cp pmdownload.out2 pmdownload.out3
            fi
            if [ -f pmdownload.out1 ]; then
                cp pmdownload.out1 pmdownload.out2
            fi
            cp pmdownload.out pmdownload.out1
            rm -f pmdownload.out
        fi
    fi
}

# ------------------------------------------------------------------------------
# Get 15-MIN PM data
getpm_15MIN()
{
    pmtype=$1
    pmtype1=`echo $pmtype | sed 's/15MIN//'`
    ##echo "pmtype1 =" $pmtype1
    pmlocation=$2
    if [ "$pmlocation" = "" ]; then
        pmloc="BOTH"
    fi
    if [ "$pmlocation" = "NEND" ]; then
        pmloc="2"
    fi
    if [ "$pmlocation" = "FEND" ]; then
        pmloc="1"
    fi

    outputFiles

    # ----------------------------------------------------
    # Execute getpm to retrieve 15-MIN PM data
    timeStamp=`date +'%m%d%y_%H%M%S'`
    timenow=`date +'%m/%d/%y %H:%M:%S'`
    filenamePM="pm_"$pmtype1"_15MIN_${timeStamp}.csv"
    ##echo " FilenamePM = " $filenamePM
    if [ "$pmloc" = "BOTH" ]; then
        pmcmd="${cmsbin}/getpm.sh -p $pmtype"
        pmcmd="${pmcmd} -s \"${startTime15min}\" -e \"${endTime15min}\""
        pmcmd="${pmcmd} -f csv -o $filenamePM -rs $csvSeparator"
    fi
    if [ "$pmloc" != "BOTH" ]; then
        pmcmd="${cmsbin}/getpm.sh -p $pmtype"
        pmcmd="${pmcmd} -s \"${startTime15min}\" -e \"${endTime15min}\""
        pmcmd="${pmcmd} -l $pmloc -f csv -o $filenamePM -rs $csvSeparator"
    fi
    echo "">>pmdownload.out
    echo "# ---------------------------------------------------">>pmdownload.out
    echo "# Execute getpm $1 15-MIN at ${timenow}">>pmdownload.out
    echo "# ---------------------------------------------------">>pmdownload.out
    echo "">>pmdownload.out
    echo ">>${pmcmd}" >>pmdownload.out
    if [ "$pmloc" = "BOTH" ]; then
        ${cmsbin}/getpm.sh -p $pmtype -s "${startTime15min}" -e "${endTime15min}" -f csv -o $filenamePM -rs $csvSeparator >>pmdownload.out
    fi
    if [ "$pmloc" != "BOTH" ]; then
        ${cmsbin}/getpm.sh -p $pmtype -s "${startTime15min}" -e "${endTime15min}" -l $pmloc -f csv -o $filenamePM -rs $csvSeparator >>pmdownload.out
    fi

    # ----------------------------------------------------
    # Transfer the 15-MIN PM output file using the cftp script
    if [ -f $filenamePM ]; then
        filenameFTP="${file_hostname}_pm_${pmtype1}_15MIN_${fileDate}_000000.csv"
        echo "">>pmdownload.out
        echo ">>Rename file ${filenamePM} to ${filenameFTP}">>pmdownload.out
        cp ${filenamePM} ${filenameFTP}
        rm ${filenamePM}
        localFile="${cmsbin}/${filenameFTP}"
        newlocalFile="newlocalFile"
        # Keep the CSV header, then de-duplicate data rows on the key columns
        # for each facility type (tac reverses so the newest row wins)
        awk "NR==1" ${localFile}>${newlocalFile}
        if [ "$pmtype1" = "GEPort15Min" ] || [ "$pmtype1" = "10GEPort15Min" ] || [ "$pmtype1" = "EthDsl15Min" ] || [ "$pmtype1" = "PON15Min" ]; then
            tac ${localFile} | awk -F$csvSeparator '!_[$3,$6,$10]++' | tac >>${newlocalFile}
            localFile="${newlocalFile}"
        fi
        if [ "$pmtype1" = "DSL15Min" ]; then
            tac ${localFile} | awk -F$csvSeparator '!_[$3,$43,$45]++' | tac >>${newlocalFile}
            localFile="${newlocalFile}"
        fi
        if [ "$pmtype1" = "ERPS15Min" ]; then
            tac ${localFile} | awk -F$csvSeparator '!_[$3,$6,$27]++' | tac >>${newlocalFile}
            localFile="${newlocalFile}"
        fi
        if [ "$pmtype1" = "ONT15Min" ] || [ "$pmtype1" = "OntDs115Min" ] || [ "$pmtype1" = "ONTPWE315Min" ]; then
            tac ${localFile} | awk -F$csvSeparator '!_[$3,$5,$8]++' | tac >>${newlocalFile}
            localFile="${newlocalFile}"
        fi
        if [ "$pmtype1" = "OntEthFe15Min" ] || [ "$pmtype1" = "OntEthGe15Min" ] || [ "$pmtype1" = "OntEthHpna15Min" ]; then
            tac ${localFile} | awk -F$csvSeparator '!_[$3,$6,$9]++' | tac >>${newlocalFile}
            localFile="${newlocalFile}"
        fi
        remoteFile="${remoteDir}/${filenameFTP}"
        ftpcmd="${cmsbin}/cftp -h $hostName -u $userName -p $password"
        ftpcmd="${ftpcmd} $localFile $remoteFile"
        timenow=`date +'%m/%d/%y %H:%M:%S'`
        echo "">>pmdownload.out
        echo "# ---------------------------------------------------">>pmdownload.out
        echo "# Transfer $1 15-MIN file at ${timenow}">>pmdownload.out
        echo "# ---------------------------------------------------">>pmdownload.out
        echo "">>pmdownload.out
        echo ">>${ftpcmd}" >>pmdownload.out
        ${ftpcmd} >>pmdownload.out
        rm -f $localFile
        rm -f $newlocalFile
    fi
}

# ------------------------------------------------------------------------------
# Get 1-DAY PM data
getpm_1DAY()
{
    pmtype=$1
    pmtype1=`echo $pmtype | sed 's/1DAY//'`
    ##echo "pmtype1 = " $pmtype1
    pmlocation=$2
    if [ "$pmlocation" = "" ]; then
        pmloc="BOTH"
    fi
    if [ "$pmlocation" = "NEND" ]; then
        pmloc="2"
    fi
    if [ "$pmlocation" = "FEND" ]; then
        pmloc="1"
    fi

    outputFiles

    # ----------------------------------------------------
    # Execute getpm to retrieve 1-DAY PM data
    timeStamp=`date +'%m%d%y_%H%M%S'`
    timenow=`date +'%m/%d/%y %H:%M:%S'`
    filenamePM="pm_"$pmtype1"_1DAY_${timeStamp}.csv"
    ##echo "filenamePM = " $filenamePM
    if [ "$pmloc" = "BOTH" ]; then
        pmcmd="${cmsbin}/getpm.sh -p $pmtype"
        pmcmd="${pmcmd} -s \"${startTime1day}\" -e \"${endTime1day}\""
        pmcmd="${pmcmd} -f csv -o $filenamePM -rs $csvSeparator"
    fi
    if [ "$pmloc" != "BOTH" ]; then
        pmcmd="${cmsbin}/getpm.sh -p $pmtype"
        pmcmd="${pmcmd} -s \"${startTime1day}\" -e \"${endTime1day}\""
        pmcmd="${pmcmd} -l $pmloc -f csv -o $filenamePM -rs $csvSeparator"
    fi
    echo "">>pmdownload.out
    echo "# ---------------------------------------------------">>pmdownload.out
    echo "# Execute getpm $1 1-DAY at ${timenow}">>pmdownload.out
    echo "# ---------------------------------------------------">>pmdownload.out
    echo "">>pmdownload.out
    echo ">>${pmcmd}" >>pmdownload.out
    if [ "$pmloc" = "BOTH" ]; then
        ${cmsbin}/getpm.sh -p $pmtype -s "${startTime1day}" -e "${endTime1day}" -f csv -o $filenamePM -rs $csvSeparator >>pmdownload.out
    fi
    if [ "$pmloc" != "BOTH" ]; then
        ${cmsbin}/getpm.sh -p $pmtype -s "${startTime1day}" -e "${endTime1day}" -l $pmloc -f csv -o $filenamePM -rs $csvSeparator >>pmdownload.out
    fi

    # ----------------------------------------------------
    # Transfer the 1-DAY PM output file using the cftp script
    if [ -f $filenamePM ]; then
        filenameFTP="${file_hostname}_pm_${pmtype1}_1DAY_${fileDate2}_000000.csv"
        echo "">>pmdownload.out
        echo ">>Rename file ${filenamePM} to ${filenameFTP}">>pmdownload.out
        cp ${filenamePM} ${filenameFTP}
        rm ${filenamePM}
        localFile="${cmsbin}/${filenameFTP}"
        remoteFile="${remoteDir}/${filenameFTP}"
        ftpcmd="${cmsbin}/cftp -h $hostName -u $userName -p $password"
        ftpcmd="${ftpcmd} $localFile $remoteFile"
        timenow=`date +'%m/%d/%y %H:%M:%S'`
        echo "">>pmdownload.out
        echo "# ---------------------------------------------------">>pmdownload.out
        echo "# Transfer $1 1-DAY file at ${timenow}">>pmdownload.out
        echo "# ---------------------------------------------------">>pmdownload.out
        echo "">>pmdownload.out
        echo ">>${ftpcmd}" >>pmdownload.out
        ${ftpcmd} >>pmdownload.out
        rm -f $localFile
    fi
}

# ------------------------------------------------------------------------------
# Get call-status PM data (15-MIN window)
getpm_adsl()
{
    pmtype=$1
    pmlocation=$2
    if [ "$pmlocation" = "" ]; then
        pmloc="BOTH"
    fi
    if [ "$pmlocation" = "NEND" ]; then
        pmloc="2"
    fi
    if [ "$pmlocation" = "FEND" ]; then
        pmloc="1"
    fi

    outputFiles

    # ----------------------------------------------------
    # Execute getpm to retrieve 15-MIN PM data
    timeStamp=`date +'%m%d%y_%H%M%S'`
    timenow=`date +'%m/%d/%y %H:%M:%S'`
    filenamePM="pm_${pmtype}_${timeStamp}.csv"
    echo " FilenamePM = " $filenamePM
    if [ "$pmloc" = "BOTH" ]; then
        pmcmd="${cmsbin}/getpm.sh -p $pmtype"
        pmcmd="${pmcmd} -s \"${startTime15min}\" -e \"${endTime15min}\""
        pmcmd="${pmcmd} -f csv -o $filenamePM -rs $csvSeparator"
    fi
    if [ "$pmloc" != "BOTH" ]; then
        pmcmd="${cmsbin}/getpm.sh -p $pmtype"
        pmcmd="${pmcmd} -s \"${startTime15min}\" -e \"${endTime15min}\""
        pmcmd="${pmcmd} -l $pmloc -f csv -o $filenamePM -rs $csvSeparator"
    fi
    echo "">>pmdownload.out
    echo "# ---------------------------------------------------">>pmdownload.out
    echo "# Execute getpm $1 15-MIN at ${timenow}">>pmdownload.out
    echo "# ---------------------------------------------------">>pmdownload.out
    echo "">>pmdownload.out
    echo ">>${pmcmd}" >>pmdownload.out
    if [ "$pmloc" = "BOTH" ]; then
        ${cmsbin}/getpm.sh -p $pmtype -s "${startTime15min}" -e "${endTime15min}" -f csv -o $filenamePM -rs $csvSeparator >>pmdownload.out
    fi
    if [ "$pmloc" != "BOTH" ]; then
        ${cmsbin}/getpm.sh -p $pmtype -s "${startTime15min}" -e "${endTime15min}" -l $pmloc -f csv -o $filenamePM -rs $csvSeparator >>pmdownload.out
    fi

    # ----------------------------------------------------
    # Transfer the 15-MIN PM output file using the cftp script
    if [ -f $filenamePM ]; then
        # Uses ${pmtype} here because pmtype1 is never set in this function
        filenameFTP="${file_hostname}_pm_${pmtype}_15MIN_${fileDate}_000000.csv"
        echo "">>pmdownload.out
        echo ">>Rename file ${filenamePM} to ${filenameFTP}">>pmdownload.out
        cp ${filenamePM} ${filenameFTP}
        rm ${filenamePM}
        localFile="${cmsbin}/${filenameFTP}"
        remoteFile="${remoteDir}/${filenameFTP}"
        ftpcmd="${cmsbin}/cftp -h $hostName -u $userName -p $password"
        ftpcmd="${ftpcmd} $localFile $remoteFile"
        timenow=`date +'%m/%d/%y %H:%M:%S'`
        echo "">>pmdownload.out
        echo "# ---------------------------------------------------">>pmdownload.out
        echo "# Transfer $1 15-MIN file at ${timenow}">>pmdownload.out
        echo "# ---------------------------------------------------">>pmdownload.out
        echo "">>pmdownload.out
        echo ">>${ftpcmd}" >>pmdownload.out
        ${ftpcmd} >>pmdownload.out
        rm -f $localFile
    fi
}

# -------------------------------------------------------------------------------
# Download PM data
getpmdata1Day()
{
    # This will download both NEND and FEND PM in the same file
    getpm_1DAY $1
    # This will download only NEND PM
    #getpm_1DAY $1 NEND
    # This will download only FEND PM
    #getpm_1DAY $1 FEND
}

getpmdataadsl()
{
    # This will download both NEND and FEND PM in the same file
    getpm_adsl $1
    # This will download only NEND PM
    #getpm_adsl $1 NEND
    # This will download only FEND PM
    #getpm_adsl $1 FEND
}

getpmdata15Min()
{
    # This will download both NEND and FEND PM in the same file
    getpm_15MIN $1
    # This will download only NEND PM
    #getpm_15MIN $1 NEND
    # This will download only FEND PM
    #getpm_15MIN $1 FEND
}

# -------------------------------------------------------------------------------
# Start downloading PM data for each facility type.
# To disable a download, add a "#" character before the "getpmdata" line.
getpmdata1Day OC481DAY
getpmdata1Day OC121DAY
getpmdata1Day OC31DAY
getpmdata1Day STS48c1DAY
getpmdata1Day STS12c1DAY
getpmdata1Day STS3c1DAY
getpmdata1Day STS11DAY
getpmdata1Day DS31DAY
getpmdata1Day DS11DAY
getpmdata1Day IMA1DAY
getpmdata1Day IMALink1DAY
getpmdata1Day ADSL1DAY
getpmdata1Day HDSL1DAY
getpmdata1Day XDSL1DAY
getpmdata1Day XDSLGroup1DAY
getpmdata15Min OC4815MIN
getpmdata15Min OC1215MIN
getpmdata15Min OC315MIN
getpmdata15Min STS48c15MIN
getpmdata15Min STS12c15MIN
getpmdata15Min STS3c15MIN
getpmdata15Min STS115MIN
getpmdata15Min DS315MIN
getpmdata15Min DS115MIN
getpmdata15Min IMA15MIN
getpmdata15Min IMALink15MIN
getpmdata15Min ADSL15MIN
getpmdata15Min HDSL15MIN
getpmdata15Min XDSL15MIN
getpmdata15Min XDSLGroup15MIN

## By default the following are 15-Min data only
getpmdata15Min ETHERNET
getpmdataadsl ADSLCALLSTATUS
getpmdataadsl XDSLCALLSTATUS
getpmdataadsl XDSLGROUPCALLSTATUS

#------------------------------------------
## PM for E5312/E5400/E7
getpmdata1Day GEPort1Day
getpmdata1Day 10GEPort1Day
getpmdata1Day ERPS1Day
getpmdata1Day OntEthGe1Day
getpmdata1Day OntEthFe1Day
getpmdata1Day OntEthHpna1Day
getpmdata1Day OntDs11Day
getpmdata1Day ONT1Day
getpmdata1Day ONTPWE31Day
getpmdata1Day EthDsl1Day
getpmdata1Day DSL1Day
getpmdata1Day PON1Day
getpmdata15Min GEPort15Min
getpmdata15Min 10GEPort15Min
getpmdata15Min ERPS15Min
getpmdata15Min OntEthGe15Min
getpmdata15Min OntEthFe15Min
getpmdata15Min OntEthHpna15Min
getpmdata15Min OntDs115Min
getpmdata15Min ONT15Min
getpmdata15Min ONTPWE315Min
getpmdata15Min EthDsl15Min
getpmdata15Min DSL15Min
getpmdata15Min PON15Min
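pmdownload is designed to be run once per day, shortly after midnight, so that yesterday's 15-MIN bins and the day-before-yesterday's 1-DAY bins are complete before collection. A sample crontab entry, assuming the script sits in the CMS bin directory (the path is a placeholder):

# Run the PM download daily at 02:30 local time
30 2 * * * /opt/cms/bin/pmdownload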
2.2 login.expect
#!/usr/bin/expect
set host [lindex $argv 0]
set user [lindex $argv 1]
set password [lindex $argv 2]
set prompt [lindex $argv 3]
set timeout 10
spawn ssh -p 1035 $user@$host
expect {
    "*yes/no*" { send "yes\r"; exp_continue }
    "*assword:" { send "$password\r" }
}
interact
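The script takes the host, user, and password as positional arguments (the fourth, prompt, is read but never used), answers the host-key and password prompts, and leaves the SSH session on port 1035 open for interactive use. A sample invocation with placeholder values:

./login.expect 10.245.247.163 admin secret '#'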
2.3 fulfill pmgcs data
2.3.1 daily_pm_check_missing_data_and_recover.sh
#!/bin/bash REDSHIFT_HOST="xxx" REDSHIFT_PORT=5439 REDSHIFT_DB="xxx" REDSHIFT_USER="xxx" REDSHIFT_PSWD="xxx" start_date=$1 if [ -z $start_date ];then start_date=`date --date="-7 day" +%Y-%m-%d` fi #end_date=$(date -d "$start_date 1 day" +%Y-%m-%d) #end_date=`date --date="-1 day" +%Y-%m-%d` end_date=`date +%Y-%m-%d` start_time=$(date -d "$start_date" +%s) end_time=$(date -d $end_date +%s) echo "start_date: $start_date end_date:$end_date" echo "start_time: $start_time end_time: $end_time" rm -rf missing_pm.sql cp missing_pm.sql.tmp missing_pm.sql sed -i "s/START_TIME/${start_time}/g" missing_pm.sql sed -i "s/END_TIME/${end_time}/g" missing_pm.sql #unload_sql="`cat missing_pm.sql`" #echo $unload_sql export PGPASSWORD=$REDSHIFT_PSWD psql -h $REDSHIFT_HOST -p $REDSHIFT_PORT -d $REDSHIFT_DB -U $REDSHIFT_USER -f missing_pm.sql >result.txt netops@tonyben-dev:~/pmgcs$ ca daily_pm_check_missing_data_and_recover.sh ca: command not found netops@tonyben-dev:~/pmgcs$ cat daily_pm_check_missing_data_and_recover.sh #!/bin/bash BASEDIR=$(dirname $0) log="$BASEDIR/daily_pm_check_missing_data_and_recover.`date +'%Y_%m_%d'`.log" cd $BASEDIR if [ -f /tmp/daily_pm_check_missing_data_and_recover.lock ];then echo "$0 is in process , will exist"|tee -a $log exit fi touch /tmp/daily_pm_check_missing_data_and_recover.lock echo "[`date +'%Y-%m-%d %H:%M:%S'`] Begin to load missing devices"|tee -a $log ./check_pm_missing_data.sh|tee -a $log for org in `cat result.txt|awk '{print $1}'|sort|uniq` do if [ -n "$org" ] && [ "$org" -eq "$org" ] 2>/dev/null; then echo "$org need process" cat result.txt|grep $org>_result_${org} fi done rm _result_50 echo "[`date +'%Y-%m-%d %H:%M:%S'`] Begin to process missing devices"|tee -a $log for s in `ls _result_*` do while [ `ps -ef|grep pm_missing_data_move.py|grep -v grep|wc -l` -gt 3 ] do sleep 1s done if [ -f $s ];then if [ `ps -ef|grep -v grep|grep $s|wc -l` -eq 0 ];then mv $s run_$s nohup python pm_missing_data_move.py -e tony.ben@calix.com --password xxx --filename run_$s && mv run_$s done_${s} & fi fi # nohup python pm_missing_data_move.py -e tony.ben@calix.com --password xxx --filename $s & done #python pm_missing_data_move.py -e tony.ben@calix.com --password xxx $1|tee -a $log echo "[`date +'%Y-%m-%d %H:%M:%S'`] End"|tee -a $log rm -rf result.txt.bak #cp result.txt result.txt.bak mv result.txt result.txt.`date +'%Y_%m_%d'` log_file="s3_daily_pm_check_missing_data_`date +'%Y_%m_%d'`.log" cat ${log_file}*|grep match|awk '{print $5,$7,$6}'>tmp.log sed -i 's/orgid//g' tmp.log sed -i 's/file_name//g' tmp.log sed -i 's/date_time//g' tmp.log sed -i 's/://g' tmp.log parm="fullsync" if [ -z $1 ];then parm="delta" fi subject="[`date +'%Y-%m-%d %H:%M:%S'`] [$parm] Daily PM Files RE-SYNC STATICS" cat tmp.log |awk '{print $1}'|sort|uniq -c |sort -n>tmp body="" while IFS= read -r line do body="$body<li>$line</li>" done < tmp #python sendEmail.py -e tony.ben@calix.com -s "$subject" -b "$body" -a tmp.log --password xxx python sendEmail.py -e tony.ben@calix.com -s "$subject" -b "$body" --password xxx mv tmp.log tmp.log.`date +'%Y_%m_%d'` tar -zcvf s3_daily_pm_check_missing_data_`date +'%Y_%m_%d_%H_%M'`.log.tar.gz ${log_file}* rm -rf ${log_file}* rm -rf /tmp/daily_pm_check_missing_data_and_recover.lock
2.3.2 check_pm_missing_data.sh
#!/bin/bash REDSHIFT_HOST="xx" REDSHIFT_PORT=5439 REDSHIFT_DB="xx" REDSHIFT_USER="masteruser" REDSHIFT_PSWD="xx start_date=$1 if [ -z $start_date ];then start_date=`date --date="-7 day" +%Y-%m-%d` fi #end_date=$(date -d "$start_date 1 day" +%Y-%m-%d) #end_date=`date --date="-1 day" +%Y-%m-%d` end_date=`date +%Y-%m-%d` start_time=$(date -d "$start_date" +%s) end_time=$(date -d $end_date +%s) echo "start_date: $start_date end_date:$end_date" echo "start_time: $start_time end_time: $end_time" rm -rf missing_pm.sql cp missing_pm.sql.tmp missing_pm.sql sed -i "s/START_TIME/${start_time}/g" missing_pm.sql sed -i "s/END_TIME/${end_time}/g" missing_pm.sql #unload_sql="`cat missing_pm.sql`" #echo $unload_sql export PGPASSWORD=$REDSHIFT_PSWD psql -h $REDSHIFT_HOST -p $REDSHIFT_PORT -d $REDSHIFT_DB -U $REDSHIFT_USER -f missing_pm.sql >result.txt
2.3.3 fulfill_data.sh
#!/bin/bash
orgid=$1
device=$2
datelist=$3
process_file=$4
thread=$5
bucket_prefix="s3://gcs.calix.com-wifi-pm-per-"
log_file="s3_daily_pm_check_missing_data_`date +'%Y_%m_%d'`.log"
if [ -z $orgid ]; then
    echo "No OrgID"
    exit
fi
if [ -z $device ]; then
    echo "No Device"
    exit
fi
#echo "Call fulfill_data $orgid $device $process_file $thread"
#echo "Begin process org: $orgid ,device: $device ,missing date:$datelist" | tee -a $log_file
for bucket in radio sta
do
    for folder in backup
    do
        for f in `aws s3 ls ${bucket_prefix}$bucket/$folder/$orgid/$device | awk '{print $4}'`
        do
            # File names look like <device>_<epoch>.csv; recover the date
            fnocsv=${f%%.csv}
            cdevice=`echo $fnocsv | awk -F '_' '{print $1}'`
            time_long=`echo $fnocsv | awk -F '_' '{print $2}'`
            date_str=$(date -d @$time_long +'%Y-%m-%d')
            if [ `echo $datelist | grep $date_str | wc -l` -eq 1 ]; then
                echo "[$process_file][$thread] match ${bucket_prefix}$bucket/$folder/$orgid/${f} $date_str" >> $log_file
                # echo "[$process_file][$thread] aws s3 mv ${bucket_prefix}$bucket/$folder/$orgid/${f} ${bucket_prefix}$bucket/$orgid/${f}" | tee -a $log_file
                aws s3 mv ${bucket_prefix}${bucket}/backup/$orgid/${f} ${bucket_prefix}${bucket}/$orgid/${f}
                aws s3 mv ${bucket_prefix}${bucket}/dupcheck/$orgid/${f} ${bucket_prefix}${bucket}/$orgid/${f}
                # aws s3 mv ${bucket_prefix}sta/backup/$orgid/${f} ${bucket_prefix}sta/$orgid/${f}
                # aws s3 mv ${bucket_prefix}sta/dupcheck/$orgid/${f} ${bucket_prefix}sta/$orgid/${f}
            fi
        done
    done
done
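fulfill_data.sh is normally driven by pm_missing_data_move.py, which passes the work-file name and thread name purely for log correlation, but it can also be exercised by hand; a sketch with placeholder values:

./fulfill_data.sh 1234 cxnk00000001 2019-03-01,2019-03-02 run_result_1234 Thread-1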
2.3.4 pm_missing_data_move.py
import os, sys
import logging
import logging.handlers
import subprocess
import datetime
import argparse
import smtplib
import threading
from concurrent import futures
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from urllib import unquote

pool = futures.ThreadPoolExecutor(max_workers=10)
device_pool = futures.ThreadPoolExecutor(max_workers=10)
bucket_pref = "gcs.calix.com-wifi-pm-per-"
logger = logging.getLogger()
static_map = {}


def move_file(file_name, orgid):
    for b in ['radio', 'sta']:
        for folder in ['dupcheck', 'backup']:
            bucket_name = "%s%s" % (bucket_pref, b)
            sfile = "s3://%s/%s/%s/%s" % (bucket_name, folder, orgid, file_name)
            dfile = "s3://%s/%s/%s" % (bucket_name, orgid, file_name)
            logger.error("aws s3 mv %s %s", sfile, dfile)
            subprocess.call(["aws", "s3", "mv", sfile, dfile])


def process_device(orgid, device, time_list):
    logger.error("%s Begin process org:%s ,device:%s missing date:%s" % (filename, orgid, device, ','.join(time_list)))
    subprocess.call(['./fulfill_data.sh', orgid, device, ','.join(time_list), filename,
                     threading.currentThread().getName()])
    return


def process_org_data(orgid, device_map):
    logger.error("%s--Begin process org:%s ,with devices:%s --" % (filename, orgid, len(device_map)))
    for device in device_map.keys():
        device_pool.submit(process_device, orgid, device, device_map[device])
        #process_device(orgid, device, device_map[device])


def init_logger():
    """
    initialize the logger
    :return:
    """
    handler = logging.StreamHandler(sys.stderr)
    fmt = '%(asctime)s-[%(filename)s:%(lineno)s]-[%(threadName)s]- %(message)s'
    formatter = logging.Formatter(fmt)
    handler.setFormatter(formatter)
    logger.setLevel(logging.ERROR)
    logger.addHandler(handler)


def generate_body():
    body = """<h4>Daily PM Files RE-SYNC STATISTICS</h4>"""
    body += "<br><br><hr><div>"
    body += "<table>"
    body += "<tr><th>Org</th><th>Device</th><th>Date Time</th><th>File Name</th></tr>"
    for orgid in static_map.keys():
        device_map = static_map[orgid]
        cnt = 0
        for device in device_map.keys():
            list = device_map[device]
            for detail in list:
                cnt += 1
                body += "<tr><td> %s </td><td> %s </td><td> %s </td><td> %s </td></tr>" % (
                    orgid, device, detail['date_time'], detail['file_name'])
        body += "<tr><td>Total</td><td colspan=3>devices: %s total number:%s</td></tr>" % (len(device_map), cnt)
    body += "</table>"
    return body


def generate_static(args):
    logging.error(static_map)
    #if len(static_map) == 0:
    #    logging.error("No need to send email")
    #    return
    subject = "[%s] Daily PM Files RE-SYNC STATISTICS[%s]" % (
        datetime.datetime.now().strftime('%y-%m-%d'),
        "FullSync" if args.fullsync else "Delta")
    body = generate_body()
    emails = args.email
    smtp_host = args.smtpHost
    smtp_port = args.smtpPort
    username = args.username
    password = args.password
    sent_from = args.sentFrom
    if emails:
        mail_server = smtplib.SMTP()
        try:
            msg = MIMEMultipart()
            msg['From'] = sent_from
            msg['Subject'] = subject
            msg.attach(MIMEText(body, 'html'))
            mail_server.connect(smtp_host, smtp_port)
            mail_server.ehlo()
            mail_server.starttls()
            mail_server.ehlo()
            if password and smtp_host != 'eng-smtp.calix.local':
                mail_server.login(username, unquote(password))
            for recipient in emails:
                logger.error("send email to %s", recipient)
                msg['To'] = recipient
                mail_server.sendmail(sent_from, recipient, msg.as_string())
        except Exception as err:
            logger.error("send email failed:%s", err)
            return False
        finally:
            if mail_server:
                mail_server.close()
        return True
    return False


if __name__ == '__main__':
    init_logger()
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-e', '--email', action='append',
                        help="To send multiple emails --email <email-address-1> --email <email-address-2> ...")
    parser.add_argument('--smtpHost', type=str, help="Host of SMTP server", required=False,
                        default="outlook.office365.com")
    parser.add_argument('--smtpPort', type=int, help="Port of SMTP server", required=False, default=587)
    parser.add_argument('--sentFrom', type=str, help="outlook email", required=False,
                        default="noreply-compass-fa@calix.com")
    parser.add_argument('--username', type=str, help="outlook username", required=False,
                        default="noreply-compass-fa@calix.com")
    parser.add_argument('--password', type=str, default='xx', help="outlook password", required=False)
    parser.add_argument('--fullsync', action='store_true', help="full sync all results", required=False)
    parser.add_argument('--filename', type=str, default="result.txt", help="result file to process", required=False)
    args = parser.parse_args()
    global filename
    filename = args.filename
    logger.error(args)
    org_map = {}
    history_map = {}
    # Remember the org/device/date triples already handled by the previous run
    if not args.fullsync:
        if os.path.isfile('result.txt.bak'):
            with open('result.txt.bak') as fp:
                line = fp.readline()
                cnt = 1
                while line:
                    line = fp.readline()
                    cnt += 1
                    if cnt > 2 and line.lstrip().find('|') > 0:
                        tmp = line.split('|')
                        orgid = tmp[0].strip()
                        device = tmp[1].strip()
                        date_time = tmp[2].split()[0].strip()
                        _tmp_str = '%s-%s-%s' % (orgid, device, date_time)
                        history_map[_tmp_str] = _tmp_str
    # Parse the psql result table: orgid | device | date_time
    with open(filename) as fp:
        line = fp.readline()
        #cnt = 1
        while line:
            line = fp.readline()
            # cnt += 1
            # if cnt > 2 and line.lstrip().find('|') > 0:
            if line.lstrip().find('|') > 0 and line.split('|')[0].strip() != 'orgid':
                tmp = line.split('|')
                orgid = tmp[0].strip()
                device = tmp[1].strip()
                date_time = tmp[2].split()[0].strip()
                _tmp_str = '%s-%s-%s' % (orgid, device, date_time)
                if history_map.has_key(_tmp_str):
                    logger.info("orgid:%s device:%s date_time:%s had executed on last job, ignore",
                                orgid, device, date_time)
                    continue
                device_map = org_map[orgid] if org_map.has_key(orgid) else {}
                time_list = device_map[device] if device_map.has_key(device) else []
                time_list.append(date_time)
                device_map[device] = time_list
                org_map[orgid] = device_map
    if org_map.has_key('50'):
        org_map.pop('50')
    for org in org_map.keys():
        process_org_data(org, org_map[org])
    pool.shutdown(wait=True)
    device_pool.shutdown(wait=True)
    #generate_static(args)
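The script targets Python 2 (urllib.unquote, dict.has_key) and parses the pipe-separated psql output produced by check_pm_missing_data.sh. Typical invocations, with placeholder e-mail and password:

# Delta run against one split work file
python pm_missing_data_move.py -e user@example.com --password xx --filename run_result_1234
# Full re-sync of result.txt, ignoring the result.txt.bak history
python pm_missing_data_move.py -e user@example.com --password xx --fullsync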
2.3.5 sendEmail.py
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import os, sys
import json
import urllib2
import argparse
import logging
import logging.handlers
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import Encoders
from urllib import unquote

logger = logging.getLogger()


def get_response(url):
    jsonObj = None
    try:
        req = urllib2.Request(url)
        res_data = urllib2.urlopen(req)
        jsonObj = json.loads(res_data.read())
    except Exception as e:
        logger.error("get response error", e)
    finally:
        return jsonObj


def generate_body():
    body = """<h4>Simulator Demo Org Data</h4>"""
    with open('log.log') as fp:
        line = fp.readline()
        cnt = 1
        while line:
            body += line + "<br>"
            line = fp.readline()
            cnt += 1
    return body


def send_email(args):
    subject = args.subject
    body = args.body
    emails = args.email
    smtp_host = args.smtpHost
    smtp_port = args.smtpPort
    username = args.username
    password = args.password
    sent_from = args.sentFrom
    if emails:
        mail_server = smtplib.SMTP()
        try:
            msg = MIMEMultipart()
            msg['From'] = sent_from
            msg['Subject'] = subject
            msg.attach(MIMEText(body, 'html'))
            # Attach any files given with -a as base64-encoded octet streams
            if args.attachment:
                for name in args.attachment:
                    part = MIMEBase('application', "octet-stream")
                    part.set_payload(open(name, "rb").read())
                    Encoders.encode_base64(part)
                    part.add_header('Content-Disposition', 'attachment; filename="%s"' % (name))
                    msg.attach(part)
            mail_server.connect(smtp_host, smtp_port)
            mail_server.ehlo()
            mail_server.starttls()
            mail_server.ehlo()
            if password and smtp_host != 'eng-smtp.calix.local':
                mail_server.login(username, unquote(password))
            for recipient in emails:
                logger.info("send email to %s", recipient)
                msg['To'] = recipient
                mail_server.sendmail(sent_from, recipient, msg.as_string())
        except Exception as err:
            logger.error("send email failed:%s", err)
            return False
        finally:
            if mail_server:
                mail_server.close()
        return True
    return False


def init_log():
    """
    initialize the logger
    :return:
    """
    logger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stderr)
    fmt = '%(asctime)s-[%(filename)s:%(lineno)s]-[%(threadName)s]- %(message)s'
    formatter = logging.Formatter(fmt)
    handler.setFormatter(formatter)
    logger.addHandler(handler)


def main():
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-e', '--email', action='append',
                        help="To send multiple emails --email <email-address-1> --email <email-address-2> ...")
    parser.add_argument('--smtpHost', type=str, help="Host of SMTP server", required=False,
                        default="outlook.office365.com")
    parser.add_argument('--smtpPort', type=int, help="Port of SMTP server", required=False, default=587)
    parser.add_argument('--sentFrom', type=str, help="outlook email", required=False, default="xx")
    parser.add_argument('--username', type=str, help="outlook username", required=False, default="xx")
    parser.add_argument('--password', type=str, default='xx', help="outlook password", required=False)
    parser.add_argument('-s', '--subject', default='test subject')
    parser.add_argument('-b', '--body', default='test body')
    parser.add_argument('-a', '--attachment', action='append')
    args = parser.parse_args()
    init_log()
    logger.debug("*" * 20)
    logger.debug(args)
    logger.debug("*" * 20)
    send_email(args)


if __name__ == '__main__':
    main()
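A sample invocation with placeholder values; -e and -a may be repeated to add recipients and attachments:

python sendEmail.py -e user@example.com -s "Test subject" -b "<li>line 1</li>" -a tmp.log --password xx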
2.3.6 netstat_check.sh
#!/bin/bash
ss -s
echo "-----------------------------------------"
netstat -n | awk '/^tcp/ {++state[$NF]} END {for(key in state) print key," ",state[key]}'
echo "-----------------------------------------"
lsof | grep s3_daily_pm_check_missing_dat | awk '{print $1}' | sort | uniq -c | sort -n
echo "-----------------------------------------"
lsof | grep s3_daily_pm_check_missing_dat
2.3.7 org_fulfill.sh
#!/bin/bash
orgId=$1
bucket_prefix="s3://wifi-pm-per-"
if [ -z $orgId ]; then
    echo "Missing OrgId"
    exit
fi
cat result.txt | grep $orgId > org_$orgId
for bucket in sta radio
do
    for folder in backup dupcheck
    do
        rm -rf ${bucket}_${folder}_${orgId}
        for f in `aws s3 ls ${bucket_prefix}$bucket/$folder/$orgId/ | awk '{print $4}'`
        do
            fnocsv=${f%%.csv}
            cdevice=`echo $fnocsv | awk -F '_' '{print $1}'`
            time_long=`echo $fnocsv | awk -F '_' '{print $2}'`
            date_str=$(date -d @$time_long +'%Y-%m-%d')
            echo "$f $date_str" >> ${bucket}_${folder}_${orgId}
        done
    done
done
for device in `cat org_$orgId | awk -F '|' '{print $2}' | sort | uniq`
do
    echo "Begin process device $device"
    date_list=`cat org_$orgId | grep $device | awk -F '|' '{print $3}' | awk '{print $1}' | awk '{printf ("%s,",$0)}'`
    for bucket in sta radio
    do
        for folder in backup dupcheck
        do
            cat ${bucket}_${folder}_${orgId} | grep $device > ${bucket}_${folder}_${orgId}_$device
            while IFS= read -r line
            do
                date_str=`echo $line | awk '{print $2}'`
                filename=`echo $line | awk '{print $1}'`
                if [ `echo $date_list | grep $date_str | wc -l` -eq 1 ]; then
                    echo "match $bucket/$orgId/${folder}/$filename with date $date_str"
                    echo "aws s3 mv ${bucket_prefix}$bucket/$orgId/${folder}/$filename ${bucket_prefix}$bucket/$orgId/$filename"
                    #aws s3 mv ${bucket_prefix}$bucket/$orgId/${folder}/$filename ${bucket_prefix}$bucket/$orgId/$filename
                fi
            done < ${bucket}_${folder}_${orgId}_$device
            rm ${bucket}_${folder}_${orgId}_$device
        done
    done
done
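Because the final aws s3 mv command is commented out, a run only prints the moves it would perform, which makes it safe to preview the result before re-enabling the move; for example (the org id is a placeholder):

./org_fulfill.sh 1234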
2.4 collect postgres status
2.4.1 env.sh
#!/bin/bash
export psql_us_prod="psql -h xx -d cloud -U calixcloud"
export psql_us_prod_pass="xx"
export psql_ca_prod="psql -h xx -d cloud -U calixcloud"
export psql_ca_prod_pass="xx"
export psql_dev="psql -h xx -d devops -U postgres"
export psql_dev_pass="postgres"
2.4.2 postgres_cron.sh
#!/bin/bash
BASEDIR=$(cd $(dirname $0) && pwd)
cd $BASEDIR > /dev/null
source env.sh
cron=$1
if [ -z $cron ]; then
    cron="1hour"
fi
logs=$BASEDIR/logs/psql_cronjob_${cron}_`TZ=:Asia/Hong_Kong date +'%Y-%m-%d'`.log

process()
{
    env=$1
    echo "-----------------------------------------------" | tee -a $logs
    echo "Begin process $env" | tee -a $logs
    echo "-----------------------------------------------" | tee -a $logs
    # Resolve the connection command and password for this environment
    pass=`eval echo '$'psql_"${env}_pass"`
    cmd=`eval echo '$'psql_"$env"`
    export PGPASSWORD=$pass
    # Run every "from" query against the remote environment
    for f in `ls cron_${cron}/from*`
    do
        echo "$cmd -f $f" | tee -a $logs
        $cmd -f $f | tee -a $logs 2>>$logs
    done
    # Load the exported CSVs into the local devops database
    export PGPASSWORD=$psql_dev_pass
    for f in `ls cron_${cron}/to_*.tmp`
    do
        sql=${f%%.tmp}
        cp $f $sql
        sed -i "s/ENV/$env/g" $sql
        echo "$psql_dev -f $sql" | tee -a $logs
        $psql_dev -f $sql | tee -a $logs 2>>$logs
    done
    #$cmd -f from_postgres_table_summarize.sql | tee -a $logs 2>>$logs
    #rm -rf to_postgres_process.sql
    #cp to_postgres_process.sql.tmp to_postgres_process.sql
    #sed -i "s/ENV/$env/g" to_postgres_process.sql
    #export PGPASSWORD=$dev_pass
    #$dev -f to_postgres_process.sql | tee -a $logs 2>>$logs
}

main()
{
    cd $BASEDIR > /dev/null
    echo "" | tee -a $logs
    echo "" | tee -a $logs
    echo "+++++++++++++++++++++++++++++++++++++++++++++++" | tee -a $logs
    echo "[`TZ=:Asia/Hong_Kong date +'%Y-%m-%d %H:%M:%S'`] BEGIN postgres $cron DATA" | tee -a $logs
    echo "+++++++++++++++++++++++++++++++++++++++++++++++" | tee -a $logs
    process "us_prod"
    process "ca_prod"
    echo "[`TZ=:Asia/Hong_Kong date +'%Y-%m-%d %H:%M:%S'`] FINISH process $cron DATA" | tee -a $logs
    echo | tee -a $logs
}

main
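process() runs every cron_<interval>/from*.sql against the remote environment and every cron_<interval>/to_*.tmp against the local devops database, so one crontab can drive both intervals; sample entries with a placeholder install path:

* * * * * /home/netops/postgres_status/postgres_cron.sh 1min
0 * * * * /home/netops/postgres_status/postgres_cron.sh 1hour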
2.4.3 cron_1min/to_postgres_active_sql.sql.tmp
truncate table pg_stat_activity_tmp;
COPY pg_stat_activity_tmp from '/tmp/psql_pg_stat_activity.csv' csv header;
delete from pg_stat_activity_tmp where query_start is null;
delete from pg_stat_activity_tmp a using env_pg_stat_activity b
  where b.env_name='ENV' and a.pid=b.pid and a.usesysid=b.usesysid and a.query_start=b.query_start;
delete from pg_stat_activity_tmp_last where env_name='ENV';
insert into pg_stat_activity_tmp_last select * from pg_stat_activity_tmp;
update pg_stat_activity_tmp_last set env_name='ENV', dur_time=age(clock_timestamp(), query_start) where env_name is null;
insert into env_pg_stat_activity select * from pg_stat_activity_tmp_last where env_name='ENV';

truncate table pg_stat_activity_run_tmp;
COPY pg_stat_activity_run_tmp from '/tmp/psql_pg_stat_activity_active.csv' csv header;
delete from env_pg_stat_activity_run a using pg_stat_activity_run_tmp b
  where a.env_name='ENV' and a.pid=b.pid and a.usesysid=b.usesysid and a.query_start=b.query_start;
insert into env_pg_stat_activity_run select * from pg_stat_activity_run_tmp where usename!='replication';
update env_pg_stat_activity_run set env_name='ENV', dur_time=age(clock_timestamp(), query_start) where env_name is null;
-- Mark tracked queries that are no longer in the active snapshot as idle
with t as (
  select a.pid, a.usesysid, a.query_start, a.env_name
  from env_pg_stat_activity_run a
  left join pg_stat_activity_run_tmp b
    on a.pid=b.pid and a.usesysid=b.usesysid and a.query_start=b.query_start
  where a.env_name='ENV' and b.pid is null
)
update env_pg_stat_activity_run x set state='idle'
  from t
  where x.pid=t.pid and x.usesysid=t.usesysid and x.query_start=t.query_start and x.env_name=t.env_name;
delete from env_pg_stat_activity_run a using env_pg_stat_activity b
  where a.env_name=b.env_name and a.env_name='ENV' and a.pid=b.pid and a.usesysid=b.usesysid
    and a.query_start=b.query_start and a.state='idle';
insert into env_pg_stat_activity select * from env_pg_stat_activity_run where state='idle' and env_name='ENV';
delete from env_pg_stat_activity_run where state='idle' and env_name='ENV';
2.4.4 cron_1min/from_postgres_active_sql.sql
COPY (select * from pg_stat_activity
      where pid <> pg_backend_pid() and state='idle'
        and upper(query) not like 'SET%' and upper(query) not like 'SHOW%' and query != 'COMMIT')
  to '/tmp/psql_pg_stat_activity.csv' csv header;
COPY (select * from pg_stat_activity
      where pid <> pg_backend_pid() and state='active'
        and upper(query) not like 'SET%' and upper(query) not like 'SHOW%' and query != 'COMMIT')
  to '/tmp/psql_pg_stat_activity_active.csv' csv header;
2.4.5 cron_1hour/from_postgres_table_summarize.sql
COPY (
  select table_name,
         pg_size_pretty(total_bytes) AS total,
         pg_size_pretty(index_bytes) AS idx,
         pg_size_pretty(toast_bytes) AS toast,
         pg_size_pretty(table_bytes) AS relsize,
         total_bytes, index_bytes, toast_bytes, table_bytes
  from (
    select *, total_bytes - index_bytes - COALESCE(toast_bytes, 0) AS table_bytes
    from (
      SELECT c.oid, nspname AS table_schema, relname AS table_name,
             c.reltuples AS row_estimate,
             pg_total_relation_size(c.oid) AS total_bytes,
             pg_indexes_size(c.oid) AS index_bytes,
             pg_total_relation_size(reltoastrelid) AS toast_bytes
      FROM pg_class c
      LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
      WHERE relkind = 'r'
        and relname in (select tablename from pg_tables where schemaname='public')
    ) a
  ) a
  order by total_bytes desc
) TO '/tmp/table_size.csv' csv header;
COPY (
  select relname, seq_scan, seq_tup_read, idx_scan, idx_tup_fetch,
         n_tup_ins, n_tup_upd, n_tup_del, n_tup_hot_upd,
         n_live_tup, n_dead_tup, n_mod_since_analyze,
         last_vacuum, last_autovacuum, last_analyze, last_autoanalyze,
         vacuum_count, autovacuum_count, analyze_count, autoanalyze_count
  from pg_stat_user_tables
  where schemaname='public'
) to '/tmp/table_opt.csv' csv header;
2.4.6 cron_1hour/to_postgres_process.sql.tmp
--drop this hour's rows first so a re-run is idempotent
delete from env_table_size_inc
  where env_name='ENV'
    and date_time in (select to_timestamp(to_char(now(),'YYYY-mm-dd HH24:00:00'),'YYYY-mm-dd HH24:MI:SS'));
--truncate table table_size_tmp
truncate table table_size_tmp;
--copy data
COPY table_size_tmp from '/tmp/table_size.csv' csv header;
--summarize delta
insert into env_table_size_inc
  select to_timestamp(to_char(now(),'YYYY-mm-dd HH24:00:00'),'YYYY-mm-dd HH24:MI:SS'), 'ENV',
         a.table_name, a.total, a.idx, a.toast, a.relsize,
         (a.total_bytes - b.total_bytes), (a.index_bytes - b.index_bytes),
         (a.toast_bytes - b.toast_bytes), (a.table_bytes - b.table_bytes)
  from table_size_tmp a
  left join table_size_tmp_last b on a.table_name=b.table_name and b.env_name='ENV';
--copy new data
delete from table_size_tmp_last where env_name='ENV';
insert into table_size_tmp_last select * from table_size_tmp;
update table_size_tmp_last set env_name='ENV' where env_name is null;

delete from env_table_opt_inc
  where env_name='ENV'
    and date_time in (select to_timestamp(to_char(now(),'YYYY-mm-dd HH24:00:00'),'YYYY-mm-dd HH24:MI:SS'));
truncate table table_opt_tmp;
COPY table_opt_tmp from '/tmp/table_opt.csv' csv header;
insert into env_table_opt_inc
  select to_timestamp(to_char(now(),'YYYY-mm-dd HH24:00:00'),'YYYY-mm-dd HH24:MI:SS'), 'ENV',
         a.table_name, a.seq_scan, a.seq_tup_read, a.idx_scan, a.idx_tup_fetch,
         a.n_tup_ins, a.n_tup_upd, a.n_tup_del, a.n_tup_hot_upd,
         a.n_live_tup, a.n_dead_tup, a.n_mod_since_analyze,
         a.last_vacuum, a.last_autovacuum, a.last_analyze, a.last_autoanalyze,
         a.vacuum_count, a.autovacuum_count, a.analyze_count, a.autoanalyze_count,
         (a.seq_scan - b.seq_scan), (a.seq_tup_read - b.seq_tup_read),
         (a.idx_scan - b.idx_scan), (a.idx_tup_fetch - b.idx_tup_fetch),
         (a.n_tup_ins - b.n_tup_ins), (a.n_tup_upd - b.n_tup_upd),
         (a.n_tup_del - b.n_tup_del), (a.n_tup_hot_upd - b.n_tup_hot_upd),
         (a.n_live_tup - b.n_live_tup), (a.n_dead_tup - b.n_dead_tup)
  from table_opt_tmp a
  left join table_opt_tmp_last b on a.table_name=b.table_name and b.env_name='ENV';
delete from table_opt_tmp_last where env_name='ENV';
insert into table_opt_tmp_last select * from table_opt_tmp;
update table_opt_tmp_last set env_name='ENV' where env_name is null;
2.5 setup_csc
2.5.1 envsubst.sh
#!/bin/bash

function usage() {
    echo "Usage: $0 -h -e <env-file> [subst-file]"
    echo "Options:"
    echo "  -h|--help    Print help instructions"
    echo "  -e|--env     Use the environment file"
    echo "  subst-file   The file to substitute env variables"
    exit 1
}

while [[ $# > 0 ]]; do
    key="$1"
    case $key in
        -h|--help)
            usage
            ;;
        -e|--env)
            ENV_FILE="$2"
            shift # past argument
            ;;
        *)
            SUBST_FILE="$1"
            ;;
    esac
    shift # past argument or value
done

if [ ! -f "$ENV_FILE" ]; then
    echo "Env file $ENV_FILE does not exist"
    usage
    echo
    exit 2
fi
if [ ! -f "$SUBST_FILE" ]; then
    echo "Substitution file $SUBST_FILE does not exist"
    usage
    echo
    exit 2
fi
SUBST_CMD=`type -p envsubst`
if [ "X$SUBST_CMD" = "X" ]; then
    echo "Cannot find 'envsubst' command"
    usage
    echo
    exit 1
fi
. $ENV_FILE
$SUBST_CMD <$SUBST_FILE
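A sample render using the env.sh from 2.5.2 and a hypothetical template file; the substituted output goes to stdout:

./envsubst.sh -e env.sh csc-application.yml.tmpl > csc-application.yml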
2.5.2 env.sh
#!/bin/bash
export env_name="workflow"
export acs_host="10.245.247.163"
export cloud_api_host="10.245.247.164"
export kafka_bootstrap_server="10.245.247.163"
export greenplum_host="10.245.247.173"
export es_host="nancloud-onprem-05"
export postgres_ip="nancloud-onprem-06.calix.local"
export redis_url="10.245.248.141:26379,10.245.248.142:26379,10.245.248.143:26379"
export redis_master_name="mymaster"
export redis_pass="calix-redis-pass"
export redis_database_index=4
export greenplum_port="15432"
export greenplum_db="onecloud"
export greenplum_username="calixcloud"
export greenplum_password="CalixCloud"
export cloud_mongo_url="mongodb://cdc-kylin:27017/cloud_${env_name}?replicaSet=cmdctl"
export postgres_db="${env_name}_cloud"
export postgres_master_username="postgres"
export postgres_master_password="postgres"
export cloud_postgres_username="$env_name"
export cloud_postgres_password="${env_name}_pass"
export cwmp_postgers_ip="$postgres_ip"
export cwmp_postgres_db="$postgres_db"
export cwmp_postgres_username=$cloud_postgres_username
export cwmp_postgres_password=$cloud_postgres_password
export cloud_api_url="http://${cloud_api_host}:80"
export pmgcs_host="pmgcs-aqatest.calix.com"
export wifi_redshift_host="redshift.amazonaws.com"
export wifi_redshift_port=5439
export wifi_redshift_db="aqa"
export wifi_redshift_username="aqa"
export wifi_redshift_password="0099"
export influxdb="http://10.245.242.247:8086"
export SXACC_HOME="/home/sxacc"
export public_software_address="http://nancloud-onprem-06:8080"
export cloud_repo_home="/home/meteor/cloud_repo"
export zookeeper_cluster="10.245.248.134:2181,10.245.248.133:2181,10.245.248.135:2181"
export fa_kafka_cluster="10.245.248.132:9092,10.245.248.136:9092,10.245.248.137:9092"
2.5.3 util.sh
#!/bin/bash
step=0
RED='