MyTop MySQL Monitoring

#!/bin/bash
################################################################################
#Path on server: 
#Server(s): 
#Path on developer: 
#Author: Jamie Broussard
#Created: 2009-01-06
#Updated: 2009-01-14
#Status: production
#Usage: 
#run from cron.minute
#
#
#Description:
#
#
#Assumptions:
#Server is running, MySql is running
#Dependency:
#mytop, perl, perl DBI, perl DBD mysql, bash
#
#Exceptions:
#Does not report that the server is not taking connections
#To Do:
#Create sensible functions and reuse reuse reuse
################################################################################
#Variable declaration
declare -r AdminEmail="servers@webflyer.com"  # where alerts get mailed
declare -i Danger=150                         # process count that triggers an alert
declare -i RecordCount                        # lines counted in the latest mytop snapshot

#Functions
function mailBody()#BUILD EMAIL BODY
{
        # Print the first 15 lines of the saved mytop error snapshot for
        # server $1. Quoted so a name with spaces can't word-split the path.
        head -n 15 "${1}_mytop_error.txt"
}

function alert()#IF the records exceed the accepted threshold, send an email to admin phones
{
        # $1 = server name. The original tested "-gt || string =" — a
        # numeric greater-than OR a *textual* equality; a single numeric
        # -ge expresses the intended >= directly.
        if [[ $RecordCount -ge $Danger ]]
        then
                # keep the offending snapshot under a distinct name, then
                # mail its head to the admin list
                mv "${1}_mytop.txt" "${1}_mytop_error.txt"
                mailBody "$1" | mail -s "Too many hanging processes on ${1}" "$AdminEmail";
        fi
}

function runmytop()#use mytop to create a file that is later counted
{
        # $1=host $2=database $3=user $4=password $5=server name (file prefix)
        # -b = batch mode: one plain-text snapshot instead of the interactive screen.
        # NOTE(review): the password is passed on the command line and is
        # visible in `ps` output; mytop can read credentials from ~/.mytop —
        # consider moving them there.
        mytop -h "$1" -d "$2" -u "$3" -p "$4" -b > "${5}_mytop.txt"
}

function countlines()
{
        # Count the lines in server $1's mytop snapshot into the global
        # RecordCount. wc -l replaces the original line-by-line read loop;
        # the $(( )) wrapper strips the leading whitespace BSD wc emits.
        # (Like the read loop, a final line lacking a trailing newline is
        # not counted.)
        RecordCount=$(( $(wc -l < "${1}_mytop.txt") ))
}


#######################################
# Monitor one database server: snapshot with mytop, count the records,
# alert the admin if over threshold.
# $1=host $2=database $3=user $4=password $5=server name (file prefix)
#######################################
function monitorDb()
{
        runmytop "$1" "$2" "$3" "$4" "$5"
        countlines "$5"
        alert "$5"
}

# The original repeated the same five-assignment stanza per server; one
# helper call per server keeps the list easy to extend.
# NOTE(review): host/user/password are blank here — presumably scrubbed
# before publication; restore real values before deploying.
monitorDb ""                 "freddie_ballot" "" "" "db2"
monitorDb "localhost"        "freddie_ballot" "" "" "db3"
monitorDb "db4.webflyer.com" "freddie_ballot" "" "" "db4"

exit 0

Back up and archive

rsync ROCKS!!

#!/bin/bash -x

################################################################################
#Path on server: /Users/admin/Documents/scripts/server_bu.sh
#Server(s): vpn.intraflyer.com
#Author: Jamie Broussard
#Created: 2010-05-18
#Updated: 2010-06-07
#Status: production.
#Usage: run under launchd /Users/admin/Library/LaunchAgents/com.backup.web.incr
# To Load:
# launchctl load /Users/admin/Library/LaunchAgents/com.backup.web.incr
#
#Description: incremental backup to external drive
#
################################################################################

# VARIABLE DECLARATION
# make sure to create this directory before running the first time
declare -r destinationPath="/Volumes/LaCie/rsync_backup/";
# the --link-dest= value is appended directly (no space) when the
# command line is assembled below
declare -r rsyncCmd="rsync -a --delete --link-dest=";
# ARRAYS FOR EACH BACKUP DIRECTORY
declare -a sourceDir;
declare -a latestBackup;   # was assigned below without ever being declared
declare -a destinationDir; # NOTE(review): never used — candidate for removal
declare -a syncOutput;
# COUNTERS
declare maxDir;
declare -i i;
declare -i n;
# set this to how many backups you want to keep
declare -i keepBackup=9;
declare -i b;
# -i is what makes the "$var-1" style arithmetic evaluate numerically
declare -i mvBackup=$keepBackup-1;
declare -i toBackup=$keepBackup;

# setup the array to directories you want to backup
# (? appears to be used as a single-character glob standing in for
# spaces in the real path names, e.g. "Shared Items" — TODO confirm)
n=0
sourceDir[n]="/Volumes/VPN_Backup_1/backups/sites/"
latestBackup[n]="sites"

((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/backups/clients/"
latestBackup[n]="clients"

((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/Shared?Items/Public/Corp?Graphics/"
latestBackup[n]="Corp_Graphics"

((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/Library/Collaboration/"
latestBackup[n]="Collaboration"

((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/ServiceData/"
latestBackup[n]="ServiceData"

((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/Library/WebServer/Documents/"
latestBackup[n]="WebServer"

# FUNCTIONS
# Rotate the numbered snapshots for source $i, then rsync a fresh copy
# hard-linked against the newest snapshot (--link-dest), so unchanged
# files cost no extra disk space.
# Reads globals: i (current source index), keepBackup, destinationPath,
#   latestBackup[], sourceDir[], rsyncCmd; writes mvBackup, toBackup,
#   syncOutput[$i].
# Layout per source: <dest><name>.latest is the current mirror;
#   <dest><name>.1 .. <dest><name>.<keepBackup> are progressively older.
# NOTE(review): expansions are deliberately unquoted — rsyncCmd is split
#   on spaces and sourceDir entries rely on ? globs (presumably matching
#   spaces in "Shared?Items" — verify) — so quoting would change
#   behavior. rm -rf on an unquoted concatenation is risky if
#   destinationPath or latestBackup[$i] were ever empty.
function incremental_backup() {
	# reset rotation cursors; "$var-1" evaluates numerically only
	# because these were created with declare -i
	mvBackup=$keepBackup-1;
	toBackup=$keepBackup;
	
	#delete the oldest
	if [ -d $destinationPath${latestBackup[$i]}.$keepBackup ]; then
		rm -rf $destinationPath${latestBackup[$i]}.$keepBackup;
	fi
	
	# b only counts iterations; mvBackup/toBackup do the real work,
	# walking snapshots up one slot (8->9, 7->8, ..., 1->2)
	for ((b=2; b<=($keepBackup); b++))
		do
			#rename the latest backups
			if [ -d $destinationPath${latestBackup[$i]}.$mvBackup ]; then
				mv $destinationPath${latestBackup[$i]}.$mvBackup $destinationPath${latestBackup[$i]}.$toBackup;
			fi
			
			mvBackup=$mvBackup-1;
			toBackup=$toBackup-1;
		done;
		
	# copy the latest just before sync
	if [ -d $destinationPath${latestBackup[$i]}.latest ]; then
		mv $destinationPath${latestBackup[$i]}.latest $destinationPath${latestBackup[$i]}.1;
	fi
	
	# rsync using --link-dest=the latest backup to save space
	syncOutput[$i]=$($rsyncCmd$destinationPath${latestBackup[$i]}.1 ${sourceDir[$i]} $destinationPath${latestBackup[$i]}.latest);

}

# EMAIL THE OUTPUT TO THE ADMIN
function mailBody()#BUILD EMAIL BODY
{
	# One sync report per line. The original `echo ${syncOutput[*]}`
	# left the expansion unquoted, so rsync's multi-line reports were
	# word-split and flattened onto a single line; printing each
	# element quoted keeps them readable in the mail.
	printf '%s\n' "${syncOutput[@]}";
}

# count the elements in our array so we know how many times to run this
maxDir=${#sourceDir[*]}

# USE A LOOP SO WE CAN DO MANY DIFFERENT BACKUPS
for ((i = 0; i < maxDir; i++)); do
	incremental_backup
done

# mail the rsync results to admin
mailBody | mail -s "server_bu complete" jbroussard@webflyer.com
#!/bin/bash -x

#VARIABLES
# trailing space is required: sync() assembles the whole rsync command
# line by plain string concatenation and relies on word-splitting
declare -r rsyncCmd="rsync -avz -e ssh --exclude .DS_Store ";

#DIRECTORIES
# ? appears to be a single-character glob standing in for spaces in the
# real path ("Shared Items", "Corp Graphics") — TODO confirm
declare -r syncToPath="/Volumes/VPN_Backup_1/Shared?Items/Public/Corp?Graphics/graphics/";

#COUNTERS
declare -i dirCount;
declare -i i;
declare -i n;
#ARRAYS
declare -a directoryId;   # NOTE(review): never used below
declare -a syncToName;

# Source machines and the per-machine destination folder name.
# The trailing space inside each source entry is load-bearing: sync()
# concatenates source + syncToPath, and that space is what separates
# rsync's source argument from its destination.
n=0
source[n]="sgraham@192.168.1.44:/Users/sgraham/Documents/graphics/ "
syncToName[n]="sgraham_iMac"

((n += 1))
source[n]="jbroussard@192.168.1.60:/Users/jbroussard/Documents/graphics/ "
syncToName[n]="jbroussard_MacBookPro"

# Run one rsync job for source index $i. The command line is built by
# string concatenation and relies on word-splitting of the unquoted
# expansion (rsyncCmd ends with a space; each source entry carries a
# trailing space) plus ? globs in syncToPath — quoting any of it would
# change behavior.
# NOTE(review): this defines a function named `sync`, shadowing the
# system sync(8) command for the rest of the script; presumably
# intentional, but a rename would be safer — verify no caller expects
# the real sync.
# Reads globals: rsyncCmd, source[], syncToPath, syncToName[], i.
function sync() {
	$rsyncCmd${source[$i]}$syncToPath${syncToName[$i]};
}

#COUNT dirs to sync
dirCount=${#source[*]}

# Kick off one rsync per configured machine. (The loop header already
# initializes i, so the original's separate i=0 was redundant.)
for ((i = 0; i < dirCount; i++)); do
	sync
done

More of the same, this one emails admin progress and errors.

#!/bin/bash -x

################################################################################
#Path on server: /Users/admin/Documents/scripts/server_bu.sh
#Server(s): vpn.intraflyer.com
#Author: Jamie Broussard
#Created: 2010-05-18
#Updated: 2010-06-07
#Status: production.
#Usage: run under launchd /Users/admin/Library/LaunchAgents/com.backup.web.incr
# To Load:
# launchctl load /Users/admin/Library/LaunchAgents/com.backup.web.incr
#
#Description: incremental backup to external drive
#
################################################################################

# VARIABLE DECLARATION
# make sure to create this directory before running the first time
declare -r destinationPath="/Volumes/LaCie/rsync_backup/";
# the --link-dest= value is appended directly (no space) when the
# command line is assembled below
declare -r rsyncCmd="rsync -a --delete --link-dest=";
# ARRAYS FOR EACH BACKUP DIRECTORY
declare -a sourceDir;
declare -a latestBackup;   # was assigned below without ever being declared
declare -a destinationDir; # NOTE(review): never used — candidate for removal
declare -a syncOutput;
# COUNTERS
declare maxDir;
declare -i i;
declare -i n;
# set this to how many backups you want to keep
declare -i keepBackup=9;
declare -i b;
# -i is what makes the "$var-1" style arithmetic evaluate numerically
declare -i mvBackup=$keepBackup-1;
declare -i toBackup=$keepBackup;

# setup the array to directories you want to backup
# (? appears to be used as a single-character glob standing in for
# spaces in the real path names, e.g. "Shared Items" — TODO confirm)
n=0
sourceDir[n]="/Volumes/VPN_Backup_1/backups/sites/"
latestBackup[n]="sites"
((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/Shared?Items/Public/Corp?Graphics/"
latestBackup[n]="Corp_Graphics"
((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/Library/Collaboration/"
latestBackup[n]="Collaboration"
((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/ServiceData/"
latestBackup[n]="ServiceData"
((n += 1))
sourceDir[n]="/Volumes/VPN_Backup_1/Library/WebServer/Documents/"
latestBackup[n]="WebServer"

# FUNCTIONS
# Rotate the numbered snapshots for source $i, then rsync a fresh copy
# hard-linked against the newest snapshot (--link-dest), so unchanged
# files cost no extra disk space.
# Reads globals: i (current source index), keepBackup, destinationPath,
#   latestBackup[], sourceDir[], rsyncCmd; writes mvBackup, toBackup,
#   syncOutput[$i].
# Layout per source: <dest><name>.latest is the current mirror;
#   <dest><name>.1 .. <dest><name>.<keepBackup> are progressively older.
# NOTE(review): expansions are deliberately unquoted — rsyncCmd is split
#   on spaces and sourceDir entries rely on ? globs (presumably matching
#   spaces in "Shared?Items" — verify) — so quoting would change
#   behavior. rm -rf on an unquoted concatenation is risky if
#   destinationPath or latestBackup[$i] were ever empty.
function incremental_backup() {
	# reset rotation cursors; "$var-1" evaluates numerically only
	# because these were created with declare -i
	mvBackup=$keepBackup-1;
	toBackup=$keepBackup;
	
	#delete the oldest
	if [ -d $destinationPath${latestBackup[$i]}.$keepBackup ]; then
		rm -rf $destinationPath${latestBackup[$i]}.$keepBackup;
	fi
	
	# b only counts iterations; mvBackup/toBackup do the real work,
	# walking snapshots up one slot (8->9, 7->8, ..., 1->2)
	for ((b=2; b<=($keepBackup); b++))
		do
			#rename the latest backups
			if [ -d $destinationPath${latestBackup[$i]}.$mvBackup ]; then
				mv $destinationPath${latestBackup[$i]}.$mvBackup $destinationPath${latestBackup[$i]}.$toBackup;
			fi
			
			mvBackup=$mvBackup-1;
			toBackup=$toBackup-1;
		done;
		
	# copy the latest just before sync
	if [ -d $destinationPath${latestBackup[$i]}.latest ]; then
		mv $destinationPath${latestBackup[$i]}.latest $destinationPath${latestBackup[$i]}.1;
	fi
	
	# rsync using --link-dest=the latest backup to save space
	syncOutput[$i]=$($rsyncCmd$destinationPath${latestBackup[$i]}.1 ${sourceDir[$i]} $destinationPath${latestBackup[$i]}.latest);

}

# EMAIL THE OUTPUT TO THE ADMIN
function mailBody()#BUILD EMAIL BODY
{
	# One sync report per line. The original `echo ${syncOutput[*]}`
	# left the expansion unquoted, so rsync's multi-line reports were
	# word-split and flattened onto a single line; printing each
	# element quoted keeps them readable in the mail.
	printf '%s\n' "${syncOutput[@]}";
}

# count the elements in our array so we know how many times to run this
maxDir=${#sourceDir[*]}

# USE A LOOP SO WE CAN DO MANY DIFFERENT BACKUPS
for ((i = 0; i < maxDir; i++)); do
	incremental_backup
done

# mail the rsync results to admin
mailBody | mail -s "server_bu complete" jbroussard@webflyer.com

Tar Up Web Sites and Delete old Logs

#!/bin/bash

################################################################################
#Path on server: /home/site/bin/del_log_arch_site.sh
#Server(s): 
#Path on developer:
#Author: Jamie Broussard
#Created: 2008-09-05
#Updated: 2010-05-05
#Status: Production.
#Usage: run by root user crontab must be root because logs are owned by root
# specify 1 for archiveYes for true
#
#Description:tars up websites locally, deletes older tars and logs first
#
################################################################################

#VARIABLES
# trailing space matters: TarSites() assembles the tar command line by
# string concatenation and relies on word-splitting
declare -r TarCmd="tar -czvf "

#DIRECTORIES and file names
# beginning space is necessary: it separates these paths from whatever
# token is concatenated directly in front of them
declare -r DrTarUpRoot=" /home/site/"
declare -r DrArchiveRoot=" /home/backup/archive/"
declare -r tarExtension=".tgz"

#COUNTERS
declare MaxDir
declare -i i
declare -i n
#ARRAYS
declare -a DrTarUp
declare -a archiveYes

# WEBSITES to delete logs and tar up
# set archiveYes to 1 for a site you want tarred
# bump $n before each new entry so values land in the next slot
n=0
DrTarUp[n]="milepoint.com"
archiveYes[n]=1

# template for additional sites:
#((n += 1))
#DrTarUp[n]="dev.milepoint.com"
#archiveYes[n]=1

#COUNT THE WEBSITES TO TAR UP
MaxDir=${#DrTarUp[*]}

function TarSites() {
	# Tar up site $i (index into DrTarUp) when its archiveYes flag is 1.
	# The flag test is quoted so a missing entry fails the test instead
	# of breaking [ ] syntax.
	if [ "${archiveYes[$i]}" -eq 1 ]
	then
		echo "=====================================================================";
		# was: $DrBuRoot — an undefined variable, so the banner showed
		# only the site name; DrTarUpRoot is the actual source root
		echo Start tar of $DrTarUpRoot${DrTarUp[$i]};
		echo "=====================================================================";
		# deliberately unquoted: TarCmd ends with a space and the Dr*
		# roots begin with one, so word-splitting turns this single
		# string into "tar -czvf <archive>.tgz <site dir>"
		$TarCmd$DrArchiveRoot${DrTarUp[$i]}$tarExtension$DrTarUpRoot${DrTarUp[$i]};
	fi
};

function deleteLogs() {
	# Delete week-old log files for site $i (index into DrTarUp).
	echo "=====================================================================";
	# was: $DrBuRoot — an undefined variable; print the directory the
	# find below actually cleans
	echo Start delete logs of /home/site/${DrTarUp[$i]};
	# -mtime +7: only files untouched for more than a week; -v logs each
	# removal; the path is quoted so a site name can't word-split it
	find "/home/site/${DrTarUp[$i]}/logs" -type f -mtime +7 -exec rm -v '{}' \;

};

#Remove previous tar files from archive dir
# (unquoted on purpose: DrArchiveRoot begins with a space and the * must
# glob, so the expansion is word-split and then glob-expanded)
rm -f $DrArchiveRoot*

#Tar up each website flagged and remove logs
# (the loop header initializes i, so no separate i=0 is needed)
echo "$MaxDir Log dir to clean";
for ((i = 0; i < MaxDir; i++)); do
	deleteLogs
	TarSites
done

echo "=====================================================================";
echo " Tar up Complete";