upload_move.sh:
---------------
#!/bin/bash
#
# Just move files out of the customer upload directory to a temporary
# location before they are moved off the machine.
#
# Robert Butler rob@thebutlerfamily.org
#
# NOTE! It's extremely important to keep the order of upload and destination
# directories in sync. If the list gets out of sync then the uploads from
# one company can get put in another company's destination.
#
# NOTE! Make sure that the trailing slash ('/') is included at the end of
# all paths.

# List of directories to check.
upload=(
    /home/abc_production/upload_here/
    /home/abc_test/upload_here/
    /home/def_production/upload_here/
    /home/def_test/upload_here/
    /home/ghi_production/upload_here/
    /home/ghi_test/upload_here/
)

# List of corresponding destination directories for any files.
dest=(
    /shared/customer_uploads/abc/production/
    /shared/customer_uploads/abc/test/
    /shared/customer_uploads/def/production/
    /shared/customer_uploads/def/test/
    /shared/customer_uploads/ghi/production/
    /shared/customer_uploads/ghi/test/
)

# How many seconds to delay when checking for file changes.
#
# NOTE!
# This delay is currently set high because of problems users have transferring
# large (10Mb+) files. When the delay is too low, the file is moved while the
# transfer is still occuring, causing the user's sftp client to be unable to
# rename the file (leaving the .filepart extension), and causing the remainder
# of the upload to fail.
wait=300

# Log all activity of the script.
log=/shared/upload_move.log
date >> "$log"

# Loop through the list of upload directories.
i=0
while [ "$i" -lt "${#upload[@]}" ]; do
    echo "Processing ${upload[$i]}" >> "$log"

    # Get a list of files in the subdirectory.
    # NOTE: IFS= and -r preserve leading whitespace and backslashes in
    # filenames; without them "read" mangles such names.
    find "${upload[$i]}" -maxdepth 1 -mindepth 1 | \
    while IFS= read -r file; do
        echo " Checking $file ..." >> "$log"

        # Get the unix epoch for the specified file (last status change)
        # and the cutoff time $wait seconds in the past.
        time1=$(stat --format=%Z "$file")
        time2=$(date --date="$wait seconds ago" +%s)

        # If the file hasn't been modified in $wait seconds move it.
        if [ "$time1" -lt "$time2" ]; then
            # Count processes that still hold the file (or anything
            # beneath it) open; a nonzero count means a transfer may
            # still be in progress.
            open=$(lsof -t +D "$file" | wc -l)
            if [ "$open" = "0" ]; then
                # Add yet one more check. Are there any files
                # with a .filepart extension?
                filepart=$(ls -R "$file" | grep -ic "filepart")
                if [ "$filepart" = "0" ]; then
                    directory=$(file -b "$file")
                    if [ "$directory" = "directory" ]; then
                        # The scp/sftp client we're using can't delete remote
                        # subdirectories after they have been copied. The
                        # workaround is to zip up the subdirectory, transfer it
                        # (allowing automatic deletion), and extraction at the
                        # destination.
                        filedir=$(dirname "$file")
                        justfile=$(basename "$file")
                        # BUGFIX: verify the cd succeeded; previously a failed
                        # cd meant zip ran against whatever the current
                        # directory happened to be.
                        if ! cd "$filedir"; then
                            echo " Could not cd to $filedir. Leaving upload untouched." >> "$log"
                            continue
                        fi
                        # BUGFIX: treat ANY nonzero zip exit status as failure.
                        # The old test ([ $? == 1 ]) only caught exit code 1,
                        # but zip reports errors with codes 2-18 as well --
                        # those fell through and the upload was deleted.
                        if ! zip -r "/tmp/[extract]$justfile" "$justfile" > /dev/null; then
                            echo " Problem with archive creation detected. Leaving upload untouched." >> "$log"
                        else
                            mv "/tmp/[extract]${justfile}.zip" "${dest[$i]}"
                            rm -rf "$file"
                            echo " Directory found, converted to ZIP archive." >> "$log"
                        fi
                    else
                        echo " Moved from upload dir to holding dir." >> "$log"
                        mv "$file" "${dest[$i]}"
                    fi
                else
                    # NOTE! If an upload is aborted or
                    # otherwise canceled by a user, the
                    # filepart extension may remain, which
                    # will leave those files in the upload
                    # directory indefinetly (until manually
                    # moved).
                    echo " Contains .filepart extension, leaving in place." >> "$log"
                fi
            else
                echo " In use, not moving." >> "$log"
            fi
        else
            echo " Not old enough, leaving for next check." >> "$log"
        fi
    done
    i=$((i + 1))
done

transfer.ps1:
-------------
# Created by Robert Butler 20090202
#
# Transfer uploaded files from the ssh/sftp server in the DMZ to the local machine.
#
# Revision History:
# 20090202 Initial Revision
# 20090205 Changed to using sftp instead of scp for troubleshooting purposes.
# 20090210 Added additional comments.
# 20090509 Updated for new server IP address.
# 20090918 Added 60-sec delay between transferring each file to alleviate some
#          server load.
# 20100215 Updated to use script library.
# 20111017 Updated comments.
#
if (!$env:USERDOMAIN) {
    # If we're running the script as the system account (as may be the case
    # when a user is not logged in), then USERDOMAIN will be blank. Check for
    # this case, and set if it's blank.
    $env:USERDOMAIN = 'NIACO'
}

. ("$ENV:SystemDrive\$ENV:USERDOMAIN\library.ps1")

# Just as with the upload_move.sh script on the ssh server, the order of
# source and destination directories is vital and must remain in sync.
Set-Variable -Name dir_reports_remote -scope script -Value @(
    '/shared/customer_uploads/abc/production/',
    '/shared/customer_uploads/abc/test/',
    '/shared/customer_uploads/def/production/',
    '/shared/customer_uploads/def/test/',
    '/shared/customer_uploads/ghi/production/',
    '/shared/customer_uploads/ghi/test/')

Set-Variable -Name dir_reports_local -scope script -Value @(
    'D:\Project Directories\Customer_Uploads\abc\production\',
    'D:\Project Directories\Customer_Uploads\abc\test\',
    'D:\Project Directories\Customer_Uploads\def\production\',
    'D:\Project Directories\Customer_Uploads\def\test\',
    'D:\Project Directories\Customer_uploads\ghi\production\',
    'D:\Project Directories\Customer_uploads\ghi\test\')

Set-Variable -Name logfile -scope script -Value "$env:SystemDrive\$env:USERDOMAIN.log"
#Set-Variable -Name logtemp -scope script -Value "$env:SystemDrive\transfer_$(Get-Date -f 'yyyy-MM-dd.HH-mm-ss').log"
Set-Variable -Name logtemp -scope script -Value "D:\Project Directories\customer_uploads\transfer_$(Get-Date -f 'yyyy-MM-dd.HH-mm-ss').log"

Set-Variable -Name reports_server -scope script -Value "192.168.222.10"
Set-Variable -Name reports_username -scope script -Value "sftp_transfer"

#Set-Variable -Name scp_program -scope script -Value "C:\Program Files\Attachmate\RSecure\scp.exe"
Set-Variable -Name scp_program -scope script -Value "C:\Program Files (x86)\Attachmate\RSecure\sftp.exe"

log "*** $(Get-Date -f 'yyyy-MM-dd HH:mm:ss') Upload transfer script starting..." $logtemp

# Make sure there are report directories to check
if ($dir_reports_remote.count -gt 0) {
    $i = 0

    # Yes. Go through them one at a time.
    while ($i -lt $dir_reports_remote.count) {
        $remote_path = $dir_reports_remote[$i]
        log " - Checking for new files at $remote_path" $logtemp

        # We can go ahead and rewrite the var, adding all of the
        # connection information required to reach the remote server.
        # We have to do this because of problems expanding the
        # servername when the path is directly included in the path
        $remote_path = $reports_username + '@' + $reports_server + ':' + $remote_path + '*'

        $date = Get-Date -f "yyyy-MM-dd"
        $local_path = $dir_reports_local[$i] + $date + '\'

        # BUGFIX: always start from an empty baseline listing. Previously
        # this variable was only assigned inside the 'else' branch below,
        # so on a freshly created date directory the later .count
        # comparison read an undefined variable.
        $existing_report_list = @()

        # Only create the directory if the path doesn't already exist.
        if (!(Test-Path $local_path)) {
            # Note, this will create a destination directory every
            # day the script runs regardless whether there are files
            # to download or not.
            mkdir $local_path
        } else {
            # Get a listing of all the files currently in the directory.
            $existing_report_list += Get-ChildItem $local_path -name | sort Name
        }

        # Transfer over any files waiting.
        # Options are:
        #
        # -C Enable compression
        # -d Force target to be a directory
        # -p Preserve timestamps and file attributes
        # -r Recurse subdirectories
        # -u Remove source file after copying
        #
        # BUGFIX: the old '2>$errorOutput' redirected stderr to a *file*
        # named by an undefined variable (it does not capture into the
        # variable); merge stderr into the captured output with 2>&1.
        $output = . "$scp_program" -C -d -p -r -u $remote_path $local_path 2>&1

        # Debugging only (can be disabled in production). Capture the command output
        # log $lastexitcode $logtemp
        # log $output $logtemp

        # Make another listing of files in the directory.
        $new_report_list = @()
        $new_report_list += Get-ChildItem $local_path -name | sort Name

        # Check for any files with subdirectories that were compressed
        # by the upload_move.sh script on tssh1. Extract any found.
        if ($new_report_list.length -gt 0) {
            foreach ($report in $new_report_list) {
                if ($report.toLower() -match '^\[extract\]') {
                    $dest = $local_path.replace('\', '/')
                    cd $dest
                    log " - Archive found, extracting ($report)." $logtemp
                    unzip -n "$report" -d .
                    log "   Extracted, deleting archive." $logtemp
                    $fdel = $local_path + $report
                    . cmd.exe /c del /f "$fdel"
                }
            }
        }

        # Now, compare the two directory listings to see if any new
        # files have been received.
        if ($new_report_list.count -gt $existing_report_list.count) {
            $new_files = @()
            foreach ($file in $new_report_list) {
                if ($existing_report_list -contains $file) {
                } else {
                    $new_files += $file
                    log "   $file" $logtemp
                }
            }
            $new_files_count = $new_files.count
            log " - $new_files_count files received." $logtemp
        } else {
            log "   No new files received." $logtemp
        }

        $i += 1

        # Sleep for a minute between transfers
        # Start-Sleep (60)
    }
}

log "*** $(Get-Date -f 'yyyy-MM-dd HH:mm:ss') Upload transfer script complete." $logtemp

##################################################################
#
# Send status email
#
##################################################################
$MsgBody = "tssh1 Transfer log for $(Get-Date -f 'D') `n`n"
foreach ($line in get-content $logtemp) {
    $MsgBody += "$line `n"
    # Append the details to the primary log file.
    Add-Content $logfile "$line"
}

# Only send the email alert if new files are transferred.
# NOTE(review): $new_files holds only the last directory's additions;
# confirm whether a per-run aggregate is the intended email trigger.
if ($new_files.count -gt 0) {
    email "alerts@niaco.com" "$(Get-Date -f 'yyyy-MM-dd') tssh1 Transfer Log File" $MsgBody
}

# Now that the logfile has been appended to the main log, and we've
# also sent the same thing via email, we're safe to delete.
#Remove-Item $logtemp