Hello everyone. I had a task to create a solution that performs backups of a Windows SQL Server both locally and to S3.
We all know that if you use AWS RDS, you wouldn’t have to worry about backing up SQL databases. However, if you don’t want to pay for any third-party software that could take care of backing up your SQL databases, you need to create your own solution.

That said, I was given a task to create a script in Powershell, which will identify all the databases located on a specified server, verify the edition and state, and based on those, perform backup locally, and then to S3 bucket.
You would need to install a few prerequisites (in case you don’t have them already):
– PowerShell modules: sqlps and AWSPowerShell

I will try to be as quick as possible in explaining. First we have two functions, which will be used to write events to a logfile and to the event log as well. Simple really: it takes whatever you send to the WriteLog function and saves it into a text file.

#region Functions
    #Function Write Log
    Function WriteLog {
        <#
        .SYNOPSIS
            Prepends a UTC timestamp to a message, appends it to the
            script-level $logFile, and echoes the stamped message to the
            pipeline.
        .PARAMETER message
            The text to log.
        #>
        param(
            [string]$message
        )

        $datestr = ((Get-Date).ToUniversalTime()).ToString("yyyy.MM.dd-HH:mm:ss") + ":: "
        $message = $datestr + $message

        try {
            # -ErrorAction Stop is required here: Out-File raises a
            # non-terminating error by default, so without it this
            # try/catch would never fire on a failed write.
            $message | Out-File $Logfile -Append -ErrorAction Stop
        }
        catch {
            Write-Error "ERROR:: Could not write to file:: $logfile"
        }

        Write-Output $message
    }

    Function EventLogWrite {
        <#
        .SYNOPSIS
            Writes an entry to the Application event log under the
            "SQLBackupAndUpload" source (event id 6666).
        .PARAMETER logstring
            The message body for the event-log entry.
        .PARAMETER entryType
            Event severity. Defaults to 'Error' (the original behavior, and
            what every existing caller expects); new callers may pass
            'Warning' or 'Information'.
        #>
        Param(
            [string]$logstring,
            [ValidateSet('Error', 'Warning', 'Information')]
            [string]$entryType = 'Error'
        )

        try {
            # Write-EventLog throws if the "SQLBackupAndUpload" source was
            # never registered (New-EventLog -LogName Application -Source
            # "SQLBackupAndUpload"). Downgrade that to a console warning so a
            # logging failure can never abort a backup run.
            Write-EventLog -LogName Application -Source "SQLBackupAndUpload" -EntryType $entryType -EventId 6666 -Message $logstring -ErrorAction Stop
        }
        catch {
            Write-Warning "Could not write to the event log:: $($_.Exception.Message)"
        }
    }

#endregion

Next piece is to define variables, which we will use throughout the script. They are taken from a JSON config file, and you can see the following defined:
backupDirectory -> this is a location where all the backup script files as well backup files will be stored
excludedDBs -> databases which will be excluded from backup (offline databases are always skipped automatically; the sample config below excludes tempdb)
S3Bucket -> location on S3 where we will store our backup files
s3BucketBackupPath -> folder in S3 bucket where we will store our backup data per server
region -> region in AWS
timeToExecute -> time when the script will run
retainBackup -> how many backups to keep locally

{
  "backupDirectory": "E:\\DailySQLBackup",
  "excludedDBs": "tempdb",
  "s3Bucket": "backupdata",
  "s3BucketBackupPath": "SQLServer01/",
  "region": "eu-west-1",
  "timeToExecute": "12AM",
  "retainBackup": "1"
}
#region Get parameters from JSON config file and Define variables
    # Anchor all relative paths to the script's own folder. (Set-Location is
    # the cmdlet name; 'cd' is just its alias.)
    Set-Location $PSScriptRoot
    $serverName = $env:COMPUTERNAME
    $logFilePrefix = $serverName
    $keepBackup = $false
    $dbBackupComplete = $false
    $fileNameDateSuffix = Get-Date -format "yyyyMMdd"
    $numberBackup = 1
    $numbers = @(1..5)
    $datetimeStamp = Get-Date -format "yyyy-MM-dd-"

    # Load settings from config.json next to the script. Fail fast with a
    # clear message when the file is missing or malformed — nothing below can
    # run without it. (The original read '.\config.json' with no error
    # handling, so a bad CWD or broken JSON surfaced much later.)
    $configPath = Join-Path $PSScriptRoot 'config.json'
    try {
        $jsonParams = Get-Content $configPath -Raw -ErrorAction Stop | ConvertFrom-Json
    }
    catch {
        throw "ERROR:: Could not load configuration from '$configPath':: $($_.Exception.Message)"
    }

    $backupDirectory = $jsonParams.BackupDirectory
    $excludedDB = $jsonParams.ExcludedDBs
    $s3Bucket = $jsonParams.S3Bucket
    $s3BucketBackupPath = $jsonParams.s3BucketBackupPath
    $region = $jsonParams.region
    $retentionPeriod = [int]$jsonParams.retainBackup
    $compressionOption = $true
    # NOTE(review): retentionPeriodMin/Max are not referenced in the visible
    # portion of the script — kept for compatibility; confirm before removing.
    $retentionPeriodMin = 1
    $retentionPeriodMax = $retentionPeriod + 1
#endregion

Next part is the creation of Log folder, logfile per run, removal of logfiles older than 7 days.

#region Create Log and Backup folders if doesn't exist, remove old logfiles, and create new logfile

    #Create log folder if it does not exist
    $logFolder = Join-Path $backupDirectory 'Logs'

    If (!(Test-Path $logFolder)) {
        Try {
            # -ErrorAction Stop so a failed mkdir actually reaches the catch.
            New-Item -ItemType Directory -Path $logFolder -ErrorAction Stop | Out-Null
        } Catch {
            throw "ERROR:: $($_.exception.message)"
        }
    }

    # Build the per-run logfile path FIRST: WriteLog appends to $logFile, so
    # it must be defined before any WriteLog call below. (The original
    # defined it only after the cleanup step, which made the cleanup step's
    # own WriteLog warning fail to write.)
    $logFile = ($logFolder + "\$logFilePrefix" + "_" + [DateTime]::Now.ToString("yyyyMMddHHmmss") + ".log")

    # Prune logfiles older than 7 days. -File keeps directories safe from the
    # sweep, and -ErrorAction Stop makes failures land in the catch.
    $date = (Get-Date).AddDays(-7)
    try {
        Get-ChildItem -Path $logFolder -File -ErrorAction Stop |
            Where-Object { $_.LastWriteTime -le $date } |
            Remove-Item -Force -ErrorAction Stop
    }
    catch {
        WriteLog "WARNING:: Not able to delete logs older than: $date"
    }

    $backupFilesFolder = Join-Path $backupDirectory 'Backup\Bak_Files'

    if (!(Test-Path $backupFilesFolder)) {
        Try {
            New-Item -ItemType Directory -Path $backupFilesFolder -ErrorAction Stop | Out-Null
        } Catch {
            throw "ERROR:: $($_.exception.message)"
        }
    }

    #Couple of informational entries to the log
    WriteLog "INFO:: Computer Name::        $serverName"
    WriteLog "INFO:: Backup Directory::     $backupDirectory"
    WriteLog "INFO:: Exclude DB::           $excludedDB"
    WriteLog "Info:: S3 Bucket::            $s3Bucket"
    WriteLog "INFO:: Retain Period::        $retentionPeriod"
#endregion

Next step is to load Powershell modules. Like I said at the beginning, you would need two modules to be installed, sqlps and AwsPowershell.

#region import required Modules
    # Load the two required modules. Import-Module reports failures as
    # non-terminating errors by default, so each call needs -ErrorAction Stop
    # for the surrounding try/catch to actually trigger.
    try
    {
        Import-Module sqlps -PassThru -DisableNameChecking -ErrorAction Stop
        WriteLog "INFO:: sqlps module loaded"
    }
    catch
    {
        WriteLog "ERROR:: Failed attempt to load modules in PS1 script:: $($_.Exception.Message)"
        EventLogWrite "ERROR:: Failed attempt to load modules in PS1 script:: $($_.Exception.Message)"
        Throw "ERROR:: $($_.exception.message)"
    }

    try
    {
        Import-Module AwsPowerShell -DisableNameChecking -ErrorAction Stop
        WriteLog "INFO:: AwsPowerShell module loaded"
    }
    catch
    {
        WriteLog "ERROR:: Failed attempt to load modules in PS1 script:: $($_.Exception.Message)"
        EventLogWrite "ERROR:: Failed attempt to load modules in PS1 script:: $($_.Exception.Message)"
        Throw "ERROR:: $($_.exception.message)"
    }

#endregion

Once we are done with all this, we are ready to start working on some serious things. First, the script connects to SQL Server Management Object and checks for databases.
It will check if the SQL Server version is SQL Express or Web edition. If it is, the script will not use compressed option, because that parameter is not available for SQL Express databases unfortunately.

#region Connect to SQL Server Management Objects and checking the databases
    try
    {
        # Load the SMO assemblies — one statement per line. (The original had
        # both LoadWithPartialName calls fused onto a single line with no
        # separator, which is a PowerShell parse error.)
        [System.Reflection.Assembly]::LoadWithPartialName("Microsoft.SqlServer.SMO") | Out-Null
        [System.Reflection.Assembly]::LoadWithPartialName("Microsoft.SqlServer.SmoExtended") | Out-Null

        [Data.DataTable] $table = [Microsoft.SqlServer.Management.Smo.SmoApplication]::EnumAvailableSqlServers($false)
        $fullbackup = New-Object ("Microsoft.SqlServer.Management.Smo.Backup")

        if ($table.Rows.Count -gt 0) {
            WriteLog "INFO:: We have an instance:: $($table.Rows[0].Name)"
            foreach ($row in $table) {
                if ($row.Instance -ne 'SQLEXPRESS') {
                    # Rebuild HOST\INSTANCE from the machine name each pass.
                    # (The original appended to $serverName cumulatively, which
                    # produced HOST\INST1\INST2 when several instances exist.)
                    $serverName = "$($env:COMPUTERNAME)\$($row.Instance)"
                    $ServerSMO = New-Object ("Microsoft.SqlServer.Management.Smo.Server") $serverName
                    $DatabaseList = $ServerSMO.Databases
                    $dbInstanceList = $true
                }
            }
        } else {
            WriteLog "INFO:: We have a default Database, not a separate instance"
            try
            {
                $ServerSMO = New-Object ("Microsoft.SqlServer.Management.Smo.Server") $servername
                $DatabaseList = $ServerSMO.Databases
            }
            catch
            {
                WriteLog "ERROR:: script failed to connect to the SQL Server SMO"
                WriteLog "ERROR:: $($_.Exception.Message)"
                EventLogWrite "ERROR:: script failed to connect to the SQL Server SMO:: $($_.Exception.Message)"
                throw "ERROR:: $($_.exception.message)"
            }
        }

        # Express and Web editions do not support BACKUP ... WITH COMPRESSION.
        # Match on the edition-name substring so 32-bit editions are detected
        # too (the original compared against exact '(64-bit)' strings only).
        if ($ServerSMO.Edition -match 'Express Edition|Web Edition') {
            WriteLog "INFO:: This is a $($ServerSMO.Edition). Compression Parameter will not be used"
            $compressionOption = $false
        } else {
            WriteLog "INFO:: This is not a SQL server Web Edition nor Express Edition. Compression Parameter can be used"
            $compressionOption = $true
        }
    }
    catch
    {
        WriteLog "ERROR:: script failed to connect to the SQL Server SMO"
        WriteLog "ERROR:: $($_.Exception.Message)"
        EventLogWrite "ERROR:: script failed to connect to the SQL Server SMO:: $($_.Exception.Message)"
        throw "ERROR:: $($_.exception.message)"
    }
#endregion

Finally, we are coming to a section where the script is backing up databases locally and then to S3 bucket.
First it checks if the database is accessible. If it is not accessible, it means that it is offline, hence, not ready for backup.
It then deletes the old local copy of a backup file, prior backing up the database.
After backup is completed, and file saved locally, backup to S3 is initiated.
After it goes through each database, backing it up and copying it to S3, it also uploads the JSON config file and the logfile.

#region Create Backup
    # Back up every eligible database to a local .bak file, then upload each
    # successful backup to the S3 bucket.
    foreach ($Database in $DatabaseList)
    {
        if ($Database.Name -notin $excludedDB)
        {
            # Offline databases are not accessible and cannot be backed up.
            if ($Database.IsAccessible -eq $true) {

                #Show the database that is currently being backed up:
                $DatabaseName = $Database.Name

                # Work out the numeric suffix for this run's backup file,
                # recycling the slot of the oldest file once $retentionPeriod
                # copies of this database already exist.
                try {
                    $existingBackups = Get-ChildItem -Path $backupFilesFolder -Recurse -ErrorAction Stop |
                        Where-Object { $_.Name -like "$DatabaseName*" }

                    if ($null -eq $existingBackups) {
                        # No previous backups for this database yet.
                        $numberBackup = 1
                    }
                    elseif (($existingBackups | Measure-Object).Count -eq $retentionPeriod) {
                        WriteLog "INFO:: We have max number of backup files. Deleting the oldest backup file"
                        $oldestBackup = $existingBackups | Sort-Object LastWriteTime | Select-Object -First 1

                        # File names look like '<db>-<n>.bak'; reuse <n> from
                        # the file we are about to delete.
                        $numberBackup = [int]$oldestBackup.Name.Split('-')[1].Split('.')[0]
                        WriteLog "INFO:: We are going to use number suffix:: $numberBackup"

                        $filePath = Join-Path $backupFilesFolder $oldestBackup.Name
                        WriteLog "INFO:: Deleting $($oldestBackup.Name)"
                        try
                        {
                            Remove-Item $filePath -ErrorAction Stop
                        }
                        catch
                        {
                            WriteLog "WARNING:: Could not delete $filePath + $($_.Exception.Message)"
                            EventLogWrite "ERROR:: Could not delete $filePath + $($_.Exception.Message)"
                        }
                    }
                    else {
                        $numberBackup = ($existingBackups | Measure-Object).Count + 1
                    }
                }
                catch {
                    WriteLog "ERROR:: Could not get the numbering for Backup naming convention:: $($_.exception.message)"
                    EventLogWrite "ERROR:: Could not get the numbering for Backup naming convention:: $($_.exception.message)"
                }

                $fileName = $DatabaseName + '-' + $numberBackup + '.bak'
                $filePath = Join-Path $backupFilesFolder $fileName

                #Start the backup:
                WriteLog "INFO:: Backing up database $DatabaseName to $backupFilesFolder"

                # Build the argument set once via splatting — the compressed
                # and uncompressed paths differed only by -CompressionOption,
                # so there is no need to duplicate the whole try/catch.
                $backupArgs = @{
                    ServerInstance = $serverName
                    Database       = $DatabaseName
                    BackupFile     = $filePath
                }
                if ($compressionOption) { $backupArgs['CompressionOption'] = 'On' }

                try {
                    Backup-SqlDatabase @backupArgs -ErrorAction Stop
                    if ($compressionOption) {
                        WriteLog "INFO:: Backup finished for $DatabaseName to destination $backupFilesFolder using Compression Option"
                    } else {
                        WriteLog "INFO:: Backup finished for $DatabaseName to destination $backupFilesFolder"
                    }
                    $dbBackupComplete = $true
                }
                catch {
                    WriteLog "ERROR:: backing up database $DatabaseName :: $($_.Exception.Message)"
                    EventLogWrite "ERROR:: backing up database $DatabaseName :: $($_.Exception.Message)"
                    $dbBackupComplete = $false
                }

                # Upload only when the local backup succeeded and a bucket is
                # configured.
                if ($dbBackupComplete -and $s3Bucket) {
                    $key = $s3BucketBackupPath + $datetimeStamp + $fileName

                    WriteLog "INFO:: Starting file upload $fileName to S3 Bucket:: $s3BucketBackupPath"

                    Try
                    {
                        Write-S3Object -BucketName $s3Bucket -File $filePath -Key $key -Region $region -ErrorAction Stop
                        WriteLog "INFO:: File $fileName uploaded to S3 Bucket:: $s3BucketBackupPath"
                    }
                    Catch
                    {
                        WriteLog "ERROR:: Failed attempt to upload $fileName to $s3BucketBackupPath $($_.Exception.Message)"
                        EventLogWrite "ERROR:: Failed attempt to upload $fileName to $s3BucketBackupPath $($_.Exception.Message)"
                    }
                }
            } else {
                WriteLog "WARNING:: Database: $($Database.Name) is Offline"
                WriteLog "WARNING:: Backup skipped!"
            }
        }
        else
        {
            WriteLog "WARNING:: Backup for the database:: $($Database.Name) is explicitly denied"
        }
    }
    WriteLog "INFO:: Backup job finished"
#endregion

#region Copy JSON config file and logfiles to S3 Bucket

    # The config was loaded from the script's own folder, so upload it from
    # there too. (The original built this path under $backupDirectory, which
    # is only correct when the script happens to live inside the backup
    # directory.)
    $configFile = Join-Path $PSScriptRoot 'config.json'
    $jsonKey = $s3BucketBackupPath + 'config.json'
    # NOTE(review): each run overwrites the same 'logfile.txt' object, so only
    # the latest run's log survives in S3 — use Split-Path -Leaf $logFile as
    # the key if per-run logs should be retained. Confirm intent.
    $logKey = $s3BucketBackupPath + 'logfile.txt'

    Try
    {
        Write-S3Object -BucketName $s3Bucket -File $configFile -Key $jsonKey -Region $region -ErrorAction Stop
        WriteLog "INFO:: JSON file $configFile uploaded to S3 Bucket:: $s3BucketBackupPath"
    }
    Catch
    {
        WriteLog "ERROR:: Failed attempt to upload JSON file $configFile to $s3BucketBackupPath $($_.Exception.Message)"
        EventLogWrite "ERROR:: Failed attempt to upload JSON file $configFile to $s3BucketBackupPath $($_.Exception.Message)"
    }

    Try
    {
        Write-S3Object -BucketName $s3Bucket -File $logfile -Key $logKey -Region $region -ErrorAction Stop
        WriteLog "INFO:: LogFile $logfile uploaded to S3 Bucket:: $s3BucketBackupPath"
    }
    Catch
    {
        WriteLog "ERROR:: Failed attempt to upload logfile $logfile to $s3BucketBackupPath $($_.Exception.Message)"
        EventLogWrite "ERROR:: Failed attempt to upload logfile $logfile to $s3BucketBackupPath $($_.Exception.Message)"
    }
#endregion

This site uses Akismet to reduce spam. Learn how your comment data is processed.

Post Navigation