Friday, December 23, 2011

Multiple SQL Restores with Powershell, Part 3

Here is the SQL native version of the script I posted in Part 2. If you use native SQL backups or Red Gate SQL HyperBac, this script will allow you to restore multiple databases to a different SQL instance, with different destination volumes for the database files. The script will do the following:

  1. Recursively grabs the most recent backup files from a network share or local path of my choice.
  2. Starts a loop on each backup file that will:
    • Get the database name from the SQL backup file header.
    • Get the logical name and physical path of the files in the database to be restored.
    • Once the logical/physical file info is gathered, a new function loops over it, stripping the old physical path from each file and replacing it with the new one. The same loop 'constructs' the MOVE clauses of the RESTORE statement that will be used in a query by the next function. With this function, I no longer care how many files are in the database being restored, nor whether they are data, log or full text catalog files. The function handles all of them and constructs the proper part of the SQL query.
    • The next function starts the SQL database restore, plugging the values provided by the previous functions into the SQL query (an example of the finished statement is sketched just after this list).
    • The final function, run after the restore is complete, sets the dbowner to the SA account.
    • The main loop ends and starts over again with the next backup file until all are processed.
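To make this concrete, the finished T-SQL that the script builds and runs against the target instance looks roughly like the example below. The database name, logical file names and backup path are made-up placeholders; the real values come from the backup header, the file list and the path variables defined in the script.

-- Hypothetical example of the RESTORE statement assembled by the script.
-- Database name, logical names and the backup path are placeholders.
RESTORE DATABASE [Case0001]
FROM DISK = N'\\backupserver\share\Case0001\FULL\Case0001_full.bak'
WITH RECOVERY,
 MOVE N'Case0001_Data' TO N'H:\Databases\Case0001_Data.mdf',
 MOVE N'Case0001_Log' TO N'I:\Tranlogs\Case0001_Log.ldf',
 MOVE N'sysft_Case0001_Catalog' TO N'D:\Program Files\Microsoft SQL Server\MSSQL10_50.MSSQLSERVER\MSSQL\FTData\Case0001_Catalog',
 NOUNLOAD, REPLACE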

# Database Server Connection Variables
# Only need to change the $SQLServer variable to the SQL instance you are restoring the databases to.
#--------------------------------------------
[STRING]$SQLServer                = 'sql02'
[STRING]$SQLDatabase             = 'master'
[STRING]$SQLConnectString        = "Data Source=$SQLServer; Initial Catalog=$SQLDatabase; Integrated Security=True;"
[OBJECT]$SQLConnection             = New-Object System.Data.SqlClient.SqlConnection($SQLConnectString);

# Functions we are going to use.
#

function GetDatabaseName ([STRING]$backupfile)
{
    $SQLConnection.Open()
    $SQLQuery = "RESTORE HEADERONLY 
                FROM DISK = N'$backupfile' 
                WITH NOUNLOAD"
    $SQLCommand = New-Object system.Data.SqlClient.SqlCommand
    $SQLCommand.CommandText = $SQLQuery
    $SQLCommand.Connection = $SQLConnection
    $SQLAdapter = New-Object System.Data.SqlClient.SqlDataAdapter
    $SQLAdapter.SelectCommand = $SQLCommand
    $DataSet = New-Object System.Data.DataSet
    $SqlAdapter.Fill($DataSet)
    $SQLConnection.Close()
    return $DataSet.Tables[0].Rows[0].Item('DatabaseName')
}

function GetDatabaseFiles ([STRING]$backupfile)
{
    $SQLConnection.Open()
    $SQLQuery = "RESTORE FILELISTONLY 
                FROM DISK = N'$backupfile' 
                WITH NOUNLOAD"
    $SQLCommand = New-Object system.Data.SqlClient.SqlCommand
    $SQLCommand.CommandText = $SQLQuery
    $SQLCommand.Connection = $SQLConnection
    $SQLAdapter = New-Object System.Data.SqlClient.SqlDataAdapter
    $SQLAdapter.SelectCommand = $SQLCommand
    $DataSet = New-Object System.Data.DataSet
    $SqlAdapter.Fill($DataSet)
    $SQLConnection.Close()
    return $DataSet.Tables[0] | Select-Object LogicalName,PhysicalName,type
}

function NewDBFilePaths ($SourceFiles) 
{

    $MoveCommand = @()
    foreach ($item in $SourceFiles) {
        If ($item.Type -eq "D") {$LogicalName = $item.LogicalName
            $DBFile = $item.PhysicalName
            $DBFile = $DBFile.Replace($DBFile.Substring(0,$DBFile.LastIndexOf("\")+ 1), $DBFilePath)
            $MoveCommand = $MoveCommand + " MOVE N'$LogicalName' TO N'$DBFile',"
        }
        If ($item.Type -eq "L") {$LogicalName = $item.LogicalName
            $DBFile = $item.PhysicalName
            $DBFile = $DBFile.Replace($DBFile.Substring(0,$DBFile.LastIndexOf("\")+ 1), $DBLogsPath)
            $MoveCommand = $MoveCommand + " MOVE N'$LogicalName' TO N'$DBFile',"
        }
        If ($item.Type -eq "F") {$LogicalName = $item.LogicalName
            $DBFile = $item.PhysicalName
            $DBFile = $DBFile.Replace($DBFile.Substring(0,$DBFile.LastIndexOf("\")+ 1), $DBFTSPath)
            $MoveCommand = $MoveCommand + " MOVE N'$LogicalName' TO N'$DBFile',"
        }
    }    
    Return $MoveCommand
}

function SQLRestore ([STRING]$Database,[STRING]$BackupFile,[STRING]$FilePath)
{
    $SQLConnection.Open()
    $SQLQuery = "RESTORE DATABASE [$Database] FROM DISK = N'" + $BackupFile + "'" + " WITH RECOVERY," + $FilePath + " NOUNLOAD, REPLACE"
    $SQLCommand = New-Object system.Data.SqlClient.SqlCommand ($SQLQuery, $SQLConnection)
    $SQLCommand.ExecuteNonQuery() | Out-Null
    $SQLConnection.Close()
}

function ChangeDBOwner ([STRING]$Database,[STRING]$Owner)
{
    $Owner = $Owner -Replace("'","")
    $SQLConnection.Open()
    $SQLQuery     = "EXEC [$Database].dbo.sp_changedbowner @loginame = '$Owner', @map = false"
    $SQLCommand = New-Object system.Data.SqlClient.SqlCommand ($SQLQuery, $SQLConnection)
    $SQLCommand.ExecuteNonQuery() | Out-Null
    $SQLConnection.Close()
}


# Variables
#
# Directory on the target SQL instance where you want the data (MDF) files to land.
$DBFilePath = "H:\Databases\"
# Directory on the target SQL instance where you want the transaction log (LDF) files to land.
$DBLogsPath = "I:\Tranlogs\"
# Directory on the target SQL instance where you want the Full Text catalog files to land.
$DBFTSPath = "D:\Program Files\Microsoft SQL Server\MSSQL10_50.MSSQLSERVER\MSSQL\FTData\"
# This one is useful if you are going to filter out files by date
$filedate = Get-Date -Format M-d-yyyy_hh-mm-ss
# Directory where backup files are located.
$Directory = '\\mp1infzfs001\files\Backups\sql02\SQL02'

# Here I'm setting a variable that will do a recursive search for the files I want to restore.
# In this particular case I'm grabbing the most recent full backup file for each database and filtering out system databases
# and some specific databases. Of course, you would adjust this to your particular needs.

$files = Get-ChildItem $Directory -filter "*full*.bak" -rec | Where-Object `
    {($_.Directory -notlike $Directory+"\master\FULL") `
    -and ($_.Directory -notlike $Directory+"\msdb\FULL") `
    -and ($_.Directory -notlike $Directory+"\model\FULL") `
    -and ($_.Directory -notlike $Directory+"\portal1\FULL") `
    -and ($_.Directory -notlike $Directory+"\FTIConversionEngine\FULL") `
    -and ($_.Directory -notlike $Directory+"\rpfdb\FULL") `
    -and ($_.Directory -notlike $Directory+"\rs_tempdb\FULL") }

# For each database directory, keep only the most recent backup file, then sort the result by name.
$files = $files | group directory | foreach {@($_.group | sort {[datetime]$_.lastwritetime} -desc)[0]} | sort {$_.name}

# Start looping through each backup file and restoring the databases
foreach ($filename in $files) {
    
    #Set the filename variable to the fullpath/name of the backup file
    $filename = $filename.FullName
    
    # Get Database Name
    $dbName = GetDatabaseName -backupfile $filename
    $dbName = $dbName[1..$dbName.Length]    # drop the row count that Fill() emitted inside the function
    
    # Get Database File info from backup file.
    $dbFiles = GetDatabaseFiles -backupfile $filename
    $dbFiles = $dbFiles[1..$dbFiles.Length]    # drop the row count that Fill() emitted inside the function
    
    # Update Database file destination path.
    $Filepaths = NewDBFilePaths -SourceFiles $dbFiles
    
    # Execute SQL restore
    SQLRestore -Database $dbName -BackupFile $filename -FilePath $Filepaths
    
    # Change restored database dbowner to SA
    ChangeDBOwner -Database $dbName -Owner sa
}

Multiple SQL Restores with Powershell, Part 2

Since the events detailed in Part 1, there have been several changes. The application in question has been through 2 updates and now, for every 'case' opened in it, 3 SQL databases are generated. In 18 months, the SQL cluster acting as the backend for this app has gone from 240+ databases to 1300+ databases. The upgrades to the app also introduced more changes to the databases themselves. The main database for any case now has 5 files (3 data, 1 log, 1 full text catalog). The remaining 2 databases associated with a case have 2 or 3 data files plus a log file.

Another change was the deployment of RedGate's SQL Backup Pro to our SQL environment. It was something I strongly proposed after the first upgrade. Unlike the Commvault SQL agent, SQL Backup Pro can be fully scripted via T-SQL and therefore I could leverage Powershell on top of it.

The second upgrade had its challenges. A big issue was that, when cases were removed from the application frontend, the databases were not cleaned out of the SQL backend, so we had quite a few databases that were never upgraded during the first upgrade. These databases differed significantly from the upgraded ones in the number of files they contained, so my original script would break the first time it encountered one while processing the loop.

Also, I wanted to leverage SQL Backup Pro for the new restore process, which meant I could not use SQL SMO in my Powershell script. Due to my own inexperience with Powershell, I ended up bloating my script to more than twice the number of lines in order to accommodate the use of SQL Backup Pro and the different database versions of the app on the SQL server. Clearly I needed to devote some time to tidying up the script and making it more modular. So I started getting serious about using Powershell functions, and they are absolutely phenomenal.

The script below is how I'm handling these massive SQL database restores using Powershell and leveraging SQL Backup Pro. In a nutshell, it does the following:

  1. Recursively grabs the most recent backup files from a network share or local path of my choice.
  2. Starts a loop on each backup file which does the following:
    • Gets the database name from the backup file header using the stored procedure by Red Gate.
    • Gets the logical name and physical path of the files in the database to be restored, again using the stored procedure from Red Gate.
    • Once the logical/physical file info is gathered, a new function loops over it, stripping the old physical path from each file and replacing it with the new one. The same loop 'constructs' the MOVE clauses of the RESTORE statement that will be used in a query by the next function. With this function, I no longer care how many files are in the database being restored, nor whether they are data, log or full text catalog files. The function handles all of them and constructs the proper part of the SQL query.
    • The next function starts the SQL database restore by calling Red Gate's stored procedure and plugging the values provided by the previous functions into the SQL query (a sketch of the resulting command is shown just before the full script below).
    • The next function, run after the restore, sets the dbowner to the SA account. A lot of accidental DBAs don't realize this, but when you restore a SQL database, whether overwriting an existing one or creating a new one, the dbowner is set to the account that ran the restore process. In the case of my SQL Backup Pro deployment, the agent runs under the credentials of a domain service account. If for some reason the service account for Red Gate ceased to exist, my databases would suddenly be left without a valid dbowner, which creates issues.
And that's it. By implementing Powershell functions, not only was I able to reduce the number of lines needed, but I am now better prepared to implement future changes. I intend to post a follow-up to this with the SQL native restore version of this script.
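For reference, the command that ends up being sent to SQL Server through SQL Backup Pro looks roughly like the sketch below. The doubled single quotes are needed because the restore T-SQL is nested inside the string argument of the sqlbackup stored procedure; the database name, logical names and backup path are placeholders.

-- Hypothetical example of the command the script assembles for the sqlbackup stored procedure.
-- Database name, logical names and the backup path are placeholders.
EXECUTE master..sqlbackup '-SQL "RESTORE DATABASE [Case0001] FROM DISK = ''\\backupserver\share\Case0001\FULL\Case0001_full.sqb'' WITH RECOVERY, MOVE ''Case0001_Data'' TO ''H:\Databases\Case0001_Data.mdf'', MOVE ''Case0001_Log'' TO ''I:\Tranlogs\Case0001_Log.ldf'', REPLACE, ORPHAN_CHECK"'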

# Database Server Connection Variables
# Only need to change the $SQLServer variable to the SQL instance you are restoring the databases to.
#--------------------------------------------
[STRING]$SQLServer                = 'sql02'
[STRING]$SQLDatabase             = 'master'
[STRING]$SQLConnectString        = "Data Source=$SQLServer; Initial Catalog=$SQLDatabase; Integrated Security=True;"
[OBJECT]$SQLConnection             = New-Object System.Data.SqlClient.SqlConnection($SQLConnectString);

# Functions we are going to use.
#
# Executes the RedGate sqlbackup stored procedure to get the backup header info. We'll use this to
# extract the database name.
function RedgateGetDatabaseName ([STRING]$backupfile)
{
    $SQLConnection.Open()
    $SQLQuery = "exec master..sqlbackup '-sql ""RESTORE SQBHEADERONLY FROM DISK = [$backupfile] WITH SINGLERESULTSET""'"
    $SQLCommand = New-Object system.Data.SqlClient.SqlCommand
    $SQLCommand.CommandText = $SQLQuery
    $SQLCommand.Connection = $SQLConnection
    $SQLAdapter = New-Object System.Data.SqlClient.SqlDataAdapter
    $SQLAdapter.SelectCommand = $SQLCommand
    $DataSet = New-Object System.Data.DataSet
    $SqlAdapter.Fill($DataSet)
    $SQLConnection.Close()
    # Row index 12 of the SQBHEADERONLY result set holds the 'Database name' line; the single result column is named after the SQL Backup version.
    return $DataSet.Tables[0].Rows[12] | Select-Object 'SQL Backup v6.5.1.9'
}

# Executes the RedGate sqlbackup stored procedure to get the backup filelist info. We'll use this to extract all 
# the different data, log and full text search files that may exist in a database.  
function RedgateGetDatabaseFiles ([STRING]$backupfile)
{
    $SQLConnection.Open()
    $SQLQuery = "exec master..sqlbackup '-sql ""RESTORE FileListOnly FROM DISK = [$backupfile]""'"
    $SQLCommand = New-Object system.Data.SqlClient.SqlCommand
    $SQLCommand.CommandText = $SQLQuery
    $SQLCommand.Connection = $SQLConnection
    $SQLAdapter = New-Object System.Data.SqlClient.SqlDataAdapter
    $SQLAdapter.SelectCommand = $SQLCommand
    $DataSet = New-Object System.Data.DataSet
    $SqlAdapter.Fill($DataSet)
    $SQLConnection.Close()
    return $DataSet.Tables[0] | Select-Object LogicalName,PhysicalName,type
}

# This function will cycle through all the data, log and full text search files contained in a backup. It will strip the
# old path where the file used to be and replace it with the new path where we want it restored to on the new server.
# This will be used to construct the SQL restore query.
function NewDBFilePaths ($SourceFiles) 
{

    $MoveCommand = @()
    
    foreach ($item in $SourceFiles) {
    
        If ($item.Type -eq "D") {
            $LogicalName = $item.LogicalName
            $DBFile = $item.PhysicalName
            $DBFile = $DBFile.Replace($DBFile.Substring(0,$DBFile.LastIndexOf("\")+ 1), $DBFilePath)
            $MoveCommand = $MoveCommand + " MOVE ''$LogicalName'' TO ''$DBFile'',"
        }
                                
        If ($item.Type -eq "L") {
            $LogicalName = $item.LogicalName
            $DBFile = $item.PhysicalName
            $DBFile = $DBFile.Replace($DBFile.Substring(0,$DBFile.LastIndexOf("\")+ 1), $DBLogsPath)
            $MoveCommand = $MoveCommand + " MOVE ''$LogicalName'' TO ''$DBFile'',"
        }
                                                        
        If ($item.Type -eq "F") {
            $LogicalName = $item.LogicalName
            $DBFile = $item.PhysicalName
            $DBFile = $DBFile.Replace($DBFile.Substring(0,$DBFile.LastIndexOf("\")+ 1), $DBFTSPath)
            $MoveCommand = $MoveCommand + " MOVE ''$LogicalName'' TO ''$DBFile'',"
        }
    }    
    
    Return $MoveCommand
}

# Executes the RedGate sqlbackup stored procedure to start the restore of a database while at the same time moving the
# database files to their appropriate locations.
function RedgateRestore ([STRING]$Database,[STRING]$BackupFile,[STRING]$FilePath)
{
    $SQLConnection.Open()
    $SQLQuery = "EXECUTE master..sqlbackup '-SQL " + '"' + "RESTORE DATABASE [$Database] FROM DISK = ''" + $BackupFile + "''" + " WITH RECOVERY," + $FilePath + " REPLACE, ORPHAN_CHECK" + '"' + "'"
    $SQLCommand = New-Object system.Data.SqlClient.SqlCommand ($SQLQuery, $SQLConnection)
    $SQLCommand.ExecuteNonQuery() | Out-Null
    $SQLConnection.Close()
}

# When a RedGate SQLBackup restore completes, the restored database is owned by the service account running the 
# SQLBackup service. I use the function below to change the database owner to SA.
function ChangeDBOwner ([STRING]$Database,[STRING]$Owner)
{
    $Owner = $Owner -Replace("'","")
    $SQLConnection.Open()
    $SQLQuery     = "EXEC [$Database].dbo.sp_changedbowner @loginame = '$Owner', @map = false"
    $SQLCommand = New-Object system.Data.SqlClient.SqlCommand ($SQLQuery, $SQLConnection)
    $SQLCommand.ExecuteNonQuery() | Out-Null
    $SQLConnection.Close()
}

# Variables
#
# Directory on the target SQL instance where you want the data (MDF) files to land.
$DBFilePath = "H:\Databases\"
# Directory on the target SQL instance where you want the transaction log (LDF) files to land.
$DBLogsPath = "I:\Tranlogs\"
# Directory on the target SQL instance where you want the Full Text catalog files to land.
$DBFTSPath = "D:\Program Files\Microsoft SQL Server\MSSQL10_50.MSSQLSERVER\MSSQL\FTData\"
# This one is useful if you are going to filter out files by date
$filedate = Get-Date -Format M-d-yyyy_hh-mm-ss
# Directory where backup files are located.
$Directory = '\\dwcifs01\sqlbackups01\trace'

# Here I'm setting a variable that will do a recursive search for the files I want to restore.
# In this particular case I'm grabbing the most recent full backup file for each database and filtering out system databases
# and some specific databases. Of course, you would adjust this to your particular needs.

$files = Get-ChildItem $Directory -filter "*full*.sqb" -rec | Where-Object `
    {($_.Directory -notlike $Directory+"\master\FULL") `
    -and ($_.Directory -notlike $Directory+"\msdb\FULL") `
    -and ($_.Directory -notlike $Directory+"\model\FULL") `
    -and ($_.Directory -notlike $Directory+"\portal1\FULL") `
    -and ($_.Directory -notlike $Directory+"\FTIConversionEngine\FULL") `
    -and ($_.Directory -notlike $Directory+"\rpfdb\FULL") `
    -and ($_.Directory -notlike $Directory+"\rs_tempdb\FULL") }

# For each database directory, keep only the most recent backup file, then sort the result by name.
$files = $files | group directory | foreach {@($_.group | sort {[datetime]$_.lastwritetime} -desc)[0]} | sort {$_.name}

# Start looping through each backup file and restoring the databases
foreach ($filename in $files) {

    #Set the filename variable to the fullpath/name of the backup file
    $filename = $filename.FullName
    
    # Get Database Name
    $dbName = RedgateGetDatabaseName -backupfile $filename
    $dbName = $dbName[1..$dbName.Length]    # drop the row count that Fill() emitted inside the function
    $dbName = $dbName[0].'SQL Backup v6.5.1.9'.Replace("Database name       : ", "")
    
    # Get Database Files
    $dbFiles = RedgateGetDatabaseFiles -backupfile $filename
    $dbFiles = $dbFiles[1..$dbFiles.Length]    # drop the row count that Fill() emitted inside the function
    
    # Update Database Files with new destination path
    $Filepaths = NewDBFilePaths -SourceFiles $dbFiles
    
    # Execute SQL Backup restore
    RedgateRestore -Database $dbName -BackupFile $filename -FilePath $Filepaths
    
    # Change restored database dbowner to SA
    ChangeDBOwner -Database $dbName -Owner sa
}

Saturday, August 28, 2010

Multiple SQL Restores with Powershell, Part 1

Some time ago I was presented with a task. We needed to upgrade an application which at the time had a SQL backend with 240 databases. In order to accomplish this, I, as the accidental DBA, needed to back up and restore all of these databases to a development SQL server. The challenges were the following:

  1. The Production and Dev SQL servers were not identical hardware-wise. The drive letters for the SQL data file and log file volumes were different, so the restore had to move the files to the appropriate volumes.
  2. The databases themselves varied in the sense that half of them had full text catalogs while the other half didn't. The file move had to account for this.
  3. At the time we were backing up SQL with Commvault and, to the best of my knowledge, there was no way to script restores via T-SQL. They had their own scripting utilities, which I had no time to delve into. Even our Commvault admin was not sure it could be done.
  4. We needed to be able to refresh the databases on dev on short notice, so the solution had to be something we could easily execute.
Because of these factors, I decided to use Ola Hallengren's scripts to create copy-only full backups in native SQL format and then use Powershell to handle the restores. Why Powershell? It was mostly a personal choice. Microsoft is really pushing Powershell everywhere, so I've taken it upon myself to use it as much as I can. Besides, T-SQL is alien to me and I wasn't looking forward to trying to base my solution on it.
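For context, a copy-only full backup taken with Ola Hallengren's DatabaseBackup procedure would look something along the lines of the sketch below. The backup directory is a placeholder, and the exact parameters should be checked against the version of the maintenance scripts you have installed.

-- Rough sketch of a copy-only full backup using Ola Hallengren's DatabaseBackup procedure.
-- The directory is a placeholder; verify the parameters against your installed version.
EXECUTE dbo.DatabaseBackup
 @Databases = 'USER_DATABASES',
 @Directory = '\\backupserver\sqlbackups',
 @BackupType = 'FULL',
 @CopyOnly = 'Y'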

I did a lot of research and boy, at times it was really frustrating. I even posted on Technet when I hit a wall and couldn't overcome it. Something was messing up my restore and I could not get it to run consistently. It was the darn full text catalog file, which at the time I didn't even know existed. The final folder location on disk for these full text catalogs is randomly generated when the database is created, making things even more complicated from a restore perspective.

In the end, with the help of a Microsoft tech, I was able to overcome the problem. The resulting script (which is also on the Technet link and marked as a solution) is below. This was my first truly serious Powershell script. It is ugly, but it did the job. I was able to use it multiple times during the upgrade of the application and it was truly a life saver. Eventually things got even more interesting and this script became inadequate for the next upgrade of the application. But I'm posting it for posterity.

#==============================================
# Automated SQL Restore of multiple Databases
#==============================================

#clear screen
cls

# load assemblies
[Reflection.Assembly]::Load("Microsoft.SqlServer.Smo, Version=9.0.242.0, Culture=neutral, PublicKeyToken=89845dcd8080cc91") | Out-Null
[Reflection.Assembly]::Load("Microsoft.SqlServer.SqlEnum, Version=9.0.242.0, Culture=neutral, PublicKeyToken=89845dcd8080cc91") | Out-Null
[Reflection.Assembly]::Load("Microsoft.SqlServer.SmoEnum, Version=9.0.242.0, Culture=neutral, PublicKeyToken=89845dcd8080cc91") | Out-Null
[Reflection.Assembly]::Load("Microsoft.SqlServer.ConnectionInfo, Version=9.0.242.0, Culture=neutral, PublicKeyToken=89845dcd8080cc91") | Out-Null
     
# Some general parameters for restoring into the destination SQL instance

$instance = "SQLServer"
$mdfFilePath = "Path-To-DB-Files"
$ldfFilePath = "Path-To-Tranlog_Files"
$ftsFilePath = "Path-To-FullTextSearch-Files"

# This will recursively look for the appropriate .bak files on the cifs share
# where the backups are located.

$filelist = Get-ChildItem \\Path-to-SQL-backups -Recurse | Where-Object {$_.extension -eq ".bak"}

# Start a Do/Until loop on the $filelist array. The loop will go over each backup file name in the array until the counter
# reaches the value of $filelist.SyncRoot.Count.

$a = 0

Do {
    # Select database backup filename from the $filelist array and store it as a string
    $restorefile = $filelist.SyncRoot.Get($a) | %{$_.FullName}
    # Print variable for testing
    $restorefile
    
    # Set parameters for restore
    
    # We will query the database name from the backup header later
    $server = New-Object ("Microsoft.SqlServer.Management.Smo.Server") $instance
    $backupDevice = New-Object("Microsoft.SqlServer.Management.Smo.BackupDeviceItem") ($restorefile, "File")
    $smoRestore = new-object("Microsoft.SqlServer.Management.Smo.Restore")
    
    #restore settings
    $smoRestore.NoRecovery = $false;
    $smoRestore.ReplaceDatabase = $true;
    $smoRestore.Action = "Database"
    $smoRestore.PercentCompleteNotification = 10;
    $smoRestore.Devices.Add($backupDevice)
    
    #get database name from backup file
    $smoRestoreDetails = $smoRestore.ReadBackupHeader($server)
    
    #Get Database info prior to restore
    "Database Name from Backup Header : " +$smoRestoreDetails.Rows[0]["DatabaseName"]

    # Get Database Logical File Names
    $sourceLogicalNameDT = $smoRestore.ReadFileList($server)

    $FileType = ""
    foreach($Row in $sourceLogicalNameDT) {
        # Put the file type into a local variable.
        # This will be the variable that we use to find out which file
        # we are working with.
        $FileType = $Row["Type"].ToUpper()
    
        # If Type = "D", then we are handling the Database File name.
        If ($FileType.Equals("D")) {
            $sourceDBLogicalName = $Row["LogicalName"]
        }
        # If Type = "L", then we are handling the Log File name.
        elseif ($FileType.Equals("L")) {
            $sourceLogLogicalName = $Row["LogicalName"]        
        }
        # If Type = "F", then we are handling the Full Text Search File name.
        elseif ($FileType.Equals("F")) {
            $sourceFTSLogicalName = $Row["LogicalName"]
            # I also want to grab the full path of the Full Text catalog stored in the backup file.
            # I'll need this so I can change the destination path.
            $sourceFTSPhysicalName = $Row["PhysicalName"]
        }
    }

    # Output Values of Database Logical File Names

    "DB Logical Name: " + $sourceDBLogicalName
    "Log Logical Name: " + $sourceLogLogicalName
    "Full Text Catalog Logical Name: " + $sourceFTSLogicalName

    #give a new database name
    $smoRestore.Database =$smoRestoreDetails.Rows[0]["DatabaseName"]

    #specify new data and log files (mdf and ldf)
    $smoRestoreDBFile = New-Object("Microsoft.SqlServer.Management.Smo.RelocateFile")
    $smoRestoreLogFile = New-Object("Microsoft.SqlServer.Management.Smo.RelocateFile")
        
    #the logical file names should be the logical filename stored in the backup media
    #$smoRestoreFile.LogicalFileName = $datafilename
    
    $smoRestoreDBFile.LogicalFileName = $sourceDBLogicalName
    $smoRestoreDBFile.PhysicalFileName = $mdfFilePath + "\" + $sourceDBLogicalName + ".mdf"
    $smoRestoreLogFile.LogicalFileName = $sourceLogLogicalName
    $smoRestoreLogFile.PhysicalFileName = $ldfFilePath + "\" + $sourceLogLogicalName + ".ldf"
    
    $smoRestore.RelocateFiles.Add($smoRestoreDBFile)
    $smoRestore.RelocateFiles.Add($smoRestoreLogFile)

    # Check to see if $sourceFTSLogicalName is empty or not. If it's not empty,
    # then we do have a full text catalog present and thus we add the appropriate
    # entries to restore those files. If the variable is empty, we just continue
    # with the restore.

    # Print the result of the check for testing
    [String]::IsNullOrEmpty($sourceFTSLogicalName)

    if ([String]::IsNullOrEmpty($sourceFTSLogicalName)) {
    "We DO NOT have a full Text Catalog in our Backup"
    }
    else {
    "We DO have a Full Text Catalog in our Backup"
    # Adding full text catalog restore parameters.
    $smoRestoreFTSFile = New-Object("Microsoft.SqlServer.Management.Smo.RelocateFile")
    $smoRestoreFTSFile.LogicalFileName = $sourceFTSLogicalName
 
    # Here I specify the new location by truncating the first 45 characters of the path
    # that is stored in the backup file. This leaves me with just the name of the folder
    # where the full text catalog is, which I'm appending to the new path defined in
    # $ftsFilePath
    $smoRestoreFTSFile.PhysicalFileName = $ftsFilePath + "\" + $sourceFTSPhysicalName.Substring(45)
    $smoRestore.RelocateFiles.Add($smoRestoreFTSFile)
    }
    
    # Begin restoring database
    $smoRestore.SqlRestore($server)
    
    if ($error.Count -eq 0) {
    "Restore of Database " + "[" +$smoRestoreDetails.Rows[0]["DatabaseName"] + "]" + " is complete"
    }    
    else {
    "Restore of Database " + "[" +$smoRestoreDetails.Rows[0]["DatabaseName"] + "]" + " failed!!!"
    $Error[0].exception.message
    }


    # This tells the loop to keep incrementing the value stored in $a
    $a++

    # We now clear the variables before the next loop starts

    Remove-Variable sourceDBLogicalName
    Remove-Variable sourceLogLogicalName
    Remove-Variable smoRestoreDBFile
    Remove-Variable smoRestoreLogFile

    # If a full text catalog was present, we clear those variables too
 
    if ([String]::IsNullOrEmpty($sourceFTSLogicalName)) {
    "Continue to next restore "
    }
    else {
    "Continue to next restore"
    Remove-Variable sourceFTSLogicalName
    Remove-Variable smoRestoreFTSFile
    }

}

# It will keep incrementing the count value UNTIL it matches the value of $filelist.SyncRoot.Count
Until ($a -eq $filelist.SyncRoot.Count)


#    $error[0] | Format-List -Force