global.asax – with customized content
robots.txt – to instruct search engine robots (a minimal sample follows)
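For reference, robots.txt is just a plain-text file. The sketch below is illustrative only – the path and the disallowed folders are assumptions – and writes a minimal robots.txt that keeps crawlers out of common SharePoint system paths.

# Illustrative only: create a minimal robots.txt (adjust the path and rules to your site)
@"
User-agent: *
Disallow: /_layouts/
Disallow: /_vti_bin/
"@ | Set-Content "C:\Projects\TestApp\ConfigFiles\robots.txt"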
The following PowerShell script takes the location of the above files and the website URL as its inputs. It then looks up the virtual directory path of the website from the IIS settings of the web application (Default zone) and uses it as the destination; the Copy-Item cmdlet copies the files there.
# The SharePoint object model must be available: run this from the SharePoint Management Shell,
# or load the snap-in first: Add-PSSnapin Microsoft.SharePoint.PowerShell -ErrorAction SilentlyContinue
[string]$scriptsLocation = "C:\Projects\TestApp\ConfigFiles"
$webSiteUrl = "http://derdev02:40920/"
$robotsFilePath = "$scriptsLocation\robots.txt"
$globalAsaxPath = "$scriptsLocation\global.asax"

function UpdateConfigFiles([string]$siteUrl)
{
    Write-Host -ForegroundColor Yellow "Updating configuration files for" $siteUrl
    $SPSite = New-Object Microsoft.SharePoint.SPSite($siteUrl)
    $WebApp = $SPSite.WebApplication

    # IIS settings of the web application in the Default zone
    $IISSettings = $WebApp.IisSettings[[Microsoft.SharePoint.Administration.SPUrlZone]::Default]
    $virtualDirectoryPath = $IISSettings.Path.FullName
    Write-Host "Virtual directory path:" $virtualDirectoryPath

    if (Test-Path $robotsFilePath)
    {
        Write-Host "Copying file:" $robotsFilePath "to the destination:" "$virtualDirectoryPath\robots.txt"
        Copy-Item $robotsFilePath -Destination "$virtualDirectoryPath\robots.txt" -Force
    }

    if (Test-Path $globalAsaxPath)
    {
        Write-Host "Copying file:" $globalAsaxPath "to the destination:" "$virtualDirectoryPath\global.asax"
        Copy-Item $globalAsaxPath -Destination "$virtualDirectoryPath\global.asax" -Force
    }

    # Release the SPSite object
    $SPSite.Dispose()
}

UpdateConfigFiles $webSiteUrl
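After the script runs, the result can be checked with a short sketch like the one below. It assumes the SharePoint snap-in is loaded; Get-SPWebApplication and the IisSettings lookup mirror what the function above does.

# Optional verification: resolve the same Default-zone virtual directory and confirm the files are there
$verifyWebApp = Get-SPWebApplication $webSiteUrl
$verifyPath = $verifyWebApp.IisSettings[[Microsoft.SharePoint.Administration.SPUrlZone]::Default].Path.FullName
Test-Path "$verifyPath\robots.txt"
Test-Path "$verifyPath\global.asax"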
Note:
If the files already exist in the destination, they will be overwritten even if they are read-only, since the -Force parameter is used with the Copy-Item cmdlet.
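This behaviour can be seen with a small sketch like the following; the destination path here is a placeholder, so substitute the virtual directory of your own web application.

# Hypothetical destination path – substitute your web application's virtual directory
$destination = "C:\inetpub\wwwroot\wss\VirtualDirectories\40920\robots.txt"
Set-ItemProperty $destination -Name IsReadOnly -Value $true                  # make the existing file read-only
Copy-Item "$scriptsLocation\robots.txt" -Destination $destination -Force     # still overwrites it because of -Force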