How do I schedule the IIS SEO Toolkit to run every day?
I have installed the Microsoft SEO Toolkit in IIS (http://www.iis.net/download/seotoolkit).
I would like to be able to schedule it to run daily and generate a report.
Does anyone know how to do this?
You can do this in a couple of different ways:
1) Use a PowerShell script: http://blogs.iis.net/carlosag/archive/2008/02/10/using-microsoft-web-administration-in-windows-powershell.aspx
PS C:\> $iis = new-object Microsoft.Web.Administration.ServerManager
PS C:\> $iis.Sites | foreach { $_.Applications | where { $_.ApplicationPoolName -eq 'DefaultAppPool' } | select-object Path, @{Name="AnonymousEnabled"; Expression={ $_.GetWebConfiguration().GetSection("system.webServer/security/authentication/anonymousAuthentication").GetAttributeValue("enabled") }}}
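If you want to stay entirely in PowerShell, the same crawler API used by the C# program below (CrawlerSettings, WebCrawler, CrawlerReport) can also be driven from a script. This is only a minimal sketch: it assumes the toolkit's client assembly is Microsoft.Web.Management.SEO.Client.dll in the usual reference-assemblies folder, and http://www.example.com stands in for your site; adjust both to your environment.

# Load the toolkit's client assembly; this path is the usual install location,
# but verify it on your machine
[Reflection.Assembly]::LoadFrom("C:\Program Files\Reference Assemblies\Microsoft\IIS\Microsoft.Web.Management.SEO.Client.dll") | Out-Null

# Configure the crawl (http://www.example.com is a placeholder start URL)
$startUrl = [System.Uri]"http://www.example.com"
$settings = New-Object -TypeName Microsoft.Web.Management.SEO.Crawler.CrawlerSettings -ArgumentList $startUrl
$settings.ExternalLinkCriteria = [Microsoft.Web.Management.SEO.Crawler.ExternalLinkCriteria]::SameFolderAndDeeper
$settings.Name = $startUrl.Host + " " + (Get-Date -Format "yy-MM-dd hh-mm-ss")

# Save the report in the same folder the toolkit UI uses
$reportDir = Join-Path ([Environment]::GetFolderPath("MyDocuments")) "IIS SEO Reports"
$settings.DirectoryCache = Join-Path $reportDir $settings.Name

# Run the crawl, wait for it to finish, then persist the report
$crawler = New-Object -TypeName Microsoft.Web.Management.SEO.Crawler.WebCrawler -ArgumentList $settings
$crawler.Start()
while ($crawler.IsRunning) { Start-Sleep -Seconds 1 }
$crawler.Report.Save($reportDir)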
2) Or you can create a small C# program like this:
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
using Microsoft.Web.Management.SEO.Crawler;

namespace SEORunner {
class Program {
static void Main(string[] args) {
if (args.Length != 1) {
Console.WriteLine("Please specify the URL.");
return;
}
// Create a URI class
Uri startUrl = new Uri(args[0]);
// Run the analysis
CrawlerReport report = RunAnalysis(startUrl);
// Run a few queries...
LogSummary(report);
LogStatusCodeSummary(report);
LogBrokenLinks(report);
}
private static CrawlerReport RunAnalysis(Uri startUrl) {
CrawlerSettings settings = new CrawlerSettings(startUrl);
settings.ExternalLinkCriteria = ExternalLinkCriteria.SameFolderAndDeeper;
// Generate a unique name
settings.Name = startUrl.Host + " " + DateTime.Now.ToString("yy-MM-dd hh-mm-ss");
// Use the same directory as the default used by the UI
string path = Path.Combine(
Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments),
"IIS SEO Reports");
settings.DirectoryCache = Path.Combine(path, settings.Name);
// Create a new crawler and start running
WebCrawler crawler = new WebCrawler(settings);
crawler.Start();
Console.WriteLine("Processed - Remaining - Download Size");
while (crawler.IsRunning) {
Thread.Sleep(1000);
Console.WriteLine("{0,9:N0} - {1,9:N0} - {2,9:N2} MB",
crawler.Report.GetUrlCount(),
crawler.RemainingUrls,
crawler.BytesDownloaded/1048576.0f);
}
// Save the report
crawler.Report.Save(path);
Console.WriteLine("Crawling complete!!!");
return crawler.Report;
}
private static void LogSummary(CrawlerReport report) {
Console.WriteLine();
Console.WriteLine("----------------------------");
Console.WriteLine(" Overview");
Console.WriteLine("----------------------------");
Console.WriteLine("Start URL: {0}", report.Settings.StartUrl);
Console.WriteLine("Start Time: {0}", report.Settings.StartTime);
Console.WriteLine("End Time: {0}", report.Settings.EndTime);
Console.WriteLine("URLs: {0}", report.GetUrlCount());
Console.WriteLine("Links: {0}", report.Settings.LinkCount);
Console.WriteLine("Violations: {0}", report.Settings.ViolationCount);
}
private static void LogBrokenLinks(CrawlerReport report) {
Console.WriteLine();
Console.WriteLine("----------------------------");
Console.WriteLine(" Broken links");
Console.WriteLine("----------------------------");
foreach (var item in from url in report.GetUrls()
where url.StatusCode == HttpStatusCode.NotFound &&
!url.IsExternal
orderby url.Url.AbsoluteUri ascending
select url) {
Console.WriteLine(item.Url.AbsoluteUri);
}
}
private static void LogStatusCodeSummary(CrawlerReport report) {
Console.WriteLine();
Console.WriteLine("----------------------------");
Console.WriteLine(" Status Code summary");
Console.WriteLine("----------------------------");
foreach (var item in from url in report.GetUrls()
group url by url.StatusCode into g
orderby g.Key
select g) {
Console.WriteLine("{0,20} - {1,5:N0}", item.Key, item.Count());
}
}
}
}
and then schedule it to run with the Windows Task Scheduler.
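For the daily schedule itself, one option is to compile the program above into a console exe and register it with schtasks. This is a sketch, not a tested setup: the csc.exe and toolkit DLL paths are the typical ones but may differ on your machine, and C:\Tools\SEORunner.exe, the task name and the URL are placeholders.

# Build the console tool (or just build it in Visual Studio); verify both paths on your machine
& "$env:WINDIR\Microsoft.NET\Framework\v4.0.30319\csc.exe" /out:C:\Tools\SEORunner.exe /r:"C:\Program Files\Reference Assemblies\Microsoft\IIS\Microsoft.Web.Management.SEO.Client.dll" SEORunner.cs

# Register a scheduled task that runs the crawl every day at 2 AM
schtasks /Create /TN "IIS SEO Daily Crawl" /TR "C:\Tools\SEORunner.exe http://www.example.com" /SC DAILY /ST 02:00

Because the program saves into the "IIS SEO Reports" folder under My Documents (the same folder the UI reads), keep in mind that under a scheduled task that folder belongs to whichever account the task runs as.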
We use the same toolkit over at http://www.seo-genie.com and it can run those tests for you on a weekly basis, if that would work for you. Otherwise just use the code pasted above together with the Windows Task Scheduler, or perhaps the PowerShell route.
I posted a blog post describing how to build a command-line tool that uses the engine. You can then schedule it to run with the Windows Task Scheduler.
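If you are on Windows 8 / Server 2012 or later, the same daily task can also be registered from PowerShell instead of schtasks; again the exe path, URL and task name below are placeholders for whatever your command-line tool is called.

# Run the crawl every day at 2 AM (requires the ScheduledTasks module, Windows 8 / Server 2012+)
$action  = New-ScheduledTaskAction -Execute "C:\Tools\SEORunner.exe" -Argument "http://www.example.com"
$trigger = New-ScheduledTaskTrigger -Daily -At 2am
Register-ScheduledTask -TaskName "IIS SEO Daily Crawl" -Action $action -Trigger $trigger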