The purpose of a site map is to help search engines include as many of a website's pages as possible, as quickly as possible.
To see why, we first need to understand how search engines crawl. The entire Internet is like a criss-crossing "net": each node is a web page, and pages are connected to one another through URLs. A spider can start from one page and follow its URLs to reach other pages, then follow the URLs on those pages to reach still more, and so on. But a newly published website may have no external URL pointing to it at all, so it will never be "crawled" (and therefore never included). To solve this problem, a new website can actively submit its URL to the search engine and request that spiders crawl it (Google application URL:), but usually only the homepage URL is submitted.
For all URLs (especially dynamically generated ones) to be discovered by spiders quickly and easily, we need to provide a comprehensive, complete, clearly structured, and promptly updated site map.
As with the robots.txt file used to handle duplicate content, we use an .ashx handler to generate a sitemap in XML format following the sitemaps.org protocol. Once the site map is generated, we can submit it to search engines such as Google. Many articles confirm that submitting a site map greatly improves the speed and depth with which a website is included. Almost every other SEO technique is hard to verify, ineffective, or can even have side effects; submitting a site map is the exception!
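Besides submitting the site map directly (for example through Google's webmaster tools), it can also be advertised in robots.txt with the standard Sitemap directive defined by the sitemaps.org protocol; the handler file name in this sketch is only an assumed example for this site:

# robots.txt
# The Sitemap directive is part of the sitemaps.org protocol;
# sitemap.ashx is an assumed name for the handler shown below.
Sitemap: http://www.freeflying.com/sitemap.ashx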
LINQ to XML gives us an almost perfect experience for this kind of work.
<%@ WebHandler Language="C#" Class="website" %>

using System;
using System.Web;
using System.Xml;
using System.Xml.Linq;
using System.Linq;

public class website : IHttpHandler {

    public void ProcessRequest(HttpContext context) {
        context.Response.ContentType = "text/xml";

        // The XML declaration. The third parameter, standalone = "yes", indicates that
        // this XML document is self-contained and does not rely on an external DTD.
        XDeclaration declaration = new XDeclaration("1.0", "UTF-8", "yes");
        context.Response.Write(declaration);

        // Namespace of the sitemap file. Child elements must be created in the same
        // namespace, otherwise they would be emitted with an empty xmlns attribute.
        XNamespace ns = "http://www.google.com/schemas/sitemap/0.84";
        XElement siteMap = new XElement(ns + "urlset");

        string fixedUrl = "http://www.freeflying.com/article";
        string wholeUrl = string.Empty;

        // Loop over the data and convert each record into XML nodes
        foreach (var item in Articles.GetArticles())
        {
            XElement url = new XElement(ns + "url");
            wholeUrl = string.Format("{0}?id={1}&catelog={2}", fixedUrl, item.ID, item.Catelog);
            XElement loc = new XElement(ns + "loc", wholeUrl);
            // lastmod uses the W3C date format (yyyy-MM-dd) expected by the protocol
            XElement lastmod = new XElement(ns + "lastmod", item.LastMod.ToString("yyyy-MM-dd"));
            XElement changefreq = new XElement(ns + "changefreq", item.Frequency);
            XElement priority = new XElement(ns + "priority", item.Weight);
            url.Add(loc, lastmod, changefreq, priority);
            siteMap.Add(url);
        }

        // Finally write out the whole XML document
        context.Response.Write(siteMap);
    }

    public bool IsReusable {
        get {
            return false;
        }
    }
}
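Requesting the handler returns a document in the sitemaps.org format. With the simulated data defined further below, a single entry would look roughly like this (the date and values shown are illustrative):

<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<urlset xmlns="http://www.google.com/schemas/sitemap/0.84">
  <url>
    <loc>http://www.freeflying.com/article?id=267&amp;catelog=blog</loc>
    <lastmod>2009-01-15</lastmod>
    <changefreq>daily</changefreq>
    <priority>0.6</priority>
  </url>
  <!-- ...one <url> element per article... -->
</urlset>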
The same XML techniques can also be used to generate an RSS feed.
<%@ WebHandler Language="C#" Class="rss" %>

using System;
using System.Web;
using System.Xml;
using System.Xml.Linq;

public class rss : IHttpHandler {

    public void ProcessRequest(HttpContext context) {
        context.Response.ContentType = "text/xml";
        context.Response.Write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");

        // The root element of an RSS 2.0 feed
        XElement rssFeed = new XElement("rss", new XAttribute("version", "2.0"));

        string fixedUrl = "http://www.freeflying.com/article";
        string wholeUrl = string.Empty;

        // Channel metadata: title, link, description and publication date (RFC 822 format)
        XElement channel = new XElement("channel",
            new XElement("title", "freeflying"),
            new XElement("link", fixedUrl),
            new XElement("description", "the website for dream flying freely"),
            new XElement("pubDate", DateTime.Now.ToString("r"))
        );

        // One <item> element per article
        foreach (var article in Articles.GetArticles())
        {
            XElement item = new XElement("item");
            XElement title = new XElement("title", article.Title);
            wholeUrl = string.Format("{0}?id={1}&catelog={2}", fixedUrl, article.ID, article.Catelog);
            XElement link = new XElement("link", wholeUrl);
            XElement description = new XElement("description", article.Description);
            XElement pubDate = new XElement("pubDate", article.LastMod.ToString("r"));
            item.Add(title, link, description, pubDate);
            channel.Add(item);
        }

        rssFeed.Add(channel);
        context.Response.Write(rssFeed);
    }

    public bool IsReusable {
        get {
            return false;
        }
    }
}
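To let browsers and feed readers discover the feed automatically, the page's <head> can advertise it with a standard link element; rss.ashx here is just an assumed path for the handler above:

<!-- RSS auto-discovery; rss.ashx is an assumed file name for the handler above -->
<link rel="alternate" type="application/rss+xml" title="freeflying" href="http://www.freeflying.com/rss.ashx" />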
And here is the simulated data:
using System;
using System.Collections.Generic;
/// <summary>
/// Provides the article data used to build the sitemap and the RSS feed
/// </summary>
public class Articles
{
    public static List<Article> GetArticles()
    {
        return new List<Article>(){
            new Article(234, "blog", DateTime.Now.AddDays(-23), Freq.none, 0.8, "asp.net seo", "articles about SEO in asp.net"),
            new Article(267, "blog", DateTime.Now.AddDays(-245), Freq.daily, 0.6, "ado.net pro", "about the dataset usage"),
            new Article(653, "news", DateTime.Now.AddDays(-45), Freq.daily, 1, "CLR via C#", "notebook about this book")
        };
    }
}
public class Article
{
    public int ID;
    public string Catelog;
    public DateTime LastMod;
    public double Weight;
    public Freq Frequency;
    public string Title;
    public string Description;

    public Article(int id, string catelog, DateTime lastMod, Freq frequency, double weight, string title, string description)
    {
        ID = id;
        Catelog = catelog;
        LastMod = lastMod;
        Weight = weight;
        Frequency = frequency;
        Title = title;
        Description = description;
    }
}
public enum Freq
{
    none = 1,
    daily = 2,
    weekly = 3,
}
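Note that the sitemaps.org protocol only allows the values always, hourly, daily, weekly, monthly, yearly and never for <changefreq>, so a production version of this enum would probably cover all of them. A possible sketch (the name ChangeFreq is not from the original article):

// Possible fuller enum matching the changefreq values defined by the sitemaps.org protocol
public enum ChangeFreq
{
    always,
    hourly,
    daily,
    weekly,
    monthly,
    yearly,
    never
}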
Author: Free Fly