Wikipedia:Projekt DotNetWikiBot Framework/Lavallen2/Samlar info ur raderade versioner
using System;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Xml;
using System.Threading;
using DotNetWikiBot;

class MyBot : Bot
{
    public static void Main()
    {
        // List of all Wikipedia projects that deleted interwiki links are collected for;
        // it is not a list of approved projects
        string[] wikipedior = {"da", "de", "en", "fi", "fr", "no", "nn"};
        // Namespace prefixes to skip (only main-namespace pages are handled)
        string[] namnrymd = {"Diskussion:", "Användare:", "Användardiskussion:", "Wikipedia:",
                             "Wikipediadiskussion:", "MediaWiki:", "MediaWikidiskussion:", "Mall:",
                             "Malldiskussion:", "Kategoridiskussion:", "Portal:", "Portaldiskussion:"};
        Site site = new Site("http://sv.wikipedia.org", "username", "password"); // replace with bot account credentials

        // Fetch the deletion log page
        string cap = site.GetPageHTM("http://sv.wikipedia.org/w/index.php?title=Special:Logg&limit=500&type=delete&month=&year=");
        int cursor = 0;
        // One iteration per deletion log entry
        while (cap.IndexOf("<li class=\"mw-logline-delete\">") != -1)
        {
            cap = cap.Substring(cap.IndexOf("<li class=\"mw-logline-delete\">") + 1);
            cursor = cap.IndexOf("<li class=\"mw-logline-delete\">", cursor) + 1;
            string cap2 = cap.Substring(0, cap.IndexOf("title=\"Special:Återställ\">visa/återställ</a>") + 7);
            cap2 = cap2.Substring(cap2.LastIndexOf("<a href=\"") + 9);
            cap2 = cap2.Substring(0, cap2.LastIndexOf("\" title"));
            cap2 = "http://sv.wikipedia.org" + cap2;
            cap2 = cap2.Replace("&amp;", "&"); // decode HTML-escaped ampersands in the URL

            // Fetch the undelete (restore) page for a single page
            string cap3 = site.GetPageHTM(cap2);
            cap3 = cap3.Substring(cap3.IndexOf("Härunder visas en lista över raderade versioner av"));
            string titel = cap3.Substring(0, cap3.IndexOf("</a>"));

            // Only act if the whole page is deleted (the title renders as a red link)
            if (titel.IndexOf("[inte skriven än]\">") != -1)
            {
                // Extract the title of the Swedish page
                titel = titel.Substring(titel.IndexOf("[inte skriven än]\">") + 19);
                bool rymd = true;
                foreach (string n in namnrymd)
                {
                    if (titel.StartsWith(n))
                        rymd = false;
                }
                if (rymd)
                {
                    cap3 = cap3.Substring(cap3.IndexOf("</h2>"));
                    cap3 = cap3.Substring(0, cap3.IndexOf("</a> . . <a href"));
                    cap3 = cap3.Substring(cap3.LastIndexOf("<a href=\"") + 9);
                    cap3 = cap3.Substring(0, cap3.IndexOf("\" title="));
                    cap3 = cap3.Replace("&amp;", "&");
                    string länkar = File.ReadAllText("länkar.txt");
                    File.WriteAllText("junk0.txt", cap3);

                    // Avoid handling the same page version twice
                    if (länkar.IndexOf(cap3) == -1)
                    {
                        länkar = länkar + cap3 + "\r\n";
                        File.WriteAllText("länkar.txt", länkar);
                        cap3 = "http://sv.wikipedia.org" + cap3;
                        string cap4 = site.GetPageHTM(cap3);
                        cap4 = cap4.Substring(cap4.IndexOf("<textarea readonly="));
                        cap4 = cap4.Substring(cap4.IndexOf(">") + 1);
                        // The wikitext of the deleted page
                        cap4 = cap4.Substring(0, cap4.IndexOf("</textarea>"));

                        // One output file per project
                        foreach (string projekt in wikipedior)
                        {
                            string filnamn = "Link_" + projekt + ".txt";
                            string link = File.ReadAllText(filnamn);
                            // Identify interwiki links to this project
                            string iw = "[[" + projekt + ":";
                            if (cap4.IndexOf(iw) != -1)
                            {
                                Console.WriteLine(projekt);
                                iw = cap4.Substring(cap4.IndexOf(iw));
                                iw = iw.Substring(iw.IndexOf(":") + 1);
                                iw = iw.Substring(0, iw.IndexOf("]]"));
                                if (iw.Trim() != "")
                                {
                                    link += "|" + iw + "\r\n[[sv:" + titel + "]]\r\n";
                                    File.WriteAllText(filnamn, link);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
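The script reads länkar.txt (the list of already handled revision URLs) and one Link_<code>.txt per project with File.ReadAllText, which throws if a file does not exist. A minimal setup sketch, assuming the bot is started from the working directory that is meant to hold these files:

using System.IO;

class Setup
{
    public static void Main()
    {
        // Create the files the bot above expects, if they are not already there.
        string[] wikipedior = {"da", "de", "en", "fi", "fr", "no", "nn"};
        if (!File.Exists("länkar.txt"))
            File.WriteAllText("länkar.txt", "");
        foreach (string projekt in wikipedior)
        {
            string filnamn = "Link_" + projekt + ".txt";
            if (!File.Exists(filnamn))
                File.WriteAllText(filnamn, "");
        }
    }
}

For every deleted revision that contains an interwiki link to a project, the bot appends a pair of lines of the form "|<foreign title>" followed by "[[sv:<Swedish title>]]" to that project's Link_<code>.txt file.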