A simple web crawler example: given a start URL, a maximum number of URLs to
visit, and a search string, it crawls pages breadth-first from the start URL
and reports every page whose contents contain all the words of the search
string. It is a console program (no GUI) and requires JDK 1.5 or later.
Example run:
D:\java>javac SearchCrawler.java   (compile)
D:\java>java SearchCrawler http://127.0.0.1:8080/zz3zcwbwebhome/index.jsp 20 java
Start searching...
result:
searchString=java
http://127.0.0.1:8080/zz3zcwbwebhome/index.jsp
http://127.0.0.1:8080/zz3zcwbwebhome/reply.jsp
http://127.0.0.1:8080/zz3zcwbwebhome/learn.jsp
http://127.0.0.1:8080/zz3zcwbwebhome/download.jsp
http://127.0.0.1:8080/zz3zcwbwebhome/article.jsp
http://127.0.0.1:8080/zz3zcwbwebhome/myexample/jlGUIOverview.htm
http://127.0.0.1:8080/zz3zcwbwebhome/myexample/Proxooldoc/index.html
http://127.0.0.1:8080/zz3zcwbwebhome/view.jsp?id=301
http://127.0.0.1:8080/zz3zcwbwebhome/view.jsp?id=297
http://127.0.0.1:8080/zz3zcwbwebhome/view.jsp?id=291
http://127.0.0.1:8080/zz3zcwbwebhome/view.jsp?id=286
http://127.0.0.1:8080/zz3zcwbwebhome/view.jsp?id=285
http://127.0.0.1:8080/zz3zcwbwebhome/view.jsp?id=284
http://127.0.0.1:8080/zz3zcwbwebhome/view.jsp?id=276
http://127.0.0.1:8080/zz3zcwbwebhome/view.jsp?id=272
Another example:
D:\java>java SearchCrawler http://www.sina.com 20 java
Start searching...
result:
searchString=java
http://sina.com
http://redirect.sina.com/WWW/sinaCN/www.sina.com.cn class=a2
http://redirect.sina.com/WWW/sinaCN/www.sina.com.cn class=a8
http://redirect.sina.com/WWW/sinaHK/www.sina.com.hk class=a2
http://redirect.sina.com/WWW/sinaTW/www.sina.com.tw class=a8
http://redirect.sina.com/WWW/sinaUS/home.sina.com class=a8
http://redirect.sina.com/WWW/smsCN/sms.sina.com.cn/ class=a2
http://redirect.sina.com/WWW/smsCN/sms.sina.com.cn/ class=a3
http://redirect.sina.com/WWW/sinaNet/www.sina.net/ class=a3
D:\java>
import java.util.*;
import java.net.*;
import java.io.*;
import java.util.regex.*;
// A simple keyword-searching web crawler.
/**
 * A simple breadth-first web crawler. Starting from a seed URL it follows
 * links (honoring each host's robots.txt) and records every page whose text
 * contains all whitespace-separated terms of the search string.
 */
public class SearchCrawler implements Runnable {
    // Matches <a href="..."> tags; group(1) is the link target.
    // Compiled once — Pattern compilation is expensive.
    // FIX: original pattern "< a..." (an HTML-escaping artifact) could never
    // match "<a href", and its char class contained a stray '|'.
    private static final Pattern LINK_PATTERN =
        Pattern.compile("<a\\s+href\\s*=\\s*\"?(.*?)[\">]", Pattern.CASE_INSENSITIVE);
    // Splits the search string into individual search terms.
    private static final Pattern TERM_SPLITTER = Pattern.compile("\\s+");

    /*
     * Per-host cache of robots.txt "Disallow:" path prefixes. A robots.txt
     * file looks like:
     *   # robots.txt for http://somehost.com/
     *   User-agent: *
     *   Disallow: /cgi-bin/
     *   Disallow: /registration # comment
     *   Disallow: /login
     */
    private HashMap<String, ArrayList<String>> disallowListCache =
        new HashMap<String, ArrayList<String>>();

    ArrayList<String> errorList = new ArrayList<String>(); // argument-validation errors
    ArrayList<String> result = new ArrayList<String>();    // URLs whose pages matched
    String startUrl;       // seed URL for the crawl
    int maxUrl;            // maximum number of URLs to crawl
    String searchString;   // whitespace-separated terms to search for
    boolean caseSensitive = false; // whether term matching is case-sensitive
    boolean limitHost = false;     // restrict the crawl to the seed URL's host

    public SearchCrawler(String startUrl, int maxUrl, String searchString) {
        this.startUrl = startUrl;
        this.maxUrl = maxUrl;
        this.searchString = searchString;
    }

    /** Returns the list of matching URLs found so far. */
    public ArrayList<String> getResult() {
        return result;
    }

    /** Thread entry point: runs the crawl with the configured settings. */
    public void run() {
        crawl(startUrl, maxUrl, searchString, limitHost, caseSensitive);
    }

    /**
     * Returns a URL object for a syntactically valid http:// URL,
     * or null when the string is not a crawlable HTTP URL.
     */
    private URL verifyUrl(String url) {
        // Only crawl HTTP URLs.
        if (!url.toLowerCase().startsWith("http://"))
            return null;
        try {
            return new URL(url);
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Checks the host's robots.txt (fetched once per host and cached) to
     * decide whether the given URL may be crawled. A missing or unreadable
     * robots.txt means everything is allowed.
     */
    private boolean isRobotAllowed(URL urlToCheck) {
        String host = urlToCheck.getHost().toLowerCase();
        ArrayList<String> disallowList = disallowListCache.get(host);
        // Not cached yet: download and parse this host's robots.txt.
        if (disallowList == null) {
            disallowList = new ArrayList<String>();
            BufferedReader reader = null;
            try {
                URL robotsFileUrl = new URL("http://" + host + "/robots.txt");
                reader = new BufferedReader(
                    new InputStreamReader(robotsFileUrl.openStream()));
                String line;
                while ((line = reader.readLine()) != null) {
                    if (line.indexOf("Disallow:") == 0) { // line starts with "Disallow:"
                        String disallowPath = line.substring("Disallow:".length());
                        // Strip a trailing "#" comment, if any.
                        int commentIndex = disallowPath.indexOf("#");
                        if (commentIndex != -1) {
                            disallowPath = disallowPath.substring(0, commentIndex);
                        }
                        disallowPath = disallowPath.trim();
                        disallowList.add(disallowPath);
                    }
                }
                disallowListCache.put(host, disallowList);
            } catch (Exception e) {
                // No robots.txt (or it is unreadable): treat as allowed.
                return true;
            } finally {
                // FIX: original code leaked the reader.
                if (reader != null) {
                    try { reader.close(); } catch (IOException ignored) { /* best effort */ }
                }
            }
        }
        // Disallowed if the URL's path starts with any disallowed prefix.
        String file = urlToCheck.getFile();
        for (int i = 0; i < disallowList.size(); i++) {
            if (file.startsWith(disallowList.get(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Downloads the page at pageUrl and returns its contents as one string
     * (line breaks dropped), or null when the download fails.
     */
    private String downloadPage(URL pageUrl) {
        BufferedReader reader = null;
        try {
            // Open connection to URL for reading.
            reader = new BufferedReader(new InputStreamReader(pageUrl.openStream()));
            // Read page into buffer.
            String line;
            StringBuffer pageBuffer = new StringBuffer();
            while ((line = reader.readLine()) != null) {
                pageBuffer.append(line);
            }
            return pageBuffer.toString();
        } catch (Exception e) {
            return null;
        } finally {
            // FIX: original code leaked the reader.
            if (reader != null) {
                try { reader.close(); } catch (IOException ignored) { /* best effort */ }
            }
        }
    }

    /**
     * Strips a leading "www." from the host part so that http://www.host
     * and http://host are treated as the same URL.
     */
    private String removeWwwFromUrl(String url) {
        int index = url.indexOf("://www.");
        if (index != -1) {
            return url.substring(0, index + 3) + url.substring(index + 7);
        }
        return url;
    }

    /**
     * Returns ":port" for an explicit port, or "" for the default port.
     * FIX: URL.getPort() returns -1 when no port is present, and the original
     * code produced broken links like "http://host:-1/page".
     */
    private String portSuffix(URL pageUrl) {
        int port = pageUrl.getPort();
        return (port == -1) ? "" : (":" + port);
    }

    /**
     * Extracts all crawlable links from pageContents, resolving relative
     * links against pageUrl, skipping anchors/mailto/javascript links,
     * already-crawled URLs, and (when limitHost is set) foreign hosts.
     */
    private ArrayList<String> retrieveLinks(URL pageUrl, String pageContents,
            HashSet<String> crawledList, boolean limitHost) {
        Matcher m = LINK_PATTERN.matcher(pageContents);
        ArrayList<String> linkList = new ArrayList<String>();
        while (m.find()) {
            String link = m.group(1).trim();
            if (link.length() < 1) {
                continue;
            }
            if (link.charAt(0) == '#') { // in-page anchor
                continue;
            }
            if (link.indexOf("mailto:") != -1) {
                continue;
            }
            if (link.toLowerCase().indexOf("javascript") != -1) {
                continue;
            }
            if (link.indexOf("://") == -1) {
                // Relative link: rebuild an absolute URL against pageUrl.
                String hostPort = pageUrl.getHost() + portSuffix(pageUrl);
                if (link.charAt(0) == '/') { // site-root-relative
                    link = "http://" + hostPort + link;
                } else {
                    String file = pageUrl.getFile();
                    if (file.indexOf('/') == -1) { // page is at the site root
                        link = "http://" + hostPort + "/" + link;
                    } else { // resolve against the page's directory
                        String path = file.substring(0, file.lastIndexOf('/') + 1);
                        link = "http://" + hostPort + path + link;
                    }
                }
            }
            // Drop any fragment.
            int index = link.indexOf('#');
            if (index != -1) {
                link = link.substring(0, index);
            }
            link = removeWwwFromUrl(link);
            URL verifiedLink = verifyUrl(link);
            if (verifiedLink == null) {
                continue;
            }
            // When limiting to one host, skip URLs on other hosts.
            if (limitHost &&
                !pageUrl.getHost().toLowerCase().equals(
                    verifiedLink.getHost().toLowerCase())) {
                continue;
            }
            // Skip URLs we have already crawled.
            if (crawledList.contains(link)) {
                continue;
            }
            linkList.add(link);
        }
        return linkList;
    }

    /**
     * Returns true when pageContents contains every whitespace-separated
     * term of searchString (case-insensitively unless caseSensitive).
     */
    private boolean searchStringMatches(String pageContents, String searchString,
            boolean caseSensitive) {
        String searchContents = caseSensitive
            ? pageContents
            : pageContents.toLowerCase();
        String[] terms = TERM_SPLITTER.split(searchString);
        for (int i = 0; i < terms.length; i++) {
            String term = caseSensitive ? terms[i] : terms[i].toLowerCase();
            if (searchContents.indexOf(term) == -1) {
                return false;
            }
        }
        return true;
    }

    /**
     * Performs the crawl. Returns the list of matching URLs, or (after
     * printing diagnostics) the error list when the arguments are invalid.
     *
     * @param startUrl     seed URL
     * @param maxUrls      maximum URLs to crawl (-1 for unlimited)
     * @param searchString terms to search for (must be non-empty)
     * @param limithost    restrict the crawl to the seed URL's host
     * @param caseSensitive whether matching is case-sensitive
     */
    public ArrayList<String> crawl(String startUrl, int maxUrls, String searchString,
            boolean limithost, boolean caseSensitive) {
        System.out.println("searchString=" + searchString);
        HashSet<String> crawledList = new HashSet<String>();
        LinkedHashSet<String> toCrawlList = new LinkedHashSet<String>();
        if (maxUrls < 1) {
            errorList.add("Invalid Max URLs value.");
            System.out.println("Invalid Max URLs value.");
        }
        if (searchString.length() < 1) {
            errorList.add("Missing Search String.");
            System.out.println("Missing search String");
        }
        if (errorList.size() > 0) {
            System.out.println("err!!!");
            return errorList;
        }
        // Canonicalize the seed URL ("www." removed) and queue it.
        startUrl = removeWwwFromUrl(startUrl);
        toCrawlList.add(startUrl);
        while (toCrawlList.size() > 0) {
            if (maxUrls != -1 && crawledList.size() == maxUrls) {
                break;
            }
            // Take the oldest queued URL (FIFO via LinkedHashSet order).
            String url = toCrawlList.iterator().next();
            toCrawlList.remove(url);
            URL verifiedUrl = verifyUrl(url);
            // FIX: skip malformed URLs; the original NPE'd in isRobotAllowed
            // when the seed URL was not a valid http:// URL.
            if (verifiedUrl == null) {
                continue;
            }
            // Skip URL if robots are not allowed to access it.
            if (!isRobotAllowed(verifiedUrl)) {
                continue;
            }
            crawledList.add(url);
            String pageContents = downloadPage(verifiedUrl);
            if (pageContents != null && pageContents.length() > 0) {
                // FIX: honor the limithost PARAMETER; the original passed the
                // limitHost field, silently ignoring the caller's argument.
                ArrayList<String> links =
                    retrieveLinks(verifiedUrl, pageContents, crawledList, limithost);
                toCrawlList.addAll(links);
                if (searchStringMatches(pageContents, searchString, caseSensitive)) {
                    result.add(url);
                    System.out.println(url);
                }
            }
        }
        return result;
    }

    /** Command-line entry point: java SearchCrawler startUrl maxUrl searchString */
    public static void main(String[] args) {
        if (args.length != 3) {
            System.out.println("Usage:java SearchCrawler startUrl maxUrl searchString");
            return;
        }
        int max = Integer.parseInt(args[1]);
        SearchCrawler crawler = new SearchCrawler(args[0], max, args[2]);
        Thread search = new Thread(crawler);
        System.out.println("Start searching...");
        System.out.println("result:");
        search.start();
    }
}