A Very Crude Web Crawler

To test a text-clustering algorithm, the boss asked each of us to collect 1,000 news articles on a given topic. "No crawler? Then copy them by hand. You have three days!"
Fine, let's write a crawler then. After an afternoon of digging through references and sample code, I ended up with this very crude program that can only crawl Sina's military section. Please don't laugh at me, haha~
package com.slimspider;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.List;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.htmlparser.NodeFilter;
import org.htmlparser.Parser;
import org.htmlparser.filters.TagNameFilter;
import org.htmlparser.tags.LinkTag;
import org.htmlparser.util.NodeList;
import org.htmlparser.visitors.TextExtractingVisitor;

import com.slimspider.utils.StringURLQueue;

/**
 * 
 * Just for the Sina military section
 * 
 * @author LiangTE
 *
 */

public class Main {

	private static int num = 0;

	// Shared across recursive calls so the same URL is not crawled twice.
	private static List<String> deprecatedURLs = new ArrayList<String>();
	
	public static void crawler(String url) {
		HttpClient httpClient = new DefaultHttpClient();
		
		StringURLQueue queue = StringURLQueue.getInstance();
		
		try {
			HttpGet httpget = new HttpGet(url);

			HttpResponse response = httpClient.execute(httpget);

			HttpEntity entity = response.getEntity();

			String body = EntityUtils.toString(entity, "gbk");
			
			/**
			 *
			 * Cut the article body out of the page, between the
			 * publish_helper markers that Sina wraps around it.
			 *
			 */
			String totalContent = body.substring(body.indexOf("<!-- publish_helper"), body.indexOf("<!-- publish_helper_end -->"));

			String text = totalContent.substring(totalContent.indexOf("<p>"),
					totalContent.indexOf("<style type"));
			
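			// The otherContent div (presumably the "related articles" box) is
			// where the links for further crawling come from.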
			if(totalContent.indexOf("<div class=\"otherContent") != -1) {
				String links = totalContent.substring(totalContent.indexOf("<div class=\"otherContent"));

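				// Strip the HTML tags from the body and keep only the plain text.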
				Parser parser = new Parser(text);

				TextExtractingVisitor visitor = new TextExtractingVisitor();

				parser.visitAllNodesWith(visitor);

				String resu = visitor.getExtractedText();
				
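				// Write the extracted text to a numbered file under D:/resu/.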
				FileWriter fw = new FileWriter("D:/resu/m" + ++num + ".txt");
				
				BufferedWriter bw = new BufferedWriter(fw);
				
				bw.write(resu);
				
				bw.close();
				
				fw.close();
				
				System.out.println(resu);
				
				/**
				 *
				 * Pull the related-article URLs out of the otherContent block
				 * and queue the ones that point back into the military section.
				 *
				 */
				NodeFilter filter = new TagNameFilter("a");

				Parser parser2 = new Parser(links);

				NodeList nodeList = parser2.extractAllNodesThatMatch(filter);

				int len = nodeList.size();
				
				for (int i = 0; i < len; i++) {
					LinkTag tag = (LinkTag) nodeList.elementAt(i);
					String newUrl = tag.extractLink();
					if(!deprecatedURLs.contains(newUrl)) {
						if(newUrl.startsWith("http://mil.news.sina.com.cn")) {
							queue.enQueue(newUrl);
						}
					}
				}
				
			}

			// Crawl the next queued URL; already-visited ones were filtered out
			// above. Assuming deQueue() returns null when the queue is empty,
			// a null here also ends the crawl.
			String targetUrl = queue.deQueue();

			if (targetUrl != null) {
				deprecatedURLs.add(targetUrl);
				crawler(targetUrl);
			}
			
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			httpClient.getConnectionManager().shutdown();
		}
	}
	
	
	public static void main(String[] args) {
		
		crawler("http://mil.news.sina.com.cn/2012-10-23/0913704471.html");
		
	}

}
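
The code above also uses com.slimspider.utils.StringURLQueue, which isn't shown. The crawler only needs getInstance(), enQueue() and deQueue(); a minimal sketch of such a singleton FIFO queue could look like the following (an assumed implementation, not necessarily the original class):

package com.slimspider.utils;

import java.util.LinkedList;
import java.util.Queue;

/**
 * Minimal singleton FIFO queue of URL strings.
 * Only getInstance(), enQueue() and deQueue() are required by the crawler;
 * everything else here is a guess at a straightforward implementation.
 */
public class StringURLQueue {

	private static final StringURLQueue INSTANCE = new StringURLQueue();

	private final Queue<String> urls = new LinkedList<String>();

	private StringURLQueue() {
	}

	public static StringURLQueue getInstance() {
		return INSTANCE;
	}

	// Ignore URLs that are already waiting in the queue.
	public void enQueue(String url) {
		if (!urls.contains(url)) {
			urls.add(url);
		}
	}

	// Returns null when the queue is empty.
	public String deQueue() {
		return urls.poll();
	}
}

Having deQueue() return null on an empty queue is what lets the crawler above stop once it runs out of links to follow.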