
Scraping Page Data with HttpClient + Jsoup

Why not use a crawler framework?

  I originally used the WebMagic framework, but it kept throwing a protocol-version mismatch exception. None of the fixes I found searching online solved it, and since this was just something I wrote for fun, I switched to a different approach:

javax.net.ssl.SSLException: Received fatal alert: protocol_version
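This error usually means the client and the server could not agree on a TLS version. The workaround that ended up working with plain HttpClient is to register an SSL socket factory that explicitly enables TLSv1.2; the full crawler below does this in a static initializer. As a standalone illustration only (the class and method names here are made up for the example):

import javax.net.ssl.SSLContext;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContexts;

public class Tls12ClientSketch {

    // Build a client whose HTTPS connections are restricted to TLSv1.2,
    // which avoids the protocol_version handshake failure.
    public static CloseableHttpClient newClient() {
        SSLContext ctx = SSLContexts.createSystemDefault();
        SSLConnectionSocketFactory sslFactory = new SSLConnectionSocketFactory(
                ctx, new String[]{"TLSv1.2"}, null,
                // same (deprecated) verifier as the crawler below uses
                SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
        return HttpClients.custom().setSSLSocketFactory(sslFactory).build();
    }
}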

Jar dependencies:

        <!-- Parse web page data -->
        <dependency>
            <groupId>org.jsoup</groupId>
            <artifactId>jsoup</artifactId>
            <version>1.10.2</version>
        </dependency>

        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.5.2</version>
        </dependency>

Code:

import com.maxinhai.world.utils.JdbcUtils;
import org.apache.http.HttpEntity;
import org.apache.http.StatusLine;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory; import org.apache.http.conn.socket.PlainConnectionSocketFactory; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import
org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import org.jsoup.select.Elements; import javax.net.ssl.SSLContext; import java.io.IOException; import java.sql.*; import java.sql.Date; import java.util.*; /** * @program: world * @description: 雅拉伊圖片爬蟲 * @author: XinHai.Ma * @create: 2021-10-21 20:33 */ public class YaLaYiImageReptile { // 解決問題: javax.net.ssl.SSLException: Received fatal alert: protocol_version private static final PoolingHttpClientConnectionManager HTTP_CLIENT_CONNECTION_MANAGER; private static final CloseableHttpClient HTTP_CLIENT; static { SSLContext ctx = SSLContexts.createSystemDefault(); SSLConnectionSocketFactory fac = new SSLConnectionSocketFactory(ctx, new String[]{"SSLv2Hello", "TLSv1.2"}, null, SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); HTTP_CLIENT_CONNECTION_MANAGER = new PoolingHttpClientConnectionManager(RegistryBuilder .<ConnectionSocketFactory> create().register("http", PlainConnectionSocketFactory.getSocketFactory()) .register("https", fac).build()); HTTP_CLIENT_CONNECTION_MANAGER.setDefaultMaxPerRoute(100); HTTP_CLIENT_CONNECTION_MANAGER.setMaxTotal(200); RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(60000).setConnectTimeout(60000) .setSocketTimeout(60000).build(); HTTP_CLIENT = HttpClientBuilder.create().setConnectionManager(HTTP_CLIENT_CONNECTION_MANAGER) .setDefaultRequestConfig(requestConfig).build(); } /** * 報異常 => javax.net.ssl.SSLException: Received fatal alert: protocol_version * @param args */ public static void main(String[] args) { // 要爬取的頁面 for(int i=1; i<=10; i++) { if(i != 1) { reptile("https://www.yalayi.com/gallery/index_" + i + ".html"); } else { reptile("https://www.yalayi.com/gallery/"); } } } private static void reptile(String url) { try { HttpGet httpGet = new HttpGet(url); CloseableHttpResponse httpResponse = HTTP_CLIENT.execute(httpGet); StatusLine statusLine = httpResponse.getStatusLine(); if(statusLine.getStatusCode() == 200) { HttpEntity httpEntity = httpResponse.getEntity(); String result = EntityUtils.toString(httpEntity, "utf-8"); //System.out.println(statusLine + "=>" + result); Document document = Jsoup.parse(result); Elements urlElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > a"); Elements imageElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > a > img"); Elements titleElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.text-box > p > a"); Elements sizeElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > em"); List<Map<String, String>> dataList = new ArrayList<>(); for (int i = 0; i < titleElements.size(); i++) { Element urlElement = urlElements.get(i); Element imageElement = imageElements.get(i); Element titleElement = titleElements.get(i); Element sizeElement = sizeElements.get(i); String href = urlElement.attr("href"); String src = imageElement.attr("src"); String title = titleElement.text(); String size = sizeElement.text(); System.out.println("標題:" + title + " 封面:" + src + " 畫質:" + size + " 詳情頁:" + href); Map<String, String> data = new HashMap<>(); data.put("pageUrl", href); data.put("imageUrl", src); data.put("title", title); data.put("size", size); dataList.add(data); } 
saveDate(dataList); } } catch (IOException e) { e.printStackTrace(); } } /** * 儲存爬取資料 * @param dataList */ private static void saveDate(List<Map<String, String>> dataList) { List<String> titleList = new ArrayList<>(); for (int i = 0; i < dataList.size(); i++) { Map<String, String> data = dataList.get(i); String title = data.get("title"); titleList.add(title); } // 查詢已經爬取的資料,做篩選處理 List<String> resultList = query(titleList); Iterator<Map<String, String>> iterator = dataList.iterator(); while (iterator.hasNext()) { Map<String, String> next = iterator.next(); if(resultList.contains(next.get("title"))) { iterator.remove(); } } // 獲取資料庫連線物件 Connection connection = JdbcUtils.getConnection(); // 插入資料 String insert_novel = "INSERT INTO `world`.`yalayi_image_reptile`(`is_active`, `is_delete`, `create_time`, `title`, `size`, `cover_url`, `page_url`) " + "VALUES (?, ?, ?, ?, ?, ?, ?)"; try { PreparedStatement preparedStatement = connection.prepareStatement(insert_novel); for (int i = 0; i < dataList.size(); i++) { Map<String, String> data = dataList.get(i); preparedStatement.setInt(1, 0); preparedStatement.setInt(2, 0); preparedStatement.setDate(3, new Date(System.currentTimeMillis())); preparedStatement.setString(4, data.get("title")); preparedStatement.setString(5, data.get("size")); preparedStatement.setString(6, data.get("imageUrl")); preparedStatement.setString(7, data.get("pageUrl")); preparedStatement.addBatch(); } int[] row = preparedStatement.executeBatch(); System.out.println("插入" + row.length + "行"); } catch (SQLException e) { e.printStackTrace(); } } /** * in範圍查詢 * @param titleList * @return */ private static List<String> query(List<String> titleList) { List<String> titles = new ArrayList<>(); String sql = "SELECT title FROM `yalayi_image_reptile` where is_delete=0 and title in (?)"; String stringFromList = getStringFromList(titleList); String formatSql = String.format(sql.replace("?", "%s"), stringFromList); try { Connection connection = JdbcUtils.getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(formatSql); ResultSet resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { String title = resultSet.getString(1); titles.add(title); } } catch (SQLException e) { e.printStackTrace(); } return titles; } /** * 格式化引數 * @param paramList * @return */ private static String getStringFromList(List<String> paramList) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < paramList.size(); i++) { if(i == paramList.size()-1) { sb.append("'" + paramList.get(i) + "'"); } else { sb.append("'" + paramList.get(i) + "'"); sb.append(","); } } return sb.toString(); } }
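The JdbcUtils class imported above is not shown in this post. A minimal sketch of what it might look like, assuming a local MySQL database named world (the URL, user and password are placeholders, not the real configuration):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

// Hypothetical stand-in for com.maxinhai.world.utils.JdbcUtils, which is not shown in this post.
// It hands out a single shared connection to the local `world` database.
public class JdbcUtils {

    private static final String URL = "jdbc:mysql://localhost:3306/world?useUnicode=true&characterEncoding=utf8";
    private static final String USER = "root";          // placeholder
    private static final String PASSWORD = "password";  // placeholder

    private static Connection connection;

    public static synchronized Connection getConnection() {
        try {
            // Reuse the existing connection; open a new one only if it was never created or has been closed
            if (connection == null || connection.isClosed()) {
                connection = DriverManager.getConnection(URL, USER, PASSWORD);
            }
            return connection;
        } catch (SQLException e) {
            throw new RuntimeException("Failed to open database connection", e);
        }
    }
}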

PS: 1. The code above never closes the database connection; other code reuses the same connection object, so it is deliberately left open (a sketch of how it could be closed with try-with-resources appears after these notes);

  2. If you can't write the CSS selector for document.select() by hand, that's fine: find the DOM node you want to scrape on the page, right-click it and choose Inspect to see the HTML, select the target tag in the elements panel, then right-click and choose Copy > Copy selector (a small example follows below).
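Regarding note 1: if the connection did not need to stay open for other code, the insert could be wrapped in try-with-resources so the statement and the connection are closed automatically. A sketch only, simplified to a single row (method and class names are made up for the example):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Sketch only: how a save method could close its resources automatically
// if the connection did not have to be shared with other code.
public class CloseConnectionSketch {

    public static void saveOne(String title, String size, String coverUrl, String pageUrl) {
        String sql = "INSERT INTO `world`.`yalayi_image_reptile`" +
                "(`is_active`, `is_delete`, `create_time`, `title`, `size`, `cover_url`, `page_url`) " +
                "VALUES (?, ?, ?, ?, ?, ?, ?)";
        // try-with-resources closes the PreparedStatement and the Connection on exit,
        // even when an exception is thrown.
        try (Connection conn = JdbcUtils.getConnection();
             PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setInt(1, 0);
            ps.setInt(2, 0);
            ps.setDate(3, new java.sql.Date(System.currentTimeMillis()));
            ps.setString(4, title);
            ps.setString(5, size);
            ps.setString(6, coverUrl);
            ps.setString(7, pageUrl);
            ps.executeUpdate();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}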
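Regarding note 2: a selector copied from the browser can be passed straight to document.select(). A small self-contained sketch against an inline HTML snippet (the markup and selectors here are made up for illustration, but the pattern is the same as in the crawler above):

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

public class SelectorSketch {
    public static void main(String[] args) {
        // Made-up HTML snippet standing in for a real page
        String html = "<div class=\"img-box\">" +
                "  <a href=\"/gallery/1.html\"><img src=\"/cover/1.jpg\"></a>" +
                "  <em>1920x1080</em>" +
                "</div>";
        Document document = Jsoup.parse(html);
        // Selectors as they might be copied from the browser's "Copy selector" menu
        Element link = document.select("div.img-box > a").first();
        Element image = document.select("div.img-box > a > img").first();
        Element size = document.select("div.img-box > em").first();
        System.out.println("href = " + link.attr("href"));
        System.out.println("src  = " + image.attr("src"));
        System.out.println("size = " + size.text());
    }
}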