
How to fix sun.security.validator.ValidatorException: PKIX path building failed in a Java crawler

Tags: crawler, java, mysql, json


Cause of the problem

Usually you will not run into this, but if you do, the cause is the site's HTTPS certificate: Java cannot fetch the page unless the crawler skips certificate validation. The fix is to run the certificate-ignoring request shown below once before making the Jsoup call.

The error in the console

When crawling an HTTPS page, the request fails with sun.security.validator.ValidatorException: PKIX path building failed.
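The original post showed a screenshot here; the console output for this failure typically looks roughly like the following (the exact chain of nested exceptions can vary between JDK versions):

javax.net.ssl.SSLHandshakeException: sun.security.validator.ValidatorException:
    PKIX path building failed: sun.security.validator.SunCertPathBuilderException:
    unable to find valid certification path to requested target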

try {
    // Run the certificate-ignoring request first; only then does the real request succeed
    HttpsUrlValidator.retrieveResponseFromServer(url);
    doc = Jsoup
            .connect(url)
            .header("User-Agent", rand_agents)
            .timeout(10000)
            .get();
    body = doc.getElementsByTag("body").html();
} catch (Exception e) {
    log.info(e.getMessage());
}

The HttpsUrlValidator class:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSession;

public class HttpsUrlValidator {

    // Hostname verifier that accepts every host name
    static HostnameVerifier hv = new HostnameVerifier() {
        public boolean verify(String urlHostName, SSLSession session) {
            System.out.println("Warning: URL Host: " + urlHostName
                    + " vs. " + session.getPeerHost());
            return true;
        }
    };

    public final static String retrieveResponseFromServer(final String url) {
        HttpURLConnection connection = null;
        try {
            URL validationUrl = new URL(url);
            // Install the trust-all SSL context and hostname verifier before connecting
            trustAllHttpsCertificates();
            HttpsURLConnection.setDefaultHostnameVerifier(hv);
            connection = (HttpURLConnection) validationUrl.openConnection();
            final BufferedReader in = new BufferedReader(new InputStreamReader(
                    connection.getInputStream()));
            String line;
            final StringBuffer stringBuffer = new StringBuffer(255);
            synchronized (stringBuffer) {
                while ((line = in.readLine()) != null) {
                    stringBuffer.append(line);
                    stringBuffer.append("\n");
                }
                return stringBuffer.toString();
            }
        } catch (final IOException e) {
            System.out.println(e.getMessage());
            return null;
        } catch (final Exception e1) {
            System.out.println(e1.getMessage());
            return null;
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    }

    // Register an SSLContext whose trust manager accepts every certificate
    public static void trustAllHttpsCertificates() throws Exception {
        javax.net.ssl.TrustManager[] trustAllCerts = new javax.net.ssl.TrustManager[1];
        javax.net.ssl.TrustManager tm = new miTM();
        trustAllCerts[0] = tm;
        javax.net.ssl.SSLContext sc = javax.net.ssl.SSLContext.getInstance("SSL");
        sc.init(null, trustAllCerts, null);
        javax.net.ssl.HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
    }

    // Trust manager that trusts every client and server certificate
    static class miTM implements javax.net.ssl.TrustManager,
            javax.net.ssl.X509TrustManager {
        public java.security.cert.X509Certificate[] getAcceptedIssuers() {
            return null;
        }

        public boolean isServerTrusted(java.security.cert.X509Certificate[] certs) {
            return true;
        }

        public boolean isClientTrusted(java.security.cert.X509Certificate[] certs) {
            return true;
        }

        public void checkServerTrusted(java.security.cert.X509Certificate[] certs,
                String authType) throws java.security.cert.CertificateException {
            return;
        }

        public void checkClientTrusted(java.security.cert.X509Certificate[] certs,
                String authType) throws java.security.cert.CertificateException {
            return;
        }
    }
}
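As a side note, newer jsoup releases (1.12.1 and later, as far as I know) let you pass a custom SSLSocketFactory directly to the connection, so the global HttpsURLConnection defaults do not have to be touched. A rough sketch, assuming such a jsoup version is available; the class name and URL are placeholders:

import java.security.SecureRandom;
import java.security.cert.X509Certificate;

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class JsoupTrustAllExample {

    // Build an SSLSocketFactory backed by a trust manager that accepts every certificate,
    // the same idea as the miTM class above
    static SSLSocketFactory trustAllSocketFactory() throws Exception {
        TrustManager[] trustAll = new TrustManager[] { new X509TrustManager() {
            public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[0]; }
            public void checkClientTrusted(X509Certificate[] certs, String authType) { }
            public void checkServerTrusted(X509Certificate[] certs, String authType) { }
        } };
        SSLContext sc = SSLContext.getInstance("TLS");
        sc.init(null, trustAll, new SecureRandom());
        return sc.getSocketFactory();
    }

    public static void main(String[] args) throws Exception {
        // https://example.com is a placeholder URL
        Document doc = Jsoup.connect("https://example.com")
                .sslSocketFactory(trustAllSocketFactory())
                .timeout(10000)
                .get();
        System.out.println(doc.title());
    }
}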

Give it a test; you should now be able to crawl the data normally. Most machines never run into this problem at all, haha, so if yours did, consider it a special one.