Getting Started with Java Web Crawlers

Published: 2024-06-22

1. Import the dependencies

<dependencies>
    <!-- HttpClient -->
    <dependency>
        <groupId>org.apache.httpcomponents</groupId>
        <artifactId>httpclient</artifactId>
        <version>4.5.3</version>
    </dependency>

    <!-- 日志 -->
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
        <version>1.7.25</version>
    </dependency>
</dependencies>
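
The slf4j-log4j12 binding expects a log4j configuration on the classpath; without one, log4j prints a "No appenders could be found" warning and the logging output is discarded. Below is a minimal sketch, assuming the file is placed at src/main/resources/log4j.properties; the log level and pattern are just example choices, not part of the original post.

# Minimal log4j 1.x configuration (assumed path: src/main/resources/log4j.properties)
log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} [%t] %-5p %c - %m%n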

2. CrawlerFirst

package com.atguigu.crawler.test;

import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

import java.io.IOException;

public class CrawlerFirst {
    public static void main(String[] args) throws IOException {
        // 1. "Open the browser": create an HttpClient instance
        CloseableHttpClient httpClient = HttpClients.createDefault();
        // 2. "Enter the URL": create a GET request for the target page
        HttpGet httpGet = new HttpGet("https://blog.csdn.net/m0_65152767?spm=1010.2135.3001.5343");
        // 3. Send the request with HttpClient and receive the response
        CloseableHttpResponse response = httpClient.execute(httpGet);
        // 4. Parse the response: on HTTP 200, read the entity as a UTF-8 string
        if (response.getStatusLine().getStatusCode() == 200) {
            HttpEntity httpEntity = response.getEntity();
            String content = EntityUtils.toString(httpEntity, "UTF-8");
            System.out.println(content);
        }
        // Release the connection and the client
        response.close();
        httpClient.close();
    }
}

Running CrawlerFirst prints the HTML source of the requested page to the console.
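
In a real crawler it also helps to set timeouts so that a slow or unresponsive page does not block the program. Below is a minimal sketch using HttpClient 4.5's RequestConfig together with try-with-resources; the class name CrawlerWithTimeouts and the timeout values are illustrative choices, not part of the original example.

package com.atguigu.crawler.test;

import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

import java.io.IOException;

public class CrawlerWithTimeouts {
    public static void main(String[] args) throws IOException {
        // Per-request settings; the timeout values below are placeholders to tune for the target site
        RequestConfig requestConfig = RequestConfig.custom()
                .setConnectTimeout(1000)            // max time to establish the TCP connection (ms)
                .setConnectionRequestTimeout(500)   // max time to obtain a connection from the pool (ms)
                .setSocketTimeout(10000)            // max time to wait for response data (ms)
                .build();

        HttpGet httpGet = new HttpGet("https://blog.csdn.net/m0_65152767?spm=1010.2135.3001.5343");
        httpGet.setConfig(requestConfig);

        // try-with-resources closes the client and the response automatically
        try (CloseableHttpClient httpClient = HttpClients.createDefault();
             CloseableHttpResponse response = httpClient.execute(httpGet)) {
            if (response.getStatusLine().getStatusCode() == 200) {
                System.out.println(EntityUtils.toString(response.getEntity(), "UTF-8"));
            }
        }
    }
}

Compared with CrawlerFirst, this version also guarantees the connection is released even if an exception is thrown.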