Go Crawler for Baidu Tieba (Concurrent Version)


Crawling pages from Baidu Tieba.

Non-concurrent version

package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "strconv"
)

// HttpGet fetches the page at url and returns its body as a string.
func HttpGet(url string) (result string, err error) {
    resp, err := http.Get(url)
    if err != nil {
        return
    }
    defer resp.Body.Close()

    // Read the response body in 4 KB chunks and append to result.
    buf := make([]byte, 4096)
    for {
        n, err2 := resp.Body.Read(buf)
        if n > 0 {
            result += string(buf[:n])
        }
        if err2 == io.EOF {
            fmt.Println("finished reading page")
            break
        }
        if err2 != nil {
            err = err2
            return
        }
    }
    return
}

// working crawls pages start through end sequentially and saves each one
// to a local HTML file.
func working(start, end int) {
    fmt.Printf("Crawling pages %d to %d\n", start, end)
    for i := start; i <= end; i++ {
        // Tieba paginates with the pn query parameter: 50 posts per page.
        url := "https://tieba.baidu.com/f?kw=vue&ie=utf-8&pn=" + strconv.Itoa((i-1)*50)
        result, err := HttpGet(url)
        if err != nil {
            fmt.Println("HttpGet err:", err)
            continue
        }
        f, err := os.Create("page_" + strconv.Itoa(i) + ".html")
        if err != nil {
            fmt.Println("os.Create err:", err)
            continue
        }
        f.WriteString(result)
        f.Close()
    }
}

func main() {
    var start, end int
    fmt.Print("Enter the start page: ")
    fmt.Scan(&start)
    fmt.Print("Enter the end page: ")
    fmt.Scan(&end)

    working(start, end)
}

Concurrent version

This version only adds a channel and goroutines on top of the code above: each page is crawled in its own goroutine, and the channel is how main waits for all of them to finish.
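
To see just that pattern in isolation, here is a minimal sketch (my own illustration, with the HTTP code stripped out): each goroutine does its work and then sends its index on the channel, and main receives exactly one value per goroutine, so it cannot exit before every goroutine has finished.

package main

import "fmt"

func main() {
    done := make(chan int)

    // Fan out: one goroutine per unit of work.
    for i := 1; i <= 5; i++ {
        go func(index int) {
            // ... crawl page `index` here ...
            done <- index // signal completion
        }(i)
    }

    // Collect: receive exactly one value per goroutine.
    for i := 1; i <= 5; i++ {
        fmt.Printf("page %d finished\n", <-done)
    }
}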

package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "strconv"
)

// HttpGet fetches the page at url and returns its body as a string.
func HttpGet(url string) (result string, err error) {
    resp, err := http.Get(url)
    if err != nil {
        return
    }
    defer resp.Body.Close()

    // Read the response body in 4 KB chunks and append to result.
    buf := make([]byte, 4096)
    for {
        n, err2 := resp.Body.Read(buf)
        if n > 0 {
            result += string(buf[:n])
        }
        if err2 == io.EOF {
            fmt.Println("finished reading page")
            break
        }
        if err2 != nil {
            err = err2
            return
        }
    }
    return
}

// SpiderPage crawls a single page and reports its index on the page channel
// when it is done. The send must happen on every path, including the error
// paths, otherwise the receive loop in working2 would block forever.
func SpiderPage(index int, page chan int) {
    fmt.Printf("Crawling page %d\n", index)
    defer func() { page <- index }()

    url := "https://tieba.baidu.com/f?kw=vue&ie=utf-8&pn=" + strconv.Itoa((index-1)*50)
    result, err := HttpGet(url)
    if err != nil {
        fmt.Println("HttpGet err:", err)
        return
    }
    f, err := os.Create("page_" + strconv.Itoa(index) + ".html")
    if err != nil {
        fmt.Println("os.Create err:", err)
        return
    }
    f.WriteString(result)
    f.Close()
}

// working2 starts one goroutine per page and then waits for all of them
// to report back on the channel.
func working2(start, end int) {
    fmt.Printf("Crawling pages %d to %d\n", start, end)

    page := make(chan int)

    for i := start; i <= end; i++ {
        go SpiderPage(i, page)
    }

    // Receive exactly one value per page so main does not exit early.
    for i := start; i <= end; i++ {
        fmt.Printf("page %d done\n", <-page)
    }
}

func main() {
    var start, end int
    fmt.Print("Enter the start page: ")
    fmt.Scan(&start)
    fmt.Print("Enter the end page: ")
    fmt.Scan(&end)

    working2(start, end)
}
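
One practical caveat, and this is my addition rather than part of the original post: the version above launches one goroutine per page with no upper bound, which can overwhelm the target site when the page range is large. A common refinement is to use a buffered channel as a counting semaphore to cap the number of in-flight requests. A sketch, assuming it is added to the same file as the concurrent version above; the function name workingLimited and the cap value are illustrative choices, not part of the original code.

// workingLimited is a bounded-concurrency variant of working2: at most
// maxConcurrent pages are fetched at the same time.
func workingLimited(start, end, maxConcurrent int) {
    page := make(chan int)
    sem := make(chan struct{}, maxConcurrent) // counting semaphore

    for i := start; i <= end; i++ {
        go func(index int) {
            sem <- struct{}{}        // acquire a slot (blocks when the cap is reached)
            defer func() { <-sem }() // release the slot when done
            SpiderPage(index, page)
        }(i)
    }

    for i := start; i <= end; i++ {
        fmt.Printf("page %d done\n", <-page)
    }
}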
