Getting Started with Web Crawlers: A Small Baidu Tieba Example
Posted by wen-kang
This post walks through a beginner web-crawler example: downloading listing pages from a Baidu Tieba forum with Python's urllib. Hopefully it is of some reference value to you.
import urllib.request
import urllib.parse
import random

# Target address
url = "http://tieba.baidu.com/f"

# Forged client User-Agent values for the HTTP request header
ua_list = [
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
    "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36"
]
# Randomly pick one value to use for the request header
user_agent = random.choice(ua_list)
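
# Optional variant (sketch): to rotate the User-Agent per request, call
# random.choice(ua_list) inside loadPage() instead of choosing once here.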

def doWrite(html, f_name):
    """
    :param html: data received after the request gets a response
    :param f_name: file name used for the save/write operation
    :return: E:Demopyswt
    """
    with open(f_name, "w", encoding="utf-8") as f:
        f.write(html)
    print(">" * 30)
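
# Note: the "with open(...)" block closes the file automatically; the file is
# created in the current working directory unless f_name is an absolute path.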

def loadPage(f_url, f_name):
    # Build a request for the target address
    request = urllib.request.Request(f_url)
    # Set the HTTP request header
    request.add_header("User-Agent", user_agent)
    # Get the response data
    response = urllib.request.urlopen(request)
    html = response.read().decode("utf-8")
    # Download and save
    print("Preparing to write data....")
    doWrite(html, f_name)
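
# For reference, the response object returned by urlopen() also exposes:
#   response.getcode()  # HTTP status code
#   response.geturl()   # final URL after any redirects
#   response.info()     # response headers
# The header could equally be passed when the request is built (sketch):
#   request = urllib.request.Request(f_url, headers={"User-Agent": user_agent})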

def doCode(url, kwd):
    """
    URL-encode the search keyword
    :return: the full URL with the encoded keyword appended
    """
    kw = {"kw": kwd}
    kw = urllib.parse.urlencode(kw)
    # Append the encoded keyword to the URL
    full_url = url + "?" + kw
    return full_url
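
# Example of what doCode() produces (illustrative values):
#   doCode("http://tieba.baidu.com/f", "python")
#   -> "http://tieba.baidu.com/f?kw=python"
# Non-ASCII keywords are percent-encoded, e.g. urlencode({"kw": "美食"})
# returns "kw=%E7%BE%8E%E9%A3%9F".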

def doUrl(url, star, end):
    """
    Build the URL for each page and download it
    """
    for pages in range(star, end + 1):
        page = (pages - 1) * 50

        f_url = url + "&pn=" + str(page)
        f_name = "page_" + str(pages) + ".html"
        print("About to load data for page {0}".format(pages))
        loadPage(f_url, f_name)
    print("Download finished, thanks for using!")
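
# The pn offset advances 50 per listing page:
#   page 1 -> pn=0, page 2 -> pn=50, page 3 -> pn=100, ...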

if __name__ == "__main__":
    tb_name = input("Enter the name of the Tieba forum to visit: ")
    starPage = int(input("Enter the start page: "))
    endPage = int(input("Enter the end page: "))

    full_url = doCode(url, tb_name)
    doUrl(full_url, starPage, endPage)
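
The same pagination loop can also be written with the third-party requests package. The sketch below is a minimal illustration under that assumption; requests is not part of the original script, and the function name fetch_tieba_pages and the output file names are hypothetical.

import requests  # third-party: install with `pip install requests`

def fetch_tieba_pages(kwd, start, end):
    """Download Tieba listing pages for keyword kwd into local HTML files."""
    ua = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
          "(KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36")
    for page in range(start, end + 1):
        # requests builds and percent-encodes the query string from the params dict
        params = {"kw": kwd, "pn": (page - 1) * 50}
        resp = requests.get("http://tieba.baidu.com/f",
                            params=params,
                            headers={"User-Agent": ua},
                            timeout=10)
        resp.raise_for_status()
        with open("page_{0}.html".format(page), "w", encoding="utf-8") as f:
            f.write(resp.text)

if __name__ == "__main__":
    fetch_tieba_pages("python", 1, 3)

The requests version also handles HTTP errors explicitly via raise_for_status(), which the urllib script above leaves to the default exception behaviour of urlopen().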