Python -- Web Crawler Basics

Submitted by 六眼飞鱼酱 on 2019-12-01 07:29:57

1、

# -*- coding: utf-8 -*-
"""
Created on Thu Apr 25 10:30:26 2019

@author: Office
"""
import urllib.request

# The website to crawl
url = "http://www.baidu.com/"

# response: send a request to the given url and get back the HTTP response data (a file-like object)
response = urllib.request.urlopen(url)

# Read the content (bytes type)
data = response.read()  # read the whole body; the result is a single bytes value
#data = response.readline()  # read one line; to print everything you would need a loop
#data = response.readlines()  # read the whole body as a list of lines
#print(data)
#print(type(data))

# Decode the fetched content into a string
str_data = data.decode("utf-8")
#print(str_data)
#print(type(str_data))

# Write the crawled page to a file
# Method 1
with open("baidu.html", "w", encoding="utf-8") as f:  # write the str version to the file
    f.write(str_data)

# Method 2: urlretrieve leaves some cache behind while it runs, which should be cleaned up afterwards
#urllib.request.urlretrieve(url, "baidu2.html")
#urllib.request.urlcleanup()  # clear the cache (takes no arguments)

# Useful response attributes
#print(response.info())  # information about the response (headers and environment)
#print(response.getcode())  # status code; remember 200 (OK), 304 (client did a GET but the file has not changed, i.e. served from cache), 400 (bad request, e.g. malformed syntax), 500 (internal server error)
#print(response.geturl())  # the URL that is actually being crawled
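
# Side note: the read()/decode()/write flow above can also be written with a with-block on the
# response itself, and the readline() variant mentioned in the comments needs an explicit loop.
# A minimal sketch of both (the output file name baidu_lines.html is my own choice):

import urllib.request

url = "http://www.baidu.com/"

# the with-block closes the response automatically when we are done with it
with urllib.request.urlopen(url) as response:
    with open("baidu_lines.html", "w", encoding="utf-8") as f:
        # readline() returns one line at a time; an empty bytes object means end of body
        while True:
            line = response.readline()
            if not line:
                break
            f.write(line.decode("utf-8"))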

 2、

 

# -*- coding: utf-8 -*-
"""
Created on Thu Apr 25 15:09:34 2019

@author: Office
"""
import urllib.request
url = "http://www.baidu.com/"
# Fake a browser request header
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36"
}

# Build a Request object that carries the headers
req = urllib.request.Request(url, headers=headers)

# Send the request
response = urllib.request.urlopen(req)
data = response.read().decode('utf-8')
print(data)
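
# If the server still rejects the request, urlopen raises an exception instead of returning a
# response. A minimal sketch of catching it and reading the status code, reusing the req object
# built above (urllib.error is part of the standard library):

import urllib.error

try:
    response = urllib.request.urlopen(req)
    print(response.getcode())               # 200 on success
except urllib.error.HTTPError as e:
    print("HTTP error:", e.code)            # e.g. 403 if the crawler is blocked
except urllib.error.URLError as e:
    print("Connection problem:", e.reason)  # DNS failure, refused connection, ...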

 

 3、

# -*- coding: utf-8 -*-
"""
Created on Thu Apr 25 15:17:49 2019

@author: Office
"""

import urllib.request
import random
url = "http://www.baidu.com/"
# A pool of browser User-Agent strings
agentlist = [
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0"
]
# Pick one User-Agent at random
agentStr = random.choice(agentlist)
headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'User-Agent': agentStr,
    'X-REQUESTED-With': 'XMLHttpRequest',
    'Content-Type': 'application/x-www-form-urlencoded'
}

# Build a Request object that carries the headers
req = urllib.request.Request(url, headers=headers)

# Send the request
response = urllib.request.urlopen(req)
data = response.read().decode('utf-8')
#print(data)
print(req.get_full_url())  # the full URL of the request
print(req.get_header('User-agent'))  # read a header back; only the first word is capitalized, the rest are lower-case

# Second approach: add the User-Agent after the Request object has been built
url = "http://www.baidu.com/"
headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'X-REQUESTED-With': 'XMLHttpRequest',
    'Content-Type': 'application/x-www-form-urlencoded'
}

user_angent_list = [
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0"
]

end_user_angent = random.choice(user_angent_list)

req = urllib.request.Request(url, headers=headers)
req.add_header('User-Agent', end_user_angent)

response = urllib.request.urlopen(req)
data = response.read().decode('utf-8')
print(data)
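
# Both variants above do the same job, so the random User-Agent logic can be folded into one
# small helper. A minimal sketch, reusing the agentlist pool defined earlier; the function name
# fetch_html is my own:

import random
import urllib.request

def fetch_html(url, user_agents):
    """Fetch a page as text, spoofing a randomly chosen browser User-Agent."""
    req = urllib.request.Request(url, headers={'User-Agent': random.choice(user_agents)})
    with urllib.request.urlopen(req) as response:
        return response.read().decode('utf-8')

#html = fetch_html("http://www.baidu.com/", agentlist)
#print(html)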

 4、

# -*- coding: utf-8 -*-
"""
Created on Thu Apr 25 16:10:42 2019

@author: Office
"""
import urllib.request
url = "http://www.baidu.com/"
# If a page does not respond within the time limit, the request is judged to have timed out and cannot be crawled
for i in range(1, 100):
    try:
        response = urllib.request.urlopen(url, timeout=0.2)
        print(len(response.read().decode('utf-8')))
    except:
        print("Request timed out, moving on to the next one")

 

 5、

# -*- coding: utf-8 -*-
"""
Created on Thu Apr 25 16:24:45 2019

@author: Office
"""
# HTTP is used for passing messages between client and server
# GET:    pass information through the URL itself, by appending it to the address
# POST:   submit data to the server; a common and relatively safe way to send data
# PUT:    ask the server to store a resource, usually at a specified location
# DELETE: ask the server to delete a resource


'''
GET request
How it works: the data is appended to the request path and passed to the server that way

Advantage: fast

Drawbacks: can only carry a small amount of data, and the data is exposed in the URL (not safe)

'''
import urllib.request
import urllib.parse
import string
import random

# Single query parameter
#url='http://www.baidu.com/s?wd='
#
#wd='图片'
#wd=urllib.parse.quote(wd)
#end_url=url+wd
#
#headers={
# 'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
# 'X-REQUESTED-With':'XMLHttpRequest',
# 'Content-Type':'application/x-www-form-urlencoded'
# }
#
#user_angent_list=[
# "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36",
# "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36",
# "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
# "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36",
# "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0"
# ]
#
#end_user_angent=random.choice(user_angent_list)
#
#req=urllib.request.Request(end_url,headers=headers)
#req.add_header('User-Agent',end_user_angent)
#
#response=urllib.request.urlopen(req)
#data=response.read().decode('utf-8')
#print(data)

 

# Multiple query parameters
url = 'https://www.baidu.com/s?'
da_ta = {
    'wd': '风景',
    'key': 'zhang',
    'value': 'san'
}
final_da_ta = urllib.parse.urlencode(da_ta)

final_url = url + final_da_ta

end_url = urllib.parse.quote(final_url, safe=string.printable)
print(end_url)
headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'X-REQUESTED-With': 'XMLHttpRequest',
    'Content-Type': 'application/x-www-form-urlencoded'
}

user_angent_list = [
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0"
]

end_user_angent = random.choice(user_angent_list)
headers['User-Agent'] = end_user_angent
req = urllib.request.Request(end_url, headers=headers)
response = urllib.request.urlopen(req)
data = response.read().decode('utf-8')
print(data)
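
# To make the two encoding steps above concrete: urlencode() percent-encodes the UTF-8 bytes of
# each value, and quote() with safe=string.printable then leaves the already-ASCII result
# untouched. A small standalone check:

import string
import urllib.parse

params = urllib.parse.urlencode({'wd': '风景'})
print(params)   # wd=%E9%A3%8E%E6%99%AF  (percent-encoded UTF-8 bytes)

full_url = 'https://www.baidu.com/s?' + params
# every character is now printable ASCII, so quote() passes it through unchanged
print(urllib.parse.quote(full_url, safe=string.printable))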

 

 6、

# -*- coding: utf-8 -*-
"""
Created on Sun Apr 28 16:50:51 2019

@author: Office
"""
'''
POST request
How it works: the parameters are packed up and sent separately in the request body

Advantages: can carry a large amount of data, and is safer (recommended whenever server-side data is being modified)

Drawback: slower
'''

import urllib.parse
import urllib.request
url='http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'

headers = {
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Referer': 'http://fanyi.youdao.com/?keyfrom=dict2.index',
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
    'X-Requested-With': 'XMLHttpRequest'
}
# Put the data to send into a dictionary
# The keys come from the page itself, usually the name attribute of the input tags

key = input("Please enter the text to translate: ")
data = {
    'i': key,
    'from': 'AUTO',
    'to': 'AUTO',
    'smartresult': 'dict',
    'client': 'fanyideskweb',
    'salt': '15564473252080',
    'sign': 'b6f44d14938df7391a28b66252a461aa',
    'doctype': 'json',
    'version': '2.1',
    'keyfrom': 'fanyi.web',
    'action': 'FY_BY_CLICKBUTTION'
}
# Pack the data to send; remember to encode it into bytes
da_ta = urllib.parse.urlencode(data).encode('utf-8')
# Send the request
end_data = urllib.request.urlopen(url, da_ta).read().decode('utf-8')
print(end_data)
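
# Because the request asks for doctype=json, the body that comes back is a JSON string.
# A minimal follow-up sketch to parse it (the exact fields in the response depend on the
# interface, so none are assumed here):

import json

result = json.loads(end_data)   # turn the JSON string into a Python dict
print(type(result))
print(result)                   # inspect the structure to locate the translated text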

 
