When writing web crawlers, you will often find that a site starts refusing your requests after too many hits from the same address. Routing traffic through proxy IPs solves this, but the proxy lists you find online are either paid or have no API. In the spirit of saving money wherever possible, let's build our own proxy IP pool.
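
The mechanism itself is a single requests feature: pass a proxies dict and the request is routed through the given address. A minimal sketch (the proxy address and target URL here are placeholders, not guaranteed to be live):

import requests

# Placeholder proxy; substitute a live address from the pool built below.
proxies = {"http": "http://119.180.173.81:8060"}
response = requests.get("http://example.com", proxies=proxies, timeout=5)
print(response.status_code)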

Without further ado, here is the code:

import requests
from bs4 import BeautifulSoup

# Fetch one listing page (itself routed through a proxy)
def GetInfo(url):
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36'
    }
    # Bootstrap proxy for scraping the list itself; the scheme matches the key
    proxies = {"http": "http://119.180.173.81:8060"}
    response = requests.get(url=url, proxies=proxies, headers=headers)
    response.encoding = "utf8"
    return response.text

# Write the validated proxies to a file, one Python list literal per page
def WriteData():
    for i in range(100):
        url = "https://www.xicidaili.com/nn/" + str(i + 1)
        data = GetData(url)
        with open('Proxies.txt', 'a+') as file:
            file.write(str(data))

# Check whether a proxy is usable: 200 means it answered through the proxy
def verify(proxies):
    try:
        req = requests.get("https://www.baidu.com", proxies=proxies, timeout=5)
        return req.status_code
    except requests.exceptions.RequestException:
        return 0  # unreachable or too slow

# Parse one listing page and keep only the proxies that pass verification
def GetData(url):
    data = list()
    html = GetInfo(url)
    soup = BeautifulSoup(html, "lxml")
    table = soup.find("table", id="ip_list")
    trs = table.find_all("tr")
    del trs[0]  # drop the header row
    for tr in trs:
        ip = tr.select("td")[1].get_text()
        port = tr.select("td")[2].get_text()
        protocol = tr.select("td")[5].get_text()
        address = protocol.lower() + "://" + ip + ":" + port
        # Register the address for both schemes so the HTTPS probe
        # in verify() actually goes through the proxy
        proxies = {'http': address, 'https': address}
        if verify(proxies) == 200:
            data.append(address)
    return data

if __name__ == '__main__':
    WriteData()
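
A note on the design: verify() probes https://www.baidu.com through each candidate, but any fast, reliable site works as a probe target, and the short timeout keeps dead proxies from stalling the whole run.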

Returned data:

['http://111.222.141.127:8118', 'https://117.88.177.101:3000', 'http://183.166.136.144:8888', 'http://27.208.231.100:8060', 'http://123.169.99.177:9999', 'http://119.84.84.185:12345', 'http://101.132.190.101:80', 'https://114.99.54.65:8118', 'https://119.4.13.26:1133', 'http://58.253.158.177:9999', 'http://114.223.208.165:8118', 'http://112.84.73.53:9999']
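
To actually consume the pool, load the file back and rotate through the addresses, discarding any that have died since they were collected. A minimal sketch (the regex loader and the httpbin.org probe URL are my own illustration, not part of the script above):

import random
import re
import requests

# Pull every http(s)://ip:port address out of the file, however the
# per-page lists were concatenated when WriteData() appended them.
with open('Proxies.txt') as f:
    pool = re.findall(r"https?://\d{1,3}(?:\.\d{1,3}){3}:\d+", f.read())

while pool:
    address = random.choice(pool)
    try:
        resp = requests.get("https://httpbin.org/ip",
                            proxies={"http": address, "https": address},
                            timeout=5)
        print(resp.text)
        break  # got a working proxy
    except requests.exceptions.RequestException:
        pool.remove(address)  # dead proxy: drop it and try another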

Original post: https://www.lizaza.cn/page23.html
