想问一下怎么使用代理IP?网上看到有卖代理IP的,怎么用它来访问网页呢?刚开始学爬虫,不太会用。
直接上代码,有报错的话可以自行调试一下,逻辑很简单不难的:
import requests
from selenium import webdriver
def conc_ip(url):
    """Fetch a fresh proxy IP from the Panda-proxy API, verify it works by
    requesting *url* through it, and finally open *url* in a Chrome browser
    configured to use that proxy.

    :param url: the target page to visit through the proxy.
    """
    try:
        # Ask the proxy vendor's API for one proxy; the response body is
        # plain text shaped like "ip:port" (assumes the Panda proxy service).
        page = requests.get('http://pandavip.xiongmaodaili.com/xiongmao-web/apiPlus/vgl?secret=306f6ea948242db0f9498b6e4551ca30&orderNo=VGL20220424120425lK79Ei4m&count=1&isTxt=1&proxyType=1&validTime=0&removal=0&cityIds=')  # 假设用的熊猫代理 -> assumes the Panda proxy vendor
        # Split on ":" instead of fixed slice offsets, so ports of any
        # length (and trailing whitespace/newlines) are handled correctly.
        ip, port = page.text.strip().split(':')
        pxstr = "http://" + ip + ":" + port
        print(pxstr)
        # Probe the target through the proxy; raises on connection failure.
        requests.get(url, proxies={"http": pxstr})
    except requests.RequestException:
        # Narrow catch: only network/HTTP failures fall back to a direct request.
        print('connect failed')
        print("切换回自己的IP: " + requests.get(url, proxies={"http": ""}).text)
    else:
        print('success')  # 测试代理ip是否正常使用 -> the proxy IP works
        # Look up geolocation info for the proxy IP (diagnostic output only).
        resp = requests.get(url='http://ip-api.com/json/%s' % (ip))
        data = resp.json()
        print(data)
        options = webdriver.ChromeOptions()
        # Route the browser's traffic through the proxy.
        options.add_argument("--proxy-server=" + pxstr)
        # `chrome_options=` was removed in Selenium 4; `options=` is the
        # supported keyword.
        driver = webdriver.Chrome(options=options)
        try:
            driver.get(url)
        finally:
            driver.quit()  # don't leak the browser process
if __name__ == "__main__":
    # Target page to visit through the proxy — substitute your own URL.
    url = "https://"
    conc_ip(url)
# Minimal usage example: the `proxies` keyword argument of requests.get
# routes the HTTP request through the given proxy.
proxies = {
    "http": "http://代理ip地址:端口"  # replace with your proxy's IP and port
}
# Pass the mapping defined above (the original snippet referenced
# undefined names `_url`, `_headers`, `_proxies`, which raises NameError).
requests.get(url="https://example.com", proxies=proxies)
proxies参数就是用来指定通过哪个代理IP去访问url的。
你在网上搜"爬虫代理IP"能找到一大堆教程。