为什么爬取CSDN热搜存MySQL为空?如何解决


import requests
import pymysql
from lxml import etree

conn = pymysql.connect(host='127.0.0.1', user='root', password='*****', port=3306, database='热搜爬取')

cursor = conn.cursor()


headers={
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36 Edg/92.0.902.67"
}
url = 'https://blog.csdn.net/rank/list'

reponse = requests.get(url, headers=headers)   # reponse
reponse.encoding = 'utf-8'
html = etree.HTML(reponse.text)

title = html.xpath('//div[@class="hosetitem-title"]/a/text()')
url1 =html.xpath('//div[@class="hosetitem-title"]/a/@href')


for n in range(len(title)):
    print(url1[n])
    sql = "insert into CSDN(排行,标题,网址) values('%d','%s','%s')"%(n+1,title[n],url1[n])
    #print(title[n])
    #print(url[n])
    cursor.execute(sql)
    conn.commit()

cursor.close()
# 关闭连接
conn.close()

没有报错? 还是爬取的结果为空? 或者数据库表存在?字段类型对应?首先看是爬取问题还是数据库存储问题