def job():
    """Run one crawl pass.

    Loads teacher accounts, fetches a batch of proxy IPs, fills the shared
    ``work`` queue, spawns two cooperative ``crawler`` workers, and closes
    the log/account spreadsheets when they finish.

    Side effects: rebinds the module-level ``read_count``, ``work``, ``log``
    and ``ip_list`` used by ``crawler``.
    """
    global read_count, work, log, ip_list
    # Load teacher accounts from the spreadsheet.
    read_count = ReadCount('data/教师账号.xlsx')
    read_count.run()
    # Spreadsheet used to record errors.
    log = Log('data/报错日志.xlsx')
    # BUGFIX: the original URL contained '®ions=' — an HTML-entity mangling
    # of '&regions=' ('&reg' was decoded to '®'), which corrupted the query.
    get_ip = IP(
        'http://http.tiqu.letecs.com/getip3?num=20&type=1&pro=&city=0&yys=0'
        '&port=11&time=1&ts=0&ys=0&cs=0&lb=1&sb=0&pb=4&mr=1&regions=&gm=4')
    get_ip.run()
    ip_list = get_ip.ip_list
    # Queue every account entry for the workers.
    work = Queue()
    for key in read_count.count:
        work.put_nowait(read_count.count[key])
    # Two cooperative workers drain the queue; joinall blocks until both exit.
    tasks = [gevent.spawn(crawler) for _ in range(2)]
    gevent.joinall(tasks)
    log.quit()
    read_count.quit()
    print('执行一次')
def crawler():
    """Worker: drain the shared ``work`` queue in batches of three items.

    Assumes the module-level ``work`` queue has been populated by ``job``.
    """
    while not work.empty():
        for _ in range(3):
            # BUGFIX: the queue can drain between iterations of this batch
            # loop; an unguarded get(timeout=1) then raises Empty after the
            # timeout. Stop the batch early instead.
            if work.empty():
                break
            item = work.get(block=True, timeout=1)
            print(work.qsize())
            time.sleep(1)
# work.get(block=True, timeout=1) is called three times per batch; if the
# queue holds only one item it is already empty on the second iteration, so
# the get() raises Empty after the timeout. Rewrite it like this instead:
def crawler():
    """Worker coroutine: consume the shared ``work`` queue, three items per pass.

    Skips the fetch whenever the queue has drained mid-batch so that
    ``get(timeout=1)`` never raises ``Empty``.
    """
    while not work.empty():
        for _batch_slot in range(3):
            if work.empty():
                # Queue drained during this batch — nothing to fetch.
                continue
            item = work.get(block=True, timeout=1)
            print(work.qsize())
            time.sleep(1)