python 爬虫asyncio错误

我想要达到的结果
import asyncio
import json

import aiofiles  # 用于异步下载
import aiohttp  # 异步登录地址
import requests


async def getChapterContent(cid,book_id,title):
    data = {"book_id":cid,
            "cid":f"{cid}|{book_id}",
            "need_bookinfo":1
    }
    data = json.dumps(data)   #转化为列表形式
    url = f'https://dushu.baidu.com/api/pc/getChapterContent?data={data}'

    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            dic = await resp.json()


            async with aiofiles.open('shuju/'+title , mode='w' ,encoding = 'utf-8') as f:
                await f.write(dic['data']['novel']['content'])



async def getCatalog(url):
    resp = requests.get(url)
    # print(resp.json())
    dic = resp.json()
    tasks = []


    for item in dic['data']['novel']['items']:
        title = item['title']
        cid = item['cid']

        tasks.append(getChapterContent(asyncio.create_task(cid),
                                       asyncio.create_task(book_id),
                                       asyncio.create_task(title)
                                      ))


    await asyncio.wait(tasks_list)


if __name__ == '__main__':
    book_id = '4306063500'
    url = 'https://dushu.baidu.com/api/pc/getCatalog?data={"book_id": "' +book_id+ '"}'
    asyncio.run(getCatalog(url))

为什么报错为TypeError: a coroutine was expected, got '1569782244'

await asyncio.wait(tasks_list) 后面改await asyncio.wait(tasks)
还是一样的不是这的问题

data = {"book_id":cid,
"cid":f"{cid}|{book_id}",
"need_bookinfo":1
这里写错了吧?
book_id和cid的对应关系

关于该问题,我找了一篇非常好的博客,你可以看看是否有帮助,链接:python 协程库 asyncio 使用
你还可以看下python参考手册中的 python-asyncio --- 异步 I/O