I have this first file, which starts the Scrapy crawl:

import asyncio
from scrapy.crawler import CrawlerProcess

from scrapy_webcrawler.spiders.spider import WebCrawlerSpider


class WebCrawlerConnector:
    def start(self) -> int:
        process = CrawlerProcess({
            'USER_AGENT': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'
        })
        process.crawl(
            WebCrawlerSpider,
            urls=["https://quotes.toscrape.com/"],
            crawl_depth=0,
            max_links_per_page=2,
        )
        process.start()


async def main() -> None:
    """Start the connector."""
    connector = WebCrawlerConnector()
    await connector.start()


if __name__ == "__main__":
    asyncio.run(main())
And I have this spider, which uses a crawling tool called Crawl4ai. It is very good at extracting content, but it only crawls a single page, which is why I'm wrapping it in Scrapy:

from typing import Optional

from crawl4ai import AsyncWebCrawler
from scrapy.exceptions import CloseSpider
from scrapy.http import Request
from scrapy.linkextractors import LinkExtractor
from scrapy.settings import BaseSettings
from scrapy.spiders import CrawlSpider, Rule
from scrapy.utils.project import get_project_settings


class WebCrawlerSpider(CrawlSpider):
    name = "webcrawler"
    allowed_domains = []
    rules = (
        Rule(LinkExtractor(), callback="parse_item", follow=True),
    )
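For context, on its own I call Crawl4ai roughly like the simplified sketch below (this is not my real spider code, and it assumes Crawl4ai's arun() call and result.markdown attribute):

import asyncio

from crawl4ai import AsyncWebCrawler


async def fetch_one(url: str) -> None:
    # Minimal single-page extraction with Crawl4ai (simplified sketch).
    async with AsyncWebCrawler(verbose=False) as crawler:
        result = await crawler.arun(url=url)
        print(result.markdown)  # extracted page content as markdown


if __name__ == "__main__":
    asyncio.run(fetch_one("https://quotes.toscrape.com/"))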
However, when I run the first file, the result is always a stack trace like this:

ERROR:scrapy.core.scraper:Spider error processing (referer: None)
Traceback (most recent call last):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/defer.py", line 346, in aiter_errback
yield await it.__anext__()
^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 394, in __anext__
return await self.data.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 375, in _async_chain
async for o in as_async_generator(it):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/asyncgen.py", line 21, in as_async_generator
async for r in it:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 394, in __anext__
return await self.data.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 375, in _async_chain
async for o in as_async_generator(it):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/asyncgen.py", line 21, in as_async_generator
async for r in it:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/referer.py", line 384, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/urllength.py", line 62, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/depth.py", line 60, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spiders/crawl.py", line 165, in _parse_response
cb_res = await cb_res
^^^^^^^^^^^^
File "/Users/luis.ferreira/Documents/test/scrapy_webcrawler/scrapy_webcrawler/spiders/spider.py", line 37, in parse_start_url
await self.process_url(response.url)
File "/Users/luis.ferreira/Documents/test/scrapy_webcrawler/scrapy_webcrawler/spiders/spider.py", line 46, in process_url
async with AsyncWebCrawler(verbose=False) as crawler:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_webcrawler.py", line 118, in __aenter__
await self.crawler_strategy.__aenter__()
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_crawler_strategy.py", line 290, in __aenter__
await self.start()
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_crawler_strategy.py", line 298, in start
self.playwright = await async_playwright().start()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/playwright/async_api/_context_manager.py", line 51, in start
return await self.__aenter__()
^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/playwright/async_api/_context_manager.py", line 40, in __aenter__
done, _ = await asyncio.wait(
^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/asyncio/tasks.py", line 418, in wait
return await _wait(fs, timeout, return_when, loop)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/asyncio/tasks.py", line 525, in _wait
await waiter
RuntimeError: await wasn't used with future
INFO:scrapy.core.engine:Closing spider (finished)
I know this is related to synchronous vs. asynchronous code, but I'm not very familiar with Python. Can someone help?

I already tried removing the async parts from the spider, but that didn't help either.
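From what I understand, Scrapy drives everything through Twisted's reactor by default, while Crawl4ai/Playwright expect a running asyncio event loop, so the two may be conflicting. Below is a rough sketch of the direction I'm wondering about — pointing Scrapy at its asyncio-based reactor via the TWISTED_REACTOR setting and dropping my own asyncio.run() wrapper — but I don't know whether this is the right fix:

from scrapy.crawler import CrawlerProcess

from scrapy_webcrawler.spiders.spider import WebCrawlerSpider


def run() -> None:
    process = CrawlerProcess({
        "USER_AGENT": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
        # Ask Scrapy to install the asyncio-based Twisted reactor so that
        # Playwright/Crawl4ai can share the same event loop (Scrapy 2.0+ setting).
        "TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
    })
    process.crawl(
        WebCrawlerSpider,
        urls=["https://quotes.toscrape.com/"],
        crawl_depth=0,
        max_links_per_page=2,
    )
    # process.start() blocks and runs the reactor itself, so no asyncio.run() wrapper.
    process.start()


if __name__ == "__main__":
    run()

Is this the recommended way to combine Scrapy with an asyncio-based library like Crawl4ai, or am I missing something else?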