Запуск паука, программно интегрированного с библиотекой сканирования.Python

Программы на Python
Ответить
Anonymous
 Запуск паука, программно интегрированного с библиотекой сканирования.

Сообщение Anonymous »

У меня есть следующий код, который программно запускает паука:

Код: Выделить всё

import asyncio

from scrapy.crawler import CrawlerProcess

from scrapy_webcrawler.spiders.spider import WebCrawlerSpider

class WebCrawlerConnector:

def start(self) -> int:
process = CrawlerProcess({
'USER_AGENT': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'
})

process.crawl(
WebCrawlerSpider,
urls=["https://quotes.toscrape.com/"],
crawl_depth=0,
max_links_per_page=2,
)

process.start()

async def main() -> None:
"""Start the connector."""
connector = WebCrawlerConnector()
await connector.start()

if __name__ == "__main__":
asyncio.run(main())
И у меня есть этот паук, который использует инструмент сканирования под названием Craw4ai, который очень хорош в извлечении контента, но это сканер одной страницы, поэтому я использую Scrapy:

Код: Выделить всё

from typing import Optional

from crawl4ai import AsyncWebCrawler
from scrapy.exceptions import CloseSpider
from scrapy.http import Request
from scrapy.linkextractors import LinkExtractor
from scrapy.settings import BaseSettings
from scrapy.spiders import CrawlSpider, Rule
from scrapy.utils.project import get_project_settings

class WebCrawlerSpider(CrawlSpider):
name = "webcrawler"
allowed_domains = []
rules = (
Rule(LinkExtractor(), callback="parse_item", follow=True),
)

def __init__(self, urls=["https://quotes.toscrape.com/"], *args, **kwargs):
super().__init__(*args, **kwargs)
self.start_urls = urls

@classmethod
def from_crawler(cls, crawler, *args, **kwargs):
spider = super().from_crawler(crawler, *args, **kwargs)
spider.settings.set("DEPTH_LIMIT", 0, priority="spider",)
return spider

def start_requests(self):
for url in self.start_urls:
yield Request(url)

async def parse_start_url(self, response):
await self.process_url(response.url)
if self.should_stop_crawling():
self.logger.info("DEPTH_LIMIT is 0.  Stopping crawl.")
raise CloseSpider(reason="DEPTH_LIMIT reached 0, stopping spider.")

def parse_item(self, response):
self.process_url(response.url)

async def process_url(self, url):
async with AsyncWebCrawler(verbose=False) as crawler:
result = await crawler.arun(
url=url,
exclude_external_links=True,
exclude_social_media_links=True,
)
print(result.fit_markdown)

def should_stop_crawling(self):
depth_limit = self.settings.getint("DEPTH_LIMIT", default=-1)
return depth_limit == 0

Однако, когда я запускаю первый файл, результатом всегда является такая трассировка стека:

Код: Выделить всё

ERROR:scrapy.core.scraper:Spider error processing   (referer: None)
Traceback (most recent call last):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/defer.py", line 346, in aiter_errback
yield await it.__anext__()
^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 394, in __anext__
return await self.data.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 375, in _async_chain
async for o in as_async_generator(it):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/asyncgen.py", line 21, in as_async_generator
async for r in it:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 394, in __anext__
return await self.data.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 375, in _async_chain
async for o in as_async_generator(it):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/asyncgen.py", line 21, in as_async_generator
async for r in it:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/referer.py", line 384, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/urllength.py", line 62, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/depth.py", line 60, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spiders/crawl.py", line 165, in _parse_response
cb_res = await cb_res
^^^^^^^^^^^^
File "/Users/luis.ferreira/Documents/test/scrapy_webcrawler/scrapy_webcrawler/spiders/spider.py", line 37, in parse_start_url
await self.process_url(response.url)
File "/Users/luis.ferreira/Documents/test/scrapy_webcrawler/scrapy_webcrawler/spiders/spider.py", line 46, in process_url
async with AsyncWebCrawler(verbose=False) as crawler:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_webcrawler.py", line 118, in __aenter__
await self.crawler_strategy.__aenter__()
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_crawler_strategy.py", line 290, in __aenter__
await self.start()
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_crawler_strategy.py", line 298, in start
self.playwright = await async_playwright().start()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/playwright/async_api/_context_manager.py", line 51, in start
return await self.__aenter__()
^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/playwright/async_api/_context_manager.py", line 40, in __aenter__
done, _ = await asyncio.wait(
^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/asyncio/tasks.py", line 418, in wait
return await _wait(fs, timeout, return_when, loop)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/asyncio/tasks.py", line 525, in _wait
await waiter
RuntimeError:  await wasn't used with future
2024-12-12 17:50:07 [scrapy.core.scraper] ERROR: Spider error processing   (referer: None)
Traceback (most recent call last):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/defer.py", line 346, in aiter_errback
yield await it.__anext__()
^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 394, in __anext__
return await self.data.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 375, in _async_chain
async for o in as_async_generator(it):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/asyncgen.py", line 21, in as_async_generator
async for r in it:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 394, in __anext__
return await self.data.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/python.py", line 375, in _async_chain
async for o in as_async_generator(it):
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/utils/asyncgen.py", line 21, in as_async_generator
async for r in it:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/referer.py", line 384, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/urllength.py", line 62, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spidermiddlewares/depth.py", line 60, in process_spider_output_async
async for r in result:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/core/spidermw.py", line 121, in process_async
async for r in iterable:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/scrapy/spiders/crawl.py", line 165, in _parse_response
cb_res = await cb_res
^^^^^^^^^^^^
File "/Users/luis.ferreira/Documents/test/scrapy_webcrawler/scrapy_webcrawler/spiders/spider.py", line 37, in parse_start_url
await self.process_url(response.url)
File "/Users/luis.ferreira/Documents/test/scrapy_webcrawler/scrapy_webcrawler/spiders/spider.py", line 46, in process_url
async with AsyncWebCrawler(verbose=False) as crawler:
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_webcrawler.py", line 118, in __aenter__
await self.crawler_strategy.__aenter__()
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_crawler_strategy.py", line 290, in __aenter__
await self.start()
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/crawl4ai/async_crawler_strategy.py", line 298, in start
self.playwright = await async_playwright().start()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/playwright/async_api/_context_manager.py", line 51, in start
return await self.__aenter__()
^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/site-packages/playwright/async_api/_context_manager.py", line 40, in __aenter__
done, _ = await asyncio.wait(
^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/asyncio/tasks.py", line 418, in wait
return await _wait(fs, timeout, return_when, loop)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/luis.ferreira/.pyenv/versions/3.11.2/lib/python3.11/asyncio/tasks.py", line 525, in _wait
await waiter
RuntimeError:  await wasn't used with future
INFO:scrapy.core.engine:Closing spider (finished)
2024-12-12 17:50:07 [scrapy.core.engine] INFO: Closing spider (finished)
Я знаю, что это связано с синхронным/асинхронным кодом, но я не очень знаком с Python. Может кто-нибудь помочь?
Я уже пытался удалить асинхронность из паука, но это тоже не помогло.

Подробнее здесь: https://stackoverflow.com/questions/792 ... -crawl-lib
Ответить

Быстрый ответ

Изменение регистра текста: 
Смайлики
:) :( :oops: :roll: :wink: :muza: :clever: :sorry: :angel: :read: *x)
Ещё смайлики…
   
К этому ответу прикреплено по крайней мере одно вложение.

Если вы не хотите добавлять вложения, оставьте поля пустыми.

Максимально разрешённый размер вложения: 15 МБ.

Вернуться в «Python»