diff --git a/crawl4ai/async_webcrawler.py b/crawl4ai/async_webcrawler.py
index ebd2859d2..3665463c9 100644
--- a/crawl4ai/async_webcrawler.py
+++ b/crawl4ai/async_webcrawler.py
@@ -225,11 +225,11 @@ async def arun(
                 screenshot=True,
                 ...
             )
-            result = await crawler.arun(url="https://example.com", crawler_config=config)
+            result = await crawler.arun(url="https://example.com", config=config)

        Args:
            url: The URL to crawl (http://, https://, file://, or raw:)
-            crawler_config: Configuration object controlling crawl behavior
+            config: Configuration object controlling crawl behavior
            [other parameters maintained for backwards compatibility]

        Returns:
@@ -849,4 +849,4 @@ async def aseed_urls(
                seeding_config
            )
        else:
-            raise ValueError("`domain_or_domains` must be a string or a list of strings.")
\ No newline at end of file
+            raise ValueError("`domain_or_domains` must be a string or a list of strings.")