forked from unclecode/crawl4ai
-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.py
488 lines (413 loc) · 18.1 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
# Standard library
import asyncio
import json
import logging
import math
import os
import time
import uuid
from collections import defaultdict
from dataclasses import dataclass, field
from enum import Enum
from typing import Optional, List, Dict, Any, Union
from urllib.parse import urlparse

# Third-party
import psutil
from fastapi import (
    BackgroundTasks,
    Depends,
    FastAPI,
    HTTPException,
    Request,
    Security,
)
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import HTMLResponse, JSONResponse, RedirectResponse
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel, HttpUrl, Field
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import FileResponse

# Local / project
from crawl4ai import AsyncWebCrawler, CrawlResult, CacheMode
from crawl4ai.config import MIN_WORD_THRESHOLD
from crawl4ai.extraction_strategy import (
    LLMExtractionStrategy,
    CosineStrategy,
    JsonCssExtractionStrategy,
)
# Absolute path of the directory containing this file; used below to locate
# the optional bundled "site" docs directory.
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class TaskStatus(str, Enum):
    """Lifecycle states of a crawl task (str-valued so they serialize
    directly into JSON responses)."""
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
class CrawlerType(str, Enum):
    """Extraction-strategy selector used by ExtractionConfig.type."""
    BASIC = "basic"
    LLM = "llm"
    COSINE = "cosine"
    JSON_CSS = "json_css"
class ExtractionConfig(BaseModel):
    """Chooses an extraction strategy and supplies its constructor kwargs."""
    type: CrawlerType
    params: Dict[str, Any] = {}  # forwarded verbatim to the strategy constructor
class ChunkingStrategy(BaseModel):
    """Named chunking strategy plus its parameters.

    NOTE(review): accepted in CrawlRequest but never consumed by the
    processing code in this module — confirm whether it should be wired in.
    """
    type: str
    params: Dict[str, Any] = {}
class ContentFilter(BaseModel):
    """Named content filter plus its parameters.

    NOTE(review): accepted in CrawlRequest but never consumed by the
    processing code in this module — confirm whether it should be wired in.
    """
    type: str = "bm25"
    params: Dict[str, Any] = {}
class CrawlRequest(BaseModel):
    """Payload accepted by the /crawl* endpoints.

    ``urls`` may be a single URL or a list; a list makes the service use
    ``arun_many`` instead of ``arun``.
    """
    urls: Union[HttpUrl, List[HttpUrl]]
    word_count_threshold: int = MIN_WORD_THRESHOLD
    extraction_config: Optional[ExtractionConfig] = None
    chunking_strategy: Optional[ChunkingStrategy] = None
    content_filter: Optional[ContentFilter] = None
    js_code: Optional[List[str]] = None  # passed through to the crawler's js_code
    wait_for: Optional[str] = None  # passed through to the crawler's wait_for
    css_selector: Optional[str] = None
    screenshot: bool = False
    magic: bool = False
    extra: Optional[Dict[str, Any]] = {}  # extra kwargs forwarded to arun/arun_many
    session_id: Optional[str] = None
    cache_mode: Optional[CacheMode] = CacheMode.ENABLED
    priority: int = Field(default=5, ge=1, le=10)  # >5 routes to the high-priority queue
    ttl: Optional[int] = 3600  # seconds a finished task stays queryable
    crawler_params: Dict[str, Any] = {}  # kwargs for AsyncWebCrawler construction
@dataclass
class TaskInfo:
    """Bookkeeping record for a submitted crawl task.

    Tracks the task's lifecycle status, the crawl result (or error message)
    once finished, its creation time, and how long a finished task is kept
    before the cleanup loop evicts it.
    """

    id: str
    status: TaskStatus
    result: Optional[Union[CrawlResult, List[CrawlResult]]] = None
    error: Optional[str] = None
    # BUG FIX: the original `created_at: float = time.time()` evaluated
    # time.time() ONCE at class-definition time, so every task shared the
    # same timestamp and TTL expiry was measured from process start-up
    # rather than from task creation. default_factory defers the call to
    # instantiation time.
    created_at: float = field(default_factory=time.time)
    ttl: int = 3600  # seconds a finished task stays queryable
class ResourceMonitor:
    """Derives how many crawl tasks may run concurrently from current
    host memory/CPU pressure.

    Readings are throttled: psutil is consulted at most once per
    ``_check_interval`` seconds; in between, the last computed slot count
    is served from cache.
    """

    def __init__(self, max_concurrent_tasks: int = 10):
        self.max_concurrent_tasks = max_concurrent_tasks
        self.memory_threshold = 0.85  # fraction of RAM considered "full"
        self.cpu_threshold = 0.90     # fraction of CPU considered "full"
        self._last_check = 0
        self._check_interval = 1  # seconds between fresh psutil readings
        self._last_available_slots = max_concurrent_tasks

    async def get_available_slots(self) -> int:
        """Return the current task-slot budget, refreshing at most once
        per check interval."""
        now = time.time()
        if now - self._last_check < self._check_interval:
            # Inside the throttle window: serve the cached value.
            return self._last_available_slots

        mem_fraction = psutil.virtual_memory().percent / 100
        cpu_fraction = psutil.cpu_percent() / 100

        # Remaining headroom under each threshold, scaled to [0, 1];
        # the scarcer resource dictates the budget.
        headroom = min(
            max(0, (self.memory_threshold - mem_fraction) / self.memory_threshold),
            max(0, (self.cpu_threshold - cpu_fraction) / self.cpu_threshold),
        )

        self._last_available_slots = math.floor(self.max_concurrent_tasks * headroom)
        self._last_check = now
        return self._last_available_slots
class TaskManager:
    """In-memory task registry backed by two priority queues (high/low).

    Finished tasks are evicted by a periodic background loop once their
    TTL has elapsed.
    """

    def __init__(self, cleanup_interval: int = 300):
        self.tasks: Dict[str, TaskInfo] = {}
        self.high_priority = asyncio.PriorityQueue()
        self.low_priority = asyncio.PriorityQueue()
        self.cleanup_interval = cleanup_interval
        self.cleanup_task = None

    async def start(self):
        """Launch the background TTL-eviction loop."""
        self.cleanup_task = asyncio.create_task(self._cleanup_loop())

    async def stop(self):
        """Cancel the eviction loop and wait for it to unwind."""
        if self.cleanup_task:
            self.cleanup_task.cancel()
            try:
                await self.cleanup_task
            except asyncio.CancelledError:
                pass

    async def add_task(self, task_id: str, priority: int, ttl: int) -> None:
        """Register a new PENDING task and enqueue it by priority."""
        self.tasks[task_id] = TaskInfo(id=task_id, status=TaskStatus.PENDING, ttl=ttl)
        target = self.high_priority if priority > 5 else self.low_priority
        # PriorityQueue pops the smallest tuple first, so negate the
        # priority to make larger numbers come out earlier.
        await target.put((-priority, task_id))

    async def get_next_task(self) -> Optional[str]:
        """Pop the next task id, preferring the high-priority queue.

        Each queue is polled with a short timeout; returns None when both
        are empty.
        """
        for queue in (self.high_priority, self.low_priority):
            try:
                _, task_id = await asyncio.wait_for(queue.get(), timeout=0.1)
                return task_id
            except asyncio.TimeoutError:
                continue
        return None

    def update_task(self, task_id: str, status: TaskStatus, result: Any = None, error: str = None):
        """Overwrite status/result/error for a known task (no-op otherwise)."""
        info = self.tasks.get(task_id)
        if info is not None:
            info.status = status
            info.result = result
            info.error = error

    def get_task(self, task_id: str) -> Optional[TaskInfo]:
        """Look up a task record; None when unknown or already evicted."""
        return self.tasks.get(task_id)

    async def _cleanup_loop(self):
        """Every ``cleanup_interval`` seconds, drop finished tasks whose
        TTL has expired."""
        while True:
            try:
                await asyncio.sleep(self.cleanup_interval)
                now = time.time()
                done_states = (TaskStatus.COMPLETED, TaskStatus.FAILED)
                expired = [
                    tid
                    for tid, info in self.tasks.items()
                    if info.status in done_states and now - info.created_at > info.ttl
                ]
                for tid in expired:
                    del self.tasks[tid]
            except Exception as e:
                logger.error(f"Error in cleanup loop: {e}")
class CrawlerPool:
    """Bounded pool of AsyncWebCrawler instances keyed by last-use time.

    Crawlers idle for more than ten minutes are shut down on the next
    acquire; once the pool is at capacity, the least-recently-used crawler
    is handed out again instead of creating a new one.
    """

    def __init__(self, max_size: int = 10):
        self.max_size = max_size
        self.active_crawlers: Dict[AsyncWebCrawler, float] = {}
        self._lock = asyncio.Lock()

    async def acquire(self, **kwargs) -> AsyncWebCrawler:
        """Hand out a crawler, first evicting stale instances; reuses the
        LRU crawler when the pool is full."""
        async with self._lock:
            now = time.time()

            # Shut down crawlers that have sat unused for over 10 minutes.
            stale = [c for c, seen in self.active_crawlers.items() if now - seen > 600]
            for c in stale:
                await c.__aexit__(None, None, None)
                del self.active_crawlers[c]

            if len(self.active_crawlers) < self.max_size:
                # Room left: spin up a fresh crawler.
                fresh = AsyncWebCrawler(**kwargs)
                await fresh.__aenter__()
                self.active_crawlers[fresh] = now
                return fresh

            # Pool full: reuse the crawler that was used longest ago.
            lru = min(self.active_crawlers, key=self.active_crawlers.get)
            self.active_crawlers[lru] = now
            return lru

    async def release(self, crawler: AsyncWebCrawler):
        """Mark a crawler as just-used; it stays pooled for reuse."""
        async with self._lock:
            if crawler in self.active_crawlers:
                self.active_crawlers[crawler] = time.time()

    async def cleanup(self):
        """Shut down and forget every pooled crawler."""
        async with self._lock:
            for c in list(self.active_crawlers):
                await c.__aexit__(None, None, None)
            self.active_crawlers.clear()
class CrawlerService:
    """Coordinates the task queue, crawler pool and resource monitor:
    accepts crawl requests, schedules them by priority, and runs them on
    pooled AsyncWebCrawler instances."""

    def __init__(self, max_concurrent_tasks: int = 10):
        self.resource_monitor = ResourceMonitor(max_concurrent_tasks)
        self.task_manager = TaskManager()
        self.crawler_pool = CrawlerPool(max_concurrent_tasks)
        self._processing_task = None

    async def start(self):
        """Start the task manager and the queue-processing loop."""
        await self.task_manager.start()
        self._processing_task = asyncio.create_task(self._process_queue())

    async def stop(self):
        """Stop the processing loop, then the task manager and pool."""
        if self._processing_task:
            self._processing_task.cancel()
            try:
                await self._processing_task
            except asyncio.CancelledError:
                pass
        await self.task_manager.stop()
        await self.crawler_pool.cleanup()

    def _create_extraction_strategy(self, config: ExtractionConfig):
        """Map an ExtractionConfig onto a crawl4ai strategy instance;
        None for no config or an unrecognized type (e.g. BASIC)."""
        if not config:
            return None
        if config.type == CrawlerType.LLM:
            return LLMExtractionStrategy(**config.params)
        if config.type == CrawlerType.COSINE:
            return CosineStrategy(**config.params)
        if config.type == CrawlerType.JSON_CSS:
            return JsonCssExtractionStrategy(**config.params)
        return None

    async def submit_task(self, request: CrawlRequest) -> str:
        """Register a crawl request and return its task id."""
        task_id = str(uuid.uuid4())
        await self.task_manager.add_task(task_id, request.priority, request.ttl or 3600)
        # Attach the request to the TaskInfo record so _process_queue can
        # retrieve it later. NOTE(review): this sets an attribute TaskInfo
        # does not declare; it works because TaskInfo has no __slots__,
        # but declaring a `request` field would be cleaner.
        self.task_manager.tasks[task_id].request = request
        return task_id

    async def _process_queue(self):
        """Background loop: pull tasks off the queues and crawl them."""
        while True:
            try:
                available_slots = await self.resource_monitor.get_available_slots()
                # BUG FIX: the original guard read
                # `if False and available_slots <= 0:` — a debugging
                # leftover that disabled resource-based throttling
                # entirely. Honor the monitor's slot budget again.
                if available_slots <= 0:
                    await asyncio.sleep(1)
                    continue

                task_id = await self.task_manager.get_next_task()
                if not task_id:
                    await asyncio.sleep(1)
                    continue

                task_info = self.task_manager.get_task(task_id)
                if not task_info:
                    continue

                request = task_info.request
                self.task_manager.update_task(task_id, TaskStatus.PROCESSING)
                try:
                    crawler = await self.crawler_pool.acquire(**request.crawler_params)
                    try:
                        extraction_strategy = self._create_extraction_strategy(request.extraction_config)
                        if isinstance(request.urls, list):
                            results = await crawler.arun_many(
                                urls=[str(url) for url in request.urls],
                                # BUG FIX: honor the per-request threshold
                                # instead of always using the global
                                # MIN_WORD_THRESHOLD constant (which is
                                # merely its default).
                                word_count_threshold=request.word_count_threshold,
                                extraction_strategy=extraction_strategy,
                                js_code=request.js_code,
                                wait_for=request.wait_for,
                                css_selector=request.css_selector,
                                screenshot=request.screenshot,
                                magic=request.magic,
                                session_id=request.session_id,
                                cache_mode=request.cache_mode,
                                **request.extra,
                            )
                        else:
                            results = await crawler.arun(
                                url=str(request.urls),
                                word_count_threshold=request.word_count_threshold,
                                extraction_strategy=extraction_strategy,
                                js_code=request.js_code,
                                wait_for=request.wait_for,
                                css_selector=request.css_selector,
                                screenshot=request.screenshot,
                                magic=request.magic,
                                session_id=request.session_id,
                                cache_mode=request.cache_mode,
                                **request.extra,
                            )
                    finally:
                        # Always hand the crawler back — the original
                        # skipped release when the crawl raised.
                        await self.crawler_pool.release(crawler)
                    self.task_manager.update_task(task_id, TaskStatus.COMPLETED, results)
                except Exception as e:
                    logger.error(f"Error processing task {task_id}: {str(e)}")
                    self.task_manager.update_task(task_id, TaskStatus.FAILED, error=str(e))
            except Exception as e:
                logger.error(f"Error in queue processing: {str(e)}")
                await asyncio.sleep(1)
app = FastAPI(title="Crawl4AI API")
# CORS configuration
# NOTE(review): allow_credentials=True combined with wildcard origins is
# disallowed by the CORS spec in browsers — confirm this combination is
# intentional for the deployment target.
origins = ["*"] # Allow all origins
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins, # List of origins that are allowed to make requests
    allow_credentials=True,
    allow_methods=["*"], # Allows all methods
    allow_headers=["*"], # Allows all headers
)
# API token security: bearer-token auth, enforced only when the
# CRAWL4AI_API_TOKEN environment variable is set (see verify_token).
security = HTTPBearer()
CRAWL4AI_API_TOKEN = os.getenv("CRAWL4AI_API_TOKEN")
async def verify_token(credentials: HTTPAuthorizationCredentials = Security(security)):
    """Validate the request's bearer token against CRAWL4AI_API_TOKEN.

    When no token is configured, the API is open and the credentials are
    passed through unchecked; otherwise a mismatch yields HTTP 401.
    """
    if CRAWL4AI_API_TOKEN and credentials.credentials != CRAWL4AI_API_TOKEN:
        raise HTTPException(status_code=401, detail="Invalid token")
    return credentials
def secure_endpoint():
    """Return a Depends(verify_token) dependency when an API token is
    configured; None leaves the endpoint open."""
    if CRAWL4AI_API_TOKEN:
        return Depends(verify_token)
    return None
# Serve the bundled mkdocs documentation site when it exists.
if os.path.exists(__location__ + "/site"):
    # Mount the site directory as a static directory.
    # NOTE(review): the existence check uses the absolute __location__
    # path but StaticFiles is given the relative "site" — these differ
    # when the process CWD is not the file's directory; confirm intended.
    app.mount("/mkdocs", StaticFiles(directory="site", html=True), name="mkdocs")
    site_templates = Jinja2Templates(directory=__location__ + "/site")
# Singleton service driving every crawl endpoint below.
crawler_service = CrawlerService()
@app.on_event("startup")
async def startup_event():
    """Start the crawler service's background loops when the app boots."""
    # NOTE(review): @app.on_event is deprecated in newer FastAPI releases
    # in favor of lifespan handlers — consider migrating.
    await crawler_service.start()
@app.on_event("shutdown")
async def shutdown_event():
    """Stop the processing loop and shut down pooled crawlers on exit."""
    await crawler_service.stop()
@app.get("/")
def read_root():
    """Serve the bundled docs site when present, otherwise a liveness message."""
    site_dir = __location__ + "/site"
    if os.path.exists(site_dir):
        return RedirectResponse(url="/mkdocs")
    return {"message": "Crawl4AI API service is running"}
@app.post("/crawl", dependencies=[Depends(verify_token)])
async def crawl(request: CrawlRequest) -> Dict[str, str]:
    """Enqueue a crawl request; returns the task id to poll via /task/{id}."""
    return {"task_id": await crawler_service.submit_task(request)}
@app.get("/task/{task_id}", dependencies=[Depends(verify_token)])
async def get_task_status(task_id: str):
    """Report a task's status, plus its result (or error) once finished."""
    task_info = crawler_service.task_manager.get_task(task_id)
    if task_info is None:
        raise HTTPException(status_code=404, detail="Task not found")

    response = {"status": task_info.status, "created_at": task_info.created_at}

    if task_info.status == TaskStatus.COMPLETED:
        # CrawlResult objects must be converted to plain dicts for JSON.
        outcome = task_info.result
        if isinstance(outcome, list):
            response["results"] = [r.dict() for r in outcome]
        else:
            response["result"] = outcome.dict()
    elif task_info.status == TaskStatus.FAILED:
        response["error"] = task_info.error
    return response
@app.post("/crawl_sync", dependencies=[Depends(verify_token)])
async def crawl_sync(request: CrawlRequest) -> Dict[str, Any]:
    """Submit a crawl and block until it finishes, polling once per second
    for up to 60 seconds; 500 on failure, 408 on timeout."""
    task_id = await crawler_service.submit_task(request)

    for _ in range(60):
        info = crawler_service.task_manager.get_task(task_id)
        if not info:
            raise HTTPException(status_code=404, detail="Task not found")
        if info.status == TaskStatus.COMPLETED:
            # Same response shape as the /task/{task_id} endpoint.
            payload = {"status": info.status}
            if isinstance(info.result, list):
                payload["results"] = [r.dict() for r in info.result]
            else:
                payload["result"] = info.result.dict()
            return payload
        if info.status == TaskStatus.FAILED:
            raise HTTPException(status_code=500, detail=info.error)
        await asyncio.sleep(1)

    # Task did not finish within the polling window.
    raise HTTPException(status_code=408, detail="Task timed out")
@app.post("/crawl_direct", dependencies=[Depends(verify_token)])
async def crawl_direct(request: CrawlRequest) -> Dict[str, Any]:
    """Run a crawl synchronously, bypassing the task queue entirely.

    Acquires a pooled crawler, performs the crawl, and returns the
    serialized result(s) directly; any error surfaces as HTTP 500.
    """
    try:
        crawler = await crawler_service.crawler_pool.acquire(**request.crawler_params)
        extraction_strategy = crawler_service._create_extraction_strategy(request.extraction_config)
        try:
            if isinstance(request.urls, list):
                results = await crawler.arun_many(
                    urls=[str(url) for url in request.urls],
                    # CONSISTENCY FIX: pass the request's threshold as the
                    # queued path does; it was silently ignored here.
                    word_count_threshold=request.word_count_threshold,
                    extraction_strategy=extraction_strategy,
                    js_code=request.js_code,
                    wait_for=request.wait_for,
                    css_selector=request.css_selector,
                    screenshot=request.screenshot,
                    magic=request.magic,
                    cache_mode=request.cache_mode,
                    session_id=request.session_id,
                    **request.extra,
                )
                return {"results": [result.dict() for result in results]}
            else:
                result = await crawler.arun(
                    url=str(request.urls),
                    word_count_threshold=request.word_count_threshold,
                    extraction_strategy=extraction_strategy,
                    js_code=request.js_code,
                    wait_for=request.wait_for,
                    css_selector=request.css_selector,
                    screenshot=request.screenshot,
                    magic=request.magic,
                    cache_mode=request.cache_mode,
                    session_id=request.session_id,
                    **request.extra,
                )
                return {"result": result.dict()}
        finally:
            # Return the crawler to the pool even if the crawl raised.
            await crawler_service.crawler_pool.release(crawler)
    except Exception as e:
        logger.error(f"Error in direct crawl: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/health")
async def health_check():
    """Unauthenticated liveness probe reporting resource headroom."""
    slots = await crawler_service.resource_monitor.get_available_slots()
    vm = psutil.virtual_memory()
    return {
        "status": "healthy",
        "available_slots": slots,
        "memory_usage": vm.percent,
        "cpu_usage": psutil.cpu_percent(),
    }
if __name__ == "__main__":
    import uvicorn
    # Serve on all interfaces at port 11235 (container-friendly default).
    uvicorn.run(app, host="0.0.0.0", port=11235)