from datetime import datetime, timezone
from enum import Enum
from typing import Any
from pydantic import BaseModel, Field, HttpUrl
class Severity(str, Enum):
    """Severity level assigned to an audit finding.

    Inherits from ``str`` so members compare equal to their string values
    and serialize as plain strings in pydantic models / JSON.
    """

    critical = "CRITICAL"
    high = "HIGH"
    medium = "MEDIUM"
    low = "LOW"
class AuditRequest(BaseModel):
    """Request payload for starting an audit of a repository."""

    # Validated URL of the repository to audit (pydantic rejects malformed URLs).
    repo_url: HttpUrl
class SourceFile(BaseModel):
    """A single source file discovered while scanning a repository."""

    # Path relative to the repository root (presumably — confirm against the scanner).
    path: str
    # Full filesystem path to the file on the local machine.
    absolute_path: str
    # File size in bytes.
    size_bytes: int
    # Detected language, or None when detection failed / was not attempted.
    language: str | None = None
class CodeChunk(BaseModel):
    """A contiguous slice of a source file, e.g. as fed to an analysis agent."""

    # Path of the file this chunk was taken from.
    file_path: str
    # Detected language of the file, if known.
    language: str | None = None
    # First and last line of the chunk. NOTE(review): whether these are
    # 1-based and inclusive is not enforced here — confirm against the chunker.
    line_start: int
    line_end: int
    # Raw text content of the chunk.
    content: str
class Finding(BaseModel):
    """A single issue reported by an audit agent."""

    # Short human-readable summary of the issue.
    title: str
    severity: Severity
    # File the finding applies to.
    file_path: str
    # 1-based line span (ge=1 enforced per field). NOTE(review): there is no
    # cross-field check that line_end >= line_start — confirm producers uphold it.
    line_start: int = Field(ge=1)
    line_end: int = Field(ge=1)
    # What the problem is.
    description: str
    # Impact / rationale for reporting it.
    why_it_matters: str
    # Recommended remediation.
    suggested_fix: str
    # Name of the agent that produced this finding.
    agent_source: str
class AgentOutput(BaseModel):
    """Everything a single agent produced during one audit run."""

    agent_name: str
    # Findings reported by this agent; empty list when it found nothing.
    findings: list[Finding] = Field(default_factory=list)
    # Free-form agent metadata (timings, model info, etc. — schema not fixed here).
    metadata: dict[str, Any] = Field(default_factory=dict)
class RepoScanResult(BaseModel):
    """Result of cloning/scanning a repository for auditable files."""

    repo_url: str
    # Filesystem path where the repository was checked out locally.
    local_path: str
    # Files selected for auditing.
    files: list[SourceFile]
    # Count of files excluded from the scan (filters, size limits, etc.).
    skipped_files: int = 0
    # Non-fatal problems encountered during the scan.
    warnings: list[str] = Field(default_factory=list)
class AuditReport(BaseModel):
    """Final aggregated report for one audit of a repository."""

    repo_url: str
    # How many files were analyzed vs. excluded.
    scanned_file_count: int
    skipped_file_count: int
    # All findings included in the report (across every agent).
    findings: list[Finding]
    # Count of findings per severity level.
    severity_summary: dict[Severity, int]
    # total = displayed + hidden (presumably — not enforced here; confirm in the
    # report builder).
    total_findings_count: int = 0
    displayed_findings_count: int = 0
    hidden_findings_count: int = 0
    # Findings per agent name.
    agent_finding_counts: dict[str, int] = Field(default_factory=dict)
    # Timezone-aware UTC timestamp set when the report object is created.
    generated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    # Names of the agents that were executed for this audit.
    agents_run: list[str]
    # Non-fatal problems encountered during the audit.
    warnings: list[str] = Field(default_factory=list)
class AuditProgress(BaseModel):
    """A progress update emitted while an audit is running."""

    # Human-readable status message.
    message: str
    # Identifier of the current pipeline stage (free-form string here;
    # valid values are defined by the emitter).
    stage: str