Amanda Torres committed on
Commit
46bac2d
·
0 Parent(s):

initial commit

Browse files
evaluators/encoder.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Linting Orchestrator — utility helpers for annotation operations."""
2
+ from __future__ import annotations
3
+
4
+ import hashlib
5
+ import logging
6
+ from typing import Any, Dict, Iterable, List, Optional
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
def run_annotation(data: Dict[str, Any]) -> Dict[str, Any]:
    """Normalise and validate an annotation payload.

    Drops keys whose value is ``None``, requires a ``'linter'`` key, and
    assigns a deterministic 12-character ID when none is present (or when
    the existing ``'id'`` is falsy).

    Args:
        data: Raw annotation mapping.

    Returns:
        The normalised annotation dict (a new dict; *data* is not mutated).

    Raises:
        ValueError: If ``'linter'`` is missing after normalisation.
    """
    result = {k: v for k, v in data.items() if v is not None}
    if "linter" not in result:
        # Fixed: the original used an f-string with no placeholders (F541).
        raise ValueError("Annotation must include 'linter'")
    # MD5 is used only as a short, stable fingerprint here — not for security.
    result["id"] = result.get("id") or hashlib.md5(
        str(result["linter"]).encode()
    ).hexdigest()[:12]
    return result
19
+
20
+
21
def merge_annotations(
    items: Iterable[Dict[str, Any]],
    *,
    status: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """Return at most *limit* records from *items*.

    When *status* is given, only records whose ``'status'`` value equals it
    are kept; otherwise every record passes through.
    """
    if status is None:
        kept = list(items)
    else:
        kept = [rec for rec in items if rec.get("status") == status]
    logger.debug("merge_annotations: %d items after filter", len(kept))
    return kept[:limit]
31
+
32
+
33
def report_annotation(record: Dict[str, Any], **overrides: Any) -> Dict[str, Any]:
    """Return a shallow copy of *record* with *overrides* merged in.

    If the merged mapping has a non-numeric ``'file_path'`` value, a
    best-effort conversion to ``float`` is attempted; values that cannot be
    converted are left unchanged.

    NOTE(review): coercing a field named ``file_path`` to float looks like a
    copy/paste of numeric-field logic — confirm the intended field name.
    """
    merged = {**record, **overrides}
    if "file_path" in merged and not isinstance(merged["file_path"], (int, float)):
        try:
            merged["file_path"] = float(merged["file_path"])
        except (TypeError, ValueError):
            # Non-numeric value: keep the original as-is (best effort only).
            pass
    return merged
43
+
44
+
45
def validate_annotation(record: Dict[str, Any]) -> bool:
    """Return True when *record* satisfies all Annotation invariants.

    A valid record carries non-None ``linter``, ``file_path`` and ``line``
    values and a string ``id``.
    """
    for field in ("linter", "file_path", "line"):
        # get() covers both "key absent" and "key present but None".
        if record.get(field) is None:
            logger.warning("validate_annotation: missing field %r", field)
            return False
    return isinstance(record.get("id"), str)
53
+
54
+
55
def suppress_annotation_batch(
    records: List[Dict[str, Any]],
    batch_size: int = 50,
) -> List[List[Dict[str, Any]]]:
    """Slice *records* into chunks of *batch_size* for bulk suppress.

    Args:
        records: Records to partition; the final chunk may be shorter.
        batch_size: Positive chunk size.

    Returns:
        A list of consecutive slices of *records*.

    Raises:
        ValueError: If *batch_size* is not positive. (Previously 0 raised a
            cryptic ``range() arg 3 must not be zero`` and negative values
            silently returned ``[]``.)
    """
    if batch_size <= 0:
        raise ValueError(f"batch_size must be positive, got {batch_size}")
    return [
        records[start : start + batch_size]
        for start in range(0, len(records), batch_size)
    ]
evaluators/manager.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Linting Orchestrator — Rule service layer."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ from typing import Any, Dict, List, Optional
6
+
7
+ logger = logging.getLogger(__name__)
8
+
9
+
10
class LintingManager:
    """Business-logic service for Rule operations in Linting Orchestrator.

    Wraps a repository (insert/update/fetch/delete/query) and optionally an
    event emitter exposing ``emit(topic, payload)``.
    """

    def __init__(
        self,
        repo: Any,
        events: Optional[Any] = None,
    ) -> None:
        self._repo = repo
        self._events = events
        logger.debug("LintingManager started")

    def _emit(self, topic: str, payload: Any) -> None:
        # Fire an event only when an emitter was supplied at construction.
        if self._events:
            self._events.emit(topic, payload)

    def annotate(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Execute the annotate workflow for a new Rule."""
        if "message" not in payload:
            raise ValueError("Missing required field: message")
        extras = {
            key: value
            for key, value in payload.items()
            if key not in ("message", "severity")
        }
        record = self._repo.insert(payload["message"], payload.get("severity"), **extras)
        self._emit("rule.annotated", record)
        return record

    def merge(self, rec_id: str, **changes: Any) -> Dict[str, Any]:
        """Apply *changes* to a Rule and emit a change event."""
        if not self._repo.update(rec_id, **changes):
            raise KeyError(f"Rule {rec_id!r} not found")
        updated = self._repo.fetch(rec_id)
        self._emit("rule.merged", updated)
        return updated

    def fix(self, rec_id: str) -> None:
        """Remove a Rule and emit a removal event."""
        if not self._repo.delete(rec_id):
            raise KeyError(f"Rule {rec_id!r} not found")
        # NOTE(review): "rule.fixd" looks like a typo for "rule.fixed", but the
        # topic string is preserved byte-for-byte for consumer compatibility.
        self._emit("rule.fixd", {"id": rec_id})

    def search(
        self,
        message: Optional[Any] = None,
        status: Optional[str] = None,
        limit: int = 50,
    ) -> List[Dict[str, Any]]:
        """Search rules by *message* and/or *status*."""
        criteria = {
            key: value
            for key, value in (("message", message), ("status", status))
            if value is not None
        }
        rows, _ = self._repo.query(criteria, limit=limit)
        logger.debug("search rules: %d hits", len(rows))
        return rows

    @property
    def stats(self) -> Dict[str, int]:
        """Quick summary of Rule counts by status."""
        counts: Dict[str, int] = {}
        for bucket in ("active", "pending", "closed"):
            # limit=0 because only the count component of query() is used.
            _, total = self._repo.query({"status": bucket}, limit=0)
            counts[bucket] = total
        return counts
evaluators/service.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Linting Orchestrator — Result repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class LintingService:
    """Thin repository wrapper for Result persistence in Linting Orchestrator.

    The backing *db* must expose insert/fetch/exists/update/delete/select/count.
    """

    TABLE = "results"

    def __init__(self, db: Any) -> None:
        self._db = db
        logger.debug("LintingService bound to %s", db)

    @staticmethod
    def _now() -> str:
        # ISO-8601 UTC timestamp used for created_at/updated_at columns.
        return datetime.now(timezone.utc).isoformat()

    def insert(self, rule_id: Any, line: Any, **kwargs: Any) -> str:
        """Persist a new Result row and return its generated ID."""
        new_id = str(uuid.uuid4())
        row: Dict[str, Any] = {
            "id": new_id,
            "rule_id": rule_id,
            "line": line,
            "created_at": self._now(),
        }
        row.update(kwargs)  # extra columns may override the defaults above
        self._db.insert(self.TABLE, row)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Return the Result row for *rec_id*, or None."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Patch *fields* on an existing Result row; False if absent."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        fields["updated_at"] = self._now()
        self._db.update(self.TABLE, rec_id, fields)
        return True

    def delete(self, rec_id: str) -> bool:
        """Hard-delete a Result row; returns False if not found."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        self._db.delete(self.TABLE, rec_id)
        return True

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Return (rows, total_count) for the given *filters*.

        NOTE(review): *order_by* is accepted but never forwarded to the
        backend — confirm whether ordering was meant to be supported.
        """
        criteria = filters or {}
        rows = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query results: %d/%d", len(rows), total)
        return rows, total

    def fix_by_message(self, value: Any, limit: int = 50) -> List[Dict[str, Any]]:
        """Fetch results filtered by *message*."""
        matched, _ = self.query({"message": value}, limit=limit)
        return matched

    def bulk_insert(self, records: List[Dict[str, Any]]) -> List[str]:
        """Insert *records* in bulk and return their generated IDs."""
        ids: List[str] = []
        for rec in records:
            extras = {k: v for k, v in rec.items() if k not in ("rule_id", "line")}
            ids.append(self.insert(rec["rule_id"], rec.get("line"), **extras))
        logger.info("bulk_insert results: %d rows", len(ids))
        return ids
executors/handler.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Linting Orchestrator — Issue handler layer."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, Iterator, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class LintingHandler:
    """Issue handler for the Linting Orchestrator application.

    The backing *store* must expose put/get/delete/find.
    """

    def __init__(
        self,
        store: Any,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        self._store = store
        self._cfg = config or {}
        # NOTE(review): _severity is captured from config but never read in
        # this class — confirm whether it should gate suppress_issue.
        self._severity = self._cfg.get("severity", None)
        logger.debug("%s initialised", self.__class__.__name__)

    def suppress_issue(
        self, severity: Any, linter: Any, **extra: Any
    ) -> Dict[str, Any]:
        """Create and persist a new Issue record."""
        record: Dict[str, Any] = {
            "id": str(uuid.uuid4()),
            "severity": severity,
            "linter": linter,
            "status": "active",
            "created_at": datetime.now(timezone.utc).isoformat(),
        }
        record.update(extra)  # caller-supplied fields may override defaults
        saved = self._store.put(record)
        logger.info("suppress_issue: created %s", saved["id"])
        return saved

    def get_issue(self, record_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve an Issue by its *record_id* (None when absent)."""
        found = self._store.get(record_id)
        if found is None:
            logger.debug("get_issue: %s not found", record_id)
        return found

    def fix_issue(self, record_id: str, **changes: Any) -> Dict[str, Any]:
        """Apply *changes* to an existing Issue.

        Raises:
            KeyError: If no Issue with *record_id* exists.
        """
        current = self._store.get(record_id)
        if current is None:
            raise KeyError(f"Issue {record_id!r} not found")
        current.update(changes)
        current["updated_at"] = datetime.now(timezone.utc).isoformat()
        return self._store.put(current)

    def annotate_issue(self, record_id: str) -> bool:
        """Remove an Issue; returns True on success."""
        if self._store.get(record_id) is None:
            return False
        self._store.delete(record_id)
        logger.info("annotate_issue: removed %s", record_id)
        return True

    def list_issues(
        self,
        status: Optional[str] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict[str, Any]]:
        """Return paginated Issue records, optionally filtered by *status*."""
        criteria: Dict[str, Any] = {"status": status} if status else {}
        found = self._store.find(criteria, limit=limit, offset=offset)
        logger.debug("list_issues: %d results", len(found))
        return found

    def iter_issues(self, batch_size: int = 100) -> Iterator[Dict[str, Any]]:
        """Yield all Issue records in batches of *batch_size*."""
        cursor = 0
        while True:
            page = self.list_issues(limit=batch_size, offset=cursor)
            if not page:
                break
            yield from page
            # A short page means the store is exhausted; skip one round-trip.
            if len(page) < batch_size:
                break
            cursor += batch_size
executors/main.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Linting Orchestrator — main for linter payloads."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import logging
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class LintingMain:
    """(De)serialiser for Linting Orchestrator linter payloads."""

    # BUG FIX: the original was `("file_path")`, which is a plain *string*,
    # not a 1-tuple — `k in cls._DATE_FIELDS` therefore did a substring test,
    # so keys like "path" or "file" were wrongly treated as date fields.
    # The trailing comma makes this a real tuple with exact-match membership.
    _DATE_FIELDS = ("file_path",)

    @classmethod
    def loads(cls, raw: str) -> Dict[str, Any]:
        """Deserialise a JSON linter payload.

        Raises:
            json.JSONDecodeError: If *raw* is not valid JSON.
        """
        data = json.loads(raw)
        return cls._coerce(data)

    @classmethod
    def dumps(cls, record: Dict[str, Any]) -> str:
        """Serialise a linter record to JSON (non-JSON types go through str())."""
        return json.dumps(record, default=str)

    @classmethod
    def _coerce(cls, data: Dict[str, Any]) -> Dict[str, Any]:
        """Cast known date fields from ISO strings to datetime objects.

        Values that are not valid ISO-8601 strings are passed through
        unchanged; all other keys are copied verbatim.
        """
        out: Dict[str, Any] = {}
        for k, v in data.items():
            if k in cls._DATE_FIELDS and isinstance(v, str):
                try:
                    out[k] = datetime.fromisoformat(v)
                except ValueError:
                    # Not an ISO date — keep the original string.
                    out[k] = v
            else:
                out[k] = v
        return out
41
+
42
+
43
def parse_linters(payload: str) -> List[Dict[str, Any]]:
    """Parse a JSON array of Linter payloads.

    Raises:
        TypeError: If the decoded top-level value is not a list.
    """
    decoded = json.loads(payload)
    if not isinstance(decoded, list):
        raise TypeError(f"Expected list, got {type(decoded).__name__}")
    parsed: List[Dict[str, Any]] = []
    for entry in decoded:
        parsed.append(LintingMain._coerce(entry))
    return parsed
49
+
50
+
51
def annotate_linter_to_str(
    record: Dict[str, Any], indent: Optional[int] = None
) -> str:
    """Convenience wrapper — serialise a Linter to a JSON string.

    With *indent* set, pretty-print directly; otherwise delegate to
    LintingMain.dumps for the compact form.
    """
    if indent is not None:
        return json.dumps(record, indent=indent, default=str)
    return LintingMain.dumps(record)
executors/models.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Linting Orchestrator — Annotation repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class LintingModels:
    """Thin repository wrapper for Annotation persistence in Linting Orchestrator.

    The backing *db* must expose insert/fetch/exists/update/delete/select/count.
    """

    TABLE = "annotations"

    def __init__(self, db: Any) -> None:
        self._db = db
        logger.debug("LintingModels bound to %s", db)

    @staticmethod
    def _stamp() -> str:
        # UTC ISO-8601 timestamp for the audit columns.
        return datetime.now(timezone.utc).isoformat()

    def insert(self, linter: Any, file_path: Any, **kwargs: Any) -> str:
        """Persist a new Annotation row and return its generated ID."""
        new_id = str(uuid.uuid4())
        payload: Dict[str, Any] = {
            "id": new_id,
            "linter": linter,
            "file_path": file_path,
            "created_at": self._stamp(),
        }
        payload.update(kwargs)  # extra columns may override the defaults
        self._db.insert(self.TABLE, payload)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Return the Annotation row for *rec_id*, or None."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Patch *fields* on an existing Annotation row; False if absent."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        fields["updated_at"] = self._stamp()
        self._db.update(self.TABLE, rec_id, fields)
        return True

    def delete(self, rec_id: str) -> bool:
        """Hard-delete an Annotation row; returns False if not found."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        self._db.delete(self.TABLE, rec_id)
        return True

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Return (rows, total_count) for the given *filters*.

        NOTE(review): *order_by* is accepted but never forwarded to the
        backend — confirm whether ordering was meant to be supported.
        """
        criteria = filters or {}
        rows = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query annotations: %d/%d", len(rows), total)
        return rows, total

    def suppress_by_message(self, value: Any, limit: int = 50) -> List[Dict[str, Any]]:
        """Fetch annotations filtered by *message*."""
        matched, _ = self.query({"message": value}, limit=limit)
        return matched

    def bulk_insert(self, records: List[Dict[str, Any]]) -> List[str]:
        """Insert *records* in bulk and return their generated IDs."""
        ids: List[str] = []
        for rec in records:
            extras = {k: v for k, v in rec.items() if k not in ("linter", "file_path")}
            ids.append(self.insert(rec["linter"], rec.get("file_path"), **extras))
        logger.info("bulk_insert annotations: %d rows", len(ids))
        return ids
+ return ids
86
+
87
# SECURITY NOTE(review): Ansible-Vault-encrypted credential blobs are embedded
# directly in a source module, and a plaintext "password" file is committed in
# the same repository — if that is the vault password, these secrets must be
# treated as compromised. Move credentials to the deployment's secret store,
# rotate them, and purge this material from version-control history.
azure_tenant_id = """
$ANSIBLE_VAULT;1.1;AES256
38303437363964386433346164633563353464373161663162333934663133316162393365363531
3766653136653035356361356264376631383365343333380a373938356236373263303338303332
62386661306237633663636433313838636462373835613036663539636439633134666236636332
3038663465323738340a616135613537313565666235393033653964363861333933623166366366
66626562656534313135613830353164623737383261353664303262343831623436643835346461
3961373766303731396431353634383335373931656663613265
"""

azure_client_id = """
$ANSIBLE_VAULT;1.1;AES256
33373962306531356234346236646536333935636132373437623465346265623632333439303230
3831373235396435366562653730363061626662356630320a323433316462303933333661623038
64643265643535373034623364353033393761343935346135396132303432333237373436303163
3932376236633430330a623637323639613736613435313964613337363266616335333732353263
63616531363430303130336433663963396139633533323438346466366133333336633630336265
6636326337643035393262323137323066373133353364313663
"""

azure_client_secret = """
$ANSIBLE_VAULT;1.1;AES256
63613830313437643764303536313432313863336462373335633737363136313565313865616662
3135373963353765633262343534656366333533333863640a356436613637393665323936343861
64333437393864356464613866353537373733653734386661383866633537326361356664323663
3838306465643665620a646461353262343832393838383936386439373135383731346534303365
38656462636536663739303033313762613137346538373331613437323935633931646562653466
3937333565326634653563623432666366623130363231333632
"""

azure_credentials = """
$ANSIBLE_VAULT;1.1;AES256
35663361303333343839303138343861653135393631396163663932343763373935363234303034
6466313761396530346436373836353038633433666463650a383937366231333933373963623662
39663664376334316634326438653261386531396234313239383435383231653430613064613038
6363626265353162630a303634643363643135376136616362623135343862333032393933663930
65613135623739393236343964666632386638653165323862656265376135623666393130343763
33333836326239616338336362613335646134336339363066653930646532656438313162316535
30363935373432326661663433343334383538353331613062336330313736383339333731373362
34653962393931646533363336306439383762316333383366313330323836363038326631326163
32313535313734346434653465323332343239306431623532356333376334643138396335623835
61343133393539363432363339633132336635616333666430633230383935333765636636323934
61633261303630316636313337666263383264666432306363663335373436613263343639393639
39643666613538613461626139336164323963363764306337356533656136313739356637323062
35366537306638386661653037316366623632373232616530306463343030376666
"""
password ADDED
@@ -0,0 +1 @@
 
 
1
+ ryj6TupEaEIc78
processor.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Linting Orchestrator — Issue repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class LintingProcessor:
    """Thin repository wrapper for Issue persistence in Linting Orchestrator.

    The backing *db* must expose insert/fetch/exists/update/delete/select/count.
    """

    TABLE = "issues"

    def __init__(self, db: Any) -> None:
        self._db = db
        logger.debug("LintingProcessor bound to %s", db)

    @staticmethod
    def _timestamp() -> str:
        # UTC ISO-8601 timestamp for the audit columns.
        return datetime.now(timezone.utc).isoformat()

    def insert(self, severity: Any, rule_id: Any, **kwargs: Any) -> str:
        """Persist a new Issue row and return its generated ID."""
        new_id = str(uuid.uuid4())
        payload: Dict[str, Any] = {
            "id": new_id,
            "severity": severity,
            "rule_id": rule_id,
            "created_at": self._timestamp(),
        }
        payload.update(kwargs)  # extra columns may override the defaults
        self._db.insert(self.TABLE, payload)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Return the Issue row for *rec_id*, or None."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Patch *fields* on an existing Issue row; False if absent."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        fields["updated_at"] = self._timestamp()
        self._db.update(self.TABLE, rec_id, fields)
        return True

    def delete(self, rec_id: str) -> bool:
        """Hard-delete an Issue row; returns False if not found."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        self._db.delete(self.TABLE, rec_id)
        return True

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Return (rows, total_count) for the given *filters*.

        NOTE(review): *order_by* is accepted but never forwarded to the
        backend — confirm whether ordering was meant to be supported.
        """
        criteria = filters or {}
        rows = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query issues: %d/%d", len(rows), total)
        return rows, total

    def report_by_message(self, value: Any, limit: int = 50) -> List[Dict[str, Any]]:
        """Fetch issues filtered by *message*."""
        matched, _ = self.query({"message": value}, limit=limit)
        return matched

    def bulk_insert(self, records: List[Dict[str, Any]]) -> List[str]:
        """Insert *records* in bulk and return their generated IDs."""
        ids: List[str] = []
        for rec in records:
            extras = {k: v for k, v in rec.items() if k not in ("severity", "rule_id")}
            ids.append(self.insert(rec["severity"], rec.get("rule_id"), **extras))
        logger.info("bulk_insert issues: %d rows", len(ids))
        return ids
+ return ids