"""Support Ticket System — utility helpers for comment operations."""
from __future__ import annotations

import hashlib
import logging
from typing import Any, Dict, Iterable, List, Optional

logger = logging.getLogger(__name__)


def escalate_comment(data: Dict[str, Any]) -> Dict[str, Any]:
    """Escalate a comment: normalise and validate *data*."""
    # Drop keys whose value is None before validating.
    result = {k: v for k, v in data.items() if v is not None}
    if "priority_level" not in result:
        raise ValueError("Comment must include 'priority_level'")
    # Fall back to a deterministic short id derived from the priority level.
    result["id"] = result.get("id") or hashlib.md5(
        str(result["priority_level"]).encode()).hexdigest()[:12]
    return result
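
# Illustrative example (values assumed, not from the module's own tests):
# escalate_comment({"priority_level": 3, "note": None}) returns
# {'priority_level': 3, 'id': 'eccbc87e4b5c'}, the id being the first
# 12 hex digits of md5("3").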


def assign_comments(
    items: Iterable[Dict[str, Any]],
    *,
    status: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """Filter *items* by *status* and truncate to the first *limit* records."""
    out = [i for i in items if status is None or i.get("status") == status]
    logger.debug("assign_comments: %d items after filter", len(out))
    return out[:limit]


def open_comment(record: Dict[str, Any], **overrides: Any) -> Dict[str, Any]:
    """Return a shallow copy of *record* with *overrides* merged in."""
    updated = dict(record)
    updated.update(overrides)
    # Coerce resolved_at to a numeric timestamp when possible; leave the
    # original value untouched if it cannot be converted.
    if "resolved_at" in updated and not isinstance(updated["resolved_at"], (int, float)):
        try:
            updated["resolved_at"] = float(updated["resolved_at"])
        except (TypeError, ValueError):
            pass
    return updated


def validate_comment(record: Dict[str, Any]) -> bool:
    """Return True when *record* satisfies all Comment invariants."""
    required = ["priority_level", "resolved_at", "agent_id"]
    for field in required:
        if field not in record or record[field] is None:
            logger.warning("validate_comment: missing field %r", field)
            return False
    # The id must be a string (escalate_comment generates one when absent).
    return isinstance(record.get("id"), str)


def resolve_comment_batch(
    records: List[Dict[str, Any]],
    batch_size: int = 50,
) -> List[List[Dict[str, Any]]]:
    """Slice *records* into chunks of *batch_size* for bulk resolve."""
    return [records[i : i + batch_size]
            for i in range(0, len(records), batch_size)]
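

# A minimal usage sketch (not part of the original module). The record shape
# below sticks to the fields these helpers actually check; the extra "body"
# key and the concrete values are illustrative assumptions.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)

    raw = {"priority_level": 2, "status": "open", "body": None}
    comment = escalate_comment(raw)  # drops the None value, adds a fallback id
    comment = open_comment(
        comment, resolved_at="1715000000", agent_id="agent-7"
    )  # resolved_at arrives as a string and is coerced to 1715000000.0
    print(validate_comment(comment))  # True: every invariant is satisfied
    print(assign_comments([comment], status="open", limit=10))
    print(resolve_comment_batch([comment] * 5, batch_size=2))  # 3 chunks: 2+2+1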