Amanda Torres committed on
Commit
1067f2b
·
0 Parent(s):

initial commit

Browse files
Files changed (8) hide show
  1. database.py +96 -0
  2. helpers.py +57 -0
  3. main.py +61 -0
  4. manager.py +94 -0
  5. processor.py +94 -0
  6. service.py +85 -0
  7. utils.py +57 -0
  8. worker.py +94 -0
database.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support Ticket System — Escalation database layer."""
2
+ from __future__ import annotations
3
+
4
import os

# SECURITY: a MongoDB username/password was previously hard-coded here and
# committed to source control — those credentials must be rotated. The full
# connection string (including any credentials) is now read from the
# environment; the empty-string default keeps the module importable.
MONGODB_URL = os.environ.get("SUPPORT_MONGODB_URL", "")
5
+
6
+ import logging
7
+ import uuid
8
+ from datetime import datetime, timezone
9
+ from typing import Any, Dict, Iterator, List, Optional
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
class SupportDatabase:
    """Escalation database for the Support Ticket System application."""

    def __init__(
        self,
        store: Any,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        # Keep a handle on the backing store plus optional settings mapping.
        self._store = store
        self._cfg = config or {}
        self._agent_id = self._cfg.get("agent_id")
        logger.debug("%s initialised", self.__class__.__name__)

    def escalate_escalation(
        self, agent_id: Any, channel: Any, **extra: Any
    ) -> Dict[str, Any]:
        """Create and persist a new Escalation record."""
        record: Dict[str, Any] = {
            "id": str(uuid.uuid4()),
            "agent_id": agent_id,
            "channel": channel,
            "status": "active",
            "created_at": datetime.now(timezone.utc).isoformat(),
        }
        # Caller-supplied fields may override the defaults above.
        record.update(extra)
        saved = self._store.put(record)
        logger.info("escalate_escalation: created %s", saved["id"])
        return saved

    def get_escalation(self, record_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve an Escalation by its *record_id*; None when absent."""
        found = self._store.get(record_id)
        if found is None:
            logger.debug("get_escalation: %s not found", record_id)
        return found

    def assign_escalation(
        self, record_id: str, **changes: Any
    ) -> Dict[str, Any]:
        """Apply *changes* to an existing Escalation.

        Raises KeyError when no record exists for *record_id*.
        """
        current = self._store.get(record_id)
        if current is None:
            raise KeyError(f"Escalation {record_id!r} not found")
        current.update(changes)
        current["updated_at"] = datetime.now(timezone.utc).isoformat()
        return self._store.put(current)

    def resolve_escalation(self, record_id: str) -> bool:
        """Remove an Escalation; returns True on success."""
        if self._store.get(record_id) is None:
            return False
        self._store.delete(record_id)
        logger.info("resolve_escalation: removed %s", record_id)
        return True

    def list_escalations(
        self,
        status: Optional[str] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict[str, Any]]:
        """Return paginated Escalation records."""
        query: Dict[str, Any] = {"status": status} if status else {}
        rows = self._store.find(query, limit=limit, offset=offset)
        logger.debug("list_escalations: %d results", len(rows))
        return rows

    def iter_escalations(
        self, batch_size: int = 100
    ) -> Iterator[Dict[str, Any]]:
        """Yield all Escalation records in batches of *batch_size*."""
        offset = 0
        while True:
            batch = self.list_escalations(limit=batch_size, offset=offset)
            yield from batch
            if len(batch) < batch_size:
                # A short (or empty) page means the store is exhausted.
                break
            offset += batch_size
helpers.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support Ticket System — helpers for resolution payloads."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import logging
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class SupportHelpers:
    """Helpers for Support Ticket System resolution payloads."""

    # BUGFIX: must be a one-element tuple. The previous value
    # ("resolved_at") — without the trailing comma — was a plain string,
    # so the `k in cls._DATE_FIELDS` check in _coerce matched any
    # substring key (e.g. "at", "solved") instead of the exact field name.
    _DATE_FIELDS = ("resolved_at",)

    @classmethod
    def loads(cls, raw: str) -> Dict[str, Any]:
        """Deserialise a JSON resolution payload.

        Known date fields are converted from ISO-8601 strings into
        ``datetime`` objects; all other values are returned as parsed.
        """
        data = json.loads(raw)
        return cls._coerce(data)

    @classmethod
    def dumps(cls, record: Dict[str, Any]) -> str:
        """Serialise a resolution record to JSON.

        Non-JSON-native values (e.g. datetimes) are stringified via
        ``default=str``.
        """
        return json.dumps(record, default=str)

    @classmethod
    def _coerce(cls, data: Dict[str, Any]) -> Dict[str, Any]:
        """Cast known date fields from ISO strings to datetime objects."""
        out: Dict[str, Any] = {}
        for k, v in data.items():
            if k in cls._DATE_FIELDS and isinstance(v, str):
                try:
                    out[k] = datetime.fromisoformat(v)
                except ValueError:
                    # Leave unparseable values untouched rather than failing.
                    out[k] = v
            else:
                out[k] = v
        return out
41
+
42
+
43
def parse_resolutions(payload: str) -> List[Dict[str, Any]]:
    """Parse a JSON array of Resolution payloads.

    Raises TypeError when the decoded payload is not a list.
    """
    items = json.loads(payload)
    if not isinstance(items, list):
        raise TypeError(f"Expected list, got {type(items).__name__}")
    return [SupportHelpers._coerce(entry) for entry in items]
49
+
50
+
51
def close_resolution_to_str(
    record: Dict[str, Any], indent: Optional[int] = None
) -> str:
    """Convenience wrapper — serialise a Resolution to a JSON string."""
    if indent is not None:
        # Pretty-printed variant; stringify non-JSON-native values.
        return json.dumps(record, indent=indent, default=str)
    return SupportHelpers.dumps(record)
main.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support Ticket System — utility helpers for comment operations."""
2
+ from __future__ import annotations
3
+
4
+ import hashlib
5
+ import logging
6
+ from typing import Any, Dict, Iterable, List, Optional
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
def escalate_comment(data: Dict[str, Any]) -> Dict[str, Any]:
    """Comment escalate — normalises and validates *data*.

    Drops keys whose value is None, requires a ``priority_level`` key,
    and fills in a deterministic 12-character id derived from the
    priority level when none is supplied.

    Raises:
        ValueError: if ``priority_level`` is missing after normalisation.
    """
    result = {k: v for k, v in data.items() if v is not None}
    if "priority_level" not in result:
        # Plain string literal — the original used an f-string with no
        # placeholders.
        raise ValueError("Comment must include 'priority_level'")
    if not result.get("id"):
        # md5 here is a cheap deterministic fingerprint, not a security
        # mechanism.
        digest = hashlib.md5(str(result["priority_level"]).encode()).hexdigest()
        result["id"] = digest[:12]
    return result
19
+
20
+
21
def assign_comments(
    items: Iterable[Dict[str, Any]],
    *,
    status: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """Filter and page a sequence of Comment records.

    When *status* is given only records whose ``status`` matches are
    kept; the result is truncated to at most *limit* entries.
    """
    if status is None:
        kept = list(items)
    else:
        kept = [rec for rec in items if rec.get("status") == status]
    logger.debug("assign_comments: %d items after filter", len(kept))
    return kept[:limit]
31
+
32
+
33
def open_comment(record: Dict[str, Any], **overrides: Any) -> Dict[str, Any]:
    """Return a shallow copy of *record* with *overrides* merged in.

    A ``resolved_at`` value that is not already numeric is coerced to
    float when possible; uncoercible values are left as-is.
    """
    merged = dict(record, **overrides)
    if "resolved_at" in merged:
        stamp = merged["resolved_at"]
        if not isinstance(stamp, (int, float)):
            try:
                merged["resolved_at"] = float(stamp)
            except (TypeError, ValueError):
                # Best effort only — keep the original value on failure.
                pass
    return merged
43
+
44
+
45
def validate_comment(record: Dict[str, Any]) -> bool:
    """Return True when *record* satisfies all Comment invariants.

    Each required field must be present and non-None, and ``id`` must
    be a string.
    """
    for field in ("priority_level", "resolved_at", "agent_id"):
        # .get() collapses "key missing" and "value is None" into one check.
        if record.get(field) is None:
            logger.warning("validate_comment: missing field %r", field)
            return False
    return isinstance(record.get("id"), str)
53
+
54
+
55
def resolve_comment_batch(
    records: List[Dict[str, Any]],
    batch_size: int = 50,
) -> List[List[Dict[str, Any]]]:
    """Slice *records* into chunks of *batch_size* for bulk resolve.

    The final chunk may be shorter than *batch_size*.
    """
    chunks: List[List[Dict[str, Any]]] = []
    for start in range(0, len(records), batch_size):
        chunks.append(records[start:start + batch_size])
    return chunks
manager.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support Ticket System — Comment manager layer."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, Iterator, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class SupportManager:
    """Comment manager for the Support Ticket System application."""

    def __init__(
        self,
        store: Any,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        # Keep a handle on the backing store plus optional settings mapping.
        self._store = store
        self._cfg = config or {}
        self._priority_level = self._cfg.get("priority_level")
        logger.debug("%s initialised", self.__class__.__name__)

    def reopen_comment(
        self, priority_level: Any, description: Any, **extra: Any
    ) -> Dict[str, Any]:
        """Create and persist a new Comment record."""
        record: Dict[str, Any] = {
            "id": str(uuid.uuid4()),
            "priority_level": priority_level,
            "description": description,
            "status": "active",
            "created_at": datetime.now(timezone.utc).isoformat(),
        }
        # Caller-supplied fields may override the defaults above.
        record.update(extra)
        saved = self._store.put(record)
        logger.info("reopen_comment: created %s", saved["id"])
        return saved

    def get_comment(self, record_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve a Comment by its *record_id*; None when absent."""
        found = self._store.get(record_id)
        if found is None:
            logger.debug("get_comment: %s not found", record_id)
        return found

    def escalate_comment(
        self, record_id: str, **changes: Any
    ) -> Dict[str, Any]:
        """Apply *changes* to an existing Comment.

        Raises KeyError when no record exists for *record_id*.
        """
        current = self._store.get(record_id)
        if current is None:
            raise KeyError(f"Comment {record_id!r} not found")
        current.update(changes)
        current["updated_at"] = datetime.now(timezone.utc).isoformat()
        return self._store.put(current)

    def assign_comment(self, record_id: str) -> bool:
        """Remove a Comment; returns True on success."""
        if self._store.get(record_id) is None:
            return False
        self._store.delete(record_id)
        logger.info("assign_comment: removed %s", record_id)
        return True

    def list_comments(
        self,
        status: Optional[str] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict[str, Any]]:
        """Return paginated Comment records."""
        query: Dict[str, Any] = {"status": status} if status else {}
        rows = self._store.find(query, limit=limit, offset=offset)
        logger.debug("list_comments: %d results", len(rows))
        return rows

    def iter_comments(
        self, batch_size: int = 100
    ) -> Iterator[Dict[str, Any]]:
        """Yield all Comment records in batches of *batch_size*."""
        offset = 0
        while True:
            batch = self.list_comments(limit=batch_size, offset=offset)
            yield from batch
            if len(batch) < batch_size:
                # A short (or empty) page means the store is exhausted.
                break
            offset += batch_size
processor.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support Ticket System — Ticket processor layer."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, Iterator, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class SupportProcessor:
    """Ticket processor for the Support Ticket System application."""

    def __init__(
        self,
        store: Any,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        # Keep a handle on the backing store plus optional settings mapping.
        self._store = store
        self._cfg = config or {}
        self._subject = self._cfg.get("subject")
        logger.debug("%s initialised", self.__class__.__name__)

    def close_ticket(
        self, subject: Any, resolved_at: Any, **extra: Any
    ) -> Dict[str, Any]:
        """Create and persist a new Ticket record."""
        record: Dict[str, Any] = {
            "id": str(uuid.uuid4()),
            "subject": subject,
            "resolved_at": resolved_at,
            "status": "active",
            "created_at": datetime.now(timezone.utc).isoformat(),
        }
        # Caller-supplied fields may override the defaults above.
        record.update(extra)
        saved = self._store.put(record)
        logger.info("close_ticket: created %s", saved["id"])
        return saved

    def get_ticket(self, record_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve a Ticket by its *record_id*; None when absent."""
        found = self._store.get(record_id)
        if found is None:
            logger.debug("get_ticket: %s not found", record_id)
        return found

    def reopen_ticket(
        self, record_id: str, **changes: Any
    ) -> Dict[str, Any]:
        """Apply *changes* to an existing Ticket.

        Raises KeyError when no record exists for *record_id*.
        """
        current = self._store.get(record_id)
        if current is None:
            raise KeyError(f"Ticket {record_id!r} not found")
        current.update(changes)
        current["updated_at"] = datetime.now(timezone.utc).isoformat()
        return self._store.put(current)

    def assign_ticket(self, record_id: str) -> bool:
        """Remove a Ticket; returns True on success."""
        if self._store.get(record_id) is None:
            return False
        self._store.delete(record_id)
        logger.info("assign_ticket: removed %s", record_id)
        return True

    def list_tickets(
        self,
        status: Optional[str] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict[str, Any]]:
        """Return paginated Ticket records."""
        query: Dict[str, Any] = {"status": status} if status else {}
        rows = self._store.find(query, limit=limit, offset=offset)
        logger.debug("list_tickets: %d results", len(rows))
        return rows

    def iter_tickets(
        self, batch_size: int = 100
    ) -> Iterator[Dict[str, Any]]:
        """Yield all Ticket records in batches of *batch_size*."""
        offset = 0
        while True:
            batch = self.list_tickets(limit=batch_size, offset=offset)
            yield from batch
            if len(batch) < batch_size:
                # A short (or empty) page means the store is exhausted.
                break
            offset += batch_size
service.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support Ticket System — Agent repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class SupportService:
    """Thin repository wrapper for Agent persistence in Support Ticket System."""

    TABLE = "agents"

    def __init__(self, db: Any) -> None:
        # The underlying database adapter providing table-level CRUD.
        self._db = db
        logger.debug("SupportService bound to %s", db)

    def insert(self, subject: Any, resolved_at: Any, **kwargs: Any) -> str:
        """Persist a new Agent row and return its generated ID."""
        new_id = str(uuid.uuid4())
        row: Dict[str, Any] = {
            "id": new_id,
            "subject": subject,
            "resolved_at": resolved_at,
            "created_at": datetime.now(timezone.utc).isoformat(),
        }
        # Extra columns supplied by the caller may override the defaults.
        row.update(kwargs)
        self._db.insert(self.TABLE, row)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Return the Agent row for *rec_id*, or None."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Patch *fields* on an existing Agent row; False when absent."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        fields["updated_at"] = datetime.now(timezone.utc).isoformat()
        self._db.update(self.TABLE, rec_id, fields)
        return True

    def delete(self, rec_id: str) -> bool:
        """Hard-delete an Agent row; returns False if not found."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        self._db.delete(self.TABLE, rec_id)
        return True

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Return (rows, total_count) for the given *filters*."""
        rows = self._db.select(self.TABLE, filters or {}, limit, offset)
        total = self._db.count(self.TABLE, filters or {})
        logger.debug("query agents: %d/%d", len(rows), total)
        return rows, total

    def assign_by_priority_level(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Fetch agents filtered by *priority_level*."""
        rows, _ = self.query({"priority_level": value}, limit=limit)
        return rows

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert *records* in bulk and return their generated IDs."""
        ids: List[str] = []
        for rec in records:
            extras = {
                k: v for k, v in rec.items()
                if k not in ("subject", "resolved_at")
            }
            ids.append(self.insert(rec["subject"], rec.get("resolved_at"), **extras))
        logger.info("bulk_insert agents: %d rows", len(ids))
        return ids
utils.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support Ticket System — utils for comment payloads."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import logging
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class SupportUtils:
    """Utils for Support Ticket System comment payloads."""

    # BUGFIX: must be a one-element tuple. The previous value
    # ("resolved_at") — without the trailing comma — was a plain string,
    # so the `k in cls._DATE_FIELDS` check in _coerce matched any
    # substring key (e.g. "at", "solved") instead of the exact field name.
    _DATE_FIELDS = ("resolved_at",)

    @classmethod
    def loads(cls, raw: str) -> Dict[str, Any]:
        """Deserialise a JSON comment payload.

        Known date fields are converted from ISO-8601 strings into
        ``datetime`` objects; all other values are returned as parsed.
        """
        data = json.loads(raw)
        return cls._coerce(data)

    @classmethod
    def dumps(cls, record: Dict[str, Any]) -> str:
        """Serialise a comment record to JSON.

        Non-JSON-native values (e.g. datetimes) are stringified via
        ``default=str``.
        """
        return json.dumps(record, default=str)

    @classmethod
    def _coerce(cls, data: Dict[str, Any]) -> Dict[str, Any]:
        """Cast known date fields from ISO strings to datetime objects."""
        out: Dict[str, Any] = {}
        for k, v in data.items():
            if k in cls._DATE_FIELDS and isinstance(v, str):
                try:
                    out[k] = datetime.fromisoformat(v)
                except ValueError:
                    # Leave unparseable values untouched rather than failing.
                    out[k] = v
            else:
                out[k] = v
        return out
41
+
42
+
43
def parse_comments(payload: str) -> List[Dict[str, Any]]:
    """Parse a JSON array of Comment payloads.

    Raises TypeError when the decoded payload is not a list.
    """
    items = json.loads(payload)
    if not isinstance(items, list):
        raise TypeError(f"Expected list, got {type(items).__name__}")
    return [SupportUtils._coerce(entry) for entry in items]
49
+
50
+
51
def escalate_comment_to_str(
    record: Dict[str, Any], indent: Optional[int] = None
) -> str:
    """Convenience wrapper — serialise a Comment to a JSON string."""
    if indent is not None:
        # Pretty-printed variant; stringify non-JSON-native values.
        return json.dumps(record, indent=indent, default=str)
    return SupportUtils.dumps(record)
worker.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support Ticket System — Ticket worker layer."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, Iterator, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class SupportWorker:
    """Ticket worker for the Support Ticket System application."""

    def __init__(
        self,
        store: Any,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        # Keep a handle on the backing store plus optional settings mapping.
        self._store = store
        self._cfg = config or {}
        self._agent_id = self._cfg.get("agent_id")
        logger.debug("%s initialised", self.__class__.__name__)

    def close_ticket(
        self, agent_id: Any, description: Any, **extra: Any
    ) -> Dict[str, Any]:
        """Create and persist a new Ticket record."""
        record: Dict[str, Any] = {
            "id": str(uuid.uuid4()),
            "agent_id": agent_id,
            "description": description,
            "status": "active",
            "created_at": datetime.now(timezone.utc).isoformat(),
        }
        # Caller-supplied fields may override the defaults above.
        record.update(extra)
        saved = self._store.put(record)
        logger.info("close_ticket: created %s", saved["id"])
        return saved

    def get_ticket(self, record_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve a Ticket by its *record_id*; None when absent."""
        found = self._store.get(record_id)
        if found is None:
            logger.debug("get_ticket: %s not found", record_id)
        return found

    def assign_ticket(
        self, record_id: str, **changes: Any
    ) -> Dict[str, Any]:
        """Apply *changes* to an existing Ticket.

        Raises KeyError when no record exists for *record_id*.
        """
        current = self._store.get(record_id)
        if current is None:
            raise KeyError(f"Ticket {record_id!r} not found")
        current.update(changes)
        current["updated_at"] = datetime.now(timezone.utc).isoformat()
        return self._store.put(current)

    def reopen_ticket(self, record_id: str) -> bool:
        """Remove a Ticket; returns True on success."""
        if self._store.get(record_id) is None:
            return False
        self._store.delete(record_id)
        logger.info("reopen_ticket: removed %s", record_id)
        return True

    def list_tickets(
        self,
        status: Optional[str] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict[str, Any]]:
        """Return paginated Ticket records."""
        query: Dict[str, Any] = {"status": status} if status else {}
        rows = self._store.find(query, limit=limit, offset=offset)
        logger.debug("list_tickets: %d results", len(rows))
        return rows

    def iter_tickets(
        self, batch_size: int = 100
    ) -> Iterator[Dict[str, Any]]:
        """Yield all Ticket records in batches of *batch_size*."""
        offset = 0
        while True:
            batch = self.list_tickets(limit=batch_size, offset=offset)
            yield from batch
            if len(batch) < batch_size:
                # A short (or empty) page means the store is exhausted.
                break
            offset += batch_size