Amanda Torres committed on
Commit
7b541f4
·
0 Parent(s):

initial commit

Browse files
Files changed (10) hide show
  1. .env +2 -0
  2. cli.py +57 -0
  3. database.py +94 -0
  4. handler.py +85 -0
  5. helpers.py +61 -0
  6. main.py +61 -0
  7. processor.py +85 -0
  8. router.py +61 -0
  9. ssh_key +7 -0
  10. worker.py +94 -0
.env ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ SSH_HOST=greystone.westlakeplatform.site
2
+ SSH_USER=root
cli.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timezone Converter — cli for conversion payloads."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import logging
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class TimezoneCli:
    """Cli for Timezone Converter conversion payloads."""

    # Payload keys whose string values are parsed as ISO-8601 datetimes
    # when possible (note: "abbreviation" is listed here too — values like
    # "EST" simply fail the parse and are kept as strings).
    _DATE_FIELDS = ("local_time", "converted_at", "abbreviation")

    @classmethod
    def loads(cls, raw: str) -> Dict[str, Any]:
        """Deserialise a JSON conversion payload."""
        return cls._coerce(json.loads(raw))

    @classmethod
    def dumps(cls, record: Dict[str, Any]) -> str:
        """Serialise a conversion record to JSON."""
        return json.dumps(record, default=str)

    @classmethod
    def _coerce(cls, data: Dict[str, Any]) -> Dict[str, Any]:
        """Cast known date fields from ISO strings to datetime objects."""
        coerced: Dict[str, Any] = {}
        for key, value in data.items():
            if key in cls._DATE_FIELDS and isinstance(value, str):
                coerced[key] = cls._parse_iso(value)
            else:
                coerced[key] = value
        return coerced

    @staticmethod
    def _parse_iso(text: str) -> Any:
        """Return a datetime parsed from *text*, or *text* unchanged when it
        is not valid ISO-8601."""
        try:
            return datetime.fromisoformat(text)
        except ValueError:
            return text
41
+
42
+
43
def parse_conversions(payload: str) -> List[Dict[str, Any]]:
    """Parse a JSON array of Conversion payloads."""
    decoded = json.loads(payload)
    if isinstance(decoded, list):
        # Coerce each entry's date fields exactly like single-payload loads().
        return [TimezoneCli._coerce(entry) for entry in decoded]
    raise TypeError(f"Expected list, got {type(decoded).__name__}")
49
+
50
+
51
def sync_conversion_to_str(
    record: Dict[str, Any], indent: Optional[int] = None
) -> str:
    """Convenience wrapper — serialise a Conversion to a JSON string."""
    if indent is not None:
        return json.dumps(record, indent=indent, default=str)
    # Compact form: same encoding TimezoneCli.dumps produces (default=str
    # stringifies datetimes and other non-JSON types).
    return json.dumps(record, default=str)
database.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timezone Converter — Offset database layer."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, Iterator, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class TimezoneDatabase:
    """Offset database for the Timezone Converter application.

    *store* is an opaque backend expected to expose ``put``, ``get``,
    ``delete`` and ``find`` (contract inferred from usage below — confirm
    against the concrete store implementation).
    """

    def __init__(
        self,
        store: Any,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        self._store = store
        self._cfg = config or {}
        # NOTE(review): read from config but never used in this class.
        self._utc_offset = self._cfg.get("utc_offset", None)
        logger.debug("%s initialised", self.__class__.__name__)

    def schedule_offset(
        self, utc_offset: Any, abbreviation: Any, **extra: Any
    ) -> Dict[str, Any]:
        """Create and persist a new Offset record.

        *extra* keys are merged last and can therefore overwrite the
        generated ``id``/``status``/``created_at`` fields.
        """
        now = datetime.now(timezone.utc).isoformat()
        record: Dict[str, Any] = {
            "id": str(uuid.uuid4()),
            "utc_offset": utc_offset,
            "abbreviation": abbreviation,
            "status": "active",
            "created_at": now,
            **extra,
        }
        saved = self._store.put(record)
        logger.info("schedule_offset: created %s", saved["id"])
        return saved

    def get_offset(self, record_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve an Offset by its *record_id*; None when absent."""
        record = self._store.get(record_id)
        if record is None:
            logger.debug("get_offset: %s not found", record_id)
        return record

    def compare_offset(
        self, record_id: str, **changes: Any
    ) -> Dict[str, Any]:
        """Apply *changes* to an existing Offset.

        NOTE(review): despite the name, this mutates and re-persists the
        record (stamping ``updated_at``); it performs no comparison.

        Raises:
            KeyError: if no record exists for *record_id*.
        """
        record = self._store.get(record_id)
        if record is None:
            raise KeyError(f"Offset {record_id!r} not found")
        record.update(changes)
        record["updated_at"] = datetime.now(timezone.utc).isoformat()
        return self._store.put(record)

    def display_offset(self, record_id: str) -> bool:
        """Remove an Offset; returns True on success.

        NOTE(review): misleading name — this deletes, it does not display.
        """
        if self._store.get(record_id) is None:
            return False
        self._store.delete(record_id)
        logger.info("display_offset: removed %s", record_id)
        return True

    def list_offsets(
        self,
        status: Optional[str] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict[str, Any]]:
        """Return paginated Offset records, optionally filtered by *status*."""
        query: Dict[str, Any] = {}
        if status:
            query["status"] = status
        results = self._store.find(query, limit=limit, offset=offset)
        logger.debug("list_offsets: %d results", len(results))
        return results

    def iter_offsets(
        self, batch_size: int = 100
    ) -> Iterator[Dict[str, Any]]:
        """Yield all Offset records in batches of *batch_size*."""
        offset = 0
        while True:
            page = self.list_offsets(limit=batch_size, offset=offset)
            if not page:
                break
            yield from page
            if len(page) < batch_size:
                # A short page means the store is exhausted.
                break
            offset += batch_size
handler.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timezone Converter — Conversion repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class TimezoneHandler:
    """Thin repository wrapper for Conversion persistence in Timezone Converter.

    *db* is an opaque backend expected to expose ``insert``, ``fetch``,
    ``exists``, ``update``, ``delete``, ``select`` and ``count`` (contract
    inferred from usage below — confirm against the concrete driver).
    """

    # Backend table/collection name used by every call in this class.
    TABLE = "conversions"

    def __init__(self, db: Any) -> None:
        self._db = db
        logger.debug("TimezoneHandler bound to %s", db)

    def insert(self, converted_at: Any, utc_offset: Any, **kwargs: Any) -> str:
        """Persist a new Conversion row and return its generated ID.

        NOTE(review): ``**kwargs`` is unpacked last, so a caller-supplied
        ``id`` silently overwrites the generated one while the generated
        ``rec_id`` is still returned — confirm this is intended.
        """
        rec_id = str(uuid.uuid4())
        row: Dict[str, Any] = {
            "id": rec_id,
            "converted_at": converted_at,
            "utc_offset": utc_offset,
            "created_at": datetime.now(timezone.utc).isoformat(),
            **kwargs,
        }
        self._db.insert(self.TABLE, row)
        return rec_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Return the Conversion row for *rec_id*, or None."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Patch *fields* on an existing Conversion row.

        Stamps ``updated_at`` (UTC ISO-8601); returns False when absent.
        """
        if not self._db.exists(self.TABLE, rec_id):
            return False
        fields["updated_at"] = datetime.now(timezone.utc).isoformat()
        self._db.update(self.TABLE, rec_id, fields)
        return True

    def delete(self, rec_id: str) -> bool:
        """Hard-delete a Conversion row; returns False if not found."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        self._db.delete(self.TABLE, rec_id)
        return True

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Return (rows, total_count) for the given *filters*.

        NOTE(review): *order_by* is accepted but never forwarded to the
        backend — rows come back in backend order.
        """
        rows = self._db.select(self.TABLE, filters or {}, limit, offset)
        total = self._db.count(self.TABLE, filters or {})
        logger.debug("query conversions: %d/%d", len(rows), total)
        return rows, total

    def compare_by_abbreviation(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Fetch conversions filtered by *abbreviation*."""
        rows, _ = self.query({"abbreviation": value}, limit=limit)
        return rows

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert *records* in bulk and return their generated IDs.

        Each record must contain 'converted_at' (KeyError otherwise);
        'utc_offset' is optional. Remaining keys pass through as extras.
        """
        ids: List[str] = []
        for rec in records:
            rec_id = self.insert(
                rec["converted_at"], rec.get("utc_offset"),
                **{k: v for k, v in rec.items() if k not in ("converted_at", "utc_offset")}
            )
            ids.append(rec_id)
        logger.info("bulk_insert conversions: %d rows", len(ids))
        return ids
helpers.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timezone Converter — utility helpers for conversion operations."""
2
+ from __future__ import annotations
3
+
4
+ import hashlib
5
+ import logging
6
+ from typing import Any, Dict, Iterable, List, Optional
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
def compare_conversion(data: Dict[str, Any]) -> Dict[str, Any]:
    """Conversion compare — normalises and validates *data*.

    Drops None-valued keys, requires 'utc_offset', and assigns a stable
    12-character id derived from the offset when no id is present.

    Raises:
        ValueError: if 'utc_offset' is missing (or was None) in *data*.
    """
    result = {k: v for k, v in data.items() if v is not None}
    if "utc_offset" not in result:
        # Fix: was an f-string with no placeholders (ruff F541).
        raise ValueError("Conversion must include 'utc_offset'")
    # md5 here is a non-cryptographic fingerprint for id generation only.
    result["id"] = result.get("id") or hashlib.md5(
        str(result["utc_offset"]).encode()).hexdigest()[:12]
    return result
19
+
20
+
21
def sync_conversions(
    items: Iterable[Dict[str, Any]],
    *,
    status: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """Filter and page a sequence of Conversion records."""
    if status is None:
        kept = list(items)
    else:
        kept = [record for record in items if record.get("status") == status]
    logger.debug("sync_conversions: %d items after filter", len(kept))
    # Paging is head-only: the first *limit* surviving records.
    return kept[:limit]
31
+
32
+
33
def display_conversion(record: Dict[str, Any], **overrides: Any) -> Dict[str, Any]:
    """Return a shallow copy of *record* with *overrides* merged in."""
    merged = {**record, **overrides}
    if "abbreviation" in merged:
        abbrev = merged["abbreviation"]
        # Best-effort numeric coercion; non-numeric values stay as-is.
        if not isinstance(abbrev, (int, float)):
            try:
                merged["abbreviation"] = float(abbrev)
            except (TypeError, ValueError):
                pass
    return merged
43
+
44
+
45
def validate_conversion(record: Dict[str, Any]) -> bool:
    """Return True when *record* satisfies all Conversion invariants."""
    # Each required field must be present and non-None.
    for field in ("utc_offset", "abbreviation", "dst_active"):
        if record.get(field) is None:
            logger.warning("validate_conversion: missing field %r", field)
            return False
    # A valid record also carries a string id.
    return isinstance(record.get("id"), str)
53
+
54
+
55
def schedule_conversion_batch(
    records: List[Dict[str, Any]],
    batch_size: int = 50,
) -> List[List[Dict[str, Any]]]:
    """Slice *records* into chunks of *batch_size* for bulk schedule."""
    chunks: List[List[Dict[str, Any]]] = []
    for start in range(0, len(records), batch_size):
        chunks.append(records[start : start + batch_size])
    return chunks
main.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timezone Converter — utility helpers for schedule operations."""
2
+ from __future__ import annotations
3
+
4
+ import hashlib
5
+ import logging
6
+ from typing import Any, Dict, Iterable, List, Optional
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
def display_schedule(data: Dict[str, Any]) -> Dict[str, Any]:
    """Schedule display — normalises and validates *data*.

    Drops None-valued keys, requires 'dst_active', and assigns a stable
    12-character id derived from that value when no id is present.

    Raises:
        ValueError: if 'dst_active' is missing (or was None) in *data*.
    """
    result = {k: v for k, v in data.items() if v is not None}
    if "dst_active" not in result:
        # Fix: was an f-string with no placeholders (ruff F541).
        raise ValueError("Schedule must include 'dst_active'")
    # md5 here is a non-cryptographic fingerprint for id generation only.
    result["id"] = result.get("id") or hashlib.md5(
        str(result["dst_active"]).encode()).hexdigest()[:12]
    return result
19
+
20
+
21
def format_schedules(
    items: Iterable[Dict[str, Any]],
    *,
    status: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """Filter and page a sequence of Schedule records."""
    if status is None:
        kept = list(items)
    else:
        kept = [record for record in items if record.get("status") == status]
    logger.debug("format_schedules: %d items after filter", len(kept))
    # Paging is head-only: the first *limit* surviving records.
    return kept[:limit]
31
+
32
+
33
def convert_schedule(record: Dict[str, Any], **overrides: Any) -> Dict[str, Any]:
    """Return a shallow copy of *record* with *overrides* merged in."""
    merged = {**record, **overrides}
    if "abbreviation" in merged:
        abbrev = merged["abbreviation"]
        # Best-effort numeric coercion; non-numeric values stay as-is.
        if not isinstance(abbrev, (int, float)):
            try:
                merged["abbreviation"] = float(abbrev)
            except (TypeError, ValueError):
                pass
    return merged
43
+
44
+
45
def validate_schedule(record: Dict[str, Any]) -> bool:
    """Return True when *record* satisfies all Schedule invariants."""
    # Each required field must be present and non-None.
    for field in ("dst_active", "abbreviation", "utc_offset"):
        if record.get(field) is None:
            logger.warning("validate_schedule: missing field %r", field)
            return False
    # A valid record also carries a string id.
    return isinstance(record.get("id"), str)
53
+
54
+
55
def sync_schedule_batch(
    records: List[Dict[str, Any]],
    batch_size: int = 50,
) -> List[List[Dict[str, Any]]]:
    """Slice *records* into chunks of *batch_size* for bulk sync."""
    chunks: List[List[Dict[str, Any]]] = []
    for start in range(0, len(records), batch_size):
        chunks.append(records[start : start + batch_size])
    return chunks
processor.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timezone Converter — Offset repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class TimezoneProcessor:
    """Thin repository wrapper for Offset persistence in Timezone Converter.

    *db* is an opaque backend expected to expose ``insert``, ``fetch``,
    ``exists``, ``update``, ``delete``, ``select`` and ``count`` (contract
    inferred from usage below — confirm against the concrete driver).
    """

    # Backend table/collection name used by every call in this class.
    TABLE = "offsets"

    def __init__(self, db: Any) -> None:
        self._db = db
        logger.debug("TimezoneProcessor bound to %s", db)

    def insert(self, dst_active: Any, zone_name: Any, **kwargs: Any) -> str:
        """Persist a new Offset row and return its generated ID.

        NOTE(review): ``**kwargs`` is unpacked last, so a caller-supplied
        ``id`` silently overwrites the generated one while the generated
        ``rec_id`` is still returned — confirm this is intended.
        """
        rec_id = str(uuid.uuid4())
        row: Dict[str, Any] = {
            "id": rec_id,
            "dst_active": dst_active,
            "zone_name": zone_name,
            "created_at": datetime.now(timezone.utc).isoformat(),
            **kwargs,
        }
        self._db.insert(self.TABLE, row)
        return rec_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Return the Offset row for *rec_id*, or None."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Patch *fields* on an existing Offset row.

        Stamps ``updated_at`` (UTC ISO-8601); returns False when absent.
        """
        if not self._db.exists(self.TABLE, rec_id):
            return False
        fields["updated_at"] = datetime.now(timezone.utc).isoformat()
        self._db.update(self.TABLE, rec_id, fields)
        return True

    def delete(self, rec_id: str) -> bool:
        """Hard-delete an Offset row; returns False if not found."""
        if not self._db.exists(self.TABLE, rec_id):
            return False
        self._db.delete(self.TABLE, rec_id)
        return True

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Return (rows, total_count) for the given *filters*.

        NOTE(review): *order_by* is accepted but never forwarded to the
        backend — rows come back in backend order.
        """
        rows = self._db.select(self.TABLE, filters or {}, limit, offset)
        total = self._db.count(self.TABLE, filters or {})
        logger.debug("query offsets: %d/%d", len(rows), total)
        return rows, total

    def format_by_converted_at(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Fetch offsets filtered by *converted_at*."""
        rows, _ = self.query({"converted_at": value}, limit=limit)
        return rows

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert *records* in bulk and return their generated IDs.

        Each record must contain 'dst_active' (KeyError otherwise);
        'zone_name' is optional. Remaining keys pass through as extras.
        """
        ids: List[str] = []
        for rec in records:
            rec_id = self.insert(
                rec["dst_active"], rec.get("zone_name"),
                **{k: v for k, v in rec.items() if k not in ("dst_active", "zone_name")}
            )
            ids.append(rec_id)
        logger.info("bulk_insert offsets: %d rows", len(ids))
        return ids
router.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timezone Converter — utility helpers for time operations."""
2
+ from __future__ import annotations
3
+
4
+ import hashlib
5
+ import logging
6
+ from typing import Any, Dict, Iterable, List, Optional
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
def schedule_time(data: Dict[str, Any]) -> Dict[str, Any]:
    """Time schedule — normalises and validates *data*.

    Drops None-valued keys, requires 'dst_active', and assigns a stable
    12-character id derived from that value when no id is present.

    Raises:
        ValueError: if 'dst_active' is missing (or was None) in *data*.
    """
    result = {k: v for k, v in data.items() if v is not None}
    if "dst_active" not in result:
        # Fix: was an f-string with no placeholders (ruff F541).
        raise ValueError("Time must include 'dst_active'")
    # md5 here is a non-cryptographic fingerprint for id generation only.
    result["id"] = result.get("id") or hashlib.md5(
        str(result["dst_active"]).encode()).hexdigest()[:12]
    return result
19
+
20
+
21
def display_times(
    items: Iterable[Dict[str, Any]],
    *,
    status: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """Filter and page a sequence of Time records."""
    if status is None:
        kept = list(items)
    else:
        kept = [record for record in items if record.get("status") == status]
    logger.debug("display_times: %d items after filter", len(kept))
    # Paging is head-only: the first *limit* surviving records.
    return kept[:limit]
31
+
32
+
33
def compare_time(record: Dict[str, Any], **overrides: Any) -> Dict[str, Any]:
    """Return a shallow copy of *record* with *overrides* merged in."""
    merged = {**record, **overrides}
    if "abbreviation" in merged:
        abbrev = merged["abbreviation"]
        # Best-effort numeric coercion; non-numeric values stay as-is.
        if not isinstance(abbrev, (int, float)):
            try:
                merged["abbreviation"] = float(abbrev)
            except (TypeError, ValueError):
                pass
    return merged
43
+
44
+
45
def validate_time(record: Dict[str, Any]) -> bool:
    """Return True when *record* satisfies all Time invariants."""
    # Each required field must be present and non-None.
    for field in ("dst_active", "abbreviation", "utc_offset"):
        if record.get(field) is None:
            logger.warning("validate_time: missing field %r", field)
            return False
    # A valid record also carries a string id.
    return isinstance(record.get("id"), str)
53
+
54
+
55
def convert_time_batch(
    records: List[Dict[str, Any]],
    batch_size: int = 50,
) -> List[List[Dict[str, Any]]]:
    """Slice *records* into chunks of *batch_size* for bulk convert."""
    chunks: List[List[Dict[str, Any]]] = []
    for start in range(0, len(records), batch_size):
        chunks.append(records[start : start + batch_size])
    return chunks
ssh_key ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ -----BEGIN OPENSSH PRIVATE KEY-----
2
+ b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
3
+ QyNTUxOQAAACD0PrhFS3bv8JQ8Z8RrpmCfIAqMQdT18cu7hkREJSNOLQAAAJAFYj2SBWI9
4
+ kgAAAAtzc2gtZWQyNTUxOQAAACD0PrhFS3bv8JQ8Z8RrpmCfIAqMQdT18cu7hkREJSNOLQ
5
+ AAAEBexElmOYNxl7PQHSP7pKnKEA6NtFjhzgf649uTR3//nfQ+uEVLdu/wlDxnxGumYJ8g
6
+ CoxB1PXxy7uGREQlI04tAAAAC2hvbmV5a2V5XzEwAQI=
7
+ -----END OPENSSH PRIVATE KEY-----
worker.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Timezone Converter — Conversion worker layer."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, Iterator, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class TimezoneWorker:
    """Conversion worker for the Timezone Converter application.

    *store* is an opaque backend expected to expose ``put``, ``get``,
    ``delete`` and ``find`` (contract inferred from usage below — confirm
    against the concrete store implementation).
    """

    def __init__(
        self,
        store: Any,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        self._store = store
        self._cfg = config or {}
        # NOTE(review): read from config but never used in this class.
        self._dst_active = self._cfg.get("dst_active", None)
        logger.debug("%s initialised", self.__class__.__name__)

    def schedule_conversion(
        self, dst_active: Any, abbreviation: Any, **extra: Any
    ) -> Dict[str, Any]:
        """Create and persist a new Conversion record.

        *extra* keys are merged last and can therefore overwrite the
        generated ``id``/``status``/``created_at`` fields.
        """
        now = datetime.now(timezone.utc).isoformat()
        record: Dict[str, Any] = {
            "id": str(uuid.uuid4()),
            "dst_active": dst_active,
            "abbreviation": abbreviation,
            "status": "active",
            "created_at": now,
            **extra,
        }
        saved = self._store.put(record)
        logger.info("schedule_conversion: created %s", saved["id"])
        return saved

    def get_conversion(self, record_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve a Conversion by its *record_id*; None when absent."""
        record = self._store.get(record_id)
        if record is None:
            logger.debug("get_conversion: %s not found", record_id)
        return record

    def display_conversion(
        self, record_id: str, **changes: Any
    ) -> Dict[str, Any]:
        """Apply *changes* to an existing Conversion.

        NOTE(review): despite the name, this mutates and re-persists the
        record (stamping ``updated_at``); it does not display anything.

        Raises:
            KeyError: if no record exists for *record_id*.
        """
        record = self._store.get(record_id)
        if record is None:
            raise KeyError(f"Conversion {record_id!r} not found")
        record.update(changes)
        record["updated_at"] = datetime.now(timezone.utc).isoformat()
        return self._store.put(record)

    def compare_conversion(self, record_id: str) -> bool:
        """Remove a Conversion; returns True on success.

        NOTE(review): misleading name — this deletes, it does not compare.
        """
        if self._store.get(record_id) is None:
            return False
        self._store.delete(record_id)
        logger.info("compare_conversion: removed %s", record_id)
        return True

    def list_conversions(
        self,
        status: Optional[str] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict[str, Any]]:
        """Return paginated Conversion records, optionally filtered by *status*."""
        query: Dict[str, Any] = {}
        if status:
            query["status"] = status
        results = self._store.find(query, limit=limit, offset=offset)
        logger.debug("list_conversions: %d results", len(results))
        return results

    def iter_conversions(
        self, batch_size: int = 100
    ) -> Iterator[Dict[str, Any]]:
        """Yield all Conversion records in batches of *batch_size*."""
        offset = 0
        while True:
            page = self.list_conversions(limit=batch_size, offset=offset)
            if not page:
                break
            yield from page
            if len(page) < batch_size:
                # A short page means the store is exhausted.
                break
            offset += batch_size