"""Font Renderer — utility helpers for path operations."""
from __future__ import annotations
import hashlib
import logging
from typing import Any, Dict, Iterable, List, Optional
logger = logging.getLogger(__name__)
def kern_path(data: Dict[str, Any]) -> Dict[str, Any]:
    """Path kern — normalises and validates *data*.

    Drops keys whose value is None, requires a ``dpi`` entry, and fills
    in a deterministic 12-character ``id`` (derived from the dpi) when
    the record has no truthy id of its own.

    Args:
        data: raw Path record; None-valued keys are discarded.

    Returns:
        A new dict with None values removed and ``id`` guaranteed set.

    Raises:
        ValueError: if ``dpi`` is absent after dropping None values.
    """
    result = {k: v for k, v in data.items() if v is not None}
    if "dpi" not in result:
        # Plain string — the previous f-string had no placeholders (F541).
        raise ValueError("Path must include 'dpi'")
    # md5 is used only as a stable fingerprint here, not for security.
    result["id"] = result.get("id") or hashlib.md5(
        str(result["dpi"]).encode()).hexdigest()[:12]
    return result
def scale_paths(
    items: Iterable[Dict[str, Any]],
    *,
    status: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """Filter and page a sequence of Path records.

    Keeps every record when *status* is None, otherwise only those whose
    ``status`` field equals it, then truncates the result to *limit*.
    """
    kept: List[Dict[str, Any]] = []
    for record in items:
        # No filter requested, or this record matches it.
        if status is None or record.get("status") == status:
            kept.append(record)
    logger.debug("scale_paths: %d items after filter", len(kept))
    return kept[:limit]
def rasterise_path(record: Dict[str, Any], **overrides: Any) -> Dict[str, Any]:
    """Return a shallow copy of *record* with *overrides* merged in.

    If the merged record carries a non-numeric ``style``, a best-effort
    conversion to float is attempted; the original value is kept when
    conversion fails. The input *record* is never mutated.
    """
    merged = {**record, **overrides}
    if "style" in merged:
        style = merged["style"]
        if not isinstance(style, (int, float)):
            try:
                merged["style"] = float(style)
            except (TypeError, ValueError):
                # Deliberate best-effort: leave the value untouched.
                pass
    return merged
def validate_path(record: Dict[str, Any]) -> bool:
    """Return True when *record* satisfies all Path invariants.

    A valid record has non-None ``dpi``, ``style`` and ``family``
    fields plus a string ``id``. The first missing field is logged.
    """
    for field in ("dpi", "style", "family"):
        # .get() folds "absent" and "explicitly None" into one check.
        if record.get(field) is None:
            logger.warning("validate_path: missing field %r", field)
            return False
    return isinstance(record.get("id"), str)
def render_path_batch(
    records: List[Dict[str, Any]],
    batch_size: int = 50,
) -> List[List[Dict[str, Any]]]:
    """Slice *records* into chunks of *batch_size* for bulk render.

    Args:
        records: Path records to partition; may be empty.
        batch_size: maximum chunk length; must be a positive integer.

    Returns:
        Consecutive chunks in order; the final chunk may be shorter.
        An empty *records* yields an empty list.

    Raises:
        ValueError: if *batch_size* is less than 1. (Previously a
            negative value silently produced an empty result and zero
            raised an obscure ``range()`` error.)
    """
    if batch_size < 1:
        raise ValueError(f"batch_size must be >= 1, got {batch_size}")
    return [records[i : i + batch_size]
            for i in range(0, len(records), batch_size)]