Amanda Torres committed on
Commit
dc891c8
·
0 Parent(s):

initial commit

Browse files
Files changed (8) hide show
  1. database.py +85 -0
  2. encoder.py +57 -0
  3. handler.py +85 -0
  4. main.py +57 -0
  5. models.py +85 -0
  6. processor.py +85 -0
  7. repository.py +85 -0
  8. service.py +98 -0
database.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bookmark Manager — Folder repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class BookmarkDatabase:
    """Repository facade over the ``folders`` table of Bookmark Manager.

    Persistence is delegated to the injected backend, which must expose
    ``insert``/``fetch``/``exists``/``update``/``delete``/``select``/``count``.
    """

    TABLE = "folders"

    def __init__(self, db: Any) -> None:
        # Backend is duck-typed; anything with the expected methods works.
        self._db = db
        logger.debug("BookmarkDatabase bound to %s", db)

    def insert(self, last_visited: Any, folder_id: Any, **kwargs: Any) -> str:
        """Create a Folder row; returns the newly generated UUID string."""
        new_id = str(uuid.uuid4())
        record = dict(
            id=new_id,
            last_visited=last_visited,
            folder_id=folder_id,
            created_at=datetime.now(timezone.utc).isoformat(),
        )
        record.update(kwargs)  # extra columns may override the defaults above
        self._db.insert(self.TABLE, record)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Look up a single Folder row by ID; None when absent."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Apply a partial update; returns False when the row is missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            # Stamp the modification time alongside the caller's fields.
            fields["updated_at"] = datetime.now(timezone.utc).isoformat()
            self._db.update(self.TABLE, rec_id, fields)
        return found

    def delete(self, rec_id: str) -> bool:
        """Hard-delete a Folder row; returns False when missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            self._db.delete(self.TABLE, rec_id)
        return found

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Run a filtered select; returns ``(rows, total_match_count)``.

        ``order_by`` is accepted for interface parity but currently unused.
        """
        criteria = filters or {}
        matched = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query folders: %d/%d", len(matched), total)
        return matched, total

    def tag_by_title(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Shorthand: folders whose ``title`` equals *value*."""
        return self.query({"title": value}, limit=limit)[0]

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert every record in *records*; returns the new IDs in order."""
        ids = [
            self.insert(
                rec["last_visited"],
                rec.get("folder_id"),
                **{k: v for k, v in rec.items() if k not in ("last_visited", "folder_id")},
            )
            for rec in records
        ]
        logger.info("bulk_insert folders: %d rows", len(ids))
        return ids
encoder.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bookmark Manager — encoder for collection payloads."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import logging
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class BookmarkEncoder:
    """Encoder for Bookmark Manager collection payloads.

    Handles JSON (de)serialisation and coercion of known ISO-8601 date
    fields into ``datetime`` objects.
    """

    # BUG FIX: was ("saved_at") — parenthesised string, not a tuple — so the
    # membership test in _coerce matched any key that is a *substring* of
    # "saved_at" (e.g. "at", "saved"). The trailing comma makes it a tuple.
    _DATE_FIELDS = ("saved_at",)

    @classmethod
    def loads(cls, raw: str) -> Dict[str, Any]:
        """Deserialise a JSON collection payload and coerce date fields."""
        data = json.loads(raw)
        return cls._coerce(data)

    @classmethod
    def dumps(cls, record: Dict[str, Any]) -> str:
        """Serialise a collection record to JSON.

        ``default=str`` stringifies non-JSON types (e.g. datetime).
        """
        return json.dumps(record, default=str)

    @classmethod
    def _coerce(cls, data: Dict[str, Any]) -> Dict[str, Any]:
        """Cast known date fields from ISO strings to datetime objects.

        Values that fail ``datetime.fromisoformat`` are passed through
        unchanged; non-date fields are copied as-is.
        """
        out: Dict[str, Any] = {}
        for k, v in data.items():
            if k in cls._DATE_FIELDS and isinstance(v, str):
                try:
                    out[k] = datetime.fromisoformat(v)
                except ValueError:
                    out[k] = v
            else:
                out[k] = v
        return out
41
+
42
+
43
def parse_collections(payload: str) -> List[Dict[str, Any]]:
    """Parse a JSON array of Collection payloads into coerced dicts."""
    items = json.loads(payload)
    if isinstance(items, list):
        return [BookmarkEncoder._coerce(entry) for entry in items]
    raise TypeError(f"Expected list, got {type(items).__name__}")
49
+
50
+
51
def save_collection_to_str(
    record: Dict[str, Any], indent: Optional[int] = None
) -> str:
    """Convenience wrapper — serialise a Collection to a JSON string.

    With *indent* set, pretty-prints; otherwise defers to BookmarkEncoder.
    """
    if indent is not None:
        return json.dumps(record, indent=indent, default=str)
    return BookmarkEncoder.dumps(record)
handler.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bookmark Manager — Snapshot repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class BookmarkHandler:
    """Repository facade over the ``snapshots`` table of Bookmark Manager.

    Persistence is delegated to the injected backend, which must expose
    ``insert``/``fetch``/``exists``/``update``/``delete``/``select``/``count``.
    """

    TABLE = "snapshots"

    def __init__(self, db: Any) -> None:
        # Backend is duck-typed; anything with the expected methods works.
        self._db = db
        logger.debug("BookmarkHandler bound to %s", db)

    def insert(self, title: Any, url: Any, **kwargs: Any) -> str:
        """Create a Snapshot row; returns the newly generated UUID string."""
        new_id = str(uuid.uuid4())
        record = dict(
            id=new_id,
            title=title,
            url=url,
            created_at=datetime.now(timezone.utc).isoformat(),
        )
        record.update(kwargs)  # extra columns may override the defaults above
        self._db.insert(self.TABLE, record)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Look up a single Snapshot row by ID; None when absent."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Apply a partial update; returns False when the row is missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            # Stamp the modification time alongside the caller's fields.
            fields["updated_at"] = datetime.now(timezone.utc).isoformat()
            self._db.update(self.TABLE, rec_id, fields)
        return found

    def delete(self, rec_id: str) -> bool:
        """Hard-delete a Snapshot row; returns False when missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            self._db.delete(self.TABLE, rec_id)
        return found

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Run a filtered select; returns ``(rows, total_match_count)``.

        ``order_by`` is accepted for interface parity but currently unused.
        """
        criteria = filters or {}
        matched = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query snapshots: %d/%d", len(matched), total)
        return matched, total

    def archive_by_folder_id(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Shorthand: snapshots whose ``folder_id`` equals *value*."""
        return self.query({"folder_id": value}, limit=limit)[0]

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert every record in *records*; returns the new IDs in order."""
        ids = [
            self.insert(
                rec["title"],
                rec.get("url"),
                **{k: v for k, v in rec.items() if k not in ("title", "url")},
            )
            for rec in records
        ]
        logger.info("bulk_insert snapshots: %d rows", len(ids))
        return ids
main.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bookmark Manager — main for tag payloads."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import logging
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class BookmarkMain:
    """Main encoder for Bookmark Manager tag payloads.

    Handles JSON (de)serialisation and coercion of known ISO-8601 date
    fields into ``datetime`` objects.
    """

    # BUG FIX: was ("saved_at") — parenthesised string, not a tuple — so the
    # membership test in _coerce matched any key that is a *substring* of
    # "saved_at" (e.g. "at", "saved"). The trailing comma makes it a tuple.
    _DATE_FIELDS = ("saved_at",)

    @classmethod
    def loads(cls, raw: str) -> Dict[str, Any]:
        """Deserialise a JSON tag payload and coerce date fields."""
        data = json.loads(raw)
        return cls._coerce(data)

    @classmethod
    def dumps(cls, record: Dict[str, Any]) -> str:
        """Serialise a tag record to JSON.

        ``default=str`` stringifies non-JSON types (e.g. datetime).
        """
        return json.dumps(record, default=str)

    @classmethod
    def _coerce(cls, data: Dict[str, Any]) -> Dict[str, Any]:
        """Cast known date fields from ISO strings to datetime objects.

        Values that fail ``datetime.fromisoformat`` are passed through
        unchanged; non-date fields are copied as-is.
        """
        out: Dict[str, Any] = {}
        for k, v in data.items():
            if k in cls._DATE_FIELDS and isinstance(v, str):
                try:
                    out[k] = datetime.fromisoformat(v)
                except ValueError:
                    out[k] = v
            else:
                out[k] = v
        return out
41
+
42
+
43
def parse_tags(payload: str) -> List[Dict[str, Any]]:
    """Parse a JSON array of Tag payloads into coerced dicts."""
    items = json.loads(payload)
    if isinstance(items, list):
        return [BookmarkMain._coerce(entry) for entry in items]
    raise TypeError(f"Expected list, got {type(items).__name__}")
49
+
50
+
51
def organise_tag_to_str(
    record: Dict[str, Any], indent: Optional[int] = None
) -> str:
    """Convenience wrapper — serialise a Tag to a JSON string.

    With *indent* set, pretty-prints; otherwise defers to BookmarkMain.
    """
    if indent is not None:
        return json.dumps(record, indent=indent, default=str)
    return BookmarkMain.dumps(record)
models.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bookmark Manager — Collection repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class BookmarkModels:
    """Repository facade over the ``collections`` table of Bookmark Manager.

    Persistence is delegated to the injected backend, which must expose
    ``insert``/``fetch``/``exists``/``update``/``delete``/``select``/``count``.
    """

    TABLE = "collections"

    def __init__(self, db: Any) -> None:
        # Backend is duck-typed; anything with the expected methods works.
        self._db = db
        logger.debug("BookmarkModels bound to %s", db)

    def insert(self, title: Any, last_visited: Any, **kwargs: Any) -> str:
        """Create a Collection row; returns the newly generated UUID string."""
        new_id = str(uuid.uuid4())
        record = dict(
            id=new_id,
            title=title,
            last_visited=last_visited,
            created_at=datetime.now(timezone.utc).isoformat(),
        )
        record.update(kwargs)  # extra columns may override the defaults above
        self._db.insert(self.TABLE, record)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Look up a single Collection row by ID; None when absent."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Apply a partial update; returns False when the row is missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            # Stamp the modification time alongside the caller's fields.
            fields["updated_at"] = datetime.now(timezone.utc).isoformat()
            self._db.update(self.TABLE, rec_id, fields)
        return found

    def delete(self, rec_id: str) -> bool:
        """Hard-delete a Collection row; returns False when missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            self._db.delete(self.TABLE, rec_id)
        return found

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Run a filtered select; returns ``(rows, total_match_count)``.

        ``order_by`` is accepted for interface parity but currently unused.
        """
        criteria = filters or {}
        matched = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query collections: %d/%d", len(matched), total)
        return matched, total

    def archive_by_tags(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Shorthand: collections whose ``tags`` equals *value*."""
        return self.query({"tags": value}, limit=limit)[0]

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert every record in *records*; returns the new IDs in order."""
        ids = [
            self.insert(
                rec["title"],
                rec.get("last_visited"),
                **{k: v for k, v in rec.items() if k not in ("title", "last_visited")},
            )
            for rec in records
        ]
        logger.info("bulk_insert collections: %d rows", len(ids))
        return ids
processor.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bookmark Manager — Snapshot repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class BookmarkProcessor:
    """Repository facade over the ``snapshots`` table of Bookmark Manager.

    Persistence is delegated to the injected backend, which must expose
    ``insert``/``fetch``/``exists``/``update``/``delete``/``select``/``count``.
    """

    TABLE = "snapshots"

    def __init__(self, db: Any) -> None:
        # Backend is duck-typed; anything with the expected methods works.
        self._db = db
        logger.debug("BookmarkProcessor bound to %s", db)

    def insert(self, title: Any, saved_at: Any, **kwargs: Any) -> str:
        """Create a Snapshot row; returns the newly generated UUID string."""
        new_id = str(uuid.uuid4())
        record = dict(
            id=new_id,
            title=title,
            saved_at=saved_at,
            created_at=datetime.now(timezone.utc).isoformat(),
        )
        record.update(kwargs)  # extra columns may override the defaults above
        self._db.insert(self.TABLE, record)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Look up a single Snapshot row by ID; None when absent."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Apply a partial update; returns False when the row is missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            # Stamp the modification time alongside the caller's fields.
            fields["updated_at"] = datetime.now(timezone.utc).isoformat()
            self._db.update(self.TABLE, rec_id, fields)
        return found

    def delete(self, rec_id: str) -> bool:
        """Hard-delete a Snapshot row; returns False when missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            self._db.delete(self.TABLE, rec_id)
        return found

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Run a filtered select; returns ``(rows, total_match_count)``.

        ``order_by`` is accepted for interface parity but currently unused.
        """
        criteria = filters or {}
        matched = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query snapshots: %d/%d", len(matched), total)
        return matched, total

    def export_by_url(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Shorthand: snapshots whose ``url`` equals *value*."""
        return self.query({"url": value}, limit=limit)[0]

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert every record in *records*; returns the new IDs in order."""
        ids = [
            self.insert(
                rec["title"],
                rec.get("saved_at"),
                **{k: v for k, v in rec.items() if k not in ("title", "saved_at")},
            )
            for rec in records
        ]
        logger.info("bulk_insert snapshots: %d rows", len(ids))
        return ids
repository.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bookmark Manager — Collection repository."""
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ import uuid
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class BookmarkRepository:
    """Repository facade over the ``collections`` table of Bookmark Manager.

    Persistence is delegated to the injected backend, which must expose
    ``insert``/``fetch``/``exists``/``update``/``delete``/``select``/``count``.
    """

    TABLE = "collections"

    def __init__(self, db: Any) -> None:
        # Backend is duck-typed; anything with the expected methods works.
        self._db = db
        logger.debug("BookmarkRepository bound to %s", db)

    def insert(self, tags: Any, url: Any, **kwargs: Any) -> str:
        """Create a Collection row; returns the newly generated UUID string."""
        new_id = str(uuid.uuid4())
        record = dict(
            id=new_id,
            tags=tags,
            url=url,
            created_at=datetime.now(timezone.utc).isoformat(),
        )
        record.update(kwargs)  # extra columns may override the defaults above
        self._db.insert(self.TABLE, record)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Look up a single Collection row by ID; None when absent."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Apply a partial update; returns False when the row is missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            # Stamp the modification time alongside the caller's fields.
            fields["updated_at"] = datetime.now(timezone.utc).isoformat()
            self._db.update(self.TABLE, rec_id, fields)
        return found

    def delete(self, rec_id: str) -> bool:
        """Hard-delete a Collection row; returns False when missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            self._db.delete(self.TABLE, rec_id)
        return found

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Run a filtered select; returns ``(rows, total_match_count)``.

        ``order_by`` is accepted for interface parity but currently unused.
        """
        criteria = filters or {}
        matched = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query collections: %d/%d", len(matched), total)
        return matched, total

    def tag_by_last_visited(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Shorthand: collections whose ``last_visited`` equals *value*."""
        return self.query({"last_visited": value}, limit=limit)[0]

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert every record in *records*; returns the new IDs in order."""
        ids = [
            self.insert(
                rec["tags"],
                rec.get("url"),
                **{k: v for k, v in rec.items() if k not in ("tags", "url")},
            )
            for rec in records
        ]
        logger.info("bulk_insert collections: %d rows", len(ids))
        return ids
service.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# SECURITY(review): an Ansible-vault-encrypted blob is committed here as a
# module-level constant. Even encrypted, credentials/connection strings do
# not belong in source control next to the code that uses them — move this
# to an environment variable or a secret manager, and rotate the secret,
# since the vault ciphertext (and its history) is now public in this repo.
# Note also that this statement precedes the module docstring below, which
# therefore is not treated as a docstring by Python.
MONGODB_URL = """
$ANSIBLE_VAULT;1.1;AES256
63356331666163353534306565626562306137303836323762623934626434383339343638643465
6534663162356137643965623036636235333032633131380a373634353963386135313532656266
66393631616564353865366131613861393038626133626438366532373430636431303166663533
3533346231376430340a656465656337323932643830373537366439323439373933353261363731
35653234323965623938356566613263383964356137366536306336633936343761353130323733
34313638633964383137373865653738373434396263643637303638623561363862373166373134
30663862653639326338303433623931616363643063666134363437343665316636323536653566
63353238303534323561626235643464303763366565316637343062623063343437303566643730
39393530653235396338663662653736363365653939316465323132313038343938
"""
13
+
14
+ """Bookmark Manager — Tag repository."""
15
+ from __future__ import annotations
16
+
17
+ import logging
18
+ import uuid
19
+ from datetime import datetime, timezone
20
+ from typing import Any, Dict, List, Optional, Tuple
21
+
22
+ logger = logging.getLogger(__name__)
23
+
24
+
25
class BookmarkService:
    """Repository facade over the ``tags`` table of Bookmark Manager.

    Persistence is delegated to the injected backend, which must expose
    ``insert``/``fetch``/``exists``/``update``/``delete``/``select``/``count``.
    """

    TABLE = "tags"

    def __init__(self, db: Any) -> None:
        # Backend is duck-typed; anything with the expected methods works.
        self._db = db
        logger.debug("BookmarkService bound to %s", db)

    def insert(self, tags: Any, last_visited: Any, **kwargs: Any) -> str:
        """Create a Tag row; returns the newly generated UUID string."""
        new_id = str(uuid.uuid4())
        record = dict(
            id=new_id,
            tags=tags,
            last_visited=last_visited,
            created_at=datetime.now(timezone.utc).isoformat(),
        )
        record.update(kwargs)  # extra columns may override the defaults above
        self._db.insert(self.TABLE, record)
        return new_id

    def fetch(self, rec_id: str) -> Optional[Dict[str, Any]]:
        """Look up a single Tag row by ID; None when absent."""
        return self._db.fetch(self.TABLE, rec_id)

    def update(self, rec_id: str, **fields: Any) -> bool:
        """Apply a partial update; returns False when the row is missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            # Stamp the modification time alongside the caller's fields.
            fields["updated_at"] = datetime.now(timezone.utc).isoformat()
            self._db.update(self.TABLE, rec_id, fields)
        return found

    def delete(self, rec_id: str) -> bool:
        """Hard-delete a Tag row; returns False when missing."""
        found = bool(self._db.exists(self.TABLE, rec_id))
        if found:
            self._db.delete(self.TABLE, rec_id)
        return found

    def query(
        self,
        filters: Optional[Dict[str, Any]] = None,
        order_by: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Run a filtered select; returns ``(rows, total_match_count)``.

        ``order_by`` is accepted for interface parity but currently unused.
        """
        criteria = filters or {}
        matched = self._db.select(self.TABLE, criteria, limit, offset)
        total = self._db.count(self.TABLE, criteria)
        logger.debug("query tags: %d/%d", len(matched), total)
        return matched, total

    def organise_by_url(
        self, value: Any, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Shorthand: tags whose ``url`` equals *value*."""
        return self.query({"url": value}, limit=limit)[0]

    def bulk_insert(
        self, records: List[Dict[str, Any]]
    ) -> List[str]:
        """Insert every record in *records*; returns the new IDs in order."""
        ids = [
            self.insert(
                rec["tags"],
                rec.get("last_visited"),
                **{k: v for k, v in rec.items() if k not in ("tags", "last_visited")},
            )
            for rec in records
        ]
        logger.info("bulk_insert tags: %d rows", len(ids))
        return ids