Amanda Torres committed on
Commit ·
1e23d14
0
Parent(s):
initial commit
Browse files- client/cloud_run_client.py +80 -0
- client/cloud_sql_client.py +92 -0
- client/compute_client.py +99 -0
- client/logger_setup.py +51 -0
- config/pubsub_client.py +104 -0
- config/schema.py +67 -0
- exceptions.py +40 -0
- gcp_secret.json +13 -0
client/cloud_run_client.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from typing import Any, Optional
|
| 3 |
+
|
| 4 |
+
logger = logging.getLogger(__name__)
|
| 5 |
+
|
| 6 |
+
try:
|
| 7 |
+
from googleapiclient import discovery
|
| 8 |
+
from google.oauth2 import service_account
|
| 9 |
+
except ImportError:
|
| 10 |
+
discovery = None # type: ignore
|
| 11 |
+
service_account = None # type: ignore
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class CloudRunClient:
    """Minimal wrapper over the Cloud Run Admin v1 (namespaces-based) REST API.

    Authentication uses an explicit service-account key file when given,
    otherwise Application Default Credentials.
    """

    SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]

    def __init__(self, project_id: str, region: str = "us-central1",
                 credentials_file: Optional[str] = None) -> None:
        """Build the discovery-based client.

        Args:
            project_id: GCP project that owns the Cloud Run services.
            region: Cloud Run region; selects the regional API endpoint.
            credentials_file: optional service-account JSON key path.

        Raises:
            ImportError: if google-api-python-client is not installed.
        """
        self.project_id = project_id
        self.region = region
        if discovery is None:
            raise ImportError("google-api-python-client is not installed")
        if credentials_file and service_account:
            creds = service_account.Credentials.from_service_account_file(
                credentials_file, scopes=self.SCOPES
            )
        else:
            import google.auth
            creds, _ = google.auth.default(scopes=self.SCOPES)
        # FIX: the namespaces-based Cloud Run v1 API is served per region.
        # The original build() ignored `region` and used the global endpoint,
        # so requests could miss region-scoped services.
        self._svc = discovery.build(
            "run", "v1",
            credentials=creds,
            client_options={
                "api_endpoint": f"https://{region}-run.googleapis.com"
            },
            cache_discovery=False,
        )
        self._parent = f"namespaces/{project_id}"
        logger.debug("Cloud Run client for %s/%s", project_id, region)

    def list_services(self) -> list[dict[str, Any]]:
        """Return name/url/ready summaries for all services in the project."""
        response = (
            self._svc.namespaces()
            .services()
            .list(parent=self._parent)
            .execute()
        )
        return [
            {
                "name": svc["metadata"]["name"],
                "url": svc.get("status", {}).get("url", ""),
                # A service is "ready" when its Ready condition is True.
                "ready": any(
                    c.get("type") == "Ready" and c.get("status") == "True"
                    for c in svc.get("status", {}).get("conditions", [])
                ),
            }
            for svc in response.get("items", [])
        ]

    def get_service(self, service_name: str) -> dict[str, Any]:
        """Fetch one service; summarise name, url, image, and annotations."""
        name = f"{self._parent}/services/{service_name}"
        svc = self._svc.namespaces().services().get(name=name).execute()
        return {
            "name": svc["metadata"]["name"],
            "url": svc.get("status", {}).get("url"),
            # First container of the serving template.
            "image": (svc.get("spec", {})
                      .get("template", {})
                      .get("spec", {})
                      .get("containers", [{}])[0]
                      .get("image")),
            "annotations": svc.get("metadata", {}).get("annotations", {}),
        }

    def update_traffic(self, service_name: str,
                       traffic: list[dict[str, Any]]) -> None:
        """Replace the service's traffic split (read-modify-write).

        NOTE(review): no optimistic-concurrency guard; a concurrent update
        between get and replaceService is lost-update prone.
        """
        name = f"{self._parent}/services/{service_name}"
        svc = self._svc.namespaces().services().get(name=name).execute()
        svc["spec"]["traffic"] = traffic
        self._svc.namespaces().services().replaceService(
            name=name, body=svc
        ).execute()
        logger.info("Updated traffic for service %s", service_name)

    def delete_service(self, service_name: str) -> None:
        """Delete the named Cloud Run service."""
        name = f"{self._parent}/services/{service_name}"
        self._svc.namespaces().services().delete(name=name).execute()
        logger.info("Deleted Cloud Run service %s", service_name)
|
client/cloud_sql_client.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from typing import Any, Iterator, Optional
|
| 3 |
+
import contextlib
|
| 4 |
+
|
| 5 |
+
logger = logging.getLogger(__name__)
|
| 6 |
+
|
| 7 |
+
try:
|
| 8 |
+
import sqlalchemy
|
| 9 |
+
from sqlalchemy import text
|
| 10 |
+
except ImportError:
|
| 11 |
+
sqlalchemy = None # type: ignore
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class CloudSQLClient:
    """SQLAlchemy-backed helper for a Cloud SQL Postgres database.

    Prefers the Cloud SQL Python Connector (pg8000 driver); when the
    connector is unavailable, falls back to a plain localhost DSN
    (e.g. a locally-running Cloud SQL Auth Proxy).
    """

    def __init__(self, connection_name: str, db_name: str,
                 user: str, password: str,
                 use_public_ip: bool = False) -> None:
        """Store connection settings; the engine is created lazily.

        Args:
            connection_name: instance connection name
                ("project:region:instance").
            db_name: database to connect to.
            user: database user.
            password: database password (kept private).
            use_public_ip: connector path only — use the instance's
                public IP instead of the private one.
        """
        self.connection_name = connection_name
        self.db_name = db_name
        self.user = user
        self._password = password
        self.use_public_ip = use_public_ip
        self._engine = None  # created on first use by _get_engine()
        logger.debug("CloudSQL client for %s/%s", connection_name, db_name)

    def _get_engine(self):
        """Create (once) and return the SQLAlchemy engine.

        Raises:
            ImportError: if sqlalchemy itself is missing.
        """
        if self._engine is not None:
            return self._engine
        if sqlalchemy is None:
            raise ImportError("sqlalchemy is not installed")
        try:
            from google.cloud.sql.connector import Connector
            connector = Connector()

            def getconn():
                return connector.connect(
                    self.connection_name, "pg8000",
                    user=self.user, password=self._password,
                    db=self.db_name,
                    ip_type="PUBLIC" if self.use_public_ip else "PRIVATE",
                )

            self._engine = sqlalchemy.create_engine(
                "postgresql+pg8000://", creator=getconn,
                pool_size=5, max_overflow=2, pool_timeout=30,
            )
        except ImportError:
            # FIX: build the fallback DSN with URL.create so credentials
            # containing '@', '/' or ':' are escaped correctly; the
            # original f-string produced a broken URL for such passwords.
            url = sqlalchemy.engine.URL.create(
                drivername="postgresql+pg8000",
                username=self.user,
                password=self._password,
                host="localhost",
                database=self.db_name,
            )
            self._engine = sqlalchemy.create_engine(url)
        return self._engine

    @contextlib.contextmanager
    def connection(self):
        """Context manager yielding a pooled SQLAlchemy connection."""
        engine = self._get_engine()
        with engine.connect() as conn:
            yield conn

    def execute(self, sql: str, params: Optional[dict] = None) -> int:
        """Run a single DML statement and commit; return affected row count."""
        with self.connection() as conn:
            result = conn.execute(text(sql), params or {})
            conn.commit()
            return result.rowcount

    def fetch_all(self, sql: str,
                  params: Optional[dict] = None) -> list[dict[str, Any]]:
        """Run a query and return every row as a plain dict."""
        with self.connection() as conn:
            rows = conn.execute(text(sql), params or {}).fetchall()
            return [dict(row._mapping) for row in rows]

    def fetch_one(self, sql: str,
                  params: Optional[dict] = None) -> Optional[dict[str, Any]]:
        """Run a query and return the first row as a dict, or None."""
        with self.connection() as conn:
            row = conn.execute(text(sql), params or {}).fetchone()
            return dict(row._mapping) if row else None

    def table_exists(self, schema: str, table: str) -> bool:
        """True when information_schema reports the table in the schema."""
        result = self.fetch_one(
            "SELECT 1 FROM information_schema.tables "
            "WHERE table_schema = :schema AND table_name = :table",
            {"schema": schema, "table": table},
        )
        return result is not None

    def run_migration(self, migration_sql: str) -> None:
        """Execute a multi-statement migration script in one transaction.

        NOTE(review): statements are split naively on ';' — semicolons
        inside string literals or function bodies will break this; verify
        migration scripts against that limitation.
        """
        logger.info("Running migration (%d chars)", len(migration_sql))
        with self.connection() as conn:
            for statement in migration_sql.split(";"):
                stmt = statement.strip()
                if stmt:
                    conn.execute(text(stmt))
            conn.commit()
        logger.info("Migration complete")
|
client/compute_client.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import time
|
| 3 |
+
from typing import Any, Optional
|
| 4 |
+
|
| 5 |
+
logger = logging.getLogger(__name__)
|
| 6 |
+
|
| 7 |
+
try:
|
| 8 |
+
from google.cloud import compute_v1
|
| 9 |
+
from google.api_core.exceptions import NotFound
|
| 10 |
+
except ImportError:
|
| 11 |
+
compute_v1 = None # type: ignore
|
| 12 |
+
NotFound = Exception
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class ComputeClient:
    """Wrapper around google-cloud-compute instance operations in one zone."""

    def __init__(self, project_id: str, zone: str,
                 credentials_file: Optional[str] = None) -> None:
        """Create instance and zone-operation clients.

        Args:
            project_id: GCP project owning the instances.
            zone: compute zone all calls are scoped to.
            credentials_file: optional service-account JSON key; falls back
                to Application Default Credentials.

        Raises:
            ImportError: if google-cloud-compute is not installed.
        """
        self.project_id = project_id
        self.zone = zone
        if compute_v1 is None:
            raise ImportError("google-cloud-compute is not installed")
        # FIX: `credentials_file` was accepted but silently ignored;
        # wire it into the clients like the other clients in this repo.
        kw: dict = {}
        if credentials_file:
            from google.oauth2 import service_account
            kw["credentials"] = (
                service_account.Credentials.from_service_account_file(
                    credentials_file
                )
            )
        self._instances = compute_v1.InstancesClient(**kw)
        self._operations = compute_v1.ZoneOperationsClient(**kw)
        logger.debug("Compute client for %s/%s", project_id, zone)

    def _wait_for_operation(self, operation_name: str,
                            timeout: int = 300) -> None:
        """Poll a zone operation until it is DONE.

        Raises:
            RuntimeError: if the finished operation reports errors.
            TimeoutError: if the deadline passes first.
        """
        deadline = time.time() + timeout
        while time.time() < deadline:
            op = self._operations.get(project=self.project_id,
                                      zone=self.zone,
                                      operation=operation_name)
            if op.status == compute_v1.Operation.Status.DONE:
                # FIX: proto messages are not reliably falsy when empty,
                # so the original `if op.error:` could flag successful
                # operations as failed.  Inspect the repeated errors list,
                # which is empty (falsy) on success.
                if op.error.errors:
                    raise RuntimeError(f"Operation failed: {op.error}")
                return
            time.sleep(5)
        raise TimeoutError(f"Operation {operation_name} timed out")

    def list_instances(self, filter_expr: str = "") -> list[dict[str, Any]]:
        """List instances in the zone, optionally filtered server-side."""
        request = compute_v1.ListInstancesRequest(
            project=self.project_id, zone=self.zone, filter=filter_expr
        )
        instances = []
        for inst in self._instances.list(request=request):
            instances.append({
                "name": inst.name,
                "status": inst.status,
                # machine_type is a full URL; keep only the short name.
                "machine_type": inst.machine_type.split("/")[-1],
                "zone": self.zone,
            })
        return instances

    def get_instance(self, name: str) -> dict[str, Any]:
        """Return a summary dict (name, status, machine type, internal IPs)."""
        inst = self._instances.get(project=self.project_id,
                                   zone=self.zone, instance=name)
        return {
            "name": inst.name,
            "status": inst.status,
            "machine_type": inst.machine_type.split("/")[-1],
            "network_ips": [
                iface.network_i_p
                for iface in inst.network_interfaces
            ],
        }

    def start_instance(self, name: str) -> None:
        """Start the instance and block until the operation completes."""
        op = self._instances.start(project=self.project_id,
                                   zone=self.zone, instance=name)
        self._wait_for_operation(op.name)
        logger.info("Started instance %s", name)

    def stop_instance(self, name: str) -> None:
        """Stop the instance and block until the operation completes."""
        op = self._instances.stop(project=self.project_id,
                                  zone=self.zone, instance=name)
        self._wait_for_operation(op.name)
        logger.info("Stopped instance %s", name)

    def instance_status(self, name: str) -> str:
        """Return the instance status, or "NOT_FOUND" if it does not exist."""
        try:
            inst = self._instances.get(project=self.project_id,
                                       zone=self.zone, instance=name)
            return inst.status
        except NotFound:
            return "NOT_FOUND"

    def set_labels(self, name: str, labels: dict[str, str]) -> None:
        """Replace the instance's labels (requires the current fingerprint)."""
        inst = self._instances.get(project=self.project_id,
                                   zone=self.zone, instance=name)
        req = compute_v1.SetLabelsInstanceRequest(
            project=self.project_id, zone=self.zone, instance=name,
            instances_set_labels_request_resource=compute_v1.InstancesSetLabelsRequest(
                # Fingerprint guards against concurrent label updates.
                label_fingerprint=inst.label_fingerprint,
                labels=labels,
            ),
        )
        op = self._instances.set_labels(request=req)
        self._wait_for_operation(op.name)
        logger.info("Labels updated for instance %s", name)
|
client/logger_setup.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import sys
|
| 3 |
+
from typing import Optional
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def setup(
    name: str = "gcp-tool",
    level: str = "INFO",
    log_file: Optional[str] = None,
    fmt: str = "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
) -> logging.Logger:
    """Configure and return a named logger.

    Any pre-existing handlers on the logger are discarded, a stdout
    stream handler is always attached, and an optional file handler is
    added when `log_file` is given.  Propagation to the root logger is
    disabled so records are emitted exactly once.

    Args:
        name: logger name to configure.
        level: level name (case-insensitive); unknown names fall back to INFO.
        log_file: optional path for an additional file handler.
        fmt: record format string.
    """
    log = logging.getLogger(name)
    log.setLevel(getattr(logging, level.upper(), logging.INFO))
    log.handlers.clear()

    formatter = logging.Formatter(fmt, datefmt="%Y-%m-%dT%H:%M:%S")

    handlers: list[logging.Handler] = [logging.StreamHandler(sys.stdout)]
    if log_file:
        handlers.append(logging.FileHandler(log_file))

    for handler in handlers:
        handler.setFormatter(formatter)
        log.addHandler(handler)

    log.propagate = False
    return log
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class StructuredLogger:
    """Logger facade that renders keyword arguments as key=value pairs."""

    def __init__(self, name: str) -> None:
        self._log = logging.getLogger(name)

    def _fmt(self, msg: str, **kw) -> str:
        """Append " key=repr(value)" pairs to the message, if any."""
        if kw:
            suffix = " ".join(f"{key}={value!r}" for key, value in kw.items())
            return f"{msg} {suffix}"
        return msg

    def info(self, msg: str, **kw) -> None:
        self._log.info(self._fmt(msg, **kw))

    def warning(self, msg: str, **kw) -> None:
        self._log.warning(self._fmt(msg, **kw))

    def error(self, msg: str, **kw) -> None:
        self._log.error(self._fmt(msg, **kw))

    def debug(self, msg: str, **kw) -> None:
        self._log.debug(self._fmt(msg, **kw))
|
config/pubsub_client.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import logging
|
| 3 |
+
from typing import Any, Callable, Optional
|
| 4 |
+
|
| 5 |
+
logger = logging.getLogger(__name__)
|
| 6 |
+
|
| 7 |
+
try:
|
| 8 |
+
from google.cloud import pubsub_v1
|
| 9 |
+
from google.api_core.exceptions import AlreadyExists, NotFound
|
| 10 |
+
except ImportError:
|
| 11 |
+
pubsub_v1 = None # type: ignore
|
| 12 |
+
AlreadyExists = Exception
|
| 13 |
+
NotFound = Exception
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class PubSubClient:
    """Convenience wrapper over the Pub/Sub publisher and subscriber APIs."""

    def __init__(self, project_id: str,
                 credentials_file: Optional[str] = None) -> None:
        """Create publisher and subscriber clients.

        Args:
            project_id: GCP project hosting the topics/subscriptions.
            credentials_file: optional service-account JSON key; falls back
                to Application Default Credentials.

        Raises:
            ImportError: if google-cloud-pubsub is not installed.
        """
        self.project_id = project_id
        if pubsub_v1 is None:
            raise ImportError("google-cloud-pubsub is not installed")
        kw: dict = {}
        if credentials_file:
            from google.oauth2 import service_account
            creds = service_account.Credentials.from_service_account_file(
                credentials_file
            )
            kw["credentials"] = creds
        self._publisher = pubsub_v1.PublisherClient(**kw)
        self._subscriber = pubsub_v1.SubscriberClient(**kw)
        logger.debug("Pub/Sub client initialised for project %s", project_id)

    def _topic_path(self, topic: str) -> str:
        """Fully-qualified topic resource path."""
        return self._publisher.topic_path(self.project_id, topic)

    def _sub_path(self, subscription: str) -> str:
        """Fully-qualified subscription resource path."""
        return self._subscriber.subscription_path(self.project_id, subscription)

    # ── topics ─────────────────────────────────────────────────────────

    def create_topic(self, topic: str) -> None:
        """Create the topic; a pre-existing topic is not an error."""
        try:
            self._publisher.create_topic(request={"name": self._topic_path(topic)})
            logger.info("Created topic %s", topic)
        except AlreadyExists:
            logger.debug("Topic %s already exists", topic)

    def delete_topic(self, topic: str) -> None:
        """Delete the topic (raises NotFound if it does not exist)."""
        self._publisher.delete_topic(request={"topic": self._topic_path(topic)})
        logger.info("Deleted topic %s", topic)

    def list_topics(self) -> list[str]:
        """Return the short names of every topic in the project."""
        path = f"projects/{self.project_id}"
        return [t.name.split("/")[-1]
                for t in self._publisher.list_topics(request={"project": path})]

    # ── publish ─────────────────────────────────────────────────────────

    def publish(self, topic: str, data: Any,
                attributes: Optional[dict[str, str]] = None) -> str:
        """Publish `data` (JSON-encoded unless already bytes); return msg id.

        NOTE(review): attribute values must be strings per the Pub/Sub
        API — callers are responsible for that.
        """
        encoded = json.dumps(data).encode() if not isinstance(data, bytes) else data
        future = self._publisher.publish(
            self._topic_path(topic),
            data=encoded,
            **(attributes or {}),
        )
        # Blocks until the server confirms receipt.
        message_id = future.result()
        logger.debug("Published message %s to %s", message_id, topic)
        return message_id

    # ── subscribe ────────────────────────────────────────────────────────

    def create_subscription(self, topic: str, subscription: str,
                            ack_deadline: int = 60) -> None:
        """Create a pull subscription; a pre-existing one is not an error."""
        try:
            self._subscriber.create_subscription(request={
                "name": self._sub_path(subscription),
                "topic": self._topic_path(topic),
                "ack_deadline_seconds": ack_deadline,
            })
            logger.info("Created subscription %s → %s", subscription, topic)
        except AlreadyExists:
            logger.debug("Subscription %s already exists", subscription)

    def pull(self, subscription: str, max_messages: int = 10) -> list[dict]:
        """Synchronously pull up to `max_messages` messages and ack them.

        NOTE(review): messages are acked before the caller processes them
        (at-most-once delivery); a crash after return loses them.
        """
        response = self._subscriber.pull(request={
            "subscription": self._sub_path(subscription),
            "max_messages": max_messages,
        })
        messages = []
        ack_ids = []
        for msg in response.received_messages:
            raw = msg.message.data
            # FIX: publish() allows raw-bytes payloads, so not every body
            # is JSON.  The original json.loads raised on such messages,
            # leaving the whole batch un-acked; fall back to the raw bytes.
            try:
                payload = json.loads(raw.decode())
            except (UnicodeDecodeError, ValueError):
                payload = raw
            messages.append({
                "message_id": msg.message.message_id,
                "data": payload,
                "attributes": dict(msg.message.attributes),
            })
            ack_ids.append(msg.ack_id)
        if ack_ids:
            self._subscriber.acknowledge(request={
                "subscription": self._sub_path(subscription),
                "ack_ids": ack_ids,
            })
        return messages
|
config/schema.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
from dataclasses import dataclass, field
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from typing import Any, Optional
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
@dataclass
class TableSchema:
    """Named, ordered collection of BigQuery-style field definitions."""

    name: str
    fields: list[FieldSchema] = field(default_factory=list)

    def add_field(self, name: str, field_type: str,
                  mode: str = "NULLABLE", description: str = "") -> None:
        """Append a FieldSchema built from the given attributes."""
        new_field = FieldSchema(
            name=name,
            field_type=field_type,
            mode=mode,
            description=description,
        )
        self.fields.append(new_field)

    def to_bq_schema(self) -> list[dict]:
        """Serialise every field to its dict form, in order."""
        return [entry.to_dict() for entry in self.fields]

    def field_names(self) -> list[str]:
        """Names of all fields, in insertion order."""
        return [entry.name for entry in self.fields]
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@dataclass
class FieldSchema:
    """Single column definition (BigQuery-style)."""

    name: str
    field_type: str
    mode: str = "NULLABLE"
    description: str = ""

    def to_dict(self) -> dict:
        """Render as the {name, type, mode, description} mapping."""
        return dict(
            name=self.name,
            type=self.field_type,
            mode=self.mode,
            description=self.description,
        )
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@dataclass
class JobResult:
    """Outcome summary of a single job run."""

    job_id: str
    state: str
    started_at: Optional[datetime] = None
    ended_at: Optional[datetime] = None
    rows_processed: int = 0
    bytes_processed: int = 0
    errors: list[str] = field(default_factory=list)

    @property
    def duration_seconds(self) -> Optional[float]:
        """Wall-clock duration, or None when either timestamp is missing."""
        if self.started_at is None or self.ended_at is None:
            return None
        return (self.ended_at - self.started_at).total_seconds()

    def succeeded(self) -> bool:
        """True when the job reached state DONE with no recorded errors."""
        return not self.errors and self.state == "DONE"

    def to_dict(self) -> dict[str, Any]:
        """Flatten to primitives, e.g. for logging or JSON serialisation."""
        return {
            "job_id": self.job_id,
            "state": self.state,
            "duration_s": self.duration_seconds,
            "rows_processed": self.rows_processed,
            "bytes_processed": self.bytes_processed,
            "errors": self.errors,
        }
|
exceptions.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class GCPError(Exception):
    """Base class for all GCP tooling errors; may wrap a causing exception."""

    def __init__(self, message: str, cause: Optional[Exception] = None) -> None:
        super().__init__(message)
        self.cause = cause  # underlying exception, if any

    def __str__(self) -> str:
        text = super().__str__()
        if self.cause is None:
            return text
        return f"{text} (caused by: {self.cause})"
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class AuthenticationError(GCPError):
    """Raised when credentials are missing, invalid, or rejected."""
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class ResourceNotFoundError(GCPError):
    """Raised when a named GCP resource cannot be located."""

    def __init__(self, resource_type: str, name: str) -> None:
        super().__init__(f"{resource_type} not found: {name}")
        self.resource_type = resource_type  # e.g. "topic", "instance"
        self.name = name  # identifier that failed to resolve
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class QuotaExceededError(GCPError):
    """Raised when a GCP quota or rate limit is exhausted."""
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class PermissionDeniedError(GCPError):
    """Raised when the caller lacks permission for the requested operation."""
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class RetryableError(GCPError):
    """Transient failure; carries the attempt number for retry/backoff logic."""

    def __init__(self, message: str, attempt: int,
                 cause: Optional[Exception] = None) -> None:
        super().__init__(message, cause)
        self.attempt = attempt  # retry attempt that raised
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class TimeoutError(GCPError):
    """Raised when an operation exceeds its deadline.

    NOTE(review): this name shadows the builtin ``TimeoutError`` inside
    this module; code that means the builtin must reference it before
    this definition or via ``builtins``.
    """
|
gcp_secret.json
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"type": "service_account",
|
| 3 |
+
"project_id": "honeypot-491918",
|
| 4 |
+
"private_key_id": "a5d4bfaba95b382bb90a602fbade9915c6996a03",
|
| 5 |
+
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCq3+J412MyO/Tk\nDXzmEckKm/LCBCyNpNx8obsPdlfG2Ssqjm+LQnJQijqj5eTwBl1Yskg2pec9VHMH\nQ7AScKdJvc1hUgHUEr1oUZ79sVb6bekfak8rTDMVO3VJLs/HxY2CIhAb0zdqCX4p\n0MfXs+RWSkn5U+5zWYctItbkqQYkcUq7lnecjiWCYhvJrARx/9ksQ22zFNp0Rv8c\nmLc4p9kod0Pcaet1kdHq72YJbXajbD9lJmltgpDq5dnS6Ok7XZLSq6c9fuvGPI5y\nJ2xHfseGmA94r88WJrUn2JF1inb4F3qSmhX7vEk64rZoAQSabHjBwzn5MePhfgAo\n7B2lOuBnAgMBAAECggEABG5AYxMYAV9zSXkyueHQLV7p0S9mha22rIyiWxOd0bbO\nhbtNF8v7RtdZZJKQzcsnHyZQSl/MOoI2vAN8ofMbLgWmjlV1XVhht5Nrj0x1OD0i\nrwEvWFdmI7DM/ml2Ux8Y55JBNKzUt60oq6Y8PahDzXD+W2FabNHveAjP5adJqibd\nrKWttAJfLyjGJomrUgdcjRQ9k70dnsXk52x8xLEuDYWDufKbmafoKFwfMpI+G7ML\n5fFqWZ8MTAu15HI3JueaMBHs4sN3NOHzzVI97RSakDjr4psjeVKiGmlmtgn5g9wr\n41rjSKzvwatZeEMPVJ853kcmxhVnoCqPt+lzACgdUQKBgQDgxiEZUr8h9PlWUAqL\nY0FArINJVoiHQIY17aQ37Rx1osuNbfkHFTuvfHLiLxazLkZ0caD171LVcHQKc8/w\ndg6SzdAwQjCU/mFiSEpgoUE5oc1hHneAhzIiL+xGy6P6vgdM0QsbxchXeFcFofET\noNJqmkQkAR/Jq7bzB3CkaNeMQwKBgQDCnN/5TRyRbq76AQok103S+/cuG0OCHtYG\njKYht1C5ljTqPBMOhJDqQAt+7YFYNJjY9omGUXwSlqopVaQHkPh24Px2k5uTSddA\nwtBncgliLpYJ6q/opr3TMAnZqX7oNzKqKWNk71D6tR9A85QR9ugd/yuRtyo/77+j\nAPernIirDQKBgHPgcIvXhsWAv1Hp7aWyQ5+N0BjszXG8p/QBURmr2XIjvQBqY2lt\nXtm3NJq6X997ycKaGbrv6Co00P3aeqailhzEkcFvPhBxm87JKd7rAu8tJO4egMj3\n6baKMirFLQJFGQpW8n0c142Qcnxq4heWXyOp59sP4U1cgvKDMWTwz751AoGBALHO\n7vwyLhXaUNhutrvuU1afi7l+g3GRhu649tj7SjAmQvZ4KNks2dKM7XPswC6876Hb\nrs8gCAJb0eCuNI4NMFY9Pa2BM7Wg06CB3jOFh3lDIyawXERuhCU+UOGj8SVtuUar\nRfrkhNJrRcDsS68WLgGqMD6UYW/FNwxRR/htrpjJAoGBAJIQtnCfFSw2sdNMTIbg\nyR+F1plTUualSqKnWwDbR3rMj2iLWqCfbYcvBz0i2vupqzMn6oOolrfD8hP2hJ5x\n6foaVEm4XTdRUXPHFePmi65GY2lzYek2NXLVEhaaZY/vfEFD7Ab3hZhm+CGoLcXK\noiZ1aR6DYGxXHFOzKN3vM9Tr\n-----END PRIVATE KEY-----\n",
|
| 6 |
+
"client_email": "token04@honeypot-491918.iam.gserviceaccount.com",
|
| 7 |
+
"client_id": "102923772131939822524",
|
| 8 |
+
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
| 9 |
+
"token_uri": "https://oauth2.googleapis.com/token",
|
| 10 |
+
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
| 11 |
+
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/token04%40honeypot-491918.iam.gserviceaccount.com",
|
| 12 |
+
"universe_domain": "googleapis.com"
|
| 13 |
+
}
|