Aksel Joonas Reedi commited on
Commit
af1a664
·
2 Parent(s): 1675094fc4f247

lol yolo merge

Browse files

Copying mcp tools

.gitignore CHANGED
@@ -13,4 +13,6 @@ wheels/
13
  .claude/
14
  *.jsonl
15
  *.csv
16
- /logs
 
 
 
13
  .claude/
14
  *.jsonl
15
  *.csv
16
+ /logs
17
+ hf-agent-leaderboard/
18
+ .cursor/
agent/core/tools.py CHANGED
@@ -3,7 +3,6 @@ Tool system for the agent
3
  Provides ToolSpec and ToolRouter for managing both built-in and MCP tools
4
  """
5
 
6
- import subprocess
7
  import warnings
8
  from dataclasses import dataclass
9
  from typing import Any, Awaitable, Callable, Optional
@@ -13,12 +12,15 @@ from lmnr import observe
13
  from mcp.types import EmbeddedResource, ImageContent, TextContent
14
 
15
  from agent.config import MCPServerConfig
 
16
 
17
  # Suppress aiohttp deprecation warning
18
  warnings.filterwarnings(
19
  "ignore", category=DeprecationWarning, module="aiohttp.connector"
20
  )
21
 
 
 
22
 
23
  def convert_mcp_content_to_string(content: list) -> str:
24
  """
@@ -104,6 +106,10 @@ class ToolRouter:
104
  async def register_mcp_tools(self) -> None:
105
  tools = await self.mcp_client.list_tools()
106
  for tool in tools:
 
 
 
 
107
  self.register_tool(
108
  ToolSpec(
109
  name=tool.name,
@@ -173,93 +179,14 @@ class ToolRouter:
173
  # ============================================================================
174
 
175
 
176
- async def bash_handler(arguments: dict[str, Any]) -> tuple[str, bool]:
177
- """Execute bash command"""
178
- try:
179
- command = arguments.get("command", "")
180
- result = subprocess.run(
181
- command, shell=True, capture_output=True, text=True, timeout=30
182
- )
183
- output = result.stdout if result.returncode == 0 else result.stderr
184
- success = result.returncode == 0
185
- return output, success
186
- except Exception as e:
187
- return f"Error: {str(e)}", False
188
-
189
-
190
- async def read_file_handler(arguments: dict[str, Any]) -> tuple[str, bool]:
191
- """Read file contents"""
192
- try:
193
- path = arguments.get("path", "")
194
- with open(path, "r") as f:
195
- content = f.read()
196
- return content, True
197
- except Exception as e:
198
- return f"Error reading file: {str(e)}", False
199
-
200
-
201
- async def write_file_handler(arguments: dict[str, Any]) -> tuple[str, bool]:
202
- """Write to file"""
203
- try:
204
- path = arguments.get("path", "")
205
- content = arguments.get("content", "")
206
- with open(path, "w") as f:
207
- f.write(content)
208
- return f"Successfully wrote to {path}", True
209
- except Exception as e:
210
- return f"Error writing file: {str(e)}", False
211
-
212
-
213
  def create_builtin_tools() -> list[ToolSpec]:
214
  """Create built-in tool specifications"""
 
215
  return [
216
  ToolSpec(
217
- name="bash",
218
- description="Execute a bash command and return its output",
219
- parameters={
220
- "type": "object",
221
- "properties": {
222
- "command": {
223
- "type": "string",
224
- "description": "The bash command to execute",
225
- }
226
- },
227
- "required": ["command"],
228
- },
229
- handler=bash_handler,
230
- ),
231
- ToolSpec(
232
- name="read_file",
233
- description="Read the contents of a file",
234
- parameters={
235
- "type": "object",
236
- "properties": {
237
- "path": {
238
- "type": "string",
239
- "description": "Path to the file to read",
240
- }
241
- },
242
- "required": ["path"],
243
- },
244
- handler=read_file_handler,
245
- ),
246
- ToolSpec(
247
- name="write_file",
248
- description="Write content to a file",
249
- parameters={
250
- "type": "object",
251
- "properties": {
252
- "path": {
253
- "type": "string",
254
- "description": "Path to the file to write",
255
- },
256
- "content": {
257
- "type": "string",
258
- "description": "Content to write to the file",
259
- },
260
- },
261
- "required": ["path", "content"],
262
- },
263
- handler=write_file_handler,
264
  ),
265
  ]
 
3
  Provides ToolSpec and ToolRouter for managing both built-in and MCP tools
4
  """
5
 
 
6
  import warnings
7
  from dataclasses import dataclass
8
  from typing import Any, Awaitable, Callable, Optional
 
12
  from mcp.types import EmbeddedResource, ImageContent, TextContent
13
 
14
  from agent.config import MCPServerConfig
15
+ from agent.tools.jobs_tool import HF_JOBS_TOOL_SPEC, hf_jobs_handler
16
 
17
  # Suppress aiohttp deprecation warning
18
  warnings.filterwarnings(
19
  "ignore", category=DeprecationWarning, module="aiohttp.connector"
20
  )
21
 
22
+ NOT_ALLOWED_TOOL_NAMES = ["hf_jobs"]
23
+
24
 
25
  def convert_mcp_content_to_string(content: list) -> str:
26
  """
 
106
  async def register_mcp_tools(self) -> None:
107
  tools = await self.mcp_client.list_tools()
108
  for tool in tools:
109
+ if tool.name in NOT_ALLOWED_TOOL_NAMES:
110
+ print(f"Skipping not MCP allowed tool: {tool.name}")
111
+ continue
112
+ print(f"MCP Tool: {tool.name}")
113
  self.register_tool(
114
  ToolSpec(
115
  name=tool.name,
 
179
  # ============================================================================
180
 
181
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
182
  def create_builtin_tools() -> list[ToolSpec]:
183
  """Create built-in tool specifications"""
184
+ print(f"Creating built-in tools: {HF_JOBS_TOOL_SPEC['name']}")
185
  return [
186
  ToolSpec(
187
+ name=HF_JOBS_TOOL_SPEC["name"],
188
+ description=HF_JOBS_TOOL_SPEC["description"],
189
+ parameters=HF_JOBS_TOOL_SPEC["parameters"],
190
+ handler=hf_jobs_handler,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
191
  ),
192
  ]
agent/main.py CHANGED
@@ -3,6 +3,7 @@ Interactive CLI chat with the agent
3
  """
4
 
5
  import asyncio
 
6
  import os
7
  from dataclasses import dataclass
8
  from pathlib import Path
@@ -64,8 +65,11 @@ async def event_listener(
64
  print(f"\n🤖 Assistant: {content}")
65
  elif event.event_type == "tool_call":
66
  tool_name = event.data.get("tool", "") if event.data else ""
 
67
  if tool_name:
68
- print(f"🔧 Calling tool: {tool_name}")
 
 
69
  elif event.event_type == "tool_output":
70
  output = event.data.get("output", "") if event.data else ""
71
  success = event.data.get("success", False) if event.data else False
 
3
  """
4
 
5
  import asyncio
6
+ import json
7
  import os
8
  from dataclasses import dataclass
9
  from pathlib import Path
 
65
  print(f"\n🤖 Assistant: {content}")
66
  elif event.event_type == "tool_call":
67
  tool_name = event.data.get("tool", "") if event.data else ""
68
+ arguments = event.data.get("arguments", {}) if event.data else {}
69
  if tool_name:
70
+ print(
71
+ f"🔧 Calling tool: {tool_name} with arguments: {json.dumps(arguments)[:100]}..."
72
+ )
73
  elif event.event_type == "tool_output":
74
  output = event.data.get("output", "") if event.data else ""
75
  success = event.data.get("success", False) if event.data else False
agent/tests/__init__.py DELETED
@@ -1,3 +0,0 @@
1
- """
2
- Test suite for HF Agent
3
- """
 
 
 
 
agent/tests/unit/test_base.py DELETED
@@ -1,10 +0,0 @@
1
- """
2
- Unit tests for base agent components
3
- """
4
-
5
-
6
- def test_base_agent_initialization():
7
- """Test that BaseAgent can be initialized with config"""
8
- # This will fail because BaseAgent is abstract
9
- # Subclasses should implement this
10
- pass
 
 
 
 
 
 
 
 
 
 
 
agent/tools/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Hugging Face tools for the agent
3
+ """
4
+
5
+ from agent.tools.jobs_tool import HF_JOBS_TOOL_SPEC, HfJobsTool, hf_jobs_handler
6
+ from agent.tools.types import ToolResult
7
+
8
+ __all__ = ["ToolResult", "HF_JOBS_TOOL_SPEC", "hf_jobs_handler", HfJobsTool]
agent/tools/jobs_tool.py ADDED
@@ -0,0 +1,836 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Hugging Face Jobs Tool - Using huggingface-hub library
3
+
4
+ Refactored to use official huggingface-hub library instead of custom HTTP client
5
+ """
6
+
7
+ import asyncio
8
+ import base64
9
+ from typing import Any, Dict, Literal, Optional
10
+
11
+ from huggingface_hub import HfApi
12
+ from huggingface_hub.utils import HfHubHTTPError
13
+
14
+ from agent.tools.types import ToolResult
15
+ from agent.tools.utilities import (
16
+ format_job_details,
17
+ format_jobs_table,
18
+ format_scheduled_job_details,
19
+ format_scheduled_jobs_table,
20
+ )
21
+
22
# Hardware flavors accepted by the HF Jobs API, grouped by category.
CPU_FLAVORS = ["cpu-basic", "cpu-upgrade", "cpu-performance", "cpu-xl"]
GPU_FLAVORS = [
    "sprx8",
    "zero-a10g",
    "t4-small",
    "t4-medium",
    "l4x1",
    "l4x4",
    "l40sx1",
    "l40sx4",
    "l40sx8",
    "a10g-small",
    "a10g-large",
    "a10g-largex2",
    "a10g-largex4",
    "a100-large",
    "h100",
    "h100x8",
]
SPECIALIZED_FLAVORS = ["inf2x6"]
# Union of every selectable flavor (CPU + GPU + specialized accelerators).
ALL_FLAVORS = CPU_FLAVORS + GPU_FLAVORS + SPECIALIZED_FLAVORS

# Operation names
# Closed set of operation strings routed by HfJobsTool.execute().
OperationType = Literal[
    "run",
    "uv",
    "ps",
    "logs",
    "inspect",
    "cancel",
    "scheduled run",
    "scheduled uv",
    "scheduled ps",
    "scheduled inspect",
    "scheduled delete",
    "scheduled suspend",
    "scheduled resume",
]

# Constants
# Seconds of log-tailing advertised in the help text for non-detached jobs.
DEFAULT_LOG_WAIT_SECONDS = 10
# Docker image used for all `uv` operations (ships the uv launcher + Python 3.12).
UV_DEFAULT_IMAGE = "ghcr.io/astral-sh/uv:python3.12-bookworm"
65
+
66
+
67
+ def _build_uv_command(
68
+ script: str,
69
+ with_deps: list[str] | None = None,
70
+ python: str | None = None,
71
+ script_args: list[str] | None = None,
72
+ ) -> list[str]:
73
+ """Build UV run command"""
74
+ parts = ["uv", "run"]
75
+
76
+ if with_deps:
77
+ for dep in with_deps:
78
+ parts.extend(["--with", dep])
79
+
80
+ if python:
81
+ parts.extend(["-p", python])
82
+
83
+ parts.append(script)
84
+
85
+ if script_args:
86
+ parts.extend(script_args)
87
+
88
+ return parts
89
+
90
+
91
def _wrap_inline_script(
    script: str,
    with_deps: list[str] | None = None,
    python: str | None = None,
    script_args: list[str] | None = None,
) -> str:
    """Turn inline Python source into a single shell command.

    The source is base64-encoded so it survives shell quoting untouched,
    then decoded on the job side and piped into ``uv run -`` (stdin) —
    no temporary file ever has to be created.
    """
    payload = base64.b64encode(script.encode("utf-8")).decode("utf-8")
    # "-" tells uv to read the script from stdin.
    runner = " ".join(_build_uv_command("-", with_deps, python, script_args))
    return f'echo "{payload}" | base64 -d | {runner}'
104
+
105
+
106
def _resolve_uv_command(
    script: str,
    with_deps: list[str] | None = None,
    python: str | None = None,
    script_args: list[str] | None = None,
) -> list[str]:
    """Choose the right ``uv run`` command for a script reference.

    ``script`` may be an http(s) URL, multi-line inline source, or a file
    path. URLs and paths are handed to ``uv run`` directly; inline source
    (detected by an embedded newline) is shipped via a base64/stdin shell
    pipeline so no file needs to exist on the job container.
    """
    is_url = script.startswith("http://") or script.startswith("https://")
    if not is_url and "\n" in script:
        wrapped = _wrap_inline_script(script, with_deps, python, script_args)
        # -l gives a login shell; -c executes the pipeline string.
        return ["/bin/sh", "-lc", wrapped]
    return _build_uv_command(script, with_deps, python, script_args)
124
+
125
+
126
+ async def _async_call(func, *args, **kwargs):
127
+ """Wrap synchronous HfApi calls for async context"""
128
+ return await asyncio.to_thread(func, *args, **kwargs)
129
+
130
+
131
+ def _job_info_to_dict(job_info) -> Dict[str, Any]:
132
+ """Convert JobInfo object to dictionary for formatting functions"""
133
+ return {
134
+ "id": job_info.id,
135
+ "status": {"stage": job_info.status.stage, "message": job_info.status.message},
136
+ "command": job_info.command,
137
+ "createdAt": job_info.created_at.isoformat(),
138
+ "dockerImage": job_info.docker_image,
139
+ "spaceId": job_info.space_id,
140
+ "flavor": job_info.flavor,
141
+ "owner": {"name": job_info.owner.name},
142
+ }
143
+
144
+
145
+ def _scheduled_job_info_to_dict(scheduled_job_info) -> Dict[str, Any]:
146
+ """Convert ScheduledJobInfo object to dictionary for formatting functions"""
147
+ job_spec = scheduled_job_info.job_spec
148
+
149
+ # Extract last run and next run from status
150
+ last_run = None
151
+ next_run = None
152
+ if scheduled_job_info.status:
153
+ if scheduled_job_info.status.last_job:
154
+ last_run = scheduled_job_info.status.last_job.created_at
155
+ if last_run:
156
+ last_run = (
157
+ last_run.isoformat()
158
+ if hasattr(last_run, "isoformat")
159
+ else str(last_run)
160
+ )
161
+ if scheduled_job_info.status.next_job_run_at:
162
+ next_run = scheduled_job_info.status.next_job_run_at
163
+ next_run = (
164
+ next_run.isoformat()
165
+ if hasattr(next_run, "isoformat")
166
+ else str(next_run)
167
+ )
168
+
169
+ return {
170
+ "id": scheduled_job_info.id,
171
+ "schedule": scheduled_job_info.schedule,
172
+ "suspend": scheduled_job_info.suspend,
173
+ "lastRun": last_run,
174
+ "nextRun": next_run,
175
+ "jobSpec": {
176
+ "dockerImage": job_spec.docker_image,
177
+ "spaceId": job_spec.space_id,
178
+ "command": job_spec.command or [],
179
+ "flavor": job_spec.flavor or "cpu-basic",
180
+ },
181
+ }
182
+
183
+
184
+ class HfJobsTool:
185
+ """Tool for managing Hugging Face compute jobs using huggingface-hub library"""
186
+
187
    def __init__(self, hf_token: Optional[str] = None, namespace: Optional[str] = None):
        """Initialize the tool.

        Args:
            hf_token: Hugging Face access token handed to HfApi; may be
                None (HfApi then applies its own token resolution —
                TODO confirm against huggingface_hub docs).
            namespace: Default user/org namespace applied to every
                operation unless a call overrides it via args["namespace"].
        """
        # HfApi methods are synchronous; call sites wrap them with
        # the module-level _async_call() helper.
        self.api = HfApi(token=hf_token)
        self.namespace = namespace
190
+
191
+ async def execute(self, params: Dict[str, Any]) -> ToolResult:
192
+ """Execute the specified operation"""
193
+ operation = params.get("operation")
194
+ args = params.get("args", {})
195
+
196
+ # If no operation provided, return usage instructions
197
+ if not operation:
198
+ return self._show_help()
199
+
200
+ # Normalize operation name
201
+ operation = operation.lower()
202
+
203
+ # Check if help is requested
204
+ if args.get("help"):
205
+ return self._show_operation_help(operation)
206
+
207
+ try:
208
+ # Route to appropriate handler
209
+ if operation == "run":
210
+ return await self._run_job(args)
211
+ elif operation == "uv":
212
+ return await self._run_uv_job(args)
213
+ elif operation == "ps":
214
+ return await self._list_jobs(args)
215
+ elif operation == "logs":
216
+ return await self._get_logs(args)
217
+ elif operation == "inspect":
218
+ return await self._inspect_job(args)
219
+ elif operation == "cancel":
220
+ return await self._cancel_job(args)
221
+ elif operation == "scheduled run":
222
+ return await self._scheduled_run(args)
223
+ elif operation == "scheduled uv":
224
+ return await self._scheduled_uv(args)
225
+ elif operation == "scheduled ps":
226
+ return await self._list_scheduled_jobs(args)
227
+ elif operation == "scheduled inspect":
228
+ return await self._inspect_scheduled_job(args)
229
+ elif operation == "scheduled delete":
230
+ return await self._delete_scheduled_job(args)
231
+ elif operation == "scheduled suspend":
232
+ return await self._suspend_scheduled_job(args)
233
+ elif operation == "scheduled resume":
234
+ return await self._resume_scheduled_job(args)
235
+ else:
236
+ return {
237
+ "formatted": f'Unknown operation: "{operation}"\n\n'
238
+ "Available operations:\n"
239
+ "- run, uv, ps, logs, inspect, cancel\n"
240
+ "- scheduled run, scheduled uv, scheduled ps, scheduled inspect, "
241
+ "scheduled delete, scheduled suspend, scheduled resume\n\n"
242
+ "Call this tool with no operation for full usage instructions.",
243
+ "totalResults": 0,
244
+ "resultsShared": 0,
245
+ "isError": True,
246
+ }
247
+
248
+ except HfHubHTTPError as e:
249
+ return {
250
+ "formatted": f"API Error: {str(e)}",
251
+ "totalResults": 0,
252
+ "resultsShared": 0,
253
+ "isError": True,
254
+ }
255
+ except Exception as e:
256
+ return {
257
+ "formatted": f"Error executing {operation}: {str(e)}",
258
+ "totalResults": 0,
259
+ "resultsShared": 0,
260
+ "isError": True,
261
+ }
262
+
263
    def _show_help(self) -> ToolResult:
        """Show usage instructions when tool is called with no arguments.

        Returns a single markdown document covering every operation, the
        available hardware flavors (built from the module-level flavor
        lists), and command-format guidance.
        """
        cpu_flavors_list = ", ".join(CPU_FLAVORS)
        gpu_flavors_list = ", ".join(GPU_FLAVORS)
        specialized_flavors_list = ", ".join(SPECIALIZED_FLAVORS)

        # GPU/specialized sections are only rendered when non-empty.
        hardware_section = f"**CPU:** {cpu_flavors_list}\n"
        if GPU_FLAVORS:
            hardware_section += f"**GPU:** {gpu_flavors_list}\n"
        if SPECIALIZED_FLAVORS:
            hardware_section += f"**Specialized:** {specialized_flavors_list}"

        # NOTE: doubled braces ({{ }}) render literal braces in the JSON examples.
        usage_text = f"""# HuggingFace Jobs API

Manage compute jobs on Hugging Face infrastructure.

## Available Commands

### Job Management
- **run** - Run a job with a Docker image
- **uv** - Run a Python script with UV (inline dependencies)
- **ps** - List jobs
- **logs** - Fetch job logs
- **inspect** - Get detailed job information
- **cancel** - Cancel a running job

### Scheduled Jobs
- **scheduled run** - Create a scheduled job
- **scheduled uv** - Create a scheduled UV job
- **scheduled ps** - List scheduled jobs
- **scheduled inspect** - Get scheduled job details
- **scheduled delete** - Delete a scheduled job
- **scheduled suspend** - Pause a scheduled job
- **scheduled resume** - Resume a suspended job

## Examples

### Run a simple job
Call this tool with:
```json
{{
  "operation": "run",
  "args": {{
    "image": "python:3.12",
    "command": ["python", "-c", "print('Hello from HF Jobs!')"],
    "flavor": "cpu-basic"
  }}
}}
```

### Run a Python script with UV
Call this tool with:
```json
{{
  "operation": "uv",
  "args": {{
    "script": "import random\\nprint(42 + random.randint(1, 5))"
  }}
}}
```

## Hardware Flavors

{hardware_section}

## Command Format Guidelines

**Array format (default):**
- Recommended for every command—JSON keeps arguments intact (URLs with `&`, spaces, etc.)
- Use `["/bin/sh", "-lc", "..."]` when you need shell operators like `&&`, `|`, or redirections
- Works with any language: Python, bash, node, npm, uv, etc.

**String format (simple cases only):**
- Still accepted for backwards compatibility, parsed with POSIX shell semantics
- Rejects shell operators and can mis-handle characters such as `&`; switch to arrays when things turn complex
- `$HF_TOKEN` stays literal—forward it via `secrets: {{ "HF_TOKEN": "$HF_TOKEN" }}`

### Show command-specific help
Call this tool with:
```json
{{"operation": "<operation>", "args": {{"help": true}}}}
```

## Tips

- Jobs default to non-detached mode (tail logs for up to {DEFAULT_LOG_WAIT_SECONDS}s or until completion). Set `detach: true` to return immediately.
- Prefer array commands to avoid shell parsing surprises
- To access private Hub assets, include `secrets: {{ "HF_TOKEN": "$HF_TOKEN" }}` to inject your auth token.
"""
        return {"formatted": usage_text, "totalResults": 1, "resultsShared": 1}
353
+
354
+ def _show_operation_help(self, operation: str) -> ToolResult:
355
+ """Show help for a specific operation"""
356
+ help_text = f"Help for operation: {operation}\n\nCall with appropriate arguments. Use the main help for examples."
357
+ return {"formatted": help_text, "totalResults": 1, "resultsShared": 1}
358
+
359
    async def _run_job(self, args: Dict[str, Any]) -> ToolResult:
        """Run a job using HfApi.run_job().

        Recognized args: image, command, env, secrets, flavor, timeout,
        namespace, detach. Defaults: python:3.12 image, cpu-basic flavor,
        30m timeout.

        Raises:
            Exception: wraps any failure as "Failed to run job: ...".
        """
        try:
            # run_job is sync; hop to a worker thread via _async_call.
            job = await _async_call(
                self.api.run_job,
                image=args.get("image", "python:3.12"),
                command=args.get("command"),
                env=args.get("env"),
                secrets=args.get("secrets"),
                flavor=args.get("flavor", "cpu-basic"),
                timeout=args.get("timeout", "30m"),
                namespace=args.get("namespace") or self.namespace,
            )

            # If detached, return immediately
            if args.get("detach", False):
                response = f"""Job started successfully!

**Job ID:** {job.id}
**Status:** {job.status.stage}
**View at:** {job.url}

To check logs, call this tool with `{{"operation": "logs", "args": {{"job_id": "{job.id}"}}}}`
To inspect, call this tool with `{{"operation": "inspect", "args": {{"job_id": "{job.id}"}}}}`"""
                return {"formatted": response, "totalResults": 1, "resultsShared": 1}

            # Not detached - return job info
            # NOTE(review): no actual log tailing happens here despite the
            # non-detached default advertised in the help text.
            response = f"""Job started: {job.id}

**Status:** {job.status.stage}
**View logs at:** {job.url}

Note: Logs are being collected. Check the job page for real-time logs.
"""
            return {"formatted": response, "totalResults": 1, "resultsShared": 1}

        except Exception as e:
            raise Exception(f"Failed to run job: {str(e)}")
397
+
398
    async def _run_uv_job(self, args: Dict[str, Any]) -> ToolResult:
        """Run UV job with inline script support (no local files needed).

        Recognized args: script (required; URL, inline source, or path),
        with_deps/dependencies, python, script_args, env, secrets, flavor,
        timeout, namespace.

        Raises:
            Exception: wraps any failure as "Failed to run UV job: ...".
        """
        try:
            script = args.get("script")
            if not script:
                raise ValueError("script is required")

            # Resolve the command based on script type (URL, inline, or file)
            command = _resolve_uv_command(
                script=script,
                # "dependencies" is accepted as an alias for "with_deps".
                with_deps=args.get("with_deps") or args.get("dependencies"),
                python=args.get("python"),
                script_args=args.get("script_args"),
            )

            # Use run_job with UV image instead of run_uv_job
            job = await _async_call(
                self.api.run_job,
                image=UV_DEFAULT_IMAGE,
                command=command,
                env=args.get("env"),
                secrets=args.get("secrets"),
                flavor=args.get("flavor", "cpu-basic"),
                timeout=args.get("timeout", "30m"),
                namespace=args.get("namespace") or self.namespace,
            )

            response = f"""UV Job started: {job.id}

**Status:** {job.status.stage}
**View at:** {job.url}

To check logs, call this tool with `{{"operation": "logs", "args": {{"job_id": "{job.id}"}}}}`
"""
            return {"formatted": response, "totalResults": 1, "resultsShared": 1}

        except Exception as e:
            raise Exception(f"Failed to run UV job: {str(e)}")
436
+
437
    async def _list_jobs(self, args: Dict[str, Any]) -> ToolResult:
        """List jobs using HfApi.list_jobs().

        Recognized args: namespace, all (include non-running jobs),
        status (substring match against the job's stage, upper-cased).

        Returns a markdown table of matching jobs, or a hint message when
        nothing matches.
        """
        jobs_list = await _async_call(
            self.api.list_jobs, namespace=args.get("namespace") or self.namespace
        )

        # Filter jobs
        # Default view: only RUNNING jobs unless args["all"] is truthy.
        if not args.get("all", False):
            jobs_list = [j for j in jobs_list if j.status.stage == "RUNNING"]

        if args.get("status"):
            status_filter = args["status"].upper()
            jobs_list = [j for j in jobs_list if status_filter in j.status.stage]

        # Convert JobInfo objects to dicts for formatting
        jobs_dicts = [_job_info_to_dict(j) for j in jobs_list]

        table = format_jobs_table(jobs_dicts)

        if len(jobs_list) == 0:
            if args.get("all", False):
                return {
                    "formatted": "No jobs found.",
                    "totalResults": 0,
                    "resultsShared": 0,
                }
            return {
                "formatted": 'No running jobs found. Use `{"args": {"all": true}}` to show all jobs.',
                "totalResults": 0,
                "resultsShared": 0,
            }

        response = f"**Jobs ({len(jobs_list)} total):**\n\n{table}"
        return {
            "formatted": response,
            "totalResults": len(jobs_list),
            "resultsShared": len(jobs_list),
        }
475
+
476
    async def _get_logs(self, args: Dict[str, Any]) -> ToolResult:
        """Fetch logs using HfApi.fetch_job_logs().

        Recognized args: job_id (required), namespace. Failures are
        returned as error-shaped results, never raised.
        """
        job_id = args.get("job_id")
        if not job_id:
            return {
                "formatted": "job_id is required",
                "isError": True,
                "totalResults": 0,
                "resultsShared": 0,
            }

        try:
            # Fetch logs (returns generator, convert to list)
            # Creating the generator is cheap; the blocking iteration is
            # what gets pushed onto a worker thread below.
            logs_gen = self.api.fetch_job_logs(
                job_id=job_id, namespace=args.get("namespace") or self.namespace
            )
            logs = await _async_call(list, logs_gen)

            if not logs:
                return {
                    "formatted": f"No logs available for job {job_id}",
                    "totalResults": 0,
                    "resultsShared": 0,
                }

            log_text = "\n".join(logs)
            return {
                "formatted": f"**Logs for {job_id}:**\n\n```\n{log_text}\n```",
                "totalResults": 1,
                "resultsShared": 1,
            }

        except Exception as e:
            return {
                "formatted": f"Failed to fetch logs: {str(e)}",
                "isError": True,
                "totalResults": 0,
                "resultsShared": 0,
            }
515
+
516
    async def _inspect_job(self, args: Dict[str, Any]) -> ToolResult:
        """Inspect job using HfApi.inspect_job().

        Recognized args: job_id (a single id or a list of ids; required),
        namespace.

        Raises:
            Exception: on the first id that fails to inspect.
        """
        job_id = args.get("job_id")
        if not job_id:
            return {
                "formatted": "job_id is required",
                "totalResults": 0,
                "resultsShared": 0,
                "isError": True,
            }

        # Accept both a single id and a list of ids.
        job_ids = job_id if isinstance(job_id, list) else [job_id]

        jobs = []
        for jid in job_ids:
            try:
                job = await _async_call(
                    self.api.inspect_job,
                    job_id=jid,
                    namespace=args.get("namespace") or self.namespace,
                )
                jobs.append(_job_info_to_dict(job))
            except Exception as e:
                # Fail fast: one bad id aborts the whole inspection.
                raise Exception(f"Failed to inspect job {jid}: {str(e)}")

        formatted_details = format_job_details(jobs)
        response = f"**Job Details** ({len(jobs)} job{'s' if len(jobs) > 1 else ''}):\n\n{formatted_details}"

        return {
            "formatted": response,
            "totalResults": len(jobs),
            "resultsShared": len(jobs),
        }
549
+
550
    async def _cancel_job(self, args: Dict[str, Any]) -> ToolResult:
        """Cancel job using HfApi.cancel_job().

        Recognized args: job_id (required), namespace. API errors
        propagate to execute(), which converts them into error results.
        """
        job_id = args.get("job_id")
        if not job_id:
            return {
                "formatted": "job_id is required",
                "totalResults": 0,
                "resultsShared": 0,
                "isError": True,
            }

        await _async_call(
            self.api.cancel_job,
            job_id=job_id,
            namespace=args.get("namespace") or self.namespace,
        )

        response = f"""✓ Job {job_id} has been cancelled.

To verify, call this tool with `{{"operation": "inspect", "args": {{"job_id": "{job_id}"}}}}`"""

        return {"formatted": response, "totalResults": 1, "resultsShared": 1}
572
+
573
    async def _scheduled_run(self, args: Dict[str, Any]) -> ToolResult:
        """Create scheduled job using HfApi.create_scheduled_job().

        Recognized args: image, command, schedule, env, secrets, flavor,
        timeout, namespace. Note: unlike _scheduled_uv, a missing
        schedule is not validated here and is passed through as None.

        Raises:
            Exception: wraps any failure as
                "Failed to create scheduled job: ...".
        """
        try:
            scheduled_job = await _async_call(
                self.api.create_scheduled_job,
                image=args.get("image", "python:3.12"),
                command=args.get("command"),
                schedule=args.get("schedule"),
                env=args.get("env"),
                secrets=args.get("secrets"),
                flavor=args.get("flavor", "cpu-basic"),
                timeout=args.get("timeout", "30m"),
                namespace=args.get("namespace") or self.namespace,
            )

            scheduled_dict = _scheduled_job_info_to_dict(scheduled_job)

            response = f"""✓ Scheduled job created successfully!

**Scheduled Job ID:** {scheduled_dict["id"]}
**Schedule:** {scheduled_dict["schedule"]}
**Suspended:** {"Yes" if scheduled_dict.get("suspend") else "No"}
**Next Run:** {scheduled_dict.get("nextRun", "N/A")}

To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_dict["id"]}"}}}}`
To list all, call this tool with `{{"operation": "scheduled ps"}}`"""

            return {"formatted": response, "totalResults": 1, "resultsShared": 1}

        except Exception as e:
            raise Exception(f"Failed to create scheduled job: {str(e)}")
604
+
605
    async def _scheduled_uv(self, args: Dict[str, Any]) -> ToolResult:
        """Create scheduled UV job with inline script support.

        Recognized args: script (required), schedule (required),
        with_deps/dependencies, python, script_args, env, secrets,
        flavor, timeout, namespace.

        Raises:
            Exception: wraps any failure as
                "Failed to create scheduled UV job: ...".
        """
        try:
            script = args.get("script")
            if not script:
                raise ValueError("script is required")

            schedule = args.get("schedule")
            if not schedule:
                raise ValueError("schedule is required")

            # Resolve the command based on script type
            command = _resolve_uv_command(
                script=script,
                with_deps=args.get("with_deps") or args.get("dependencies"),
                python=args.get("python"),
                script_args=args.get("script_args"),
            )

            # Use create_scheduled_job with UV image
            scheduled_job = await _async_call(
                self.api.create_scheduled_job,
                image=UV_DEFAULT_IMAGE,
                command=command,
                schedule=schedule,
                env=args.get("env"),
                secrets=args.get("secrets"),
                flavor=args.get("flavor", "cpu-basic"),
                timeout=args.get("timeout", "30m"),
                namespace=args.get("namespace") or self.namespace,
            )

            scheduled_dict = _scheduled_job_info_to_dict(scheduled_job)

            response = f"""✓ Scheduled UV job created successfully!

**Scheduled Job ID:** {scheduled_dict["id"]}
**Schedule:** {scheduled_dict["schedule"]}
**Suspended:** {"Yes" if scheduled_dict.get("suspend") else "No"}
**Next Run:** {scheduled_dict.get("nextRun", "N/A")}

To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_dict["id"]}"}}}}`"""

            return {"formatted": response, "totalResults": 1, "resultsShared": 1}

        except Exception as e:
            raise Exception(f"Failed to create scheduled UV job: {str(e)}")
652
+
653
    async def _list_scheduled_jobs(self, args: Dict[str, Any]) -> ToolResult:
        """List scheduled jobs using HfApi.list_scheduled_jobs().

        Recognized args: namespace, all (include suspended jobs).
        Returns a markdown table or a hint when nothing matches.
        """
        scheduled_jobs_list = await _async_call(
            self.api.list_scheduled_jobs,
            namespace=args.get("namespace") or self.namespace,
        )

        # Filter jobs - default: hide suspended jobs unless --all is specified
        if not args.get("all", False):
            scheduled_jobs_list = [j for j in scheduled_jobs_list if not j.suspend]

        # Convert to dicts for formatting
        scheduled_dicts = [_scheduled_job_info_to_dict(j) for j in scheduled_jobs_list]

        table = format_scheduled_jobs_table(scheduled_dicts)

        if len(scheduled_jobs_list) == 0:
            if args.get("all", False):
                return {
                    "formatted": "No scheduled jobs found.",
                    "totalResults": 0,
                    "resultsShared": 0,
                }
            return {
                "formatted": 'No active scheduled jobs found. Use `{"args": {"all": true}}` to show suspended jobs.',
                "totalResults": 0,
                "resultsShared": 0,
            }

        response = f"**Scheduled Jobs ({len(scheduled_jobs_list)} total):**\n\n{table}"
        return {
            "formatted": response,
            "totalResults": len(scheduled_jobs_list),
            "resultsShared": len(scheduled_jobs_list),
        }
688
+
689
+ async def _inspect_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
690
+ """Inspect scheduled job using HfApi.inspect_scheduled_job()"""
691
+ scheduled_job_id = args.get("scheduled_job_id")
692
+ if not scheduled_job_id:
693
+ return {
694
+ "formatted": "scheduled_job_id is required",
695
+ "totalResults": 0,
696
+ "resultsShared": 0,
697
+ "isError": True,
698
+ }
699
+
700
+ scheduled_job = await _async_call(
701
+ self.api.inspect_scheduled_job,
702
+ scheduled_job_id=scheduled_job_id,
703
+ namespace=args.get("namespace") or self.namespace,
704
+ )
705
+
706
+ scheduled_dict = _scheduled_job_info_to_dict(scheduled_job)
707
+ formatted_details = format_scheduled_job_details(scheduled_dict)
708
+
709
+ return {
710
+ "formatted": f"**Scheduled Job Details:**\n\n{formatted_details}",
711
+ "totalResults": 1,
712
+ "resultsShared": 1,
713
+ }
714
+
715
+ async def _delete_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
716
+ """Delete scheduled job using HfApi.delete_scheduled_job()"""
717
+ scheduled_job_id = args.get("scheduled_job_id")
718
+ if not scheduled_job_id:
719
+ return {
720
+ "formatted": "scheduled_job_id is required",
721
+ "totalResults": 0,
722
+ "resultsShared": 0,
723
+ "isError": True,
724
+ }
725
+
726
+ await _async_call(
727
+ self.api.delete_scheduled_job,
728
+ scheduled_job_id=scheduled_job_id,
729
+ namespace=args.get("namespace") or self.namespace,
730
+ )
731
+
732
+ return {
733
+ "formatted": f"✓ Scheduled job {scheduled_job_id} has been deleted.",
734
+ "totalResults": 1,
735
+ "resultsShared": 1,
736
+ }
737
+
738
+ async def _suspend_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
739
+ """Suspend scheduled job using HfApi.suspend_scheduled_job()"""
740
+ scheduled_job_id = args.get("scheduled_job_id")
741
+ if not scheduled_job_id:
742
+ return {
743
+ "formatted": "scheduled_job_id is required",
744
+ "totalResults": 0,
745
+ "resultsShared": 0,
746
+ "isError": True,
747
+ }
748
+
749
+ await _async_call(
750
+ self.api.suspend_scheduled_job,
751
+ scheduled_job_id=scheduled_job_id,
752
+ namespace=args.get("namespace") or self.namespace,
753
+ )
754
+
755
+ response = f"""✓ Scheduled job {scheduled_job_id} has been suspended.
756
+
757
+ To resume, call this tool with `{{"operation": "scheduled resume", "args": {{"scheduled_job_id": "{scheduled_job_id}"}}}}`"""
758
+
759
+ return {"formatted": response, "totalResults": 1, "resultsShared": 1}
760
+
761
+ async def _resume_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
762
+ """Resume scheduled job using HfApi.resume_scheduled_job()"""
763
+ scheduled_job_id = args.get("scheduled_job_id")
764
+ if not scheduled_job_id:
765
+ return {
766
+ "formatted": "scheduled_job_id is required",
767
+ "totalResults": 0,
768
+ "resultsShared": 0,
769
+ "isError": True,
770
+ }
771
+
772
+ await _async_call(
773
+ self.api.resume_scheduled_job,
774
+ scheduled_job_id=scheduled_job_id,
775
+ namespace=args.get("namespace") or self.namespace,
776
+ )
777
+
778
+ response = f"""✓ Scheduled job {scheduled_job_id} has been resumed.
779
+
780
+ To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_job_id}"}}}}`"""
781
+
782
+ return {"formatted": response, "totalResults": 1, "resultsShared": 1}
783
+
784
+
785
# Tool specification for agent registration
# Schema (name / description / JSON-schema parameters) used when registering
# the hf_jobs tool with the agent; executed via hf_jobs_handler below.
HF_JOBS_TOOL_SPEC = {
    "name": "hf_jobs",
    "description": (
        "Manage Hugging Face CPU/GPU compute jobs. Run commands in Docker containers, "
        "execute Python scripts with UV. List, schedule and monitor jobs/logs. "
        "Call this tool with no operation for full usage instructions and examples."
    ),
    "parameters": {
        "type": "object",
        "properties": {
            # Which sub-command to run; omitting it returns usage help.
            "operation": {
                "type": "string",
                "enum": [
                    "run",
                    "uv",
                    "ps",
                    "logs",
                    "inspect",
                    "cancel",
                    "scheduled run",
                    "scheduled uv",
                    "scheduled ps",
                    "scheduled inspect",
                    "scheduled delete",
                    "scheduled suspend",
                    "scheduled resume",
                ],
                "description": (
                    "Operation to execute. Valid values: [run, uv, ps, logs, inspect, cancel, "
                    "scheduled run, scheduled uv, scheduled ps, scheduled inspect, scheduled delete, "
                    "scheduled suspend, scheduled resume]"
                ),
            },
            # Free-form per-operation arguments (e.g. job_id, image, command).
            "args": {
                "type": "object",
                "description": "Operation-specific arguments as a JSON object",
                "additionalProperties": True,
            },
        },
    },
}
827
+
828
+
829
async def hf_jobs_handler(arguments: Dict[str, Any]) -> tuple[str, bool]:
    """Handler for agent tool router"""
    try:
        outcome = await HfJobsTool().execute(arguments)
    except Exception as exc:
        # Convert any failure into the router's (message, success) shape.
        return f"Error executing HF Jobs tool: {str(exc)}", False
    return outcome["formatted"], not outcome.get("isError", False)
agent/tools/types.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Types for Hugging Face tools
3
+
4
+ Ported from: hf-mcp-server/packages/mcp/src/types/
5
+ """
6
+
7
+ from typing import TypedDict
8
+
9
+
10
class ToolResult(TypedDict, total=False):
    """Result returned by HF tool operations"""

    # Human-readable (markdown) rendering of the operation outcome.
    formatted: str
    # Number of results found for the operation.
    totalResults: int
    # Number of results included in the formatted output.
    resultsShared: int
    # Present and True when the operation failed.
    isError: bool
agent/tools/utilities.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Utility functions for Hugging Face tools
3
+
4
+ Ported from: hf-mcp-server/packages/mcp/src/jobs/formatters.ts
5
+ """
6
+
7
+ from datetime import datetime
8
+ from typing import Any, Dict, List, Optional
9
+
10
+
11
def truncate(text: str, max_length: int) -> str:
    """Truncate *text* to at most *max_length* characters.

    Strings that fit are returned unchanged; longer strings are cut and
    suffixed with "..." so the result is exactly ``max_length`` characters.

    Args:
        text: The string to shorten.
        max_length: Maximum allowed length of the result (>= 0).

    Returns:
        The original or shortened string.
    """
    if len(text) <= max_length:
        return text
    if max_length < 3:
        # Too small to fit an ellipsis; a plain cut is the only option.
        # (The previous `text[: max_length - 3]` used a negative slice here
        # and produced results LONGER than max_length.)
        return text[:max_length]
    return text[: max_length - 3] + "..."
16
+
17
+
18
def format_date(date_str: Optional[str]) -> str:
    """Render an ISO-8601 timestamp as ``YYYY-MM-DD HH:MM:SS``.

    Returns "N/A" for missing values and echoes the raw input back when it
    cannot be parsed.
    """
    if not date_str:
        return "N/A"
    try:
        # fromisoformat() does not accept a trailing "Z"; normalize it first.
        parsed = datetime.fromisoformat(date_str.replace("Z", "+00:00"))
    except Exception:
        return date_str
    return parsed.strftime("%Y-%m-%d %H:%M:%S")
27
+
28
+
29
def format_command(command: Optional[List[str]]) -> str:
    """Join a command argv list into one display string ("N/A" if empty/None)."""
    return " ".join(command) if command else "N/A"
34
+
35
+
36
def get_image_or_space(job: Dict[str, Any]) -> str:
    """Return the job's Space id if set, else its Docker image, else "N/A"."""
    for key in ("spaceId", "dockerImage"):
        value = job.get(key)
        if value:
            return value
    return "N/A"
43
+
44
+
45
def format_jobs_table(jobs: List[Dict[str, Any]]) -> str:
    """Render a list of job dicts as a fixed-width markdown table."""
    if not jobs:
        return "No jobs found."

    # The ID column grows with the longest id so ids are never cut off.
    id_width = max(max(len(job["id"]) for job in jobs), len("JOB ID"))
    columns = [
        ("JOB ID", id_width),
        ("IMAGE/SPACE", 20),
        ("COMMAND", 30),
        ("CREATED", 19),
        ("STATUS", 12),
    ]

    def render_row(values):
        padded = (value.ljust(width) for value, (_, width) in zip(values, columns))
        return "| " + " | ".join(padded) + " |"

    lines = [
        render_row([label for label, _ in columns]),
        "|" + "|".join("-" * (width + 2) for _, width in columns) + "|",
    ]
    for job in jobs:
        lines.append(
            render_row(
                [
                    job["id"],
                    truncate(get_image_or_space(job), 20),
                    truncate(format_command(job.get("command")), 30),
                    truncate(format_date(job.get("createdAt")), 19),
                    truncate(job["status"]["stage"], 12),
                ]
            )
        )
    return "\n".join(lines)
81
+
82
+
83
def format_scheduled_jobs_table(jobs: List[Dict[str, Any]]) -> str:
    """Render scheduled-job dicts as a fixed-width markdown table."""
    if not jobs:
        return "No scheduled jobs found."

    # The ID column grows with the longest id so ids are never cut off.
    id_width = max(max(len(job["id"]) for job in jobs), len("ID"))
    columns = [
        ("ID", id_width),
        ("SCHEDULE", 12),
        ("IMAGE/SPACE", 18),
        ("COMMAND", 25),
        ("LAST RUN", 19),
        ("NEXT RUN", 19),
        ("SUSPENDED", 9),
    ]

    def render_row(values):
        padded = (value.ljust(width) for value, (_, width) in zip(values, columns))
        return "| " + " | ".join(padded) + " |"

    lines = [
        render_row([label for label, _ in columns]),
        "|" + "|".join("-" * (width + 2) for _, width in columns) + "|",
    ]
    for job in jobs:
        spec = job["jobSpec"]
        lines.append(
            render_row(
                [
                    job["id"],
                    truncate(job["schedule"], 12),
                    truncate(get_image_or_space(spec), 18),
                    truncate(format_command(spec.get("command")), 25),
                    truncate(format_date(job.get("lastRun")), 19),
                    truncate(format_date(job.get("nextRun")), 19),
                    "Yes" if job.get("suspend") else "No",
                ]
            )
        )
    return "\n".join(lines)
125
+
126
+
127
def format_job_details(jobs: Any) -> str:
    """Format job details as JSON in a markdown code block"""
    import json

    # Always emit a JSON array, even for a single job.
    if not isinstance(jobs, list):
        jobs = [jobs]
    return "```json\n" + json.dumps(jobs, indent=2) + "\n```"
134
+
135
+
136
def format_scheduled_job_details(jobs: Any) -> str:
    """Format scheduled job details as JSON in a markdown code block"""
    import json

    # Always emit a JSON array, even for a single scheduled job.
    if not isinstance(jobs, list):
        jobs = [jobs]
    return "```json\n" + json.dumps(jobs, indent=2) + "\n```"
pyproject.toml CHANGED
@@ -19,4 +19,5 @@ dependencies = [
19
  "lmnr[all]>=0.7.23",
20
  "transformers>=2.3.0",
21
  "torch>=2.9.1",
 
22
  ]
 
19
  "lmnr[all]>=0.7.23",
20
  "transformers>=2.3.0",
21
  "torch>=2.9.1",
22
+ "pytest>=9.0.2",
23
  ]
tests/__init__.py ADDED
File without changes
tests/integration/__init__.py ADDED
File without changes
tests/integration/tools/__init__.py ADDED
File without changes
tests/integration/tools/test_jobs_integration.py ADDED
@@ -0,0 +1,452 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Integration tests for refactored HF Jobs Tool
4
+ Tests with real HF API using HF_TOKEN from environment
5
+ """
6
+ import os
7
+ import sys
8
+ import asyncio
9
+ import time
10
+
11
+ # Add parent directory to path
12
+ sys.path.insert(0, '.')
13
+
14
+ from agent.tools.jobs_tool import HfJobsTool
15
+
16
# ANSI color codes for better output
GREEN = '\033[92m'   # success messages
YELLOW = '\033[93m'  # warnings / section banners
RED = '\033[91m'     # errors
BLUE = '\033[94m'    # informational / test names
RESET = '\033[0m'    # restore default terminal color
22
+
23
+
24
def print_test(msg):
    """Print test message in blue"""
    print("{}[TEST]{} {}".format(BLUE, RESET, msg))
27
+
28
+
29
def print_success(msg):
    """Print success message in green"""
    print("{}✓{} {}".format(GREEN, RESET, msg))
32
+
33
+
34
def print_warning(msg):
    """Print warning message in yellow"""
    print("{}⚠{} {}".format(YELLOW, RESET, msg))
37
+
38
+
39
def print_error(msg):
    """Print error message in red"""
    print("{}✗{} {}".format(RED, RESET, msg))
42
+
43
+
44
async def test_basic_job_run(tool):
    """Run a simple detached container job and return its job id (or None)."""
    print_test("Running a simple Python job...")

    payload = {
        "operation": "run",
        "args": {
            "image": "python:3.12",
            "command": ["python", "-c", "print('Hello from HF Jobs!')"],
            "flavor": "cpu-basic",
            "timeout": "5m",
            "detach": True,  # Don't wait for completion
        },
    }
    result = await tool.execute(payload)

    if result.get("isError"):
        print_error(f"Failed to run job: {result['formatted']}")
        return None

    # Extract job ID from response
    import re
    match = re.search(r'\*\*Job ID:\*\* (\S+)', result['formatted'])
    if match is None:
        print_error("Could not extract job ID from response")
        return None

    job_id = match.group(1)
    print_success(f"Job started with ID: {job_id}")
    return job_id
73
+
74
+
75
async def test_list_jobs(tool):
    """List running jobs; return True on success."""
    print_test("Listing running jobs...")

    result = await tool.execute({"operation": "ps", "args": {}})

    if result.get("isError"):
        print_error(f"Failed to list jobs: {result['formatted']}")
        return False

    print_success(f"Listed jobs: {result['totalResults']} running")
    if result['totalResults'] > 0:
        # Show only a preview of the (potentially long) table.
        print(f" {result['formatted'][:200]}...")
    return True
92
+
93
+
94
async def test_inspect_job(tool, job_id):
    """Inspect a specific job; return True on success."""
    print_test(f"Inspecting job {job_id}...")

    result = await tool.execute({"operation": "inspect", "args": {"job_id": job_id}})

    if not result.get("isError"):
        print_success(f"Inspected job successfully")
        return True

    print_error(f"Failed to inspect job: {result['formatted']}")
    return False
111
+
112
+
113
async def test_get_logs(tool, job_id):
    """Fetch a job's logs; return True if logs were retrieved."""
    print_test(f"Fetching logs for job {job_id}...")

    # Wait a bit for logs to be available
    await asyncio.sleep(2)

    result = await tool.execute({"operation": "logs", "args": {"job_id": job_id}})

    if result.get("isError"):
        print_warning(f"Could not fetch logs (might be too early): {result['formatted'][:100]}")
        return False

    print_success(f"Fetched logs successfully")
    if "Hello from HF Jobs!" in result['formatted']:
        print_success(" Found expected output in logs!")
    return True
135
+
136
+
137
async def test_cancel_job(tool, job_id):
    """Cancel a job; return True on success."""
    print_test(f"Cancelling job {job_id}...")

    result = await tool.execute({"operation": "cancel", "args": {"job_id": job_id}})

    if not result.get("isError"):
        print_success(f"Cancelled job successfully")
        return True

    print_error(f"Failed to cancel job: {result['formatted']}")
    return False
154
+
155
+
156
async def test_uv_job(tool):
    """Run a detached UV script job and return its job id (or None)."""
    print_test("Running a UV Python script job...")

    payload = {
        "operation": "uv",
        "args": {
            "script": "print('Hello from UV!')\nimport sys\nprint(f'Python version: {sys.version}')",
            "flavor": "cpu-basic",
            "timeout": "5m",
            "detach": True,
        },
    }
    result = await tool.execute(payload)

    if result.get("isError"):
        print_error(f"Failed to run UV job: {result['formatted']}")
        return None

    # Extract job ID
    import re
    match = re.search(r'UV Job started: (\S+)', result['formatted'])
    if match is None:
        print_error("Could not extract job ID from response")
        return None

    uv_job_id = match.group(1)
    print_success(f"UV job started with ID: {uv_job_id}")
    return uv_job_id
184
+
185
+
186
async def test_list_all_jobs(tool):
    """List all jobs, including completed ones; return True on success."""
    print_test("Listing all jobs (including completed)...")

    result = await tool.execute({"operation": "ps", "args": {"all": True}})

    if not result.get("isError"):
        print_success(f"Listed all jobs: {result['totalResults']} total")
        return True

    print_error(f"Failed to list all jobs: {result['formatted']}")
    return False
203
+
204
+
205
async def test_scheduled_job(tool):
    """Create a daily scheduled job and return its id (or None)."""
    print_test("Creating a scheduled job (daily at midnight)...")

    payload = {
        "operation": "scheduled run",
        "args": {
            "image": "python:3.12",
            "command": ["python", "-c", "print('Scheduled job running!')"],
            "schedule": "@daily",
            "flavor": "cpu-basic",
            "timeout": "5m",
        },
    }
    result = await tool.execute(payload)

    if result.get("isError"):
        print_error(f"Failed to create scheduled job: {result['formatted']}")
        return None

    # Extract scheduled job ID
    import re
    match = re.search(r'\*\*Scheduled Job ID:\*\* (\S+)', result['formatted'])
    if match is None:
        print_error("Could not extract scheduled job ID")
        return None

    scheduled_job_id = match.group(1)
    print_success(f"Scheduled job created with ID: {scheduled_job_id}")
    return scheduled_job_id
234
+
235
+
236
async def test_list_scheduled_jobs(tool):
    """List active scheduled jobs; return True on success."""
    print_test("Listing scheduled jobs...")

    result = await tool.execute({"operation": "scheduled ps", "args": {}})

    if not result.get("isError"):
        print_success(f"Listed scheduled jobs: {result['totalResults']} active")
        return True

    print_error(f"Failed to list scheduled jobs: {result['formatted']}")
    return False
251
+
252
+
253
async def test_inspect_scheduled_job(tool, scheduled_job_id):
    """Inspect a scheduled job; return True on success."""
    print_test(f"Inspecting scheduled job {scheduled_job_id}...")

    result = await tool.execute({
        "operation": "scheduled inspect",
        "args": {"scheduled_job_id": scheduled_job_id},
    })

    if not result.get("isError"):
        print_success(f"Inspected scheduled job successfully")
        return True

    print_error(f"Failed to inspect scheduled job: {result['formatted']}")
    return False
270
+
271
+
272
async def test_suspend_scheduled_job(tool, scheduled_job_id):
    """Suspend a scheduled job; return True on success."""
    print_test(f"Suspending scheduled job {scheduled_job_id}...")

    result = await tool.execute({
        "operation": "scheduled suspend",
        "args": {"scheduled_job_id": scheduled_job_id},
    })

    if not result.get("isError"):
        print_success(f"Suspended scheduled job successfully")
        return True

    print_error(f"Failed to suspend scheduled job: {result['formatted']}")
    return False
289
+
290
+
291
async def test_resume_scheduled_job(tool, scheduled_job_id):
    """Resume a suspended scheduled job; return True on success."""
    print_test(f"Resuming scheduled job {scheduled_job_id}...")

    result = await tool.execute({
        "operation": "scheduled resume",
        "args": {"scheduled_job_id": scheduled_job_id},
    })

    if not result.get("isError"):
        print_success(f"Resumed scheduled job successfully")
        return True

    print_error(f"Failed to resume scheduled job: {result['formatted']}")
    return False
308
+
309
+
310
async def test_delete_scheduled_job(tool, scheduled_job_id):
    """Delete a scheduled job; return True on success."""
    print_test(f"Deleting scheduled job {scheduled_job_id}...")

    result = await tool.execute({
        "operation": "scheduled delete",
        "args": {"scheduled_job_id": scheduled_job_id},
    })

    if not result.get("isError"):
        print_success(f"Deleted scheduled job successfully")
        return True

    print_error(f"Failed to delete scheduled job: {result['formatted']}")
    return False
327
+
328
+
329
async def main():
    """Run all integration tests against the real HF Jobs API.

    Requires HF_TOKEN in the environment. Creates real (short-lived) jobs
    and scheduled jobs, exercises every operation, and attempts best-effort
    cleanup before exiting non-zero on failure.
    """
    print("=" * 70)
    print(f"{BLUE}HF Jobs Tool - Integration Tests{RESET}")
    print("=" * 70)
    print()

    # Check for HF_TOKEN — these tests hit the live API and cannot run without it.
    hf_token = os.environ.get('HF_TOKEN')
    if not hf_token:
        print_error("HF_TOKEN not found in environment variables!")
        print_warning("Set it with: export HF_TOKEN='your_token_here'")
        sys.exit(1)

    print_success(f"Found HF_TOKEN (length: {len(hf_token)})")
    print()

    # Initialize tool with token
    tool = HfJobsTool(hf_token=hf_token)

    # Track job IDs for cleanup
    job_ids = []
    scheduled_job_ids = []

    try:
        # Test Suite 1: Regular Jobs
        print(f"\n{YELLOW}{'=' * 70}{RESET}")
        print(f"{YELLOW}Test Suite 1: Regular Jobs{RESET}")
        print(f"{YELLOW}{'=' * 70}{RESET}\n")

        job_id = await test_basic_job_run(tool)
        if job_id:
            job_ids.append(job_id)

            # Wait a moment for job to register
            await asyncio.sleep(1)

            # List / inspect / logs / cancel against the freshly started job.
            await test_list_jobs(tool)
            await test_inspect_job(tool, job_id)
            await test_get_logs(tool, job_id)
            await test_cancel_job(tool, job_id)

        # UV job: start detached, then cancel as cleanup.
        print()
        uv_job_id = await test_uv_job(tool)
        if uv_job_id:
            job_ids.append(uv_job_id)
            await asyncio.sleep(1)
            await test_cancel_job(tool, uv_job_id)

        # Listing with `all` includes completed/cancelled jobs.
        print()
        await test_list_all_jobs(tool)

        # Test Suite 2: Scheduled Jobs
        print(f"\n{YELLOW}{'=' * 70}{RESET}")
        print(f"{YELLOW}Test Suite 2: Scheduled Jobs{RESET}")
        print(f"{YELLOW}{'=' * 70}{RESET}\n")

        scheduled_job_id = await test_scheduled_job(tool)
        if scheduled_job_id:
            scheduled_job_ids.append(scheduled_job_id)

            # Wait a moment for job to register
            await asyncio.sleep(1)

            # Exercise every scheduled-job operation, deleting at the end.
            await test_list_scheduled_jobs(tool)
            print()
            await test_inspect_scheduled_job(tool, scheduled_job_id)
            print()
            await test_suspend_scheduled_job(tool, scheduled_job_id)
            print()
            await test_resume_scheduled_job(tool, scheduled_job_id)
            print()
            await test_delete_scheduled_job(tool, scheduled_job_id)

        # Final summary
        print(f"\n{YELLOW}{'=' * 70}{RESET}")
        print(f"{GREEN}✓ All integration tests completed!{RESET}")
        print(f"{YELLOW}{'=' * 70}{RESET}\n")

        print_success("Refactored implementation works correctly with real HF API")
        print_success("All 13 operations tested and verified")
        print()
        print(f"{BLUE}Summary:{RESET}")
        print(f" • Regular jobs: ✓ run, list, inspect, logs, cancel")
        print(f" • UV jobs: ✓ run")
        print(f" • Scheduled jobs: ✓ create, list, inspect, suspend, resume, delete")
        print()

    except Exception as e:
        print_error(f"Test failed with exception: {str(e)}")
        import traceback
        traceback.print_exc()

        # Attempt cleanup. Best-effort: swallow only ordinary exceptions —
        # the previous bare `except:` also ate KeyboardInterrupt/SystemExit,
        # making a stuck cleanup impossible to interrupt.
        print(f"\n{YELLOW}Attempting cleanup...{RESET}")
        for job_id in job_ids:
            try:
                await test_cancel_job(tool, job_id)
            except Exception:
                pass

        for scheduled_job_id in scheduled_job_ids:
            try:
                await test_delete_scheduled_job(tool, scheduled_job_id)
            except Exception:
                pass

        sys.exit(1)
449
+
450
+
451
# Script entry point: run the full integration suite on the event loop.
if __name__ == "__main__":
    asyncio.run(main())
tests/unit/__init__.py ADDED
File without changes
tests/unit/tools/__init__.py ADDED
File without changes
tests/unit/tools/test_jobs_tool.py ADDED
@@ -0,0 +1,454 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for HF Jobs Tool
3
+
4
+ Tests the refactored jobs tool implementation using huggingface-hub library
5
+ """
6
+
7
+ from unittest.mock import AsyncMock, patch
8
+
9
+ import pytest
10
+
11
+ from agent.tools.jobs_tool import HfJobsTool, hf_jobs_handler
12
+
13
+
14
def create_mock_job_info(
    job_id="test-job-1",
    stage="RUNNING",
    command=None,
    docker_image="python:3.12",
):
    """Build a JobInfo fixture with sensible defaults for unit tests."""
    from huggingface_hub._jobs_api import JobInfo

    return JobInfo(
        id=job_id,
        created_at="2024-01-01T00:00:00.000000Z",
        docker_image=docker_image,
        space_id=None,
        # Default argv when the caller does not supply one.
        command=command if command is not None else ["echo", "test"],
        arguments=[],
        environment={},
        secrets={},
        flavor="cpu-basic",
        status={"stage": stage, "message": None},
        owner={"id": "123", "name": "test-user", "type": "user"},
        endpoint="https://huggingface.co",
        url=f"https://huggingface.co/jobs/test-user/{job_id}",
    )
41
+
42
+
43
def create_mock_scheduled_job_info(
    job_id="sched-job-1",
    schedule="@daily",
    suspend=False,
):
    """Build a ScheduledJobInfo fixture with sensible defaults for unit tests."""
    from huggingface_hub._jobs_api import ScheduledJobInfo

    job_spec = {
        "docker_image": "python:3.12",
        "space_id": None,
        "command": ["python", "backup.py"],
        "arguments": [],
        "environment": {},
        "secrets": {},
        "flavor": "cpu-basic",
        "timeout": 1800,
        "tags": None,
        "arch": None,
    }
    status = {
        "last_job": None,
        "next_job_run_at": "2024-01-02T00:00:00.000000Z",
    }
    return ScheduledJobInfo(
        id=job_id,
        created_at="2024-01-01T00:00:00.000000Z",
        job_spec=job_spec,
        schedule=schedule,
        suspend=suspend,
        concurrency=False,
        status=status,
        owner={"id": "123", "name": "test-user", "type": "user"},
    )
75
+
76
+
77
@pytest.mark.asyncio
async def test_show_help():
    """With no operation, the tool returns its usage/help text."""
    result = await HfJobsTool().execute({})

    assert "HuggingFace Jobs API" in result["formatted"]
    assert "Available Commands" in result["formatted"]
    assert result["totalResults"] == 1
    assert not result.get("isError", False)
87
+
88
+
89
@pytest.mark.asyncio
async def test_show_operation_help():
    """`help: true` in args returns operation-specific help."""
    result = await HfJobsTool().execute({"operation": "run", "args": {"help": True}})

    assert "Help for operation" in result["formatted"]
    assert result["totalResults"] == 1
97
+
98
+
99
@pytest.mark.asyncio
async def test_invalid_operation():
    """An unrecognized operation produces an error result."""
    result = await HfJobsTool().execute({"operation": "invalid_op"})

    assert result.get("isError") == True
    assert "Unknown operation" in result["formatted"]
107
+
108
+
109
@pytest.mark.asyncio
async def test_run_job_missing_command():
    """An API error during `run` surfaces as isError."""
    tool = HfJobsTool()

    # Mock HfApi.run_job to simulate a missing-parameter failure.
    with patch.object(tool.api, "run_job") as mock_run:
        mock_run.side_effect = Exception("command parameter is required")
        result = await tool.execute(
            {"operation": "run", "args": {"image": "python:3.12"}}
        )

    assert result.get("isError") == True
123
+
124
+
125
@pytest.mark.asyncio
async def test_list_jobs_mock():
    """`ps` hides terminal jobs by default and shows all with `all: true`."""
    tool = HfJobsTool()

    jobs = [
        create_mock_job_info("test-job-1", "RUNNING"),
        create_mock_job_info("test-job-2", "COMPLETED", ["python", "script.py"]),
    ]

    with patch.object(tool.api, "list_jobs") as mock_list:
        mock_list.return_value = jobs

        # Default view: only the running job is shown.
        result = await tool.execute({"operation": "ps"})
        assert not result.get("isError", False)
        assert "test-job-1" in result["formatted"]
        assert "test-job-2" not in result["formatted"]  # COMPLETED jobs filtered out
        assert result["totalResults"] == 1
        assert result["resultsShared"] == 1

        # `all: true`: completed jobs are included too.
        result = await tool.execute({"operation": "ps", "args": {"all": True}})
        assert not result.get("isError", False)
        assert "test-job-1" in result["formatted"]
        assert "test-job-2" in result["formatted"]
        assert result["totalResults"] == 2
        assert result["resultsShared"] == 2
157
+
158
+
159
@pytest.mark.asyncio
async def test_inspect_job_mock():
    """`inspect` formats the job returned by HfApi.inspect_job."""
    tool = HfJobsTool()

    with patch.object(tool.api, "inspect_job") as mock_inspect:
        mock_inspect.return_value = create_mock_job_info("test-job-1", "RUNNING")
        result = await tool.execute(
            {"operation": "inspect", "args": {"job_id": "test-job-1"}}
        )

    assert not result.get("isError", False)
    assert "test-job-1" in result["formatted"]
    assert "Job Details" in result["formatted"]
    mock_inspect.assert_called_once()
177
+
178
+
179
@pytest.mark.asyncio
async def test_cancel_job_mock():
    """Test cancel job with mock API"""
    tool = HfJobsTool()

    # HfApi.cancel_job returns nothing on success.
    with patch.object(tool.api, "cancel_job") as fake_cancel:
        fake_cancel.return_value = None

        result = await tool.execute(
            {"operation": "cancel", "args": {"job_id": "test-job-1"}}
        )

        assert not result.get("isError", False)
        message = result["formatted"]
        assert "cancelled" in message
        assert "test-job-1" in message
        fake_cancel.assert_called_once()
195
+
196
+
197
@pytest.mark.asyncio
async def test_run_job_mock():
    """Test run job with mock API"""
    tool = HfJobsTool()
    fake_job = create_mock_job_info("new-job-123", "RUNNING")

    # Stub HfApi.run_job; the tool should surface the new job id.
    with patch.object(tool.api, "run_job") as fake_run:
        fake_run.return_value = fake_job

        request = {
            "operation": "run",
            "args": {
                "image": "python:3.12",
                "command": ["python", "-c", "print('test')"],
                "flavor": "cpu-basic",
                "detach": True,
            },
        }
        result = await tool.execute(request)

        assert not result.get("isError", False)
        assert "new-job-123" in result["formatted"]
        assert "Job started" in result["formatted"]
        fake_run.assert_called_once()
223
+
224
+
225
@pytest.mark.asyncio
async def test_run_uv_job_mock():
    """Test run UV job with mock API"""
    tool = HfJobsTool()
    fake_job = create_mock_job_info("uv-job-456", "RUNNING")

    # Stub HfApi.run_uv_job so the inline script is never executed.
    with patch.object(tool.api, "run_uv_job") as fake_run:
        fake_run.return_value = fake_job

        request = {
            "operation": "uv",
            "args": {
                "script": "print('Hello UV')",
                "flavor": "cpu-basic",
            },
        }
        result = await tool.execute(request)

        assert not result.get("isError", False)
        assert "uv-job-456" in result["formatted"]
        assert "UV Job started" in result["formatted"]
        fake_run.assert_called_once()
249
+
250
+
251
@pytest.mark.asyncio
async def test_get_logs_mock():
    """Test get logs with mock API"""
    tool = HfJobsTool()

    # fetch_job_logs yields log lines lazily; mimic that with a generator.
    def fake_lines():
        for line in ("Log line 1", "Log line 2", "Hello from HF Jobs!"):
            yield line

    with patch.object(tool.api, "fetch_job_logs") as fake_logs:
        fake_logs.return_value = fake_lines()

        result = await tool.execute(
            {"operation": "logs", "args": {"job_id": "test-job-1"}}
        )

        assert not result.get("isError", False)
        assert "Log line 1" in result["formatted"]
        assert "Hello from HF Jobs!" in result["formatted"]
272
+
273
+
274
@pytest.mark.asyncio
async def test_handler():
    """Test the handler function.

    Patches the HfJobsTool class so the instance the handler creates
    returns a canned result dict, then checks the (output, success)
    tuple contract of hf_jobs_handler on the success path.
    """
    with patch("agent.tools.jobs_tool.HfJobsTool") as MockTool:
        mock_tool_instance = MockTool.return_value
        # execute() is async, so it must be an AsyncMock to be awaitable.
        mock_tool_instance.execute = AsyncMock(
            return_value={
                "formatted": "Test output",
                "totalResults": 1,
                "resultsShared": 1,
                "isError": False,
            }
        )

        output, success = await hf_jobs_handler({"operation": "ps"})

        # `is True` rather than `== True` (flake8 E712): the handler's
        # contract is a real bool, not merely a truthy value.
        assert success is True
        assert "Test output" in output
292
+
293
+
294
@pytest.mark.asyncio
async def test_handler_error():
    """Test handler with error.

    Makes HfJobsTool construction raise, and verifies the handler
    converts the exception into a (message, False) failure tuple
    instead of propagating it.
    """
    with patch("agent.tools.jobs_tool.HfJobsTool") as MockTool:
        MockTool.side_effect = Exception("Test error")

        output, success = await hf_jobs_handler({})

        # `is False` rather than `== False` (flake8 E712).
        assert success is False
        assert "Error" in output
304
+
305
+
306
@pytest.mark.asyncio
async def test_scheduled_jobs_mock():
    """Test scheduled jobs operations with mock API"""
    tool = HfJobsTool()
    fake_sched = create_mock_scheduled_job_info()

    # Stub the scheduled-job listing endpoint.
    with patch.object(tool.api, "list_scheduled_jobs") as fake_list:
        fake_list.return_value = [fake_sched]

        result = await tool.execute({"operation": "scheduled ps"})

        assert not result.get("isError", False)
        listing = result["formatted"]
        assert "sched-job-1" in listing
        assert "Scheduled Jobs" in listing
322
+
323
+
324
@pytest.mark.asyncio
async def test_create_scheduled_job_mock():
    """Test create scheduled job with mock API"""
    tool = HfJobsTool()
    fake_sched = create_mock_scheduled_job_info()

    # Stub HfApi.create_scheduled_job; no real cron entry is made.
    with patch.object(tool.api, "create_scheduled_job") as fake_create:
        fake_create.return_value = fake_sched

        request = {
            "operation": "scheduled run",
            "args": {
                "image": "python:3.12",
                "command": ["python", "backup.py"],
                "schedule": "@daily",
                "flavor": "cpu-basic",
            },
        }
        result = await tool.execute(request)

        assert not result.get("isError", False)
        assert "sched-job-1" in result["formatted"]
        assert "Scheduled job created" in result["formatted"]
        fake_create.assert_called_once()
350
+
351
+
352
@pytest.mark.asyncio
async def test_inspect_scheduled_job_mock():
    """Test inspect scheduled job with mock API"""
    tool = HfJobsTool()
    fake_sched = create_mock_scheduled_job_info()

    # Stub the scheduled-job inspection endpoint.
    with patch.object(tool.api, "inspect_scheduled_job") as fake_inspect:
        fake_inspect.return_value = fake_sched

        result = await tool.execute(
            {
                "operation": "scheduled inspect",
                "args": {"scheduled_job_id": "sched-job-1"},
            }
        )

        assert not result.get("isError", False)
        report = result["formatted"]
        assert "sched-job-1" in report
        assert "Scheduled Job Details" in report
372
+
373
+
374
@pytest.mark.asyncio
async def test_suspend_scheduled_job_mock():
    """Test suspend scheduled job with mock API"""
    tool = HfJobsTool()

    # suspend_scheduled_job returns nothing on success.
    with patch.object(tool.api, "suspend_scheduled_job") as fake_suspend:
        fake_suspend.return_value = None

        result = await tool.execute(
            {
                "operation": "scheduled suspend",
                "args": {"scheduled_job_id": "sched-job-1"},
            }
        )

        assert not result.get("isError", False)
        message = result["formatted"]
        assert "suspended" in message
        assert "sched-job-1" in message
392
+
393
+
394
@pytest.mark.asyncio
async def test_resume_scheduled_job_mock():
    """Test resume scheduled job with mock API"""
    tool = HfJobsTool()

    # resume_scheduled_job returns nothing on success.
    with patch.object(tool.api, "resume_scheduled_job") as fake_resume:
        fake_resume.return_value = None

        result = await tool.execute(
            {
                "operation": "scheduled resume",
                "args": {"scheduled_job_id": "sched-job-1"},
            }
        )

        assert not result.get("isError", False)
        message = result["formatted"]
        assert "resumed" in message
        assert "sched-job-1" in message
412
+
413
+
414
@pytest.mark.asyncio
async def test_delete_scheduled_job_mock():
    """Test delete scheduled job with mock API"""
    tool = HfJobsTool()

    # delete_scheduled_job returns nothing on success.
    with patch.object(tool.api, "delete_scheduled_job") as fake_delete:
        fake_delete.return_value = None

        result = await tool.execute(
            {
                "operation": "scheduled delete",
                "args": {"scheduled_job_id": "sched-job-1"},
            }
        )

        assert not result.get("isError", False)
        message = result["formatted"]
        assert "deleted" in message
        assert "sched-job-1" in message
432
+
433
+
434
@pytest.mark.asyncio
async def test_list_jobs_with_status_filter():
    """Test list jobs with status filter"""
    tool = HfJobsTool()

    # One job per status so the filter has something to discard.
    jobs = [
        create_mock_job_info("job-1", "RUNNING"),
        create_mock_job_info("job-2", "COMPLETED"),
        create_mock_job_info("job-3", "ERROR"),
    ]

    with patch.object(tool.api, "list_jobs") as fake_list:
        fake_list.return_value = jobs

        # Ask for every job, then narrow down to ERROR status only.
        result = await tool.execute(
            {"operation": "ps", "args": {"all": True, "status": "ERROR"}}
        )

        assert not result.get("isError", False)
        listing = result["formatted"]
        assert "job-3" in listing
        assert "job-1" not in listing
        assert result["resultsShared"] == 1
uv.lock CHANGED
@@ -893,6 +893,7 @@ dependencies = [
893
  { name = "numpy" },
894
  { name = "pandas" },
895
  { name = "pydantic" },
 
896
  { name = "python-dotenv" },
897
  { name = "requests" },
898
  { name = "tenacity" },
@@ -911,6 +912,7 @@ requires-dist = [
911
  { name = "numpy", specifier = ">=1.24.0" },
912
  { name = "pandas", specifier = ">=2.3.3" },
913
  { name = "pydantic", specifier = ">=2.12.3" },
 
914
  { name = "python-dotenv", specifier = ">=1.2.1" },
915
  { name = "requests", specifier = ">=2.32.5" },
916
  { name = "tenacity", specifier = ">=8.0.0" },
@@ -1098,6 +1100,15 @@ wheels = [
1098
  { url = "https://files.pythonhosted.org/packages/59/91/aa6bde563e0085a02a435aa99b49ef75b0a4b062635e606dab23ce18d720/inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2", size = 9454, upload-time = "2020-08-22T08:16:27.816Z" },
1099
  ]
1100
 
 
 
 
 
 
 
 
 
 
1101
  [[package]]
1102
  name = "inspect-ai"
1103
  version = "0.3.149"
@@ -2703,6 +2714,15 @@ wheels = [
2703
  { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" },
2704
  ]
2705
 
 
 
 
 
 
 
 
 
 
2706
  [[package]]
2707
  name = "ply"
2708
  version = "3.11"
@@ -3064,6 +3084,22 @@ wheels = [
3064
  { url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063, upload-time = "2025-09-26T14:40:36.069Z" },
3065
  ]
3066
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3067
  [[package]]
3068
  name = "python-dateutil"
3069
  version = "2.9.0.post0"
 
893
  { name = "numpy" },
894
  { name = "pandas" },
895
  { name = "pydantic" },
896
+ { name = "pytest" },
897
  { name = "python-dotenv" },
898
  { name = "requests" },
899
  { name = "tenacity" },
 
912
  { name = "numpy", specifier = ">=1.24.0" },
913
  { name = "pandas", specifier = ">=2.3.3" },
914
  { name = "pydantic", specifier = ">=2.12.3" },
915
+ { name = "pytest", specifier = ">=9.0.2" },
916
  { name = "python-dotenv", specifier = ">=1.2.1" },
917
  { name = "requests", specifier = ">=2.32.5" },
918
  { name = "tenacity", specifier = ">=8.0.0" },
 
1100
  { url = "https://files.pythonhosted.org/packages/59/91/aa6bde563e0085a02a435aa99b49ef75b0a4b062635e606dab23ce18d720/inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2", size = 9454, upload-time = "2020-08-22T08:16:27.816Z" },
1101
  ]
1102
 
1103
+ [[package]]
1104
+ name = "iniconfig"
1105
+ version = "2.3.0"
1106
+ source = { registry = "https://pypi.org/simple" }
1107
+ sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
1108
+ wheels = [
1109
+ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
1110
+ ]
1111
+
1112
  [[package]]
1113
  name = "inspect-ai"
1114
  version = "0.3.149"
 
2714
  { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" },
2715
  ]
2716
 
2717
+ [[package]]
2718
+ name = "pluggy"
2719
+ version = "1.6.0"
2720
+ source = { registry = "https://pypi.org/simple" }
2721
+ sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
2722
+ wheels = [
2723
+ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
2724
+ ]
2725
+
2726
  [[package]]
2727
  name = "ply"
2728
  version = "3.11"
 
3084
  { url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063, upload-time = "2025-09-26T14:40:36.069Z" },
3085
  ]
3086
 
3087
+ [[package]]
3088
+ name = "pytest"
3089
+ version = "9.0.2"
3090
+ source = { registry = "https://pypi.org/simple" }
3091
+ dependencies = [
3092
+ { name = "colorama", marker = "sys_platform == 'win32'" },
3093
+ { name = "iniconfig" },
3094
+ { name = "packaging" },
3095
+ { name = "pluggy" },
3096
+ { name = "pygments" },
3097
+ ]
3098
+ sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
3099
+ wheels = [
3100
+ { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
3101
+ ]
3102
+
3103
  [[package]]
3104
  name = "python-dateutil"
3105
  version = "2.9.0.post0"