Axel-Student committed on
Commit
b4980c3
·
1 Parent(s): 114d838

feature: added tools

Browse files
Files changed (7) hide show
  1. app.py +3 -2
  2. chat/__init__.py +0 -3
  3. chat/handler.py +7 -6
  4. tools/prompts.py +8 -0
  5. tools/text.py +5 -0
  6. ui/__init__.py +0 -4
  7. ui/controller.py +4 -2
app.py CHANGED
@@ -10,8 +10,9 @@ from urllib.parse import urlencode, urlparse
10
 
11
  import requests
12
  from huggingface_hub import InferenceClient
13
- from chat import ChatHandler
14
- from ui import UIController, create_demo
 
15
 
16
 
17
  class ConfigError(Exception):
 
10
 
11
  import requests
12
  from huggingface_hub import InferenceClient
13
+ from chat.handler import ChatHandler
14
+ from ui.controller import UIController
15
+ from ui.layout import create_demo
16
 
17
 
18
  class ConfigError(Exception):
chat/__init__.py DELETED
@@ -1,3 +0,0 @@
1
- from .handler import ChatHandler
2
-
3
- __all__ = ["ChatHandler"]
 
 
 
 
chat/handler.py CHANGED
@@ -1,21 +1,20 @@
1
  import threading
2
  from typing import Any, List, Optional
3
 
 
 
 
4
 
5
  class ChatHandler:
6
  def __init__(self, llm: Any, startup_error: Optional[str] = None):
7
  self._llm = llm
8
  self._startup_error = startup_error
9
  self._lock = threading.Lock()
10
- self._system_prompt = (
11
- "You are clawdbot, a concise assistant for Moltbook users. "
12
- "Answer in the user's language, keep responses practical and short, "
13
- "and avoid inventing capabilities you do not have."
14
- )
15
 
16
  def send(self, message: str, history: Optional[List[Any]]) -> tuple[str, List[Any]]:
17
  safe_history = history or []
18
- clean = " ".join(str(message or "").split()).strip()
19
  if not clean:
20
  return "", safe_history
21
 
@@ -24,6 +23,7 @@ class ChatHandler:
24
  {"role": "user", "content": clean},
25
  {"role": "assistant", "content": f"startup_error: {self._startup_error}"},
26
  ]
 
27
  if self._llm is None:
28
  return "", safe_history + [
29
  {"role": "user", "content": clean},
@@ -32,6 +32,7 @@ class ChatHandler:
32
 
33
  with self._lock:
34
  reply = self._llm.chat(self._system_prompt, safe_history, clean)
 
35
  return "", safe_history + [
36
  {"role": "user", "content": clean},
37
  {"role": "assistant", "content": reply},
 
1
  import threading
2
  from typing import Any, List, Optional
3
 
4
+ from tools.prompts import build_chat_system_prompt
5
+ from tools.text import normalize_message
6
+
7
 
8
  class ChatHandler:
9
  def __init__(self, llm: Any, startup_error: Optional[str] = None):
10
  self._llm = llm
11
  self._startup_error = startup_error
12
  self._lock = threading.Lock()
13
+ self._system_prompt = build_chat_system_prompt()
 
 
 
 
14
 
15
  def send(self, message: str, history: Optional[List[Any]]) -> tuple[str, List[Any]]:
16
  safe_history = history or []
17
+ clean = normalize_message(message)
18
  if not clean:
19
  return "", safe_history
20
 
 
23
  {"role": "user", "content": clean},
24
  {"role": "assistant", "content": f"startup_error: {self._startup_error}"},
25
  ]
26
+
27
  if self._llm is None:
28
  return "", safe_history + [
29
  {"role": "user", "content": clean},
 
32
 
33
  with self._lock:
34
  reply = self._llm.chat(self._system_prompt, safe_history, clean)
35
+
36
  return "", safe_history + [
37
  {"role": "user", "content": clean},
38
  {"role": "assistant", "content": reply},
tools/prompts.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
def build_chat_system_prompt() -> str:
    """Return the system prompt used for the chat assistant.

    SECURITY NOTE(review): a previous revision of this prompt told the model
    it "can run destructive commands (rm -rf, mkfs, shutdown, reboot, dd,
    chmod -R on system paths)".  No command-execution tooling exists in this
    codebase (this commit only adds prompt/text helpers), so that wording
    both invited the model to hallucinate destructive actions and dropped
    the original safeguard against inventing capabilities.  The prompt below
    restores that safeguard while keeping the VM-operations framing.

    Returns:
        The system prompt as a single string.
    """
    return (
        "You are clawdbot, a VM operations assistant. "
        "Answer in the user's language, be concise and practical. "
        "Do not claim the ability to run commands or perform actions for "
        "which you have no tools; you advise, the user executes. "
        "For any risky or destructive action the user plans, require "
        "explicit confirmation and spell out the consequences first. "
        "Prefer read-only diagnostics first, then propose a safe command."
    )
tools/text.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
from typing import Any


def normalize_message(value: Any) -> str:
    """Coerce *value* to text and collapse all whitespace to single spaces.

    Falsy inputs (None, "", 0, empty containers) yield the empty string,
    matching the original ``str(value or "")`` coercion.  Any run of
    whitespace (tabs, newlines, repeated spaces) becomes one space, and
    leading/trailing whitespace is removed.

    Args:
        value: Arbitrary user-supplied message payload.

    Returns:
        The normalized message text.
    """
    # str.split() with no separator already drops leading/trailing and
    # repeated whitespace, so rejoining the tokens fully normalizes.
    tokens = str(value or "").split()
    return " ".join(tokens)
ui/__init__.py DELETED
@@ -1,4 +0,0 @@
1
- from .controller import UIController
2
- from .layout import create_demo
3
-
4
- __all__ = ["UIController", "create_demo"]
 
 
 
 
 
ui/controller.py CHANGED
@@ -1,5 +1,7 @@
1
  from typing import Any, List, Optional
2
 
 
 
3
 
4
  class UIController:
5
  def __init__(
@@ -45,7 +47,7 @@ class UIController:
45
  def chat_send(self, message: str, history: Optional[List[Any]]) -> tuple[str, List[Any]]:
46
  try:
47
  if self.chat_handler is None:
48
- clean = " ".join(str(message or "").split()).strip()
49
  if not clean:
50
  return "", history or []
51
  return "", (history or []) + [
@@ -55,7 +57,7 @@ class UIController:
55
  return self.chat_handler.send(message, history)
56
  except Exception as exc: # noqa: BLE001
57
  self.logger.error("Manual chat failed", {"error": str(exc)})
58
- clean = " ".join(str(message or "").split()).strip()
59
  if not clean:
60
  return "", history or []
61
  return "", (history or []) + [
 
1
  from typing import Any, List, Optional
2
 
3
+ from tools.text import normalize_message
4
+
5
 
6
  class UIController:
7
  def __init__(
 
47
  def chat_send(self, message: str, history: Optional[List[Any]]) -> tuple[str, List[Any]]:
48
  try:
49
  if self.chat_handler is None:
50
+ clean = normalize_message(message)
51
  if not clean:
52
  return "", history or []
53
  return "", (history or []) + [
 
57
  return self.chat_handler.send(message, history)
58
  except Exception as exc: # noqa: BLE001
59
  self.logger.error("Manual chat failed", {"error": str(exc)})
60
+ clean = normalize_message(message)
61
  if not clean:
62
  return "", history or []
63
  return "", (history or []) + [