Create run.py
Browse files
run.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import torch
|
| 3 |
+
|
# 1. Load the model configuration
# JSON is UTF-8 by spec; pass the encoding explicitly so the read does not
# depend on the platform's locale default (and matches the tokenizer open
# below, which already does this).
with open('config.json', 'r', encoding='utf-8') as f:
    config = json.load(f)

# 2. Load the vocabulary (character -> id mapping)
with open('tokenizer_config.json', 'r', encoding='utf-8') as f:
    tokenizer_data = json.load(f)
stoi = tokenizer_data['stoi']
# Inverse mapping (id -> character), used to turn ids back into text.
itos = {i: ch for ch, i in stoi.items()}
def encode(s):
    """Return the list of vocabulary ids for every character in *s*.

    A character that is missing from the vocabulary falls back to the id
    of the space character.
    """
    ids = []
    for ch in s:
        ids.append(stoi.get(ch, stoi[" "]))
    return ids
def decode(l):
    """Rebuild a string from a list of token ids; unknown ids become ''."""
    chars = [itos.get(token_id, "") for token_id in l]
    return "".join(chars)
| 16 |
+
|
# 3. Build the model
# NOTE(review): MedicalMasterAI is neither defined nor imported anywhere in
# this file — as written, this line raises NameError at runtime. It needs an
# import from the module that defines the model class (TODO: confirm which
# module that is and add the import at the top of the file).
model = MedicalMasterAI(config)
| 19 |
+
|
# 4. Try the random test string that was sent
random_string = "LنIBkيظقcظزSرoIeD!OxMعه*kDNO]وzOبXقآلt(بdأfk."
encoded_ids = encode(random_string)
print(f"Encoded IDs: {encoded_ids}")
# Round-trip check: characters absent from the vocabulary were encoded as the
# space id, so the decoded text may not match the input exactly.
print(f"Decoded Text: {decode(encoded_ids)}")

# Simulate passing the data through the model
# Wrap the id list in an outer list to form a batch of size 1.
input_tensor = torch.tensor([encoded_ids])
# no_grad: inference only, skip autograd bookkeeping.
with torch.no_grad():
    logits = model(input_tensor)
    # The print label implies the model returns raw logits shaped
    # (batch, sequence, vocab) — assumption; confirm against the model class.
    print(f"Output shape (Batch, Seq, Vocab): {logits.shape}")
|