# ClauseGuard/ml/export_onnx.py
"""
ClauseGuard β€” Export fine-tuned Legal-BERT to ONNX for fast inference.
Requires: pip install optimum[onnxruntime]
"""
import os
import sys
MODEL_PATH = os.environ.get("MODEL_PATH", "./clauseguard-model/final")
ONNX_OUTPUT = os.environ.get("ONNX_OUTPUT", "./clauseguard-model-onnx")
print(f"πŸ“¦ Exporting {MODEL_PATH} β†’ ONNX at {ONNX_OUTPUT}")
try:
from optimum.onnxruntime import ORTModelForSequenceClassification
from transformers import AutoTokenizer
# Load PyTorch model and export to ONNX
model = ORTModelForSequenceClassification.from_pretrained(MODEL_PATH, export=True)
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
# Save ONNX model + tokenizer + config
model.save_pretrained(ONNX_OUTPUT)
tokenizer.save_pretrained(ONNX_OUTPUT)
print(f"βœ… ONNX model saved to {ONNX_OUTPUT}")
print(f" Files: {os.listdir(ONNX_OUTPUT)}")
# Verify inference works
from transformers import pipeline
classifier = pipeline("text-classification", model=model, tokenizer=tokenizer, top_k=None)
test = classifier("The company may terminate your account at any time without notice.")
print(f" Test inference: {test}")
except ImportError:
print("❌ Install optimum: pip install optimum[onnxruntime]")
sys.exit(1)
except Exception as e:
print(f"❌ Export failed: {e}")
sys.exit(1)