# NOTE(review): "Spaces: Sleeping" lines were Hugging Face Spaces status text
# captured by the scraper, not code — kept here as a comment so the file parses.
| """ | |
| ClauseGuard β Export fine-tuned Legal-BERT to ONNX for fast inference. | |
| Requires: pip install optimum[onnxruntime] | |
| """ | |
| import os | |
| import sys | |
| MODEL_PATH = os.environ.get("MODEL_PATH", "./clauseguard-model/final") | |
| ONNX_OUTPUT = os.environ.get("ONNX_OUTPUT", "./clauseguard-model-onnx") | |
| print(f"π¦ Exporting {MODEL_PATH} β ONNX at {ONNX_OUTPUT}") | |
| try: | |
| from optimum.onnxruntime import ORTModelForSequenceClassification | |
| from transformers import AutoTokenizer | |
| # Load PyTorch model and export to ONNX | |
| model = ORTModelForSequenceClassification.from_pretrained(MODEL_PATH, export=True) | |
| tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH) | |
| # Save ONNX model + tokenizer + config | |
| model.save_pretrained(ONNX_OUTPUT) | |
| tokenizer.save_pretrained(ONNX_OUTPUT) | |
| print(f"β ONNX model saved to {ONNX_OUTPUT}") | |
| print(f" Files: {os.listdir(ONNX_OUTPUT)}") | |
| # Verify inference works | |
| from transformers import pipeline | |
| classifier = pipeline("text-classification", model=model, tokenizer=tokenizer, top_k=None) | |
| test = classifier("The company may terminate your account at any time without notice.") | |
| print(f" Test inference: {test}") | |
| except ImportError: | |
| print("β Install optimum: pip install optimum[onnxruntime]") | |
| sys.exit(1) | |
| except Exception as e: | |
| print(f"β Export failed: {e}") | |
| sys.exit(1) | |