File size: 2,522 Bytes
bec0b04
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
"""
MatDeepLearn MCP Service - HuggingFace Space Entry Point

This file provides a FastAPI application for health checks and service info.
The actual MCP service is started via start_mcp.py.
"""
from fastapi import FastAPI
from fastapi.responses import JSONResponse
import os
import sys

# Add project to path so sibling packages resolve regardless of the
# working directory the Space launches this script from.
project_root = os.path.dirname(os.path.abspath(__file__))
if project_root not in sys.path:
    sys.path.insert(0, project_root)

# Lightweight status app exposing "/", "/health", and "/info"; it does NOT
# run the MCP service itself (see start_mcp.py).
app = FastAPI(
    title="MatDeepLearn MCP Service",
    description="Graph Neural Networks for Materials Property Prediction",
    version="1.0.0"
)


@app.get("/")
async def root():
    """Root endpoint: service name, transport mode, and demo model list."""
    transport = os.environ.get("MCP_TRANSPORT", "stdio")
    demo_models = [
        "CGCNN_demo", "MPNN_demo", "SchNet_demo",
        "MEGNet_demo", "GCN_demo", "SOAP_demo", "SM_demo"
    ]
    return {
        "status": "ok",
        "service": "MatDeepLearn MCP Service",
        "description": "Graph Neural Networks for Materials Property Prediction",
        "transport": transport,
        "available_models": demo_models
    }


@app.get("/health")
async def health():
    """Health check endpoint.

    Returns:
        dict with "status" (always "healthy" — reaching this handler is the
        health signal) and "gpu_available" (True only when torch imports
        and reports a usable CUDA device).
    """
    try:
        import torch
        gpu_available = torch.cuda.is_available()
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception still covers a missing/broken torch.
        gpu_available = False

    return {
        "status": "healthy",
        "gpu_available": gpu_available
    }


@app.get("/info")
async def info():
    """Detailed service information.

    Returns:
        dict with service/version metadata, torch runtime details
        (version "N/A" and zero GPUs when torch is unavailable), and the
        names of the MCP tools the companion service exposes.
    """
    try:
        import torch
        torch_version = torch.__version__
        gpu_available = torch.cuda.is_available()
        gpu_count = torch.cuda.device_count() if gpu_available else 0
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception still covers a missing/broken torch.
        torch_version = "N/A"
        gpu_available = False
        gpu_count = 0

    return {
        "service": "MatDeepLearn MCP Service",
        "version": "1.0.0",
        "torch_version": torch_version,
        "gpu_available": gpu_available,
        "gpu_count": gpu_count,
        "mcp_tools": [
            "check_environment",
            "list_available_models",
            "get_model_config",
            "process_structure_data",
            "train_model",
            "predict_properties",
            "cross_validation",
            "analyze_structure",
            "compare_models",
            "get_dataset_info"
        ]
    }


if __name__ == "__main__":
    # Run the status app directly; 7860 is the port HuggingFace Spaces expects.
    import uvicorn
    listen_port = os.environ.get("PORT", "7860")
    uvicorn.run(app, host="0.0.0.0", port=int(listen_port))