# NOTE(review): removed extraction residue ("File size: 2,608 Bytes" / commit hash) that is not valid Python.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# NOTE(review): removed a spilled line-number column (1..85) left over from code-viewer extraction; it is not part of the script.
import json
import os
import shutil
import time

from openai import OpenAI
from tqdm import tqdm

PATH_META = "./src/fuse_esdac/outputs/05_LLM_desc/column_meta.json"
PATH_TEMPLATE = "./src/fuse_esdac/prompts/desc.txt"
PATH_APIKEY = "./src/fuse_esdac/outputs/05_LLM_desc/apikey.txt"
# NOTE: "descritpions" typo is kept as-is — it must match the actual file on disk.
PATH_EXISTING = "./src/fuse_esdac/large_inputs/existing_descritpions.json"

# === model selection ===
MODEL = "gpt-5"  # change to "gpt-4o" / "gpt-4o-mini" / "gpt-4.1" etc.

# === load key + template ===
# Context managers close the handles deterministically; the previous
# bare open(...).read() leaked them until garbage collection.
with open(PATH_APIKEY, encoding="utf-8") as f:
    client = OpenAI(api_key=f.read().strip())
with open(PATH_TEMPLATE, encoding="utf-8") as f:
    prompt_template = f.read()

# === init meta file if missing ===
# shutil.copy is portable and raises on failure, unlike os.system("cp ...")
# which fails on Windows and silently ignores a non-zero exit status.
if not os.path.exists(PATH_META):
    shutil.copy("./src/fuse_esdac/outputs/04_table/column_meta.json", PATH_META)

# === load existing descriptions once (if available) ===
if os.path.exists(PATH_EXISTING):
    with open(PATH_EXISTING, encoding="utf-8") as f:
        existing_desc = json.load(f)
else:
    existing_desc = {}

# Descriptions produced in this run only; deliberately kept separate so the
# existing-descriptions file is never written back to.
new_generated = {}

def _save_meta(meta):
    """Persist the (possibly partially filled) meta dict to PATH_META.

    Called after every single update so a crash loses at most one answer.
    ensure_ascii=False + utf-8 keeps non-ASCII descriptions readable and
    matches the final report's formatting.
    """
    with open(PATH_META, "w", encoding="utf-8") as f:
        json.dump(meta, f, indent=2, ensure_ascii=False)


# Outer loop: re-read the meta file each pass and repeat until every column
# carries a non-empty description.  A column whose generation failed (empty
# answer after 3 retries) stays falsy and is retried on the next pass — so a
# permanently failing API keeps the loop running by design.
while True:
    with open(PATH_META, encoding="utf-8") as f:
        col_meta = json.load(f)

    # Loop-invariant lookup table: existing_desc never changes inside a pass,
    # so resolve the isinstance check once instead of per item.
    reusable = existing_desc if isinstance(existing_desc, dict) else {}

    all_done = True

    for k, v in tqdm(col_meta.items()):
        # 1) already has a (non-empty) description in the current file
        if v.get("description"):
            continue

        # 2) reuse a description from the existing-descriptions file
        if reusable.get(k):
            v["description"] = reusable[k]
            _save_meta(col_meta)
            continue

        # 3) need to call OpenAI to generate
        all_done = False
        prompt = prompt_template.format(input_dict={k: v})

        # --- safe call with up to 3 retries ---
        for _ in range(3):
            try:
                resp = client.chat.completions.create(
                    model=MODEL,
                    messages=[{"role": "user", "content": prompt}],
                )
                answer = resp.choices[0].message.content.strip()
                break
            except Exception as e:
                print("API error, retrying in 5 sec:", e)
                time.sleep(5)
        else:
            # All retries failed: store an empty string, which is falsy and
            # therefore picked up again on the next outer pass.
            answer = ""

        v["description"] = answer
        new_generated[k] = answer  # do NOT write back to existing_desc

        # save immediate
        _save_meta(col_meta)

        time.sleep(0.2)  # rate limit safety

    if all_done:
        break

print("\n=== Newly generated descriptions (NOT saved to existing_descritpions.json) ===")
print(json.dumps(new_generated, indent=2, ensure_ascii=False))