import json
import os
import shutil
import time

from openai import OpenAI
from tqdm import tqdm
|
|
# Working copy of the column metadata; descriptions are filled in here.
PATH_META = "./src/fuse_esdac/outputs/05_LLM_desc/column_meta.json"
# Prompt template containing an {input_dict} placeholder (see .format below).
PATH_TEMPLATE = "./src/fuse_esdac/prompts/desc.txt"
# Plain-text file holding the OpenAI API key.
PATH_APIKEY = "./src/fuse_esdac/outputs/05_LLM_desc/apikey.txt"
# Optional cache of previously generated descriptions, keyed by column name.
# NOTE(review): the filename typo "descritpions" must be kept — it has to
# match the file that actually exists on disk.
PATH_EXISTING = "./src/fuse_esdac/large_inputs/existing_descritpions.json"


# Chat model used to generate the descriptions.
MODEL = "gpt-5"
|
|
| |
# Read the API key and prompt template once at startup. Context managers
# replace the original bare open(...).read() calls so the handles are
# closed deterministically.
with open(PATH_APIKEY) as f:
    client = OpenAI(api_key=f.read().strip())
with open(PATH_TEMPLATE) as f:
    prompt_template = f.read()


# Seed the working metadata file from the 04_table stage on first run.
# shutil.copyfile replaces the original `os.system("cp ...")`, which was
# non-portable (no `cp` on Windows) and silently ignored copy failures.
if not os.path.exists(PATH_META):
    shutil.copyfile(
        "./src/fuse_esdac/outputs/04_table/column_meta.json", PATH_META
    )


# Load previously generated descriptions so they can be reused instead of
# spending API calls; fall back to an empty mapping when the cache is absent.
if os.path.exists(PATH_EXISTING):
    with open(PATH_EXISTING) as f:
        existing_desc = json.load(f)
else:
    existing_desc = {}
|
|
# Descriptions generated during this run only (kept separate so they can be
# reviewed before being merged into the on-disk cache — see final print).
new_generated = {}


# Keep passing over the metadata until every column has a description.
# NOTE(review): if the API fails persistently for some column, its
# description stays empty and this loop never terminates — presumably an
# intentional retry-forever policy; confirm before changing.
while True:
    # Re-read the checkpoint file so progress from a previous
    # (possibly interrupted) run is picked up.
    with open(PATH_META) as f:
        col_meta = json.load(f)


    all_done = True


    for k, v in tqdm(col_meta.items()):
        # Skip columns that already carry a non-empty description.
        if v.get("description"):
            continue


        # Reuse a cached description when available — no API call needed.
        if isinstance(existing_desc, dict) and existing_desc.get(k):
            v["description"] = existing_desc[k]
            with open(PATH_META, "w") as f:
                json.dump(col_meta, f, indent=2)
            continue


        all_done = False
        input_dict = {k: v}
        prompt = prompt_template.format(input_dict=input_dict)


        # Up to 3 attempts per column; transient API errors back off 5 s.
        for _ in range(3):
            try:
                resp = client.chat.completions.create(
                    model=MODEL,
                    messages=[{"role": "user", "content": prompt}],
                )
                # BUGFIX: message.content can be None; the original
                # unconditional .strip() would raise AttributeError inside
                # this try, be misreported as an API error, and waste a
                # retry. Coerce None to "" before stripping.
                answer = (resp.choices[0].message.content or "").strip()
                break
            except Exception as e:
                print("API error, retrying in 5 sec:", e)
                time.sleep(5)
        else:
            # All attempts failed: record an empty description so the next
            # pass of the while-loop retries this column.
            answer = ""


        v["description"] = answer
        new_generated[k] = answer


        # Checkpoint after every column so a crash loses at most one call.
        with open(PATH_META, "w") as f:
            json.dump(col_meta, f, indent=2)

        # Gentle client-side rate limiting between API calls.
        time.sleep(0.2)


    if all_done:
        break


print("\n=== Newly generated descriptions (NOT saved to existing_descritpions.json) ===")
print(json.dumps(new_generated, indent=2, ensure_ascii=False))
|
|