import os
import sys

# 1) Add the SMI-TED inference directory to sys.path **before** importing anything from it
BASE_DIR = os.path.dirname(__file__)
INFERENCE_DIR = os.path.join(BASE_DIR, "smi-ted", "inference")
sys.path.append(INFERENCE_DIR)

# Python can now locate the smi_ted_light package
import tempfile
import pandas as pd
import gradio as gr
from smi_ted_light.load import load_smi_ted

# 2) Directory containing the model weights and vocabulary files
MODEL_DIR = os.path.join("smi-ted", "inference", "smi_ted_light")

model = load_smi_ted(
    folder=MODEL_DIR,
    ckpt_filename="smi-ted-Light_40.pt",
    vocab_filename="bert_vocab_curated.txt",
)

def gerar_embedding(smiles: str):
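    """Encode a SMILES string with SMI-TED and return (embedding, CSV path).

    On success, returns the embedding as a plain Python list plus the path to a
    temporary CSV file holding the same vector; on failure, returns an error
    dict and None.
    """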
    smiles = smiles.strip()
    if not smiles:
        return {"erro": "digite uma sequência SMILES primeiro"}, None
    try:
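        # encode() is expected to return one embedding row per input SMILES;
        # [0] selects the single vector for this input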
        vetor = model.encode(smiles, return_torch=True)[0]
        emb = vetor.tolist()
        df = pd.DataFrame([emb])
        tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".csv", prefix="emb_")
        df.to_csv(tmp.name, index=False)
        tmp.close()
        return emb, tmp.name
    except Exception as e:
        return {"erro": str(e)}, None

demo = gr.Interface(
    fn=gerar_embedding,
    inputs=gr.Textbox(label="SMILES", placeholder="e.g. CCO"),
    outputs=[gr.JSON(), gr.File(label="Download CSV")],
    title="SMI-TED Embedding Generator",
)
if __name__ == "__main__":
    demo.launch(show_api=False)