import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# GGUF repositories only contain quantized .gguf checkpoints, so the file to load
# has to be named explicitly; adjust the filename to one that exists in the repo.
model_name = "TheBloke/MythoMax-L2-13B-GGUF"
gguf_file = "mythomax-l2-13b.Q4_K_M.gguf"

# transformers (>= 4.41) can read GGUF checkpoints directly; the weights are
# dequantized into a regular PyTorch model when loaded this way.
tokenizer = AutoTokenizer.from_pretrained(model_name, gguf_file=gguf_file)
model = AutoModelForCausalLM.from_pretrained(model_name, gguf_file=gguf_file)

def chat(input_text):
    # Tokenize the prompt, generate up to 100 new tokens, and decode the reply.
    inputs = tokenizer(input_text, return_tensors="pt")
    output = model.generate(**inputs, max_new_tokens=100)
    response = tokenizer.decode(output[0], skip_special_tokens=True)
    return response

iface = gr.Interface(
    fn=chat,
    inputs="text",
    outputs="text",
    title="Dika AI - MythoMax Lite",
    description="AI chatbot based on MythoMax 13B GGUF, optimized for the Hugging Face free tier!",
)

iface.launch()