SimrusDenuvo committed on
Commit 53404ca · verified · 1 Parent(s): 14e67e3

Update app.py

Files changed (1)
  1. app.py +7 -16
app.py CHANGED
@@ -1,19 +1,10 @@
- import gradio as gr
- from transformers import AutoTokenizer, AutoModelForCausalLM
- import torch
-
- # Load the model (can be replaced with ruGPT-3 or another)
- model_name = "sberbank-ai/rugpt3small_based_on_gpt2"
- tokenizer = AutoTokenizer.from_pretrained(model_name)
- model = AutoModelForCausalLM.from_pretrained(model_name)
-
- # Function to generate a response
- def generate_text(prompt):
-     inputs = tokenizer(prompt, return_tensors="pt")
-     with torch.no_grad():
-         outputs = model.generate(**inputs, max_new_tokens=50)
-     return tokenizer.decode(outputs[0], skip_special_tokens=True)
-
- # Gradio interface
- demo = gr.Interface(fn=generate_text, inputs="text", outputs="text", title="Тест модели")
- demo.launch()
 
+ import openai
+
+ openai.api_key = "YOUR_API_KEY"  # Insert your API key
+
+ response = openai.ChatCompletion.create(
+     model="gpt-3.5-turbo",
+     messages=[{"role": "user", "content": "Привет! Что ты умеешь?"}]
+ )
+
+ print(response['choices'][0]['message']['content'])
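
Note that the updated app.py uses the legacy openai.ChatCompletion interface (removed in openai 1.0 and later) and drops the Gradio UI that the previous version exposed, so the Space no longer serves a web demo. Below is a minimal sketch, not part of this commit, of how the same ChatCompletion call could be wrapped back into the removed gr.Interface pattern; the chat function name and the OPENAI_API_KEY environment variable are assumptions for illustration, and the code requires an openai version below 1.0.

import os

import gradio as gr
import openai

# Assumption: the API key is supplied via an environment variable (e.g. a Space secret)
# rather than hardcoded in the source file.
openai.api_key = os.environ["OPENAI_API_KEY"]

# Hypothetical wrapper: forwards the user's text to gpt-3.5-turbo and returns the reply.
def chat(prompt):
    response = openai.ChatCompletion.create(  # legacy openai<1.0 call, as in the diff above
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
    )
    return response["choices"][0]["message"]["content"]

# Same Gradio wiring as the removed version of app.py.
demo = gr.Interface(fn=chat, inputs="text", outputs="text", title="Тест модели")
demo.launch()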