File size: 1,259 Bytes
8b592ae
 
abdf1a8
8b592ae
 
 
 
 
 
 
 
 
 
 
 
 
6e1bbdd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8b592ae
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import gradio as gr
from transformers import pipeline
import os

# --- Demo configuration -----------------------------------------------------
theme = "darkgrass"  # NOTE(review): defined but never passed to gr.Interface below
title = "GPT-NeoX(Korean) Demo"  # NOTE(review): defined but never passed to gr.Interface below
model_name = "EleutherAI/gpt-neox-ko-1.3b"  # Hugging Face Hub model id (requires an auth token)

# Korean page description; roughly: "This is a demo page showcasing the GPT-NeoX Korean model."
description = "GPT-NeoX ν•œκ΅­μ–΄ λͺ¨λΈμ„ μ‹œμ—°ν•˜λŠ” 데λͺ¨νŽ˜μ΄μ§€ μž…λ‹ˆλ‹€."
# article = "<p style='text-align: center'><a href='https://github.com/kingoflolz/mesh-transformer-jax' target='_blank'>GPT-J-6B: A 6 Billion Parameter Autoregressive Language Model</a></p>"
# Clickable Korean example prompts shown in the Gradio UI (one inner list per input field).
examples = [
    ["μΈκ°„μ²˜λŸΌ μƒκ°ν•˜κ³ , ν–‰λ™ν•˜λŠ” 'μ§€λŠ₯'을 톡해"],
    ["λŒ€ν•œλ―Όκ΅­μ˜ 2040λ…„ 전망은 "],
    ["2040λ…„ 미ꡭ은, "]
]

# Loads the model at import time (downloads on first run). Reads the Hugging
# Face access token from the TOKEN env var; raises KeyError if it is unset.
pipe = pipeline('text-generation', model=model_name, use_auth_token=os.environ['TOKEN'])

def predict(text):
  """Generate a sampled continuation of *text* (up to 100 tokens total).

  Returns only the newly generated text; the prompt itself is excluded
  because ``return_full_text=False``.
  """
  outputs = pipe(
      text,
      do_sample=True,
      return_full_text=False,
      max_length=100,
  )
  return outputs[0]['generated_text']

# Wire the prediction function into a simple text-in / text-out web UI.
# Fix: the module-level `title` and `description` were defined but never
# passed to the interface, so the page rendered without them.
iface = gr.Interface(
  fn=predict, 
  inputs='text',
  outputs='text',
  title=title,
  description=description,
  examples=examples
)

# Start the Gradio server (blocking call).
iface.launch()





  
# print(generated)  # print: μΈκ°„μ²˜λŸΌ μƒκ°ν•˜κ³ , ν–‰λ™ν•˜λŠ” 'μ§€λŠ₯'을 톡해 인λ₯˜κ°€ μ΄μ œκΉŒμ§€ ν’€μ§€ λͺ»ν–ˆλ˜ 문제의 해닡을 찾을 수 μžˆμ„ 것이닀. κ³Όν•™κΈ°μˆ μ΄ κ³ λ„λ‘œ λ°œλ‹¬ν•œ 21μ„ΈκΈ°λ₯Ό μ‚΄μ•„κ°ˆ 우리 μ•„μ΄λ“€μ—κ²Œ κ°€μž₯ ν•„μš”ν•œ 것은 사고λ ₯ ν›ˆλ ¨μ΄λ‹€. 사고λ ₯ ν›ˆλ ¨μ„ 톡해, 세상