ginipick committed (verified)
Commit f81d0d2 · 1 Parent(s): 11034e2

Update app.py

Files changed (1):
  1. app.py +6 -3
app.py CHANGED

@@ -11,6 +11,7 @@ import spaces
 MODEL_ID = "HyperCLOVAX-SEED-Vision-Instruct-3B"
 MAX_NEW_TOKENS = 512
 CPU_THREAD_COUNT = 4  # adjust if needed
+HF_TOKEN = os.getenv("HF_TOKEN")  # read the Hugging Face token from an environment variable
 
 # --- Optional: CPU thread settings ---
 # torch.set_num_threads(CPU_THREAD_COUNT)
@@ -21,6 +22,7 @@ print("--- Environment setup ---")
 print(f"PyTorch version: {torch.__version__}")
 print(f"Device: {torch.device('cuda' if torch.cuda.is_available() else 'cpu')}")
 print(f"Torch threads: {torch.get_num_threads()}")
+print(f"HF_TOKEN set: {'yes' if HF_TOKEN else 'no'}")
 
 # --- Load model and tokenizer ---
 print(f"--- Loading model: {MODEL_ID} ---")
@@ -39,14 +41,16 @@ try:
 
 tokenizer = AutoTokenizer.from_pretrained(
     MODEL_ID,
-    trust_remote_code=True
+    trust_remote_code=True,
+    token=HF_TOKEN  # pass the token
 )
 
 model = AutoModelForCausalLM.from_pretrained(
     MODEL_ID,
     torch_dtype=dtype,
     device_map=device_map,
-    trust_remote_code=True
+    trust_remote_code=True,
+    token=HF_TOKEN  # pass the token
 )
 
 model.eval()
@@ -235,7 +239,6 @@ demo = gr.ChatInterface(
     title="🤖 HyperCLOVAX-SEED-Text-Instruct-0.5B",
     description=(
         f"**Model:** {MODEL_ID}\n"
-
     ),
     examples=examples,
     cache_examples=False,
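
For context, the pattern this commit introduces is: read an access token from the HF_TOKEN environment variable and pass it to the transformers from_pretrained calls so a gated or private checkpoint can be downloaded. Below is a minimal sketch of that pattern only, not the Space's full app.py: MODEL_ID, the token= and trust_remote_code= arguments, and model.eval() are taken from the diff, while the dtype/device_map fallback shown here is an assumption about how the surrounding script chooses them.

# Minimal sketch (assumption: not the complete app.py) of the token-passing
# pattern shown in the diff above.
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "HyperCLOVAX-SEED-Vision-Instruct-3B"  # repo id as it appears in the diff
HF_TOKEN = os.getenv("HF_TOKEN")  # None if the secret/env var is not set

# Assumed dtype/device selection: bfloat16 + device_map="auto" on GPU, float32 on CPU.
use_cuda = torch.cuda.is_available()
dtype = torch.bfloat16 if use_cuda else torch.float32
device_map = "auto" if use_cuda else None  # "auto" requires the accelerate package

tokenizer = AutoTokenizer.from_pretrained(
    MODEL_ID,
    trust_remote_code=True,  # allow custom code shipped with the model repo
    token=HF_TOKEN,          # authenticates access to gated/private repos
)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    torch_dtype=dtype,
    device_map=device_map,
    trust_remote_code=True,
    token=HF_TOKEN,
)
model.eval()  # inference-only, as in the diff

On a Hugging Face Space, HF_TOKEN would typically be provided as a repository secret with that name, which the runtime exposes as an environment variable; locally, exporting HF_TOKEN before launching the app has the same effect. If the variable is unset, token=None simply falls back to anonymous access.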