c committed
Commit 76b18bc · verified · 1 Parent(s): 674e23c

Update app.py

Files changed (1)
  1. app.py +2 -3
app.py CHANGED
@@ -24,7 +24,7 @@ def predict(message, history):
                     for item in history_transformer_format])
 
     model_inputs = tokenizer([messages], return_tensors="pt").to("cpu")
-    print(model_inputs)
+    print('model_inputs',model_inputs)
     streamer = TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
     generate_kwargs = dict(
         model_inputs,
@@ -38,12 +38,11 @@ def predict(message, history):
         stopping_criteria=StoppingCriteriaList([stop])
     )
     t = Thread(target=model.generate, kwargs=generate_kwargs)
-    print(t)
     t.start()
 
     partial_message = ""
     for new_token in streamer:
-        print(new_token)
+        print('new_token', new_token)
         if new_token != '<':
             partial_message += new_token
             yield partial_message
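
For context, the two hunks above sit inside the streaming predict() generator in app.py: the chat history is flattened into a prompt, model.generate runs on a background Thread, and a TextIteratorStreamer yields decoded tokens that the generator re-yields as a growing partial message. Below is a minimal, hedged sketch of that flow with the commit's labeled debug prints in place; the model choice (MODEL_ID = "gpt2"), the StopOnTokens body, the prompt format, and max_new_tokens are placeholder assumptions filled in from the usual Transformers streaming pattern, not values taken from this commit.

# Sketch of the predict() generator around the two hunks above.
# ASSUMED (not shown in this diff): the imports, model/tokenizer loading,
# the StopOnTokens body, the prompt format, and the generation parameters
# between model_inputs and stopping_criteria in generate_kwargs.
from threading import Thread

from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    StoppingCriteria,
    StoppingCriteriaList,
    TextIteratorStreamer,
)

MODEL_ID = "gpt2"  # placeholder; the Space's actual model is not visible in the diff
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)


class StopOnTokens(StoppingCriteria):
    """Placeholder stopping criterion; the real class is defined elsewhere in app.py."""

    def __call__(self, input_ids, scores, **kwargs):
        return False  # never stops early in this sketch


def predict(message, history):
    history_transformer_format = history + [[message, ""]]
    stop = StopOnTokens()

    # Flatten the chat history into one prompt string (assumed format).
    messages = "".join(
        "".join(["\n<human>:" + item[0], "\n<bot>:" + item[1]])
        for item in history_transformer_format
    )

    model_inputs = tokenizer([messages], return_tensors="pt").to("cpu")
    print('model_inputs',model_inputs)  # labeled debug print added by this commit
    streamer = TextIteratorStreamer(
        tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True
    )
    generate_kwargs = dict(
        model_inputs,                   # input_ids + attention_mask
        streamer=streamer,
        max_new_tokens=256,             # assumed value, not in the shown hunks
        stopping_criteria=StoppingCriteriaList([stop]),
    )
    # generate() runs in a background thread; tokens arrive through the streamer.
    t = Thread(target=model.generate, kwargs=generate_kwargs)
    t.start()

    partial_message = ""
    for new_token in streamer:
        print('new_token', new_token)  # labeled debug print added by this commit
        if new_token != '<':
            partial_message += new_token
            yield partial_message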