Metal3d committed
Commit d69fd10 · unverified · 1 Parent(s): ca9eb6e

Changing the loop creation

Files changed (1)
  1. main.py +4 -5
main.py CHANGED
@@ -68,7 +68,8 @@ def _generate(history):
     model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
     streamer = AsyncTextIteratorStreamer(tokenizer, skip_special_tokens=True)
 
-    task = asyncio.get_running_loop().run_in_executor(
+    loop = asyncio.new_event_loop()
+    task = loop.run_in_executor(
         None,
         functools.partial(
             model.generate,
@@ -77,7 +78,7 @@ def _generate(history):
             **model_inputs,
         ),
     )
-    return task, streamer
+    return loop, task, streamer
 
 
 async def chat(prompt, history):
@@ -91,9 +92,7 @@ async def chat(prompt, history):
     history = [] if history is None else history
     message_list = history + [message]
 
-    loop = asyncio.new_event_loop()
-    asyncio.set_event_loop(loop)
-    task, streamer = _generate(message_list)
+    loop, task, streamer = _generate(message_list)
 
     buffer = ""
     reasoning = ""
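
The hunk stops at buffer = "" and reasoning = "", so the commit does not show how chat uses the three values now returned by _generate. Below is a minimal sketch of one plausible consumer, assuming a typical streaming chat handler: the message dict shape, the async for loop over the streamer, and the yield of partial responses are assumptions, not part of this commit (the original also tracks a separate reasoning buffer, not modeled here).

# Hedged sketch, not from the commit: one plausible consumer for the
# (loop, task, streamer) triple returned by the new _generate().
async def chat(prompt, history):
    history = [] if history is None else history
    # Assumed message shape; the commit only shows `history + [message]`.
    message_list = history + [{"role": "user", "content": prompt}]

    loop, task, streamer = _generate(message_list)

    buffer = ""
    # AsyncTextIteratorStreamer is asynchronously iterable: it yields decoded
    # text chunks as model.generate, running in the executor thread, produces tokens.
    async for new_text in streamer:
        buffer += new_text
        yield buffer  # assumption: stream partial responses back to the UI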