Update ff.py
ff.py CHANGED
@@ -16,6 +16,7 @@ genai.configure(
 model = genai.GenerativeModel(
     model_name='gemini-pro'
 )
+model_vision = genai.GenerativeModel('gemini-pro-vision')
 from g4f import Provider, models
 from langchain.llms.base import LLM
 
@@ -220,11 +221,13 @@ def mariam_chimi(chi,im):
         return "aucune instruction donner..."
 
     else:
-        bdd = bd(token=token)
+        #bdd = bd(token=token)
         with open(im.name, 'rb') as f:
             image_data = f.read()
-        bard_answer = bdd.ask_about_image(chi, image_data)
-
+        #bard_answer = bdd.ask_about_image(chi, image_data)
+        response = model.generate_content([chi, image_data], stream=True)
+        e = response.resolve()
+        return e
 
 
 #fin
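
For context, a minimal sketch of how the new gemini-pro-vision model could be used to answer a question about an image with the google-generativeai SDK. The helper name ask_about_image, the placeholder API key, and the mime_type/data dict wrapping the raw bytes are illustrative assumptions, not code from this commit (the commit itself passes the bytes straight to the gemini-pro model):

import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")  # placeholder, not the project's key

model_vision = genai.GenerativeModel('gemini-pro-vision')

def ask_about_image(chi, image_path):
    # Read the image bytes and wrap them in a Blob-style dict so the SDK
    # knows the MIME type; gemini-pro-vision accepts mixed text/image parts.
    with open(image_path, 'rb') as f:
        image_data = f.read()
    response = model_vision.generate_content(
        [chi, {'mime_type': 'image/jpeg', 'data': image_data}],
        stream=True,
    )
    response.resolve()   # drain the streamed chunks
    return response.text  # aggregated answer text

Note that in this SDK resolve() only consumes the stream; the answer text is then read from response.text.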