Update app.py
app.py CHANGED
@@ -86,7 +86,7 @@ def respond(
     **kwargs,
 ) -> str:
     # Initialize the model
-    model = pipeline("text-generation", model=
+    model = pipeline("text-generation", model="enricoros/big-agi")
 
     # Generate a response
     response = model(
@@ -101,43 +101,6 @@ def respond(
     assistant_response = response[0]['generated_text'].strip()
 
     return assistant_response
-class MyChatbot(gr.ChatInterface):
-    def __init__(self, fn, *args, **kwargs):
-        super().__init__(fn, *args, **kwargs)
-
-    def update_chat_history(self, message: str, is_user: bool) -> None:
-        if is_user:
-            self.history.append((message, None))
-        else:
-            self.history.append((None, message))
-
-    def compute(self, *args, **kwargs):
-        command = args[0]
-        history = self.history
-        system_message = self.additional_inputs["system_message"]
-        max_tokens = self.additional_inputs["max_new_tokens"]
-        temperature = self.additional_inputs["temperature"]
-        top_p = self.additional_inputs["top_p"]
-        github_api_token = self.additional_inputs["github_api_token"]
-        github_username = self.additional_inputs["github_username"]
-        github_repository = self.additional_inputs["github_repository"]
-        selected_model = self.additional_inputs["model_dropdown"]
-        severity = self.additional_inputs["severity_dropdown"]
-        programming_language = self.additional_inputs["programming_language_textbox"]
-
-        return self.respond(
-            command,
-            history,
-            system_message,
-            max_tokens,
-            temperature,
-            top_p,
-            github_api_token,
-            github_username,
-            github_repository,
-            selected_model,
-            severity,
-            programming_language,
         )
 
 with gr.Blocks() as demo:
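For context, a minimal runnable sketch of the pattern this change moves toward: a plain respond function that calls a transformers text-generation pipeline and is handed straight to Gradio, with no gr.ChatInterface subclass in between. The model id ("gpt2"), the two-argument signature, and the generation parameters are illustrative assumptions, not the Space's actual code (the diff itself passes model="enricoros/big-agi").

import gradio as gr
from transformers import pipeline

# Stand-in model id for illustration; the Space passes a different value to pipeline().
generator = pipeline("text-generation", model="gpt2")

def respond(message, history):
    # history is the prior chat turns that gr.ChatInterface supplies; this sketch ignores it.
    output = generator(message, max_new_tokens=64, do_sample=True)
    # The pipeline returns a list of dicts with the generated continuation.
    return output[0]["generated_text"].strip()

# gr.ChatInterface wires respond to a chat UI directly, so no custom subclass is needed.
demo = gr.ChatInterface(respond)

if __name__ == "__main__":
    demo.launch()

Loading the pipeline once at module level keeps per-request latency down; each call to respond then only runs generation.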