Update app.py
Browse files
app.py
CHANGED
@@ -104,11 +104,28 @@ class InterestCalculatorApp:
|
|
104 |
except requests.RequestException as e:
|
105 |
st.error(f"Failed to download rates: {e}")
|
106 |
|
107 |
-
def ask_tapas(self, query,
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
112 |
|
113 |
def main():
|
114 |
st.title("Interest Calculation App")
|
|
|
104 |
except requests.RequestException as e:
|
105 |
st.error(f"Failed to download rates: {e}")
|
106 |
|
107 |
def ask_tapas(self, query, df):
    """Encode a natural-language question against a table for TAPAS.

    Args:
        query: The question to ask about the table, as a plain string.
        df: pandas DataFrame holding the table the question refers to.

    Returns:
        The tokenizer's encoding (PyTorch tensors, padded) ready to be
        fed to the TAPAS model.
    """
    # TapasTokenizer requires a DataFrame whose cells are all strings;
    # convert numeric/other cells up front.
    table = df.astype(str)

    # The tokenizer performs all tokenization itself. Pre-tokenizing each
    # cell and passing nested token lists (as the previous version did) is
    # not a supported input format, and the keyword argument is `queries`,
    # not `query`.
    inputs = self.tokenizer(
        table=table,
        queries=query,
        return_tensors="pt",
        padding=True,
    )

    return inputs
|
129 |
|
130 |
def main():
|
131 |
st.title("Interest Calculation App")
|