Krish-Upgrix committed on
Commit e8c0213 · verified · 1 Parent(s): 8813823

Updated app for version 2

Files changed (1): app.py (+203 -55)
app.py CHANGED (removed lines are shown in the hunks below; the full updated file follows after them)
@@ -1,48 +1,57 @@
  import streamlit as st
  import requests
- import os  # To access environment variables
- import google.generativeai as genai  # Import Gemini API

- # Load API keys from environment variables
  HF_API_TOKEN = os.getenv("HF_API_TOKEN")
  GEMINI_API_KEY = os.getenv("GOOGLE_API_KEY")

- # Set up Hugging Face API
- MODEL_ID = "Salesforce/codet5p-770m"  # CodeT5+ (Recommended)
  API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
  HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}

- # Initialize Gemini API
- genai.configure(api_key='AIzaSyBkc8CSEhyYwZAuUiJfzF1Xtns-RYmBOpg')

- def translate_code(code_snippet, source_lang, target_lang):
-     """Translate code using Hugging Face API."""
-     prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:\n"

-     response = requests.post(API_URL, headers=HEADERS, json={
-         "inputs": prompt,
-         "parameters": {
-             "max_new_tokens": 150,
-             "temperature": 0.2,
-             "top_k": 50
-         }
-     })

-     if response.status_code == 200:
-         generated_text = response.json()[0]["generated_text"]
-         translated_code = generated_text.split(f"Translated {target_lang} Code:\n")[-1].strip()
-         return translated_code
-     else:
-         return f"Error: {response.status_code}, {response.text}"

  def fallback_translate_with_gemini(code_snippet, source_lang, target_lang):
-     """Fallback function using Gemini API for translation."""
      prompt = f"""You are a code translation expert. Convert the following {source_lang} code to {target_lang}:

      {code_snippet}

      Ensure the translation is accurate and follows {target_lang} best practices.
-     Do not give any explaination. only give the translated code.
      """
      try:
          model = genai.GenerativeModel("gemini-1.5-pro")
@@ -51,17 +60,55 @@ def fallback_translate_with_gemini(code_snippet, source_lang, target_lang):
      except Exception as e:
          return f"Gemini API Error: {str(e)}"

- # Streamlit UI
- st.title("🔄 Programming Language Translator")
- st.write("Translate code between different programming languages using AI.")

- languages = ["Python", "Java", "C++", "C"]

  source_lang = st.selectbox("Select source language", languages)
  target_lang = st.selectbox("Select target language", languages)
  code_input = st.text_area("Enter your code here:", height=200)

- # Initialize session state
  if "translate_attempts" not in st.session_state:
      st.session_state.translate_attempts = 0
      st.session_state.translated_code = ""
@@ -69,13 +116,15 @@ if "translate_attempts" not in st.session_state:
  if st.button("Translate"):
      if code_input.strip():
          st.session_state.translate_attempts += 1
-         with st.spinner("Translating..."):
-             if st.session_state.translate_attempts == 1:
-                 # First attempt using the pretrained model
-                 st.session_state.translated_code = translate_code(code_input, source_lang, target_lang)
-             else:
-                 # Second attempt uses Gemini API
                  st.session_state.translated_code = fallback_translate_with_gemini(code_input, source_lang, target_lang)

          st.subheader("Translated Code:")
          st.code(st.session_state.translated_code, language=target_lang.lower())
@@ -95,28 +144,27 @@ if st.button("Translate"):

- # V1 without gemini api

  # import streamlit as st
  # import requests
- # import os  # Import os to access environment variables
-
- # # Get API token from environment variable
- # API_TOKEN = os.getenv("HF_API_TOKEN")

- # # Change MODEL_ID to a better model
  # MODEL_ID = "Salesforce/codet5p-770m"  # CodeT5+ (Recommended)
- # # MODEL_ID = "bigcode/starcoder2-15b"  # StarCoder2
- # # MODEL_ID = "bigcode/starcoder"
  # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
- # HEADERS = {"Authorization": f"Bearer {API_TOKEN}"}

  # def translate_code(code_snippet, source_lang, target_lang):
- #     """Translate code using Hugging Face API securely."""
  #     prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:\n"

  #     response = requests.post(API_URL, headers=HEADERS, json={
@@ -125,7 +173,6 @@ if st.button("Translate"):
  #             "max_new_tokens": 150,
  #             "temperature": 0.2,
  #             "top_k": 50
- #             # "stop": ["\n\n", "#", "//", "'''"]
  #         }
  #     })
@@ -136,8 +183,24 @@ if st.button("Translate"):
  #     else:
  #         return f"Error: {response.status_code}, {response.text}"

  # # Streamlit UI
- # st.title("🔄 Code Translator using StarCoder")
  # st.write("Translate code between different programming languages using AI.")

  # languages = ["Python", "Java", "C++", "C"]
@@ -146,11 +209,96 @@ if st.button("Translate"):
  # target_lang = st.selectbox("Select target language", languages)
  # code_input = st.text_area("Enter your code here:", height=200)

  # if st.button("Translate"):
  #     if code_input.strip():
  #         with st.spinner("Translating..."):
- #             translated_code = translate_code(code_input, source_lang, target_lang)
- #             st.subheader("Translated Code:")
- #             st.code(translated_code, language=target_lang.lower())
  #     else:
  #         st.warning("⚠️ Please enter some code before translating.")
 
app.py after this commit (added lines marked with +):

  import streamlit as st
  import requests
+ import os
+ import google.generativeai as genai
+ import tensorflow as tf
+ import numpy as np
+ from tensorflow.keras.layers import TextVectorization

+ # --- Config ---
+ vocab_size = 10000
+ sequence_length = 150
+
+ # Load API keys
  HF_API_TOKEN = os.getenv("HF_API_TOKEN")
  GEMINI_API_KEY = os.getenv("GOOGLE_API_KEY")

+ # Hugging Face setup
+ MODEL_ID = "Salesforce/codet5p-770m"
  API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
  HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}

+ # Gemini setup
+ genai.configure(api_key="AIzaSyBkc8CSEhyYwZAuUiJfzF1Xtns-RYmBOpg")
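
Note: the key passed to genai.configure above is hardcoded even though GEMINI_API_KEY is read from the GOOGLE_API_KEY environment variable a few lines earlier and then never used. A minimal sketch of wiring the env var through instead, assuming the key is stored as a Space secret; the fail-fast check is illustrative, not part of the commit:

    import os
    import google.generativeai as genai

    GEMINI_API_KEY = os.getenv("GOOGLE_API_KEY")  # same lookup the app already performs
    if not GEMINI_API_KEY:
        # Illustrative guard: refuse to start rather than fall back to a literal key in source
        raise RuntimeError("GOOGLE_API_KEY environment variable is not set")
    genai.configure(api_key=GEMINI_API_KEY)

This also keeps the key out of the repository history.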

+ # --- Load Local Model & Vectorizers ---
+ model = tf.keras.models.load_model("java_to_python_seq2seq_model.h5")

+ java_vectorizer = TextVectorization(max_tokens=vocab_size, output_sequence_length=sequence_length)
+ python_vectorizer = TextVectorization(max_tokens=vocab_size, output_sequence_length=sequence_length)

+ # Fake adaptation to initialize vectorizers
+ java_vectorizer.adapt(tf.data.Dataset.from_tensor_slices(["public class Main { public static void main(String[] args) {} }"]))
+ python_vectorizer.adapt(tf.data.Dataset.from_tensor_slices(["def main():\n pass"]))
+
+ # Reverse lookup for Python vocab
+ python_vocab = python_vectorizer.get_vocabulary()
+ index_to_word = dict(enumerate(python_vocab))
+
+ def decode_sequence(pred):
+     """Greedy decoding of the prediction."""
+     pred_ids = tf.argmax(pred, axis=-1).numpy()[0]
+     tokens = [index_to_word.get(i, "") for i in pred_ids]
+     code = " ".join(tokens).replace("[UNK]", "").strip()
+     return code
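
Note: the two TextVectorization layers above are adapted on a single hard-coded snippet each, so their vocabularies (and therefore the token ids fed to the seq2seq model and the index_to_word lookup) will not match whatever vocabulary the model was trained with. A sketch of one way to keep them aligned, assuming the training vocabularies were exported at training time; the java_vocab.json / python_vocab.json file names are hypothetical:

    import json
    from tensorflow.keras.layers import TextVectorization

    def load_vectorizer(vocab_path, seq_len):
        # Rebuild a vectorizer from a vocabulary list saved during training,
        # e.g. with json.dump(python_vectorizer.get_vocabulary(), f)
        with open(vocab_path) as f:
            vocab = json.load(f)
        vec = TextVectorization(output_sequence_length=seq_len)
        vec.set_vocabulary(vocab)  # reuse the training vocabulary instead of adapt()
        return vec

    java_vectorizer = load_vectorizer("java_vocab.json", sequence_length)      # hypothetical file
    python_vectorizer = load_vectorizer("python_vocab.json", sequence_length)  # hypothetical file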
+
+ # --- Translation Functions ---

  def fallback_translate_with_gemini(code_snippet, source_lang, target_lang):
      prompt = f"""You are a code translation expert. Convert the following {source_lang} code to {target_lang}:

      {code_snippet}

      Ensure the translation is accurate and follows {target_lang} best practices.
+     Do not give any explanation. Only give the translated code.
      """
      try:
          model = genai.GenerativeModel("gemini-1.5-pro")
  [lines 58-59 not shown in the diff view]
      except Exception as e:
          return f"Gemini API Error: {str(e)}"

+ def translate_with_local_model(code_snippet):
+     """Local seq2seq Java→Python translation."""
+     try:
+         java_seq = java_vectorizer(tf.constant([code_snippet]))
+         python_in = tf.constant([[1] + [0] * (sequence_length - 1)])  # <start> token
+         translated_tokens = []
+
+         for i in range(sequence_length):
+             preds = model.predict([java_seq, python_in], verbose=0)
+             next_token = tf.argmax(preds[0, i]).numpy()
+             translated_tokens.append(next_token)
+             if next_token == 0:
+                 break
+             if i + 1 < sequence_length:
+                 python_in = tf.tensor_scatter_nd_update(
+                     python_in, [[0, i + 1]], [next_token]
+                 )
+
+         tokens = [index_to_word.get(t, "") for t in translated_tokens]
+         return " ".join(tokens).replace("[UNK]", "").strip()

+     except Exception as e:
+         return f"Local Model Error: {str(e)}"
+
+ def translate_code(code_snippet, source_lang, target_lang):
+     """Hugging Face translation."""
+     prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:\n"
+     response = requests.post(API_URL, headers=HEADERS, json={
+         "inputs": prompt,
+         "parameters": {"max_new_tokens": 150, "temperature": 0.2, "top_k": 50}
+     })

+     if response.status_code == 200:
+         generated_text = response.json()[0]["generated_text"]
+         translated_code = generated_text.split(f"Translated {target_lang} Code:\n")[-1].strip()
+         return translated_code
+     else:
+         return f"Error: {response.status_code}, {response.text}"
+
+ # --- Streamlit UI ---
+
+ st.title("🔄 Programming Language Translator")
+ st.write("Translate code between programming languages using 3-tier AI fallback.")
+
+ languages = ["Python", "Java", "C++", "C"]
  source_lang = st.selectbox("Select source language", languages)
  target_lang = st.selectbox("Select target language", languages)
  code_input = st.text_area("Enter your code here:", height=200)

  if "translate_attempts" not in st.session_state:
      st.session_state.translate_attempts = 0
      st.session_state.translated_code = ""

  if st.button("Translate"):
      if code_input.strip():
          st.session_state.translate_attempts += 1
+         attempt = st.session_state.translate_attempts
+
+         with st.spinner(f"Translating..."):
+             if attempt == 1:
                  st.session_state.translated_code = fallback_translate_with_gemini(code_input, source_lang, target_lang)
+             elif attempt == 2 and source_lang == "Java" and target_lang == "Python":
+                 st.session_state.translated_code = translate_with_local_model(code_input)
+             else:
+                 st.session_state.translated_code = translate_code(code_input, source_lang, target_lang)

          st.subheader("Translated Code:")
          st.code(st.session_state.translated_code, language=target_lang.lower())
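
Note: with this routing the fallback only advances on a new click of Translate: the first click always uses Gemini, the second the local model (and only for Java to Python), and later clicks the Hugging Face API, whether or not the previous tier actually succeeded. A sketch of chaining all three tiers on a single click, using the error strings these functions already return as the failure signal; the helper name is invented for illustration:

    def translate_with_fallback(code_snippet, source_lang, target_lang):
        # Tier 1: Gemini
        result = fallback_translate_with_gemini(code_snippet, source_lang, target_lang)
        if not result.startswith("Gemini API Error"):
            return result
        # Tier 2: local seq2seq model (Java -> Python only)
        if source_lang == "Java" and target_lang == "Python":
            result = translate_with_local_model(code_snippet)
            if not result.startswith("Local Model Error"):
                return result
        # Tier 3: Hugging Face Inference API
        return translate_code(code_snippet, source_lang, target_lang)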
 

+ # version1: Without Trained model.

  # import streamlit as st
  # import requests
+ # import os  # To access environment variables
+ # import google.generativeai as genai  # Import Gemini API

+ # # Load API keys from environment variables
+ # HF_API_TOKEN = os.getenv("HF_API_TOKEN")
+ # GEMINI_API_KEY = os.getenv("GOOGLE_API_KEY")

+ # # Set up Hugging Face API
  # MODEL_ID = "Salesforce/codet5p-770m"  # CodeT5+ (Recommended)
  # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
+ # HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}
+
+ # # Initialize Gemini API
+ # genai.configure(api_key='AIzaSyBkc8CSEhyYwZAuUiJfzF1Xtns-RYmBOpg')

  # def translate_code(code_snippet, source_lang, target_lang):
+ #     """Translate code using Hugging Face API."""
  #     prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:\n"

  #     response = requests.post(API_URL, headers=HEADERS, json={
  [lines 171-172 not shown in the diff view]
  #             "max_new_tokens": 150,
  #             "temperature": 0.2,
  #             "top_k": 50
  #         }
  #     })

  [lines 179-182 not shown in the diff view]
  #     else:
  #         return f"Error: {response.status_code}, {response.text}"

+ # def fallback_translate_with_gemini(code_snippet, source_lang, target_lang):
+ #     """Fallback function using Gemini API for translation."""
+ #     prompt = f"""You are a code translation expert. Convert the following {source_lang} code to {target_lang}:
+
+ #     {code_snippet}
+
+ #     Ensure the translation is accurate and follows {target_lang} best practices.
+ #     Do not give any explaination. only give the translated code.
+ #     """
+ #     try:
+ #         model = genai.GenerativeModel("gemini-1.5-pro")
+ #         response = model.generate_content(prompt)
+ #         return response.text.strip() if response else "Translation failed."
+ #     except Exception as e:
+ #         return f"Gemini API Error: {str(e)}"
+
  # # Streamlit UI
+ # st.title("🔄 Programming Language Translator")
  # st.write("Translate code between different programming languages using AI.")

  # languages = ["Python", "Java", "C++", "C"]
  [lines 207-208 not shown in the diff view]
  # target_lang = st.selectbox("Select target language", languages)
  # code_input = st.text_area("Enter your code here:", height=200)

+ # # Initialize session state
+ # if "translate_attempts" not in st.session_state:
+ #     st.session_state.translate_attempts = 0
+ #     st.session_state.translated_code = ""
+
  # if st.button("Translate"):
  #     if code_input.strip():
+ #         st.session_state.translate_attempts += 1
  #         with st.spinner("Translating..."):
+ #             if st.session_state.translate_attempts == 1:
+ #                 # First attempt using the pretrained model
+ #                 st.session_state.translated_code = translate_code(code_input, source_lang, target_lang)
+ #             else:
+ #                 # Second attempt uses Gemini API
+ #                 st.session_state.translated_code = fallback_translate_with_gemini(code_input, source_lang, target_lang)
+
+ #         st.subheader("Translated Code:")
+ #         st.code(st.session_state.translated_code, language=target_lang.lower())
  #     else:
  #         st.warning("⚠️ Please enter some code before translating.")
+
+ # # V1 without gemini api
+
+ # # import streamlit as st
+ # # import requests
+ # # import os  # Import os to access environment variables
+
+ # # # Get API token from environment variable
+ # # API_TOKEN = os.getenv("HF_API_TOKEN")
+
+ # # # Change MODEL_ID to a better model
+ # # MODEL_ID = "Salesforce/codet5p-770m"  # CodeT5+ (Recommended)
+ # # # MODEL_ID = "bigcode/starcoder2-15b"  # StarCoder2
+ # # # MODEL_ID = "bigcode/starcoder"
+ # # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
+ # # HEADERS = {"Authorization": f"Bearer {API_TOKEN}"}
+
+ # # def translate_code(code_snippet, source_lang, target_lang):
+ # #     """Translate code using Hugging Face API securely."""
+ # #     prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:\n"
+
+ # #     response = requests.post(API_URL, headers=HEADERS, json={
+ # #         "inputs": prompt,
+ # #         "parameters": {
+ # #             "max_new_tokens": 150,
+ # #             "temperature": 0.2,
+ # #             "top_k": 50
+ # #             # "stop": ["\n\n", "#", "//", "'''"]
+ # #         }
+ # #     })
+
+ # #     if response.status_code == 200:
+ # #         generated_text = response.json()[0]["generated_text"]
+ # #         translated_code = generated_text.split(f"Translated {target_lang} Code:\n")[-1].strip()
+ # #         return translated_code
+ # #     else:
+ # #         return f"Error: {response.status_code}, {response.text}"
+
+ # # # Streamlit UI
+ # # st.title("🔄 Code Translator using StarCoder")
+ # # st.write("Translate code between different programming languages using AI.")
+
+ # # languages = ["Python", "Java", "C++", "C"]
+
+ # # source_lang = st.selectbox("Select source language", languages)
+ # # target_lang = st.selectbox("Select target language", languages)
+ # # code_input = st.text_area("Enter your code here:", height=200)
+
+ # # if st.button("Translate"):
+ # #     if code_input.strip():
+ # #         with st.spinner("Translating..."):
+ # #             translated_code = translate_code(code_input, source_lang, target_lang)
+ # #             st.subheader("Translated Code:")
+ # #             st.code(translated_code, language=target_lang.lower())
+ # #     else:
+ # #         st.warning("⚠️ Please enter some code before translating.")