# code_compiler/app.py
import streamlit as st
import sys
import io


def execute_code(code, user_input=""):
    """Execute the given code with simulated input and return the output."""
    old_stdout = sys.stdout            # Back up the original stdout
    redirected_output = io.StringIO()  # Create a new string buffer
    sys.stdout = redirected_output     # Redirect stdout to the buffer

    input_values = user_input.strip().split("\n")  # Split user inputs by line
    input_counter = 0

    def mock_input(prompt=""):
        """Return the next pre-supplied line in place of the built-in input()."""
        nonlocal input_counter
        if input_counter < len(input_values):
            value = input_values[input_counter]
            input_counter += 1
            return value
        else:
            raise ValueError("Not enough inputs provided.")

    try:
        exec(code, {"input": mock_input})      # Execute the user's code with mocked input
        output = redirected_output.getvalue()  # Collect everything printed to stdout
    except Exception as e:
        output = f"Error: {str(e)}"            # Capture and report any errors
    finally:
        sys.stdout = old_stdout                # Always restore the original stdout

    return output.strip()                      # Return the cleaned output
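
# Illustrative usage (a sketch, not executed by the app): with input() mocked as above,
#   execute_code("a = input()\nb = input()\nprint(int(a) + int(b))", "2\n3")
# should return "5", since each input() call consumes one line of user_input.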

# Streamlit UI
st.title("💻 Python Compiler 🐍")
st.write("Write your Python code and get the correct output!")

code_input = st.text_area("Enter your Python code:", height=200)
user_input = st.text_area("Enter input values (one per line):", height=100)  # Input field for values consumed by input()

if st.button("Run Code"):
    if code_input.strip():
        with st.spinner("Executing..."):
            output = execute_code(code_input, user_input)  # Execute user code with mocked input
        st.subheader("Output:")
        st.code(output, language="plaintext")
    else:
        st.warning("⚠️ Please enter some Python code before running.")
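
# To run this app locally (standard Streamlit CLI, assuming Streamlit is installed):
#   streamlit run app.py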

# V1 (without Gemini API): earlier code-translation version that called the
# Hugging Face Inference API, kept commented out for reference.
# import streamlit as st
# import requests
# import os  # Import os to access environment variables
#
# # Get API token from environment variable
# API_TOKEN = os.getenv("HF_API_TOKEN")
#
# # Change MODEL_ID to a better model
# MODEL_ID = "Salesforce/codet5p-770m"  # CodeT5+ (Recommended)
# # MODEL_ID = "bigcode/starcoder2-15b"  # StarCoder2
# # MODEL_ID = "bigcode/starcoder"
#
# API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
# HEADERS = {"Authorization": f"Bearer {API_TOKEN}"}
#
# def translate_code(code_snippet, source_lang, target_lang):
#     """Translate code using Hugging Face API securely."""
#     prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:\n"
#     response = requests.post(API_URL, headers=HEADERS, json={
#         "inputs": prompt,
#         "parameters": {
#             "max_new_tokens": 150,
#             "temperature": 0.2,
#             "top_k": 50
#             # "stop": ["\n\n", "#", "//", "'''"]
#         }
#     })
#     if response.status_code == 200:
#         generated_text = response.json()[0]["generated_text"]
#         translated_code = generated_text.split(f"Translated {target_lang} Code:\n")[-1].strip()
#         return translated_code
#     else:
#         return f"Error: {response.status_code}, {response.text}"
#
# # Streamlit UI
# st.title("🔄 Code Translator using StarCoder")
# st.write("Translate code between different programming languages using AI.")
#
# languages = ["Python", "Java", "C++", "C"]
# source_lang = st.selectbox("Select source language", languages)
# target_lang = st.selectbox("Select target language", languages)
# code_input = st.text_area("Enter your code here:", height=200)
#
# if st.button("Translate"):
#     if code_input.strip():
#         with st.spinner("Translating..."):
#             translated_code = translate_code(code_input, source_lang, target_lang)
#         st.subheader("Translated Code:")
#         st.code(translated_code, language=target_lang.lower())
#     else:
#         st.warning("⚠️ Please enter some code before translating.")