FelixPhilip committed on
Commit
274fa3d
·
1 Parent(s): f7487ef

updated the gradio ui

Browse files
Files changed (2) hide show
  1. app.py +43 -14
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,25 +1,54 @@
1
  import os
2
  import gradio as gr
3
- from Oracle.deepfundingoracle import prepare_dataset, train_predict_weight, create_submission_csv
 
 
 
 
4
 
5
- # Gradio-only deployment entrypoint for Hugging Face Spaces
6
def analyze_file(upload):
    """Run the oracle pipeline on an uploaded CSV.

    Returns a tuple of (text preview of the head of the results,
    path of the generated submission file).
    """
    # The Gradio file component hands us a tempfile wrapper; its .name
    # attribute is the on-disk path of the uploaded CSV.
    prepared = prepare_dataset(upload.name)
    weighted = train_predict_weight(prepared)
    submission_path = create_submission_csv(weighted, "submission.csv")
    head_csv = weighted.head().to_csv(index=False)
    return head_csv, submission_path
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
 
14
# Two plain text outputs: a CSV-formatted preview and the path of the
# generated submission file.
_result_boxes = [
    gr.Textbox(label="Preview of Results"),
    gr.Textbox(label="Download CSV Path"),
]

# NOTE(review): type="file" was removed in Gradio 4.x in favour of
# "filepath"/"binary" — confirm the pinned gradio version supports it.
iface = gr.Interface(
    fn=analyze_file,
    inputs=gr.File(label="Upload CSV", type="file"),
    outputs=_result_boxes,
    title="DeepFunding Oracle",
    description="Upload a CSV of repo-parent relationships; returns base and final weight predictions as CSV.",
)
24
 
25
  if __name__ == "__main__":
 
1
  import os
2
  import gradio as gr
3
+ import pandas as pd
4
+ import networkx as nx
5
+ import matplotlib.pyplot as plt
6
+ from Oracle.deepfundingoracle import prepare_dataset, train_predict_weight, create_submission_csv, SmolLM
7
+ from Oracle.DataSmolAgent import DataSmolAgent
8
 
9
def pipeline(upload):
    """Run the full DeepFunding pipeline on an uploaded CSV.

    Steps: clean/extract features via DataSmolAgent, prepare the dataset,
    predict final weights, write a submission CSV, render the dependency
    graph, and ask SmolLM for a textual explanation.

    Args:
        upload: the uploaded file. With gr.File(type="filepath") Gradio
            passes a plain str path; older Gradio versions passed a
            tempfile-like object exposing the path as ``.name``.

    Returns:
        Tuple of (head-of-results records, submission CSV path,
        graph image path, explanation text).
    """
    # BUG FIX: the UI declares type="filepath", so `upload` is a str and
    # `upload.name` raised AttributeError. Accept both forms.
    upload_path = upload if isinstance(upload, str) else upload.name

    # Load and clean/extract features via SmolAgents.
    df_raw = pd.read_csv(upload_path)
    df_features = DataSmolAgent(df_raw).run(
        prompt="Clean and extract features from the uploaded data", output_csv=False)

    # Save preprocessed features and run dataset preparation on them.
    processed_path = "processed_input.csv"
    pd.DataFrame(df_features).to_csv(processed_path, index=False)
    df_prepared = prepare_dataset(processed_path)

    # Assign base weights and predict final weights.
    df_results = train_predict_weight(df_prepared)

    # Create submission CSV.
    csv_path = create_submission_csv(df_results, "submission.csv")

    # Build the parent -> repo dependency graph, weighted by final_weight.
    G = nx.DiGraph()
    for _, row in df_results.iterrows():
        G.add_edge(row["parent"], row["repo"], weight=row["final_weight"])
    plt.figure(figsize=(10, 8))
    pos = nx.spring_layout(G)
    weights = [G[u][v]["weight"] for u, v in G.edges()]
    nx.draw(G, pos, with_labels=True, node_size=500, node_color="lightblue",
            edge_color=weights, edge_cmap=plt.get_cmap('viridis'), width=2)
    plt.savefig("graph.png")
    # Release the figure so repeated requests don't accumulate matplotlib
    # state (and memory) in the long-lived Space process.
    plt.close()

    # Generate explanation via SmolLM.
    explanation = SmolLM().predict(
        "Explain the dependency graph and weight assignments for the dataset.")

    return (df_results.head().to_dict("records"), csv_path, "graph.png", explanation)
37
 
38
# Single file input; with type="filepath" the callback receives the
# uploaded file's path as a plain string.
_csv_input = gr.File(label="Upload CSV", type="filepath")

# Four outputs: results table, downloadable CSV, rendered dependency
# graph, and the LLM-generated explanation.
_pipeline_outputs = [
    gr.Dataframe(label="Preview of Results"),
    gr.File(label="Download CSV"),
    gr.Image(label="Dependency Graph"),
    gr.Textbox(label="Explanation"),
]

iface = gr.Interface(
    fn=pipeline,
    inputs=_csv_input,
    outputs=_pipeline_outputs,
    title="DeepFunding Oracle",
    description=(
        "Upload a CSV to extract features, assign base weights via LLama, predict final weights with RandomForest, "
        "and visualize the dependency graph with explanations."
    ),
)
53
 
54
  if __name__ == "__main__":
requirements.txt CHANGED
@@ -11,4 +11,5 @@ smolagents
11
  huggingface_hub
12
  gradio
13
  fastapi
14
- uvicorn[standard]
 
 
11
  huggingface_hub
12
  gradio
13
  fastapi
14
+ uvicorn[standard]
15
+ networkx
16
+ matplotlib