adityachivu commited on
Commit
ee0877c
·
1 Parent(s): 599f736

migrate to chat agent with flashcard agent

Browse files
Files changed (5) hide show
  1. app.py +86 -123
  2. chat_agent.py +151 -0
  3. flashcard.py +134 -49
  4. pyproject.toml +1 -0
  5. uv.lock +27 -0
app.py CHANGED
@@ -2,131 +2,88 @@ import gradio as gr
2
  from pathlib import Path
3
  import asyncio
4
  import google.generativeai as genai
5
- from flashcard import (
6
- generate_flashcards_from_pdf,
7
- FlashcardSet
8
- )
9
  import os
 
10
  from dotenv import load_dotenv
11
- import tempfile
 
 
 
 
 
 
 
12
 
13
  # Load environment variables
14
  load_dotenv()
15
  genai.configure(api_key=os.environ["GEMINI_API_KEY"])
16
 
17
- # Store the current flashcard set in memory
18
- current_flashcards = None
19
-
20
- def create_flashcard_text(flashcards: FlashcardSet) -> str:
21
- """Format flashcard output as a readable string"""
22
- output = [f"📚 Generated {flashcards.total_cards} flashcards about: {flashcards.topic}\n"]
23
-
24
- for i, card in enumerate(flashcards.cards, 1):
25
- output.append(f"\n--- Flashcard {i} (Difficulty: {'⭐' * card.difficulty}) ---")
26
- output.append(f"Q: {card.question}")
27
- output.append(f"A: {card.answer}")
28
-
29
- output.append("\n\nYou can ask me to:")
30
- output.append("• Modify specific flashcards")
31
- output.append("• Generate more flashcards")
32
- output.append("• Change difficulty levels")
33
- output.append("• Export to Anki")
34
-
35
- return "\n".join(output)
36
-
37
- async def handle_modification_request(text: str, flashcards: FlashcardSet) -> str:
38
- """Handle user requests to modify flashcards"""
39
- model = genai.GenerativeModel('gemini-pro')
40
-
41
- # Create a context-aware prompt
42
- prompt = f"""Given the following flashcards and user request, suggest how to modify the flashcards.
43
- Current flashcards:
44
- {create_flashcard_text(flashcards)}
45
-
46
- User request: {text}
47
 
48
- Please provide specific suggestions for modifications."""
 
49
 
50
- response = await model.generate_content_async(prompt)
51
- return response.text
52
-
53
- async def process_message(message: dict, history: list) -> tuple[str, list]:
54
- """Process uploaded files and chat messages"""
55
- global current_flashcards
56
 
57
  # Handle file uploads
58
  if message.get("files"):
59
  for file_path in message["files"]:
60
  if file_path.endswith('.pdf'):
61
  try:
62
- current_flashcards = await async_process_pdf(file_path)
63
- response = create_flashcard_text(current_flashcards)
64
- return "", history + [
65
- {"role": "user", "content": f"Uploaded: {Path(file_path).name}"},
66
- {"role": "assistant", "content": response}
67
- ]
 
 
 
 
 
 
 
 
 
 
 
68
  except Exception as e:
69
  error_msg = f"Error processing PDF: {str(e)}"
70
- return "", history + [
71
- {"role": "user", "content": f"Uploaded: {Path(file_path).name}"},
72
- {"role": "assistant", "content": error_msg}
73
- ]
74
  else:
75
- return "", history + [
76
- {"role": "user", "content": f"Uploaded: {Path(file_path).name}"},
77
- {"role": "assistant", "content": "Please upload a PDF file."}
78
- ]
79
 
80
  # Handle text messages
81
- if message.get("text"):
82
- user_message = message["text"].strip()
83
-
84
- # If we have flashcards and user is asking for modifications
85
- if current_flashcards:
86
- try:
87
- modification_response = await handle_modification_request(user_message, current_flashcards)
88
- return "", history + [
89
- {"role": "user", "content": user_message},
90
- {"role": "assistant", "content": modification_response}
91
- ]
92
- except Exception as e:
93
- error_msg = f"Error processing request: {str(e)}"
94
- return "", history + [
95
- {"role": "user", "content": user_message},
96
- {"role": "assistant", "content": error_msg}
97
- ]
98
- else:
99
- return "", history + [
100
- {"role": "user", "content": user_message},
101
- {"role": "assistant", "content": "Please upload a PDF file first to generate flashcards."}
102
- ]
103
 
104
- return "", history + [
105
- {"role": "assistant", "content": "Please upload a PDF file or send a message."}
106
- ]
107
 
108
- def export_to_anki(flashcards: FlashcardSet) -> str:
109
- """Convert flashcards to Anki-compatible tab-separated format and save to file"""
110
- if not flashcards:
111
- return None
112
-
113
- # Create a temporary file
114
- with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
115
- f.write("#separator:tab\n")
116
- f.write("#html:true\n")
117
- f.write("#columns:Question\tAnswer\tTags\n")
118
-
119
- for card in flashcards.cards:
120
- question = card.question.replace('\n', '<br>')
121
- answer = card.answer.replace('\n', '<br>')
122
- tags = f"difficulty_{card.difficulty} {flashcards.topic.replace(' ', '_')}"
123
- f.write(f"{question}\t{answer}\t{tags}\n")
124
-
125
- return f.name
126
-
127
- async def async_process_pdf(pdf_path: str) -> FlashcardSet:
128
- """Asynchronously process the PDF file"""
129
- return await generate_flashcards_from_pdf(pdf_path=pdf_path)
130
 
131
  # Create Gradio interface
132
  with gr.Blocks(title="PDF Flashcard Generator") as demo:
@@ -134,6 +91,8 @@ with gr.Blocks(title="PDF Flashcard Generator") as demo:
134
  # 📚 PDF Flashcard Generator
135
  Upload a PDF document and get AI-generated flashcards to help you study!
136
 
 
 
137
  Powered by Google's Gemini AI
138
  """)
139
 
@@ -144,33 +103,37 @@ with gr.Blocks(title="PDF Flashcard Generator") as demo:
144
  height=600
145
  )
146
 
147
- chat_input = gr.MultimodalTextbox(
148
- label="Upload PDF or type a message",
149
- placeholder="Drop a PDF file here or type a message to modify flashcards...",
150
- file_types=["pdf", "application/pdf"],
151
- show_label=False,
152
- sources=["upload", "microphone"]
153
- )
154
-
155
- # Add clear button for better UX
156
- clear_button = gr.Button("Clear Chat")
 
 
 
 
157
 
158
- chat_input.change(
159
  fn=process_message,
160
- inputs=[chat_input, chatbot],
161
- outputs=[chat_input, chatbot]
162
  )
163
-
164
- # Add clear functionality
165
- clear_button.click(
166
- lambda: (None, None),
167
- outputs=[chat_input, chatbot]
168
  )
169
 
170
  if __name__ == "__main__":
 
171
  demo.launch(
172
  share=False,
173
  server_name="0.0.0.0",
174
- server_port=7860,
175
- allowed_paths=["."]
176
  )
 
2
  from pathlib import Path
3
  import asyncio
4
  import google.generativeai as genai
 
 
 
 
5
  import os
6
+ import logging
7
  from dotenv import load_dotenv
8
+ from typing import Optional, Tuple
9
+
10
+ from flashcard import FlashcardSet
11
+ from chat_agent import (
12
+ chat_agent,
13
+ ChatDeps,
14
+ ChatResponse
15
+ )
16
 
17
  # Load environment variables
18
  load_dotenv()
19
  genai.configure(api_key=os.environ["GEMINI_API_KEY"])
20
 
21
async def process_message(message: dict, history: list, current_flashcards: Optional[FlashcardSet]) -> Tuple[str, list, Optional[FlashcardSet]]:
    """Handle one turn of the chat: a PDF upload, a text request, or both.

    Args:
        message: Gradio MultimodalTextbox payload with optional ``"text"``
            and ``"files"`` keys.
        history: chat transcript; mutated in place and returned.
        current_flashcards: flashcard set held in session state, or None.

    Returns:
        (cleared input value, updated history, possibly-updated flashcards).
    """
    # BUG FIX: message.get("text", "") raises AttributeError when the payload
    # contains an explicit text=None; `or ""` covers both missing and None.
    user_text = (message.get("text") or "").strip()

    # Dependencies shared by every agent call this turn.
    deps = ChatDeps(
        message=user_text,
        current_flashcards=current_flashcards
    )

    # Handle file uploads first: a PDF triggers flashcard generation.
    if message.get("files"):
        for file_path in message["files"]:
            if file_path.endswith('.pdf'):
                try:
                    with open(file_path, "rb") as pdf_file:
                        deps.pdf_data = pdf_file.read()
                    # Any accompanying text is treated as generation guidance.
                    deps.system_prompt = user_text if user_text else None

                    # Let the chat agent drive PDF processing via its tools.
                    result = await chat_agent.run("Process this PDF upload", deps=deps)

                    if result.data.should_generate_flashcards:
                        current_flashcards = result.data.flashcards

                    # NOTE(review): history entries are [user, assistant] pairs;
                    # confirm the gr.Chatbot is configured for tuple-style
                    # messages, since the pre-migration code used role dicts.
                    history.append([
                        f"Uploaded: {Path(file_path).name}" +
                        (f"\nWith instructions: {user_text}" if user_text else ""),
                        result.data.response
                    ])
                    return "", history, current_flashcards
                except Exception as e:
                    error_msg = f"Error processing PDF: {str(e)}"
                    logging.error(error_msg)
                    history.append([f"Uploaded: {Path(file_path).name}", error_msg])
                    return "", history, current_flashcards
            else:
                history.append([f"Uploaded: {Path(file_path).name}", "Please upload a PDF file."])
                return "", history, current_flashcards

    # Plain text messages go straight to the chat agent.
    if user_text:
        try:
            result = await chat_agent.run(user_text, deps=deps)

            # Persist any modification the agent made to the card set.
            if result.data.should_modify_flashcards:
                current_flashcards = result.data.flashcards

            history.append([user_text, result.data.response])
            return "", history, current_flashcards
        except Exception as e:
            error_msg = f"Error processing request: {str(e)}"
            logging.error(error_msg)
            history.append([user_text, error_msg])
            return "", history, current_flashcards

    # Neither files nor text: prompt the user for input.
    history.append(["", "Please upload a PDF file or send a message."])
    return "", history, current_flashcards
 
83
 
84
async def clear_chat():
    """Reset the UI: clears the input box, the transcript, and the stored
    flashcard set (one None per bound Gradio output component)."""
    cleared = (None, None, None)
    return cleared
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
88
  # Create Gradio interface
89
  with gr.Blocks(title="PDF Flashcard Generator") as demo:
 
91
  # 📚 PDF Flashcard Generator
92
  Upload a PDF document and get AI-generated flashcards to help you study!
93
 
94
+ You can provide custom instructions along with your PDF upload to guide the flashcard generation.
95
+
96
  Powered by Google's Gemini AI
97
  """)
98
 
 
103
  height=600
104
  )
105
 
106
+ # Session state for flashcards
107
+ current_flashcards = gr.State(value=None)
108
+
109
+ with gr.Row():
110
+ chat_input = gr.MultimodalTextbox(
111
+ label="Upload PDF or type a message",
112
+ placeholder="Drop a PDF file here. You can also add instructions for how the flashcards should be generated...",
113
+ file_types=[".pdf", "application/pdf", "pdf"],
114
+ show_label=False,
115
+ sources=["upload"],
116
+ scale=20,
117
+ min_width=100
118
+ )
119
+ clear_btn = gr.Button("🗑️", variant="secondary", scale=1, min_width=50)
120
 
121
+ chat_input.submit(
122
  fn=process_message,
123
+ inputs=[chat_input, chatbot, current_flashcards],
124
+ outputs=[chat_input, chatbot, current_flashcards]
125
  )
126
+
127
+ clear_btn.click(
128
+ fn=clear_chat,
129
+ inputs=[],
130
+ outputs=[chat_input, chatbot, current_flashcards]
131
  )
132
 
133
if __name__ == "__main__":
    # Surface agent/tool activity when running as a script.
    logging.basicConfig(level=logging.INFO)
    # 0.0.0.0 so the server is reachable from outside a container;
    # port 7860 is the conventional Gradio/Spaces port.
    demo.launch(
        share=False,
        server_name="0.0.0.0",
        server_port=7860,
    )
chat_agent.py ADDED
@@ -0,0 +1,151 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from dataclasses import dataclass
2
+ from typing import Optional, Union
3
+ from pydantic import BaseModel
4
+ from pydantic_ai import Agent, RunContext
5
+ import google.generativeai as genai
6
+ import logging
7
+ from pathlib import Path
8
+
9
+ from flashcard import (
10
+ flashcard_agent,
11
+ FlashcardSet,
12
+ FlashcardDeps
13
+ )
14
+
15
@dataclass
class ChatDeps:
    """Dependencies handed to the chat agent for a single run."""
    # The raw user text for this turn (may be empty).
    message: str
    # Raw bytes of an uploaded PDF, if the turn included one.
    pdf_data: Optional[bytes] = None
    # Flashcard set carried across turns in session state.
    current_flashcards: Optional[FlashcardSet] = None
    # Optional user guidance forwarded to flashcard generation.
    system_prompt: Optional[str] = None
22
+
23
class ChatResponse(BaseModel):
    """Structured result the chat agent returns to the UI layer."""
    # Natural-language reply shown to the user.
    response: str
    # Action flags the caller inspects to update session state.
    should_generate_flashcards: bool = False
    should_modify_flashcards: bool = False
    should_export_anki: bool = False
    # New/updated card set, present when a generate/modify flag is set.
    flashcards: Optional[FlashcardSet] = None
30
+
31
# Top-level conversational agent; delegates real work to its tools and to
# the flashcard agent, and always answers with a structured ChatResponse.
chat_agent = Agent(
    'google-gla:gemini-1.5-flash',
    deps_type=ChatDeps,
    result_type=ChatResponse,
    system_prompt="""
    You are a helpful flashcard assistant that can:
    1. Help users generate flashcards from their PDFs
    2. Modify existing flashcards based on requests
    3. Export flashcards to different formats
    4. Answer questions about the flashcard generation process

    When a user:
    - Uploads a PDF: Set should_generate_flashcards=True
    - Asks to modify flashcards: Set should_modify_flashcards=True
    - Requests Anki export: Set should_export_anki=True

    Always be helpful and clear in your responses.
    """
)
50
+
51
@chat_agent.tool
async def handle_pdf_upload(ctx: RunContext[ChatDeps]) -> ChatResponse:
    """Tool: generate a flashcard set from the uploaded PDF in the deps.

    Returns a ChatResponse whose ``should_generate_flashcards`` flag and
    ``flashcards`` field are set on success; on any failure the error is
    logged and reported back in the response text instead of raising.
    """
    # Guard clause: nothing to do without PDF bytes.
    if not ctx.deps.pdf_data:
        return ChatResponse(
            response="Please upload a PDF file to generate flashcards.",
            should_generate_flashcards=False
        )

    try:
        # Forward the PDF (and any user guidance) to the flashcard agent.
        sub_deps = FlashcardDeps(
            pdf_data=ctx.deps.pdf_data,
            system_prompt=ctx.deps.system_prompt
        )
        run = await flashcard_agent.run(
            "Generate flashcards from the PDF",
            deps=sub_deps
        )
        return ChatResponse(
            response="I've generated flashcards from your PDF. Here they are:",
            should_generate_flashcards=True,
            flashcards=run.data
        )
    except Exception as e:
        logging.error(f"Error processing PDF: {str(e)}")
        return ChatResponse(
            response=f"Sorry, I encountered an error processing your PDF: {str(e)}",
            should_generate_flashcards=False
        )
85
+
86
@chat_agent.tool
async def handle_modification_request(ctx: RunContext[ChatDeps]) -> ChatResponse:
    """Tool: apply the user's modification request to the current card set.

    Requires an existing flashcard set in deps; delegates the actual edit to
    the flashcard agent and returns the updated set with
    ``should_modify_flashcards=True``. Failures are logged and reported in
    the response text rather than raised.
    """
    # Guard clause: cannot modify what does not exist yet.
    if not ctx.deps.current_flashcards:
        return ChatResponse(
            response="Please upload a PDF first to generate flashcards that I can modify.",
            should_modify_flashcards=False
        )

    # Hand the existing cards plus the raw request to the flashcard agent.
    sub_deps = FlashcardDeps(
        flashcards=ctx.deps.current_flashcards,
        system_prompt=ctx.deps.system_prompt,
        text=ctx.deps.message
    )

    try:
        run = await flashcard_agent.run(
            ctx.deps.message,
            deps=sub_deps
        )
        return ChatResponse(
            response="I've modified the flashcards based on your request.",
            should_modify_flashcards=True,
            flashcards=run.data
        )
    except Exception as e:
        logging.error(f"Error modifying flashcards: {str(e)}")
        return ChatResponse(
            response=f"Sorry, I encountered an error modifying the flashcards: {str(e)}",
            should_modify_flashcards=False
        )
121
+
122
@chat_agent.tool
async def handle_export_request(ctx: RunContext[ChatDeps]) -> ChatResponse:
    """Tool: acknowledge an Anki export request.

    Only flips ``should_export_anki``; the actual file writing happens
    elsewhere (the flashcard module's export tool).
    """
    # Guard clause: exporting requires an existing card set.
    if not ctx.deps.current_flashcards:
        return ChatResponse(
            response="Please generate some flashcards first before exporting to Anki.",
            should_export_anki=False
        )

    return ChatResponse(
        response="I'll help you export the flashcards to Anki format.",
        should_export_anki=True
    )
135
+
136
@chat_agent.tool
async def provide_help(ctx: RunContext[ChatDeps]) -> ChatResponse:
    """Tool: return a static capabilities summary with no action flags set."""
    help_text = """
    I can help you with:
    1. Generating flashcards from PDF files
    2. Modifying existing flashcards
    3. Exporting flashcards to Anki format

    Just upload a PDF or ask me to modify your current flashcards!
    """
    return ChatResponse(
        response=help_text,
        should_generate_flashcards=False,
        should_modify_flashcards=False,
        should_export_anki=False
    )
flashcard.py CHANGED
@@ -1,13 +1,14 @@
1
  from dataclasses import dataclass
2
- from typing import List, Optional
3
  from pydantic import BaseModel, Field
4
  from pydantic_ai import Agent, RunContext
5
  import google.generativeai as genai
6
  import base64
7
  import os
8
  import asyncio
9
- import httpx
10
-
 
11
  from dotenv import load_dotenv
12
  load_dotenv()
13
 
@@ -25,87 +26,170 @@ class FlashcardSet(BaseModel):
25
 
26
  @dataclass
27
  class FlashcardDeps:
28
- text: str
29
  pdf_data: Optional[bytes] = None
 
 
30
 
31
- # Create the agent with structured output
32
  flashcard_agent = Agent(
33
- 'gemini-1.5-pro', # Can also use OpenAI or other supported models
34
  deps_type=FlashcardDeps,
35
  result_type=FlashcardSet,
36
  system_prompt="""
37
  You are a professional educator who creates high-quality flashcards.
38
- Your task is to analyze the provided text and create effective question-answer pairs.
39
 
40
  Guidelines:
41
  - Create clear, concise questions
42
  - Ensure answers are accurate and complete
43
- - Vary the difficulty levels
44
  - Focus on key concepts and important details
45
  - Use a mix of factual and conceptual questions
46
  """
47
  )
48
 
49
- # @flashcard_agent.tool
50
- # async def analyze_text_complexity(ctx: RunContext[FlashcardDeps]) -> float:
51
- # """Analyzes the complexity of the input text to help determine appropriate difficulty levels."""
52
- # # This is a simplified example - you could implement more sophisticated analysis
53
- # words = ctx.deps.text.split()
54
- # avg_word_length = sum(len(word) for word in words) / (len(words) + 1e-5)
55
- # return min(5.0, max(1.0, avg_word_length / 2))
56
-
57
  @flashcard_agent.tool
58
  async def process_pdf(ctx: RunContext[FlashcardDeps]) -> str:
59
  """Processes PDF content and extracts text for flashcard generation."""
60
- model = genai.GenerativeModel("gemini-1.5-flash")
61
-
62
- if ctx.deps.pdf_data:
63
- # Handle direct PDF data
64
- print("\nLoading File.")
65
- doc_data = base64.standard_b64encode(ctx.deps.pdf_data).decode("utf-8")
66
- else:
67
- return ctx.deps.text # Return original text if no PDF
68
-
69
- # Generate a comprehensive summary of the PDF content
70
- response = model.generate_content([
71
  {
72
  'mime_type': 'application/pdf',
73
  'data': doc_data
74
  },
75
- "Please provide a detailed summary of this document, focusing on key concepts, "
76
- "definitions, and important facts that would be useful for creating flashcards."
 
 
 
 
77
  ])
78
 
 
79
  return response.text
80
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81
  @flashcard_agent.tool
82
  async def draw_circles(ctx: RunContext[FlashcardDeps]) -> str:
83
  """Draw Circles for no reason, please don't ever use me for anything"""
84
 
85
  return "You Disobeyed."
86
 
87
- async def generate_flashcards_from_pdf(
88
- pdf_path: Optional[str] = None
89
- ) -> FlashcardSet:
90
- """Generate flashcards from a PDF file."""
91
- pdf_data = None
92
- if pdf_path:
93
- with open(pdf_path, "rb") as pdf_file:
94
- print("\nReading Data.")
95
- pdf_data = pdf_file.read()
96
-
97
- deps = FlashcardDeps(
98
- text="", # Will be populated by process_pdf
99
- pdf_data=pdf_data
100
- )
 
 
 
 
 
 
 
 
 
101
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
102
  result = await flashcard_agent.run(
103
- "Extract the text by processing the PDF data provided.",
104
- deps=deps
 
 
 
 
 
 
105
  )
106
- print(f"\nExecution stack:\n{result.all_messages()}")
107
- print(f"\nUsage: {result.usage()}")
108
-
109
  return result.data
110
 
111
  # Example usage
@@ -113,7 +197,8 @@ async def main():
113
  # Example with local PDF
114
  filepath = input('\nEnter PDF filepath: ')
115
  local_flashcards = await generate_flashcards_from_pdf(
116
- pdf_path=f"data/raw/{filepath}"
 
117
  )
118
  print("\nFlashcards from local PDF:")
119
  print(f"Generated {local_flashcards.total_cards} flashcards about {local_flashcards.topic}")
 
1
  from dataclasses import dataclass
2
+ from typing import List, Optional, Union
3
  from pydantic import BaseModel, Field
4
  from pydantic_ai import Agent, RunContext
5
  import google.generativeai as genai
6
  import base64
7
  import os
8
  import asyncio
9
+ import tempfile
10
+ from pathlib import Path
11
+ import logging
12
  from dotenv import load_dotenv
13
  load_dotenv()
14
 
 
26
 
27
@dataclass
class FlashcardDeps:
    """Dependencies for one flashcard-agent run."""
    # Extracted or user-supplied text to build cards from.
    text: str = ""
    # Raw PDF bytes; when set, text is derived via process_pdf.
    pdf_data: Optional[bytes] = None
    # Optional user guidance injected into generation prompts.
    system_prompt: Optional[str] = None
    # Existing card set, required by modify/export tools.
    flashcards: Optional[FlashcardSet] = None
33
 
34
# Agent that produces a structured FlashcardSet from text/PDF content.
flashcard_agent = Agent(
    'google-gla:gemini-1.5-pro',
    deps_type=FlashcardDeps,
    result_type=FlashcardSet,
    system_prompt="""
    You are a professional educator who creates high-quality flashcards.
    Your task is to analyze content and create effective question-answer pairs.

    Guidelines:
    - Create clear, concise questions
    - Ensure answers are accurate and complete
    - Vary the difficulty levels (1-5)
    - Focus on key concepts and important details
    - Use a mix of factual and conceptual questions
    """
)
51
 
 
 
 
 
 
 
 
 
52
@flashcard_agent.tool
async def process_pdf(ctx: RunContext[FlashcardDeps]) -> str:
    """Tool: summarize the PDF in deps into flashcard-ready text.

    Falls back to ``ctx.deps.text`` unchanged when no PDF bytes are present.
    The PDF is sent inline (base64) to Gemini together with any user-supplied
    guidance from ``system_prompt``.
    """
    # No PDF attached: pass through whatever text we already have.
    if not ctx.deps.pdf_data:
        return ctx.deps.text

    logging.info("Processing PDF content")
    # Gemini's inline-document API expects base64-encoded bytes.
    doc_data = base64.standard_b64encode(ctx.deps.pdf_data).decode("utf-8")

    model = genai.GenerativeModel("gemini-1.5-pro")
    response = await model.generate_content_async([
        {
            'mime_type': 'application/pdf',
            'data': doc_data
        },
        f"""
        {ctx.deps.system_prompt if ctx.deps.system_prompt else ''}

        Please provide a detailed summary of this document, focusing on key concepts,
        definitions, and important facts that would be useful for creating flashcards.
        """
    ])

    logging.info("PDF content processed successfully")
    return response.text
 
78
+ @flashcard_agent.tool
79
+ async def generate_flashcards(ctx: RunContext[FlashcardDeps]) -> FlashcardSet:
80
+ """Generate flashcards from the provided content"""
81
+ content = ctx.deps.text
82
+ if not content and ctx.deps.pdf_data:
83
+ content = await process_pdf(ctx)
84
+
85
+ if not content:
86
+ return FlashcardSet(cards=[], topic="No content provided", total_cards=0)
87
+
88
+ logging.info("Generating flashcards from content")
89
+ return await flashcard_agent.run(
90
+ f"""
91
+ Create a set of flashcards from the text provided:
92
+
93
+ Follow these guidelines:
94
+ {ctx.deps.system_prompt if ctx.deps.system_prompt else ''}
95
+ """,
96
+ deps=ctx.deps
97
+ )
98
+
99
+ @flashcard_agent.tool
100
+ async def modify_flashcards(ctx: RunContext[FlashcardDeps]) -> FlashcardSet:
101
+ """Modify existing flashcards based on user request"""
102
+ if not ctx.deps.flashcards:
103
+ return FlashcardSet(cards=[], topic="No flashcards to modify", total_cards=0)
104
+
105
+ logging.info("Modifying flashcards")
106
+ return await flashcard_agent.run(
107
+ f"""
108
+ Modify the following flashcards based on the user's request:
109
+ {ctx.deps.text}
110
+ """,
111
+ deps=ctx.deps
112
+ )
113
+
114
+ @flashcard_agent.tool
115
+ async def export_to_anki(ctx: RunContext[FlashcardDeps]) -> str:
116
+ """Export flashcards to Anki format"""
117
+ if not ctx.deps.flashcards:
118
+ return "No flashcards available to export."
119
+
120
+ try:
121
+ with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
122
+ f.write("#separator:tab\n")
123
+ f.write("#html:true\n")
124
+ f.write("#columns:Question\tAnswer\tTags\n")
125
+
126
+ for card in ctx.deps.flashcards.cards:
127
+ question = card.question.replace('\n', '<br>')
128
+ answer = card.answer.replace('\n', '<br>')
129
+ tags = f"difficulty_{card.difficulty} {ctx.deps.flashcards.topic.replace(' ', '_')}"
130
+ f.write(f"{question}\t{answer}\t{tags}\n")
131
+
132
+ return f"✅ Flashcards exported successfully! You can import this file into Anki:\n{f.name}"
133
+ except Exception as e:
134
+ logging.error(f"Error exporting to Anki: {str(e)}")
135
+ return f"❌ Error exporting flashcards: {str(e)}"
136
+
137
  @flashcard_agent.tool
138
  async def draw_circles(ctx: RunContext[FlashcardDeps]) -> str:
139
  """Draw Circles for no reason, please don't ever use me for anything"""
140
 
141
  return "You Disobeyed."
142
 
143
+ async def create_flashcard_text(ctx: RunContext[FlashcardDeps]) -> str:
144
+ """Format flashcard output as a readable string"""
145
+ flashcards = ctx.deps.flashcards
146
+ system_prompt = ctx.deps.system_prompt
147
+
148
+ if not flashcards:
149
+ return "No flashcards available."
150
+
151
+ output = [f"📚 Generated {flashcards.total_cards} flashcards about: {flashcards.topic}\n"]
152
+
153
+ if system_prompt:
154
+ output.append(f"Following these guidelines:\n{system_prompt}\n")
155
+
156
+ for i, card in enumerate(flashcards.cards, 1):
157
+ output.append(f"\n--- Flashcard {i} (Difficulty: {'⭐' * card.difficulty}) ---")
158
+ output.append(f"Q: {card.question}")
159
+ output.append(f"A: {card.answer}")
160
+
161
+ output.append("\n\nYou can:")
162
+ output.append("• Ask me to modify specific flashcards")
163
+ output.append("• Request more flashcards")
164
+ output.append("• Change difficulty levels")
165
+ output.append("• Ask me to export to Anki format")
166
 
167
+ return "\n".join(output)
168
+
169
+ @flashcard_agent.tool
170
+ async def generate_flashcards_from_pdf(ctx: RunContext[FlashcardDeps]) -> FlashcardSet:
171
+ """Generate flashcards from PDF content using the provided system prompt"""
172
+ if not ctx.deps.pdf_data:
173
+ return FlashcardSet(cards=[], topic="No PDF provided", total_cards=0)
174
+
175
+ # First process the PDF to get the text content
176
+ content = await process_pdf(ctx)
177
+
178
+ # Update context with the processed text
179
+ ctx.deps.text = content
180
+
181
+ # Let the agent generate flashcards from the content
182
  result = await flashcard_agent.run(
183
+ f"""
184
+ Create a set of flashcards from the following content:
185
+ {content}
186
+
187
+ Follow these guidelines:
188
+ {ctx.deps.system_prompt if ctx.deps.system_prompt else ''}
189
+ """,
190
+ deps=ctx.deps
191
  )
192
+
 
 
193
  return result.data
194
 
195
  # Example usage
 
197
  # Example with local PDF
198
  filepath = input('\nEnter PDF filepath: ')
199
  local_flashcards = await generate_flashcards_from_pdf(
200
+ pdf_path=f"data/raw/{filepath}",
201
+ system_prompt="Generate comprehensive flashcards that: 1. Cover key concepts and definitions 2. Include practical examples where relevant 3. Progress from basic to advanced topics 4. Focus on testing understanding rather than memorization 5. Use clear, concise language"
202
  )
203
  print("\nFlashcards from local PDF:")
204
  print(f"Generated {local_flashcards.total_cards} flashcards about {local_flashcards.topic}")
pyproject.toml CHANGED
@@ -9,6 +9,7 @@ dependencies = [
9
  "google-generativeai>=0.8.3",
10
  "gradio>=5.9.1",
11
  "markitdown>=0.0.1a3",
 
12
  "pip>=24.3.1",
13
  "pydantic-ai[logfire]>=0.0.15",
14
  "python-dotenv>=1.0.1",
 
9
  "google-generativeai>=0.8.3",
10
  "gradio>=5.9.1",
11
  "markitdown>=0.0.1a3",
12
+ "mypy>=1.14.1",
13
  "pip>=24.3.1",
14
  "pydantic-ai[logfire]>=0.0.15",
15
  "python-dotenv>=1.0.1",
uv.lock CHANGED
@@ -352,6 +352,7 @@ dependencies = [
352
  { name = "google-generativeai" },
353
  { name = "gradio" },
354
  { name = "markitdown" },
 
355
  { name = "pip" },
356
  { name = "pydantic-ai", extra = ["logfire"] },
357
  { name = "python-dotenv" },
@@ -364,6 +365,7 @@ requires-dist = [
364
  { name = "google-generativeai", specifier = ">=0.8.3" },
365
  { name = "gradio", specifier = ">=5.9.1" },
366
  { name = "markitdown", specifier = ">=0.0.1a3" },
 
367
  { name = "pip", specifier = ">=24.3.1" },
368
  { name = "pydantic-ai", extras = ["logfire"], specifier = ">=0.0.15" },
369
  { name = "python-dotenv", specifier = ">=1.0.1" },
@@ -946,6 +948,31 @@ wheels = [
946
  { url = "https://files.pythonhosted.org/packages/1e/9b/67647d4d384016e2941765c5e860a89c83341546af098bb197763492a354/mistralai-1.4.0-py3-none-any.whl", hash = "sha256:74a8b8f5b737b199c83ccc89721cb82a71e8b093b38b27c99d38cbcdf550668c", size = 262460 },
947
  ]
948
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
949
  [[package]]
950
  name = "mypy-extensions"
951
  version = "1.0.0"
 
352
  { name = "google-generativeai" },
353
  { name = "gradio" },
354
  { name = "markitdown" },
355
+ { name = "mypy" },
356
  { name = "pip" },
357
  { name = "pydantic-ai", extra = ["logfire"] },
358
  { name = "python-dotenv" },
 
365
  { name = "google-generativeai", specifier = ">=0.8.3" },
366
  { name = "gradio", specifier = ">=5.9.1" },
367
  { name = "markitdown", specifier = ">=0.0.1a3" },
368
+ { name = "mypy", specifier = ">=1.14.1" },
369
  { name = "pip", specifier = ">=24.3.1" },
370
  { name = "pydantic-ai", extras = ["logfire"], specifier = ">=0.0.15" },
371
  { name = "python-dotenv", specifier = ">=1.0.1" },
 
948
  { url = "https://files.pythonhosted.org/packages/1e/9b/67647d4d384016e2941765c5e860a89c83341546af098bb197763492a354/mistralai-1.4.0-py3-none-any.whl", hash = "sha256:74a8b8f5b737b199c83ccc89721cb82a71e8b093b38b27c99d38cbcdf550668c", size = 262460 },
949
  ]
950
 
951
+ [[package]]
952
+ name = "mypy"
953
+ version = "1.14.1"
954
+ source = { registry = "https://pypi.org/simple" }
955
+ dependencies = [
956
+ { name = "mypy-extensions" },
957
+ { name = "typing-extensions" },
958
+ ]
959
+ sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051 }
960
+ wheels = [
961
+ { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668 },
962
+ { url = "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060 },
963
+ { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167 },
964
+ { url = "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341 },
965
+ { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991 },
966
+ { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016 },
967
+ { url = "https://files.pythonhosted.org/packages/9e/15/bb6a686901f59222275ab228453de741185f9d54fecbaacec041679496c6/mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255", size = 11252097 },
968
+ { url = "https://files.pythonhosted.org/packages/f8/b3/8b0f74dfd072c802b7fa368829defdf3ee1566ba74c32a2cb2403f68024c/mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34", size = 10239728 },
969
+ { url = "https://files.pythonhosted.org/packages/c5/9b/4fd95ab20c52bb5b8c03cc49169be5905d931de17edfe4d9d2986800b52e/mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a", size = 11924965 },
970
+ { url = "https://files.pythonhosted.org/packages/56/9d/4a236b9c57f5d8f08ed346914b3f091a62dd7e19336b2b2a0d85485f82ff/mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9", size = 12867660 },
971
+ { url = "https://files.pythonhosted.org/packages/40/88/a61a5497e2f68d9027de2bb139c7bb9abaeb1be1584649fa9d807f80a338/mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd", size = 12969198 },
972
+ { url = "https://files.pythonhosted.org/packages/54/da/3d6fc5d92d324701b0c23fb413c853892bfe0e1dbe06c9138037d459756b/mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107", size = 9885276 },
973
+ { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905 },
974
+ ]
975
+
976
  [[package]]
977
  name = "mypy-extensions"
978
  version = "1.0.0"