import gradio as gr
import os


class ContentAgentUI:
    def __init__(self, agent=None):
        # Optional agent exposing get_response(text); used by the submit callbacks
        self.agent = agent

        # Path to the external CSS file that styles the app
        css_path = os.path.join(os.getcwd(), "ui", "styles.css")

        self.ca_gui = gr.Blocks(css=css_path)
        self.sections = [
            self.create_header,
            self.create_user_guidance,
            self.create_main,
            self.create_examples,
            self.create_footer,
        ]

        # Build each section of the interface, then launch the app
        for section in self.sections:
            section()

        self.ca_gui.launch()

    def create_header(self):
        agent_header = "# Content Agent"
        with self.ca_gui:
            gr.Markdown(agent_header)

    def create_user_guidance(self):
        guidance = """
        Please enter text below to get started. The AI agent will assess how polite the language is and classify it with one of the following labels:

        - `polite`
        - `somewhat polite`
        - `neutral`
        - `impolite`

        The app runs the `deepseek-ai/DeepSeek-R1-Distill-Qwen-32B` text generation model and uses Intel's Polite Guard NLP model.
        Compute: GCP · Nvidia L4 · 4x GPUs · 96 GB
        """
        with self.ca_gui:
            gr.Markdown(guidance)
            
    def create_main(self):
        with self.ca_gui:
            with gr.Row():
                with gr.Column():
                    self.user_input = gr.Textbox(label="Your Input", placeholder="Enter something here...")
                    self.submit_button = gr.Button("Submit")
                    self.output = gr.Textbox(label="Content feedback", interactive=False, lines=10, max_lines=20)

                    # Run the agent when the button is clicked or Enter is pressed
                    self.submit_button.click(self.process_input, inputs=self.user_input, outputs=self.output)
                    self.user_input.submit(self.process_input, inputs=self.user_input, outputs=self.output)

 
    # Load the predefined example prompts from the 'examples' directory
    def get_example(self):
        example_root = os.path.join(os.path.dirname(__file__), "examples")
        if not os.path.isdir(example_root):
            return []

        # Collect the paths of all example text files
        example_files = [
            os.path.join(example_root, name)
            for name in os.listdir(example_root)
            if name.endswith(".txt")
        ]

        # Read each plain-text file and collect its content
        examples = []
        for file_path in example_files:
            with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
                examples.append(f.read())

        return examples

    def create_examples(self):
        # Fetch the example prompts and offer them as radio choices
        examples = self.get_example()

        with self.ca_gui:
            example_radio = gr.Radio(choices=examples, label="Try one of these examples:")

            # When an example is selected, populate the input field with it
            example_radio.change(fn=lambda example: example, inputs=example_radio, outputs=self.user_input)

    def create_footer(self):
        with self.ca_gui:
            gr.Markdown("<div id='footer'>Thanks for trying it out!</div>")


    
    # Main content handler: takes user input and returns the agent's response
    def process_input(self, input_text):
        if self.agent is None:
            return "No agent is configured."
        try:
            # Pass the input to the agent and return its response
            return self.agent.get_response(input_text)
        except Exception as e:
            # Surface any errors in the output box
            return f"Error: {str(e)}"

    def pass_through_agent(self, agent):
        # Store the agent used by process_input; the click/submit handlers
        # are already wired up in create_main.
        self.agent = agent
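
# Hypothetical usage sketch (an assumption, not part of the original module): wires the
# UI to a stand-in agent whose get_response() simply echoes the input. A real agent
# would call the DeepSeek-R1-Distill-Qwen-32B model and Intel's Polite Guard classifier.
if __name__ == "__main__":
    class _EchoAgent:
        """Stand-in for the real content agent."""

        def get_response(self, text):
            return f"(stub) You entered: {text}"

    # ContentAgentUI builds the interface and launches the Gradio app from its constructor.
    ContentAgentUI(agent=_EchoAgent())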