JatsTheAIGen committed commit c5e8f57 · 1 Parent(s): f46f1a8

Initial commit V5

Files changed (1)
  1. app.py +56 -21
app.py CHANGED
@@ -2,7 +2,20 @@
 import gradio as gr
 import uuid
 
+try:
+    from spaces import GPU
+    SPACES_GPU_AVAILABLE = True
+except ImportError:
+    # Not running on HF Spaces or spaces module not available
+    SPACES_GPU_AVAILABLE = False
+    GPU = None
+
 def create_mobile_optimized_interface():
+    """Create the mobile-optimized Gradio interface and return demo with components"""
+
+    # Store components for wiring
+    interface_components = {}
+
     with gr.Blocks(
         title="AI Research Assistant MVP",
         theme=gr.themes.Soft(
@@ -127,8 +140,10 @@ def create_mobile_optimized_interface():
                     show_label=False,
                     height="60vh",
                     elem_classes="chatbot-container",
+                    type="messages",
                     render=False  # Improve mobile performance
                 )
+                interface_components['chatbot'] = chatbot
 
                 # Mobile Input Area
                 with gr.Row():
@@ -140,6 +155,7 @@ def create_mobile_optimized_interface():
                         scale=4,
                         autofocus=True
                     )
+                    interface_components['message_input'] = message_input
 
                     send_btn = gr.Button(
                         "↑ Send",
@@ -147,6 +163,7 @@ def create_mobile_optimized_interface():
                         scale=1,
                         min_width=80
                     )
+                    interface_components['send_btn'] = send_btn
 
             # Technical Details Tab (Collapsible for Mobile)
             with gr.TabItem("🔍 Details", id="details_tab"):
@@ -209,8 +226,8 @@ def create_mobile_optimized_interface():
                 )
 
                 gr.Button("Save Preferences", variant="primary")
-
-    return demo
+
+    return demo, interface_components
 
 def setup_event_handlers(demo, event_handlers):
     """Setup event handlers for the interface"""
@@ -250,26 +267,44 @@ def simple_message_handler(message, chat_history):
     new_history = chat_history + [[message, response]]
     return new_history, ""
 
+def process_message(message, history):
+    """Process message with messages format"""
+    if not message.strip():
+        return history, ""
+
+    response = f"I received your message: {message}. This is a placeholder response. The full agent system is ready to integrate!"
+
+    # Add user and assistant messages
+    history.append((message, response))
+
+    return history, ""
+
+# Decorate the chat handler with GPU if available
+if SPACES_GPU_AVAILABLE and GPU is not None:
+    @GPU  # This decorator is detected by HF Spaces for ZeroGPU allocation
+    def gpu_chat_handler(message, history):
+        """Handle chat messages with GPU support"""
+        return process_message(message, history)
+    chat_handler_fn = gpu_chat_handler
+else:
+    chat_handler_fn = process_message
+
 if __name__ == "__main__":
-    demo = create_mobile_optimized_interface()
+    demo, components = create_mobile_optimized_interface()
 
-    # Connect the UI components with the mock handler
-    # (In production, these would use the full orchestrator)
-    try:
-        # This assumes the demo is accessible - in Gradio 4.x, components are scoped
-        # For now, the UI will render even without handlers
-        demo.launch(
-            server_name="0.0.0.0",
-            server_port=7860,
-            share=False
-        )
-    except Exception as e:
-        print(f"Note: UI launched but handlers not connected yet: {e}")
-        print("The framework is ready for integration with the orchestrator.")
-        print("\nNext step: Connect to backend agents in main.py")
-        demo.launch(
-            server_name="0.0.0.0",
-            server_port=7860,
-            share=False
+    # Wire up the submit handler
+    if 'send_btn' in components and 'message_input' in components and 'chatbot' in components:
+        # Connect the submit handler with the GPU-decorated function
+        components['send_btn'].click(
+            fn=chat_handler_fn,
+            inputs=[components['message_input'], components['chatbot']],
+            outputs=[components['chatbot'], components['message_input']]
         )
+
+    # Launch the app
+    demo.launch(
+        server_name="0.0.0.0",
+        server_port=7860,
+        share=False
+    )
 
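
Note: this commit creates the Chatbot with type="messages", but the new process_message placeholder still appends (message, response) tuples to the history; Gradio's messages format expects each history entry to be a dict with "role" and "content" keys. Below is a minimal sketch of a placeholder handler that matches the messages format. It is not what the commit does: the handler name and return shape follow the diff, while the duration argument passed to spaces.GPU is an assumption about the ZeroGPU setup, not something this commit sets.

try:
    import spaces  # only available when running on Hugging Face Spaces
    SPACES_GPU_AVAILABLE = True
except ImportError:
    SPACES_GPU_AVAILABLE = False

def process_message(message, history):
    """Placeholder handler using the dict-based history that type="messages" expects."""
    if not message.strip():
        return history, ""

    response = f"I received your message: {message}. This is a placeholder response."

    # Each turn becomes a role/content dict instead of a (user, bot) tuple
    history = history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": response},
    ]
    return history, ""

# Optionally request a ZeroGPU slot only while the handler runs
# (the duration value in seconds is an assumption for illustration)
if SPACES_GPU_AVAILABLE:
    process_message = spaces.GPU(duration=60)(process_message)

Wired to components['send_btn'].click(...) exactly as in the diff, this keeps the textbox-clearing behaviour (the empty second return value) while producing history entries the messages-format Chatbot will accept.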