# app.py - Mobile-First Implementation
import gradio as gr
import uuid
def create_mobile_optimized_interface():
    with gr.Blocks(
        title="AI Research Assistant MVP",
        theme=gr.themes.Soft(
            primary_hue="blue",
            secondary_hue="gray",
            font=("Inter", "system-ui", "sans-serif")
        ),
        css="""
        /* Mobile-first responsive CSS */
        .mobile-container {
            max-width: 100vw;
            margin: 0 auto;
            padding: 0 12px;
        }

        /* Touch-friendly button sizing */
        .gradio-button {
            min-height: 44px !important;
            min-width: 44px !important;
            font-size: 16px !important; /* Prevents zoom on iOS */
        }

        /* Mobile-optimized chat interface */
        .chatbot-container {
            height: 60vh !important;
            max-height: 60vh !important;
            overflow-y: auto !important;
            -webkit-overflow-scrolling: touch !important;
        }

        /* Mobile input enhancements */
        .textbox-input {
            font-size: 16px !important; /* Prevents zoom */
            min-height: 44px !important;
            padding: 12px !important;
        }

        /* Responsive grid adjustments */
        @media (max-width: 768px) {
            .gradio-row {
                flex-direction: column !important;
                gap: 8px !important;
            }
            .gradio-column {
                width: 100% !important;
            }
            .chatbot-container {
                height: 50vh !important;
            }
        }

        /* Dark mode support */
        @media (prefers-color-scheme: dark) {
            body {
                background: #1a1a1a;
                color: #ffffff;
            }
        }

        /* Hide scrollbars but maintain functionality */
        .chatbot-container::-webkit-scrollbar {
            width: 4px;
        }

        /* Loading states */
        .loading-indicator {
            display: flex;
            align-items: center;
            justify-content: center;
            padding: 20px;
        }

        /* Mobile menu enhancements */
        .accordion-content {
            max-height: 200px !important;
            overflow-y: auto !important;
        }
        """
    ) as demo:
        # Session Management (Mobile-Optimized)
        with gr.Column(elem_classes="mobile-container"):
            gr.Markdown("""
            # 🧠 Research Assistant
            *Academic AI with transparent reasoning*
            """)

            # Session Header Bar (Mobile-Friendly)
            with gr.Row():
                session_info = gr.Textbox(
                    label="Session ID",
                    value=str(uuid.uuid4())[:8],  # Shortened for mobile
                    max_lines=1,
                    show_label=False,
                    container=False,
                    scale=3
                )
                new_session_btn = gr.Button(
                    "🔄 New",
                    size="sm",
                    variant="secondary",
                    scale=1,
                    min_width=60
                )
                menu_toggle = gr.Button(
                    "⚙️",
                    size="sm",
                    variant="secondary",
                    scale=1,
                    min_width=60
                )

            # Main Chat Area (Mobile-Optimized)
            with gr.Tabs() as main_tabs:
                with gr.TabItem("💬 Chat", id="chat_tab"):
                    chatbot = gr.Chatbot(
                        label="",
                        show_label=False,
                        height="60vh",
                        elem_classes="chatbot-container"
                    )
                    # Mobile Input Area
                    with gr.Row():
                        message_input = gr.Textbox(
                            placeholder="Ask me anything...",
                            show_label=False,
                            max_lines=3,
                            container=False,
                            scale=4,
                            autofocus=True
                        )
                        send_btn = gr.Button(
                            "↑ Send",
                            variant="primary",
                            scale=1,
                            min_width=80
                        )

                # Technical Details Tab (Collapsible for Mobile)
                with gr.TabItem("🔍 Details", id="details_tab"):
                    with gr.Accordion("Reasoning Chain", open=False):
                        reasoning_display = gr.JSON(
                            label="",
                            show_label=False
                        )
                    with gr.Accordion("Agent Performance", open=False):
                        performance_display = gr.JSON(
                            label="",
                            show_label=False
                        )
                    with gr.Accordion("Session Context", open=False):
                        context_display = gr.JSON(
                            label="",
                            show_label=False
                        )
            # Mobile Bottom Navigation
            with gr.Row(visible=False, elem_id="mobile_nav") as mobile_navigation:
                chat_nav_btn = gr.Button("💬 Chat", variant="secondary", size="sm", min_width=0)
                details_nav_btn = gr.Button("🔍 Details", variant="secondary", size="sm", min_width=0)
                settings_nav_btn = gr.Button("⚙️ Settings", variant="secondary", size="sm", min_width=0)

            # Settings Panel (Modal for Mobile)
            with gr.Column(visible=False, elem_id="settings_panel") as settings:
                with gr.Accordion("Display Options", open=True):
                    show_reasoning = gr.Checkbox(
                        label="Show reasoning chain",
                        value=True,
                        info="Display step-by-step reasoning"
                    )
                    show_agent_trace = gr.Checkbox(
                        label="Show agent execution trace",
                        value=False,
                        info="Display which agents processed your request"
                    )
                    compact_mode = gr.Checkbox(
                        label="Compact mode",
                        value=False,
                        info="Optimize for smaller screens"
                    )
                with gr.Accordion("Performance Options", open=False):
                    response_speed = gr.Radio(
                        choices=["Fast", "Balanced", "Thorough"],
                        value="Balanced",
                        label="Response Speed Preference"
                    )
                    cache_enabled = gr.Checkbox(
                        label="Enable context caching",
                        value=True,
                        info="Faster responses using session memory"
                    )
                gr.Button("Save Preferences", variant="primary")

    return demo

def setup_event_handlers(demo, event_handlers):
    """Set up event handlers for the interface"""
    # Find components by their labels or types
    components = {}
    for block in demo.blocks.values():
        if hasattr(block, 'label'):
            if block.label == 'Session ID':
                components['session_info'] = block
            elif hasattr(block, 'value') and 'session' in str(block.value).lower():
                components['session_id'] = block

    # Set up the message submission handler
    try:
        # This is a simplified version - adapt it to your actual component structure
        if hasattr(demo, 'submit'):
            demo.submit(
                fn=event_handlers.handle_message_submit,
                inputs=[components.get('message_input'), components.get('chatbot')],
                outputs=[components.get('message_input'), components.get('chatbot')]
            )
    except Exception as e:
        print(f"Could not set up event handlers: {e}")
        # Fall back to basic functionality

    return demo
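
# Hypothetical usage sketch for setup_event_handlers, assuming the orchestrator in
# main.py exposes an object with a handle_message_submit(message, chat_history) method:
#
#     from main import EventHandlers  # hypothetical name
#     demo = create_mobile_optimized_interface()
#     demo = setup_event_handlers(demo, EventHandlers())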

def simple_message_handler(message, chat_history):
    """Simple mock handler for testing the UI without the full backend"""
    chat_history = chat_history or []  # Guard against an uninitialized chat history
    if not message.strip():
        return chat_history, ""

    # Simple echo response for MVP testing
    response = f"I received your message: {message}. This is a placeholder response. The full agent system is ready to integrate!"
    new_history = chat_history + [[message, response]]
    return new_history, ""

if __name__ == "__main__":
    demo = create_mobile_optimized_interface()

    # The UI components are connected to the mock handler for now
    # (in production, these events would use the full orchestrator)
    try:
        # In Gradio 4.x, components are scoped to the Blocks context,
        # so the UI renders even before the backend handlers are connected
        demo.launch(
            server_name="0.0.0.0",
            server_port=7860,
            share=False
        )
    except Exception as e:
        print(f"Note: launch with connected handlers failed: {e}")
        print("The framework is ready for integration with the orchestrator.")
        print("\nNext step: Connect to backend agents in main.py")
        demo.launch(
            server_name="0.0.0.0",
            server_port=7860,
            share=False
        )