VibecoderMcSwaggins committed
Commit 6ea5a8b · 1 parent: b866967

fix(ui): simplify BYOK settings with auto-detection


Removes the api_provider dropdown and auto-detects the provider from the key prefix (sk- vs sk-ant-). Simplifies the examples table and clarifies the free-tier messaging.

Fixes #52, Fixes #53
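
The detection rule the diff introduces is purely prefix-based: `sk-ant-` keys route to Anthropic, any other `sk-` key routes to OpenAI, and anything else is rejected. A minimal standalone sketch of that rule (illustrative only; `detect_provider` is not a function in this repo):

```python
def detect_provider(api_key: str) -> str:
    """Return "anthropic" or "openai" based on the key prefix; reject other formats."""
    key = api_key.strip()
    if key.startswith("sk-ant-"):  # check the more specific prefix first
        return "anthropic"
    if key.startswith("sk-"):
        return "openai"
    raise ValueError("Invalid API key format. Expected sk-... (OpenAI) or sk-ant-... (Anthropic)")


# Ordering matters: "sk-ant-..." also starts with "sk-", so the Anthropic
# check must run before the generic OpenAI check.
assert detect_provider("sk-ant-example") == "anthropic"
assert detect_provider(" sk-example ") == "openai"
```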

Files changed (1)
src/app.py  +44 -59
src/app.py CHANGED
@@ -24,7 +24,6 @@ def configure_orchestrator(
     use_mock: bool = False,
     mode: str = "simple",
     user_api_key: str | None = None,
-    api_provider: str = "openai",
 ) -> tuple[Any, str]:
     """
     Create an orchestrator instance.
@@ -32,8 +31,7 @@ def configure_orchestrator(
     Args:
         use_mock: If True, use MockJudgeHandler (no API key needed)
         mode: Orchestrator mode ("simple" or "advanced")
-        user_api_key: Optional user-provided API key (BYOK)
-        api_provider: API provider ("openai" or "anthropic")
+        user_api_key: Optional user-provided API key (BYOK) - auto-detects provider
 
     Returns:
         Tuple of (Orchestrator instance, backend_name)
@@ -60,34 +58,35 @@ def configure_orchestrator(
         backend_info = "Mock (Testing)"
 
     # 2. Paid API Key (User provided or Env)
-    elif (
-        user_api_key
-        or (api_provider == "openai" and os.getenv("OPENAI_API_KEY"))
-        or (api_provider == "anthropic" and os.getenv("ANTHROPIC_API_KEY"))
-    ):
-        model: AnthropicModel | OpenAIModel | None = None
-        if user_api_key:
-            # Validate key/provider match to prevent silent auth failures
-            if api_provider == "openai" and user_api_key.startswith("sk-ant-"):
-                raise ValueError("Anthropic key provided but OpenAI provider selected")
-            is_openai_key = user_api_key.startswith("sk-") and not user_api_key.startswith(
-                "sk-ant-"
-            )
-            if api_provider == "anthropic" and is_openai_key:
-                raise ValueError("OpenAI key provided but Anthropic provider selected")
-            if api_provider == "anthropic":
-                anthropic_provider = AnthropicProvider(api_key=user_api_key)
-                model = AnthropicModel(settings.anthropic_model, provider=anthropic_provider)
-            elif api_provider == "openai":
-                openai_provider = OpenAIProvider(api_key=user_api_key)
-                model = OpenAIModel(settings.openai_model, provider=openai_provider)
-            backend_info = f"Paid API ({api_provider.upper()})"
+    elif user_api_key and user_api_key.strip():
+        # Auto-detect provider from key prefix
+        model: AnthropicModel | OpenAIModel
+        if user_api_key.startswith("sk-ant-"):
+            # Anthropic key
+            anthropic_provider = AnthropicProvider(api_key=user_api_key)
+            model = AnthropicModel(settings.anthropic_model, provider=anthropic_provider)
+            backend_info = "Paid API (Anthropic)"
+        elif user_api_key.startswith("sk-"):
+            # OpenAI key
+            openai_provider = OpenAIProvider(api_key=user_api_key)
+            model = OpenAIModel(settings.openai_model, provider=openai_provider)
+            backend_info = "Paid API (OpenAI)"
         else:
-            backend_info = "Paid API (Env Config)"
-
+            raise ValueError(
+                "Invalid API key format. Expected sk-... (OpenAI) or sk-ant-... (Anthropic)"
+            )
         judge_handler = JudgeHandler(model=model)
 
-    # 3. Free Tier (HuggingFace Inference)
+    # 3. Environment API Keys (fallback)
+    elif os.getenv("OPENAI_API_KEY"):
+        judge_handler = JudgeHandler(model=None)  # Uses env key
+        backend_info = "Paid API (OpenAI from env)"
+
+    elif os.getenv("ANTHROPIC_API_KEY"):
+        judge_handler = JudgeHandler(model=None)  # Uses env key
+        backend_info = "Paid API (Anthropic from env)"
+
+    # 4. Free Tier (HuggingFace Inference)
     else:
         judge_handler = HFInferenceJudgeHandler()
         backend_info = "Free Tier (Llama 3.1 / Mistral)"
@@ -107,7 +106,6 @@ async def research_agent(
     history: list[dict[str, Any]],
     mode: str = "simple",
     api_key: str = "",
-    api_provider: str = "openai",
 ) -> AsyncGenerator[str, None]:
     """
     Gradio chat function that runs the research agent.
@@ -116,8 +114,7 @@ async def research_agent(
         message: User's research question
         history: Chat history (Gradio format)
        mode: Orchestrator mode ("simple" or "advanced")
-        api_key: Optional user-provided API key (BYOK - Bring Your Own Key)
-        api_provider: API provider ("openai" or "anthropic")
+        api_key: Optional user-provided API key (BYOK - auto-detects provider)
 
     Yields:
         Markdown-formatted responses for streaming
@@ -132,24 +129,22 @@ async def research_agent(
     # Check available keys
     has_openai = bool(os.getenv("OPENAI_API_KEY"))
     has_anthropic = bool(os.getenv("ANTHROPIC_API_KEY"))
-    has_user_key = bool(user_api_key)
-    has_paid_key = has_openai or has_anthropic or has_user_key
+    # Check for OpenAI user key
+    is_openai_user_key = (
+        user_api_key and user_api_key.startswith("sk-") and not user_api_key.startswith("sk-ant-")
+    )
+    has_paid_key = has_openai or has_anthropic or bool(user_api_key)
 
     # Advanced mode requires OpenAI specifically (due to agent-framework binding)
-    if mode == "advanced" and not (has_openai or (has_user_key and api_provider == "openai")):
+    if mode == "advanced" and not (has_openai or is_openai_user_key):
         yield (
             "⚠️ **Warning**: Advanced mode currently requires OpenAI API key. "
-            "Falling back to simple mode.\n\n"
+            "Anthropic keys only work in Simple mode. Falling back to Simple.\n\n"
         )
         mode = "simple"
 
-    # Inform user about their key being used
-    if has_user_key:
-        yield (
-            f"🔑 **Using your {api_provider.upper()} API key** - "
-            "Your key is used only for this session and is never stored.\n\n"
-        )
-    elif not has_paid_key:
+    # Inform user about fallback if no keys
+    if not has_paid_key:
         # No paid keys - will use FREE HuggingFace Inference
         yield (
             "🤗 **Free Tier**: Using HuggingFace Inference (Llama 3.1 / Mistral) for AI analysis.\n"
@@ -166,7 +161,6 @@ async def research_agent(
         use_mock=False,  # Never use mock in production - HF Inference is the free fallback
         mode=mode,
         user_api_key=user_api_key,
-        api_provider=api_provider,
     )
 
     yield f"🧠 **Backend**: {backend_name}\n\n"
@@ -212,42 +206,33 @@ def create_demo() -> gr.ChatInterface:
                 "What drugs improve female libido post-menopause?",
                 "simple",
                 "",
-                "openai",
             ],
             [
                 "Clinical trials for erectile dysfunction alternatives to PDE5 inhibitors?",
                 "simple",
                 "",
-                "openai",
             ],
             [
                 "Evidence for testosterone therapy in women with HSDD?",
                 "simple",
                 "",
-                "openai",
             ],
         ],
-        additional_inputs_accordion=gr.Accordion(label="⚙️ Settings", open=False),
+        additional_inputs_accordion=gr.Accordion(
+            label="⚙️ Settings (Free tier works without API key)", open=False
+        ),
        additional_inputs=[
             gr.Radio(
                 choices=["simple", "advanced"],
                 value="simple",
                 label="Orchestrator Mode",
-                info=(
-                    "Simple: Linear (Free Tier Friendly) | Advanced: Multi-Agent (Requires OpenAI)"
-                ),
+                info=("Simple: Works with any key or free tier | " "Advanced: Requires OpenAI key"),
             ),
             gr.Textbox(
-                label="🔑 API Key (Optional - BYOK)",
-                placeholder="sk-... or sk-ant-...",
+                label="🔑 API Key (Optional)",
+                placeholder="sk-... (OpenAI) or sk-ant-... (Anthropic)",
                 type="password",
-                info="Enter your own API key. Never stored.",
-            ),
-            gr.Radio(
-                choices=["openai", "anthropic"],
-                value="openai",
-                label="API Provider",
-                info="Select the provider for your API key",
+                info="Leave empty for free tier. Auto-detects provider from key prefix.",
             ),
         ],
     )
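
One subtlety behind the shrunken `examples` rows: in `gr.ChatInterface`, each example row supplies the message followed by one value per component in `additional_inputs`, so removing the provider `gr.Radio` means every row drops its trailing `"openai"` entry. A stripped-down sketch of the resulting wiring, with a stub standing in for the app's `research_agent` (only the labels and arguments shown in this diff are taken from it):

```python
import gradio as gr


async def research_agent(message, history, mode="simple", api_key=""):
    # Stub in place of src/app.py's streaming chat function.
    yield f"[{mode}] key provided: {bool(api_key.strip())} - {message}"


demo = gr.ChatInterface(
    fn=research_agent,
    # Each row: [message, <Orchestrator Mode>, <API Key>] - one value per additional input.
    examples=[["Evidence for testosterone therapy in women with HSDD?", "simple", ""]],
    additional_inputs_accordion=gr.Accordion(
        label="⚙️ Settings (Free tier works without API key)", open=False
    ),
    additional_inputs=[
        gr.Radio(choices=["simple", "advanced"], value="simple", label="Orchestrator Mode"),
        gr.Textbox(label="🔑 API Key (Optional)", type="password"),
    ],
)

if __name__ == "__main__":
    demo.launch()
```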