bhardwaj08sarthak committed (verified)
Commit d92f6a2 · 1 Parent(s): 7b650cf

Update app.py

Files changed (1)
  1. app.py +16 -5
app.py CHANGED
@@ -56,7 +56,8 @@ from level_classifier_tool_2 import (
     build_phrase_index
 )
 from task_temp import rag_temp, rag_cls_temp, cls_temp, gen_temp
-from all_tools import classify_and_score, QuestionRetrieverTool
+from all_tools import classify_and_score, QuestionRetrieverTool, set_classifier_state, set_retrieval_index
+
 from phrases import BLOOMS_PHRASES, DOK_PHRASES
 from pathlib import Path
 # ------------------------ Prebuild embeddings once ------------------------
@@ -146,6 +147,8 @@ emb = HuggingFaceEmbeddings(
 storage_context = StorageContext.from_defaults(persist_dir=str(persist_dir))
 index = load_index_from_storage(storage_context, embed_model=emb)
 
+set_classifier_state(_backend, _BLOOM_INDEX, _DOK_INDEX)
+set_retrieval_index(index)
 # Datasets & GPU build code remains commented out...
 # @spaces.GPU(15)
 # def build_indexes_on_gpu(model="google/embeddinggemma-300m"):
@@ -156,7 +159,12 @@ index = load_index_from_storage(storage_context, embed_model=emb)
 # encode_kwargs={"normalize_embeddings": True})
 # idx = VectorStoreIndex.from_documents([Document(text=t) for t in texts], embed_model=emb)
 # return idx
-
+TASK_TEMPLATES = {
+    "rag_temp": rag_temp,
+    "rag_cls_temp": rag_cls_temp,
+    "cls_temp": cls_temp,
+    "gen_temp": gen_temp,
+}
 # ------------------------ Agent setup with timeout ------------------------
 def make_agent(hf_token: str, model_id: str, provider: str, timeout: int, temperature: float, max_tokens: int):
     client = InferenceClient(
@@ -198,8 +206,8 @@ def run_pipeline(
         temperature=float(temperature),
         max_tokens=int(max_tokens),
     )
-
-    task = task_type.format(
+    template = TASK_TEMPLATES[task_type]
+    task = template.format(
         grade=grade,
         topic=topic,
         subject=subject,
@@ -251,7 +259,10 @@ with gr.Blocks() as demo:
     )
     subject = gr.Textbox(value="Math", label="Subject")
    task_type = gr.Dropdown(
-        choices=["rag_temp", "rag_cls_temp", "cls_temp", "gen_temp"],
+        choices=[("RAG Template", "rag_temp"),
+                 ("RAG+CLS Template", "rag_cls_temp"),
+                 ("Classification Template", "cls_temp"),
+                 ("Generation Template", "gen_temp")],
         label="task type"
     )
 
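The dropdown change and the new TASK_TEMPLATES dict work together: when gr.Dropdown choices are (label, value) tuples, the UI shows the label ("RAG Template") while the callback receives the value ("rag_temp"), which app.py now maps back to a template object before formatting it. The old code called .format(...) on the selected option string itself, so the placeholders were never filled. Below is a minimal, self-contained sketch of the pattern; the placeholder template strings and the build_task callback are illustrative stand-ins for the real task_temp templates and the relevant part of run_pipeline, which are not shown in this commit.

import gradio as gr

# Stand-in templates (assumed shape: plain format strings); the real ones live in task_temp.
rag_temp = "Retrieve grade {grade} {subject} questions about {topic}."
rag_cls_temp = "Retrieve and classify grade {grade} {subject} questions about {topic}."
cls_temp = "Classify this grade {grade} {subject} question about {topic}."
gen_temp = "Generate a grade {grade} {subject} question about {topic}."

TASK_TEMPLATES = {
    "rag_temp": rag_temp,
    "rag_cls_temp": rag_cls_temp,
    "cls_temp": cls_temp,
    "gen_temp": gen_temp,
}

def build_task(task_type, grade, topic, subject):
    # task_type arrives as the tuple's value ("rag_temp"), never the label ("RAG Template").
    template = TASK_TEMPLATES[task_type]
    return template.format(grade=grade, topic=topic, subject=subject)

with gr.Blocks() as demo:
    task_type = gr.Dropdown(
        choices=[("RAG Template", "rag_temp"),
                 ("RAG+CLS Template", "rag_cls_temp"),
                 ("Classification Template", "cls_temp"),
                 ("Generation Template", "gen_temp")],
        value="rag_temp",
        label="task type",
    )
    grade = gr.Textbox(value="5", label="Grade")
    topic = gr.Textbox(value="Fractions", label="Topic")
    subject = gr.Textbox(value="Math", label="Subject")
    out = gr.Textbox(label="Formatted task")
    gr.Button("Build task").click(build_task, [task_type, grade, topic, subject], out)

if __name__ == "__main__":
    demo.launch()

Selecting any option and clicking the button returns the filled prompt, mirroring what run_pipeline now does with the looked-up template before handing the task to the agent.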
 
 
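app.py also starts injecting its prebuilt resources into all_tools at import time: set_classifier_state(_backend, _BLOOM_INDEX, _DOK_INDEX) hands over the embedding backend and the Bloom's/DOK phrase indexes, and set_retrieval_index(index) hands over the vector index loaded from storage, so classify_and_score and QuestionRetrieverTool can reuse them rather than rebuild them per call. all_tools.py is not part of this diff, so the following is only a hypothetical sketch of what such setters might look like; the module-level holder names and the classify_and_score signature are assumptions.

# Hypothetical sketch of the setters app.py now calls; all_tools.py is not shown in
# this commit, so the holder names (_BACKEND, _RETRIEVAL_INDEX, ...) and the
# classify_and_score signature below are assumptions, not the real implementation.

_BACKEND = None          # embedding backend passed from app.py as _backend
_BLOOM_INDEX = None      # prebuilt Bloom's-taxonomy phrase index
_DOK_INDEX = None        # prebuilt Depth-of-Knowledge phrase index
_RETRIEVAL_INDEX = None  # vector index loaded from storage in app.py

def set_classifier_state(backend, bloom_index, dok_index):
    """Store the shared classifier resources used by classify_and_score."""
    global _BACKEND, _BLOOM_INDEX, _DOK_INDEX
    _BACKEND, _BLOOM_INDEX, _DOK_INDEX = backend, bloom_index, dok_index

def set_retrieval_index(index):
    """Store the vector index that QuestionRetrieverTool queries."""
    global _RETRIEVAL_INDEX
    _RETRIEVAL_INDEX = index

def classify_and_score(question):
    """Illustrative tool body: fail loudly if app.py never injected the state."""
    if _BACKEND is None or _BLOOM_INDEX is None or _DOK_INDEX is None:
        raise RuntimeError("call set_classifier_state(...) before classify_and_score(...)")
    # ...Bloom's/DOK scoring against the injected indexes would go here...

Injecting the shared state once at startup keeps the tool functions free of per-call index construction, which matters here because the GPU index-building path in app.py stays commented out.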