Update app.py
app.py CHANGED
@@ -1,4 +1,4 @@
-
+from fastapi import FastAPI
 from datasets import load_dataset
 from transformers import AutoTokenizer
 
@@ -32,9 +32,9 @@ def summarize():
 
     tokenized_billsum = billsum.map(preprocess_function, batched=True)
 
-    return tokenized_billsum
+    return tokenized_billsum
 
-from fastapi import FastAPI
+"""from fastapi import FastAPI
 from datasets import load_dataset
 from transformers import AutoTokenizer
 
@@ -63,4 +63,4 @@ def get_tokenized_data():
     # Convert to list of dictionaries
     json_serializable_output = tokenized_billsum.to_pandas().to_dict(orient="records")
 
-    return {"tokenized_data": json_serializable_output}  # Ensure JSON format
+    return {"tokenized_data": json_serializable_output}  # Ensure JSON format"""
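Taken together, the hunks move `from fastapi import FastAPI` to the top of the file and wrap the duplicated block, from the second `from fastapi import FastAPI` down through the final `return {"tokenized_data": ...}`, in a module-level string literal, effectively commenting it out. That disabled block implemented a FastAPI endpoint that returns the tokenized billsum dataset as JSON, and the hunk inside summarize() shows the tokenization step itself. Below is a minimal, self-contained sketch of that pattern for reference only; the tokenizer checkpoint, the preprocess_function body, the dataset split, the FastAPI() app object, and the route path are all assumptions, since none of them appear in the visible hunks.

from fastapi import FastAPI
from datasets import load_dataset
from transformers import AutoTokenizer

# Assumption: the checkpoint is not visible in the diff; t5-small is a common choice for billsum.
tokenizer = AutoTokenizer.from_pretrained("t5-small")

app = FastAPI()  # Assumption: the app object would live in an unchanged part of app.py.


def preprocess_function(examples):
    # Assumption: a typical preprocessing step that tokenizes the bill text column.
    return tokenizer(examples["text"], truncation=True)


def summarize():
    # Assumption: the load_dataset call and split are outside the shown hunks.
    billsum = load_dataset("billsum", split="ca_test")

    tokenized_billsum = billsum.map(preprocess_function, batched=True)

    return tokenized_billsum


@app.get("/tokenized")  # Assumption: the route decorator is not visible in the diff.
def get_tokenized_data():
    tokenized_billsum = summarize()

    # Convert to list of dictionaries
    json_serializable_output = tokenized_billsum.to_pandas().to_dict(orient="records")

    return {"tokenized_data": json_serializable_output}  # Ensure JSON format

The to_pandas().to_dict(orient="records") conversion is what turns the datasets Arrow table into plain Python dictionaries that FastAPI can serialize as JSON, though returning an entire tokenized split in one response is a heavy payload for a real endpoint.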