Update app.py
app.py CHANGED
@@ -1,8 +1,6 @@
 # https://chatgpt.com/c/692f03df-88e8-8326-9e22-230b32dd2194
 
 import gradio as gr
-from openai import OpenAI
-import os
 
 # -----------------------------
 # Compliance configuration
@@ -55,13 +53,11 @@ def analyze_compliance(api_key: str, free_text: str, uploaded_file) -> str:
     text_to_check = ""
     if uploaded_file is not None:
         try:
-            # Gradio gives a file object or dict depending on version, handle both
             file_path = getattr(uploaded_file, "name", None)
             if file_path:
                 with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
                     text_to_check = f.read()
             else:
-                # Fallback: if it's bytes-like
                 if hasattr(uploaded_file, "read"):
                     raw_bytes = uploaded_file.read()
                     if isinstance(raw_bytes, bytes):
@@ -109,21 +105,49 @@ def analyze_compliance(api_key: str, free_text: str, uploaded_file) -> str:
         "## Fairness\n"
     )
 
+    # -----------------------------
+    # OpenAI call with robust import handling
+    # -----------------------------
     try:
-        #
-
-
-
-
-
-
-
-
-
-
-
-
-
+        # Try new SDK style first (openai>=1.x)
+        try:
+            from openai import OpenAI  # type: ignore
+            client = OpenAI(api_key=api_key)
+
+            response = client.chat.completions.create(
+                model="gpt-4.1-mini",  # change to "gpt-4.1" if desired
+                messages=[
+                    {"role": "system", "content": system_message},
+                    {"role": "user", "content": user_message},
+                ],
+                temperature=0.1,
+            )
+            result = response.choices[0].message.content
+            return result
+
+        except ModuleNotFoundError:
+            # Fallback: older SDK style `import openai`
+            try:
+                import openai  # type: ignore
+
+                openai.api_key = api_key
+                response = openai.ChatCompletion.create(
+                    model="gpt-4.1-mini",
+                    messages=[
+                        {"role": "system", "content": system_message},
+                        {"role": "user", "content": user_message},
+                    ],
+                    temperature=0.1,
+                )
+                result = response["choices"][0]["message"]["content"]
+                return result
+            except ModuleNotFoundError:
+                return (
+                    "⚠️ **OpenAI library not installed.**\n\n"
+                    "Please add `openai` to your `requirements.txt`:\n\n"
+                    "```text\nopenai>=1.0.0\n```\n"
+                    "and rebuild/restart the app."
+                )
 
     except Exception as e:
         return f"⚠️ **OpenAI API Error:** {e}"
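Note: below is a minimal standalone sketch of the same new-SDK / legacy-SDK fallback that this commit adds, pulled out of the Gradio handler so it can be tried in isolation. The helper name `call_chat` is hypothetical (not part of app.py). It catches the broader `ImportError` on the assumption that an installed but older `openai` package (<1.0) raises `ImportError: cannot import name 'OpenAI'` rather than `ModuleNotFoundError`; a missing package still surfaces as an error to the caller.

```python
# Hypothetical helper (not in app.py): "new SDK first, legacy SDK as fallback".
def call_chat(api_key: str, system_message: str, user_message: str,
              model: str = "gpt-4.1-mini") -> str:
    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message},
    ]
    try:
        # openai>=1.x: client object with chat.completions
        from openai import OpenAI
        client = OpenAI(api_key=api_key)
        response = client.chat.completions.create(
            model=model, messages=messages, temperature=0.1
        )
        return response.choices[0].message.content
    except ImportError:
        # openai<1.0 exposes a module-level ChatCompletion API.
        # If the package is missing entirely, this import re-raises
        # and the caller sees the error.
        import openai
        openai.api_key = api_key
        response = openai.ChatCompletion.create(
            model=model, messages=messages, temperature=0.1
        )
        return response["choices"][0]["message"]["content"]
```

Called with the same `system_message` / `user_message` strings the app builds, this would return the markdown report text on either SDK generation.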