|
|
import os |
|
|
import json |
|
|
import time |
|
|
import requests |
|
|
from anthropic import Anthropic |
|
|
from openai import OpenAI |
|
|
import gradio as gr |
|
|
import pandas as pd |
|
|
from huggingface_hub import CommitScheduler |
|
|
from datetime import datetime, timedelta |
|
|
import uuid |
|
|
from user_agents import parse as parse_ua |
|
|
import schedule |
|
|
import threading |
|
|
from sentence_transformers import SentenceTransformer |
|
|
import numpy as np |
|
|
import faiss |
|
|
|
|
|
|
|
|
# --- Configuration constants ---
CHARGENODE_URL = "https://www.chargenode.eu"  # company site (reference / platform detection)
MAX_CHUNK_SIZE = 1024   # max characters per text segment fed to the embedder
RETRIEVAL_K = 5         # number of nearest segments retrieved per query

# True when running inside a Hugging Face Space (SPACE_ID is set by the platform)
IS_HUGGINGFACE = os.environ.get("SPACE_ID") is not None

# --- API clients (fail fast at startup if credentials are missing) ---
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
if not OPENAI_API_KEY:
    raise ValueError("OPENAI_API_KEY saknas")
client = OpenAI(api_key=OPENAI_API_KEY)

ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY")
if not ANTHROPIC_API_KEY:
    raise ValueError("ANTHROPIC_API_KEY saknas")
anthropic_client = Anthropic(api_key=ANTHROPIC_API_KEY)

# --- Conversation log: JSON Lines, one entry per line ---
log_folder = "logs"
os.makedirs(log_folder, exist_ok=True)
log_file_path = os.path.join(log_folder, "conversation_log_v2.txt")

# Create an empty log file on first start so the CommitScheduler has something to sync
if not os.path.exists(log_file_path):
    with open(log_file_path, "w", encoding="utf-8") as f:
        f.write("")
    print(f"Skapade tom loggfil: {log_file_path}")

hf_token = os.environ.get("HF_TOKEN")
if not hf_token:
    raise ValueError("HF_TOKEN saknas")

# Periodically pushes the local log folder to an HF dataset repo (every 300 s)
scheduler = CommitScheduler(
    repo_id="ChargeNodeEurope/logfiles",
    repo_type="dataset",
    folder_path=log_folder,
    path_in_repo="logs_v2",
    every=300,
    token=hf_token
)

# Most recent conversation log entry; used to tie feedback to the question it concerns.
# NOTE(review): module-global, so it is shared across concurrent users — verify.
last_log = None

# Lazily-initialized retrieval state (populated by initialize_embeddings)
embedder = None        # SentenceTransformer model
embeddings = None      # normalized chunk embeddings (numpy array)
index = None           # FAISS inner-product index over `embeddings`
chunks = []            # text segments
chunk_sources = []     # source label per segment (parallel to `chunks`)
|
|
|
|
|
|
|
|
def safe_append_to_log(log_entry):
    """Append one entry (as a single JSON line) to the conversation log.

    Never raises: if the first write fails, the log directory is recreated
    and the write retried once. Returns True when the entry was persisted,
    False when even the recovery attempt failed.
    """
    try:
        with open(log_file_path, "a", encoding="utf-8") as log_file:
            log_file.write(json.dumps(log_entry) + "\n")
            log_file.flush()
        print(f"Loggpost tillagd: {log_entry.get('timestamp', 'okänd tid')}")
        return True
    except Exception as e:
        print(f"Fel vid loggning: {e}")

        try:
            # The directory may have disappeared (e.g. ephemeral storage) — recreate it.
            os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
            with open(log_file_path, "a", encoding="utf-8") as log_file:
                log_file.write(json.dumps(log_entry) + "\n")
            print("Loggpost tillagd efter återhämtning")
            return True
        except Exception as retry_error:
            print(f"Kritiskt fel vid loggning: {retry_error}")
            return False
|
|
|
|
|
|
|
|
def load_local_files():
    """Load all supported local files and return their contents as one string.

    Scans the current working directory for .txt/.docx/.pdf/.csv/.xls/.xlsx
    files (minus a small exclusion list), extracts their text and concatenates
    everything, each file prefixed with a "FIL: <name>" header. Files that
    fail to parse are skipped with a printed warning.

    Fixes vs. previous version: branches now match extensions
    case-insensitively (the allow-list already did, so e.g. "A.TXT" used to
    fall through every branch and append stale/undefined content), and the
    result is built with a list + join instead of quadratic `+=`.
    """
    allowed = [".txt", ".docx", ".pdf", ".csv", ".xls", ".xlsx"]
    excluded = ["requirements.txt", "app.py", "conversation_log.txt", "conversation_log_v2.txt", "secrets", "prompt.txt"]
    parts = []  # per-file texts, joined once at the end
    for file in os.listdir("."):
        lower = file.lower()
        if lower.endswith(tuple(allowed)) and file not in excluded:
            try:
                if lower.endswith(".txt"):
                    with open(file, "r", encoding="utf-8") as f:
                        content = f.read()
                elif lower.endswith(".docx"):
                    from docx import Document  # lazy import: only needed for .docx
                    content = "\n".join(p.text for p in Document(file).paragraphs)
                elif lower.endswith(".pdf"):
                    import PyPDF2  # lazy import: only needed for .pdf
                    with open(file, "rb") as f:
                        reader = PyPDF2.PdfReader(f)
                        content = "\n".join(p.extract_text() or "" for p in reader.pages)
                elif lower.endswith(".csv"):
                    content = pd.read_csv(file).to_string()
                elif lower.endswith((".xls", ".xlsx")):
                    if file == "FAQ stadat.xlsx":
                        # Known FAQ sheet structure: format as question/answer pairs
                        df = pd.read_excel(file)
                        rows = [f"Fråga: {row['Fråga']}\nSvar: {row['Svar']}" for _, row in df.iterrows()]
                        content = "\n\n".join(rows)
                    else:
                        content = pd.read_excel(file).to_string()
                parts.append(f"\n\nFIL: {file}\n{content}")
            except Exception as e:
                print(f"Fel vid läsning av {file}: {str(e)}")
    return "".join(parts).strip()
|
|
|
|
|
def load_prompt():
    """Read the system prompt from prompt.txt with robust fallback.

    Returns the file content, or a default prompt (with a printed warning)
    when the file is missing, empty or unreadable. The default string was
    previously duplicated three times; it is now defined once.
    """
    default_prompt = (
        "Du är ChargeNode's AI-assistent. Svara på frågor om ChargeNode's "
        "produkter och tjänster baserat på den tillhandahållna informationen."
    )
    try:
        with open("prompt.txt", "r", encoding="utf-8") as f:
            prompt_content = f.read().strip()
        if not prompt_content:
            print("Varning: prompt.txt är tom, använder standardprompt")
            return default_prompt
        return prompt_content
    except FileNotFoundError:
        print("Varning: prompt.txt hittades inte, använder standardprompt")
        return default_prompt
    except Exception as e:
        print(f"Fel vid inläsning av prompt.txt: {e}, använder standardprompt")
        return default_prompt
|
|
|
|
|
|
|
|
def prepare_chunks(text_data, max_chunk_size=None):
    """Split texts into segments for embedding and search.

    Args:
        text_data: mapping of source label -> text.
        max_chunk_size: maximum characters per segment; defaults to the
            module-level MAX_CHUNK_SIZE (new optional parameter, backward
            compatible).

    Returns:
        (chunks, sources): parallel lists of stripped text segments and the
        source label each segment came from.
    """
    if max_chunk_size is None:
        max_chunk_size = MAX_CHUNK_SIZE
    chunks, sources = [], []
    for source, text in text_data.items():
        paragraphs = [p for p in text.split("\n") if p.strip()]
        chunk = ""
        for para in paragraphs:
            # +1 accounts for the separating space inserted below
            if len(chunk) + len(para) + 1 <= max_chunk_size:
                chunk += " " + para
            else:
                if chunk.strip():
                    chunks.append(chunk.strip())
                    sources.append(source)
                chunk = para
        # Flush the trailing partial chunk for this source
        if chunk.strip():
            chunks.append(chunk.strip())
            sources.append(source)
    return chunks, sources
|
|
|
|
|
def initialize_embeddings():
    """Initialize the SentenceTransformer model and FAISS index on first call.

    Idempotent: does nothing once `embedder` is set. Populates the module
    globals embedder, embeddings, index, chunks and chunk_sources from the
    local knowledge files.
    """
    global embedder, embeddings, index, chunks, chunk_sources

    if embedder is None:
        print("Initierar SentenceTransformer och FAISS-index...")

        print("Laddar textdata...")
        text_data = {"local_files": load_local_files()}
        print("Förbereder textsegment...")
        chunks, chunk_sources = prepare_chunks(text_data)
        print(f"{len(chunks)} segment laddade")

        print("Skapar embeddings...")
        embedder = SentenceTransformer('all-MiniLM-L6-v2')
        embeddings = embedder.encode(chunks, convert_to_numpy=True)
        # L2-normalize so the inner-product index below behaves as cosine similarity
        embeddings /= np.linalg.norm(embeddings, axis=1, keepdims=True)
        index = faiss.IndexFlatIP(embeddings.shape[1])
        index.add(embeddings)
        print("FAISS-index klart")
|
|
|
|
|
def retrieve_context(query, k=RETRIEVAL_K):
    """Return (context_text, source_labels) for the k segments most similar to `query`.

    Lazily builds the embedding model and FAISS index on first call.
    """
    initialize_embeddings()

    # Normalize the query embedding so inner product == cosine similarity
    query_embedding = embedder.encode([query], convert_to_numpy=True)
    query_embedding /= np.linalg.norm(query_embedding)
    D, I = index.search(query_embedding, k)

    retrieved, sources = [], set()
    for idx in I[0]:
        # FAISS pads the result with -1 when fewer than k hits exist; the
        # previous `idx < len(chunks)` check wrongly accepted -1 and returned
        # the *last* chunk. Guard the lower bound too.
        if 0 <= idx < len(chunks):
            retrieved.append(chunks[idx])
            sources.add(chunk_sources[idx])
    return " ".join(retrieved), list(sources)
|
|
|
|
|
|
|
|
# System prompt loaded once at startup (edit prompt.txt and restart to change it)
prompt_template = load_prompt()
|
|
|
|
|
def generate_answer(query):
    """Answer a user question with Claude Haiku, grounded in retrieved context.

    Retrieves the most relevant local-knowledge segments, sends them together
    with the question to the model, and appends the standard AI disclaimer.
    Returns a fallback message when no context is found, and an error message
    (never raises) when the API call fails.
    """
    context, sources = retrieve_context(query)
    if not context.strip():
        return "Jag hittar ingen relevant information i mina källor.\n\nDetta är ett AI genererat svar."

    user_message = (
        "Jag har en fråga om ChargeNode.\n"
        "\n"
        "Relevant kontext för frågan:\n"
        f"{context}\n"
        "\n"
        f"Min fråga är: {query}"
    )

    try:
        response = anthropic_client.messages.create(
            model="claude-3-haiku-20240307",
            max_tokens=500,
            temperature=0.0,          # deterministic support answers
            system=prompt_template,
            messages=[{"role": "user", "content": user_message}],
        )
        answer = response.content[0].text
        return answer + "\n\nAI-genererat. Otillräcklig hjälp? Kontakta support@chargenode.eu eller 010-2051055"
    except Exception as e:
        return f"Tekniskt fel: {str(e)}\n\nAI-genererat. Kontakta support@chargenode.eu eller 010-2051055"
|
|
|
|
|
|
|
|
def send_to_slack(subject, content, color="#2a9d8f"):
    """Send a formatted message to Slack via the incoming-webhook URL.

    Args:
        subject: header text of the message.
        content: mrkdwn-formatted body.
        color: sidebar color of the message. Previously this parameter was
            accepted but silently ignored; the body now lives in an
            attachment so the color actually renders.

    Returns True on HTTP 200, False otherwise. Never raises.
    """
    webhook_url = os.environ.get("SLACK_WEBHOOK_URL")
    if not webhook_url:
        print("Slack webhook URL saknas")
        return False

    try:
        payload = {
            "blocks": [
                {
                    "type": "header",
                    "text": {
                        "type": "plain_text",
                        "text": subject
                    }
                }
            ],
            # Attachments support the colored sidebar; blocks inside an
            # attachment render the same as top-level blocks.
            "attachments": [
                {
                    "color": color,
                    "blocks": [
                        {
                            "type": "section",
                            "text": {
                                "type": "mrkdwn",
                                "text": content
                            }
                        }
                    ]
                }
            ]
        }

        response = requests.post(
            webhook_url,
            json=payload,
            headers={"Content-Type": "application/json"},
            timeout=10,  # avoid hanging caller threads on a slow/unreachable webhook
        )

        if response.status_code == 200:
            print(f"Slack-meddelande skickat: {subject}")
            return True
        else:
            print(f"Slack-anrop misslyckades: {response.status_code}, {response.text}")
            return False
    except Exception as e:
        print(f"Fel vid sändning till Slack: {type(e).__name__}: {e}")
        return False
|
|
|
|
|
|
|
|
def vote(data: gr.LikeData):
    """
    Handle feedback from Gradio's built-in like/dislike buttons.

    data.liked is True for a thumbs-up, otherwise False.
    data.value holds the voted-on message (plain text or a wrapping dict).

    Appends a feedback entry to the conversation log and, for negative
    votes, notifies Slack from a background thread.
    """
    feedback_type = "up" if data.liked else "down"
    global last_log
    log_entry = {
        "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        "feedback": feedback_type,
        # Newer Gradio versions wrap the message in a dict under "value"
        "bot_reply": data.value if not isinstance(data.value, dict) else data.value.get("value")
    }

    # Tie the vote to the most recent conversation, if any.
    # NOTE(review): last_log is a module-global shared by all users, so under
    # concurrency the vote may be attributed to another user's question.
    if last_log:
        log_entry.update({
            "session_id": last_log.get("session_id"),
            "user_message": last_log.get("user_message"),
        })

    safe_append_to_log(log_entry)

    try:
        if feedback_type == "down":
            # NOTE(review): if last_log is None this raises AttributeError,
            # which is swallowed by the except below (no Slack message sent).
            feedback_message = f"""
*⚠️ Negativ feedback registrerad*

*Fråga:* {last_log.get('user_message', 'Okänd fråga')}

*Svar:* {log_entry.get('bot_reply', 'Okänt svar')[:300]}{'...' if len(log_entry.get('bot_reply', '')) > 300 else ''}
"""

            # Fire-and-forget so the UI callback returns immediately
            threading.Thread(
                target=lambda: send_to_slack("Negativ feedback", feedback_message, "#ff0000"),
                daemon=True
            ).start()
    except Exception as e:
        print(f"Kunde inte skicka feedback till Slack: {e}")

    return
|
|
|
|
|
|
|
|
def read_logs(path=None):
    """Read all log entries (one JSON object per line) from the log file.

    Args:
        path: file to read; defaults to the module-level log_file_path
            (new optional parameter, backward compatible).

    Returns a list of dicts. Malformed lines are skipped with a warning;
    blank lines are skipped silently (previously they produced spurious
    JSONDecodeError warnings). Never raises.
    """
    if path is None:
        path = log_file_path
    logs = []
    try:
        if os.path.exists(path):
            line_count = 0
            with open(path, "r", encoding="utf-8") as file:
                for line in file:
                    line_count += 1
                    stripped = line.strip()
                    if not stripped:
                        # Blank lines are not data errors — skip quietly.
                        continue
                    try:
                        logs.append(json.loads(stripped))
                    except json.JSONDecodeError as e:
                        print(f"Varning: Kunde inte tolka rad {line_count}: {e}")
            print(f"Läste {len(logs)} av {line_count} loggposter")
        else:
            print(f"Loggfil saknas: {path}")
    except Exception as e:
        print(f"Fel vid läsning av loggfil: {e}")
    return logs
|
|
|
|
|
def get_latest_conversations(logs, limit=50):
    """Return up to `limit` of the most recent question/answer pairs.

    Scans the log entries newest-first and keeps only records that contain
    both a user message and a bot reply (i.e. skips pure feedback entries).
    """
    recent = []
    for entry in reversed(logs):
        if 'user_message' not in entry or 'bot_reply' not in entry:
            continue
        recent.append({
            'user_message': entry['user_message'],
            'bot_reply': entry['bot_reply'],
            'timestamp': entry.get('timestamp', ''),
        })
        if len(recent) >= limit:
            break
    return recent
|
|
|
|
|
def get_feedback_stats(logs):
    """Summarize thumbs-up/down feedback from the log entries.

    Returns (counts, examples) where counts is {"up": n, "down": m} and
    examples contains at most 10 negative-feedback conversations that have
    an associated user message.
    """
    counts = {"up": 0, "down": 0}
    negatives = []

    for entry in logs:
        if 'feedback' not in entry:
            continue
        vote_value = entry.get('feedback')
        if vote_value not in counts:
            continue  # ignore unrecognized feedback values
        counts[vote_value] += 1

        if vote_value == "down" and 'user_message' in entry and len(negatives) < 10:
            negatives.append({
                'user_message': entry.get('user_message', 'Okänd fråga'),
                'bot_reply': entry.get('bot_reply', 'Okänt svar'),
            })

    return counts, negatives
|
|
|
|
|
def generate_monthly_stats(days=30):
    """Generate usage statistics for the bot over the last `days` days.

    Reads the full conversation log, filters entries to the reporting
    window, and returns a report dict with volume metrics, feedback
    summary, average response time and platform/browser/OS distributions.
    Returns {"error": ...} when no logs exist at all.
    """
    print(f"Genererar statistik för de senaste {days} dagarna...")

    logs = read_logs()

    if not logs:
        return {"error": "Inga loggar hittades för den angivna perioden"}

    # Keep only entries whose timestamp falls inside the reporting window.
    now = datetime.now()
    cutoff_date = now - timedelta(days=days)
    filtered_logs = []

    for log in logs:
        if 'timestamp' in log:
            try:
                log_date = datetime.strptime(log['timestamp'], "%Y-%m-%d %H:%M:%S")
                if log_date >= cutoff_date:
                    filtered_logs.append(log)
            except (ValueError, TypeError):
                # Malformed or non-string timestamp: skip the entry.
                # Previously a bare `except:` that also swallowed
                # KeyboardInterrupt/SystemExit.
                pass

    logs = filtered_logs

    # Volume metrics
    total_conversations = sum(1 for log in logs if 'user_message' in log)
    unique_sessions = len(set(log.get('session_id', 'unknown') for log in logs if 'session_id' in log))
    unique_users = len(set(log.get('user_id', 'unknown') for log in logs if 'user_id' in log))

    # Feedback summary
    feedback_logs = [log for log in logs if 'feedback' in log]
    positive_feedback = sum(1 for log in feedback_logs if log.get('feedback') == 'up')
    negative_feedback = sum(1 for log in feedback_logs if log.get('feedback') == 'down')
    feedback_ratio = (positive_feedback / len(feedback_logs) * 100) if feedback_logs else 0

    # Performance
    response_times = [log.get('response_time', 0) for log in logs if 'response_time' in log]
    avg_response_time = sum(response_times) / len(response_times) if response_times else 0

    # Client distributions (simple value -> count histograms)
    platforms = {}
    browsers = {}
    operating_systems = {}
    for log in logs:
        if 'platform' in log:
            platforms[log['platform']] = platforms.get(log['platform'], 0) + 1
        if 'browser' in log:
            browsers[log['browser']] = browsers.get(log['browser'], 0) + 1
        if 'os' in log:
            operating_systems[log['os']] = operating_systems.get(log['os'], 0) + 1

    report = {
        "period": f"Senaste {days} dagarna",
        "generated_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        "basic_stats": {
            "total_conversations": total_conversations,
            "unique_sessions": unique_sessions,
            "unique_users": unique_users,
            "messages_per_user": round(total_conversations / unique_users, 2) if unique_users else 0
        },
        "feedback": {
            "positive": positive_feedback,
            "negative": negative_feedback,
            "ratio_percent": round(feedback_ratio, 1)
        },
        "performance": {
            "avg_response_time": round(avg_response_time, 2)
        },
        "platform_distribution": platforms,
        "browser_distribution": browsers,
        "os_distribution": operating_systems
    }

    return report
|
|
|
|
|
def simple_status_report():
    """Send a condensed 7-day status report to Slack.

    Returns the result of send_to_slack. On any failure, an error report is
    sent to Slack instead (red color).
    """
    print("Genererar statusrapport för Slack...")

    try:
        stats = generate_monthly_stats(days=7)

        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        subject = f"ChargeNode AI Bot - Status {now}"

        if 'error' in stats:
            content = f"*Fel vid generering av statistik:* {stats['error']}"
            return send_to_slack(subject, content, "#ff0000")

        basic = stats["basic_stats"]
        feedback = stats["feedback"]
        perf = stats["performance"]

        content = f"""
*ChargeNode AI Bot - Statusrapport {now}*

*Basstatistik* (senaste 7 dagarna)
- Totalt antal konversationer: {basic['total_conversations']}
- Unika sessioner: {basic['unique_sessions']}
- Unika användare: {basic['unique_users']}
- Genomsnittlig svarstid: {perf['avg_response_time']} sekunder

*Feedback*
- 👍 Tumme upp: {feedback['positive']}
- 👎 Tumme ned: {feedback['negative']}
- Nöjdhet: {feedback['ratio_percent']}%
"""

        # Append the three most recent conversations, truncated to 100 chars each
        logs = read_logs()
        conversations = get_latest_conversations(logs, 3)

        if conversations:
            content += "\n*Senaste konversationer*\n"
            for conv in conversations:
                content += f"""
> *Tid:* {conv['timestamp']}
> *Fråga:* {conv['user_message'][:100]}{'...' if len(conv['user_message']) > 100 else ''}
> *Svar:* {conv['bot_reply'][:100]}{'...' if len(conv['bot_reply']) > 100 else ''}
"""

        return send_to_slack(subject, content, "#2a9d8f")

    except Exception as e:
        print(f"Fel vid generering av statusrapport: {e}")

        # Best-effort: report the failure itself to Slack
        error_subject = f"ChargeNode AI Bot - Fel vid statusrapport"
        error_content = f"*Fel vid generering av statusrapport:* {str(e)}"
        return send_to_slack(error_subject, error_content, "#ff0000")
|
|
|
|
|
def send_support_to_slack(områdeskod, uttagsnummer, email, chat_history):
    """Forward a support request (user details + chat transcript) to Slack.

    Args:
        områdeskod: area code entered by the user (may be empty).
        uttagsnummer: outlet number entered by the user (may be empty).
        email: the user's email address (validated by the caller).
        chat_history: list of {"role", "content"} messages.

    Returns the result of send_to_slack, or False on any unexpected error.
    """
    try:
        # Render the transcript as Slack quote lines; bot replies truncated to 300 chars
        chat_content = ""
        for msg in chat_history:
            if msg['role'] == 'user':
                chat_content += f">*Användare:* {msg['content']}\n\n"
            elif msg['role'] == 'assistant':
                chat_content += f">*Bot:* {msg['content'][:300]}{'...' if len(msg['content']) > 300 else ''}\n\n"

        subject = f"Support förfrågan - {datetime.now().strftime('%Y-%m-%d %H:%M')}"

        content = f"""
*Användarinformation*
- *Områdeskod:* {områdeskod or 'Ej angiven'}
- *Uttagsnummer:* {uttagsnummer or 'Ej angiven'}
- *Email:* {email}
- *Tidpunkt:* {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}

*Chatthistorik:*
{chat_content}
"""

        return send_to_slack(subject, content, "#e76f51")
    except Exception as e:
        print(f"Fel vid sändning av support till Slack: {type(e).__name__}: {e}")
        return False
|
|
|
|
|
|
|
|
def run_scheduler():
    """Run the report scheduler loop (intended to live in a daemon thread).

    Registers three daily status reports and one weekly statistics dump,
    then polls pending jobs once per minute. Never returns.
    """
    # Status reports three times per day
    schedule.every().day.at("08:00").do(simple_status_report)
    schedule.every().day.at("12:00").do(simple_status_report)
    schedule.every().day.at("17:00").do(simple_status_report)

    # Weekly statistics dump every Monday morning
    schedule.every().monday.at("09:00").do(lambda: send_to_slack(
        "Veckostatistik",
        f"*ChargeNode AI Bot - Veckostatistik*\n\n{json.dumps(generate_monthly_stats(7), indent=2)}",
        "#3498db"
    ))

    while True:
        schedule.run_pending()
        time.sleep(60)
|
|
|
|
|
|
|
|
# Start the report scheduler in a daemon thread so it dies with the process
scheduler_thread = threading.Thread(target=run_scheduler, daemon=True)
scheduler_thread.start()

# NOTE(review): this try block only prints — the actual initial status-report
# call appears to have been removed, so the except branch is effectively dead.
try:
    print("Skickar en inledande statusrapport för att verifiera Slack-integrationen...")
except Exception as e:
    print(f"Information: Statusrapport kommer att skickas enligt schema: {e}")
|
|
|
|
|
|
|
|
# Greeting message shown when the chat first loads
initial_chat = [{"role": "assistant", "content": "Detta är ChargeNode's AI bot. Hur kan jag hjälpa dig idag?"}]

# Widget styling: a fixed bottom-right chat panel; also hides the Gradio footer
custom_css = """
body {background-color: #f7f7f7; font-family: Arial, sans-serif; margin: 0; padding: 0;}
h1 {font-family: Helvetica, sans-serif; color: #2a9d8f; text-align: center; margin-bottom: 0.5em;}
.gradio-container {max-width: 400px; margin: 0; padding: 10px; position: fixed; bottom: 20px; right: 20px; box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.1); border-radius: 10px; background-color: #fff;}
#chatbot_conversation { max-height: 300px; overflow-y: auto; }
.gr-button {background-color: #2a9d8f; color: #fff; border: none; border-radius: 4px; padding: 8px 16px; margin: 5px;}
.gr-button:hover {background-color: #264653;}
.support-btn {background-color: #000000; color: #ffffff; margin-top: 5px; margin-bottom: 10px;}
.support-btn:hover {background-color: #333333;}
.flex-row {display: flex; flex-direction: row; gap: 5px;}
.gr-form {padding: 10px; border: 1px solid #eee; border-radius: 4px; margin-bottom: 10px;}
.chat-preview {max-height: 150px; overflow-y: auto; border: 1px solid #eee; padding: 8px; margin-top: 10px; font-size: 12px; background-color: #f9f9f9;}
.success-message {font-size: 16px; font-weight: normal; margin-bottom: 15px;}
/* Dölj Gradio-footer */
footer {display: none !important;}
.footer {display: none !important;}
.gr-footer {display: none !important;}
.gradio-footer {display: none !important;}
.gradio-container .footer {display: none !important;}
.gradio-container .gr-footer {display: none !important;}
"""
|
|
|
|
|
with gr.Blocks(css=custom_css, title="ChargeNode Kundtjänst") as app:
    gr.Markdown("Ställ din fråga om ChargeNodes produkter och tjänster nedan. Om du inte gillar botten, så ring oss gärna på 010 – 205 10 55")

    # --- View 1: the chat itself (visible by default) ---
    with gr.Group(visible=True) as chat_interface:
        chatbot = gr.Chatbot(value=initial_chat, type="messages", elem_id="chatbot_conversation")
        chatbot.like(vote, None, None)  # thumbs up/down feedback handler

        with gr.Row():
            msg = gr.Textbox(label="Meddelande", placeholder="Ange din fråga...")

        with gr.Row():
            with gr.Column(scale=1):
                clear = gr.Button("Rensa")
            with gr.Column(scale=1):
                support_btn = gr.Button("Behöver du mer hjälp?", elem_classes="support-btn")

    # --- View 2: support-request form (hidden until requested) ---
    with gr.Group(visible=False) as support_interface:
        gr.Markdown("### Vänligen fyll i din områdeskod, uttagsnummer och din email adress")

        with gr.Group(elem_classes="gr-form"):
            områdeskod = gr.Textbox(label="Områdeskod", placeholder="Områdeskod (valfritt)", info="Numeriskt värde")
            uttagsnummer = gr.Textbox(label="Uttagsnummer", placeholder="Uttagsnummer (valfritt)", info="Numeriskt värde")
            email = gr.Textbox(label="Din email adress", placeholder="din@email.se", info="Email adress krävs")

        gr.Markdown("### Chat som skickas till support:")
        chat_preview = gr.Markdown(elem_classes="chat-preview")  # transcript / error display

        with gr.Row():
            back_btn = gr.Button("Tillbaka")
            send_support_btn = gr.Button("Skicka")

    # --- View 3: confirmation shown after a support request was sent ---
    with gr.Group(visible=False) as success_interface:
        gr.Markdown("Tack för att du kontaktar support@chargenode.eu. Vi återkommer inom kort", elem_classes="success-message")
        back_to_chat_btn = gr.Button("Tillbaka till chatten")
|
|
|
|
|
    def respond(message, chat_history, request: gr.Request):
        """Chat callback: generate an answer, log the exchange, mirror to Slack.

        Returns ("", updated_history) so Gradio clears the input textbox.
        """
        global last_log
        start = time.time()
        response = generate_answer(message)
        elapsed = round(time.time() - start, 2)

        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        session_id = str(uuid.uuid4())

        # Reuse the previous session id if one exists.
        # NOTE(review): last_log is a module-global shared by all users, so
        # concurrent visitors end up sharing a "session" — per-user state
        # (gr.State) would be needed to track sessions correctly.
        if last_log and 'session_id' in last_log:
            session_id = last_log.get('session_id')

        user_id = request.client.host if request else "okänd"

        # Client metadata for the log entry
        ua_str = request.headers.get("user-agent", "")
        ref = request.headers.get("referer", "")
        # Prefer the first hop of X-Forwarded-For when behind a proxy
        ip = request.headers.get("x-forwarded-for", user_id).split(",")[0]
        ua = parse_ua(ua_str)
        browser = f"{ua.browser.family} {ua.browser.version_string}"
        osys = f"{ua.os.family} {ua.os.version_string}"

        # Coarse platform classification based on the referer
        platform = "webb"
        if "chargenode.eu" in ref:
            platform = "chargenode.eu"
        elif "localhost" in ref:
            platform = "test"
        elif "app" in ref:
            platform = "app"

        log_data = {
            "timestamp": timestamp,
            "user_id": user_id,
            "session_id": session_id,
            "user_message": message,
            "bot_reply": response,
            "response_time": elapsed,
            "ip": ip,
            "browser": browser,
            "os": osys,
            "platform": platform
        }

        safe_append_to_log(log_data)
        last_log = log_data

        # Mirror the conversation to Slack without blocking the response
        try:
            conversation_content = f"""
*Ny konversation {timestamp}*

*Användare:* {message}

*Bot:* {response[:300]}{'...' if len(response) > 300 else ''}

*Sessionsinfo:* {session_id[:8]}... | {browser} | {platform}
"""

            threading.Thread(
                target=lambda: send_to_slack(f"Ny konversation", conversation_content),
                daemon=True
            ).start()
        except Exception as e:
            print(f"Kunde inte skicka konversation till Slack: {e}")

        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": response})
        return "", chat_history
|
|
|
|
|
def format_chat_preview(chat_history): |
|
|
if not chat_history: |
|
|
return "Ingen chatthistorik att visa." |
|
|
|
|
|
preview = "" |
|
|
for msg in chat_history: |
|
|
sender = "Användare" if msg["role"] == "user" else "Bot" |
|
|
content = msg["content"] |
|
|
if len(content) > 100: |
|
|
content = content[:100] + "..." |
|
|
preview += f"**{sender}:** {content}\n\n" |
|
|
|
|
|
return preview |
|
|
|
|
|
    def show_support_form(chat_history):
        """Switch from the chat view to the support form, showing a transcript preview."""
        preview = format_chat_preview(chat_history)
        return {
            chat_interface: gr.Group(visible=False),
            support_interface: gr.Group(visible=True),
            success_interface: gr.Group(visible=False),
            chat_preview: preview
        }
|
|
|
|
|
    def back_to_chat():
        """Return to the chat view, hiding the support form and the confirmation."""
        return {
            chat_interface: gr.Group(visible=True),
            support_interface: gr.Group(visible=False),
            success_interface: gr.Group(visible=False)
        }
|
|
|
|
|
    def submit_support_form(områdeskod, uttagsnummer, email, chat_history):
        """Validate the support form and forward it to Slack.

        Returns a component-update dict: on success the confirmation view is
        shown; on validation or send failure the form stays visible with an
        error message rendered in the preview area.
        """
        print(f"Support-förfrågan: områdeskod={områdeskod}, uttagsnummer={uttagsnummer}, email={email}")

        # --- Validation: numeric fields are optional, email is required ---
        validation_errors = []

        if områdeskod and not områdeskod.isdigit():
            print(f"Validerar områdeskod: '{områdeskod}' (felaktig)")
            validation_errors.append("Områdeskod måste vara numerisk.")
        else:
            print(f"Validerar områdeskod: '{områdeskod}' (ok)")

        if uttagsnummer and not uttagsnummer.isdigit():
            print(f"Validerar uttagsnummer: '{uttagsnummer}' (felaktig)")
            validation_errors.append("Uttagsnummer måste vara numerisk.")
        else:
            print(f"Validerar uttagsnummer: '{uttagsnummer}' (ok)")

        if not email:
            print("Validerar email: (saknas)")
            validation_errors.append("En giltig e-postadress krävs.")
        elif '@' not in email or '.' not in email.split('@')[1]:
            # Lightweight format check: requires "@" plus a dot in the domain part
            print(f"Validerar email: '{email}' (felaktigt format)")
            validation_errors.append("En giltig e-postadress krävs.")
        else:
            print(f"Validerar email: '{email}' (ok)")

        if validation_errors:
            print(f"Valideringsfel: {validation_errors}")
            return {
                chat_interface: gr.Group(visible=False),
                support_interface: gr.Group(visible=True),
                success_interface: gr.Group(visible=False),
                chat_preview: "\n".join(["**Fel:**"] + validation_errors)
            }

        # --- Send to Slack ---
        try:
            print("Försöker skicka supportförfrågan till Slack...")

            # Debug output: short summary of what will be sent
            chat_summary = []
            for msg in chat_history:
                if 'role' in msg and 'content' in msg:
                    chat_summary.append(f"{msg['role']}: {msg['content'][:30]}...")
            print(f"Chatthistorik att skicka: {chat_summary}")

            success = send_support_to_slack(områdeskod, uttagsnummer, email, chat_history)

            if success:
                print("Support-förfrågan skickad till Slack framgångsrikt")
                return {
                    chat_interface: gr.Group(visible=False),
                    support_interface: gr.Group(visible=False),
                    success_interface: gr.Group(visible=True)
                }
            else:
                print("Support-förfrågan till Slack misslyckades")
                return {
                    chat_interface: gr.Group(visible=False),
                    support_interface: gr.Group(visible=True),
                    success_interface: gr.Group(visible=False),
                    chat_preview: "**Ett fel uppstod när meddelandet skulle skickas. Vänligen försök igen senare.**"
                }
        except Exception as e:
            print(f"Oväntat fel vid hantering av support-formulär: {e}")
            return {
                chat_interface: gr.Group(visible=False),
                support_interface: gr.Group(visible=True),
                success_interface: gr.Group(visible=False),
                chat_preview: f"**Ett fel uppstod: {str(e)}**"
            }
|
|
|
|
|
    # --- Event wiring ---
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)  # reset the chat history
    support_btn.click(show_support_form, chatbot, [chat_interface, support_interface, success_interface, chat_preview])
    back_btn.click(back_to_chat, None, [chat_interface, support_interface, success_interface])
    back_to_chat_btn.click(back_to_chat, None, [chat_interface, support_interface, success_interface])
    send_support_btn.click(
        submit_support_form,
        [områdeskod, uttagsnummer, email, chatbot],
        [chat_interface, support_interface, success_interface, chat_preview]
    )
|
|
|
|
|
|
|
|
# Build the retrieval model and index up front so the first question is fast
print("Förbereder embedding-modell och index...")
initialize_embeddings()
print("Embedding-modell och index redo!")

if __name__ == "__main__":
    # share=True exposes a public Gradio link in addition to the local server
    app.launch(share=True)