import json
import chatbot_handler as chatbot_handler
# import bale_qabot
import os
import numpy as np
import torch
import faiss
from typing import List, Tuple
from sentence_transformers import SentenceTransformer
from transformers import AutoTokenizer, AutoModelForSequenceClassification
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import datetime
import re
import random
from fastapi.middleware.cors import CORSMiddleware
from embedder_sbert_qavanin_285k import PersianVectorAnalyzer
# from normalizer import cleaning
from fastapi import FastAPI, Header
from pydantic import BaseModel
# LLM libs
from openai import OpenAI
from langchain_openai import ChatOpenAI  # pip install -U langchain_openai
import requests
from FlagEmbedding import FlagReranker  # deldar-reranker-v2
import aiofiles

chatbot = FastAPI()

origins = ["*"]

chatbot.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

print('#' * 19)
print('-Chatbot is Ready-')
print('#' * 19)


# Data model for API requests
class Query(BaseModel):
    query: str


# API routes for running the chatbot
@chatbot.get("/")
async def simple():
    return "ai rag chat qanon OK"


@chatbot.get("/ping")
async def ping():
    return "ai rag chat qanon OK"


@chatbot.post("/emergency_call")
async def emergency_call(query: Query):
    print('emergency generate answer ...')
    chat_id = await chatbot_handler.create_chat_id()
    print('emergency chat_id ...', chat_id)
    answer = await chatbot_handler.ask_chatbot_avalai(query.query, chat_id)
    print('emergency answer ...', answer)
    await chatbot_handler.credit_refresh()
    print('credit updated')
    return {"answer": answer}


@chatbot.post("/run_chat")
async def run_chat(query: Query):
    print('regular generate answer ...')
    chat_id = await chatbot_handler.create_chat_id()
    answer = await chatbot_handler.ask_chatbot(query.query, chat_id)
    await chatbot_handler.credit_refresh()
    return {"answer": answer}


# uvicorn src.app:chatbot --reload
if __name__ == "__main__":
    # Sample query: "What services are provided in the Family Protection and Youth of the Population Law?"
    # query = 'در قانون حمایت از خانواده و جوانی جمعیت چه خدماتی در نظر گرفته شده است؟'
    while True:
        query = input('enter your question: ')
        if query == '':
            print('لطفا متن سوال را وارد نمائید')  # "Please enter the question text"
            continue
        start = datetime.datetime.now()
        # result = test_dataset()
        result = chatbot_handler.single_query(query)
        end_retrieve = datetime.datetime.now()
        print('-' * 40)
        print(f'retrieve duration: {(end_retrieve - start).total_seconds()}')
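        # The prompt below is in Persian; roughly: for the question "{query}",
        # extract a suitable and precise answer from the retrieved legal
        # articles "{result}". If nothing relevant to the question is found in
        # the text, reply only with: "Unfortunately, no answer was found in the sources!"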
        prompt = (
            f'برای پرسش "{query}" از میان مواد قانونی "{result}" پاسخ مناسب و دقیق را استخراج کن. '
            'درصورتی که مطلبی مرتبط با پرسش در متن پیدا نشد، فقط پاسخ بده: "متاسفانه در منابع، پاسخی پیدا نشد!"'
        )
        llm_answer = chatbot_handler.llm_request(prompt)
        print('-' * 40)
        print(f'llm duration: {(datetime.datetime.now() - end_retrieve).total_seconds()}')
        # Detect article references in the LLM answer and replace them with formatted citations
        recognized_refrences = chatbot_handler.find_refrences(llm_answer)
        llm_answer = chatbot_handler.replace_refrences(llm_answer, recognized_refrences)
        # Append the answer and the retrieved passages to local log files
        with open('./llm-answer/result.txt', mode='a+', encoding='utf-8') as file:
            result_message = f'متن پرامپت: {query.strip()}\n\nپاسخ: {llm_answer} \n----------------------------------------------------------\n'
            file.write(result_message)
        with open('./llm-answer/passages.txt', mode='a+', encoding='utf-8') as file:
            result_message = f'متن پرامپت: {query.strip()}\n\nمواد مشابه: {result} \n----------------------------------------------------------\n'
            file.write(result_message)
        print('----------------------------------------------------------')
        print(f'full duration: {(datetime.datetime.now() - start).total_seconds()}')
        print('----------------------------------------------------------')
        print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
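

# --- Usage sketch (comments only, not executed) ---------------------------
# A minimal client-side example for the endpoints above, assuming the service
# is started with uvicorn from this module and listens on uvicorn's default
# port 8000 (module path and port are assumptions; adjust to your deployment):
#
#     uvicorn src.app:chatbot --reload
#
#     import requests
#
#     resp = requests.post(
#         "http://127.0.0.1:8000/run_chat",
#         json={"query": "متن پرسش حقوقی"},  # the Query model expects a "query" field
#         timeout=120,
#     )
#     print(resp.json()["answer"])
#
#     # Health check
#     print(requests.get("http://127.0.0.1:8000/ping").text)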