From a7289aa0bdab2122e6696eb31569e306beadf981 Mon Sep 17 00:00:00 2001
From: SISRSIS
Date: Tue, 23 Sep 2025 18:23:58 +0330
Subject: [PATCH] write dockerfile

---
 .dockerignore |  2 ++
 .gitignore    |  3 +++
 chatbot.py    | 18 +++++++++---------
 dockerfile    | 27 +++++++++++++++++++++++++++
 4 files changed, 41 insertions(+), 9 deletions(-)
 create mode 100644 .dockerignore
 create mode 100644 .gitignore
 create mode 100644 dockerfile

diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..59a3cc4
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,2 @@
+./qavanin-faiss
+./llm-answer
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c7cd2cc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+__pycache__/
+qavanin-faiss/faiss_index_qavanin_285k_metadata.json
+qavanin-faiss/faiss_index_qavanin_285k.index
diff --git a/chatbot.py b/chatbot.py
index d3f407c..c5cf93c 100644
--- a/chatbot.py
+++ b/chatbot.py
@@ -28,8 +28,8 @@ chatbot = FastAPI()
 # -------------------
 # مدل‌ها و مسیر داده
 # -------------------
-EMBED_MODEL = "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
-RERANKER_MODEL = "BAAI/bge-reranker-v2-m3"
+EMBED_MODEL = "/home/sabr/MODLES/rag_chat/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
+RERANKER_MODEL = "/home/sabr/MODLES/rag_chat/BAAI/bge-reranker-v2-m3"
 
 FAISS_INDEX_PATH = "./qavanin-faiss/faiss_index_qavanin_285k.index"
 FAISS_METADATA_PATH = "./qavanin-faiss/faiss_index_qavanin_285k_metadata.json"
@@ -393,13 +393,13 @@ def run_chatbot(query:str, chat_id:str):
         'used-refrences-in-answer' : used_refrences_in_answer, # list[str]
         'llm-answer' : llm_answer, # str
     }
-    prev_chat_data = []
-    with open('./llm-answer/chat-messages.json', mode='r', encoding='utf-8') as file:
-        prev_chat_data = json.load(file)
-    prev_chat_data.append(chat_obj)
+    # prev_chat_data = []
+    # with open('./llm-answer/chat-messages.json', mode='r', encoding='utf-8') as file:
+    #     prev_chat_data = json.load(file)
+    # prev_chat_data.append(chat_obj)
 
-    with open('./llm-answer/chat-messages.json', mode='w', encoding='utf-8') as output:
-        json.dump(prev_chat_data, output, ensure_ascii=False, indent=2)
+    # with open('./llm-answer/chat-messages.json', mode='w', encoding='utf-8') as output:
+    #     json.dump(prev_chat_data, output, ensure_ascii=False, indent=2)
 
     return chat_obj
 
@@ -414,7 +414,7 @@ def credit_refresh():
     with open('./llm-answer/credit.txt','w') as file:
         file.write(str(remained_credit.json()['remaining_irt']))
 
-
+    return str(remained_credit.json()['remaining_irt'])
 # تعریف مدل داده‌ها برای درخواست‌های API
 class Query(BaseModel):
     query: str
diff --git a/dockerfile b/dockerfile
new file mode 100644
index 0000000..d717d16
--- /dev/null
+++ b/dockerfile
@@ -0,0 +1,27 @@
+FROM python:3.10.12
+
+RUN pip install cleantext==1.1.4
+RUN pip install elasticsearch7==7.17.12
+RUN pip install faiss_cpu==1.9.0
+RUN pip install fastapi==0.117.1
+RUN pip install hazm==0.10.0
+RUN pip install langchain_openai==0.3.33
+RUN pip install numpy==1.21.5
+RUN pip install openai==1.108.1
+RUN pip install pandas==2.3.2
+RUN pip install pydantic==2.11.9
+RUN pip install scikit_learn==1.7.2
+RUN pip install sentence_transformers==2.5.1
+RUN pip install torch==2.4.0
+RUN pip install transformers==4.55.1
+#RUN pip install torch==2.1.2
+
+
+WORKDIR /src/app
+
+COPY . /src/app
+
+EXPOSE 80
+
+CMD [ "uvicorn","chatbot:chatbot","--reload","--port","80" ]
+
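
Not part of the patch: a minimal build-and-run sketch for the image defined by the dockerfile above. The tag rag-chatbot and host port 8000 are arbitrary examples, and reaching the published port assumes the CMD is extended with --host 0.0.0.0, since uvicorn binds to 127.0.0.1 by default.

# Hypothetical commands; "rag-chatbot" is an example tag, not taken from the patch
docker build -t rag-chatbot -f dockerfile .
# Publish container port 80 on host port 8000; assumes uvicorn is started with --host 0.0.0.0,
# otherwise it listens only on 127.0.0.1 inside the container and the mapping is unreachable
docker run --rm -p 8000:80 rag-chatbot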