diff --git a/backend/retrieval_qa_chain.py b/backend/retrieval_qa_chain.py
index 129ab62..30f7b27 100644
--- a/backend/retrieval_qa_chain.py
+++ b/backend/retrieval_qa_chain.py
@@ -23,7 +23,7 @@ def run_llm(query: str, chat_history: List[Dict[str, Any]] = []):
     )
     chat = ChatOpenAI(
         temperature=0,
-        model_name="gpt-4"
+        model_name="gpt-3.5-turbo"
     )

     qa = ConversationalRetrievalChain.from_llm(
diff --git a/consts.py b/consts.py
index b581db9..911f4bc 100644
--- a/consts.py
+++ b/consts.py
@@ -1 +1 @@
-INDEX_NAME="multiple-pdf-chatbot"
\ No newline at end of file
+INDEX_NAME="msfdocs"
\ No newline at end of file
diff --git a/main.py b/main.py
index de0f9d1..590e584 100644
--- a/main.py
+++ b/main.py
@@ -15,7 +15,7 @@ def create_sources_string(source_urls: Set[str]) -> str:
         sources_string += f"{i+1}. {source}\n"
     return sources_string

-st.header("GPT-4 Q&A over multiple pdf files 🤓 📚")
+st.header("ChatGPT Q&A over MSF Clinical Guidelines & Essential Drug List")
 if (
     "chat_answers_history" not in st.session_state
     and "user_prompt_history" not in st.session_state
@@ -26,7 +26,7 @@ def create_sources_string(source_urls: Set[str]) -> str:
     st.session_state["chat_history"] = []


-prompt = st.text_input("Prompt", placeholder="Enter your message here...") or st.button(
+prompt = st.text_area("Prompt", placeholder="Enter your message here...") or st.button(
     "Submit"
 )

@@ -49,8 +49,8 @@ def create_sources_string(source_urls: Set[str]) -> str:

 if st.session_state["chat_answers_history"]:
     for generated_response, user_query in zip(
-        st.session_state["chat_answers_history"],
-        st.session_state["user_prompt_history"],
+        reversed(st.session_state["chat_answers_history"]),
+        reversed(st.session_state["user_prompt_history"]),
     ):
         message(
             user_query,
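
Note on the last hunk: a minimal standalone sketch of the display-order change, assuming the app appends each new exchange to the end of the session-state lists. The list variables below are hypothetical stand-ins for st.session_state["chat_answers_history"] and st.session_state["user_prompt_history"], not code from the repo.

# Hypothetical stand-ins for the two session-state history lists;
# new exchanges are appended to the end of each list.
chat_answers_history = ["A1", "A2", "A3"]
user_prompt_history = ["Q1", "Q2", "Q3"]

# reversed() returns a lazy iterator over each list without mutating
# the stored history; zip keeps each question paired with its answer.
for generated_response, user_query in zip(
    reversed(chat_answers_history),
    reversed(user_prompt_history),
):
    print(user_query, "->", generated_response)

# Output order: Q3 -> A3, Q2 -> A2, Q1 -> A1, i.e. the newest
# exchange renders at the top of the chat instead of the bottom.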