-
Notifications
You must be signed in to change notification settings - Fork 29
/
Copy pathsolar-r.py
122 lines (98 loc) · 4.39 KB
/
solar-r.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
# from https://docs.streamlit.io/develop/tutorials/llms/build-conversational-apps
import streamlit as st
from langchain_upstage import ChatUpstage
from openai import OpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.messages import AIMessage, HumanMessage
# --- Model clients -----------------------------------------------------------
# Solar-mini (Upstage): fast model that generates the final user-facing answer.
solar_mini = ChatUpstage(model="solar-mini")

# DeepSeek client, used only to obtain a reasoning trace for each query.
# NOTE(review): the original code first built a ChatUpstage wrapper for
# "deepseek-reasoner" and then immediately rebound `deepseek_r` to this OpenAI
# client, so the wrapper (and its max_tokens=100) was dead code — removed.
deepseek_r = OpenAI(
    api_key=st.secrets["DEEPSEEK_API_KEY"],
    base_url="https://api.deepseek.com/v1",
)

# --- Page chrome -------------------------------------------------------------
st.set_page_config(page_title="Solar-Online-R", layout="wide")
st.title("Solar-Online-R")
st.caption("Deepseek-R enhanced Solar-mini: Combining Deepseek's reasoning with Solar's fast inference (Online Distillation)")
def get_reasoning(user_query, chat_history, model="deepseek-reasoner"):
    """Ask the DeepSeek reasoner for a reasoning trace about *user_query*.

    Args:
        user_query: The user's current question (plain text).
        chat_history: Iterable of langchain ``AIMessage``/``HumanMessage``
            objects; anything that is not an ``AIMessage`` is sent as "user".
        model: DeepSeek model name to call.

    Returns:
        The choice's ``reasoning_content`` when the model provides one,
        otherwise its ordinary ``content``.
    """
    # Convert chat history to OpenAI chat format, prefixed with the persona
    # system prompt (runtime string preserved verbatim).
    messages = [
        {
            "role": "system",
            "content": """You are Solar, a smart chatbot by Upstage, loved by many people.
Be smart, cheerful, and fun. Give engaging answers and avoid inappropriate language.
reply in the same language of the user query.
You will receive input in the following format:
<reasoning>detailed analysis or reasoning about the query</reasoning>
<user_query>the actual user question</user_query>
Use the reasoning provided to give a more informed and thoughtful response to the user query.
Focus on incorporating insights from the reasoning while maintaining a natural, conversational tone.
Solar is now being connected with a human.""",
        }
    ]
    # Add chat history.
    for message in chat_history:
        role = "assistant" if isinstance(message, AIMessage) else "user"
        messages.append({"role": role, "content": message.content})
    # Add current query.
    messages.append({"role": "user", "content": user_query})

    # max_tokens=1 caps only the *final answer*; deepseek-reasoner returns its
    # reasoning separately in `reasoning_content`, which is all we want here,
    # so this minimizes billed answer tokens. (TODO confirm against the
    # DeepSeek API docs that max_tokens does not also truncate reasoning.)
    response = deepseek_r.chat.completions.create(
        model=model,
        messages=messages,
        max_tokens=1,
    )

    # `reasoning_content` is a DeepSeek-specific field; the original code
    # accessed it unconditionally, which raises AttributeError when the
    # attribute is absent (e.g. a non-reasoner model). Fall back gracefully.
    choice_message = response.choices[0].message
    reasoning = getattr(choice_message, "reasoning_content", None)
    if reasoning:
        return reasoning
    return choice_message.content
def get_response(user_query, chat_history, llm=solar_mini):
    """Stream Solar's answer to *user_query*, conditioned on *chat_history*.

    Returns a string iterator produced by the LCEL chain
    (prompt -> llm -> string parser).
    """
    system_prompt = """You are Solar, a smart chatbot by Upstage, loved by many people.
Be smart, cheerful, and fun. Give engaging answers and avoid inappropriate language.
reply in the same language of the user query.
Solar is now being connected with a human."""

    prompt_template = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            MessagesPlaceholder("chat_history"),
            ("human", "{user_query}"),
        ]
    )

    pipeline = prompt_template | llm | StrOutputParser()
    inputs = {"chat_history": chat_history, "user_query": user_query}
    return pipeline.stream(inputs)
# ---- Conversation state: a list of langchain Human/AI messages kept in the
# Streamlit session so it survives reruns.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the stored conversation on every rerun.
for message in st.session_state.messages:
    role = "AI" if isinstance(message, AIMessage) else "Human"
    with st.chat_message(role):
        # Human turns are stored with the injected <reasoning>/<user_query>
        # wrapper (see the submit handler below); unpack it so the reasoning
        # appears in a collapsible expander rather than as raw tags.
        if role == "Human" and "<reasoning>" in message.content and "<user_query>" in message.content:
            reasoning = message.content.split("<reasoning>")[1].split("</reasoning>")[0].strip()
            user_query = message.content.split("<user_query>")[1].split("</user_query>")[0].strip()
            with st.expander("Show reasoning"):
                st.markdown(reasoning)
            st.markdown(user_query)
        else:
            st.markdown(message.content)

# ---- Handle a new user turn.
if prompt := st.chat_input("What is up?"):
    with st.chat_message("user"):
        st.markdown(prompt)
    with st.chat_message("assistant"):
        # Step 1: get a reasoning trace from DeepSeek and show it while it runs.
        with st.status("Reasoning..."):
            reasoning = get_reasoning(prompt, st.session_state.messages)
            st.write(reasoning)
        # Step 2: wrap reasoning + original query in the tagged format that the
        # Solar system prompt expects, then stream Solar's final answer.
        prompt = f"""<reasoning>{reasoning}</reasoning>
<user_query>{prompt}</user_query>"""
        response = st.write_stream(get_response(prompt, st.session_state.messages))
    # Persist the turn; the human message keeps the wrapper (the replay loop
    # above unpacks it for display).
    st.session_state.messages.append(HumanMessage(content=prompt))
    st.session_state.messages.append(AIMessage(content=response))