
Commit 2ae26bf

Authored May 1, 2023
Merge pull request xtekky#332 from mache102/main
Add searchbar for conversations
2 parents 0817c93 + fe97af4 commit 2ae26bf

File tree: 1 file changed (+26 -7 lines)

gui/streamlit_chat_app.py (+26 -7)
@@ -1,4 +1,5 @@
 import atexit
+import Levenshtein
 import os
 import sys
 
@@ -64,13 +65,16 @@ def exit_handler():
 if 'query_method' not in st.session_state:
     st.session_state['query_method'] = query
 
+if 'search_query' not in st.session_state:
+    st.session_state['search_query'] = ''
+
 # Initialize new conversation
 if 'current_conversation' not in st.session_state or st.session_state['current_conversation'] is None:
     st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}
 
 input_placeholder = st.empty()
 user_input = input_placeholder.text_input(
-    'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
+    'You:', value=st.session_state['input_text'], key=f'input_text_-1'#{st.session_state["input_field_key"]}
 )
 submit_button = st.button("Submit")
 
@@ -79,7 +83,7 @@ def exit_handler():
 
     escaped_output = output.encode('utf-8').decode('unicode-escape')
 
-    st.session_state.current_conversation['user_inputs'].append(user_input)
+    st.session_state['current_conversation']['user_inputs'].append(user_input)
     st.session_state.current_conversation['generated_responses'].append(escaped_output)
     save_conversations(st.session_state.conversations, st.session_state.current_conversation)
     st.session_state['input_text'] = ''
@@ -98,20 +102,35 @@ def exit_handler():
 # Proxy
 st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")
 
+# Searchbar
+search_query = st.sidebar.text_input("Search Conversations:", value=st.session_state.get('search_query', ''), key='search')
+
+if search_query:
+    filtered_conversations = []
+    for conversation in st.session_state.conversations:
+        if search_query in conversation['user_inputs'][0]:
+            filtered_conversations.append(conversation)
+
+    conversations = sorted(filtered_conversations, key=lambda c: Levenshtein.distance(search_query, c['user_inputs'][0]))
+    sidebar_header = f"Search Results ({len(conversations)})"
+else:
+    conversations = st.session_state.conversations
+    sidebar_header = "Conversation History"
+
 # Sidebar
-st.sidebar.header("Conversation History")
+st.sidebar.header(sidebar_header)
 
-for idx, conversation in enumerate(st.session_state.conversations):
+for idx, conversation in enumerate(conversations):
     if st.sidebar.button(f"Conversation {idx + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{idx}"):
         st.session_state['selected_conversation'] = idx
-        st.session_state['current_conversation'] = st.session_state.conversations[idx]
+        st.session_state['current_conversation'] = conversation
 
 if st.session_state['selected_conversation'] is not None:
-    conversation_to_display = st.session_state.conversations[st.session_state['selected_conversation']]
+    conversation_to_display = conversations[st.session_state['selected_conversation']]
 else:
     conversation_to_display = st.session_state.current_conversation
 
 if conversation_to_display['generated_responses']:
     for i in range(len(conversation_to_display['generated_responses']) - 1, -1, -1):
         message(conversation_to_display["generated_responses"][i], key=f"display_generated_{i}")
-        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
+        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
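For reference, the filter-and-rank logic this commit adds can be exercised outside Streamlit. Below is a minimal sketch, assuming the `Levenshtein` package that provides `Levenshtein.distance` (installable via `pip install Levenshtein`, also published as `python-Levenshtein`); the `search_conversations` helper and the sample data are hypothetical illustrations, since the app does this inline against `st.session_state.conversations`.

```python
# Minimal sketch of the searchbar's filter-and-rank step, outside Streamlit.
# `search_conversations` and the sample conversations are hypothetical;
# the app performs the same steps inline on st.session_state.conversations.
import Levenshtein  # pip install Levenshtein (a.k.a. python-Levenshtein)


def search_conversations(conversations, search_query):
    # Keep conversations whose first user input contains the query verbatim
    # (case-sensitive substring match, as in the diff).
    matches = [c for c in conversations if search_query in c['user_inputs'][0]]
    # Rank matches by edit distance between the query and that first input,
    # so the closest match is listed first in the sidebar.
    return sorted(matches, key=lambda c: Levenshtein.distance(search_query, c['user_inputs'][0]))


conversations = [
    {'user_inputs': ['how do I set a proxy?'], 'generated_responses': ['...']},
    {'user_inputs': ['proxy settings are ignored'], 'generated_responses': ['...']},
    {'user_inputs': ['hello there'], 'generated_responses': ['...']},
]

for c in search_conversations(conversations, 'proxy'):
    print(c['user_inputs'][0])
# Prints 'how do I set a proxy?' before 'proxy settings are ignored':
# since the query is a substring of both, the shorter first message
# needs fewer insertions and therefore has the smaller edit distance.
```

Note that only the first user message of each conversation is searched, the substring test is case-sensitive, and because every match already contains the query, the Levenshtein ranking effectively orders results by how much extra text surrounds it. Also, once a search is active, `selected_conversation` indexes into the filtered `conversations` list rather than the full history.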
