diff --git a/gui/streamlit_chat_app.py b/gui/streamlit_chat_app.py
index bca4b95..7eb5a65 100644
--- a/gui/streamlit_chat_app.py
+++ b/gui/streamlit_chat_app.py
@@ -1,4 +1,5 @@
 import atexit
+import Levenshtein
 import os
 import sys
 
@@ -64,13 +65,16 @@ if 'input_field_key' not in st.session_state:
 if 'query_method' not in st.session_state:
     st.session_state['query_method'] = query
 
+if 'search_query' not in st.session_state:
+    st.session_state['search_query'] = ''
+
 # Initialize new conversation
 if 'current_conversation' not in st.session_state or st.session_state['current_conversation'] is None:
     st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}
 
 input_placeholder = st.empty()
 user_input = input_placeholder.text_input(
-    'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
+    'You:', value=st.session_state['input_text'], key=f'input_text_-1'#{st.session_state["input_field_key"]}
 )
 
 submit_button = st.button("Submit")
@@ -79,7 +83,7 @@ if (user_input and user_input != st.session_state['input_text']) or submit_butto
 
     escaped_output = output.encode('utf-8').decode('unicode-escape')
 
-    st.session_state.current_conversation['user_inputs'].append(user_input)
+    st.session_state['current_conversation']['user_inputs'].append(user_input)
     st.session_state.current_conversation['generated_responses'].append(escaped_output)
     save_conversations(st.session_state.conversations, st.session_state.current_conversation)
     st.session_state['input_text'] = ''
@@ -98,20 +102,35 @@ if st.sidebar.button("New Conversation"):
 
 # Proxy
 st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")
-# Sidebar
-st.sidebar.header("Conversation History")
+# Searchbar
+search_query = st.sidebar.text_input("Search Conversations:", value=st.session_state.get('search_query', ''), key='search')
 
-for idx, conversation in enumerate(st.session_state.conversations):
+if search_query:
+    filtered_conversations = []
+    for conversation in st.session_state.conversations:
+        if search_query in conversation['user_inputs'][0]:
+            filtered_conversations.append(conversation)
+
+    conversations = sorted(filtered_conversations, key=lambda c: Levenshtein.distance(search_query, c['user_inputs'][0]))
+    sidebar_header = f"Search Results ({len(conversations)})"
+else:
+    conversations = st.session_state.conversations
+    sidebar_header = "Conversation History"
+
+# Sidebar
+st.sidebar.header(sidebar_header)
+
+for idx, conversation in enumerate(conversations):
     if st.sidebar.button(f"Conversation {idx + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{idx}"):
         st.session_state['selected_conversation'] = idx
-        st.session_state['current_conversation'] = st.session_state.conversations[idx]
+        st.session_state['current_conversation'] = conversation
 
 if st.session_state['selected_conversation'] is not None:
-    conversation_to_display = st.session_state.conversations[st.session_state['selected_conversation']]
+    conversation_to_display = conversations[st.session_state['selected_conversation']]
 else:
     conversation_to_display = st.session_state.current_conversation
 
 if conversation_to_display['generated_responses']:
     for i in range(len(conversation_to_display['generated_responses']) - 1, -1, -1):
         message(conversation_to_display["generated_responses"][i], key=f"display_generated_{i}")
-        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
+        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
\ No newline at end of file