
Commit 53a4a7f

fix due to removing singleton pattern
1 parent 78f3311 commit 53a4a7f


4 files changed: +11 -11 lines changed
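The call sites below stop constructing QueryVectorDB directly and instead go through QueryVectorDB.get_instance(), which now carries the shared-instance behavior that the plain constructor no longer provides (per the commit message, the singleton logic was removed from it). The factory itself is not part of this diff; the following is a minimal sketch of what such a classmethod might look like, with the caching attribute and constructor body assumed for illustration:

class QueryVectorDB:
    _instance = None  # assumed: cache for the shared instance (not shown in this diff)

    def __init__(self, selected_database):
        self.selected_database = selected_database
        # ... open the vector store / load the embedding model for this database ...

    @classmethod
    def get_instance(cls, selected_database):
        # Reuse the cached instance while it points at the same database;
        # otherwise build a fresh one and cache it.
        if cls._instance is None or cls._instance.selected_database != selected_database:
            cls._instance = cls(selected_database)
        return cls._instance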


src/chat_kobold.py

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ def save_metadata_to_file(self, metadata_list):
 
     def ask_kobold(self, query, selected_database):
         if self.query_vector_db is None or self.query_vector_db.selected_database != selected_database:
-            self.query_vector_db = QueryVectorDB(selected_database)
+            self.query_vector_db = QueryVectorDB.get_instance(selected_database)
 
         contexts, metadata_list = self.query_vector_db.search(query)
 

src/chat_lm_studio.py

Lines changed: 8 additions & 8 deletions
@@ -98,31 +98,31 @@ def save_metadata_to_file(self, metadata_list):
 
    def ask_local_chatgpt(self, query, selected_database):
        if self.query_vector_db is None or self.query_vector_db.selected_database != selected_database:
-            self.query_vector_db = QueryVectorDB(selected_database)
-
+            self.query_vector_db = QueryVectorDB.get_instance(selected_database)
+
        contexts, metadata_list = self.query_vector_db.search(query)
-
+
        self.save_metadata_to_file(metadata_list)
-
+
        if not contexts:
            self.signals.error_signal.emit("No relevant contexts found.")
            self.signals.finished_signal.emit()
            return
-
+
        augmented_query = f"{rag_string}\n\n---\n\n" + "\n\n---\n\n".join(contexts) + f"\n\n-----\n\n{query}"
 
        full_response = ""
        response_generator = self.connect_to_local_chatgpt(augmented_query)
        for response_chunk in response_generator:
            self.signals.response_signal.emit(response_chunk)
            full_response += response_chunk
-
+
        with open('chat_history.txt', 'w', encoding='utf-8') as f:
            normalized_response = normalize_chat_text(full_response)
            f.write(normalized_response)
-
+
        self.signals.response_signal.emit("\n")
-
+
        citations = self.handle_response_and_cleanup(full_response, metadata_list)
        self.signals.citations_signal.emit(citations)
        self.signals.finished_signal.emit()

(Only the get_instance line changes visibly; the seven remaining +/- pairs are blank lines and appear to be whitespace-only changes.)

src/chat_local_model.py

Lines changed: 1 addition & 1 deletion
@@ -171,7 +171,7 @@ def _local_model_process(conn, model_name): # child process for local model's ge
        if message.type == MessageType.QUESTION:
            user_question, _, selected_database = message.payload
            if query_vector_db is None or current_database != selected_database:
-                query_vector_db = QueryVectorDB(selected_database)
+                query_vector_db = QueryVectorDB.get_instance(selected_database)
                current_database = selected_database
            contexts, metadata_list = query_vector_db.search(user_question)
            if not contexts:

src/chat_openai.py

Lines changed: 1 addition & 1 deletion
@@ -112,7 +112,7 @@ def save_metadata_to_file(self, metadata_list):
 
    def ask_chatgpt(self, query, selected_database):
        if self.query_vector_db is None or self.query_vector_db.selected_database != selected_database:
-            self.query_vector_db = QueryVectorDB(selected_database)
+            self.query_vector_db = QueryVectorDB.get_instance(selected_database)
 
        contexts, metadata_list = self.query_vector_db.search(query)
 
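All four call sites now follow the same pattern: check whether the cached reference still matches the selected database, fetch the instance through the factory if not, then run the search. A condensed, illustrative version of that caller-side logic (the helper name _get_contexts is hypothetical; the attribute and method names are taken from the diff):

    def _get_contexts(self, query, selected_database):
        # Refresh the cached reference only when the user switches databases.
        if self.query_vector_db is None or self.query_vector_db.selected_database != selected_database:
            self.query_vector_db = QueryVectorDB.get_instance(selected_database)
        # search() returns (contexts, metadata_list), as used by the handlers above.
        return self.query_vector_db.search(query)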
