diff --git a/dbgpt/rag/index/base.py b/dbgpt/rag/index/base.py
index c1fdccf17..3689db208 100644
--- a/dbgpt/rag/index/base.py
+++ b/dbgpt/rag/index/base.py
@@ -184,7 +184,6 @@ async def aload_document_with_limit(
             max_threads,
         )
 
-    @abstractmethod
     def similar_search(
         self, text: str, topk: int, filters: Optional[MetadataFilters] = None
     ) -> List[Chunk]:
@@ -197,6 +196,7 @@ def similar_search(
         Return:
             List[Chunk]: The similar documents.
         """
+        return self.similar_search_with_scores(text, topk, 1.0, filters)
 
     async def asimilar_search(
         self,
diff --git a/dbgpt/storage/knowledge_graph/community_summary.py b/dbgpt/storage/knowledge_graph/community_summary.py
index cbcacf023..904b0beba 100644
--- a/dbgpt/storage/knowledge_graph/community_summary.py
+++ b/dbgpt/storage/knowledge_graph/community_summary.py
@@ -268,20 +268,6 @@ def _load_chunks(
 
         return doc_chunk, chunks
 
-    def similar_search(
-        self, text: str, topk: int, filters: Optional[MetadataFilters] = None
-    ) -> List[Chunk]:
-        """Similar search in index database.
-
-        Args:
-            text(str): The query text.
-            topk(int): The number of similar documents to return.
-            filters(Optional[MetadataFilters]): metadata filters.
-        Return:
-            List[Chunk]: The similar documents.
-        """
-        return []
-
     async def asimilar_search_with_scores(
         self,
         text,
diff --git a/examples/rag/graph_rag_example.py b/examples/rag/graph_rag_example.py
index 825a2f20e..40bbff5e1 100644
--- a/examples/rag/graph_rag_example.py
+++ b/examples/rag/graph_rag_example.py
@@ -20,11 +20,9 @@
 )
 
 """GraphRAG example.
-    pre-requirements:
-    * Set LLM config (url/sk) in `.env`.
-    * Install pytest utils: `pip install pytest pytest-asyncio`
-    * Config TuGraph following the format below.
     ```
+    # Set LLM config (url/sk) in `.env`.
+    # Install pytest utils: `pip install pytest pytest-asyncio`
     GRAPH_STORE_TYPE=TuGraph
     TUGRAPH_HOST=127.0.0.1
     TUGRAPH_PORT=7687
@@ -88,8 +86,7 @@ def __create_community_kg_connector():
 
 async def ask_chunk(chunk: Chunk, question) -> str:
     rag_template = (
-        "Based on the following [Context] {context}, "
-        "answer [Question] {question}."
+        "Based on the following [Context] {context}, " "answer [Question] {question}."
    )
     template = HumanPromptTemplate.from_template(rag_template)
     messages = template.format_messages(context=chunk.content, question=question)
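
Taken together, the changes to `base.py` and `community_summary.py` turn `similar_search` from an abstract method into a concrete default on the index-store base class: it now delegates to `similar_search_with_scores` with a score threshold of 1.0, so subclasses such as `CommunitySummaryKnowledgeGraph` no longer need placeholder overrides (the override removed here simply returned `[]`). A minimal sketch of the resulting pattern; the `Chunk` stand-in, the subclass name, and the retrieval body are illustrative, not taken from the codebase:

```python
from abc import ABC, abstractmethod
from typing import Any, List, Optional


class Chunk:
    """Illustrative stand-in for dbgpt.core.Chunk."""

    def __init__(self, content: str, score: float = 0.0):
        self.content = content
        self.score = score


class IndexStoreBase(ABC):
    """Base-class pattern after this change."""

    def similar_search(
        self, text: str, topk: int, filters: Optional[Any] = None
    ) -> List[Chunk]:
        # Formerly @abstractmethod; now a default that reuses the scored
        # variant, passing 1.0 as the score threshold (as in the diff).
        return self.similar_search_with_scores(text, topk, 1.0, filters)

    @abstractmethod
    def similar_search_with_scores(
        self,
        text: str,
        topk: int,
        score_threshold: float,
        filters: Optional[Any] = None,
    ) -> List[Chunk]:
        """Subclasses implement only the scored search."""


class CommunitySummaryStore(IndexStoreBase):
    """Hypothetical subclass: no empty similar_search override needed."""

    def similar_search_with_scores(self, text, topk, score_threshold, filters=None):
        # Placeholder retrieval logic for illustration only.
        return [Chunk(content=f"community summary for: {text}", score=1.0)][:topk]


print(CommunitySummaryStore().similar_search("graph rag", topk=3)[0].content)
```

The practical effect: callers of `similar_search` on the community-summary store now route through the scored search implementation instead of receiving an empty list.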
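
On the `graph_rag_example.py` docstring change: the prerequisites now live inside the fenced config block as `#` comments, next to the TuGraph connection settings they relate to. For readers wiring this up, a generic sketch of consuming that `.env` block follows; the variable names come from the example, while the defaults and the `load_dotenv` usage are assumptions about a typical setup:

```python
import os

from dotenv import load_dotenv  # pip install python-dotenv

load_dotenv()  # pull GRAPH_STORE_TYPE, TUGRAPH_*, and LLM keys from .env

# Variable names as they appear in the example's fenced config block.
graph_store_type = os.getenv("GRAPH_STORE_TYPE", "TuGraph")
tugraph_host = os.getenv("TUGRAPH_HOST", "127.0.0.1")
tugraph_port = int(os.getenv("TUGRAPH_PORT", "7687"))
```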
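
One note on the final hunk, in case the one-line form looks like a behavior change: Python concatenates adjacent string literals at parse time, so joining the two fragments onto a single line (most likely a formatter's doing) leaves the rendered template identical:

```python
# Adjacent string literals concatenate at parse time; both spellings
# produce the same template string.
joined = (
    "Based on the following [Context] {context}, " "answer [Question] {question}."
)
assert joined == (
    "Based on the following [Context] {context}, answer [Question] {question}."
)
```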