Skip to content

Commit 607aeba

Browse files
authored
fix(settings): resolve PydanticUserError in repo_agent/settings.py (#80)
- Update dependencies to latest versions
- Switch to llama-index-llms-openai
- Specify minimum Python version as 3.11
- Export to requirements.txt
1 parent e559bf1 commit 607aeba

File tree

6 files changed

+3123
-2727
lines changed

6 files changed

+3123
-2727
lines changed

pdm.lock

+1,657 −1,450
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

+3-4
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ maintainers = [
66
{name = "Edwards Arno", email = "[email protected]"},
77
]
88
license = {text = "Apache-2.0"}
9-
requires-python = ">=3.10,<4.0"
9+
requires-python = ">=3.11,<4.0"
1010
dependencies = [
1111
"loguru>=0.7.2",
1212
"openai>=1.10.0",
@@ -21,16 +21,15 @@ dependencies = [
2121
"pydantic-settings>=2.2.1",
2222
"tomli>=2.0.1",
2323
"tomli-w>=1.0.0",
24-
"llama-index<0.10.0",
2524
"colorama>=0.4.6",
25+
"llama-index-llms-openai>=0.2.12",
2626
]
2727
name = "repoagent"
28-
version = "0.1.3"
28+
version = "0.1.4"
2929
description = "An LLM-Powered Framework for Repository-level Code Documentation Generation."
3030
readme = "README.md"
3131
classifiers = [
3232
"Programming Language :: Python :: 3",
33-
"Programming Language :: Python :: 3.10",
3433
"Programming Language :: Python :: 3.11",
3534
"Programming Language :: Python :: 3.12",
3635
"Topic :: Scientific/Engineering :: Artificial Intelligence"

repo_agent/chat_with_repo/prompt.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
1-
from llama_index.llms import OpenAI
1+
from llama_index.llms.openai import OpenAI
2+
from llama_index.core.llms.function_calling import FunctionCallingLLM
23

34
from repo_agent.chat_with_repo.json_handler import JsonFileProcessor
45
from repo_agent.log import logger
56

67

78
class TextAnalysisTool:
8-
def __init__(self, llm, db_path):
9+
def __init__(self, llm: FunctionCallingLLM, db_path):
910
self.jsonsearch = JsonFileProcessor(db_path)
1011
self.llm = llm
1112
self.db_path = db_path

repo_agent/chat_with_repo/rag.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import json
22

3-
from llama_index import PromptTemplate
4-
from llama_index.llms import OpenAI
3+
from llama_index.core import PromptTemplate
4+
from llama_index.llms.openai import OpenAI
55
from openai import OpenAI as AI
66

77
from repo_agent.chat_with_repo.json_handler import JsonFileProcessor

repo_agent/settings.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
SecretStr,
1111
field_serializer,
1212
field_validator,
13+
FieldSerializationInfo
1314
)
1415
from pydantic_settings import BaseSettings
1516

@@ -35,7 +36,7 @@ class ProjectSettings(BaseSettings):
3536
log_level: LogLevel = LogLevel.INFO
3637

3738
@field_serializer("ignore_list")
38-
def serialize_ignore_list(self, ignore_list: list[str] = []):
39+
def serialize_ignore_list(self, ignore_list: list[str], info: FieldSerializationInfo):
3940
if ignore_list == [""]:
4041
self.ignore_list = [] # If the ignore_list is empty, set it to an empty list
4142
return []
@@ -62,7 +63,7 @@ def set_log_level(cls, v: str) -> LogLevel:
6263
raise ValueError(f"Invalid log level: {v}")
6364

6465
@field_serializer("target_repo")
65-
def serialize_target_repo(self, target_repo: DirectoryPath):
66+
def serialize_target_repo(self, target_repo: DirectoryPath, info: FieldSerializationInfo):
6667
return str(target_repo)
6768

6869

0 commit comments

Comments (0)