Merge pull request yoheinakajima#149 from MalikMAlna/fix/lower-temperature

Lowering temperature to reduce AI hallucinations
francip authored Apr 14, 2023
2 parents a46d843 + 53e9aaf commit fb6370a
Showing 2 changed files with 10 additions and 6 deletions.
1 change: 1 addition & 0 deletions .env.example
@@ -7,6 +7,7 @@
 # API CONFIG
 OPENAI_API_KEY=
 OPENAI_API_MODEL=gpt-3.5-turbo # alternatively, gpt-4, text-davinci-003, etc
+OPENAI_TEMPERATURE=0.0
 PINECONE_API_KEY=
 PINECONE_ENVIRONMENT=us-east1-gcp

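The new .env.example entry only sets a default; the value still has to be read and parsed on the Python side, which is what the babyagi.py hunk below does. A minimal sketch of that flow, assuming the python-dotenv setup that babyagi.py already uses (the load_dotenv call itself is not part of this diff):

import os
from dotenv import load_dotenv

load_dotenv()  # assumed setup: pull .env values into the process environment
# Fall back to fully deterministic sampling (0.0) when the variable is unset
OPENAI_TEMPERATURE = float(os.getenv("OPENAI_TEMPERATURE", 0.0))

Because load_dotenv does not override variables already set in the shell, an exported OPENAI_TEMPERATURE takes precedence over the .env default, so a single run can be made more exploratory without editing the file.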
15 changes: 9 additions & 6 deletions babyagi.py
@@ -45,6 +45,9 @@
 OBJECTIVE = os.getenv("OBJECTIVE", "")
 INITIAL_TASK = os.getenv("INITIAL_TASK", os.getenv("FIRST_TASK", ""))
 
+# Model configuration
+OPENAI_TEMPERATURE = float(os.getenv("OPENAI_TEMPERATURE", 0.0))
+
 
 # Extensions support begin
 
@@ -134,7 +137,7 @@ def get_ada_embedding(text):
 def openai_call(
     prompt: str,
     model: str = OPENAI_API_MODEL,
-    temperature: float = 0.5,
+    temperature: float = OPENAI_TEMPERATURE,
     max_tokens: int = 100,
 ):
     while True:
@@ -226,30 +229,30 @@ def execution_agent(objective: str, task: str) -> str:
"""

context = context_agent(query=objective, n=5)
context = context_agent(query=objective, top_results_num=5)
# print("\n*******RELEVANT CONTEXT******\n")
# print(context)
prompt = f"""
You are an AI who performs one task based on the following objective: {objective}\n.
Take into account these previously completed tasks: {context}\n.
Your task: {task}\nResponse:"""
return openai_call(prompt, temperature=0.7, max_tokens=2000)
return openai_call(prompt, max_tokens=2000)


def context_agent(query: str, n: int):
def context_agent(query: str, top_results_num: int):
"""
Retrieves context for a given query from an index of tasks.
Args:
query (str): The query or objective for retrieving context.
n (int): The number of top results to retrieve.
top_results_num (int): The number of top results to retrieve.
Returns:
list: A list of tasks as context for the given query, sorted by relevance.
"""
query_embedding = get_ada_embedding(query)
results = index.query(query_embedding, top_k=n, include_metadata=True, namespace=OBJECTIVE)
results = index.query(query_embedding, top_k=top_results_num, include_metadata=True, namespace=OBJECTIVE)
# print("***** RESULTS *****")
# print(results)
sorted_results = sorted(results.matches, key=lambda x: x.score, reverse=True)
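The body of openai_call is outside this diff, so the following is only a sketch of how the new default would reach the API, assuming the chat-completion interface of the openai 0.x library that babyagi targeted at the time; the OPENAI_API_MODEL fallback shown here is illustrative:

import os
import openai

OPENAI_API_MODEL = os.getenv("OPENAI_API_MODEL", "gpt-3.5-turbo")
OPENAI_TEMPERATURE = float(os.getenv("OPENAI_TEMPERATURE", 0.0))

def openai_call(prompt: str, model: str = OPENAI_API_MODEL,
                temperature: float = OPENAI_TEMPERATURE, max_tokens: int = 100):
    # Sketch of the chat-model branch only; the real function also handles
    # completion-style models and retries on API errors.
    response = openai.ChatCompletion.create(
        model=model,
        messages=[{"role": "system", "content": prompt}],
        temperature=temperature,  # 0.0 by default after this commit
        max_tokens=max_tokens,
    )
    return response.choices[0].message.content.strip()

With the hard-coded temperature=0.7 also removed from execution_agent, every call routed through openai_call now shares the single OPENAI_TEMPERATURE default, so one environment variable controls how deterministic the whole task loop is.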
