Merge pull request #412 from asiffarhankhan/main
Add search-based tools using Wikipedia, Serper, SerpAPI, Brave, Exa, and DuckDuckGo
MervinPraison authored Mar 11, 2025
2 parents 99fee4d + 0a8fbc8 commit 1338730
Showing 6 changed files with 64 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
workspace.code-workspace
.vscode
crewai
.cache
9 changes: 9 additions & 0 deletions docs/tools/external/duckduckgo-search.mdx
@@ -0,0 +1,9 @@
```python
from praisonaiagents import Agent, PraisonAIAgents
from praisonaiagents.tools import duckduckgo

data_agent = Agent(instructions="Search and Read Research Papers on DNA Mutation", tools=[duckduckgo])
editor_agent = Agent(instructions="Write a scientifically researched summary of outcomes and findings about DNA mutation")
agents = PraisonAIAgents(agents=[data_agent, editor_agent])
agents.start()
```
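A quick way to sanity-check the tool outside an agent: a minimal sketch, assuming `duckduckgo` is a plain callable that takes a query string (verify the signature in your installed `praisonaiagents` version).

```python
from praisonaiagents.tools import duckduckgo

# Assumption: the tool accepts a query string and returns search results directly.
results = duckduckgo("DNA mutation research papers")
print(results)
```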
19 changes: 19 additions & 0 deletions docs/tools/external/exa-search.mdx
@@ -0,0 +1,19 @@
```python
from praisonaiagents import Agent, PraisonAIAgents
from exa_py import Exa
import os

exa = Exa(api_key=os.environ["EXA_API_KEY"])

def search_and_contents(query: str):
    """Search for webpages based on the query and retrieve their contents."""
    # This combines two API endpoints: search and contents retrieval
    return str(exa.search_and_contents(
        query, use_autoprompt=False, num_results=5, text=True, highlights=True
    ))

data_agent = Agent(instructions="Find the latest jobs for Video Editor in New York at startups", tools=[search_and_contents])
editor_agent = Agent(instructions="Curate the available jobs at startups and their email for the candidate to apply based on his skills on Canva, Adobe Premiere Pro, and Adobe After Effects")
agents = PraisonAIAgents(agents=[data_agent, editor_agent], process='hierarchical')
agents.start()
```
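The Exa snippet assumes EXA_API_KEY is already set. A minimal sketch of providing it in-process for a quick local test (the key value is a placeholder):

```python
import os

# Placeholder value; in practice export EXA_API_KEY in your shell or load it from a .env file.
os.environ.setdefault("EXA_API_KEY", "your-exa-api-key")
```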
17 changes: 17 additions & 0 deletions docs/tools/external/google-serper-search.mdx
@@ -0,0 +1,17 @@
```python
from praisonaiagents import Agent, PraisonAIAgents
from langchain_community.utilities import GoogleSerperAPIWrapper
import os
from dotenv import load_dotenv

load_dotenv()  # loads SERPER_API_KEY from a .env file, if present

# GoogleSerperAPIWrapper reads the SERPER_API_KEY environment variable
search = GoogleSerperAPIWrapper()

data_agent = Agent(instructions="Suggest me top 5 most visited websites for Dosa Recipe", tools=[search])
editor_agent = Agent(instructions="List out the websites with their url and a short description")
agents = PraisonAIAgents(agents=[data_agent, editor_agent])
agents.start()
```
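Passing the wrapper instance works as shown; the function-tool pattern from the Exa example is an alternative. A minimal sketch, with an illustrative function name and the standard `GoogleSerperAPIWrapper.run` call:

```python
def serper_search(query: str):
    """Search Google via Serper and return the raw results."""
    return search.run(query)

data_agent = Agent(instructions="Suggest the top 5 most visited websites for a Dosa recipe", tools=[serper_search])
```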
9 changes: 9 additions & 0 deletions docs/tools/external/serp-search.mdx
@@ -0,0 +1,9 @@
```python
from praisonaiagents import Agent, PraisonAIAgents
from langchain_community.utilities import SerpAPIWrapper

data_agent = Agent(instructions="Search about decline of recruitment across various industries with the rise of AI", tools=[SerpAPIWrapper])
editor_agent = Agent(instructions="Write a blog article pointing out the jobs most at rish due to the rise of AI")
agents = PraisonAIAgents(agents=[data_agent, editor_agent])
agents.start()
```
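SerpAPIWrapper expects SERPAPI_API_KEY in the environment, and it can also be exposed as a plain function tool. A minimal sketch (function name is illustrative):

```python
from langchain_community.utilities import SerpAPIWrapper

search = SerpAPIWrapper()  # reads SERPAPI_API_KEY from the environment

def serp_search(query: str):
    """Search the web via SerpAPI and return the raw results."""
    return search.run(query)
```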
9 changes: 9 additions & 0 deletions docs/tools/external/wikipedia-search.mdx
@@ -0,0 +1,9 @@
```python
from praisonaiagents import Agent, PraisonAIAgents
from langchain_community.utilities import WikipediaAPIWrapper

data_agent = Agent(instructions="Gather all of Messi's record in LaLiga", tools=[WikipediaAPIWrapper])
summarise_agent = Agent(instructions="Summarize the data into a well structured format")
agents = PraisonAIAgents(agents=[data_agent, summarise_agent])
agents.start()
```
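As with the other LangChain utilities, the Wikipedia wrapper can be wrapped in a small function tool instead of being passed as a class. A minimal sketch (function name is illustrative):

```python
from langchain_community.utilities import WikipediaAPIWrapper

wiki = WikipediaAPIWrapper()

def wikipedia_search(query: str):
    """Look up a topic on Wikipedia and return page summaries."""
    return wiki.run(query)
```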
