## Using LlamaIndex
# Build a LlamaIndex ReAct agent whose tools are supplied by Toolhouse,
# then run a single multi-step research prompt against it.
from llama_index.core.agent import ReActAgent
from llama_index.llms.groq import Groq
from toolhouse import Toolhouse, Provider
import dotenv

# Load credentials (e.g. GROQ_API_KEY, TOOLHOUSE_API_KEY) from a local .env
# file into the environment, where the SDKs pick them up.
dotenv.load_dotenv()

# Toolhouse client configured to emit tool definitions in LlamaIndex's format.
th = Toolhouse(provider=Provider.LLAMAINDEX)

# Here we use a model hosted on Groq, but you can use any LLMs
# supported by LlamaIndex.
llm = Groq(model="llama-3.2-11b-vision-preview")

# ReAct agent wired to the Toolhouse tools; verbose=True prints the agent's
# reasoning and tool-call trace to stdout.
agent = ReActAgent.from_tools(th.get_tools(), llm=llm, verbose=True)

response = agent.chat(
    "Search the internet for 3 medium-sized AI companies and for each one, "
    "get the contents of their webpage. When done, give me a short executive "
    "summary in bullet points."
)
print(str(response))
Last updated