LlamaIndex Provider

The LlamaIndex Provider transforms Composio tools into a format compatible with LlamaIndex’s function calling capabilities.

Setup

$ pip install composio_llamaindex==0.8.0 llama-index
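
Both examples below read credentials from environment variables (the Python example calls dotenv.load_dotenv(), the TypeScript example imports dotenv/config). Assuming the default variable names, a .env file in your project root would contain placeholders like:

COMPOSIO_API_KEY=your-composio-api-key
OPENAI_API_KEY=your-openai-api-key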

Usage

import asyncio
import dotenv
from composio_llamaindex import LlamaIndexProvider
from llama_index.core.agent.workflow import FunctionAgent
from llama_index.llms.openai import OpenAI

from composio import Composio

# Load environment variables from .env
dotenv.load_dotenv()

# Setup client
llm = OpenAI(model="gpt-5")
composio = Composio(provider=LlamaIndexProvider())

tools = composio.tools.get(
    user_id="user@acme.com",
    tools=["GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER"],
)

workflow = FunctionAgent(
    tools=tools,
    llm=llm,
    system_prompt="You are an agent that performs github actions.",
)


async def main():
    result = await workflow.run(
        user_msg="Hello! I would like to star a repo composiohq/composio on GitHub"
    )
    print(result)


if __name__ == "__main__":
    asyncio.run(main())
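
The example above requests a single tool by its exact slug. To give the agent a whole toolkit instead, the call can be written as in the sketch below; the toolkits filter is an assumption carried over from the TypeScript API shown under Advanced Usage, so check the SDK reference for the exact parameter name.

# Sketch: fetch all GitHub tools for the user instead of one slug.
# `toolkits` is assumed to mirror the TypeScript SDK's filter.
tools = composio.tools.get(
    user_id="user@acme.com",
    toolkits=["GITHUB"],
)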

Advanced Usage

Streaming Agent with Multiple Toolkits

TypeScript
import { Composio } from '@composio/core';
import { LlamaindexProvider } from '@composio/llamaindex';
import { openai } from '@llamaindex/openai';
import { agent, agentStreamEvent } from '@llamaindex/workflow';
import 'dotenv/config';

const composio = new Composio({
  apiKey: process.env.COMPOSIO_API_KEY,
  provider: new LlamaindexProvider(),
});

async function streamingExample() {
  // Get tools from multiple toolkits with execution modifiers
  const tools = await composio.tools.get(
    'default',
    {
      toolkits: ['gmail', 'googlecalendar', 'slack'],
      limit: 20,
    },
    {
      beforeExecute: ({ toolSlug, params }) => {
        console.log(`🔄 Executing ${toolSlug} with:`, params);
        return params;
      },
      afterExecute: ({ toolSlug, result }) => {
        console.log(`✅ ${toolSlug} completed:`, result);
        return result;
      },
    }
  );

  // Create streaming agent
  const assistantAgent = agent({
    name: 'Personal Assistant',
    description: 'A helpful personal assistant',
    llm: openai({ model: 'gpt-4o-mini' }),
    systemPrompt:
      'You are a helpful personal assistant that can manage emails, calendar events, and slack messages.',
    tools,
  });

  // Stream the response
  const stream = await assistantAgent.runStream(
    'Schedule a meeting for tomorrow at 2 PM and send a slack message about it'
  );

  for await (const event of stream) {
    if (agentStreamEvent.include(event)) {
      process.stdout.write(event.data.delta);
    }
  }
}

streamingExample().catch(console.error);
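
A comparable streaming setup with the Python SDK might look like the minimal sketch below. The toolkits argument is the same assumption as in the earlier sketch, and the streaming loop relies on LlamaIndex's AgentStream workflow event; treat it as a starting point rather than a verified recipe.

import asyncio

import dotenv
from composio import Composio
from composio_llamaindex import LlamaIndexProvider
from llama_index.core.agent.workflow import AgentStream, FunctionAgent
from llama_index.llms.openai import OpenAI

dotenv.load_dotenv()

composio = Composio(provider=LlamaIndexProvider())

# Assumed: the Python SDK accepts a `toolkits` filter like the TypeScript SDK.
tools = composio.tools.get(
    user_id="default",
    toolkits=["GMAIL", "GOOGLECALENDAR", "SLACK"],
)

assistant = FunctionAgent(
    tools=tools,
    llm=OpenAI(model="gpt-4o-mini"),
    system_prompt=(
        "You are a helpful personal assistant that can manage emails, "
        "calendar events, and slack messages."
    ),
)


async def main():
    # FunctionAgent.run() returns a handler whose events can be streamed.
    handler = assistant.run(
        user_msg="Schedule a meeting for tomorrow at 2 PM and send a slack message about it"
    )
    async for event in handler.stream_events():
        if isinstance(event, AgentStream):
            print(event.delta, end="", flush=True)
    await handler


if __name__ == "__main__":
    asyncio.run(main())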