import { ComposioToolSet as BaseComposioToolSet } from "../sdk/base.toolset";
import { OpenAI } from "openai";
 
import { COMPOSIO_BASE_URL } from "../sdk/client/core/OpenAPI";
import { WorkspaceConfig } from "../env/config";
import { Workspace } from "../env";
import logger from "../utils/logger";
import { ActionsListResponseDTO } from "../sdk/client";
import { Stream } from "openai/streaming";
 
type Optional<T> = T | null;
type Sequence<T> = Array<T>;
 
export class OpenAIToolSet extends BaseComposioToolSet {
    static FRAMEWORK_NAME = "openai";
    static DEFAULT_ENTITY_ID = "default";
 
    /**
     * Composio toolset for OpenAI framework.
     *
     * Example:
     * ```typescript
     * // Sketch: replace the placeholder with your Composio API key.
     * const toolset = new OpenAIToolSet({ apiKey: "<composio-api-key>" });
     * ```
     */
    constructor(
      config: {
        apiKey?: Optional<string>,
        baseUrl?: Optional<string>,
        entityId?: string,
        workspaceConfig?: WorkspaceConfig
      }={}
    ) {
        super(
            config.apiKey || null,
            config.baseUrl || COMPOSIO_BASE_URL,
            OpenAIToolSet.FRAMEWORK_NAME,
            config.entityId || OpenAIToolSet.DEFAULT_ENTITY_ID,
            config.workspaceConfig || Workspace.Host()
        );
    }
 
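    /**
     * Wraps the Composio actions matching the given filters as OpenAI chat-completion tools.
     *
     * Sketch of typical usage (the model name, app name, and API keys are illustrative assumptions):
     * ```typescript
     * const toolset = new OpenAIToolSet({ apiKey: "<composio-api-key>" });
     * const tools = await toolset.getTools({ apps: ["github"] });
     * const client = new OpenAI();
     * const completion = await client.chat.completions.create({
     *     model: "gpt-4o",
     *     messages: [{ role: "user", content: "Star the repository owner/repo" }],
     *     tools,
     * });
     * ```
     */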
    async getTools(
        filters: {
            actions?: Sequence<string>;
            apps?: Sequence<string>;
            tags?: Optional<Array<string>>;
            useCase?: Optional<string>;
            useCaseLimit?: Optional<number>;
            filterByAvailableApps?: Optional<boolean>;
        },
        entityId?: Optional<string>
    ): Promise<Sequence<OpenAI.ChatCompletionTool>> {
        const mainActions = await this.getToolsSchema(filters, entityId);
        return mainActions.map((action: NonNullable<ActionsListResponseDTO["items"]>[0]) => {
            const formattedSchema: OpenAI.FunctionDefinition = {
                name: action.name!,
                description: action.description!,
                parameters: action.parameters!,
            };
            const tool: OpenAI.ChatCompletionTool = {
                type: "function",
                function: formattedSchema
            };
            return tool;
        });
    }
 
 
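    /**
     * Executes a single tool call returned by the chat completions API via Composio
     * and returns the result as a JSON string.
     *
     * Sketch, assuming `completion` comes from the call shown in `getTools`:
     * ```typescript
     * const toolCall = completion.choices[0].message.tool_calls?.[0];
     * if (toolCall) {
     *     const result = await toolset.executeToolCall(toolCall);
     *     console.log(JSON.parse(result));
     * }
     * ```
     */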
    async executeToolCall(
        tool: OpenAI.ChatCompletionMessageToolCall,
        entityId: Optional<string> = null
    ): Promise<string> {
        return JSON.stringify(await this.executeAction(
            tool.function.name,
            JSON.parse(tool.function.arguments),
            entityId || this.entityId
        ));
    }
 
 
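    /**
     * Executes the tool calls found in a chat completion and collects their
     * JSON-string results (one per choice that contains tool calls).
     *
     * Sketch, assuming `completion` was created with tools from `getTools`:
     * ```typescript
     * const results = await toolset.handleToolCall(completion);
     * ```
     */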
    async handleToolCall(
        chatCompletion: OpenAI.ChatCompletion,
        entityId: Optional<string> = null
    ): Promise<Sequence<string>> {
        const outputs = [];
        for (const message of chatCompletion.choices) {
            // Note: only the first tool call of each choice is executed here.
            if (message.message.tool_calls) {
                outputs.push(await this.executeToolCall(message.message.tool_calls[0], entityId));
            }
        }
        return outputs;
    }
 
 
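    /**
     * Executes the tool calls requested by an Assistants API run and returns
     * the tool outputs in the shape expected by `submitToolOutputs`.
     *
     * Sketch, assuming an existing `client`, `thread`, and `run`:
     * ```typescript
     * if (run.status === "requires_action") {
     *     const toolOutputs = await toolset.handleAssistantMessage(run);
     *     await client.beta.threads.runs.submitToolOutputs(thread.id, run.id, {
     *         tool_outputs: toolOutputs,
     *     });
     * }
     * ```
     */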
    async handleAssistantMessage(
        run: OpenAI.Beta.Threads.Run,
        entityId: Optional<string> = null
    ): Promise<Array<OpenAI.Beta.Threads.Runs.RunSubmitToolOutputsParams.ToolOutput>> {
        const tool_calls = run.required_action?.submit_tool_outputs?.tool_calls || [];
        const tool_outputs: Array<OpenAI.Beta.Threads.Runs.RunSubmitToolOutputsParams.ToolOutput> = await Promise.all(
            tool_calls.map(async (tool_call) => {
                logger.debug(`Executing tool call ${tool_call.id} for function ${tool_call.function.name} with arguments: ${tool_call.function.arguments}`);
                const tool_response = await this.executeToolCall(
                    tool_call as OpenAI.ChatCompletionMessageToolCall,
                    entityId || this.entityId
                );
                logger.debug(`Received tool response: ${tool_response}`);
                return {
                    tool_call_id: tool_call.id,
                    // executeToolCall already returns a JSON string, so pass it through as-is.
                    output: tool_response,
                };
            })
        );
        return tool_outputs;
    }
 
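    /**
     * Consumes an Assistants API event stream, yields each event to the caller,
     * executes any requested tool calls, and submits their outputs until the run
     * reaches a terminal state.
     *
     * Sketch, assuming an existing `client`, `thread`, and `assistant` (a run
     * created with `stream: true` yields the event stream expected here):
     * ```typescript
     * const stream = await client.beta.threads.runs.create(thread.id, {
     *     assistant_id: assistant.id,
     *     stream: true,
     * });
     * for await (const event of toolset.waitAndHandleAssistantStreamToolCalls(client, stream, thread)) {
     *     // Inspect streamed events here, e.g. for incremental message output.
     * }
     * ```
     */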
    async *waitAndHandleAssistantStreamToolCalls(
        client: OpenAI,
        runStream: Stream<OpenAI.Beta.Assistants.AssistantStreamEvent>,
        thread: OpenAI.Beta.Threads.Thread,
        entityId: string | null = null
    ): AsyncGenerator<any, void, unknown> {
        let runId = null;
 
        // Start processing the runStream events
        for await (const event of runStream) {
            yield event; // Yield each event from the stream as it arrives
 
            if (event.event === 'thread.run.created') {
                const { id } = event.data;
                runId = id;
            }
 
            if (!runId) {
                continue;
            }
 
            // Handle the 'requires_action' event
            if (event.event === 'thread.run.requires_action') {
                const toolOutputs = await this.handleAssistantMessage(event.data, entityId);
 
                // Submit the tool outputs
                await client.beta.threads.runs.submitToolOutputs(thread.id, runId, {
                    tool_outputs: toolOutputs
                });
            }
 
            // Break if the run status becomes inactive
            if (['thread.run.completed', 'thread.run.failed', 'thread.run.cancelled', 'thread.run.expired'].includes(event.event)) {
                break;
            }
        }
 
        if (!runId) {
            throw new Error("No run ID found");
        }
 
        // Handle any final actions after the stream ends
        let finalRun = await client.beta.threads.runs.retrieve(thread.id, runId);
 
        while (["queued", "in_progress", "requires_action"].includes(finalRun.status)) {
            if (finalRun.status === "requires_action") {
                const toolOutputs = await this.handleAssistantMessage(finalRun, entityId);
 
                // Submit tool outputs
                finalRun = await client.beta.threads.runs.submitToolOutputs(thread.id, runId, {
                    tool_outputs: toolOutputs
                });
            } else {
                // Update the run status
                finalRun = await client.beta.threads.runs.retrieve(thread.id, runId);
                await new Promise(resolve => setTimeout(resolve, 500)); // Wait before rechecking
            }
        }
    }
    
    
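    /**
     * Polls an Assistants API run, executing and submitting tool outputs whenever
     * the run requires action, until the run reaches a terminal state.
     *
     * Sketch, assuming an existing `client`, `thread`, and `assistant`:
     * ```typescript
     * let run = await client.beta.threads.runs.create(thread.id, { assistant_id: assistant.id });
     * run = await toolset.waitAndHandleAssistantToolCalls(client, run, thread);
     * ```
     */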
    async waitAndHandleAssistantToolCalls(
        client: OpenAI,
        run: OpenAI.Beta.Threads.Run,
        thread: OpenAI.Beta.Threads.Thread,
        entityId: Optional<string> = null
    ): Promise<OpenAI.Beta.Threads.Run> {
        while (["queued", "in_progress", "requires_action"].includes(run.status)) {
            logger.debug(`Current run status: ${run.status}`);
            const tool_outputs = await this.handleAssistantMessage(run, entityId || this.entityId);
            if (run.status === "requires_action") {
                logger.debug(`Submitting tool outputs for run ID: ${run.id} in thread ID: ${thread.id}`);
                run = await client.beta.threads.runs.submitToolOutputs(
                    thread.id,
                    run.id,
                    {
                        tool_outputs: tool_outputs
                    }
                );
            } else {
                run = await client.beta.threads.runs.retrieve(thread.id, run.id);
                await new Promise(resolve => setTimeout(resolve, 500));
            }
        }
        return run;
    }
}