diff --git a/examples/README.md b/examples/README.md index 86e7bf95..5355878a 100644 --- a/examples/README.md +++ b/examples/README.md @@ -89,6 +89,13 @@ AZURE_OPENAI_ENDPOINT=... AZURE_OPENAI_API_KEY=... ``` +Optional proxy setting: For some endpoints, proxy is needed to access services. +You can set your http proxy environment variable that looks like the following: +``` +# For Proxy +HTTP_PROXY=http://host:port +``` + ## Step 4: Run the examples Examples can be found in the `examples` directory. diff --git a/package-lock.json b/package-lock.json index 67a9dbea..d4c43b36 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,10 +14,14 @@ ], "dependencies": { "axios": "^1.4.0", + "https-proxy-agent": "^7.0.1", "typescript": "^5.1.3" }, "devDependencies": { "@types/node": "^20.3.3" + }, + "engines": { + "node": ">=18" } }, "examples/calendar": { @@ -223,6 +227,38 @@ "node": ">= 0.6" } }, + "node_modules/agent-base": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", + "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/agent-base/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/agent-base/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -821,6 +857,39 @@ "node": ">= 0.8" } }, + "node_modules/https-proxy-agent": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.1.tgz", + "integrity": "sha512-Eun8zV0kcYS1g19r78osiQLEFIRspRUDd9tIfBCTBPBeMieF/EsJNL8VI3xOIdYRDEkjQnqOYPsZ2DsWsVsFwQ==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/https-proxy-agent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", diff --git a/package.json b/package.json index 86e93712..80aa643c 100644 --- a/package.json +++ b/package.json @@ -37,6 +37,7 @@ ], "dependencies": { "axios": "^1.4.0", + "https-proxy-agent": "^7.0.1", "typescript": "^5.1.3" }, "devDependencies": { diff --git a/src/model.ts b/src/model.ts index dc0dafcb..80486211 100644 --- a/src/model.ts +++ b/src/model.ts @@ -1,6 +1,8 @@ import axios from "axios"; import { Result, success, error } from "./result"; +import { HttpsProxyAgent } from 'https-proxy-agent'; + /** * Represents a AI language model that can complete prompts. 
TypeChat uses an implementation of this * interface to communicate with an AI service that can translate natural language requests to JSON @@ -39,17 +41,19 @@ export interface TypeChatLanguageModel { * @returns An instance of `TypeChatLanguageModel`. */ export function createLanguageModel(env: Record<string, string | undefined>): TypeChatLanguageModel { + const httpProxy = env.HTTP_PROXY ?? ""; + if (env.OPENAI_API_KEY) { const apiKey = env.OPENAI_API_KEY ?? missingEnvironmentVariable("OPENAI_API_KEY"); const model = env.OPENAI_MODEL ?? missingEnvironmentVariable("OPENAI_MODEL"); const endPoint = env.OPENAI_ENDPOINT ?? "https://api.openai.com/v1/chat/completions"; const org = env.OPENAI_ORGANIZATION ?? ""; - return createOpenAILanguageModel(apiKey, model, endPoint, org); + return createOpenAILanguageModel(apiKey, model, endPoint, org, httpProxy); } if (env.AZURE_OPENAI_API_KEY) { const apiKey = env.AZURE_OPENAI_API_KEY ?? missingEnvironmentVariable("AZURE_OPENAI_API_KEY"); const endPoint = env.AZURE_OPENAI_ENDPOINT ?? missingEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); - return createAzureOpenAILanguageModel(apiKey, endPoint); + return createAzureOpenAILanguageModel(apiKey, endPoint, httpProxy); } missingEnvironmentVariable("OPENAI_API_KEY or AZURE_OPENAI_API_KEY"); } @@ -59,15 +63,19 @@ export function createLanguageModel(env: Record<string, string | undefined>): Ty * @param apiKey The OpenAI API key. * @param model The model name. * @param endPoint The URL of the OpenAI REST API endpoint. Defaults to "https://api.openai.com/v1/chat/completions". - * @param org The OpenAI organization id. + * @param org The OpenAI organization id. + * @param httpProxy The HTTP proxy setting. * @returns An instance of `TypeChatLanguageModel`. 
*/ -export function createOpenAILanguageModel(apiKey: string, model: string, endPoint = "https://api.openai.com/v1/chat/completions", org = ""): TypeChatLanguageModel { - return createAxiosLanguageModel(endPoint, { - headers: { +export function createOpenAILanguageModel(apiKey: string, model: string, endPoint = "https://api.openai.com/v1/chat/completions", org = "", httpProxy = ""): TypeChatLanguageModel { + const agent = httpProxy ? new HttpsProxyAgent(httpProxy) : undefined; + return createAxiosLanguageModel(endPoint, { + headers: { Authorization: `Bearer ${apiKey}`, "OpenAI-Organization": org - } + }, + httpAgent: agent, + httpsAgent: agent }, { model }); } @@ -77,10 +85,16 @@ export function createOpenAILanguageModel(apiKey: string, model: string, endPoin * "https://{your-resource-name}.openai.azure.com/openai/deployments/{your-deployment-name}/chat/completions?api-version={API-version}". * Example deployment names are "gpt-35-turbo" and "gpt-4". An example API versions is "2023-05-15". * @param apiKey The Azure OpenAI API key. + * @param httpProxy The HTTP proxy setting. * @returns An instance of `TypeChatLanguageModel`. */ -export function createAzureOpenAILanguageModel(apiKey: string, endPoint: string,): TypeChatLanguageModel { - return createAxiosLanguageModel(endPoint, { headers: { "api-key": apiKey } }, {}); +export function createAzureOpenAILanguageModel(apiKey: string, endPoint: string, httpProxy = ""): TypeChatLanguageModel { + const agent = httpProxy ? new HttpsProxyAgent(httpProxy) : undefined; + return createAxiosLanguageModel(endPoint, { + headers: { "api-key": apiKey }, + httpAgent: agent, + httpsAgent: agent }, {}); } /**