Upgrade the OpenAI Node.js SDK from v3 to v4 following this guide.
Apply with the Grit CLI
```bash
grit apply openai_v4
```
Initialization
BEFORE
```js
import { Configuration, OpenAIApi } from 'openai';

const myConfig = new Configuration({
  apiKey: process.env.OPENAI_API_KEY,
});
const openai = new OpenAIApi(myConfig);
```
AFTER
```js
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});
```
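Note that the v4 client reads the `OPENAI_API_KEY` environment variable by default, so passing `apiKey` explicitly as above is optional when that variable is set. A minimal sketch under that assumption:

```js
import OpenAI from 'openai';

// Assumes OPENAI_API_KEY is set in the environment; the v4 constructor
// picks it up automatically when no apiKey option is provided.
const openai = new OpenAI();
```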
CommonJS initialization
The migration also works with `require` (CommonJS) syntax.
BEFORE
```js
const { Configuration, OpenAIApi } = require('openai');

const myConfig = new Configuration({
  apiKey: process.env.OPENAI_API_KEY,
});
const openai = new OpenAIApi(myConfig);
```
AFTER
```js
const OpenAI = require('openai');

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});
```
Creating a chat completion
BEFORE
```js
const chatCompletion = await openai.createChatCompletion({
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user', content: 'Hello world' }],
});
console.log(chatCompletion.data.choices[0].message);
```
AFTER
```js
const chatCompletion = await openai.chat.completions.create({
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user', content: 'Hello world' }],
});
console.log(chatCompletion.choices[0].message);
```
Creating a chat completion with a custom variable name
BEFORE
```js
const mango = await openai.createChatCompletion({
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user', content: 'Hello world' }],
});
console.log(mango.data.choices[0].message);
```
AFTER
```js
const mango = await openai.chat.completions.create({
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user', content: 'Hello world' }],
});
console.log(mango.choices[0].message);
```
Creating a completion
BEFORE
```js
const completion = await openai.createCompletion({
  model: 'text-davinci-003',
  prompt: 'This story begins',
  max_tokens: 30,
});
console.log(completion.data.choices[0].text);
```
AFTER
```js
const completion = await openai.completions.create({
  model: 'text-davinci-003',
  prompt: 'This story begins',
  max_tokens: 30,
});
console.log(completion.choices[0].text);
```
Creating a completion with an aliased client
BEFORE
```js
import { Configuration, OpenAIApi } from 'openai';

const myConfig = new Configuration({
  apiKey: process.env.OPENAI_API_KEY,
});
const myOpenAi = new OpenAIApi(myConfig);

const completion = await myOpenAi.createCompletion({
  model: 'text-davinci-003',
  prompt: 'This story begins',
  max_tokens: 30,
});
```
AFTER
```js
import OpenAI from 'openai';

const myOpenAi = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

const completion = await myOpenAi.completions.create({
  model: 'text-davinci-003',
  prompt: 'This story begins',
  max_tokens: 30,
});
```
Creating a transcription (Whisper)
BEFORE
```js
const response = await openai.createTranscription(fs.createReadStream('audio.mp3'), 'whisper-1');
```
AFTER
```js
const response = await openai.audio.transcriptions.create({
  model: 'whisper-1',
  file: fs.createReadStream('audio.mp3'),
});
```
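In v4 the transcription result exposes the transcribed text directly on the response object. A minimal sketch, assuming an `audio.mp3` file on disk and `fs` imported as shown:

```js
import fs from 'fs';
import OpenAI from 'openai';

const openai = new OpenAI();

// The v4 response object carries the transcript on its `text` property.
const transcription = await openai.audio.transcriptions.create({
  model: 'whisper-1',
  file: fs.createReadStream('audio.mp3'),
});
console.log(transcription.text);
```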
File handling
BEFORE
```js
const openai = new OpenAIApi(
  new Configuration({
    apiKey: process.env.OPENAI_API_KEY,
  }),
);

const myFile = await openai.downloadFile('my-file', options);
console.log(myFile);
```
AFTER
```js
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

const myFile = await openai.files.retrieveContent('my-file', options);
console.log(myFile);
```
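Later v4 releases deprecate `files.retrieveContent` in favor of `files.content`, which resolves to a raw `Response` rather than a string. A sketch under that assumption:

```js
// Assumes a v4 release that exposes files.content(); it resolves to a
// fetch Response, so read the body explicitly.
const response = await openai.files.content('my-file');
const contents = await response.text();
console.log(contents);
```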
Error handling
BEFORE
```js
try {
  const completion = await openai.createCompletion({});
} catch (error) {
  if (error.response) {
    console.log(error.response.status); // e.g. 401
    console.log(error.response.data.message); // e.g. The authentication token you passed was invalid...
    console.log(error.response.data.code); // e.g. 'invalid_api_key'
    console.log(error.response.data.type); // e.g. 'invalid_request_error'
  } else {
    console.log(error);
  }
}
```
AFTER
```js
try {
  const completion = await openai.completions.create({});
} catch (error) {
  if (error instanceof OpenAI.APIError) {
    console.log(error.status); // e.g. 401
    console.log(error.message); // e.g. The authentication token you passed was invalid...
    console.log(error.code); // e.g. 'invalid_api_key'
    console.log(error.type); // e.g. 'invalid_request_error'
  } else {
    console.log(error);
  }
}
```
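The `instanceof OpenAI.APIError` check requires the default `OpenAI` import to be in scope. v4 also exposes more specific error subclasses such as `OpenAI.RateLimitError` that can be handled before the generic case. A minimal sketch:

```js
import OpenAI from 'openai';

const openai = new OpenAI();

try {
  await openai.completions.create({ model: 'text-davinci-003', prompt: 'Hi' });
} catch (error) {
  // Narrower subclasses (e.g. rate limits) can be matched before the
  // generic APIError branch.
  if (error instanceof OpenAI.RateLimitError) {
    console.log('Rate limited, retry later');
  } else if (error instanceof OpenAI.APIError) {
    console.log(error.status, error.code);
  } else {
    throw error;
  }
}
```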
Does not match code that does not use OpenAI
```js
var increment = function (i) {
  return i + 1;
};

var remember = function (me) {
  this.you = me;
};

var sumToValue = function (x, y) {
  function Value(v) {
    this.value = v;
  }
  return new Value(x + y);
};

var times = (x, y) => {
  return x * y;
};
```
Fixes type imports when they come from 'openai'
BEFORE
```ts
import {
  ChatCompletionRequestMessage,
  CreateChatCompletionRequest,
  CreateChatCompletionResponse,
} from 'openai';

// imported, so should change
const messages: ChatCompletionRequestMessage = 1;
const request: CreateChatCompletionRequest = 2;
const response: CreateChatCompletionResponse = 3;

// should not be changed because not imported from 'openai'
const fineTune: FineTune = 4;
```
AFTER
```ts
import OpenAI from 'openai';

// imported, so should change
const messages: OpenAI.Chat.CreateChatCompletionRequestMessage = 1;
const request: OpenAI.Chat.ChatCompletionCreateParamsNonStreaming = 2;
const response: OpenAI.Chat.Completions.ChatCompletion = 3;

// should not be changed because not imported from 'openai'
const fineTune: FineTune = 4;
```
Preserves v4 OpenAI ESM imports
BEFORE
```ts
import {
  ChatCompletionRequestMessage,
  CreateChatCompletionRequest,
  CreateChatCompletionResponse,
  toFile,
} from 'openai';

// imported, so should change
const messages: ChatCompletionRequestMessage = 1;
const request: CreateChatCompletionRequest = 2;
const response: CreateChatCompletionResponse = 3;

// should not be changed because not imported from 'openai'
const fineTune: FineTune = 4;
```
AFTER
```ts
import OpenAI, { toFile } from 'openai';

// imported, so should change
const messages: OpenAI.Chat.CreateChatCompletionRequestMessage = 1;
const request: OpenAI.Chat.ChatCompletionCreateParamsNonStreaming = 2;
const response: OpenAI.Chat.Completions.ChatCompletion = 3;

// should not be changed because not imported from 'openai'
const fineTune: FineTune = 4;
```
Does not double-import OpenAI
BEFORE
```ts
import OpenAI, {
  ChatCompletionRequestMessage,
  CreateChatCompletionRequest,
  CreateChatCompletionResponse,
  toFile,
} from 'openai';

// imported, so should change
const messages: ChatCompletionRequestMessage = 1;
const request: CreateChatCompletionRequest = 2;
const response: CreateChatCompletionResponse = 3;

// should not be changed because not imported from 'openai'
const fineTune: FineTune = 4;
```
AFTER
```ts
import OpenAI, { toFile } from 'openai';

// imported, so should change
const messages: OpenAI.Chat.CreateChatCompletionRequestMessage = 1;
const request: OpenAI.Chat.ChatCompletionCreateParamsNonStreaming = 2;
const response: OpenAI.Chat.Completions.ChatCompletion = 3;

// should not be changed because not imported from 'openai'
const fineTune: FineTune = 4;
```
Preserves v4 OpenAI CommonJS imports
BEFORE
```ts
const {
  ChatCompletionRequestMessage,
  CreateChatCompletionRequest,
  CreateChatCompletionResponse,
  Configuration,
  toFile,
} = require('openai');

// imported, so should change
const messages: ChatCompletionRequestMessage = 1;
const request: CreateChatCompletionRequest = 2;
const response: CreateChatCompletionResponse = 3;

// should not be changed because not imported from 'openai'
const fineTune: FineTune = 4;
```
AFTER
```ts
const OpenAI = require('openai');
const { toFile } = require('openai');

// imported, so should change
const messages: OpenAI.Chat.CreateChatCompletionRequestMessage = 1;
const request: OpenAI.Chat.ChatCompletionCreateParamsNonStreaming = 2;
const response: OpenAI.Chat.Completions.ChatCompletion = 3;

// should not be changed because not imported from 'openai'
const fineTune: FineTune = 4;
```
Handle rename within v4
This handles the type rename from https://github.com/openai/openai-node/pull/266/files.
BEFORE
```ts
import OpenAI, { toFile } from 'openai';

const myCompletion: OpenAI.Chat.CompletionCreateParams = 1;
```
AFTER
```ts
import OpenAI, { toFile } from 'openai';

const myCompletion: OpenAI.Chat.ChatCompletionCreateParams = 1;
```
Does not rewrite non-OpenAI imports
BEFORE
```ts
const {
  ChatCompletionRequestMessage,
  CreateChatCompletionRequest,
  CreateChatCompletionResponse,
  Configuration,
  toFile,
} = require('openai');
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { BufferMemory } = require('langchain/memory');
const { orderBy } = require('lodash');
import { ChatOpenAI } from 'langchain/chat_models/openai';

const chat = new ChatOpenAI({
  openAIApiKey: apikey,
  maxTokens: 120,
});
```
AFTER
```ts
const OpenAI = require('openai');
const { toFile } = require('openai');
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { BufferMemory } = require('langchain/memory');
const { orderBy } = require('lodash');
import { ChatOpenAI } from 'langchain/chat_models/openai';

const chat = new ChatOpenAI({
  openAIApiKey: apikey,
  maxTokens: 120,
});
```