diff --git a/docs/docs/examples/openai-node.md b/docs/docs/examples/openai-node.md
new file mode 100644
index 000000000..e556f7164
--- /dev/null
+++ b/docs/docs/examples/openai-node.md
@@ -0,0 +1,251 @@

---
title: Nitro with openai-node
---

You can quickly migrate your existing Node.js code from the OpenAI API or Azure OpenAI to Nitro.
> The only change you need to make is to override `baseURL` in the `openai` client initialization with the Nitro URL.
- Node.js OpenAI SDK: https://www.npmjs.com/package/openai

## Chat Completion
### Nitro

```typescript
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: '', // defaults to process.env["OPENAI_API_KEY"]
  baseURL: "http://localhost:3928/v1/" // https://api.openai.com/v1
});

async function chatCompletion() {
  const stream = await openai.beta.chat.completions.stream({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Say this is a test' }],
    stream: true,
  });

  stream.on('content', (delta, snapshot) => {
    process.stdout.write(delta);
  });

  // Or, equivalently, consume the stream as an async iterator
  // (use one of the two, otherwise the output is printed twice):
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content || '');
  }

  const chatCompletion = await stream.finalChatCompletion();
  console.log(chatCompletion); // {id: "…", choices: […], …}
}

chatCompletion();
```
### OAI

```typescript
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: '', // defaults to process.env["OPENAI_API_KEY"]
});

async function chatCompletion() {
  const stream = await openai.beta.chat.completions.stream({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Say this is a test' }],
    stream: true,
  });

  stream.on('content', (delta, snapshot) => {
    process.stdout.write(delta);
  });

  // Or, equivalently, consume the stream as an async iterator
  // (use one of the two, otherwise the output is printed twice):
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content || '');
  }

  const chatCompletion = await stream.finalChatCompletion();
  console.log(chatCompletion); // {id: "…", choices: […], …}
}

chatCompletion();
```
### Azure OAI

```typescript
import OpenAI from 'openai';

// The name of your Azure OpenAI Resource.
// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource
const resource = '';

// Corresponds to your Model deployment within your OpenAI resource, e.g. my-gpt35-16k-deployment
// Navigate to the Azure OpenAI Studio to deploy a model.
const model = '';

// https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning
const apiVersion = '2023-06-01-preview';

const apiKey = process.env['AZURE_OPENAI_API_KEY'];
if (!apiKey) {
  throw new Error('The AZURE_OPENAI_API_KEY environment variable is missing or empty.');
}

const openai = new OpenAI({
  apiKey,
  baseURL: `https://${resource}.openai.azure.com/openai/deployments/${model}`,
  defaultQuery: { 'api-version': apiVersion },
  defaultHeaders: { 'api-key': apiKey },
});

async function chatCompletion() {
  const stream = await openai.beta.chat.completions.stream({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Say this is a test' }],
    stream: true,
  });

  stream.on('content', (delta, snapshot) => {
    process.stdout.write(delta);
  });

  // Or, equivalently, consume the stream as an async iterator
  // (use one of the two, otherwise the output is printed twice):
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content || '');
  }

  const chatCompletion = await stream.finalChatCompletion();
  console.log(chatCompletion); // {id: "…", choices: […], …}
}

chatCompletion();
```
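If you do not need token-by-token output, the same client can be used without the streaming helper. Below is a minimal non-streaming sketch, assuming the local Nitro server from the examples above is running on `localhost:3928`; the same pattern works against OAI or Azure with the client configurations shown above (the `simpleChat` helper name is just for illustration):

```typescript
import OpenAI from 'openai';

// Assumes a local Nitro server; swap baseURL for https://api.openai.com/v1 to target OAI.
const openai = new OpenAI({
  apiKey: '',
  baseURL: 'http://localhost:3928/v1/',
});

async function simpleChat() {
  // Non-streaming request: the full completion is returned in a single response.
  const completion = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Say this is a test' }],
  });
  console.log(completion.choices[0]?.message?.content);
}

simpleChat();
```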
## Embedding
### Nitro

```typescript
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: '', // defaults to process.env["OPENAI_API_KEY"]
  baseURL: "http://localhost:3928/v1/" // https://api.openai.com/v1
});

async function embedding() {
  const embedding = await openai.embeddings.create({
    input: 'Hello How are you?',
    model: 'text-embedding-ada-002',
  });
  console.log(embedding); // {object: "list", data: […], …}
}

embedding();
```
### OAI

```typescript
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: '', // defaults to process.env["OPENAI_API_KEY"]
});

async function embedding() {
  const embedding = await openai.embeddings.create({
    input: 'Hello How are you?',
    model: 'text-embedding-ada-002',
  });
  console.log(embedding); // {object: "list", data: […], …}
}

embedding();
```
### Azure OAI

```typescript
import OpenAI from 'openai';

// The name of your Azure OpenAI Resource.
// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource
const resource = '';

// Corresponds to your Model deployment within your OpenAI resource, e.g. my-gpt35-16k-deployment
// Navigate to the Azure OpenAI Studio to deploy a model.
const model = '';

// https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning
const apiVersion = '2023-06-01-preview';

const apiKey = process.env['AZURE_OPENAI_API_KEY'];
if (!apiKey) {
  throw new Error('The AZURE_OPENAI_API_KEY environment variable is missing or empty.');
}

const openai = new OpenAI({
  apiKey,
  baseURL: `https://${resource}.openai.azure.com/openai/deployments/${model}`,
  defaultQuery: { 'api-version': apiVersion },
  defaultHeaders: { 'api-key': apiKey },
});

async function embedding() {
  const embedding = await openai.embeddings.create({
    input: 'Hello How are you?',
    model: 'text-embedding-ada-002',
  });
  console.log(embedding); // {object: "list", data: […], …}
}

embedding();
```
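The response follows the OpenAI embeddings schema, so the vector itself is available at `data[0].embedding`. A short sketch, again assuming the local Nitro endpoint used above (the `embeddingVector` helper name is just for illustration):

```typescript
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: '',
  baseURL: 'http://localhost:3928/v1/', // assumed local Nitro endpoint
});

async function embeddingVector() {
  const response = await openai.embeddings.create({
    input: 'Hello How are you?',
    model: 'text-embedding-ada-002',
  });
  // data[0].embedding is an array of numbers; its length depends on the model.
  const vector = response.data[0].embedding;
  console.log(`dimensions: ${vector.length}`);
}

embeddingVector();
```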
## Audio
Coming soon

## How to reproduce

1. Install the dependencies:
```
npm install --save openai typescript
# or
yarn add openai
```
2. Fill `tsconfig.json`:
```json
{
  "compilerOptions": {
    "moduleResolution": "node",
    "sourceMap": true,
    "outDir": "dist",
    "target": "es2020",
    "lib": ["es2020"],
    "module": "commonjs"
  }
}
```
3. Fill `index.ts` with the code from the examples above.
4. Build with `npx tsc`.
5. Run the code with `node dist/index.js`.
6. Enjoy!

diff --git a/docs/docs/examples/openai-python.md b/docs/docs/examples/openai-python.md
new file mode 100644
index 000000000..e3082078c
--- /dev/null
+++ b/docs/docs/examples/openai-python.md
@@ -0,0 +1,185 @@

---
title: Nitro with openai-python
---

You can quickly migrate your existing Python code from the OpenAI API or Azure OpenAI to Nitro.
> The only change you need to make is to override `base_url` in the `openai` client initialization with the Nitro URL.
- Python OpenAI SDK: https://pypi.org/project/openai/

## Chat Completion
### Nitro

```python
import asyncio

from openai import AsyncOpenAI

# gets API Key from environment variable OPENAI_API_KEY
client = AsyncOpenAI(base_url="http://localhost:3928/v1/", api_key="sk-xxx")


async def main() -> None:
    stream = await client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Say this is a test"}],
        stream=True,
    )
    async for completion in stream:
        print(completion.choices[0].delta.content or "", end="")
    print()


asyncio.run(main())
```
### OAI

```python
import asyncio

from openai import AsyncOpenAI

# gets API Key from environment variable OPENAI_API_KEY
client = AsyncOpenAI(api_key="sk-xxx")


async def main() -> None:
    stream = await client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Say this is a test"}],
        stream=True,
    )
    async for completion in stream:
        print(completion.choices[0].delta.content or "", end="")
    print()


asyncio.run(main())
```
### Azure OAI

```python
from openai import AzureOpenAI

# Reads the API key from the environment variable AZURE_OPENAI_API_KEY
client = AzureOpenAI(
    # https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning
    api_version="2023-06-01-preview",
    # https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource
    azure_endpoint="https://example-endpoint.openai.azure.com",
)

stream = client.chat.completions.create(
    model="deployment-name",  # e.g. gpt-35-instant
    messages=[
        {
            "role": "user",
            "content": "How do I output all files in a directory using Python?",
        },
    ],
    stream=True,
)
for part in stream:
    print(part.choices[0].delta.content or "", end="")
```
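As on the Node.js side, streaming is optional. Here is a minimal non-streaming sketch, assuming the local Nitro server on `localhost:3928` from the examples above; drop `base_url` (or use the Azure client) to target the other engines:

```python
import asyncio

from openai import AsyncOpenAI

# Assumes a local Nitro server; remove base_url to call the OpenAI API instead.
client = AsyncOpenAI(base_url="http://localhost:3928/v1/", api_key="sk-xxx")


async def main() -> None:
    # Non-streaming request: the whole completion arrives in one response.
    completion = await client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Say this is a test"}],
    )
    print(completion.choices[0].message.content)


asyncio.run(main())
```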
## Embedding
### Nitro

```python
import asyncio

from openai import AsyncOpenAI

# gets API Key from environment variable OPENAI_API_KEY
client = AsyncOpenAI(base_url="http://localhost:3928/v1/", api_key="sk-xxx")


async def main() -> None:
    embedding = await client.embeddings.create(
        input='Hello How are you?', model='text-embedding-ada-002'
    )
    print(embedding)


asyncio.run(main())
```
### OAI

```python
import asyncio

from openai import AsyncOpenAI

# gets API Key from environment variable OPENAI_API_KEY
client = AsyncOpenAI(api_key="sk-xxx")


async def main() -> None:
    embedding = await client.embeddings.create(
        input='Hello How are you?', model='text-embedding-ada-002'
    )
    print(embedding)


asyncio.run(main())
```
### Azure OAI

```python
import os

from openai import AzureOpenAI

client = AzureOpenAI(
    api_key=os.environ["AZURE_OPENAI_API_KEY"],
    api_version="2023-05-15",
    azure_endpoint="https://YOUR_RESOURCE_NAME.openai.azure.com",
)

response = client.embeddings.create(
    input="Your text string goes here",
    model="YOUR_DEPLOYMENT_NAME",  # the name of your embedding model deployment
)
embeddings = response.data[0].embedding
print(embeddings)
```
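As in the Node.js examples, the embeddings response is the standard OpenAI schema, so the vector lives at `data[0].embedding`. A small sketch, again assuming the local Nitro endpoint used above:

```python
import asyncio

from openai import AsyncOpenAI

# Assumes a local Nitro server exposing the OpenAI-compatible embeddings route.
client = AsyncOpenAI(base_url="http://localhost:3928/v1/", api_key="sk-xxx")


async def main() -> None:
    response = await client.embeddings.create(
        input="Hello How are you?", model="text-embedding-ada-002"
    )
    # data[0].embedding is a list of floats; its length depends on the model.
    vector = response.data[0].embedding
    print(f"dimensions: {len(vector)}")


asyncio.run(main())
```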
## Audio
Coming soon

## How to reproduce

1. Install the dependencies:
```
pip install openai
```
2. Fill `index.py` with the code from the examples above.
3. Run the code with `python index.py`.
4. Enjoy!

diff --git a/docs/sidebars.js b/docs/sidebars.js
index f2eabaec8..0488c696e 100644
--- a/docs/sidebars.js
+++ b/docs/sidebars.js
@@ -49,7 +49,11 @@ const sidebars = {
       label: "Guides",
       collapsible: false,
       collapsed: false,
-      items: ["examples/chatbox"],
+      items: [
+        "examples/chatbox",
+        "examples/openai-node",
+        "examples/openai-python",
+      ],
     },
     // {
     //   type: "category",