This repository was archived by the owner on May 28, 2025. It is now read-only.
8 changes: 8 additions & 0 deletions README.md
@@ -6,6 +6,8 @@ A Node.js module for streaming ChatGPT API responses using the OpenAI API. Strea

ChatGPT is an advanced AI language model developed by OpenAI. This module enables you to interact with the ChatGPT API, allowing you to send messages and receive AI-generated responses in real-time. The OpenAI API provides access to various models, including the gpt-3.5-turbo model, which is used by default in this module.

This library works with both native `fetch` (available in Node.js 18+) and `node-fetch` (automatically used as a fallback for older Node.js versions) for maximum compatibility.

## Usage Example

A simple node web app showing usage of the module with streamed chat can be found here: [Streamed ChatGPT API Usage Example](https://github.com/jddev273/simple-chatgpt-chat-streaming-demo)
@@ -18,6 +20,12 @@ Install using npm:
npm install streamed-chatgpt-api
```

The package works with Node.js 18+ (which has native `fetch`) out of the box. For Node.js versions below 18, the package will automatically use `node-fetch` as a fallback. If you need to explicitly use `node-fetch` in your project, you can install it separately:

```
npm install node-fetch
```
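Conceptually, the fallback works like the sketch below: prefer the built-in `fetch`, otherwise load `node-fetch`. This is an illustrative sketch rather than the library's exact resolution code; note that `node-fetch` v3 is ESM-only, so a CommonJS module has to load it with a dynamic `import()`.

```js
// Illustrative sketch of the fetch fallback (an assumption, not the library's exact code).
async function resolveFetch() {
    // Node.js 18+ ships a global fetch implementation.
    if (typeof globalThis.fetch === 'function') {
        return globalThis.fetch;
    }
    // node-fetch v3 is ESM-only, so a CommonJS module loads it via dynamic import.
    const { default: nodeFetch } = await import('node-fetch');
    return nodeFetch;
}
```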

## Usage

To use the module, first import it:
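Based on the `module.exports` shown in `index.js` below, a minimal import looks like this:

```js
// Import the streaming helpers exported by streamed-chatgpt-api.
const { fetchStreamedChat, fetchStreamedChatContent } = require('streamed-chatgpt-api');
```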
102 changes: 98 additions & 4 deletions index.js
@@ -86,6 +86,91 @@ async function fetchStreamedChat(options, onChunkReceived) {
});
}

// A function to process the response stream from node-fetch using Node.js stream methods
async function processNodeStream(readableStream, decoder, onChunkReceived) {
try {
// Set up event listeners for the Node.js readable stream
return new Promise((resolve, reject) => {
let buffer = '';

// Setup read timeout timer
let readTimeoutId = null;
const resetReadTimeout = () => {
if (readTimeoutId) clearTimeout(readTimeoutId);
readTimeoutId = setTimeout(() => {
reject(new Error('Timeout'));
}, readTimeout);
};

// Initial timeout
resetReadTimeout();

// Set up total time timeout
const totalTimeoutPromise = totalTimeTimeout();
totalTimeoutPromise.catch(reject);

readableStream.on('data', (chunk) => {
try {
// Reset read timeout when data is received
resetReadTimeout();

// Decode the chunk and add it to the buffer
const textChunk = decoder.decode(chunk, { stream: true });
buffer += textChunk;

// Process complete lines
const lines = buffer.split('\n');
buffer = lines.pop(); // Keep the last potentially incomplete line in the buffer

// Process each complete line
for (const line of lines) {
if (line.trim() === '') continue;

// Remove the "data: " prefix from the line
const message = line.replace(/^data: /, '');

// If the message indicates the end of the stream, resolve
if (message === '[DONE]') {
if (readTimeoutId) clearTimeout(readTimeoutId);
resolve();
return;
}

// Otherwise, invoke the onChunkReceived callback with the message
onChunkReceived(message);
}
} catch (error) {
if (readTimeoutId) clearTimeout(readTimeoutId);
reject(error);
}
});

readableStream.on('end', () => {
// Clear timeout when stream ends
if (readTimeoutId) clearTimeout(readTimeoutId);

// Process any remaining data in the buffer
if (buffer.trim() !== '') {
const message = buffer.replace(/^data: /, '');
if (message !== '[DONE]' && message.trim() !== '') {
onChunkReceived(message);
}
}
resolve();
});

readableStream.on('error', (error) => {
// Clear timeout on error
if (readTimeoutId) clearTimeout(readTimeoutId);
console.error('Error reading stream:', error);
reject(error);
});
});
} catch (error) {
console.error('Error processing node stream:', error);
}
}

// A function to process the response stream and invoke the onChunkReceived callback
// for each valid line in the stream
async function processStream(reader, decoder, onChunkReceived) {
@@ -175,12 +260,21 @@

const response = await fetchChatResponseWithRetry(apiKey, requestOptions, retryCount);

// Initialize the reader and decoder
const reader = response.body.getReader();
// Initialize the decoder
const decoder = new TextDecoder('utf-8');

// Process the response stream
await processStream(reader, decoder, onChunkReceived);
// Check if response.body has getReader method (native fetch)
if (typeof response.body.getReader === 'function') {
// Initialize the reader for native fetch
const reader = response.body.getReader();

// Process the response stream using getReader
await processStream(reader, decoder, onChunkReceived);
} else {
// Handle node-fetch which doesn't have getReader
// Process the stream using Node.js stream methods
await processNodeStream(response.body, decoder, onChunkReceived);
}
}

module.exports = { fetchStreamedChat, fetchStreamedChatContent };
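The new `processNodeStream` above consumes the Node.js stream with event listeners. An equivalent pattern for the same line-splitting logic is async iteration over the Readable; the sketch below is an alternative illustration, not part of this PR, and omits the read/total timeouts:

```js
// Alternative sketch (not in this PR): consume a Node.js Readable with for await...of.
async function processNodeStreamIterated(readableStream, decoder, onChunkReceived) {
    let buffer = '';
    for await (const chunk of readableStream) {
        // Decode the chunk and accumulate it until complete lines are available.
        buffer += decoder.decode(chunk, { stream: true });
        const lines = buffer.split('\n');
        buffer = lines.pop(); // keep the last, possibly incomplete, line

        for (const line of lines) {
            const message = line.replace(/^data: /, '').trim();
            if (message === '') continue;
            if (message === '[DONE]') return; // end of the streamed response
            onChunkReceived(message);
        }
    }
}
```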
24 changes: 14 additions & 10 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -19,7 +19,7 @@
"node": ">=18.0.0"
},
"optionalDependencies": {
"node-fetch": "^3.1.0"
"node-fetch": "^3.3.2"
},
"devDependencies": {
"jest": "^29.5.0"
23 changes: 23 additions & 0 deletions test-compatibility.js
@@ -0,0 +1,23 @@
// A simple test to ensure the library loads with both fetch implementations

// First test the implementation with node-fetch
console.log('Testing with node-fetch...');

// Save original fetch
const originalFetch = globalThis.fetch;

// Force use of node-fetch by setting globalThis.fetch to undefined
globalThis.fetch = undefined;

// Now when we require the library, it should use node-fetch
const { fetchStreamedChat: fetchWithNodeFetch } = require('./index');

// Restore original fetch
globalThis.fetch = originalFetch;

// Now test with native fetch: clear the require cache so index.js
// re-runs its fetch detection instead of returning the cached module
delete require.cache[require.resolve('./index')];
console.log('Testing with native fetch...');
const { fetchStreamedChat: fetchWithNativeFetch } = require('./index');

// Verify that both loads exposed the expected function before declaring success
if (typeof fetchWithNodeFetch !== 'function' || typeof fetchWithNativeFetch !== 'function') {
    throw new Error('fetchStreamedChat was not exported correctly');
}

console.log('Both implementations loaded successfully. Check completed.');
console.log('Note: Full functional testing requires API keys and real API calls.');
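A functional smoke test would look roughly like the sketch below. It is hypothetical: it needs a real API key, and only `apiKey` and the `(options, onChunkReceived)` signature are visible in this diff, so any other option field is a placeholder — consult the package README for the real options object.

```js
// Hypothetical smoke test sketch — requires a real OpenAI API key.
const { fetchStreamedChat } = require('./index');

fetchStreamedChat(
    {
        apiKey: process.env.OPENAI_API_KEY,
        messageInput: 'Say hello', // placeholder field name — check the README for the real option
    },
    (chunk) => process.stdout.write(chunk), // each streamed chunk is passed to the callback
)
    .then(() => console.log('\nStream finished.'))
    .catch((err) => console.error('Stream failed:', err));
```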