
Commit 82239ff

Implement code changes to enhance functionality and improve performance
1 parent 046b7ba commit 82239ff

3 files changed: +693 −9 lines changed

package.json

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 {
   "name": "cluelessly",
-  "version": "1.0.3",
+  "version": "1.1.0",
   "description": "A powerful desktop application that helps developers solve coding problems by analyzing screenshots of code and also provides a conversational AI assistant for every users!",
   "main": "./out/main/index.js",
   "author": "https://github.com/Xeven777",

src/main/lib/processing-manager.ts

Lines changed: 6 additions & 7 deletions
@@ -93,8 +93,7 @@ export class ProcessingManager {
     if (config.apiProvider === 'openai') {
       if (config.apiKey) {
         this.vercelOpenAI = createOpenAI({
-          apiKey: config.apiKey,
-          compatibility: 'strict' // or 'compatible' or undefined
+          apiKey: config.apiKey
         })
         console.log('Vercel OpenAI provider initialized successfully')
       } else {
@@ -428,7 +427,7 @@ export class ProcessingManager {
         { role: 'user', content: userMessagesContent }
       ],
       temperature: 0.2,
-      maxTokens: llmProvider.provider == 'openai' ? 4000 : 6000,
+      maxOutputTokens: llmProvider == 'openai' ? 1000 : 1500,
       mode: 'json', // Enforce JSON output mode if supported by the model/provider
       abortSignal
     })
@@ -531,7 +530,7 @@ Your solution should be efficient, well-commented, and handle edge cases.
         { role: 'user', content: promptText }
       ],
       temperature: 0.2,
-      maxTokens: solutionLLMProvider.provider == 'openai' ? 4000 : 6000,
+      maxOutputTokens: 4000,
       abortSignal
     })
     // console.log({
@@ -702,7 +701,7 @@ Your solution should be efficient, well-commented, and handle edge cases.
         { role: 'user', content: userMessagesContent }
       ],
       temperature: 0.2,
-      maxTokens: debuggingLLMProvider.provider == 'openai' ? 4000 : 6000,
+      maxOutputTokens: 4000,
       abortSignal
     })
     // console.log({
@@ -844,7 +843,7 @@ Tell answers in details of about 200 words minimum.
         { role: 'user', content: userMessagesContent }
       ],
       temperature: 0.7,
-      maxTokens: llmProvider.provider == 'openai' ? 4000 : 6000
+      maxOutputTokens: 6000
     })
 
     // console.log({
@@ -951,7 +950,7 @@ Tell answers in details of about 200 words minimum.
         { role: 'user', content: userMessagesContent }
      ],
       temperature: 0.2,
-      maxTokens: llmProvider.provider == 'openai' ? 4000 : 6000,
+      maxOutputTokens: 3000,
       mode: 'json'
     })
 
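The hunks above track the Vercel AI SDK v5 API: the `compatibility` option is dropped from `createOpenAI`, and the `maxTokens` setting is renamed to `maxOutputTokens`. A minimal sketch of the updated call shape, assuming AI SDK v5 with `@ai-sdk/openai`; the model id, prompt, and `demo` wrapper are placeholders, not taken from this repository:

```ts
import { createOpenAI } from '@ai-sdk/openai'
import { generateText } from 'ai'

// Provider setup without the removed `compatibility` option.
const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY })

async function demo() {
  // `maxOutputTokens` replaces the old `maxTokens` key in AI SDK v5.
  const { text } = await generateText({
    model: openai('gpt-4o-mini'), // placeholder model id
    messages: [{ role: 'user', content: 'Summarize the screenshot analysis.' }],
    temperature: 0.2,
    maxOutputTokens: 1000
  })
  console.log(text)
}

demo()
```

Under these assumptions, keeping the old `maxTokens` key after upgrading the `ai` package should surface as a TypeScript error, so the renames in the hunks above go hand in hand with the dependency bump.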
