Skip to content

Commit 8a325fd

Browse files
Jinash Rouniyar
authored and committed
Updated max_tokens property to max_new_tokens
1 parent c984908 commit 8a325fd

File tree

9 files changed

+15
-15
lines changed

9 files changed

+15
-15
lines changed

src/collections/config/integration.test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -806,7 +806,7 @@ describe('Testing of the collection.config namespace', () => {
806806
await collection.config.update({
807807
generative: weaviate.reconfigure.generative.contextualai({
808808
model: 'v2',
809-
maxTokens: 100,
809+
maxNewTokens: 100,
810810
temperature: 0.7,
811811
topP: 0.9,
812812
systemPrompt: 'sys',
@@ -819,7 +819,7 @@ describe('Testing of the collection.config namespace', () => {
819819
name: 'generative-contextualai',
820820
config: {
821821
model: 'v2',
822-
maxTokensProperty: 100,
822+
maxNewTokensProperty: 100,
823823
temperatureProperty: 0.7,
824824
topPProperty: 0.9,
825825
systemPromptProperty: 'sys',

src/collections/config/types/generative.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,7 @@ export type GenerativeXAIConfig = {
108108

109109
export type GenerativeContextualAIConfig = {
110110
model?: string;
111-
maxTokensProperty?: number;
111+
maxNewTokensProperty?: number;
112112
temperatureProperty?: number;
113113
topPProperty?: number;
114114
systemPromptProperty?: string;

src/collections/configure/generative.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ export default {
6666
config: config
6767
? {
6868
model: config.model,
69-
maxTokensProperty: config.maxTokens,
69+
maxNewTokensProperty: config.maxNewTokens,
7070
temperatureProperty: config.temperature,
7171
topPProperty: config.topP,
7272
systemPromptProperty: config.systemPrompt,

src/collections/configure/types/generative.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ export type GenerativeXAIConfigCreate = GenerativeXAIConfig;
6161

6262
export type GenerativeContextualAIConfigCreate = {
6363
model?: string;
64-
maxTokens?: number;
64+
maxNewTokens?: number;
6565
temperature?: number;
6666
topP?: number;
6767
systemPrompt?: string;

src/collections/configure/unit.test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1946,7 +1946,7 @@ describe('Unit testing of the generative factory class', () => {
19461946
it('should create the correct GenerativeContextualAIConfig type with all values', () => {
19471947
const config = configure.generative.contextualai({
19481948
model: 'v2',
1949-
maxTokens: 100,
1949+
maxNewTokens: 100,
19501950
temperature: 0.7,
19511951
topP: 0.9,
19521952
systemPrompt:
@@ -1957,7 +1957,7 @@ describe('Unit testing of the generative factory class', () => {
19571957
name: 'generative-contextualai',
19581958
config: {
19591959
model: 'v2',
1960-
maxTokensProperty: 100,
1960+
maxNewTokensProperty: 100,
19611961
temperatureProperty: 0.7,
19621962
topPProperty: 0.9,
19631963
systemPromptProperty:

src/collections/generate/config.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -314,7 +314,7 @@ export const generativeParameters = {
314314
config: config
315315
? {
316316
model: config.model,
317-
maxTokens: config.maxTokens,
317+
maxNewTokens: config.maxNewTokens,
318318
temperature: config.temperature,
319319
topP: config.topP,
320320
systemPrompt: config.systemPrompt,

src/collections/generate/integration.test.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -603,7 +603,7 @@ maybeContextualAI('Testing of the collection.generate methods with Contextual AI
603603
vectorizers: weaviate.configure.vectors.text2VecOpenAI(),
604604
generative: weaviate.configure.generative.contextualai({
605605
model: 'v2',
606-
maxTokens: 100,
606+
maxNewTokens: 100,
607607
temperature: 0.7,
608608
topP: 0.9,
609609
systemPrompt: 'You are a helpful AI assistant.',
@@ -664,7 +664,7 @@ maybeContextualAI('Testing of the collection.generate methods with Contextual AI
664664
singlePrompt: 'Translate this title to French: {title}',
665665
config: generativeParameters.contextualai({
666666
model: 'v2',
667-
maxTokens: 50,
667+
maxNewTokens: 50,
668668
temperature: 0.5,
669669
topP: 0.8,
670670
systemPrompt: 'You are a translation assistant.',
@@ -689,7 +689,7 @@ maybeContextualAI('Testing of the collection.generate methods with Contextual AI
689689
singlePrompt: 'What is the custom knowledge?',
690690
config: generativeParameters.contextualai({
691691
model: 'v2',
692-
maxTokens: 100,
692+
maxNewTokens: 100,
693693
temperature: 0.7,
694694
knowledge: ['Custom knowledge override', 'Additional context for testing'],
695695
}),

src/collections/generate/unit.test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -327,7 +327,7 @@ describe('Unit testing of the generativeParameters factory methods', () => {
327327
it('with values', () => {
328328
const config = generativeParameters.contextualai({
329329
model: 'v2',
330-
maxTokens: 512,
330+
maxNewTokens: 512,
331331
temperature: 0.7,
332332
topP: 0.9,
333333
systemPrompt: 'sys',
@@ -343,7 +343,7 @@ describe('Unit testing of the generativeParameters factory methods', () => {
343343
name: 'generative-contextualai',
344344
config: {
345345
model: 'v2',
346-
maxTokens: 512,
346+
maxNewTokens: 512,
347347
temperature: 0.7,
348348
topP: 0.9,
349349
systemPrompt: 'sys',

src/collections/types/generate.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -177,7 +177,7 @@ export type GenerativeConfigRuntimeType<G> = G extends 'generative-anthropic'
177177
: G extends 'generative-contextualai'
178178
? {
179179
model?: string;
180-
maxTokens?: number;
180+
maxNewTokens?: number;
181181
temperature?: number;
182182
topP?: number;
183183
systemPrompt?: string;
@@ -343,7 +343,7 @@ export type GenerativeXAIConfigRuntime = GenerativeXAIConfig;
343343

344344
export type GenerativeContextualAIConfigRuntime = {
345345
model?: string | undefined;
346-
maxTokens?: number | undefined;
346+
maxNewTokens?: number | undefined;
347347
temperature?: number | undefined;
348348
topP?: number | undefined;
349349
systemPrompt?: string | undefined;

0 commit comments

Comments (0)