
Commit 9eea37d

Jinash Rouniyar authored and committed
Fixed property names & added ContextualAIGRPC
1 parent 418c3bc commit 9eea37d

File tree

9 files changed: +34 −46 lines changed

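In short, this commit drops the stray `*Property` suffixes from the Contextual AI generative module config so the persisted keys match the camelCase options the caller supplies, and it wires the generated GenerativeContextualAI GRPC message into the runtime config types. A minimal usage sketch with the corrected keys (connection details and the collection name are illustrative, not part of this commit):

import weaviate from 'weaviate-client';

const client = await weaviate.connectToLocal();

await client.collections.create({
  name: 'Articles', // illustrative name
  generative: weaviate.configure.generative.contextualai({
    model: 'v2',
    temperature: 0.7,
    topP: 0.9,
    maxNewTokens: 100,      // previously serialized as maxNewTokensProperty
    systemPrompt: 'sys',    // previously systemPromptProperty
    avoidCommentary: false, // previously avoidCommentaryProperty
  }),
});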

src/collections/config/integration.test.ts

Lines changed: 6 additions & 6 deletions
@@ -814,9 +814,9 @@ describe('Testing of the collection.config namespace', () => {
       await collection.config.update({
         generative: weaviate.reconfigure.generative.contextualai({
           model: 'v2',
-          maxNewTokens: 100,
           temperature: 0.7,
           topP: 0.9,
+          maxNewTokens: 100,
           systemPrompt: 'sys',
           avoidCommentary: false,
         }),
@@ -827,11 +827,11 @@ describe('Testing of the collection.config namespace', () => {
         name: 'generative-contextualai',
         config: {
           model: 'v2',
-          maxNewTokensProperty: 100,
-          temperatureProperty: 0.7,
-          topPProperty: 0.9,
-          systemPromptProperty: 'sys',
-          avoidCommentaryProperty: false,
+          temperature: 0.7,
+          topP: 0.9,
+          maxNewTokens: 100,
+          systemPrompt: 'sys',
+          avoidCommentary: false,
         },
       });
     });

src/collections/config/types/generative.ts

Lines changed: 5 additions & 5 deletions
@@ -108,11 +108,11 @@ export type GenerativeXAIConfig = {
 
 export type GenerativeContextualAIConfig = {
   model?: string;
-  maxNewTokensProperty?: number;
-  temperatureProperty?: number;
-  topPProperty?: number;
-  systemPromptProperty?: string;
-  avoidCommentaryProperty?: boolean;
+  temperature?: number;
+  topP?: number;
+  maxNewTokens?: number;
+  systemPrompt?: string;
+  avoidCommentary?: boolean;
 };
 
 export type GenerativeConfig =
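With the rename, GenerativeContextualAIConfig mirrors the create options field-for-field, so a module config read back from the server can be typed directly. A sketch, assuming the type is re-exported from the package root like the other generative config types:

import type { GenerativeContextualAIConfig } from 'weaviate-client'; // assumed export path

// The persisted config now uses plain camelCase keys rather than the *Property variants.
const persisted: GenerativeContextualAIConfig = {
  model: 'v2',
  temperature: 0.7,
  topP: 0.9,
  maxNewTokens: 100,
  systemPrompt: 'sys',
  avoidCommentary: false,
};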

src/collections/configure/generative.ts

Lines changed: 5 additions & 5 deletions
@@ -66,11 +66,11 @@ export default {
     config: config
       ? {
           model: config.model,
-          maxNewTokensProperty: config.maxNewTokens,
-          temperatureProperty: config.temperature,
-          topPProperty: config.topP,
-          systemPromptProperty: config.systemPrompt,
-          avoidCommentaryProperty: config.avoidCommentary,
+          temperature: config.temperature,
+          topP: config.topP,
+          maxNewTokens: config.maxNewTokens,
+          systemPrompt: config.systemPrompt,
+          avoidCommentary: config.avoidCommentary,
         }
       : undefined,
   };
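Since the factory no longer renames fields, configure.generative.contextualai() now returns a config whose keys match what was passed in (unsupplied options simply come through as undefined). A short sketch mirroring the unit test further below:

import weaviate from 'weaviate-client';

const generative = weaviate.configure.generative.contextualai({
  model: 'v2',
  temperature: 0.7,
  topP: 0.9,
  maxNewTokens: 100,
});
// generative.name === 'generative-contextualai'
// generative.config carries { model, temperature, topP, maxNewTokens, ... } verbatim,
// instead of { maxNewTokensProperty, temperatureProperty, ... } as before this commit.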

src/collections/configure/types/generative.ts

Lines changed: 1 addition & 1 deletion
@@ -61,9 +61,9 @@ export type GenerativeXAIConfigCreate = GenerativeXAIConfig;
 
 export type GenerativeContextualAIConfigCreate = {
   model?: string;
-  maxNewTokens?: number;
   temperature?: number;
   topP?: number;
+  maxNewTokens?: number;
   systemPrompt?: string;
   avoidCommentary?: boolean;
 };

src/collections/configure/unit.test.ts

Lines changed: 6 additions & 6 deletions
@@ -1952,9 +1952,9 @@ describe('Unit testing of the generative factory class', () => {
   it('should create the correct GenerativeContextualAIConfig type with all values', () => {
     const config = configure.generative.contextualai({
       model: 'v2',
-      maxNewTokens: 100,
       temperature: 0.7,
       topP: 0.9,
+      maxNewTokens: 100,
       systemPrompt:
         'You are a helpful assistant that provides accurate and informative responses based on the given context.',
       avoidCommentary: false,
@@ -1963,12 +1963,12 @@ describe('Unit testing of the generative factory class', () => {
       name: 'generative-contextualai',
       config: {
         model: 'v2',
-        maxNewTokensProperty: 100,
-        temperatureProperty: 0.7,
-        topPProperty: 0.9,
-        systemPromptProperty:
+        temperature: 0.7,
+        topP: 0.9,
+        maxNewTokens: 100,
+        systemPrompt:
           'You are a helpful assistant that provides accurate and informative responses based on the given context.',
-        avoidCommentaryProperty: false,
+        avoidCommentary: false,
       },
     });
   });

src/collections/generate/config.ts

Lines changed: 3 additions & 8 deletions
@@ -308,18 +308,13 @@ export const generativeParameters = {
     'generative-contextualai',
     GenerativeConfigRuntimeType<'generative-contextualai'> | undefined
   > {
-    // Contextual AI does not require special GRPC wrappers; pass primitives directly
+    const { knowledge, ...rest } = config || {};
     return {
       name: 'generative-contextualai',
       config: config
         ? {
-            model: config.model,
-            maxNewTokens: config.maxNewTokens,
-            temperature: config.temperature,
-            topP: config.topP,
-            systemPrompt: config.systemPrompt,
-            avoidCommentary: config.avoidCommentary,
-            knowledge: config.knowledge,
+            ...rest,
+            knowledge: knowledge ? TextArray.fromPartial({ values: knowledge }) : undefined,
           }
         : undefined,
     };
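The runtime factory now strips knowledge out of the options, spreads the remaining primitives through unchanged, and wraps knowledge in the proto TextArray message, which is the { values: [...] } shape the updated unit test expects. A hedged sketch of the resulting object (assuming generativeParameters is imported from the package root, as in the tests):

import { generativeParameters } from 'weaviate-client'; // assumed export path

const params = generativeParameters.contextualai({
  model: 'v2',
  temperature: 0.7,
  maxNewTokens: 100,
  knowledge: ['Custom knowledge override', 'Additional context for testing'],
});
// params.name   -> 'generative-contextualai'
// params.config -> {
//   model: 'v2',
//   temperature: 0.7,
//   maxNewTokens: 100,
//   knowledge: { values: ['Custom knowledge override', 'Additional context for testing'] },
// }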

src/collections/generate/integration.test.ts

Lines changed: 3 additions & 3 deletions
@@ -603,9 +603,9 @@ maybeContextualAI('Testing of the collection.generate methods with Contextual AI
       vectorizers: weaviate.configure.vectors.text2VecOpenAI(),
       generative: weaviate.configure.generative.contextualai({
         model: 'v2',
-        maxNewTokens: 100,
         temperature: 0.7,
         topP: 0.9,
+        maxNewTokens: 100,
         systemPrompt: 'You are a helpful AI assistant.',
         avoidCommentary: false,
       }),
@@ -664,9 +664,9 @@ maybeContextualAI('Testing of the collection.generate methods with Contextual AI
       singlePrompt: 'Translate this title to French: {title}',
       config: generativeParameters.contextualai({
         model: 'v2',
-        maxNewTokens: 50,
         temperature: 0.5,
         topP: 0.8,
+        maxNewTokens: 50,
         systemPrompt: 'You are a translation assistant.',
         avoidCommentary: true,
       }),
@@ -689,8 +689,8 @@ maybeContextualAI('Testing of the collection.generate methods with Contextual AI
       singlePrompt: 'What is the custom knowledge?',
       config: generativeParameters.contextualai({
         model: 'v2',
-        maxNewTokens: 100,
         temperature: 0.7,
+        maxNewTokens: 100,
         knowledge: ['Custom knowledge override', 'Additional context for testing'],
       }),
     },
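These tests exercise the per-request path: a runtime config built with generativeParameters.contextualai() overrides the collection-level settings for a single generate call, including ad-hoc knowledge. A hedged usage sketch following the same shape (the collection name, query options, and exact method arguments are assumptions, not part of this commit):

const articles = client.collections.get('Articles'); // illustrative collection

const result = await articles.generate.fetchObjects(
  {
    singlePrompt: 'Translate this title to French: {title}',
    config: generativeParameters.contextualai({
      model: 'v2',
      temperature: 0.5,
      topP: 0.8,
      maxNewTokens: 50,
      systemPrompt: 'You are a translation assistant.',
      avoidCommentary: true,
    }),
  },
  { limit: 2 } // assumed query options
);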

src/collections/generate/unit.test.ts

Lines changed: 2 additions & 2 deletions
@@ -343,12 +343,12 @@ describe('Unit testing of the generativeParameters factory methods', () => {
       name: 'generative-contextualai',
       config: {
         model: 'v2',
-        maxNewTokens: 512,
         temperature: 0.7,
         topP: 0.9,
+        maxNewTokens: 512,
         systemPrompt: 'sys',
         avoidCommentary: false,
-        knowledge: ['knowledge1', 'knowledge2'],
+        knowledge: { values: ['knowledge1', 'knowledge2'] },
       },
     });
   });

src/collections/types/generate.ts

Lines changed: 3 additions & 10 deletions
@@ -7,6 +7,7 @@ import {
   GenerativeAnyscaleMetadata,
   GenerativeCohere as GenerativeCohereGRPC,
   GenerativeCohereMetadata,
+  GenerativeContextualAI as GenerativeContextualAIGRPC,
   GenerativeDatabricks as GenerativeDatabricksGRPC,
   GenerativeDatabricksMetadata,
   GenerativeDebug,
@@ -175,15 +176,7 @@ export type GenerativeConfigRuntimeType<G> = G extends 'generative-anthropic'
   : G extends 'generative-xai'
   ? Omit<GenerativeXAIGRPC, omitFields>
   : G extends 'generative-contextualai'
-  ? {
-      model?: string;
-      maxNewTokens?: number;
-      temperature?: number;
-      topP?: number;
-      systemPrompt?: string;
-      avoidCommentary?: boolean;
-      knowledge?: string[];
-    }
+  ? Omit<GenerativeContextualAIGRPC, omitFields>
   : G extends 'none'
   ? undefined
   : Record<string, any> | undefined;
@@ -343,9 +336,9 @@ export type GenerativeXAIConfigRuntime = GenerativeXAIConfig;
 
 export type GenerativeContextualAIConfigRuntime = {
   model?: string | undefined;
-  maxNewTokens?: number | undefined;
   temperature?: number | undefined;
   topP?: number | undefined;
+  maxNewTokens?: number | undefined;
   systemPrompt?: string | undefined;
   avoidCommentary?: boolean | undefined;
   knowledge?: string[] | undefined;
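Deriving the runtime config type from the generated GRPC message keeps the hand-written literal from drifting out of sync with the proto (on the GRPC side, knowledge is presumably the TextArray message rather than a plain string[], which matches the wrapping added in generate/config.ts). A type-level sketch of the effect, assuming the conditional type is importable from the package root:

import type { GenerativeConfigRuntimeType } from 'weaviate-client'; // assumed export

// Resolves to Omit<GenerativeContextualAIGRPC, omitFields>: the same optional
// model/temperature/topP/maxNewTokens/systemPrompt/avoidCommentary fields,
// sourced from the proto definition instead of a hand-maintained inline literal.
type ContextualAIRuntime = GenerativeConfigRuntimeType<'generative-contextualai'>;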
