diff --git a/clients/algoliasearch-client-csharp/algoliasearch/Utils/SearchClientExtensions.cs b/clients/algoliasearch-client-csharp/algoliasearch/Utils/SearchClientExtensions.cs index 038c7d60a54..bdd5f41b319 100644 --- a/clients/algoliasearch-client-csharp/algoliasearch/Utils/SearchClientExtensions.cs +++ b/clients/algoliasearch-client-csharp/algoliasearch/Utils/SearchClientExtensions.cs @@ -494,37 +494,46 @@ public async Task ReplaceAllObjectsAsync(string in var rnd = new Random(); var tmpIndexName = $"{indexName}_tmp_{rnd.Next(100)}"; - var copyResponse = await OperationIndexAsync(indexName, - new OperationIndexParams(OperationType.Copy, tmpIndexName) - { Scope = [ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms] }, options, cancellationToken) - .ConfigureAwait(false); + try + { + var copyResponse = await OperationIndexAsync(indexName, + new OperationIndexParams(OperationType.Copy, tmpIndexName) + { Scope = [ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms] }, options, cancellationToken) + .ConfigureAwait(false); - var batchResponse = await ChunkedBatchAsync(tmpIndexName, objects, Action.AddObject, true, batchSize, - options, cancellationToken).ConfigureAwait(false); + var batchResponse = await ChunkedBatchAsync(tmpIndexName, objects, Action.AddObject, true, batchSize, + options, cancellationToken).ConfigureAwait(false); - await WaitForTaskAsync(tmpIndexName, copyResponse.TaskID, requestOptions: options, ct: cancellationToken) - .ConfigureAwait(false); + await WaitForTaskAsync(tmpIndexName, copyResponse.TaskID, requestOptions: options, ct: cancellationToken) + .ConfigureAwait(false); - copyResponse = await OperationIndexAsync(indexName, - new OperationIndexParams(OperationType.Copy, tmpIndexName) - { Scope = [ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms] }, options, cancellationToken) - .ConfigureAwait(false); - await WaitForTaskAsync(tmpIndexName, copyResponse.TaskID, requestOptions: options, ct: cancellationToken) - .ConfigureAwait(false); + copyResponse = await OperationIndexAsync(indexName, + new OperationIndexParams(OperationType.Copy, tmpIndexName) + { Scope = [ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms] }, options, cancellationToken) + .ConfigureAwait(false); + await WaitForTaskAsync(tmpIndexName, copyResponse.TaskID, requestOptions: options, ct: cancellationToken) + .ConfigureAwait(false); - var moveResponse = await OperationIndexAsync(tmpIndexName, - new OperationIndexParams(OperationType.Move, indexName), options, cancellationToken) - .ConfigureAwait(false); + var moveResponse = await OperationIndexAsync(tmpIndexName, + new OperationIndexParams(OperationType.Move, indexName), options, cancellationToken) - .ConfigureAwait(false); + .ConfigureAwait(false); - await WaitForTaskAsync(tmpIndexName, moveResponse.TaskID, requestOptions: options, ct: cancellationToken) - .ConfigureAwait(false); + await WaitForTaskAsync(tmpIndexName, moveResponse.TaskID, requestOptions: options, ct: cancellationToken) + .ConfigureAwait(false); - return new ReplaceAllObjectsResponse + return new ReplaceAllObjectsResponse + { + CopyOperationResponse = copyResponse, + MoveOperationResponse = moveResponse, + BatchResponses = batchResponse + }; + } + catch (Exception) { - CopyOperationResponse = copyResponse, - MoveOperationResponse = moveResponse, - BatchResponses = batchResponse - }; + await DeleteIndexAsync(tmpIndexName, cancellationToken: cancellationToken).ConfigureAwait(false); + + throw; + } } /// diff --git
a/clients/algoliasearch-client-kotlin/client/src/commonMain/kotlin/com/algolia/client/extensions/SearchClient.kt b/clients/algoliasearch-client-kotlin/client/src/commonMain/kotlin/com/algolia/client/extensions/SearchClient.kt index 03fd1ee3ef6..809c6b98680 100644 --- a/clients/algoliasearch-client-kotlin/client/src/commonMain/kotlin/com/algolia/client/extensions/SearchClient.kt +++ b/clients/algoliasearch-client-kotlin/client/src/commonMain/kotlin/com/algolia/client/extensions/SearchClient.kt @@ -472,46 +472,52 @@ public suspend fun SearchClient.replaceAllObjects( ): ReplaceAllObjectsResponse { val tmpIndexName = "${indexName}_tmp_${Random.nextInt(from = 0, until = 100)}" - var copy = operationIndex( - indexName = indexName, - operationIndexParams = OperationIndexParams( - operation = OperationType.Copy, - destination = tmpIndexName, - scope = listOf(ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms), - ), - requestOptions = requestOptions, - ) + try { + var copy = operationIndex( + indexName = indexName, + operationIndexParams = OperationIndexParams( + operation = OperationType.Copy, + destination = tmpIndexName, + scope = listOf(ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms), + ), + requestOptions = requestOptions, + ) - val batchResponses = this.chunkedBatch( - indexName = tmpIndexName, - objects = objects, - action = Action.AddObject, - waitForTask = true, - batchSize = batchSize, - requestOptions = requestOptions, - ) + val batchResponses = this.chunkedBatch( + indexName = tmpIndexName, + objects = objects, + action = Action.AddObject, + waitForTask = true, + batchSize = batchSize, + requestOptions = requestOptions, + ) - waitForTask(indexName = tmpIndexName, taskID = copy.taskID) + waitForTask(indexName = tmpIndexName, taskID = copy.taskID) - copy = operationIndex( - indexName = indexName, - operationIndexParams = OperationIndexParams( - operation = OperationType.Copy, - destination = tmpIndexName, - scope = listOf(ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms), - ), - requestOptions = requestOptions, - ) - waitForTask(indexName = tmpIndexName, taskID = copy.taskID) + copy = operationIndex( + indexName = indexName, + operationIndexParams = OperationIndexParams( + operation = OperationType.Copy, + destination = tmpIndexName, + scope = listOf(ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms), + ), + requestOptions = requestOptions, + ) + waitForTask(indexName = tmpIndexName, taskID = copy.taskID) - val move = operationIndex( - indexName = tmpIndexName, - operationIndexParams = OperationIndexParams(operation = OperationType.Move, destination = indexName), - requestOptions = requestOptions, - ) - waitForTask(indexName = tmpIndexName, taskID = move.taskID) + val move = operationIndex( + indexName = tmpIndexName, + operationIndexParams = OperationIndexParams(operation = OperationType.Move, destination = indexName), + requestOptions = requestOptions, + ) + waitForTask(indexName = tmpIndexName, taskID = move.taskID) + + return ReplaceAllObjectsResponse(copy, batchResponses, move) + } catch (e: Exception) { + deleteIndex(tmpIndexName) - return ReplaceAllObjectsResponse(copy, batchResponses, move) + throw e + } } /** @@ -542,6 +548,13 @@ public fun securedApiKeyRemainingValidity(apiKey: String): Duration { return validUntil - Clock.System.now() } +/** + * Checks that an index exists. + * + * @param indexName The name of the index to check. + * @return true if the index exists, false otherwise. 
+ * @throws AlgoliaApiException if an error occurs during the request. + */ public suspend fun SearchClient.indexExists(indexName: String): Boolean { try { getSettings(indexName) diff --git a/clients/algoliasearch-client-scala/src/main/scala/algoliasearch/extension/package.scala b/clients/algoliasearch-client-scala/src/main/scala/algoliasearch/extension/package.scala index e72cb1621e1..4ecdf17507f 100644 --- a/clients/algoliasearch-client-scala/src/main/scala/algoliasearch/extension/package.scala +++ b/clients/algoliasearch-client-scala/src/main/scala/algoliasearch/extension/package.scala @@ -366,50 +366,58 @@ package object extension { )(implicit ec: ExecutionContext): Future[ReplaceAllObjectsResponse] = { val tmpIndexName = s"${indexName}_tmp_${scala.util.Random.nextInt(100)}" - for { - copy <- client.operationIndex( - indexName = indexName, - operationIndexParams = OperationIndexParams( - operation = OperationType.Copy, - destination = tmpIndexName, - scope = Some(Seq(ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms)) - ), - requestOptions = requestOptions - ) + try { + for { + copy <- client.operationIndex( + indexName = indexName, + operationIndexParams = OperationIndexParams( + operation = OperationType.Copy, + destination = tmpIndexName, + scope = Some(Seq(ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms)) + ), + requestOptions = requestOptions + ) - batchResponses <- chunkedBatch( - indexName = tmpIndexName, - objects = objects, - action = Action.AddObject, - waitForTasks = true, - batchSize = batchSize, - requestOptions = requestOptions - ) + batchResponses <- chunkedBatch( + indexName = tmpIndexName, + objects = objects, + action = Action.AddObject, + waitForTasks = true, + batchSize = batchSize, + requestOptions = requestOptions + ) - _ <- client.waitTask(indexName = tmpIndexName, taskID = copy.taskID, requestOptions = requestOptions) + _ <- client.waitTask(indexName = tmpIndexName, taskID = copy.taskID, requestOptions = requestOptions) - copy <- client.operationIndex( - indexName = indexName, - operationIndexParams = OperationIndexParams( - operation = OperationType.Copy, - destination = tmpIndexName, - scope = Some(Seq(ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms)) - ), - requestOptions = requestOptions - ) - _ <- client.waitTask(indexName = tmpIndexName, taskID = copy.taskID, requestOptions = requestOptions) + copy <- client.operationIndex( + indexName = indexName, + operationIndexParams = OperationIndexParams( + operation = OperationType.Copy, + destination = tmpIndexName, + scope = Some(Seq(ScopeType.Settings, ScopeType.Rules, ScopeType.Synonyms)) + ), + requestOptions = requestOptions + ) + _ <- client.waitTask(indexName = tmpIndexName, taskID = copy.taskID, requestOptions = requestOptions) - move <- client.operationIndex( - indexName = tmpIndexName, - operationIndexParams = OperationIndexParams(operation = OperationType.Move, destination = indexName), - requestOptions = requestOptions + move <- client.operationIndex( + indexName = tmpIndexName, + operationIndexParams = OperationIndexParams(operation = OperationType.Move, destination = indexName), + requestOptions = requestOptions + ) + _ <- client.waitTask(indexName = tmpIndexName, taskID = move.taskID, requestOptions = requestOptions) + } yield ReplaceAllObjectsResponse( + copyOperationResponse = copy, + batchResponses = batchResponses, + moveOperationResponse = move ) - _ <- client.waitTask(indexName = tmpIndexName, taskID = move.taskID, requestOptions = requestOptions) - } yield 
ReplaceAllObjectsResponse( - copyOperationResponse = copy, - batchResponses = batchResponses, - moveOperationResponse = move - ) + } catch { + case e : Throwable => { + client.deleteIndex(tmpIndexName) + + throw e + } + } } /** Check if an index exists. diff --git a/clients/algoliasearch-client-swift/Sources/Search/Extra/SearchClientExtension.swift b/clients/algoliasearch-client-swift/Sources/Search/Extra/SearchClientExtension.swift index ee7abd7a067..d0dab619345 100644 --- a/clients/algoliasearch-client-swift/Sources/Search/Extra/SearchClientExtension.swift +++ b/clients/algoliasearch-client-swift/Sources/Search/Extra/SearchClientExtension.swift @@ -559,51 +559,57 @@ public extension SearchClient { ) async throws -> ReplaceAllObjectsResponse { let tmpIndexName = "\(indexName)_tmp_\(Int.random(in: 1_000_000 ..< 10_000_000))" - var copyOperationResponse = try await operationIndex( - indexName: indexName, - operationIndexParams: OperationIndexParams( - operation: .copy, - destination: tmpIndexName, - scope: [.settings, .rules, .synonyms] - ), - requestOptions: requestOptions - ) + do { + var copyOperationResponse = try await operationIndex( + indexName: indexName, + operationIndexParams: OperationIndexParams( + operation: .copy, + destination: tmpIndexName, + scope: [.settings, .rules, .synonyms] + ), + requestOptions: requestOptions + ) - let batchResponses = try await self.chunkedBatch( - indexName: tmpIndexName, - objects: objects, - waitForTasks: true, - batchSize: batchSize, - requestOptions: requestOptions - ) - try await self.waitForTask(indexName: tmpIndexName, taskID: copyOperationResponse.taskID) + let batchResponses = try await self.chunkedBatch( + indexName: tmpIndexName, + objects: objects, + waitForTasks: true, + batchSize: batchSize, + requestOptions: requestOptions + ) + try await self.waitForTask(indexName: tmpIndexName, taskID: copyOperationResponse.taskID) - copyOperationResponse = try await operationIndex( - indexName: indexName, - operationIndexParams: OperationIndexParams( - operation: .copy, - destination: tmpIndexName, - scope: [.settings, .rules, .synonyms] - ), - requestOptions: requestOptions - ) - try await self.waitForTask(indexName: tmpIndexName, taskID: copyOperationResponse.taskID) - - let moveOperationResponse = try await self.operationIndex( - indexName: tmpIndexName, - operationIndexParams: OperationIndexParams( - operation: .move, - destination: indexName - ), - requestOptions: requestOptions - ) - try await self.waitForTask(indexName: tmpIndexName, taskID: moveOperationResponse.taskID) + copyOperationResponse = try await operationIndex( + indexName: indexName, + operationIndexParams: OperationIndexParams( + operation: .copy, + destination: tmpIndexName, + scope: [.settings, .rules, .synonyms] + ), + requestOptions: requestOptions + ) + try await self.waitForTask(indexName: tmpIndexName, taskID: copyOperationResponse.taskID) - return ReplaceAllObjectsResponse( - copyOperationResponse: copyOperationResponse, - batchResponses: batchResponses, - moveOperationResponse: moveOperationResponse - ) + let moveOperationResponse = try await self.operationIndex( + indexName: tmpIndexName, + operationIndexParams: OperationIndexParams( + operation: .move, + destination: indexName + ), + requestOptions: requestOptions + ) + try await self.waitForTask(indexName: tmpIndexName, taskID: moveOperationResponse.taskID) + + return ReplaceAllObjectsResponse( + copyOperationResponse: copyOperationResponse, + batchResponses: batchResponses, + moveOperationResponse: 
moveOperationResponse + ) + } catch { + _ = try? await self.deleteIndex(indexName: tmpIndexName) + + throw error + } } /// Generate a secured API key diff --git a/scripts/cts/runCts.ts b/scripts/cts/runCts.ts index 9caa155ae45..101a9ebf44b 100644 --- a/scripts/cts/runCts.ts +++ b/scripts/cts/runCts.ts @@ -9,6 +9,7 @@ import { printBenchmarkReport } from './testServer/benchmark.js'; import { assertChunkWrapperValid } from './testServer/chunkWrapper.js'; import { startTestServer } from './testServer/index.js'; import { assertValidReplaceAllObjects } from './testServer/replaceAllObjects.js'; +import { assertValidReplaceAllObjectsFailed } from './testServer/replaceAllObjectsFailed.js'; import { assertValidTimeouts } from './testServer/timeout.js'; import { assertValidWaitForApiKey } from './testServer/waitFor.js'; @@ -152,6 +153,7 @@ export async function runCts( assertValidTimeouts(languages.length); assertChunkWrapperValid(languages.length - skip('dart') - skip('scala')); assertValidReplaceAllObjects(languages.length - skip('dart') - skip('scala')); + assertValidReplaceAllObjectsFailed(languages.length - skip('dart') - skip('scala')); assertValidWaitForApiKey(languages.length - skip('dart') - skip('scala')); } if (withBenchmarkServer) { diff --git a/scripts/cts/testServer/index.ts b/scripts/cts/testServer/index.ts index 5bc595f9f2e..62b38115745 100644 --- a/scripts/cts/testServer/index.ts +++ b/scripts/cts/testServer/index.ts @@ -6,11 +6,13 @@ import express from 'express'; import { createSpinner } from '../../spinners.js'; import type { CTSType } from '../runCts.js'; +import { expect } from 'chai'; import { apiKeyServer } from './apiKey.js'; import { benchmarkServer } from './benchmark.js'; import { chunkWrapperServer } from './chunkWrapper.js'; import { gzipServer } from './gzip.js'; import { replaceAllObjectsServer } from './replaceAllObjects.js'; +import { replaceAllObjectsServerFailed } from './replaceAllObjectsFailed.js'; import { timeoutServer } from './timeout.js'; import { timeoutServerBis } from './timeoutBis.js'; import { waitForApiKeyServer } from './waitFor.js'; @@ -23,6 +25,7 @@ export async function startTestServer(suites: Record): Promise gzipServer(), timeoutServerBis(), replaceAllObjectsServer(), + replaceAllObjectsServerFailed(), chunkWrapperServer(), waitForApiKeyServer(), apiKeyServer(), @@ -57,14 +60,14 @@ export async function setupServer(name: string, port: number, addRoutes: (app: E // 404 handler app.use((req, res) => { - console.error('endpoint not implemented for', req.method, req.url); - res.status(404).json({ message: 'not found' }); + console.error(`[PORT ${port}] endpoint not implemented for`, req.method, req.url); + expect.fail('endpoint not implemented'); }); // catch all error handler app.use((err, _req, res, _) => { console.error(err.message); - res.status(500).send({ message: err.message }); + expect.fail(err.message); }); const server = await new Promise((resolve) => { diff --git a/scripts/cts/testServer/replaceAllObjects.ts b/scripts/cts/testServer/replaceAllObjects.ts index 33278723e1e..7e900ccc881 100644 --- a/scripts/cts/testServer/replaceAllObjects.ts +++ b/scripts/cts/testServer/replaceAllObjects.ts @@ -19,9 +19,9 @@ const raoState: Record< > = {}; export function assertValidReplaceAllObjects(expectedCount: number): void { - const count = Object.values(raoState).filter((s) => s.successful).length; - if (count !== expectedCount) { - throw new Error(`Expected ${expectedCount} call to replaceAllObjects, got ${count} instead.`); + 
expect(Object.keys(raoState)).to.have.length(expectedCount); + for (const lang in raoState) { + expect(raoState[lang].successful).to.equal(true); } } diff --git a/scripts/cts/testServer/replaceAllObjectsFailed.ts b/scripts/cts/testServer/replaceAllObjectsFailed.ts new file mode 100644 index 00000000000..d2aa5094fab --- /dev/null +++ b/scripts/cts/testServer/replaceAllObjectsFailed.ts @@ -0,0 +1,59 @@ +import type { Server } from 'http'; + +import { expect } from 'chai'; +import type { Express } from 'express'; +import express from 'express'; + +import { setupServer } from './index.js'; + +const raoState: Record< + string, + { + tmpIndexName: string; + successful: boolean; + } +> = {}; + +export function assertValidReplaceAllObjectsFailed(expectedCount: number): void { + const count = Object.values(raoState).filter((s) => s.successful).length; + if (count !== expectedCount) { + console.log(JSON.stringify(raoState, null, 2)); + throw new Error(`Expected ${expectedCount} call to replaceAllObjectsFailed, got ${count} instead.`); + } +} + +function addRoutes(app: Express): void { + app.use(express.urlencoded({ extended: true })); + app.use( + express.json({ + type: ['application/json', 'text/plain'], // the js client sends the body as text/plain + }), + ); + + app.post('/1/indexes/:indexName/operation', (req, res) => { + const lang = req.params.indexName.match(/^cts_e2e_replace_all_objects_too_big_(.*)$/)?.[1] as string; + raoState[lang] = { + tmpIndexName: req.body.destination, + successful: false, + }; + + res.json({ taskID: 123, updatedAt: '2021-01-01T00:00:00.000Z' }); + }); + + app.post('/1/indexes/:indexName/batch', (_req, res) => { + res.status(400).json({ message: 'Record is too big', status: 400 }); + }); + + app.delete('/1/indexes/:indexName', (req, res) => { + const lang = req.params.indexName.match(/^cts_e2e_replace_all_objects_too_big_(.*)_tmp_\d+/)?.[1] as string; + expect(raoState[lang].tmpIndexName).to.equal(req.params.indexName); + raoState[lang].successful = true; + + res.json({ taskID: 456, deletedAt: '2021-01-01T00:00:00.000Z' }); + }); +} + +export function replaceAllObjectsServerFailed(): Promise { + // this server is used to simulate the responses for the replaceAllObjects method, with cleanup + return setupServer('replaceAllObjectsFailed', 6684, addRoutes); +} diff --git a/templates/Bug_report.yml b/templates/Bug_report.yml index ecf2dfdbd55..d01e1b39053 100644 --- a/templates/Bug_report.yml +++ b/templates/Bug_report.yml @@ -27,7 +27,7 @@ body: id: client attributes: label: Client - description: Which API are you targetting? + description: Which API are you targeting? options: - All - AB testing diff --git a/templates/go/search_helpers.mustache b/templates/go/search_helpers.mustache index c40b20fb85f..1bf131b234a 100644 --- a/templates/go/search_helpers.mustache +++ b/templates/go/search_helpers.mustache @@ -657,35 +657,47 @@ func (c *APIClient) ReplaceAllObjects(indexName string, objects []map[string]any return nil, err } - opts = append(opts, WithWaitForTasks(true)) + opts = append(opts, WithWaitForTasks(true)) batchResp, err := c.ChunkedBatch(tmpIndexName, objects, ACTION_ADD_OBJECT, opts...) if err != nil { + _, _ = c.DeleteIndex(c.NewApiDeleteIndexRequest(tmpIndexName)) + return nil, err } _, err = c.WaitForTask(tmpIndexName, copyResp.TaskID, toIterableOptions(opts)...) 
if err != nil { + _, _ = c.DeleteIndex(c.NewApiDeleteIndexRequest(tmpIndexName)) + return nil, err } copyResp, err = c.OperationIndex(c.NewApiOperationIndexRequest(indexName, NewOperationIndexParams(OPERATION_TYPE_COPY, tmpIndexName, WithOperationIndexParamsScope([]ScopeType{SCOPE_TYPE_SETTINGS, SCOPE_TYPE_RULES, SCOPE_TYPE_SYNONYMS}))), toRequestOptions(opts)...) if err != nil { + _, _ = c.DeleteIndex(c.NewApiDeleteIndexRequest(tmpIndexName)) + return nil, err } _, err = c.WaitForTask(tmpIndexName, copyResp.TaskID, toIterableOptions(opts)...) if err != nil { + _, _ = c.DeleteIndex(c.NewApiDeleteIndexRequest(tmpIndexName)) + return nil, err } moveResp, err := c.OperationIndex(c.NewApiOperationIndexRequest(tmpIndexName, NewOperationIndexParams(OPERATION_TYPE_MOVE, indexName)), toRequestOptions(opts)...) if err != nil { + _, _ = c.DeleteIndex(c.NewApiDeleteIndexRequest(tmpIndexName)) + return nil, err } _, err = c.WaitForTask(tmpIndexName, moveResp.TaskID, toIterableOptions(opts)...) if err != nil { + _, _ = c.DeleteIndex(c.NewApiDeleteIndexRequest(tmpIndexName)) + return nil, err } diff --git a/templates/java/api_helpers.mustache b/templates/java/api_helpers.mustache index ac7848b370b..52dadf54497 100644 --- a/templates/java/api_helpers.mustache +++ b/templates/java/api_helpers.mustache @@ -653,24 +653,6 @@ public List chunkedBatch( return chunkedBatch(indexName, objects, action, waitForTasks, 1000, requestOptions); } -/** - * Push a new set of objects and remove all previous ones. Settings, synonyms and query rules are - * untouched. Replace all records in an index without any downtime. See - * https://api-clients-automation.netlify.app/docs/add-new-api-client#5-helpers for implementation - * details. - * - * @param indexName The `indexName` to replace `objects` in. - * @param objects The array of `objects` to store in the given Algolia `indexName`. - * @param batchSize The size of the chunk of `objects`. The number of `batch` calls will be equal - * to `length(objects) / batchSize`. - * @throws AlgoliaRetryException When the retry has failed on all hosts - * @throws AlgoliaApiException When the API sends an http error code - * @throws AlgoliaRuntimeException When an error occurred during the serialization - */ -public ReplaceAllObjectsResponse replaceAllObjects(String indexName, Iterable objects, int batchSize) { - return replaceAllObjects(indexName, objects, batchSize, null); -} - /** * Helper: Saves the given array of objects in the given index. The `chunkedBatch` helper is used * under the hood, which creates a `batch` requests with at most 1000 objects in it. @@ -891,6 +873,40 @@ public List partialUpdateObjects( ); } +/** + * Push a new set of objects and remove all previous ones. Settings, synonyms and query rules are + * untouched. Replace all records in an index without any downtime. See + * https://api-clients-automation.netlify.app/docs/add-new-api-client#5-helpers for implementation + * details. + * + * @param indexName The `indexName` to replace `objects` in. + * @param objects The array of `objects` to store in the given Algolia `indexName`. 
+ * @throws AlgoliaRetryException When the retry has failed on all hosts + * @throws AlgoliaApiException When the API sends an http error code + * @throws AlgoliaRuntimeException When an error occurred during the serialization + */ +public ReplaceAllObjectsResponse replaceAllObjects(String indexName, Iterable objects) { + return replaceAllObjects(indexName, objects, -1); +} + +/** + * Push a new set of objects and remove all previous ones. Settings, synonyms and query rules are + * untouched. Replace all records in an index without any downtime. See + * https://api-clients-automation.netlify.app/docs/add-new-api-client#5-helpers for implementation + * details. + * + * @param indexName The `indexName` to replace `objects` in. + * @param objects The array of `objects` to store in the given Algolia `indexName`. + * @param batchSize The size of the chunk of `objects`. The number of `batch` calls will be equal + * to `length(objects) / batchSize`. + * @throws AlgoliaRetryException When the retry has failed on all hosts + * @throws AlgoliaApiException When the API sends an http error code + * @throws AlgoliaRuntimeException When an error occurred during the serialization + */ +public ReplaceAllObjectsResponse replaceAllObjects(String indexName, Iterable objects, int batchSize) { + return replaceAllObjects(indexName, objects, batchSize, null); +} + /** * Push a new set of objects and remove all previous ones. Settings, synonyms and query rules are * untouched. Replace all records in an index without any downtime. See @@ -916,47 +932,57 @@ public ReplaceAllObjectsResponse replaceAllObjects( Random rnd = new Random(); String tmpIndexName = indexName + "_tmp_" + rnd.nextInt(100); - // Copy settings, synonyms and rules - UpdatedAtResponse copyOperationResponse = operationIndex( - indexName, - new OperationIndexParams() - .setOperation(OperationType.COPY) - .setDestination(tmpIndexName) - .addScope(ScopeType.SETTINGS) - .addScope(ScopeType.RULES) - .addScope(ScopeType.SYNONYMS), - requestOptions - ); + if (batchSize == -1) { + batchSize = 1000; + } - // Save new objects - List batchResponses = chunkedBatch(tmpIndexName, objects, Action.ADD_OBJECT, true, batchSize, requestOptions); + try { + // Copy settings, synonyms and rules + UpdatedAtResponse copyOperationResponse = operationIndex( + indexName, + new OperationIndexParams() + .setOperation(OperationType.COPY) + .setDestination(tmpIndexName) + .addScope(ScopeType.SETTINGS) + .addScope(ScopeType.RULES) + .addScope(ScopeType.SYNONYMS), + requestOptions + ); - waitForTask(tmpIndexName, copyOperationResponse.getTaskID(), requestOptions); + // Save new objects + List batchResponses = chunkedBatch(tmpIndexName, objects, Action.ADD_OBJECT, true, batchSize, requestOptions); - copyOperationResponse = operationIndex( - indexName, - new OperationIndexParams() - .setOperation(OperationType.COPY) - .setDestination(tmpIndexName) - .addScope(ScopeType.SETTINGS) - .addScope(ScopeType.RULES) - .addScope(ScopeType.SYNONYMS), - requestOptions - ); - waitForTask(tmpIndexName, copyOperationResponse.getTaskID(), requestOptions); + waitForTask(tmpIndexName, copyOperationResponse.getTaskID(), requestOptions); - // Move temporary index to source index - UpdatedAtResponse moveOperationResponse = operationIndex( - tmpIndexName, - new OperationIndexParams().setOperation(OperationType.MOVE).setDestination(indexName), - requestOptions - ); - waitForTask(tmpIndexName, moveOperationResponse.getTaskID(), requestOptions); + copyOperationResponse = operationIndex( + indexName, + new 
OperationIndexParams() + .setOperation(OperationType.COPY) + .setDestination(tmpIndexName) + .addScope(ScopeType.SETTINGS) + .addScope(ScopeType.RULES) + .addScope(ScopeType.SYNONYMS), + requestOptions + ); + waitForTask(tmpIndexName, copyOperationResponse.getTaskID(), requestOptions); + + // Move temporary index to source index + UpdatedAtResponse moveOperationResponse = operationIndex( + tmpIndexName, + new OperationIndexParams().setOperation(OperationType.MOVE).setDestination(indexName), + requestOptions + ); + waitForTask(tmpIndexName, moveOperationResponse.getTaskID(), requestOptions); + + return new ReplaceAllObjectsResponse() + .setCopyOperationResponse(copyOperationResponse) + .setBatchResponses(batchResponses) + .setMoveOperationResponse(moveOperationResponse); + } catch (Exception e) { + deleteIndex(tmpIndexName); - return new ReplaceAllObjectsResponse() - .setCopyOperationResponse(copyOperationResponse) - .setBatchResponses(batchResponses) - .setMoveOperationResponse(moveOperationResponse); + throw e; + } } /** diff --git a/templates/javascript/clients/client/api/helpers.mustache b/templates/javascript/clients/client/api/helpers.mustache index 436e903a3f5..50ef00744d6 100644 --- a/templates/javascript/clients/client/api/helpers.mustache +++ b/templates/javascript/clients/client/api/helpers.mustache @@ -407,57 +407,63 @@ async replaceAllObjects( const randomSuffix = Math.floor(Math.random() * 1000000) + 100000; const tmpIndexName = `${indexName}_tmp_${randomSuffix}`; - let copyOperationResponse = await this.operationIndex( - { - indexName, - operationIndexParams: { - operation: 'copy', - destination: tmpIndexName, - scope: ['settings', 'rules', 'synonyms'], + try { + let copyOperationResponse = await this.operationIndex( + { + indexName, + operationIndexParams: { + operation: 'copy', + destination: tmpIndexName, + scope: ['settings', 'rules', 'synonyms'], + }, }, - }, - requestOptions - ); + requestOptions + ); - const batchResponses = await this.chunkedBatch( - { indexName: tmpIndexName, objects, waitForTasks: true, batchSize }, - requestOptions - ); + const batchResponses = await this.chunkedBatch( + { indexName: tmpIndexName, objects, waitForTasks: true, batchSize }, + requestOptions + ); - await this.waitForTask({ - indexName: tmpIndexName, - taskID: copyOperationResponse.taskID, - }); + await this.waitForTask({ + indexName: tmpIndexName, + taskID: copyOperationResponse.taskID, + }); - copyOperationResponse = await this.operationIndex( - { - indexName, - operationIndexParams: { - operation: 'copy', - destination: tmpIndexName, - scope: ['settings', 'rules', 'synonyms'], + copyOperationResponse = await this.operationIndex( + { + indexName, + operationIndexParams: { + operation: 'copy', + destination: tmpIndexName, + scope: ['settings', 'rules', 'synonyms'], + }, }, - }, - requestOptions - ); - await this.waitForTask({ - indexName: tmpIndexName, - taskID: copyOperationResponse.taskID, - }); + requestOptions + ); + await this.waitForTask({ + indexName: tmpIndexName, + taskID: copyOperationResponse.taskID, + }); - const moveOperationResponse = await this.operationIndex( - { + const moveOperationResponse = await this.operationIndex( + { + indexName: tmpIndexName, + operationIndexParams: { operation: 'move', destination: indexName }, + }, + requestOptions + ); + await this.waitForTask({ indexName: tmpIndexName, - operationIndexParams: { operation: 'move', destination: indexName }, - }, - requestOptions - ); - await this.waitForTask({ - indexName: tmpIndexName, - taskID: 
moveOperationResponse.taskID, - }); + taskID: moveOperationResponse.taskID, + }); - return { copyOperationResponse, batchResponses, moveOperationResponse }; + return { copyOperationResponse, batchResponses, moveOperationResponse }; + } catch (error) { + await this.deleteIndex({ indexName: tmpIndexName }); + + throw error; + } }, async indexExists({ indexName }: GetSettingsProps): Promise { diff --git a/templates/php/api.mustache b/templates/php/api.mustache index 29c703dce2f..60a5ca43ed2 100644 --- a/templates/php/api.mustache +++ b/templates/php/api.mustache @@ -438,48 +438,54 @@ use Algolia\AlgoliaSearch\Exceptions\NotFoundException; { $tmpIndexName = $indexName.'_tmp_'.rand(10000000, 99999999); - $copyOperationResponse = $this->operationIndex( + try { + $copyOperationResponse = $this->operationIndex( $indexName, [ - 'operation' => 'copy', - 'destination' => $tmpIndexName, - 'scope' => ['settings', 'rules', 'synonyms'], + 'operation' => 'copy', + 'destination' => $tmpIndexName, + 'scope' => ['settings', 'rules', 'synonyms'], ], $requestOptions - ); + ); - $batchResponses = $this->chunkedBatch($tmpIndexName, $objects, 'addObject', true, $batchSize, $requestOptions); + $batchResponses = $this->chunkedBatch($tmpIndexName, $objects, 'addObject', true, $batchSize, $requestOptions); - $this->waitForTask($tmpIndexName, $copyOperationResponse['taskID']); + $this->waitForTask($tmpIndexName, $copyOperationResponse['taskID']); - $copyOperationResponse = $this->operationIndex( + $copyOperationResponse = $this->operationIndex( $indexName, [ - 'operation' => 'copy', - 'destination' => $tmpIndexName, - 'scope' => ['settings', 'rules', 'synonyms'], + 'operation' => 'copy', + 'destination' => $tmpIndexName, + 'scope' => ['settings', 'rules', 'synonyms'], ], $requestOptions - ); + ); - $this->waitForTask($tmpIndexName, $copyOperationResponse['taskID']); + $this->waitForTask($tmpIndexName, $copyOperationResponse['taskID']); - $moveOperationResponse = $this->operationIndex( + $moveOperationResponse = $this->operationIndex( $tmpIndexName, [ - 'operation' => 'move', - 'destination' => $indexName, + 'operation' => 'move', + 'destination' => $indexName, ], $requestOptions - ); + ); + + $this->waitForTask($tmpIndexName, $moveOperationResponse['taskID']); - $this->waitForTask($tmpIndexName, $moveOperationResponse['taskID']); + return [ + 'copyOperationResponse' => $copyOperationResponse, + 'batchResponses' => $batchResponses, + 'moveOperationResponse' => $moveOperationResponse, + ]; + } catch (\Throwable $e) { + $this->deleteIndex($tmpIndexName); - return [ - "copyOperationResponse" => $copyOperationResponse, - "batchResponses" => $batchResponses, - "moveOperationResponse" => $moveOperationResponse - ]; + throw $e; + } } /** @@ -632,7 +638,7 @@ use Algolia\AlgoliaSearch\Exceptions\NotFoundException; $this->getSettings($indexName); } catch (NotFoundException $e) { return false; - } catch (Exception $e) { + } catch (\Throwable $e) { throw $e; } diff --git a/templates/python/search_helpers.mustache b/templates/python/search_helpers.mustache index 08b9df3e6d8..f124c0f3dce 100644 --- a/templates/python/search_helpers.mustache +++ b/templates/python/search_helpers.mustache @@ -365,57 +365,62 @@ """ tmp_index_name = self.create_temporary_name(index_name) - {{^isSyncClient}}async {{/isSyncClient}}def _copy() -> UpdatedAtResponse: - return {{^isSyncClient}}await {{/isSyncClient}}self.operation_index( - index_name=index_name, - operation_index_params=OperationIndexParams( - operation=OperationType.COPY, - 
destination=tmp_index_name, - scope=[ - ScopeType("settings"), - ScopeType("rules"), - ScopeType("synonyms"), - ], - ), - request_options=request_options, - ) - - copy_operation_response = {{^isSyncClient}}await {{/isSyncClient}}_copy() - - batch_responses = {{^isSyncClient}}await {{/isSyncClient}}self.chunked_batch( - index_name=tmp_index_name, - objects=objects, - wait_for_tasks=True, - batch_size=batch_size, - request_options=request_options, - ) - - {{^isSyncClient}}await {{/isSyncClient}}self.wait_for_task( - index_name=tmp_index_name, task_id=copy_operation_response.task_id - ) - - copy_operation_response = {{^isSyncClient}}await {{/isSyncClient}}_copy() - {{^isSyncClient}}await {{/isSyncClient}}self.wait_for_task( - index_name=tmp_index_name, task_id=copy_operation_response.task_id - ) - - move_operation_response = {{^isSyncClient}}await {{/isSyncClient}}self.operation_index( - index_name=tmp_index_name, - operation_index_params=OperationIndexParams( - operation=OperationType.MOVE, - destination=index_name, - ), - request_options=request_options, - ) - {{^isSyncClient}}await {{/isSyncClient}}self.wait_for_task( - index_name=tmp_index_name, task_id=move_operation_response.task_id - ) + try: + {{^isSyncClient}}async {{/isSyncClient}}def _copy() -> UpdatedAtResponse: + return {{^isSyncClient}}await {{/isSyncClient}}self.operation_index( + index_name=index_name, + operation_index_params=OperationIndexParams( + operation=OperationType.COPY, + destination=tmp_index_name, + scope=[ + ScopeType("settings"), + ScopeType("rules"), + ScopeType("synonyms"), + ], + ), + request_options=request_options, + ) + + copy_operation_response = {{^isSyncClient}}await {{/isSyncClient}}_copy() + + batch_responses = {{^isSyncClient}}await {{/isSyncClient}}self.chunked_batch( + index_name=tmp_index_name, + objects=objects, + wait_for_tasks=True, + batch_size=batch_size, + request_options=request_options, + ) + + {{^isSyncClient}}await {{/isSyncClient}}self.wait_for_task( + index_name=tmp_index_name, task_id=copy_operation_response.task_id + ) + + copy_operation_response = {{^isSyncClient}}await {{/isSyncClient}}_copy() + {{^isSyncClient}}await {{/isSyncClient}}self.wait_for_task( + index_name=tmp_index_name, task_id=copy_operation_response.task_id + ) + + move_operation_response = {{^isSyncClient}}await {{/isSyncClient}}self.operation_index( + index_name=tmp_index_name, + operation_index_params=OperationIndexParams( + operation=OperationType.MOVE, + destination=index_name, + ), + request_options=request_options, + ) + {{^isSyncClient}}await {{/isSyncClient}}self.wait_for_task( + index_name=tmp_index_name, task_id=move_operation_response.task_id + ) + + return ReplaceAllObjectsResponse( + copy_operation_response=copy_operation_response, + batch_responses=batch_responses, + move_operation_response=move_operation_response, + ) + except Exception as e: + {{^isSyncClient}}await {{/isSyncClient}}self.delete_index(tmp_index_name) - return ReplaceAllObjectsResponse( - copy_operation_response=copy_operation_response, - batch_responses=batch_responses, - move_operation_response=move_operation_response, - ) + raise e {{^isSyncClient}}async {{/isSyncClient}}def index_exists(self, index_name: str) -> bool: """ diff --git a/templates/ruby/search_helpers.mustache b/templates/ruby/search_helpers.mustache index c27edc22452..e12eb3c95c8 100644 --- a/templates/ruby/search_helpers.mustache +++ b/templates/ruby/search_helpers.mustache @@ -347,63 +347,69 @@ end def replace_all_objects(index_name, objects, batch_size = 1000, 
request_options = {}) tmp_index_name = index_name + '_tmp_' + rand(10_000_000).to_s - copy_operation_response = operation_index( - index_name, - Search::OperationIndexParams.new( - operation: Search::OperationType::COPY, - destination: tmp_index_name, - scope: [ - Search::ScopeType::SETTINGS, - Search::ScopeType::RULES, - Search::ScopeType::SYNONYMS - ] - ), - request_options - ) - - batch_responses = chunked_batch( - tmp_index_name, - objects, - Search::Action::ADD_OBJECT, - true, - batch_size, - request_options - ) - - wait_for_task(tmp_index_name, copy_operation_response.task_id) - - copy_operation_response = operation_index( - index_name, - Search::OperationIndexParams.new( - operation: Search::OperationType::COPY, - destination: tmp_index_name, - scope: [ - Search::ScopeType::SETTINGS, - Search::ScopeType::RULES, - Search::ScopeType::SYNONYMS - ] - ), - request_options - ) - - wait_for_task(tmp_index_name, copy_operation_response.task_id) - - move_operation_response = operation_index( - tmp_index_name, - Search::OperationIndexParams.new( - operation: Search::OperationType::MOVE, - destination: index_name - ), - request_options - ) - - wait_for_task(tmp_index_name, move_operation_response.task_id) - - Search::ReplaceAllObjectsResponse.new( - copy_operation_response: copy_operation_response, - batch_responses: batch_responses, - move_operation_response: move_operation_response - ) + begin + copy_operation_response = operation_index( + index_name, + Search::OperationIndexParams.new( + operation: Search::OperationType::COPY, + destination: tmp_index_name, + scope: [ + Search::ScopeType::SETTINGS, + Search::ScopeType::RULES, + Search::ScopeType::SYNONYMS + ] + ), + request_options + ) + + batch_responses = chunked_batch( + tmp_index_name, + objects, + Search::Action::ADD_OBJECT, + true, + batch_size, + request_options + ) + + wait_for_task(tmp_index_name, copy_operation_response.task_id) + + copy_operation_response = operation_index( + index_name, + Search::OperationIndexParams.new( + operation: Search::OperationType::COPY, + destination: tmp_index_name, + scope: [ + Search::ScopeType::SETTINGS, + Search::ScopeType::RULES, + Search::ScopeType::SYNONYMS + ] + ), + request_options + ) + + wait_for_task(tmp_index_name, copy_operation_response.task_id) + + move_operation_response = operation_index( + tmp_index_name, + Search::OperationIndexParams.new( + operation: Search::OperationType::MOVE, + destination: index_name + ), + request_options + ) + + wait_for_task(tmp_index_name, move_operation_response.task_id) + + Search::ReplaceAllObjectsResponse.new( + copy_operation_response: copy_operation_response, + batch_responses: batch_responses, + move_operation_response: move_operation_response + ) + rescue Exception => e + delete_index(tmp_index_name) + + raise e + end end def index_exists?(index_name) diff --git a/tests/CTS/client/ingestion/api.json b/tests/CTS/client/ingestion/api.json index 2f9d3ebef8c..474b906fcac 100644 --- a/tests/CTS/client/ingestion/api.json +++ b/tests/CTS/client/ingestion/api.json @@ -28,7 +28,7 @@ "go": "API error [429] Too Many Requests", "java": "Status Code: 429 - Too Many Requests", "javascript": "Too Many Requests", - "kotlin": "Client request(GET http://%localhost%:6676/1/html-error) invalid: 429 Too Many Requests. Text: \\\"429 Too Many Requests\\\"", + "kotlin": "Client request\\\\(GET http://%localhost%:6676/1/html-error\\\\) invalid: 429 Too Many Requests. 
Text: \\\"429 Too Many Requests\\\"", "php": "429: Too Many Requests", "python": "Too Many Requests", "ruby": "429: Too Many Requests", diff --git a/tests/CTS/client/search/api.json b/tests/CTS/client/search/api.json index 315daca9386..a2def1c53be 100644 --- a/tests/CTS/client/search/api.json +++ b/tests/CTS/client/search/api.json @@ -137,7 +137,7 @@ "go": "failed to do request: all hosts have been contacted unsuccessfully, it can either be a server or a network error or wrong appID/key credentials were used. You can use 'ExposeIntermediateNetworkErrors: true' in the config to investigate.", "java": "Error(s) while processing the retry strategy\\nCaused by: java.net.SocketTimeoutException: timeout", "javascript": "Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support.", - "kotlin": "Error(s) while processing the retry strategy", + "kotlin": "Error\\\\(s\\\\) while processing the retry strategy", "php": "Impossible to connect, please check your Algolia Application Id.", "python": "Unreachable hosts", "ruby": "Unreachable hosts. Last error for %localhost%: Net::ReadTimeout with #", diff --git a/tests/CTS/client/search/indexExists.json b/tests/CTS/client/search/indexExists.json index f2582333eeb..84117b466bb 100644 --- a/tests/CTS/client/search/indexExists.json +++ b/tests/CTS/client/search/indexExists.json @@ -85,7 +85,7 @@ "go": "API error [403] Invalid API key", "java": "Status Code: 403 - {\\\"message\\\":\\\"Invalid API key\\\"}", "javascript": "Invalid API key", - "kotlin": "Client request(GET http://%localhost%:6681/1/indexes/indexExistsERROR/settings) invalid: 403 Forbidden. Text: \\\"{\\\"message\\\":\\\"Invalid API key\\\"}\\\"", + "kotlin": "Client request\\\\(GET http://%localhost%:6681/1/indexes/indexExistsERROR/settings\\\\) invalid: 403 Forbidden. Text: \\\"\\\\{\\\"message\\\":\\\"Invalid API key\\\"\\\\}\\\"", "php": "Invalid API key", "python": "Invalid API key", "ruby": "403: Invalid API key", diff --git a/tests/CTS/client/search/replaceAllObjects.json b/tests/CTS/client/search/replaceAllObjects.json index 372d75953ff..c2a525294e5 100644 --- a/tests/CTS/client/search/replaceAllObjects.json +++ b/tests/CTS/client/search/replaceAllObjects.json @@ -111,5 +111,53 @@ } } ] + }, + { + "testName": "replaceAllObjects should cleanup on failure", + "autoCreateClient": false, + "steps": [ + { + "type": "createClient", + "parameters": { + "appId": "test-app-id", + "apiKey": "test-api-key", + "customHosts": [ + { + "port": 6684 + } + ] + } + }, + { + "type": "method", + "method": "replaceAllObjects", + "parameters": { + "indexName": "cts_e2e_replace_all_objects_too_big_${{language}}", + "objects": [ + { + "objectID": "fine", + "body": "small obj" + }, + { + "objectID": "toolarge", + "body": "something bigger than 10KB" + } + ] + }, + "expected": { + "error": { + "csharp": "{\\\"message\\\":\\\"Record is too big\\\",\\\"status\\\":400}", + "go": "API error [400] Record is too big", + "java": "Status Code: 400 - {\\\"message\\\":\\\"Record is too big\\\",\\\"status\\\":400}", + "javascript": "Record is too big", + "kotlin": "Client request\\\\(POST http://%localhost%:6684/1/indexes/cts_e2e_replace_all_objects_too_big_${{language}}_tmp_\\\\d+/batch\\\\) invalid: 400 Bad Request. 
Text: \\\"\\\\{\\\"message\\\":\\\"Record is too big\\\",\\\"status\\\":400\\\\}\\\"", + "php": "Record is too big", + "python": "Record is too big", + "ruby": "400: Record is too big", + "swift": "HTTP error: Status code: 400 Message: Record is too big" + } + } + } + ] } ] diff --git a/tests/CTS/client/search/saveObjects.json b/tests/CTS/client/search/saveObjects.json index 060d85bd9be..49445d7dbd3 100644 --- a/tests/CTS/client/search/saveObjects.json +++ b/tests/CTS/client/search/saveObjects.json @@ -84,7 +84,7 @@ "go": "API error [403] Invalid Application-ID or API key", "java": "Status Code: 403 - {\\\"message\\\":\\\"Invalid Application-ID or API key\\\",\\\"status\\\":403}", "javascript": "Invalid Application-ID or API key", - "kotlin": "Client request(POST http://%localhost%:6680/1/indexes/cts_e2e_saveObjects_kotlin/batch) invalid: 403 Forbidden. Text: \\\"{\\\"message\\\":\\\"Invalid Application-ID or API key\\\",\\\"status\\\":403}\\\"", + "kotlin": "Client request\\\\(POST http://%localhost%:6680/1/indexes/cts_e2e_saveObjects_kotlin/batch\\\\) invalid: 403 Forbidden. Text: \\\"\\\\{\\\"message\\\":\\\"Invalid Application-ID or API key\\\",\\\"status\\\":403\\\\}\\\"", "php": "Invalid Application-ID or API key", "python": "Invalid Application-ID or API key", "ruby": "403: Invalid Application-ID or API key", diff --git a/tests/output/javascript/yarn.lock b/tests/output/javascript/yarn.lock index b9207a4425c..dc805df3f61 100644 --- a/tests/output/javascript/yarn.lock +++ b/tests/output/javascript/yarn.lock @@ -361,12 +361,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:22.10.1": - version: 22.10.1 - resolution: "@types/node@npm:22.10.1" +"@types/node@npm:22.10.2": + version: 22.10.2 + resolution: "@types/node@npm:22.10.2" dependencies: undici-types: "npm:~6.20.0" - checksum: 10/c802a526da2f3fa3ccefd00a71244e7cb825329951719e79e8fec62b1dbc2855388c830489770611584665ce10be23c05ed585982038b24924e1ba2c2cce03fd + checksum: 10/451adfefed4add58b069407173e616220fd4aaa3307cdde1bb701aa053b65b54ced8483db2f870dcedec7a58cb3b06101fbc19d85852716672ec1fd3660947fa languageName: node linkType: hard @@ -981,7 +981,7 @@ __metadata: dependencies: "@algolia/client-composition": "link:../../../clients/algoliasearch-client-javascript/packages/client-composition" "@algolia/requester-testing": "link:../../../clients/algoliasearch-client-javascript/packages/requester-testing" - "@types/node": "npm:22.10.1" + "@types/node": "npm:22.10.2" algoliasearch: "link:../../../clients/algoliasearch-client-javascript/packages/algoliasearch" dotenv: "npm:16.4.7" typescript: "npm:5.7.2" diff --git a/tests/output/kotlin/src/commonTest/kotlin/com/algolia/utils/Assert.kt b/tests/output/kotlin/src/commonTest/kotlin/com/algolia/utils/Assert.kt index 3bed962d1da..81b7960ca7b 100644 --- a/tests/output/kotlin/src/commonTest/kotlin/com/algolia/utils/Assert.kt +++ b/tests/output/kotlin/src/commonTest/kotlin/com/algolia/utils/Assert.kt @@ -67,6 +67,6 @@ fun assertEmptyBody(body: Any) { fun assertError(throwable: Throwable, message: String) { when (throwable) { is SkipException -> println("Test skipped because of non-nullable") - else -> assertEquals(message, throwable.message) + else -> assertTrue(throwable.message!!.matches(message.toRegex()), "Expected error: $message, but got: ${throwable.message}") } }