
Commit bf67b90

Remove existing transmit from storage handling (Azure#35140)
* Remove existing TransmitFromStorage handling
* Remove unnecessary using directive
1 parent 430d93d commit bf67b90

11 files changed: +74 additions, -615 deletions

sdk/monitor/Azure.Monitor.OpenTelemetry.Exporter/src/AzureMonitorLogExporter.cs

Lines changed: 0 additions & 10 deletions
@@ -15,7 +15,6 @@ internal class AzureMonitorLogExporter : BaseExporter<LogRecord>
     {
         private readonly ITransmitter _transmitter;
         private readonly string _instrumentationKey;
-        private readonly AzureMonitorPersistentStorage? _persistentStorage;
         private AzureMonitorResource? _resource;
         private bool _disposed;

@@ -27,20 +26,13 @@ internal AzureMonitorLogExporter(ITransmitter transmitter)
         {
             _transmitter = transmitter;
             _instrumentationKey = transmitter.InstrumentationKey;
-
-            if (transmitter is AzureMonitorTransmitter azureMonitorTransmitter && azureMonitorTransmitter._fileBlobProvider != null)
-            {
-                _persistentStorage = new AzureMonitorPersistentStorage(transmitter);
-            }
         }

         internal AzureMonitorResource? LogResource => _resource ??= ParentProvider?.GetResource().UpdateRoleNameAndInstance();

         /// <inheritdoc/>
         public override ExportResult Export(in Batch<LogRecord> batch)
         {
-            _persistentStorage?.StartExporterTimer();
-
             // Prevent Azure Monitor's HTTP operations from being instrumented.
             using var scope = SuppressInstrumentationScope.Begin();

@@ -53,8 +45,6 @@ public override ExportResult Export(in Batch<LogRecord> batch)
                 {
                     exportResult = _transmitter.TrackAsync(telemetryItems, false, CancellationToken.None).EnsureCompleted();
                 }
-
-                _persistentStorage?.StopExporterTimerAndTransmitFromStorage();
             }
             catch (Exception ex)
             {
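
With the _persistentStorage field and its timer calls removed, the exporter's Export method reduces to instrumentation suppression plus a single TrackAsync call; the same change is applied to the metric and trace exporters below. A minimal sketch of the resulting method is shown here; only the lines visible in this diff are taken from the source, and the batch-conversion step in the middle is a hypothetical placeholder, not the file's actual code.

        // Sketch only: approximate post-change shape of AzureMonitorLogExporter.Export.
        public override ExportResult Export(in Batch<LogRecord> batch)
        {
            // Prevent Azure Monitor's HTTP operations from being instrumented.
            using var scope = SuppressInstrumentationScope.Begin();

            var exportResult = ExportResult.Failure;
            try
            {
                // (assumed) convert the OpenTelemetry batch into TelemetryItem instances;
                // the real conversion code is not part of this diff.
                var telemetryItems = ConvertBatchToTelemetryItems(batch); // hypothetical helper

                if (telemetryItems.Count > 0)
                {
                    exportResult = _transmitter.TrackAsync(telemetryItems, false, CancellationToken.None).EnsureCompleted();
                }
            }
            catch (Exception ex)
            {
                AzureMonitorExporterEventSource.Log.WriteError("FailedToExport", ex); // event name assumed
            }

            return exportResult;
        }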

sdk/monitor/Azure.Monitor.OpenTelemetry.Exporter/src/AzureMonitorMetricExporter.cs

Lines changed: 0 additions & 10 deletions
@@ -15,7 +15,6 @@ internal class AzureMonitorMetricExporter : BaseExporter<Metric>
     {
         private readonly ITransmitter _transmitter;
         private readonly string _instrumentationKey;
-        private readonly AzureMonitorPersistentStorage? _persistentStorage;
         private AzureMonitorResource? _resource;
         private bool _disposed;

@@ -27,20 +26,13 @@ internal AzureMonitorMetricExporter(ITransmitter transmitter)
         {
             _transmitter = transmitter;
             _instrumentationKey = transmitter.InstrumentationKey;
-
-            if (transmitter is AzureMonitorTransmitter azureMonitorTransmitter && azureMonitorTransmitter._fileBlobProvider != null)
-            {
-                _persistentStorage = new AzureMonitorPersistentStorage(transmitter);
-            }
         }

         internal AzureMonitorResource? MetricResource => _resource ??= ParentProvider?.GetResource().UpdateRoleNameAndInstance();

         /// <inheritdoc/>
         public override ExportResult Export(in Batch<Metric> batch)
         {
-            _persistentStorage?.StartExporterTimer();
-
             // Prevent Azure Monitor's HTTP operations from being instrumented.
             using var scope = SuppressInstrumentationScope.Begin();

@@ -62,8 +54,6 @@ public override ExportResult Export(in Batch<Metric> batch)
                 {
                     exportResult = ExportResult.Success;
                 }
-
-                _persistentStorage?.StopExporterTimerAndTransmitFromStorage();
             }
             catch (Exception ex)
            {

sdk/monitor/Azure.Monitor.OpenTelemetry.Exporter/src/AzureMonitorTraceExporter.cs

Lines changed: 0 additions & 10 deletions
@@ -15,7 +15,6 @@ internal class AzureMonitorTraceExporter : BaseExporter<Activity>
     {
         private readonly ITransmitter _transmitter;
         private readonly string _instrumentationKey;
-        private readonly AzureMonitorPersistentStorage? _persistentStorage;
         private AzureMonitorResource? _resource;
         private bool _disposed;

@@ -27,20 +26,13 @@ internal AzureMonitorTraceExporter(ITransmitter transmitter)
        {
             _transmitter = transmitter;
             _instrumentationKey = transmitter.InstrumentationKey;
-
-            if (transmitter is AzureMonitorTransmitter azureMonitorTransmitter && azureMonitorTransmitter._fileBlobProvider != null)
-            {
-                _persistentStorage = new AzureMonitorPersistentStorage(transmitter);
-            }
         }

         internal AzureMonitorResource? TraceResource => _resource ??= ParentProvider?.GetResource().UpdateRoleNameAndInstance();

         /// <inheritdoc/>
         public override ExportResult Export(in Batch<Activity> batch)
         {
-            _persistentStorage?.StartExporterTimer();
-
             // Prevent Azure Monitor's HTTP operations from being instrumented.
             using var scope = SuppressInstrumentationScope.Begin();

@@ -53,8 +45,6 @@ public override ExportResult Export(in Batch<Activity> batch)
                 {
                     exportResult = _transmitter.TrackAsync(telemetryItems, false, CancellationToken.None).EnsureCompleted();
                 }
-
-                _persistentStorage?.StopExporterTimerAndTransmitFromStorage();
             }
             catch (Exception ex)
             {

sdk/monitor/Azure.Monitor.OpenTelemetry.Exporter/src/Internals/AzureMonitorTransmitter.cs

Lines changed: 2 additions & 214 deletions
@@ -5,8 +5,6 @@
 using System.Collections.Generic;
 using System.Threading;
 using System.Threading.Tasks;
-
-using Azure.Core;
 using Azure.Core.Pipeline;
 using Azure.Monitor.OpenTelemetry.Exporter.Internals.ConnectionString;
 using Azure.Monitor.OpenTelemetry.Exporter.Internals.PersistentStorage;

@@ -163,11 +161,11 @@ public async ValueTask<ExportResult> TrackAsync(IEnumerable<TelemetryItem> telem
                     await _applicationInsightsRestClient.InternalTrackAsync(telemetryItems, cancellationToken).ConfigureAwait(false) :
                     _applicationInsightsRestClient.InternalTrackAsync(telemetryItems, cancellationToken).Result;

-                result = IsSuccess(httpMessage);
+                result = HttpPipelineHelper.IsSuccess(httpMessage);

                 if (result == ExportResult.Failure && _fileBlobProvider != null)
                 {
-                    result = HandleFailures(httpMessage);
+                    result = HttpPipelineHelper.HandleFailures(httpMessage, _fileBlobProvider);
                 }
                 else
                 {

@@ -182,216 +180,6 @@ await _applicationInsightsRestClient.InternalTrackAsync(telemetryItems, cancella
             return result;
         }

-        public async ValueTask TransmitFromStorage(long maxFilesToTransmit, bool async, CancellationToken cancellationToken)
-        {
-            if (cancellationToken.IsCancellationRequested)
-            {
-                return;
-            }
-
-            if (_fileBlobProvider == null)
-            {
-                return;
-            }
-
-            long files = maxFilesToTransmit;
-            while (files > 0)
-            {
-                try
-                {
-                    // TODO: Do we need more lease time?
-                    if (_fileBlobProvider.TryGetBlob(out var blob) && blob.TryLease(1000))
-                    {
-                        blob.TryRead(out var data);
-                        using var httpMessage = async ?
-                            await _applicationInsightsRestClient.InternalTrackAsync(data, cancellationToken).ConfigureAwait(false) :
-                            _applicationInsightsRestClient.InternalTrackAsync(data, cancellationToken).Result;
-
-                        var result = IsSuccess(httpMessage);
-
-                        if (result == ExportResult.Success)
-                        {
-                            AzureMonitorExporterEventSource.Log.WriteInformational("TransmitFromStorageSuccess", "Successfully transmitted a blob from storage.");
-
-                            // In case if the delete fails, there is a possibility
-                            // that the current batch will be transmitted more than once resulting in duplicates.
-                            blob.TryDelete();
-                        }
-                        else
-                        {
-                            HandleFailures(httpMessage, blob);
-                        }
-                    }
-                    else
-                    {
-                        // no files to process
-                        return;
-                    }
-                }
-                catch (Exception ex)
-                {
-                    AzureMonitorExporterEventSource.Log.WriteError("FailedToTransmitFromStorage", ex);
-                }
-
-                files--;
-            }
-        }
-
-        private static ExportResult IsSuccess(HttpMessage httpMessage)
-        {
-            if (httpMessage.HasResponse && httpMessage.Response.Status == ResponseStatusCodes.Success)
-            {
-                return ExportResult.Success;
-            }
-
-            return ExportResult.Failure;
-        }
-
-        private ExportResult HandleFailures(HttpMessage httpMessage)
-        {
-            if (_fileBlobProvider == null)
-            {
-                return ExportResult.Failure;
-            }
-
-            ExportResult result = ExportResult.Failure;
-            int statusCode = 0;
-            byte[]? content;
-            int retryInterval;
-
-            if (!httpMessage.HasResponse)
-            {
-                // HttpRequestException
-                content = HttpPipelineHelper.GetRequestContent(httpMessage.Request.Content);
-                if (content != null)
-                {
-                    result = _fileBlobProvider.SaveTelemetry(content, HttpPipelineHelper.MinimumRetryInterval);
-                }
-            }
-            else
-            {
-                statusCode = httpMessage.Response.Status;
-                switch (statusCode)
-                {
-                    case ResponseStatusCodes.PartialSuccess:
-                        // Parse retry-after header
-                        // Send Failed Messages To Storage
-                        TrackResponse trackResponse = HttpPipelineHelper.GetTrackResponse(httpMessage);
-                        content = HttpPipelineHelper.GetPartialContentForRetry(trackResponse, httpMessage.Request.Content);
-                        if (content != null)
-                        {
-                            retryInterval = HttpPipelineHelper.GetRetryInterval(httpMessage.Response);
-                            result = _fileBlobProvider.SaveTelemetry(content, retryInterval);
-                        }
-                        break;
-                    case ResponseStatusCodes.RequestTimeout:
-                    case ResponseStatusCodes.ResponseCodeTooManyRequests:
-                    case ResponseStatusCodes.ResponseCodeTooManyRequestsAndRefreshCache:
-                        // Parse retry-after header
-                        // Send Messages To Storage
-                        content = HttpPipelineHelper.GetRequestContent(httpMessage.Request.Content);
-                        if (content != null)
-                        {
-                            retryInterval = HttpPipelineHelper.GetRetryInterval(httpMessage.Response);
-                            result = _fileBlobProvider.SaveTelemetry(content, retryInterval);
-                        }
-                        break;
-                    case ResponseStatusCodes.Unauthorized:
-                    case ResponseStatusCodes.Forbidden:
-                    case ResponseStatusCodes.InternalServerError:
-                    case ResponseStatusCodes.BadGateway:
-                    case ResponseStatusCodes.ServiceUnavailable:
-                    case ResponseStatusCodes.GatewayTimeout:
-                        // Send Messages To Storage
-                        content = HttpPipelineHelper.GetRequestContent(httpMessage.Request.Content);
-                        if (content != null)
-                        {
-                            result = _fileBlobProvider.SaveTelemetry(content, HttpPipelineHelper.MinimumRetryInterval);
-                        }
-                        break;
-                    default:
-                        // Log Non-Retriable Status and don't retry or store;
-                        break;
-                }
-            }
-
-            if (result == ExportResult.Success)
-            {
-                AzureMonitorExporterEventSource.Log.WriteWarning("FailedToTransmit", $"Error code is {statusCode}: Telemetry is stored offline for retry");
-            }
-            else
-            {
-                AzureMonitorExporterEventSource.Log.WriteWarning("FailedToTransmit", $"Error code is {statusCode}: Telemetry is dropped");
-            }
-
-            return result;
-        }
-
-        private void HandleFailures(HttpMessage httpMessage, PersistentBlob blob)
-        {
-            int retryInterval;
-            int statusCode = 0;
-            bool shouldRetry = true;
-
-            if (!httpMessage.HasResponse)
-            {
-                // HttpRequestException
-                // Extend lease time so that it is not picked again for retry.
-                blob.TryLease(HttpPipelineHelper.MinimumRetryInterval);
-            }
-            else
-            {
-                statusCode = httpMessage.Response.Status;
-                switch (statusCode)
-                {
-                    case ResponseStatusCodes.PartialSuccess:
-                        // Parse retry-after header
-                        // Send Failed Messages To Storage
-                        // Delete existing file
-                        TrackResponse trackResponse = HttpPipelineHelper.GetTrackResponse(httpMessage);
-                        var content = HttpPipelineHelper.GetPartialContentForRetry(trackResponse, httpMessage.Request.Content);
-                        if (content != null)
-                        {
-                            retryInterval = HttpPipelineHelper.GetRetryInterval(httpMessage.Response);
-                            blob.TryDelete();
-                            _fileBlobProvider?.SaveTelemetry(content, retryInterval);
-                        }
-                        break;
-                    case ResponseStatusCodes.RequestTimeout:
-                    case ResponseStatusCodes.ResponseCodeTooManyRequests:
-                    case ResponseStatusCodes.ResponseCodeTooManyRequestsAndRefreshCache:
-                        // Extend lease time using retry interval period
-                        // so that it is not picked up again before that.
-                        retryInterval = HttpPipelineHelper.GetRetryInterval(httpMessage.Response);
-                        blob.TryLease(retryInterval);
-                        break;
-                    case ResponseStatusCodes.Unauthorized:
-                    case ResponseStatusCodes.Forbidden:
-                    case ResponseStatusCodes.InternalServerError:
-                    case ResponseStatusCodes.BadGateway:
-                    case ResponseStatusCodes.ServiceUnavailable:
-                    case ResponseStatusCodes.GatewayTimeout:
-                        // Extend lease time so that it is not picked up again
-                        blob.TryLease(HttpPipelineHelper.MinimumRetryInterval);
-                        break;
-                    default:
-                        // Log Non-Retriable Status and don't retry or store;
-                        // File will be cleared by maintenance job
-                        shouldRetry = false;
-                        break;
-                }
-            }
-
-            if (shouldRetry)
-            {
-                AzureMonitorExporterEventSource.Log.WriteWarning("FailedToTransmitFromStorage", $"Error code is {statusCode}: Telemetry is stored offline for retry");
-            }
-            else
-            {
-                AzureMonitorExporterEventSource.Log.WriteWarning("FailedToTransmitFromStorage", $"Error code is {statusCode}: Telemetry is dropped");
-            }
-        }
-
         protected virtual void Dispose(bool disposing)
         {
             if (!_disposed)
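
The net effect in the transmitter is that the private IsSuccess/HandleFailures helpers and the TransmitFromStorage loop that drove retries from disk are removed, and TrackAsync now delegates to HttpPipelineHelper, passing the blob provider explicitly. The helper members themselves are not part of this commit; the following is only a rough sketch of the shapes implied by the new call sites, with the provider type name (PersistentBlobProvider) and the method bodies assumed rather than taken from the repository.

        // Sketch only: shapes implied by the new call sites in TrackAsync.
        internal static class HttpPipelineHelper // declaration assumed
        {
            internal static ExportResult IsSuccess(HttpMessage httpMessage)
            {
                // Same check as the removed private IsSuccess.
                if (httpMessage.HasResponse && httpMessage.Response.Status == ResponseStatusCodes.Success)
                {
                    return ExportResult.Success;
                }

                return ExportResult.Failure;
            }

            internal static ExportResult HandleFailures(HttpMessage httpMessage, PersistentBlobProvider fileBlobProvider)
            {
                // (assumed, simplified) mirrors the removed instance HandleFailures: persist the
                // request content (or the partial-success subset) with a retry interval derived
                // from the response, and drop telemetry for non-retriable status codes.
                var content = GetRequestContent(httpMessage.Request.Content);
                if (content == null)
                {
                    return ExportResult.Failure;
                }

                return fileBlobProvider.SaveTelemetry(content, MinimumRetryInterval);
            }
        }

Retrying blobs that were previously persisted to storage presumably moves to a dedicated component rather than being kicked off from each exporter's Export path, but that component is outside this diff.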
