
Commit bfa1f92

[storage] add fromConnectionString to DataLakeServiceClient (Azure#13420)

* add fromConnectionString to DataLakeServiceClient
* fix Azure#13395 readme issue
* changelog
1 parent 39041ef commit bfa1f92

File tree

11 files changed: +243 -5 lines changed


sdk/storage/storage-file-datalake/CHANGELOG.md

Lines changed: 1 addition & 0 deletions
```diff
@@ -5,6 +5,7 @@
 - Fixed a bug where `generateDataLakeSASQueryParameters()` won't correctly set the resource type if `DataLakeSASSignatureValues.permissions` is not specified. Fixed issue [13223](https://github.com/Azure/azure-sdk-for-js/issues/13223).
 - Fixed a compile failure due to "Can't resolve 'crypto'" in Angular. [Issue #13267](https://github.com/Azure/azure-sdk-for-js/issues/13267).
 - The `"Unclosed root tag"` XML parser error is now retriable. [PR #13076](https://github.com/Azure/azure-sdk-for-js/pull/13076).
+- Added `fromConnectionString` to `DataLakeServiceClient` to support construction from a connection string. Fixed bug [13396](https://github.com/Azure/azure-sdk-for-js/issues/13396).
 
 ## 12.3.0 (2021-01-12)
```

sdk/storage/storage-file-datalake/recordings/browsers/datalakeserviceclient/recording_can_be_created_from_sasconnstring.json

Lines changed: 28 additions & 0 deletions
Some generated files are not rendered by default.

sdk/storage/storage-file-datalake/recordings/node/datalakeserviceclient/recording_can_be_created_from_accountconnstring.js

Lines changed: 29 additions & 0 deletions
Some generated files are not rendered by default.

sdk/storage/storage-file-datalake/recordings/node/datalakeserviceclient/recording_can_be_created_from_sasconnstring.js

Lines changed: 29 additions & 0 deletions
Some generated files are not rendered by default.

sdk/storage/storage-file-datalake/review/storage-file-datalake.api.md

Lines changed: 1 addition & 0 deletions
```diff
@@ -323,6 +323,7 @@ export interface DataLakeSASSignatureValues {
 export class DataLakeServiceClient extends StorageClient {
     constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions);
     constructor(url: string, pipeline: Pipeline);
+    static fromConnectionString(connectionString: string, options?: StoragePipelineOptions): DataLakeServiceClient;
     generateAccountSasUrl(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string;
     getFileSystemClient(fileSystemName: string): DataLakeFileSystemClient;
     getUserDelegationKey(startsOn: Date, expiresOn: Date, options?: ServiceGetUserDelegationKeyOptions): Promise<ServiceGetUserDelegationKeyResponse>;
```
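The api.md entry above captures the whole public surface change: one new static factory beside the existing constructors. A minimal sketch contrasting the two (account name, key, and endpoint URL are placeholder values, not anything from this commit):

```ts
import {
  DataLakeServiceClient,
  StorageSharedKeyCredential
} from "@azure/storage-file-datalake";

// Existing path: build the DFS endpoint URL and credential yourself.
const byUrl = new DataLakeServiceClient(
  "https://myaccount.dfs.core.windows.net",
  new StorageSharedKeyCredential("myaccount", "accountKey")
);

// New path: hand over the whole connection string and let the client
// parse the credential and derive the DFS endpoint for you.
const byConnString = DataLakeServiceClient.fromConnectionString(
  "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net"
);
```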

sdk/storage/storage-file-datalake/src/DataLakeServiceClient.ts

Lines changed: 48 additions & 3 deletions
```diff
@@ -3,7 +3,7 @@
 
 import "@azure/core-paging";
 
-import { TokenCredential } from "@azure/core-http";
+import { getDefaultProxySettings, isNode, TokenCredential } from "@azure/core-http";
 import { PagedAsyncIterableIterator } from "@azure/core-paging";
 import { BlobServiceClient } from "@azure/storage-blob";
 
@@ -18,9 +18,13 @@ import {
 } from "./models";
 import { Pipeline, StoragePipelineOptions, newPipeline } from "./Pipeline";
 import { StorageClient } from "./StorageClient";
-import { appendToURLPath, appendToURLQuery } from "./utils/utils.common";
+import {
+  appendToURLPath,
+  appendToURLQuery,
+  extractConnectionStringParts
+} from "./utils/utils.common";
 import { createSpan } from "./utils/tracing";
-import { toFileSystemPagedAsyncIterableIterator } from "./transforms";
+import { toDfsEndpointUrl, toFileSystemPagedAsyncIterableIterator } from "./transforms";
 import { ServiceGetUserDelegationKeyOptions, ServiceGetUserDelegationKeyResponse } from "./models";
 import { CanonicalCode } from "@opentelemetry/api";
 import { AccountSASPermissions } from "./sas/AccountSASPermissions";
@@ -48,6 +52,47 @@ export class DataLakeServiceClient extends StorageClient {
    */
   private blobServiceClient: BlobServiceClient;
 
+  /**
+   *
+   * Creates an instance of DataLakeServiceClient from a connection string.
+   *
+   * @param {string} connectionString Account connection string or a SAS connection string of an Azure storage account.
+   *                                  [ Note - Account connection string can only be used in NODE.JS runtime. ]
+   *                                  Account connection string example -
+   *                                  `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`
+   *                                  SAS connection string example -
+   *                                  `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`
+   * @param {StoragePipelineOptions} [options] Optional. Options to configure the HTTP pipeline.
+   * @memberof DataLakeServiceClient
+   */
+  public static fromConnectionString(connectionString: string, options?: StoragePipelineOptions) {
+    options = options || {};
+    const extractedCreds = extractConnectionStringParts(connectionString);
+    if (extractedCreds.kind === "AccountConnString") {
+      if (isNode) {
+        const sharedKeyCredential = new StorageSharedKeyCredential(
+          extractedCreds.accountName!,
+          extractedCreds.accountKey
+        );
+        options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);
+        const pipeline = newPipeline(sharedKeyCredential, options);
+        return new DataLakeServiceClient(toDfsEndpointUrl(extractedCreds.url), pipeline);
+      } else {
+        throw new Error("Account connection string is only supported in Node.js environment");
+      }
+    } else if (extractedCreds.kind === "SASConnString") {
+      const pipeline = newPipeline(new AnonymousCredential(), options);
+      return new DataLakeServiceClient(
+        toDfsEndpointUrl(extractedCreds.url) + "?" + extractedCreds.accountSas,
+        pipeline
+      );
+    } else {
+      throw new Error(
+        "Connection string must be either an Account connection string or a SAS connection string"
+      );
+    }
+  }
+
   /**
    * Creates an instance of DataLakeServiceClient from url.
    *
```
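Account connection strings embed a shared key, which the implementation above only accepts under Node.js (the `isNode` check); SAS connection strings work in browsers as well. A minimal sketch of both branches, with placeholder account name and SAS token:

```ts
import { DataLakeServiceClient } from "@azure/storage-file-datalake";

// SAS connection string: usable in both Node.js and browsers. The string
// carries the blob endpoint; toDfsEndpointUrl swaps it for the .dfs.
// endpoint before the SAS token is appended.
const sasClient = DataLakeServiceClient.fromConnectionString(
  "BlobEndpoint=https://myaccount.blob.core.windows.net/;SharedAccessSignature=sasString"
);
console.log(sasClient.url); // e.g. https://myaccount.dfs.core.windows.net/?sasString

// Account connection string: Node.js only; the same call throws in a browser.
const keyClient = DataLakeServiceClient.fromConnectionString(
  "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net"
);
```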
Lines changed: 35 additions & 0 deletions
```diff
@@ -0,0 +1,35 @@
+import { record, Recorder } from "@azure/test-utils-recorder";
+import * as assert from "assert";
+import * as dotenv from "dotenv";
+
+import { DataLakeServiceClient } from "../../src";
+import { recorderEnvSetup, getConnectionStringFromEnvironment } from "../utils";
+
+dotenv.config();
+
+describe("DataLakeServiceClient", () => {
+  let recorder: Recorder;
+
+  beforeEach(async function() {
+    recorder = record(this, recorderEnvSetup);
+  });
+
+  afterEach(async function() {
+    await recorder.stop();
+  });
+
+  it("can be created from AccountConnString", async () => {
+    const newClient = DataLakeServiceClient.fromConnectionString(
+      getConnectionStringFromEnvironment(),
+      {
+        retryOptions: {
+          maxTries: 1
+        }
+      }
+    );
+
+    const listIter = newClient.listFileSystems();
+    await listIter.next();
+    assert.ok(newClient.url.includes("dfs"));
+  });
+});
```

sdk/storage/storage-file-datalake/test/serviceclient.spec.ts

Lines changed: 21 additions & 1 deletion
```diff
@@ -3,7 +3,12 @@ import * as assert from "assert";
 import * as dotenv from "dotenv";
 
 import { DataLakeServiceClient, ServiceListFileSystemsSegmentResponse } from "../src";
-import { getDataLakeServiceClient, getTokenDataLakeServiceClient, recorderEnvSetup } from "./utils";
+import {
+  getDataLakeServiceClient,
+  getSASConnectionStringFromEnvironment,
+  getTokenDataLakeServiceClient,
+  recorderEnvSetup
+} from "./utils";
 
 dotenv.config();
 
@@ -346,4 +351,19 @@ describe("DataLakeServiceClient", () => {
     assert.notDeepStrictEqual(response.signedObjectId, undefined);
     assert.notDeepStrictEqual(response.signedExpiresOn, undefined);
   });
+
+  it("can be created from SASConnString", async () => {
+    const newClient = DataLakeServiceClient.fromConnectionString(
+      getSASConnectionStringFromEnvironment(),
+      {
+        retryOptions: {
+          maxTries: 1
+        }
+      }
+    );
+
+    const listIter = newClient.listFileSystems();
+    await listIter.next();
+    assert.ok(newClient.url.includes("dfs"));
+  });
 });
```

sdk/storage/storage-file-datalake/test/utils/index.browser.ts

Lines changed: 5 additions & 0 deletions
```diff
@@ -171,3 +171,8 @@ export function getBrowserFile(name: string, size: number): File {
   file.name = name;
   return file;
 }
+
+export function getSASConnectionStringFromEnvironment(): string {
+  const env = (window as any).__env__;
+  return `BlobEndpoint=https://${env.DFS_ACCOUNT_NAME}.blob.core.windows.net/;QueueEndpoint=https://${env.DFS_ACCOUNT_NAME}.queue.core.windows.net/;FileEndpoint=https://${env.DFS_ACCOUNT_NAME}.file.core.windows.net/;TableEndpoint=https://${env.DFS_ACCOUNT_NAME}.table.core.windows.net/;SharedAccessSignature=${env.DFS_ACCOUNT_SAS}`;
+}
```

sdk/storage/storage-file-datalake/test/utils/index.ts

Lines changed: 45 additions & 0 deletions
```diff
@@ -11,10 +11,15 @@ import { DataLakeServiceClient } from "../../src/DataLakeServiceClient";
 import { newPipeline, StoragePipelineOptions } from "../../src/Pipeline";
 import { getUniqueName, SimpleTokenCredential } from "./testutils.common";
 import {
+  AccountSASPermissions,
+  AccountSASResourceTypes,
+  AccountSASServices,
   DataLakeFileSystemClient,
   DataLakeSASSignatureValues,
+  generateAccountSASQueryParameters,
   generateDataLakeSASQueryParameters
 } from "../../src";
+import { extractConnectionStringParts } from "../../src/utils/utils.common";
 
 dotenv.config();
 
@@ -240,3 +245,43 @@ export async function createRandomLocalFile(
     ws.on("error", reject);
   });
 }
+
+export function getConnectionStringFromEnvironment(accountType: string = "DFS_"): string {
+  const connectionStringEnvVar = `${accountType}STORAGE_CONNECTION_STRING`;
+  const connectionString = process.env[connectionStringEnvVar];
+
+  if (!connectionString) {
+    throw new Error(`${connectionStringEnvVar} environment variable not specified.`);
+  }
+
+  return connectionString;
+}
+
+export function getSASConnectionStringFromEnvironment(): string {
+  const now = new Date();
+  now.setMinutes(now.getMinutes() - 5); // Skip clock skew with server
+
+  const tmr = new Date();
+  tmr.setDate(tmr.getDate() + 1);
+
+  const sharedKeyCredential = getGenericCredential("DFS_");
+
+  const sas = generateAccountSASQueryParameters(
+    {
+      expiresOn: tmr,
+      permissions: AccountSASPermissions.parse("rwdlacup"),
+      resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
+      services: AccountSASServices.parse("btqf").toString()
+    },
+    sharedKeyCredential as StorageSharedKeyCredential
+  ).toString();
+
+  const blobEndpoint = extractConnectionStringParts(getConnectionStringFromEnvironment()).url;
+  return `BlobEndpoint=${blobEndpoint}/;QueueEndpoint=${blobEndpoint.replace(
+    ".blob.",
+    ".queue."
+  )}/;FileEndpoint=${blobEndpoint.replace(
+    ".blob.",
+    ".file."
+  )}/;TableEndpoint=${blobEndpoint.replace(".blob.", ".table.")}/;SharedAccessSignature=${sas}`;
+}
```
