I have an application registered in Azure, and it has the Storage Account Contributor role. I am trying to copy content from one storage account to another in the same subscription by using a SAS token. Below is a code snippet for testing purposes. This code works perfectly fine as a standalone Node.js script, but it fails with authorization error code 403 when deployed in a minikube pod. Any suggestions/thoughts would be appreciated.
const { ClientSecretCredential } = require("@azure/identity");
const { BlobServiceClient, ContainerSASPermissions, generateBlobSASQueryParameters } = require("@azure/storage-blob");

module.exports = function () {
    /*
    This function will receive an input that conforms to the schema specified in
    activity.json. The output is a callback function that follows node's error-first
    convention. The first parameter is either null or an Error object. The second
    parameter of the output callback should be a JSON object that conforms to the
    schema specified in activity.json.
    */
    this.execute = async function (input, output) {
        try {
            if (input.connection) {
                const containerName = input.sourcecontainer.trim();
                const credential = new ClientSecretCredential(input.connection.tenantId, input.connection.clientid, input.connection.clientsecret);

                // Enter your storage account name
                const account = input.sourceaccount.trim();
                const accounturl = 'https://' + account + '.blob.core.windows.net';
                const blobServiceClient = new BlobServiceClient(accounturl, credential);

                // Request a user delegation key valid for 24 hours
                const keyStart = new Date();
                const keyExpiry = new Date(new Date().valueOf() + 86400 * 1000);
                const userDelegationKey = await blobServiceClient.getUserDelegationKey(keyStart, keyExpiry);
                console.log(userDelegationKey);

                // Build a user-delegation SAS URL for the source blob
                const containerSAS = generateBlobSASQueryParameters({
                    containerName,
                    permissions: ContainerSASPermissions.parse("racwdl"),
                    startsOn: new Date(),
                    expiresOn: new Date(new Date().valueOf() + 86400 * 1000),
                }, userDelegationKey, account).toString();
                const target = '/' + containerName + '/' + input.sourcefolder.trim() + '/' + input.sourcefilename.trim();
                const sastoken = accounturl + target + '?' + containerSAS;
                console.log(sastoken);

                let outputData = {
                    "sourcesas": sastoken
                };

                // Testing second action execution from the same action, for testing purposes.
                const containerName2 = 'targettestcontainer';
                const credential2 = new ClientSecretCredential(input.connection.tenantId, input.connection.clientid, input.connection.clientsecret);
                const blobServiceClient2 = new BlobServiceClient(accounturl, credential2);
                const destContainer = blobServiceClient2.getContainerClient(containerName2);
                const destBlob = destContainer.getBlobClient('testfolder01' + '/' + 'test-code.pdf');

                // Start a server-side copy from the source SAS URL and wait for it to finish
                const copyPoller = await destBlob.beginCopyFromURL(outputData.sourcesas);
                const result = await copyPoller.pollUntilDone();
                return output(null, outputData);
            }
        } catch (e) {
            console.log(e);
            return output(e, null);
        }
    }
}
Thank you EmmaZhu-MSFT for providing the solution. A similar issue was also raised on GitHub. Posting this as an answer to help other community members.
From the service-side logs, it seems there is a time skew between the Azure Storage service and the client: the start time used in the source SAS token was later than the server's time, so the token was not yet valid when the copy request arrived.
We'd suggest not setting a start time in the SAS token, to avoid this kind of failure caused by time skew.
Reference: https://github.com/Azure/azure-sdk-for-js/issues/21977
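For anyone hitting the same 403, here is a minimal sketch of the corrected SAS generation, assuming the same variables (containerName, userDelegationKey, account) as in the snippet above. The only change is that startsOn is omitted, so the token is valid as soon as it is issued, regardless of any clock skew between the pod and the storage service:

// Sketch: same SAS generation as in the question, but without startsOn.
// Omitting startsOn makes the token valid immediately on issue, so a
// client clock running ahead of the service no longer produces a 403.
const containerSAS = generateBlobSASQueryParameters({
    containerName,
    permissions: ContainerSASPermissions.parse("racwdl"),
    expiresOn: new Date(new Date().valueOf() + 86400 * 1000),
}, userDelegationKey, account).toString();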