1

UPDATES: I have updated the function to provide wide timeframe to avoid expiration. I also am including screenshot of my account settings which I updated to enable public access. Also including current console results. Same result on the page that the image won't open.

async function sasKey(mediaLoc) {
    try {
    console.log('starting sasKey in bloboperations mediaLoc:', mediaLoc)
    console.log('process.env.BLOB_CONTAINER is: ', process.env.BLOB_CONTAINER)
    var storage = require("@azure/storage-blob")
    const accountname = process.env.BLOB_ACCT_NAME;
    console.log('accountname is: ', accountname)
    const key = process.env.BLOB_KEY;
    const cerds = new storage.StorageSharedKeyCredential(accountname,key);
    const blobServiceClient = new storage.BlobServiceClient(`https://${accountname}.blob.core.windows.net`,cerds);
    const containerName=process.env.BLOB_CONTAINER;
    const client =blobServiceClient.getContainerClient(containerName)
    const blobName=mediaLoc;
    const blobClient = client.getBlobClient(blobName);
    const checkDate = new Date();
    const startDate =  new Date(checkDate.valueOf() - 1200000);
    const endDate = new Date(checkDate.valueOf() + 3600000);
    console.log('checkDate, startDate, endDate: ', checkDate, startDate, endDate)
    const blobSAS = storage.generateBlobSASQueryParameters({
      containerName, 
      blobName, 
      permissions: storage.BlobSASPermissions.parse("racwd"), 
      startsOn: startDate,
      expiresOn: endDate
    },
    cerds 
  ).toString();
      console.log( 'blobSAS is: ', blobSAS)
 //   const sasUrl= blobClient.url+"?"+encodeURIComponent(blobSAS);
    const sasUrl = 'https://' + accountname + '/' + containerName + '/' + blobName + '?' + blobSAS
    console.log('sasURL is: ', sasUrl);

    return sasUrl
    }
    catch (error) {
        console.log(error);
    }
}

enter image description here

enter image description here

I am trying to get a valid SAS URI from my Azure storage blob container via a node.js function. I'm using the @azure/storage-blob library. I am getting a return from Azure, but the browser is saying it's not authorized. I've quadruple checked that my account and key are correct. And those settings are working to upload the media to the container.

I'm not sure how to even troubleshoot since there aren't any error messages coming back to the node api. The same code returns a URI that works from another (dev) container. However, that container allows public access right now. So it makes sense that you could access the blob from that one, no matter what. Any suggestions on how I can troubleshoot this please?

The error from accessing the blob generated in the console:

5x6gyfbc5eo31fdf38f7fdc51ea1632857020560.png:1 GET https://**********.blob.core.windows.net/prod/5x6gyfbc5eo31fdf38f7fdc51ea1632857020560.png?sv%3D2020-06-12%26st%3D2021-09-28T19%253A23%253A41Z%26se%3D2021-09-28T19%253A25%253A07Z%26sr%3Db%26sp%3Dracwd%26sig%3Du6Naiikn%252B825koPikqRGmiOoKMJZ5L3mfcR%252FTCT3Uyk%253D 409 (Public access is not permitted on this storage account.)

The code that generates the URI:

async function sasKey(mediaLoc) {
try {
var storage = require("@azure/storage-blob")
const accountname = process.env.BLOB_ACCT_NAME;
const key = process.env.BLOB_KEY;
const cerds = new storage.StorageSharedKeyCredential(accountname,key);
const blobServiceClient = new storage.BlobServiceClient(`https://${accountname}.blob.core.windows.net`,cerds);
const containerName=process.env.BLOB_CONTAINER;
const client =blobServiceClient.getContainerClient(containerName)
const blobName=mediaLoc;
const blobClient = client.getBlobClient(blobName);
const blobSAS = storage.generateBlobSASQueryParameters({
  containerName, 
  blobName, 
  permissions: storage.BlobSASPermissions.parse("racwd"), 
  startsOn: new Date(),
  expiresOn: new Date(new Date().valueOf() + 86400)
},
cerds).toString();
const sasUrl= blobClient.url+"?"+encodeURIComponent(blobSAS);
console.log('blobOperations.js returns blobSAS URL as: ', sasUrl);
console.log( 'blobSAS is: ', blobSAS)
return sasUrl
}
catch (error) {
    console.log(error);
}

}

  • The error message **Signature not valid in the specified time frame** indicates that the **date/time/timezone** on your computer is wrong. – John Hanley Sep 28 '21 at 17:51
  • You're getting this error because your sas token has expired. It was valid between `Tue, 28 Sep 2021 17:12:15 GMT` and `Tue, 28 Sep 2021 17:13:42 GMT` but you're using it at `Tue, 28 Sep 2021 17:19:26 GMT` (approximately 6 minutes after expiration. Please regenerate a new token and use it before it expires. – Gaurav Mantri Sep 28 '21 at 17:54
  • Sorry, that was what it brought up later while I was troubleshooting. I just updated the question to show what I'm getting in the browser console when the SAS is first created. It's an access error. My apologies for that. – Athelene Gosnell Sep 28 '21 at 19:26
  • The message "Public access is not permitted on this storage account" indicates that the Account's "Allow Blob public access" setting is Disabled. You can check it in the portal on the account under Settings > Configuration. – Joel Cochran Sep 30 '21 at 17:58
  • @GauravMantri and John Hanley, thank you for your replies. Not sure if you saw my comment. Unfortunately, I originally posted the wrong error message. I updated the problem I'm actually seeing in the question. If you have any additional thoughts, I'd be very appreciate. – Athelene Gosnell Sep 30 '21 at 17:58
  • Joel Cochran, Thank you for the response. I realize public access is disabled. My understanding is that if I enable it, anyone can open it without an SAS token. This blob storage account cannot be public. Am I just not understanding public access and SAS tokens? – Athelene Gosnell Sep 30 '21 at 18:01
  • Just for additional info, my api is able to create and delete blobs without issue. The problem is just creating the SAS key. I adjusted the code to extend the expiration start to 15 mins before and the end time to 24 hours later. – Athelene Gosnell Sep 30 '21 at 18:40
  • @AtheleneGosnell - At the Account level, Public access enabled means it is POSSIBLE to set a child container to Public. It does NOT mean that the contents themselves are public. Public/private is set at the container level and the default value is Private. It appears you need the Account Public access enabled to generate SAS tokens. – Joel Cochran Oct 01 '21 at 13:22
  • @joel-cochran, Thank you for clarifying. I've made the change on the account. Pls see updated question with screenshots for the settings I'm using. I'm still getting the same result. I can add and delete blobs but cannot generate a valid SASkey. FYI, I also updated the times in the function to ensure a wide timeframe before expiration. – Athelene Gosnell Oct 01 '21 at 20:21
  • When generating SAS in the portal, there is a checkbox for "Allow resource type" that must be checked on a per SAS basis. Is there a corollary to that in the Node SDK? – Joel Cochran Oct 01 '21 at 20:40
  • @JoelCochran I noticed that too. The only thing I can find using the SDK is the storage.generateBlobSASQueryParameters() function which doesn't seem to require that. However, the return SASKey does include the query parameter "sr=b" which I believe indicates that the storage resource is a blob. – Athelene Gosnell Oct 02 '21 at 17:50

1 Answers1

1

After much fiddling with this I came up with the following function that is working both in production and dev. I'm not 100% sure which changes made the difference but I think there were three things that were important. First, Joel Cochran's help on understanding how the account needed to be set up was critical. Second, the expiration time might have been a problem as well. As you can see from the correct code below, I've made that quite different. But I think the last piece was the section where I use: storage.generateBlobSASQueryParameters().

async function sasKey(mediaLoc) {
  if (process.env.SERVER_STATUS === 'Prod') {
    activeContainer = 'prod'
  } else {
    activeContainer = 'dev'
  }
    try {
    var storage = require("@azure/storage-blob")
    const accountname = process.env.BLOB_ACCT_NAME;
    const key = process.env.BLOB_KEY;
    const cerds = new storage.StorageSharedKeyCredential(accountname,key);
    const blobServiceClient = new storage.BlobServiceClient(`https://${accountname}.blob.core.windows.net`,cerds);
    const containerName = activeContainer;
    const client =blobServiceClient.getContainerClient(containerName)
    const blobName=mediaLoc;
    const blobClient = client.getBlobClient(blobName);
    const checkDate = new Date();
    const startDate =  new Date(checkDate.valueOf() - 5*60*1000);
    const endDate = new Date(checkDate.valueOf() + + 24*60*60*1000);

     // Generate service level SAS for a blob
      const blobSAS = storage.generateBlobSASQueryParameters({
          containerName, // Required
          mediaLoc, // Required
          permissions: storage.BlobSASPermissions.parse("racwd"), // Required
          startsOn: startDate, // Required. Date type
          expiresOn: endDate // Required. Date type
        },
        cerds // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`
      ).toString();
      
     sasUrl = `https://${accountname}.blob.core.windows.net/${containerName}/${mediaLoc}?${blobSAS.toString()}`;
          return sasUrl
          }
          catch (error) {
              console.log(error);
          }
} 

NOTE: I used the if statement to set the container to make my life easier as I was testing. You would certainly use environment variables in production.

I thank everyone so much for the support. This is an amazing community.