I'm struggling to debug a Next.js API route that works in development (via localhost) but silently fails in production.

In the code below, the two console.log statements never print, so I suspect the textToSpeech call isn't executing correctly, possibly not finishing in time?

I'm not sure how to fix this; happy to debug as directed to resolve it!

const faunadb = require('faunadb')
const secret = process.env.FAUNADB_SECRET_KEY
const q = faunadb.query
const client = new faunadb.Client({ secret })
const TextToSpeechV1 = require('ibm-watson/text-to-speech/v1')
const { IamAuthenticator } = require('ibm-watson/auth')
const AWS = require('aws-sdk')
const { randomUUID } = require('crypto')
import { requireAuth } from '@clerk/nextjs/api'

module.exports = requireAuth(async (req, res) => {
  try {
    const s3 = new AWS.S3({
      accessKeyId: process.env.AWS_ACCESS_KEY,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
    })

    const textToSpeech = new TextToSpeechV1({
      authenticator: new IamAuthenticator({
        apikey: process.env.IBM_API_KEY
      }),
      serviceUrl: process.env.IBM_SERVICE_URL
    })

    const uuid = randomUUID()

    const { echoTitle, chapterTitle, chapterText } = req.body

    const synthesizeParams = {
      text: chapterText,
      accept: 'audio/mp3',
      voice: 'en-US_KevinV3Voice'
    }

    textToSpeech
      .synthesize(synthesizeParams)
      .then(buffer => {
        const s3Params = {
          Bucket: 'waveforms/audioform',
          Key: `${uuid}.mp3`,
          Body: buffer.result,
          ContentType: 'audio/mp3',
          ACL: 'public-read'
        }

        console.log(buffer.result)
        console.log(s3Params)

        s3.upload(s3Params, function (s3Err, data) {
          if (s3Err) throw s3Err
          console.log(`File uploaded successfully at ${data.Location}`)
        })
      })
      .catch(err => {
        console.log('error:', err)
      })

    const dbs = await client.query(
      q.Create(q.Collection('audioform'), {
        data: {
          title: echoTitle,
          published: 2022,
          leadAuthor: 'winter',
          user: req.session.userId,
          authors: 1,
          playTime: 83,
          chapters: 1,
          gpt3Description: '',
          likes: 20,
          image:
            'https://waveforms.s3.us-east-2.amazonaws.com/images/Mars.jpeg',
          trackURL: `https://waveforms.s3.us-east-2.amazonaws.com/audioform/${uuid}.mp3`,
          albumTracks: [
            {
              title: chapterTitle,
              text: chapterText,
              trackURL: `https://waveforms.s3.us-east-2.amazonaws.com/audioform/${uuid}.mp3`
            }
          ]
        }
      })
    )
    res.status(200).json(dbs.data)
  } catch (e) {
    res.status(500).json({ error: e.message })
  }
})
WΔ_
  • You don't appear to be awaiting the `textToSpeech.synthesize()` result so the code goes on to the `await client.query()` and then returns a response. – jarmod Mar 07 '22 at 18:06
  • @jarmod what should the code look like? Does it just need to be: `await textToSpeech.synthesize()`? Is that the only necessary change? – WΔ_ Mar 07 '22 at 18:50
  • @jarmod I added `await`, but it's the same issue, no upload is registered in the S3 bucket. – WΔ_ Mar 07 '22 at 18:59
  • Something like `const buffer = await textToSpeech.synthesize(synthesizeParams);` and then you can use buffer in s3Params, and then you can `await s3.upload(s3Params).promise()`. – jarmod Mar 07 '22 at 19:08
  • I'm not entirely sure what that would look like - I'm getting a type error for `await s3...` – WΔ_ Mar 07 '22 at 19:27

1 Answer


Replace the async fragments with something like this, assuming they are meant to execute sequentially.

try {
  // code removed here for clarity
  const buffer = await textToSpeech.synthesize(synthesizeParams);

  const s3Params = {
    Bucket: 'waveforms/audioform',
    Key: `${uuid}.mp3`,
    Body: buffer.result,
    ContentType: 'audio/mp3',
    ACL: 'public-read'
  }

  await s3.upload(s3Params).promise();

  const dbs = await client.query(...);

  res.status(200).json(dbs.data);
} catch (e) {
  res.status(500).json({ error: e.message });
}
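
With each step awaited, an error in `synthesize` or the S3 upload surfaces before the handler returns, so it reaches the surrounding try/catch and the 500 response. If the production failure still looks silent, logging the error before responding (a small sketch of the same catch block; the log text is just an illustration) makes the cause visible in the serverless function logs:

} catch (e) {
  // Surface the underlying error in the platform's function logs,
  // then return the message to the caller as before.
  console.error('API route failed:', e)
  res.status(500).json({ error: e.message })
}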
jarmod
  • It works in dev. In production I'm getting the following error: `The request signature we calculated does not match the signature you provided. Check your key and signing method.` I tried rotating the keys, but it didn't help :/ – WΔ_ Mar 07 '22 at 20:12
  • Ensure that your client machine is time-synced. Also, try and upload a file from the same client machine using the awscli to see if it fails in the same way. – jarmod Mar 07 '22 at 20:15
  • I'm using Vercel for the build - I don't know if there's a command that I should run. I'll try the awscli – WΔ_ Mar 07 '22 at 20:19
  • I was able to upload via the `awscli` fine. I'll try to pursue this new bug. – WΔ_ Mar 07 '22 at 20:37
  • Some ideas on causes of invalid signature [here](https://stackoverflow.com/questions/30518899/amazon-s3-how-to-fix-the-request-signature-we-calculated-does-not-match-the-s) and [here](https://docs.aws.amazon.com/general/latest/gr/signature-v4-troubleshooting.html). – jarmod Mar 07 '22 at 20:53
  • I stored the keys in Vercel as environment variables... and it works! – WΔ_ Mar 07 '22 at 22:28
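
For reference, the signature error above appears to have come down to the AWS credentials not being configured as environment variables in the Vercel deployment. A guard along these lines (the variable names are taken from the question's code; the check itself is only an illustration) fails fast with an explicit message instead of a misleading signature mismatch:

// Illustrative guard: confirm the secrets the handler depends on exist in the
// deployment environment before any clients are constructed.
const required = [
  'AWS_ACCESS_KEY',
  'AWS_SECRET_ACCESS_KEY',
  'IBM_API_KEY',
  'IBM_SERVICE_URL',
  'FAUNADB_SECRET_KEY'
]
const missing = required.filter(name => !process.env[name])
if (missing.length > 0) {
  throw new Error(`Missing environment variables: ${missing.join(', ')}`)
}

A wrong or whitespace-padded value would still pass this check, so the values stored in the deployment dashboard are worth verifying as well.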