I am looking to use ffmpeg (via ffmpeg.wasm) inside a serverless Vue SPA that currently uses GIF.js. The app creates HTMLCanvasElements dynamically and adds them as frames to a GIF object, which is then made available for download; with ffmpeg there would additionally be the option of transcoding the result to another format, say .gif. Ideally I want to reproduce this flow with ffmpeg. My question is really how to integrate ffmpeg in my context, and how to replace my `gif.addFrame(composedCnv, { delay: delayInput });` call with an `await ffmpeg.run( ... );` command?

async createGIFHandler ( layer , addAllTitles , quality , delay , range ) {
    ...
    const tempGIF = new GIF({
        workers: 4,
        quality: quality,
        height: this.mapHeight + this.infoCanvas.height,
        width: this.mapWidth,
        workerScript: 'gif.worker.js'
    });
    let progressCounter = 1;
    for ( let i = range[0] ; i < range[1] ; i++, progressCounter++ ) {
        this.setDateTime( driver , driverDA[i] );
        // Sync every other visible layer to the driver layer's time step.
        for ( let j = 0 ; j < visibleLayers.length ; j++ ) {
            if ( visibleLayers[j].get('layerName') !== layer.Name ) {
                const tempDA = visibleLayers[j].get('layerDateArray');
                for ( let k = 0 ; k < tempDA.length ; k++ ) {
                    if ( driverDA[i].getTime() === tempDA[k].getTime() ) {
                        this.setDateTime( visibleLayers[j] , tempDA[k] );
                    }
                }
            }
        }
        // Wait for the map to finish rendering before capturing this frame.
        await new Promise(resolve => this.map.once('rendercomplete', resolve));
        await this.composeCanvas( tempGIF , driverDA[i] , visibleLayers , delay , widths );
        this.$store.dispatch('Layers/setGIFPercent', Math.round((progressCounter / gifLength) * 100));
    }

    tempGIF.on('finished', (blob) => {
        const tempURL = URL.createObjectURL( blob );
        this.$store.dispatch( 'Layers/setGIFURL' , tempURL );
        console.log('GIF Finished');
    });
    tempGIF.render();
},
async composeCanvas( gif , timeStep , visibleLayers , delayInput , widths ) {
    const mapCnv = this.getMapCanvas();
    await this.updateInfoCanvas( timeStep , widths );
    const composedCnv = await this.stitchCanvases( mapCnv , visibleLayers.length );
    // addFrame is synchronous, so no Promise wrapper is needed here.
    gif.addFrame( composedCnv, { delay: delayInput } );
},
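
For concreteness, here is the shape I imagine the replacement taking, based on my reading of the @ffmpeg/ffmpeg 0.x API (createFFmpeg, fetchFile, FS, run). This is an untested sketch: encodeFrames and canvasToBlob are placeholder names of mine, and the framerate mapping and MP4 output are arbitrary choices.

import { createFFmpeg, fetchFile } from '@ffmpeg/ffmpeg';

const ffmpeg = createFFmpeg({ log: true });

// Convert a canvas to a PNG blob so it can be written into ffmpeg's
// in-memory filesystem (MEMFS).
function canvasToBlob (canvas) {
    return new Promise(resolve => canvas.toBlob(resolve, 'image/png'));
}

// frames: array of HTMLCanvasElement, delay: ms per frame.
async function encodeFrames (frames, delay) {
    if (!ffmpeg.isLoaded()) {
        await ffmpeg.load();
    }
    // Write each composed canvas as a numbered PNG; this would be the
    // counterpart of gif.addFrame(composedCnv, { delay: delayInput }).
    for (let i = 0; i < frames.length; i++) {
        const blob = await canvasToBlob(frames[i]);
        const name = `frame${String(i).padStart(4, '0')}.png`;
        ffmpeg.FS('writeFile', name, await fetchFile(blob));
    }
    // A per-frame delay in ms corresponds to an input framerate of 1000 / delay fps.
    // Note: yuv420p requires even dimensions, so odd-sized canvases may need padding.
    await ffmpeg.run(
        '-framerate', String(1000 / delay),
        '-i', 'frame%04d.png',
        '-pix_fmt', 'yuv420p',
        'out.mp4'
    );
    const data = ffmpeg.FS('readFile', 'out.mp4');
    return URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));
}

If I read the docs correctly, ffmpeg.wasm has no incremental addFrame equivalent, so every frame has to sit in MEMFS before a single ffmpeg.run call. Is that the right approach, and can the per-frame FS('writeFile', ...) happen inside composeCanvas where gif.addFrame is now?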

  • You can try running a child process with ffmpeg command like `ffmpeg -f rawvideo -pix_fmt rgba -s wxh -r fps -i - output.mp4`, and write `ctx.getImageData(0, 0, w, h).data` to `stdin` pipe of the ffmpeg process, where `ctx` is a `CanvasRenderingContext2D` object. You need to define the framerate `fps` and width `w` and height `h` before starting the ffmpeg process. When done, closing the `stdin` pipe will terminate FFmpeg (after finalizing the output file). – kesh Apr 11 '22 at 20:33
  • Thank you for your time, but how do you "write ... to `stdin`" in the context of a client-side ffmpeg instance? Could you please show just the constructor of the ffmpeg instance and, given a `ctx`, how to "write" it to the running ffmpeg process? – Curious Apr 11 '22 at 20:51
  • I'm many years removed from js dev, but a quick search got me [this](https://blog.scottlogic.com/2020/11/23/ffmpeg-webassembly.html) – kesh Apr 11 '22 at 21:06
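
Update: to make the rawvideo suggestion from the comments concrete in the browser, here is my rough, untested adaptation for ffmpeg.wasm 0.x. As far as I can tell, ffmpeg.wasm has no stdin pipe to stream into, so this sketch concatenates the raw RGBA pixels of all canvases into a single MEMFS file up front; encodeRawRGBA is a placeholder name and the MP4 output is an arbitrary choice.

import { createFFmpeg } from '@ffmpeg/ffmpeg';

const ffmpeg = createFFmpeg({ log: true });

// Encode canvases by concatenating their raw RGBA pixels into one file,
// mirroring `ffmpeg -f rawvideo -pix_fmt rgba -s wxh -r fps -i - output.mp4`
// from the comment above, with the MEMFS file standing in for stdin.
async function encodeRawRGBA (canvases, fps) {
    const w = canvases[0].width;
    const h = canvases[0].height;
    const frameSize = w * h * 4; // 4 bytes per RGBA pixel
    const raw = new Uint8Array(frameSize * canvases.length);
    canvases.forEach((cnv, i) => {
        const ctx = cnv.getContext('2d');
        raw.set(ctx.getImageData(0, 0, w, h).data, i * frameSize);
    });
    if (!ffmpeg.isLoaded()) {
        await ffmpeg.load();
    }
    ffmpeg.FS('writeFile', 'input.raw', raw);
    await ffmpeg.run(
        '-f', 'rawvideo',
        '-pix_fmt', 'rgba',
        '-s', `${w}x${h}`,
        '-r', String(fps),
        '-i', 'input.raw',
        '-pix_fmt', 'yuv420p',
        'out.mp4'
    );
    const data = ffmpeg.FS('readFile', 'out.mp4');
    return URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));
}

The obvious downside is memory: every decoded frame lives in the Uint8Array at once, which is why I would prefer a per-frame addFrame-style API if one exists.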

0 Answers