0

I am working with FFMPEG for Android. At some point after closing a video I need to notify my Java code that it can play the second video, so I have a listener of sorts in the Java code which is invoked by the C code. But when I try to call the Java static method from my C code, it throws an error and crashes after the FindClass() line. How can I solve this? I tried to catch the exception but could not.

my c code:

/*
 * Notify the Java side (FFMPEGPlayer.onFinish) that playback has finished.
 * BUG FIX: log and clear any pending exception (a pending exception makes all
 * further JNI calls illegal) and NULL-check the method lookup before calling.
 */
void onFinish(JNIEnv *pEnv){
    LOGI("onFinish called in C code ");
    // NOTE(review): FindClass called from a natively-created (pthread) thread
    // uses the "system" class loader and cannot see app classes — cache the
    // jclass as a global reference in JNI_OnLoad and use it here instead.
    jclass clazz = (jclass)(*pEnv)->FindClass(pEnv,"com/wiznsystems/android/utils/FFMPEGPlayer");
    if ((*pEnv)->ExceptionCheck(pEnv)) {
      (*pEnv)->ExceptionDescribe(pEnv); // log the ClassNotFoundException detail
      (*pEnv)->ExceptionClear(pEnv);    // clear it so later JNI calls are legal
      LOGI("onFinish exception occured");
      return;
    }

    LOGI("onFinish class Found ");
    // Look up the static no-arg void method FFMPEGPlayer.onFinish()
    jmethodID onFinish = (*pEnv)->GetStaticMethodID(pEnv,clazz, "onFinish", "()V");
    if (onFinish == NULL) {
        (*pEnv)->ExceptionClear(pEnv);
        LOGI("onFinish method not found");
        return;
    }
    LOGI("onFinish method found");
    // Call the static method on the class
    (*pEnv)->CallStaticVoidMethod(pEnv,clazz,onFinish);
    if ((*pEnv)->ExceptionCheck(pEnv)) {
        (*pEnv)->ExceptionDescribe(pEnv);
        (*pEnv)->ExceptionClear(pEnv);
    }
    LOGI("onFinish method calling finished");
}

my Java method looks like this:

public static void onFinish(){
    // Invoked from native code (via JNI CallStaticVoidMethod) when playback ends.
    Log.d("ANURAN","onFinish called");
}

EDIT (reason at the end)

Here is my whole JNI Code:

#include <stdlib.h>
#include <string.h>
#include <pthread.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>

#include <android/bitmap.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>


#define LOG_TAG "android-ffmpeg-tutorial02"
#define LOGI(...) __android_log_print(4, LOG_TAG, __VA_ARGS__);
#define LOGE(...) __android_log_print(6, LOG_TAG, __VA_ARGS__);


JNIEnv* gEnv;
ANativeWindow*      window;
char                *videoFileName;
AVFormatContext     *formatCtx = NULL;
int                 videoStream;
AVCodecContext      *codecCtx = NULL;
AVFrame             *decodedFrame = NULL;
AVFrame             *frameRGBA = NULL;
jobject             bitmap;
void*               buffer;
struct SwsContext   *sws_ctx = NULL;
int                 width;
int                 height;
int                 stop;

/*
 * Notify the Java side (FFMPEGPlayer.onFinish) that playback has finished.
 * BUG FIX: the original used the gEnv cached in JNI_OnLoad — a JNIEnv* is
 * only valid on the thread that obtained it, so using gEnv from the decode
 * thread is undefined behavior.  Use the caller-supplied pEnv, which must
 * belong to the current thread (via GetEnv/AttachCurrentThread).
 */
void onFinish(JNIEnv *pEnv){
    LOGI("onFinish called in C code ");
    // NOTE(review): FindClass from a natively-created thread uses the system
    // class loader and will not find app classes — cache the jclass as a
    // global reference in JNI_OnLoad and use it here instead.
    jclass clazz = (jclass)(*pEnv)->FindClass(pEnv,"com/wiznsystems/android/utils/FFMPEGPlayer");
    if ((*pEnv)->ExceptionCheck(pEnv)) {
      (*pEnv)->ExceptionDescribe(pEnv); // log the ClassNotFoundException detail
      (*pEnv)->ExceptionClear(pEnv);    // clear it so later JNI calls are legal
      LOGI("onFinish exception occured");
      return;
    }

    LOGI("onFinish class Found ");
    // Look up the static no-arg void method FFMPEGPlayer.onFinish()
    jmethodID onFinish = (*pEnv)->GetStaticMethodID(pEnv,clazz, "onFinish", "()V");
    if (onFinish == NULL) {
        (*pEnv)->ExceptionClear(pEnv);
        LOGI("onFinish method not found");
        return;
    }
    LOGI("onFinish method found");
    // Call the static method on the class
    (*pEnv)->CallStaticVoidMethod(pEnv,clazz,onFinish);
    if ((*pEnv)->ExceptionCheck(pEnv)) {
        (*pEnv)->ExceptionDescribe(pEnv);
        (*pEnv)->ExceptionClear(pEnv);
    }
    LOGI("onFinish method calling finished");
}


/*
 * Open the input (file or RTSP stream), locate the first video stream and
 * open its decoder.  Fills the file-scope formatCtx/codecCtx/videoStream/
 * decodedFrame/frameRGBA globals used by the other native functions.
 * Returns 0 on success, -1 on any failure.
 */
jint naInit(JNIEnv *pEnv, jobject pObj, jstring pFileName) {
    AVCodec         *pCodec = NULL;
    int             i;
    AVDictionary    *optionsDict = NULL;
    av_log_set_level(AV_LOG_DEBUG);

    // NOTE(review): this UTF string is never released with
    // ReleaseStringUTFChars; it is kept for the lifetime of playback.
    videoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, pFileName, NULL);
    LOGI("video file name is %s", videoFileName);
    // Register all formats and codecs
    av_register_all();
    LOGI("1");
    // Request RTSP over TCP (ignored for non-RTSP inputs).
    AVDictionary *opts = NULL;
    av_dict_set(&opts, "rtsp_transport", "tcp", 0);
    int openResult = avformat_open_input(&formatCtx, videoFileName, NULL, &opts);
    av_dict_free(&opts); // BUG FIX: dictionary was leaked in the original
    if (openResult != 0) {
        // NOTE(review): this retry uses the exact same options as the first
        // attempt, so it only helps with transient network errors — TODO
        // confirm whether a UDP fallback was intended here.
        LOGI("DEFAULT failed. Trying TCP");
        AVDictionary *retryOpts = NULL;
        av_dict_set(&retryOpts, "rtsp_transport", "tcp", 0);
        int retryResult = avformat_open_input(&formatCtx, videoFileName, NULL, &retryOpts);
        av_dict_free(&retryOpts);
        if (retryResult != 0) {
            return -1; // Couldn't open file
        }
    }
    LOGI("2 %d ", formatCtx->video_codec_id);
    LOGI("3 %d", formatCtx->audio_codec_id);
    // Retrieve stream information
    if (avformat_find_stream_info(formatCtx, NULL) < 0) {
        return -1; // Couldn't find stream information
    }
    LOGI("4 ");
    // Dump information about the input onto the log.
    // BUG FIX: the original also called av_dump_format(..., pFileName, ...),
    // passing a jstring where a const char* is expected (undefined behavior);
    // that duplicate call is removed.
    av_dump_format(formatCtx, 0, videoFileName, 0);
    LOGI("5 ");
    // Find the first video stream
    videoStream = -1;
    for (i = 0; i < formatCtx->nb_streams; i++) {
        if (formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    LOGI("6 %d", videoStream);
    if (videoStream == -1)
        return -1; // Didn't find a video stream
    // Get a pointer to the codec context for the video stream
    codecCtx = formatCtx->streams[videoStream]->codec;
    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(codecCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("Unsupported codec!"); // consistency: stderr is invisible on Android
        return -1; // Codec not found
    }
    LOGI("7 %d", pCodec->id);
    // Open codec
    if (avcodec_open2(codecCtx, pCodec, &optionsDict) < 0)
        return -1; // Could not open codec
    // Allocate video frame
    // BUG FIX: check the allocation before dereferencing it in the log call.
    decodedFrame = av_frame_alloc();
    if (decodedFrame == NULL)
        return -1;
    LOGI("8 %d", decodedFrame->pkt_size);
    // Allocate an AVFrame structure for the RGBA conversion target
    frameRGBA = av_frame_alloc();
    if (frameRGBA == NULL)
        return -1;
    LOGI("9 %d", frameRGBA->pkt_size);
    return 0;
}

/*
 * Create an android.graphics.Bitmap of the given size with config ARGB_8888
 * via JNI.  Returns a local reference to the new Bitmap object.
 */
jobject createBitmap(JNIEnv *pEnv, int pWidth, int pHeight) {
    int i;
    // Get the Bitmap class and the static createBitmap(int,int,Config) factory
    jclass javaBitmapClass = (jclass)(*pEnv)->FindClass(pEnv, "android/graphics/Bitmap");
    jmethodID mid = (*pEnv)->GetStaticMethodID(pEnv, javaBitmapClass, "createBitmap", "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
    // Build the Java string "ARGB_8888" used to look up Bitmap.Config
    // reference: https://forums.oracle.com/thread/1548728
    const wchar_t* configName = L"ARGB_8888";
    int len = wcslen(configName);
    jstring jConfigName;
    if (sizeof(wchar_t) != sizeof(jchar)) {
        // wchar_t is defined as different length than jchar(2 bytes):
        // narrow each character into a temporary jchar buffer.
        jchar* str = (jchar*)malloc((len+1)*sizeof(jchar));
        for (i = 0; i < len; ++i) {
            str[i] = (jchar)configName[i];
        }
        str[len] = 0;
        jConfigName = (*pEnv)->NewString(pEnv, (const jchar*)str, len);
        free(str); // BUG FIX: this buffer was leaked in the original
    } else {
        // wchar_t is defined same length as jchar(2 bytes): use it directly
        jConfigName = (*pEnv)->NewString(pEnv, (const jchar*)configName, len);
    }
    // Bitmap.Config.valueOf("ARGB_8888")
    jclass bitmapConfigClass = (*pEnv)->FindClass(pEnv, "android/graphics/Bitmap$Config");
    jobject javaBitmapConfig = (*pEnv)->CallStaticObjectMethod(pEnv, bitmapConfigClass,
                                                               (*pEnv)->GetStaticMethodID(pEnv, bitmapConfigClass, "valueOf", "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;"), jConfigName);
    //create the bitmap
    return (*pEnv)->CallStaticObjectMethod(pEnv, javaBitmapClass, mid, pWidth, pHeight, javaBitmapConfig);
}

/*
 * Return the decoded video's native resolution as a Java int[2]:
 * [0] = width, [1] = height.  Returns NULL if no codec context is open
 * or the array cannot be allocated.
 */
jintArray naGetVideoRes(JNIEnv *pEnv, jobject pObj) {
    jintArray lRes;
    if (NULL == codecCtx) {
        return NULL;
    }
    lRes = (*pEnv)->NewIntArray(pEnv, 2);
    if (lRes == NULL) {
        // BUG FIX: the original passed the integer 1 where the LOGI macro
        // expects the printf-style format string (undefined behavior).
        LOGI("cannot allocate memory for video size");
        return NULL;
    }
    jint lVideoRes[2];
    lVideoRes[0] = codecCtx->width;
    lVideoRes[1] = codecCtx->height;
    (*pEnv)->SetIntArrayRegion(pEnv, lRes, 0, 2, lVideoRes);
    return lRes;
}

/*
 * Bind (or release) the native window used for rendering.
 * A non-null pSurface attaches the window and configures its buffers as
 * RGBA_8888; a null pSurface releases the previously acquired window.
 */
void naSetSurface(JNIEnv *pEnv, jobject pObj, jobject pSurface) {
    if (pSurface == NULL) {
        // Caller is tearing playback down: drop our window reference.
        ANativeWindow_release(window);
        return;
    }
    // Acquire the ANativeWindow behind the Java Surface.
    window = ANativeWindow_fromSurface(pEnv, pSurface);
    // Let the window pick its own size (0,0) but force the RGBA_8888 format.
    ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBA_8888);
}

/*
 * Prepare rendering for a pWidth x pHeight output: create the backing Bitmap,
 * lock its pixels as the RGBA destination buffer, and build the swscale
 * context that converts decoded frames to RGBA.
 * Returns 0 on success, -1 on failure.
 */
jint naSetup(JNIEnv *pEnv, jobject pObj, int pWidth, int pHeight) {
    // BUG FIX: the original called av_free(buffer) here, but `buffer` points
    // into the Bitmap's pixel memory obtained from AndroidBitmap_lockPixels,
    // not memory allocated by av_malloc — freeing it is undefined behavior.
    width = pWidth;
    height = pHeight;
    //create a bitmap as the buffer for frameRGBA
    bitmap = createBitmap(pEnv, pWidth, pHeight);
    if (AndroidBitmap_lockPixels(pEnv, bitmap, &buffer) < 0)
        return -1;
    //get the scaling context (decoded size/format -> pWidth x pHeight RGBA)
    sws_ctx = sws_getContext (
            codecCtx->width,
            codecCtx->height,
            codecCtx->pix_fmt,
            pWidth,
            pHeight,
            AV_PIX_FMT_RGBA,
            SWS_BILINEAR,
            NULL,
            NULL,
            NULL
    );
    if (sws_ctx == NULL)
        return -1; // BUG FIX: sws_getContext can fail (unsupported formats)
    // Assign appropriate parts of bitmap to image planes in pFrameRGBA
    // Note that pFrameRGBA is an AVFrame, but AVFrame is a superset
    // of AVPicture
    avpicture_fill((AVPicture *)frameRGBA, buffer, AV_PIX_FMT_RGBA,
                   pWidth, pHeight);
    return 0;
}

/*
 * Tear down playback resources after the decode loop ends, then notify the
 * Java layer via onFinish().
 * NOTE(review): this runs on the decode thread (called from decodeAndRender),
 * so the JNIEnv* passed in must belong to that thread — presumably obtained
 * via AttachCurrentThread; verify against the caller.
 */
void finish(JNIEnv *pEnv) {
    //unlock the bitmap
    AndroidBitmap_unlockPixels(pEnv, bitmap);
    // Free the RGB image
    av_free(frameRGBA);
    // Free the YUV frame
    av_free(decodedFrame);
    // Close the codec
    avcodec_close(codecCtx);
    // Close the video file
    avformat_close_input(&formatCtx);

    // Tell the Java side that playback is complete.
    onFinish(pEnv);
}

/*
 * Decode loop: read packets, decode video frames, convert them to RGBA and
 * blit them into the native window until EOF or the `stop` flag is set.
 * Runs on the dedicated decode thread started by naPlay; calls finish()
 * (and thus onFinish()) when the loop exits.
 * NOTE(review): pEnv is forwarded to finish(); since this runs on a pthread,
 * it must be an env attached to this thread — TODO confirm at the call site.
 */
void decodeAndRender(JNIEnv *pEnv) {
    ANativeWindow_Buffer    windowBuffer;
    AVPacket                packet;
    int                     i=0;            // frames rendered so far
    int                     frameFinished;  // set by the decoder when a full frame is out
    int                     lineCnt;
    while(av_read_frame(formatCtx, &packet)>=0 && !stop) {
        // Is this a packet from the video stream?
        if(packet.stream_index==videoStream) {
            // Decode video frame
            avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished,
                                  &packet);
            // Did we get a video frame?
            if(frameFinished) {
                // Convert the image from its native format to RGBA
                sws_scale
                        (
                                sws_ctx,
                                (uint8_t const * const *)decodedFrame->data,
                                decodedFrame->linesize,
                                0,
                                codecCtx->height,
                                frameRGBA->data,
                                frameRGBA->linesize
                        );
                // lock the window buffer
                if (buffer == NULL || ANativeWindow_lock(window, &windowBuffer, NULL) < 0) {
                    LOGE("cannot lock window");
                } else {
                    // draw the frame on buffer
                    LOGI("copy buffer %d:%d:%d", width, height, width*height*4);
                    LOGI("window buffer: %d:%d:%d", windowBuffer.width,
                         windowBuffer.height, windowBuffer.stride);
                    // Copy row by row: window stride and frame linesize may
                    // differ, so a single memcpy of the whole image is unsafe.
                    for (int h = 0; h < height; h++)
                    {
                        memcpy(windowBuffer.bits + h * windowBuffer.stride * 4,
                               buffer + h * frameRGBA->linesize[0],
                               width*4);
                    }
                    // unlock the window buffer and post it to display
                    ANativeWindow_unlockAndPost(window);
                    // count number of frames
                    ++i;
                }
            }
        }
        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
    }
    LOGI("total No. of frames decoded and rendered %d", i);
    finish(pEnv);
}

/**
 * start the video playback
 */
void naPlay(JNIEnv *pEnv, jobject pObj) {
    //create a new thread for video decode and render
    pthread_t decodeThread;
    stop = 0;
    pthread_create(&decodeThread, NULL, decodeAndRender, NULL);
}

/**
 * Stop the video playback: signal the decode loop (running on another
 * thread) to exit at the next loop iteration.
 * NOTE(review): `stop` is a plain int shared between threads; it should be
 * an atomic (or at minimum volatile sig_atomic_t) for guaranteed visibility.
 */
void naStop(JNIEnv *pEnv, jobject pObj) {
    stop = 1;
}



/*
 * Library entry point: registers the native methods on
 * com.wiznsystems.android.utils.FFMPEGPlayer.
 * Returns JNI_VERSION_1_6 on success, -1 on failure (load aborts).
 */
jint JNI_OnLoad(JavaVM* pVm, void* reserved) {
    JNIEnv* env;
    if ((*pVm)->GetEnv(pVm, (void **)&env, JNI_VERSION_1_6) != JNI_OK) {
        return -1;
    }

    // WARNING(review): a JNIEnv* is only valid on the thread it was obtained
    // on.  This cached pointer must never be used from the decode thread;
    // cache the JavaVM* and AttachCurrentThread there instead.
    gEnv=env;

    // BUG FIX: the array was declared with 8 slots but only 6 are filled;
    // size it exactly so nothing can read the uninitialized tail.
    JNINativeMethod nm[6];
    nm[0].name = "naInit";
    nm[0].signature = "(Ljava/lang/String;)I";
    nm[0].fnPtr = (void*)naInit;

    nm[1].name = "naSetSurface";
    nm[1].signature = "(Landroid/view/Surface;)V";
    nm[1].fnPtr = (void*)naSetSurface;

    nm[2].name = "naGetVideoRes";
    nm[2].signature = "()[I";
    nm[2].fnPtr = (void*)naGetVideoRes;

    nm[3].name = "naSetup";
    nm[3].signature = "(II)I";
    nm[3].fnPtr = (void*)naSetup;

    nm[4].name = "naPlay";
    nm[4].signature = "()V";
    nm[4].fnPtr = (void*)naPlay;

    nm[5].name = "naStop";
    nm[5].signature = "()V";
    nm[5].fnPtr = (void*)naStop;

    jclass cls = (*env)->FindClass(env, "com/wiznsystems/android/utils/FFMPEGPlayer");
    if (cls == NULL) {
        return -1; // BUG FIX: RegisterNatives on a NULL class would crash
    }
    if ((*env)->RegisterNatives(env, cls, nm, 6) != JNI_OK) {
        return -1; // BUG FIX: registration failure was silently ignored
    }
    return JNI_VERSION_1_6;
}

/*
 * Minimal wcslen replacement: count the wide characters before the
 * terminating L'\0'.  Provided locally because the NDK libc in use here
 * apparently lacks it.
 */
size_t
wcslen(const wchar_t *s) {
    size_t n = 0;
    while (s[n] != L'\0')
        n++;
    return n;
}

Based on some other SO answers, I thought the issue might be that I was not holding the JNIEnv* properly, so I stored a global reference to the JNIEnv* in JNI_OnLoad and used that instead in my onFinish method. That also does not work.

Anuran Barman
  • 1,556
  • 2
  • 16
  • 31
  • Where did the C code get the `JNIEnv *` from? – user207421 Mar 20 '18 at 07:36
  • @EJP from another method which just passed it to the onFinish method. that method was running in a different thread with pthread_create(). can that be the issue? – Anuran Barman Mar 20 '18 at 07:42
  • 2
    `JNIEnv` pointers can not be shared among threads. See [this answer](https://stackoverflow.com/questions/30026030/what-is-the-best-way-to-save-jnienv/30026231#30026231) for how to get a `JNIEnv*` on any thread. – Michael Mar 20 '18 at 07:51
  • I have updated my code. – Anuran Barman Mar 20 '18 at 07:54
  • @Michael If you see my code I dont think that having wrong reference of JNIEnv* is the real issue here. as decode_render() is the method which is running on different thread and it calls finish() with instance of JNIEnv* as you can see.If that was wrong then finish() method should not work as well but it is. – Anuran Barman Mar 20 '18 at 08:23
  • 1
    It remains a fact that a `JNIEnv *` is specific to a thread, as @Michael stated, and therefore that storing one globally can never be correct. That's why I asked. – user207421 Mar 20 '18 at 08:50
  • Okay I solved the issue by AttachCurrentThread() but not code when exception occuring in my c code is executed as there is exception. Why the class is not found? – Anuran Barman Mar 20 '18 at 09:09
  • OK so you solved the issue but you didn't solve the issue. Which is it? Make up your mind. – user207421 Mar 20 '18 at 09:13
  • I mean I solved the issue of having wrong instance of JNIEnv*. But FindClass is still not working. I just managed to catch the Exception. – Anuran Barman Mar 20 '18 at 09:15
  • And what *was* the exception? What was the message? Didn't you find anything there of value? Or did you just decide it was easier to just dump your information-free supplementary question here and hope for a miracle? – user207421 Mar 20 '18 at 09:17
  • sorry for late as I did not know how to print from JNI side being new to it. Here is the exception it gave upon ExceptionDescribe() java.lang.ClassNotFoundException: Didn't find class "com.wiznsystems.android.utils.FFMPEGPlayer" on path: DexPathList[[directory "."],nativeLibraryDirectories=[/system/lib, /system/vendor/lib, /system/lib, /system/vendor/lib]] – Anuran Barman Mar 20 '18 at 09:38
  • Using `FindClass` on a native background thread can be problematic: _"You can get into trouble if you create a thread yourself (perhaps by calling pthread_create and then attaching it with AttachCurrentThread). Now there are no stack frames from your application. If you call FindClass from this thread, the JavaVM will start in the "system" class loader instead of the one associated with your application, so attempts to find app-specific classes will fail."_ ([more info](https://developer.android.com/training/articles/perf-jni.html#faq_FindClass)) – Michael Mar 20 '18 at 10:07
  • in the provided URL in the work around part "Do your FindClass lookups once, in JNI_OnLoad, and cache the class references for later use. Any FindClass calls made as part of executing JNI_OnLoad will use the class loader associated with the function that called System.loadLibrary (this is a special rule, provided to make library initialization more convenient). If your app code is loading the library, FindClass will use the correct class loader. " Yes FFMPEGPlayer class is loading the library and JNI_OnLoad is loading the class correctly for the first time. – Anuran Barman Mar 20 '18 at 10:15

0 Answers0