I am trying to use the Android NDK to develop a simple decoder/player application. I created a project using the Android SDK and then created a folder named jni in my project directory. Inside the jni directory I created a file, omx.cpp, in which I want to write my own class that inherits from Android's MediaSource (from stagefright). I have also included the stagefright header files in my project, and I am loading libstagefright.so with dlopen in omx.cpp.
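For reference, I am building with ndk-build, and my Android.mk looks roughly like this (the include paths are placeholders for wherever the copied AOSP headers actually live):

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE    := omx
LOCAL_SRC_FILES := omx.cpp
# Adjust these to the location of the copied stagefright/AOSP headers.
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include/frameworks/av/include \
                    $(LOCAL_PATH)/include/system/core/include
LOCAL_LDLIBS    := -llog -ldl
include $(BUILD_SHARED_LIBRARY)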
The code I am using is as follows:
#include <dlfcn.h>
#include <jni.h>
#include <android/log.h>

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>

namespace android
{
class ImageSource : public MediaSource {
public:
ImageSource(int width, int height, int colorFormat)
: mWidth(width),
mHeight(height),
mColorFormat(colorFormat)
{
}
public:
int mWidth;
int mHeight;
int mColorFormat;
// The stubs below previously had no return statements, which is
// undefined behaviour for functions returning status_t.
virtual status_t start(MetaData *params = NULL) { return OK; }
virtual status_t stop() { return OK; }
// Returns the format of the data output by this media source.
virtual sp<MetaData> getFormat() {
    sp<MetaData> meta = new MetaData;
    meta->setInt32(kKeyWidth, mWidth);
    meta->setInt32(kKeyHeight, mHeight);
    meta->setInt32(kKeyColorFormat, mColorFormat);
    return meta;
}
virtual status_t read(
        MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
    return ERROR_END_OF_STREAM; // TODO: fill *buffer with real frames
}
protected:
    // MediaSource derives from RefBase, so keep the destructor protected.
    virtual ~ImageSource() {}
};
// Note: a JNI symbol must also encode the Java method name;
// "nativeStart" is a placeholder for the actual native method.
void Java_com_exampleomxvideodecoder_MainActivity_nativeStart(
        JNIEnv *env, jobject obj, jobject surface)
{
    // "d:\libstagefright.so" was a Windows path (with an unescaped
    // backslash); on the device the library lives under /system/lib.
    void *dlhandle = dlopen("/system/lib/libstagefright.so", RTLD_NOW);
    if (dlhandle == NULL) {
        __android_log_print(ANDROID_LOG_ERROR, "omx",
                "dlopen failed: %s", dlerror());
        return;
    }
int width = 720;
int height = 480;
int colorFormat = 0;
sp<MediaSource> img_source = new ImageSource(width, height, colorFormat);
// These encoder constants were used but never defined; example values:
const int32_t kFramerate = 30;
const int32_t kVideoBitRate = 512 * 1024;
const int32_t kIFramesIntervalSec = 1;
const int64_t kDurationUs = 10 * 1000000LL; // 10 seconds

sp<MetaData> enc_meta = new MetaData;
// enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
// enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
enc_meta->setInt32(kKeyWidth, width);
enc_meta->setInt32(kKeyHeight, height);
enc_meta->setInt32(kKeySampleRate, kFramerate);
enc_meta->setInt32(kKeyBitRate, kVideoBitRate);
enc_meta->setInt32(kKeyStride, width);
enc_meta->setInt32(kKeySliceHeight, height);
enc_meta->setInt32(kKeyIFramesInterval, kIFramesIntervalSec);
enc_meta->setInt32(kKeyColorFormat, colorFormat);
// An OMXClient was never declared; it must be connected before use.
OMXClient client;
CHECK_EQ(client.connect(), (status_t)OK);
sp<MediaSource> encoder =
    OMXCodec::Create(
        client.interface(), enc_meta, true /* encoder */, img_source);
sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/screenshot.mp4");
writer->addSource(encoder);
// An audio track could be added here as well, but encMetaAudio and
// audioSource are not defined anywhere in this file:
// sp<MediaSource> audioEncoder =
//     OMXCodec::Create(client.interface(), encMetaAudio, true, audioSource);
// writer->addSource(audioEncoder);
writer->setMaxFileDuration(kDurationUs);
CHECK_EQ(OK, writer->start());
while (!writer->reachedEOS()) {
fprintf(stderr, ".");
usleep(100000);
}
status_t err = writer->stop();
client.disconnect();
}
}
I have the following doubts:
1. In a JNI function, is it okay to create class objects and use them to call the functions of, say, the MediaSource class, or do we have to create separate .cpp and .h files? If we use separate files, how do we call/reference them from the JNI function (roughly as sketched below)?
2. Is making our own wrapper class that inherits from MediaSource the right approach, or is there another way?
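To illustrate doubt 1, is a hypothetical split like the following what is usually done (the file names are made up)?

// ImageSource.h (hypothetical)
#ifndef IMAGE_SOURCE_H
#define IMAGE_SOURCE_H
#include <media/stagefright/MediaSource.h>
namespace android {
class ImageSource : public MediaSource {
    // ...declaration as above...
};
}
#endif

// omx.cpp would then simply include it:
#include "ImageSource.h"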
Basically, I want to make an application that takes an .mp4/.avi file, demuxes it into separate audio/video streams, decodes them, and renders/plays them using only Android stagefright and OpenMAX.
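In other words, I am imagining a pipeline roughly like the sketch below, based on the stagefright sources I have read (the file path, track index, and error handling are placeholders):

sp<DataSource> file = new FileSource("/sdcard/input.mp4");
sp<MediaExtractor> extractor = MediaExtractor::Create(file);
sp<MediaSource> track = extractor->getTrack(0); // assuming track 0 is video
OMXClient client;
CHECK_EQ(client.connect(), (status_t)OK);
sp<MediaSource> decoder = OMXCodec::Create(
        client.interface(), track->getFormat(), false /* decoder */, track);
decoder->start();
MediaBuffer *frame;
while (decoder->read(&frame) == OK) {
    // ...hand the decoded frame to a renderer here...
    frame->release();
}
decoder->stop();
client.disconnect();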
If ffmpeg is suggested for the source/demuxing, how do I integrate it with the Android stagefright framework?
Regards