I searched for a lot of information about how to build and use FFmpeg on early versions of Android, studied the source code of players from 2011-2014, and was able to build FFmpeg 4.0.4 and 3.1.4 on the NDKv5 platform without much trouble. The main thing I singled out for this purpose: the <android/bitmap.h> and <android/native_window.h> headers did not exist before Android 2.2 (API level 8).

void decodeVideoFromPacket(JNIEnv *env, jobject instance,
                           jclass mplayer_class, AVPacket avpkt,
                           int total_frames, int length) {
    AVFrame *pFrame = NULL;
    AVFrame *pFrameRGB = NULL;
    pFrame = av_frame_alloc();    // avcodec_alloc_frame() no longer exists in FFmpeg 3.x/4.x
    pFrameRGB = av_frame_alloc();
    int frame_size = avpicture_get_size(AV_PIX_FMT_RGB32, gVideoCodecCtx->width, gVideoCodecCtx->height);
    unsigned char* buffer = (unsigned char*) av_malloc((size_t) frame_size * 3);
    if (!buffer) {
        av_frame_free(&pFrame);
        av_frame_free(&pFrameRGB);
        return;
    }
    jbyteArray buffer2;
    jmethodID renderVideoFrames = env->GetMethodID(mplayer_class, "renderVideoFrames", "([BI)V");
    int frameDecoded;
    avpicture_fill((AVPicture*) pFrame,
                   buffer,
                   gVideoCodecCtx->pix_fmt,
                   gVideoCodecCtx->width,
                   gVideoCodecCtx->height);
    if (avpkt.stream_index == gVideoStreamIndex) { // if the packet belongs to the video stream
        int size = avpkt.size;
        total_frames++;
        struct SwsContext *img_convert_ctx = NULL;
        avcodec_decode_video2(gVideoCodecCtx, pFrame, &frameDecoded, &avpkt);
        if (!frameDecoded || pFrame == NULL) {
            return;
        }
        try {
            enum AVPixelFormat pxf;
            // RGB565 by default for the Android Canvas on pre-Gingerbread devices.
            if (android::get_android_api_version(env) >= ANDROID_API_CODENAME_GINGERBREAD) {
                pxf = AV_PIX_FMT_BGR32;
            } else {
                pxf = AV_PIX_FMT_RGB565;
            }
            int rgbBytes = avpicture_get_size(pxf, gVideoCodecCtx->width,
                                              gVideoCodecCtx->height);
            // Convert the YUV frame to RGB, then the RGB frame to a char* buffer
            buffer = convertYuv2Rgb(pxf, pFrame, rgbBytes); // result of av_image_copy_to_buffer()
            if (buffer == NULL) {
                return;
            }
            buffer2 = env->NewByteArray((jsize) rgbBytes);
            env->SetByteArrayRegion(buffer2, 0, (jsize) rgbBytes,
                                    (jbyte *) buffer);
            env->CallVoidMethod(instance, renderVideoFrames, buffer2, rgbBytes);
            env->DeleteLocalRef(buffer2);
            free(buffer);
        } catch (...) {
            if (debug_mode) {
                LOGE(10, "[ERROR] Render video frames failed");
                return;
            }
        }
    }
}
private void renderVideoFrames(final byte[] buffer, final int length) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            Canvas c;
            VideoTrack track = null;
            for (int tracks_index = 0; tracks_index < tracks.size(); tracks_index++) {
                if (tracks.get(tracks_index) instanceof VideoTrack) {
                    track = (VideoTrack) tracks.get(tracks_index);
                }
            }
            if (track != null) {
                int frame_width = track.frame_size[0];
                int frame_height = track.frame_size[1];
                if (frame_width > 0 && frame_height > 0) {
                    try {
                        // RGB_565   == 65K colours (16 bit)
                        // ARGB_8888 == 16.7M colours (32 bit, incl. alpha channel)
                        int bpp = Build.VERSION.SDK_INT > 9 ? 32 : 16;
                        Bitmap.Config bmp_config =
                                bpp == 32 ? Bitmap.Config.ARGB_8888 : Bitmap.Config.RGB_565;
                        Paint paint = new Paint();
                        if (buffer != null && holder != null) {
                            holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
                            if ((c = holder.lockCanvas()) == null) {
                                Log.d(MPLAY_TAG, "Lock canvas failed");
                                return;
                            }
                            ByteBuffer bbuf =
                                    ByteBuffer.allocateDirect(minVideoBufferSize);
                            bbuf.rewind();
                            for (int i = 0; i < buffer.length; i++) {
                                bbuf.put(i, buffer[i]);
                            }
                            bbuf.rewind();
                            // The approximate location where the application crashed.
                            Bitmap bmp = Bitmap.createBitmap(frame_width, frame_height, bmp_config);
                            bmp.copyPixelsFromBuffer(bbuf);
                            // Scale to the canvas height, keeping the aspect ratio.
                            float aspect_ratio = (float) frame_width / (float) frame_height;
                            int scaled_width = (int) (aspect_ratio * c.getHeight());
                            c.drawBitmap(bmp,
                                    null,
                                    new RectF(
                                            ((c.getWidth() - scaled_width) / 2), 0,
                                            ((c.getWidth() - scaled_width) / 2) + scaled_width,
                                            c.getHeight()),
                                    null);
                            holder.unlockCanvasAndPost(c);
                            bmp.recycle();
                            bbuf.clear();
                        } else {
                            Log.d(MPLAY_TAG, "Video frame buffer is null");
                        }
                    } catch (Exception ex) {
                        ex.printStackTrace();
                    } catch (OutOfMemoryError oom) {
                        oom.printStackTrace();
                        stop();
                    }
                }
            }
        }
    }).start();
}
Exception (tested in an Android 4.1.2 emulator):
E/dalvikvm-heap: Out of memory on a 1228812-byte allocation
I/dalvikvm: "Thread-495" prio=5 tid=21 RUNNABLE
................................................
at android.graphics.Bitmap.nativeCreate(Native Method)
at android.graphics.Bitmap.createBitmap(Bitmap.java:640)
at android.graphics.Bitmap.createBitmap(Bitmap.java:620)
at [app_package_name].MediaPlayer$5.run(MediaPlayer.java:406)
at java.lang.Thread.run(Thread.java:856)
For clarification: I first compiled FFmpeg 0.11.x on an Ubuntu 12.04 LTS virtual machine using a build script I had written, then looked for player examples suitable for Android below 2.2 (unfortunately, there is little information about them). When I opened a file in the player, it showed the first few frames and then crashed with a stack or buffer overflow, so I put off developing the player for some time.
Is there anything ready-made, ideally fitting into a single C++ file, that takes all the nuances of backporting into account? Thanks in advance.
To end up with a working player, you need to do the following:
Before building FFmpeg, use Android NDK r8e for 32-bit builds and Android NDK r10e for 64-bit builds. NDK r10e can still target the Android 2.0 platform, but running the resulting library on Android 2.1 crashes the application with SIGSEGV due to an incompatible libc runtime.
I actually didn't know about the existence of the ReleaseByteArrayElements function in JNI, so learning about it was a big help in figuring out why the buffer was overflowing; see the sketch below.
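A minimal sketch of the pairing, with a hypothetical sendFrameToJava() helper: every GetByteArrayElements call must be matched by a ReleaseByteArrayElements call, otherwise each frame leaks a pinned copy of the Java array.

#include <jni.h>
#include <cstring>

// Hypothetical helper: hands one converted RGB frame to the Java side.
void sendFrameToJava(JNIEnv *env, jobject instance, jmethodID renderVideoFrames,
                     const unsigned char *rgb, int rgbBytes) {
    jbyteArray out = env->NewByteArray((jsize) rgbBytes);
    jbyte *elems = env->GetByteArrayElements(out, NULL); // pins (or copies) the array
    memcpy(elems, rgb, (size_t) rgbBytes);
    env->ReleaseByteArrayElements(out, elems, 0); // mode 0: write back and release the copy
    env->CallVoidMethod(instance, renderVideoFrames, out, (jint) rgbBytes);
    env->DeleteLocalRef(out); // important in a long-running native decode loop
}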
The av_packet_unref, av_free_packet and sws_freeContext FFmpeg functions are extremely important and should be called at the end of handling every decoded frame, as in the sketch below.
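A minimal sketch of a decode loop with this per-frame cleanup, assuming the FFmpeg 3.x decode API; fmtCtx, codecCtx and videoStream are placeholders for the player's own state:

extern "C" {
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}

void decodeLoop(AVFormatContext *fmtCtx, AVCodecContext *codecCtx, int videoStream) {
    AVPacket pkt;
    AVFrame *frame = av_frame_alloc();
    while (av_read_frame(fmtCtx, &pkt) >= 0) {
        if (pkt.stream_index == videoStream) {
            int got = 0;
            avcodec_decode_video2(codecCtx, frame, &got, &pkt);
            if (got) {
                struct SwsContext *swsCtx = sws_getContext(
                        codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
                        codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB565,
                        SWS_BILINEAR, NULL, NULL, NULL);
                // ... sws_scale() into an RGB buffer, hand it to Java ...
                sws_freeContext(swsCtx); // free the scaler after every frame
            }
        }
        av_packet_unref(&pkt); // av_free_packet(&pkt) on older FFmpeg releases
    }
    av_frame_free(&frame);
}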
Before:
~160 MB of memory per minute of video
After:
~25 MB of memory on average