应用播放的时候需要截图,可以在上层使用 TextureView 来实现截图,不过局限性太大,还是在底层处理比较好。
那么先分析下可以在哪里加截图呢?看到网上很多做的都不能支持硬解截图,这样肯定是不行的。
所以我们可以选择在video_image_display2中来做截屏处理。
添加shot_screen.h
/* Screenshot support for ijkplayer: grab the last displayed video frame
 * (works for both software and hardware decode paths, since it reads the
 * SDL_VoutOverlay used by video_image_display2). */
/* NOTE: guard renamed from _SHOT_SCREEN_H — identifiers starting with an
 * underscore followed by an uppercase letter are reserved in C. */
#ifndef SHOT_SCREEN_H
#define SHOT_SCREEN_H
#include <jni.h>
#include "ff_ffplay_def.h"
#include "ijkplayer.h"
/* Copy overlay `bmp` into the packed RGBA buffer `frameFuf`
 * (destWidth x destHeight), converting/scaling as needed.
 * Returns 0 on success, -1 on failure. */
int do_shot(VideoState *videoState, uint8_t *frameFuf, int destWidth, int destHeight,
SDL_VoutOverlay *bmp);
/* Create and return an android.graphics.Bitmap screenshot of the last
 * displayed frame, at most (width x height); NULL on failure.
 * Releases the caller's reference on `mp`. */
jobject start_shot(JNIEnv *env, IjkMediaPlayer *mp, int width, int height, jobject config);
#endif
添加shot_screen.c文件
#include <jni.h>
#include <android/bitmap.h>
#include "ff_ffplay_def.h"
#include "ijkplayer.h"
#include "ijkplayer_internal.h"
#include "shot_screen.h"
/**
 * Row-by-row copy of a 4-bytes-per-pixel image from a (possibly padded)
 * source into a tightly packed destination buffer.
 *
 * @param lineSize source stride in bytes (may exceed width * 4 when padded)
 * @param data     source pixel data
 * @param frameFuf destination buffer, packed: width * 4 bytes per row
 * @param width    row width in pixels
 * @param height   number of rows
 */
void fill_bitmap(int lineSize, uint8_t *data, uint8_t *frameFuf, int width, int height) {
    const size_t rowBytes = (size_t) width * 4;
    const uint8_t *src = data;
    uint8_t *dst = frameFuf;
    for (int row = 0; row < height; row++) {
        memcpy(dst, src, rowBytes);
        src += lineSize;
        dst += rowBytes;
    }
}
/**
 * Convert/scale one overlay frame into the packed RGBA buffer `frameFuf`
 * (destWidth x destHeight).
 *
 * @param videoState  player state; its cached SwsContext is (re)used
 * @param frameFuf    destination buffer, packed RGBA, destWidth*4 bytes/row
 * @param pixelFormat source pixel format of the overlay planes
 * @return 0 on success, -1 on failure
 */
int scale_and_fill_bitmap(VideoState *videoState, uint8_t *frameFuf, int destWidth, int destHeight,
                          SDL_VoutOverlay *bmp, enum AVPixelFormat pixelFormat) {
    int width = bmp->w;
    int height = bmp->h;
    struct SwsContext *swsContext = sws_getCachedContext(videoState->img_convert_ctx,
                                                         width, height, pixelFormat,
                                                         destWidth, destHeight,
                                                         AV_PIX_FMT_RGBA, SWS_FAST_BILINEAR,
                                                         NULL, NULL, NULL);
    /* BUG FIX: the original called sws_freeContext() on the cached context but
     * left videoState->img_convert_ctx pointing at the freed memory — the next
     * sws_getCachedContext() call (or stream close) would use/free a dangling
     * pointer. Keep the cached pointer in sync and do NOT free it here. */
    videoState->img_convert_ctx = swsContext;
    if (swsContext == NULL) {
        return -1; /* sws_getCachedContext can fail (e.g. OOM) */
    }

    /* Destination is a single packed RGBA plane, so its line size is simply
     * destWidth * 4. (The original allocated a throw-away AVFrame + buffer
     * via av_image_fill_arrays just to obtain this value.)
     * NOTE(review): assumes the Android bitmap stride equals destWidth * 4;
     * confirm with AndroidBitmap_getInfo() for padded bitmaps. */
    uint8_t *dstData[4] = { frameFuf, NULL, NULL, NULL };
    int dstLinesize[4] = { destWidth * 4, 0, 0, 0 };

    int result = sws_scale(swsContext, (const uint8_t *const *) bmp->pixels,
                           (const int *) bmp->pitches, 0, height,
                           dstData, dstLinesize);
    return result > 0 ? 0 : -1;
}
/**
 * Copy overlay `bmp` into the packed RGBA buffer `frameFuf`
 * (destWidth x destHeight), going through libswscale whenever the source
 * needs pixel-format conversion or resizing.
 *
 * @return 0 on success, -1 on unsupported overlay format or conversion failure
 */
int do_shot(VideoState *videoState, uint8_t *frameFuf, int destWidth, int destHeight,
            SDL_VoutOverlay *bmp) {
    int width = bmp->w;
    int height = bmp->h;
    enum AVPixelFormat srcFormat; /* was misleadingly named dst_format: it is
                                   * the SOURCE format fed to swscale */
    int needScale = 0;            /* force the swscale path even at same size */
    switch (bmp->format) {
        case SDL_FCC_YV12:
            /* NOTE(review): YV12 stores the V plane before U; mapping it to
             * AV_PIX_FMT_YUV420P (U before V) may swap chroma — verify how
             * the overlay orders bmp->pixels[] for this FourCC. */
            /* fallthrough */
        case SDL_FCC_I420:
            srcFormat = AV_PIX_FMT_YUV420P;
            needScale = 1;
            break;
        case SDL_FCC_I444P10LE:
            srcFormat = AV_PIX_FMT_YUV444P10LE;
            needScale = 1;
            break;
        case SDL_FCC_RV32:
            /* NOTE(review): 0BGR32 byte order vs. the bitmap's RGBA_8888 on a
             * direct copy — confirm channel order matches on this platform. */
            srcFormat = AV_PIX_FMT_0BGR32;
            break;
        case SDL_FCC_RV24:
            srcFormat = AV_PIX_FMT_RGB24;
            needScale = 1;
            break;
        case SDL_FCC_RV16:
            srcFormat = AV_PIX_FMT_RGB565;
            /* BUG FIX: RGB565 is 2 bytes/pixel, but the direct-copy path
             * (fill_bitmap) assumes 4 bytes/pixel and would over-read the
             * source rows — always convert through swscale. */
            needScale = 1;
            break;
        default:
            return -1; /* unsupported overlay FourCC */
    }
    if (needScale || destWidth != width || destHeight != height) {
        return scale_and_fill_bitmap(videoState, frameFuf, destWidth, destHeight, bmp, srcFormat);
    }
    /* Same dimensions and already 4 bytes/pixel: plain row-by-row copy. */
    fill_bitmap(bmp->pitches[0], bmp->pixels[0], frameFuf, width, height);
    return 0;
}
/**
 * Return the overlay of the most recently displayed frame, or NULL when the
 * player has no video state yet.
 */
SDL_VoutOverlay *get_last_bmp(FFPlayer *ffp) {
    /* Robustness: before playback is prepared, ffp->is may still be NULL. */
    if (ffp == NULL || ffp->is == NULL) {
        return NULL;
    }
    VideoState *is = ffp->is;
    /* NOTE(review): pictq is read without holding its lock — a racing
     * display/decoder thread could recycle this frame; confirm the caller's
     * threading guarantees. */
    Frame *vp = &is->pictq.queue[is->pictq.rindex];
    return vp->bmp;
}
/**
 * Take a screenshot of the most recently displayed video frame.
 *
 * Creates an android.graphics.Bitmap of at most (width x height) — the
 * frame's own dimensions are used when width/height are non-positive or
 * larger than the frame — fills it with the frame's pixels via do_shot(),
 * and returns it. Returns NULL on any failure.
 *
 * @param config android.graphics.Bitmap$Config object (expected RGBA-style;
 *               the pixel fill always writes packed RGBA)
 *
 * Releases the reference on `mp` on EVERY path.
 */
jobject start_shot(JNIEnv *env, IjkMediaPlayer *mp, int width, int height, jobject config) {
    jobject bitmap = NULL;

    SDL_VoutOverlay *bmp = get_last_bmp(mp->ffplayer);
    if (bmp == NULL) {
        goto done; /* no frame displayed yet */
    }

    jclass bitmapClass = (*env)->FindClass(env, "android/graphics/Bitmap");
    if (bitmapClass == NULL) {
        goto done; /* pending ClassNotFoundException */
    }
    jmethodID createBitmapMethodId = (*env)->GetStaticMethodID(env, bitmapClass, "createBitmap",
            "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
    if (createBitmapMethodId == NULL) {
        goto done; /* pending NoSuchMethodError */
    }

    /* Clamp the requested size to the frame size; non-positive means
     * "use the frame's own dimension". */
    int destWidth = bmp->w;
    if (width > 0 && width < destWidth) {
        destWidth = width;
    }
    int destHeight = bmp->h;
    if (height > 0 && height < destHeight) {
        destHeight = height;
    }

    bitmap = (*env)->CallStaticObjectMethod(env, bitmapClass, createBitmapMethodId,
                                            destWidth, destHeight, config);
    /* createBitmap may throw (e.g. OOM) or return NULL. */
    if ((*env)->ExceptionCheck(env) || bitmap == NULL) {
        bitmap = NULL;
        goto done;
    }

    uint8_t *frameBuffer = NULL;
    if (AndroidBitmap_lockPixels(env, bitmap, (void **) &frameBuffer) < 0) {
        bitmap = NULL;
        goto done;
    }
    int ret = do_shot(mp->ffplayer->is, frameBuffer, destWidth, destHeight, bmp);
    if (AndroidBitmap_unlockPixels(env, bitmap) < 0 || ret < 0) {
        bitmap = NULL;
        goto done;
    }

done:
    /* BUG FIX: the original released the player reference only on the
     * success path — every early return leaked one reference on mp. */
    ijkmp_dec_ref_p(&mp);
    return bitmap;
}
本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)