This follows the approach described at http://www.cnblogs.com/doandroid/archive/2011/11/09/2242558.html
In MainActivity.java:
// Native methods implemented in the JNI library (native.c, below).
// Opens the RTMP stream and initializes decoder + scaler.
private static native void openFile ();
// Decodes the next video frame and blits it into the given bitmap.
private static native void drawFrame (Bitmap bitmap);
// Draws the frame at roughly `secs` seconds. NOTE(review): not called anywhere in this post.
private static native void drawFrameAt (Bitmap bitmap, int secs);
// Target bitmap the native side renders into.
private Bitmap mBitmap;
// NOTE(review): unused in the code shown here; presumably meant for drawFrameAt.
private int mSecs = 0;
static {
// Load libffmpeg.so before any native method is invoked.
System.loadLibrary ("ffmpeg");
}
/**
 * Creates the bitmap the native decoder renders into, opens the RTMP
 * stream, and wires the "advance frame" button to decode-and-display.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // 320x240 ARGB_8888 target; the native fill_bitmap() writes 4 bytes/pixel.
    mBitmap = Bitmap.createBitmap(320, 240, Bitmap.Config.ARGB_8888);

    // NOTE(review): this performs network I/O (RTMP connect) on the UI
    // thread; on modern Android it must be moved to a background thread.
    openFile();

    // Look the target view up once instead of on every click.
    final ImageView frameView = (ImageView) findViewById(R.id.frame);
    Button btn = (Button) findViewById(R.id.frame_adv);
    btn.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            drawFrame(mBitmap);               // decode next frame into mBitmap
            frameView.setImageBitmap(mBitmap); // and display it
        }
    });
}
In the JNI source file native.c:
/*
 * Copy one decoded RGB24 frame into a locked ARGB_8888 Android bitmap.
 *
 * info   - bitmap geometry (width/height/stride) from AndroidBitmap_getInfo
 * pixels - locked pixel buffer from AndroidBitmap_lockPixels
 * pFrame - decoded frame already converted to RGB24 (3 bytes per pixel)
 *
 * NOTE(review): both loop headers in the original post were truncated by
 * HTML escaping (the "<" was eaten); reconstructed as the standard
 * row/column bounds.
 */
static void fill_bitmap(AndroidBitmapInfo *info, void *pixels, AVFrame *pFrame)
{
    uint8_t *frameLine;
    int yy;
    for (yy = 0; yy < info->height; yy++) {
        uint8_t *line = (uint8_t *) pixels;
        frameLine = (uint8_t *) pFrame->data[0] + (yy * pFrame->linesize[0]);
        int xx;
        for (xx = 0; xx < info->width; xx++) {
            int out_offset = xx * 4;   /* 4 bytes/pixel in ARGB_8888 */
            int in_offset  = xx * 3;   /* 3 bytes/pixel in RGB24 */
            line[out_offset]     = frameLine[in_offset];
            line[out_offset + 1] = frameLine[in_offset + 1];
            line[out_offset + 2] = frameLine[in_offset + 2];
            /* BUG FIX: the original wrote 0 here, making every pixel fully
             * transparent — the likely cause of the "colorless sketch"
             * symptom described in the post. Alpha must be opaque. */
            line[out_offset + 3] = 0xff;
        }
        /* Advance one bitmap row; stride may exceed width * 4. */
        pixels = (char *) pixels + info->stride;
    }
}
/*
 * Initialize the libswscale context (global img_convert_ctx) that converts
 * decoded frames from the codec's native pixel format to RGB24, at the
 * stream's original resolution (no scaling, only pixel-format conversion).
 *
 * Fix: the original's disabled line used "/ /", which is not valid C
 * comment syntax (extraction artifact).
 */
void setupScaler()
{
    /* avpicture_alloc(&picture, PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height); */
    static int sws_flags = SWS_FAST_BILINEAR;
    img_convert_ctx = sws_getContext(pCodecCtx->width,
                                     pCodecCtx->height,
                                     pCodecCtx->pix_fmt,
                                     pCodecCtx->width,
                                     pCodecCtx->height,
                                     PIX_FMT_RGB24,
                                     sws_flags, NULL, NULL, NULL);
}
/*
 * JNI entry point for MainActivity.openFile().
 *
 * Opens the RTMP stream, locates the first video stream, opens its decoder,
 * creates the RGB conversion context, and allocates the RGB frame buffer.
 * All state is stored in file-level globals (pFormatCtx, pCodecCtx, pFrame,
 * pFrameRGB, videoStream) used later by drawFrame.
 *
 * Reconstructed from the garbled post: "err! = 0" -> "err != 0", the
 * stream-scan loop bound (eaten by HTML escaping), the RTMP URL, and the
 * LOGE format strings ("% d" -> "%d").
 */
void Java_cn_ct_rtmpdemo1_MainActivity_openFile(JNIEnv *env, jobject this)
{
    int err;
    int i;
    AVCodec *pCodec;
    uint8_t *buffer;
    int numBytes;

    av_register_all();       /* register all muxers, demuxers and protocols */
    avformat_network_init(); /* required for network (RTMP) inputs */
    LOGE("Registered formats");

    char errCode[512];
    /* "live=1" after the space is an librtmp option, not part of the path. */
    err = avformat_open_input(&pFormatCtx, "rtmp://XXX.64.87.XX/e/crovd1 live=1", NULL, NULL);
    LOGE("Called open file");
    if (err != 0) {
        av_strerror(err, errCode, 512);
        av_log_set_level(AV_LOG_DEBUG);
        LOGE("Couldn't open file %s", errCode);
        return;
    }
    LOGE("Opened file");

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Unable to get stream info");
        return;
    }

    /* Find the first video stream. Loop bound reconstructed — the "<" in the
     * posted condition was eaten by HTML escaping. */
    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            LOGE("get videoStream");
            break;
        }
    }
    if (videoStream == -1) {
        LOGE("Unable to find video stream");
        return;
    }
    LOGE("Video stream is [%d]", videoStream);

    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("Unsupported codec %d", pCodecCtx->codec_id);
        return;
    }
    LOGE("avcodec_find_decoder");

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Unable to open codec");
        return;
    }

    pFrame = avcodec_alloc_frame();
    pFrameRGB = avcodec_alloc_frame();
    LOGE("Video size is [%d x %d]", pCodecCtx->width, pCodecCtx->height);

    setupScaler();

    /* Back pFrameRGB with a buffer sized for an RGB24 image. */
    numBytes = avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
    buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    avpicture_fill((AVPicture *) pFrameRGB, buffer, PIX_FMT_RGB24,
                   pCodecCtx->width, pCodecCtx->height);
    LOGE("avpicture_fill");
}
/*
 * JNI entry point for MainActivity.drawFrame(Bitmap).
 *
 * Reads packets until one full video frame is decoded, converts it to RGB24
 * via the global scaler context, and copies it into the supplied bitmap.
 *
 * Fixes vs. the posted code:
 *  - parameter type was `jstring bitmap`; the AndroidBitmap_* APIs take a
 *    `jobject` (the java.lang.Bitmap reference) — `jstring` was wrong;
 *  - `Ret` -> `ret` typo in the log calls;
 *  - a failed AndroidBitmap_lockPixels() fell through and wrote to an
 *    unlocked buffer — now returns;
 *  - the early return on a missing scaler context leaked both the pixel
 *    lock and the packet — now cleans up;
 *  - removed unused locals (err, seek_target, commented-out SwsContext).
 */
void Java_cn_ct_rtmpdemo1_MainActivity_drawFrame(JNIEnv *env, jobject this, jobject bitmap)
{
    AndroidBitmapInfo info;
    void *pixels;
    int ret;
    int i;
    int frameFinished = 0;
    AVPacket packet;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed error=%d!", ret);
        return;
    }
    LOGE("Checked on the bitmap");

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed error=%d!", ret);
        return; /* BUG FIX: original fell through with an unlocked buffer */
    }
    LOGE("Grabbed the pixels");

    /* i flags "one frame rendered"; stop after the first complete frame. */
    i = 0;
    while ((i == 0) && (av_read_frame(pFormatCtx, &packet) >= 0)) {
        if (packet.stream_index == videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            if (frameFinished) {
                LOGE("packet pts %llu", packet.pts);
                if (img_convert_ctx == NULL) {
                    LOGE("could not initialize conversion context\n");
                    /* BUG FIX: release the packet and the pixel lock before
                     * bailing out; the original leaked both. */
                    av_free_packet(&packet);
                    AndroidBitmap_unlockPixels(env, bitmap);
                    return;
                }
                sws_scale(img_convert_ctx, (const uint8_t * const *) pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGB->data, pFrameRGB->linesize);
                fill_bitmap(&info, pixels, pFrameRGB);
                i = 1;
            }
        }
        av_free_packet(&packet);
    }

    AndroidBitmap_unlockPixels(env, bitmap);
}
When the frame button is clicked and the frame is read and displayed, the image looks like a grayscale sketch with no color. Could the problem be in the conversion of the frame into the bitmap?

<!-- Main posts under banner (D4) --><!-- Posts under the main text (D5) -->
Reply:
Does nobody know the answer?

Reply:
Remove fill_bitmap
No comments:
Post a Comment