Libavcodec
AVFrame to Android Bitmap (FFmpeg with Android)
How to convert an FFmpeg YUV420P image directly to an Android Bitmap at the Android NDK layer. The context and linesize need to be set up only once. SWS_X is the flag that selects the interpolation (scaling) algorithm; it can also be given as SWS_BICUBIC, etc.
struct SwsContext* img_convert_ctx = 0;
int linesize[4] = { 0, 0, 0, 0 };

/* source dimensions are passed as (width, height) */
img_convert_ctx = sws_getContext(IMG_WIDTH, IMG_HEIGHT, PIX_FMT_YUV420P,
                                 lBitmapInfo.width, lBitmapInfo.height,
                                 PIX_FMT_RGBA, SWS_X, 0, 0, 0);
linesize[0] = 4 * lBitmapInfo.width;  /* RGBA = 4 bytes per pixel */

AndroidBitmap_lockPixels(g_env, g_Bitmap, (void**)&lBitmapContent);
/* decode_picture: the decoded YUV420P AVFrame */
sws_scale(img_convert_ctx, decode_picture->data, decode_picture->linesize,
          0, IMG_HEIGHT,
          (uint8_t* const*)&lBitmapContent, linesize);
AndroidBitmap_unlockPixels(g_env, g_Bitmap);
sws_freeContext(img_convert_ctx);
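The lBitmapInfo used above has to be filled in beforehand. A minimal sketch of that setup, assuming g_env and g_Bitmap are the cached JNIEnv* and Bitmap jobject:

#include <android/bitmap.h>

AndroidBitmapInfo lBitmapInfo;
void* lBitmapContent = 0;

/* query the Java-side Bitmap's width/height/stride */
AndroidBitmap_getInfo(g_env, g_Bitmap, &lBitmapInfo);

/* the PIX_FMT_RGBA output path assumes an ARGB_8888 Bitmap */
if (lBitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
    return; /* or handle the mismatch */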
OpenCV with FFmpeg (converting between image formats)
IplImage to AVFrame
void IplImage_to_AVFrame(IplImage* iplImage, AVFrame* avFrame,
                         int frameWidth, int frameHeight, enum PixelFormat pix_fmt)
{
    struct SwsContext* img_convert_ctx = 0;
    int linesize[4] = { 0, 0, 0, 0 };

    img_convert_ctx = sws_getContext(iplImage->width, iplImage->height,
                                     PIX_FMT_BGR24,
                                     frameWidth, frameHeight,
                                     pix_fmt, SWS_BICUBIC, 0, 0, 0);
    if (img_convert_ctx != 0)
    {
        /* use widthStep: IplImage rows are 4-byte aligned, so 3 * width can be wrong */
        linesize[0] = iplImage->widthStep;
        sws_scale(img_convert_ctx,
                  (const uint8_t* const*)&iplImage->imageData, linesize,
                  0, iplImage->height,
                  avFrame->data, avFrame->linesize);
        sws_freeContext(img_convert_ctx);
    }
}
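The destination avFrame must already have its picture buffers allocated. A minimal usage sketch with the avpicture helpers from the same API generation as the code above (the file name and 640x480 size are assumptions for illustration):

AVFrame* frame = av_frame_alloc();
avpicture_alloc((AVPicture*)frame, PIX_FMT_YUV420P, 640, 480);

IplImage* img = cvLoadImage("input.jpg", CV_LOAD_IMAGE_COLOR);  /* BGR24 */
IplImage_to_AVFrame(img, frame, 640, 480, PIX_FMT_YUV420P);

cvReleaseImage(&img);
avpicture_free((AVPicture*)frame);
av_free(frame);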
AVFrame to IplImage
void AVFrame_to_IplImage(AVFrame* avFrame, IplImage* iplImage)
{
    struct SwsContext* img_convert_ctx = 0;
    int linesize[4] = { 0, 0, 0, 0 };

    img_convert_ctx = sws_getContext(avFrame->width, avFrame->height,
                                     (enum PixelFormat)avFrame->format,
                                     iplImage->width, iplImage->height,
                                     PIX_FMT_BGR24, SWS_BICUBIC, 0, 0, 0);
    if (img_convert_ctx != 0)
    {
        /* again, honor the IplImage row alignment */
        linesize[0] = iplImage->widthStep;
        sws_scale(img_convert_ctx, avFrame->data, avFrame->linesize,
                  0, avFrame->height,
                  (uint8_t* const*)&iplImage->imageData, linesize);
        sws_freeContext(img_convert_ctx);
    }
}
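Going the other way, the IplImage is allocated with the OpenCV C API first. The 640x480 size need not match the frame, since sws_scale resizes; decoded_frame is an assumed AVFrame from the decoder:

/* decoded_frame: an AVFrame that came out of the decoder (assumed) */
IplImage* img = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 3);  /* BGR24 */
AVFrame_to_IplImage(decoded_frame, img);
cvSaveImage("frame.jpg", img);  /* or display it */
cvReleaseImage(&img);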
Transcoding with FFmpeg
static AVFormatContext *fmt_ctx = NULL;
static AVCodecContext *dec_ctx = NULL;
static int vst_idx = -1;

void open_input_file(const char *filename)
{
    AVCodec *dec;

    /* open the input and read the stream headers */
    avformat_open_input(&fmt_ctx, filename, NULL, NULL);
    avformat_find_stream_info(fmt_ctx, NULL);

    /* select the video stream */
    vst_idx = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0);
    dec_ctx = fmt_ctx->streams[vst_idx]->codec;

    /* init the video decoder */
    avcodec_open2(dec_ctx, dec, NULL);
}

void close_input_file()
{
    unsigned int i;
    for (i = 0; i < fmt_ctx->nb_streams; i++) {
        AVStream *st = fmt_ctx->streams[i];
        avcodec_close(st->codec);
    }
    avformat_close_input(&fmt_ctx);
}
void encode_video()
{
    AVPacket pkt, outpkt;
    AVFrame *frm;
    int got_frame, got_output;
    AVCodec *enc;
    AVCodecContext *enc_ctx = NULL;

    /* find the H.264 video encoder */
    enc = avcodec_find_encoder(AV_CODEC_ID_H264);
    enc_ctx = avcodec_alloc_context3(enc);

    /* put sample parameters */
    enc_ctx->bit_rate = 400000;
    /* resolution must be a multiple of two */
    enc_ctx->width = 352;
    enc_ctx->height = 288;
    /* frames per second */
    enc_ctx->time_base = (AVRational){1, 25};
    enc_ctx->gop_size = 10; /* emit one intra frame every ten frames */
    enc_ctx->max_b_frames = 1;
    enc_ctx->pix_fmt = AV_PIX_FMT_YUV420P;

    /* init the video encoder */
    avcodec_open2(enc_ctx, enc, NULL);

    frm = av_frame_alloc();
    while (av_read_frame(fmt_ctx, &pkt) >= 0) {
        if (pkt.stream_index == vst_idx) {
            avcodec_get_frame_defaults(frm);
            /* decode the frame */
            avcodec_decode_video2(dec_ctx, frm, &got_frame, &pkt);
            if (got_frame) {
                av_init_packet(&outpkt);
                outpkt.data = NULL;
                outpkt.size = 0;
                /* encode the frame */
                avcodec_encode_video2(enc_ctx, &outpkt, frm, &got_output);
                if (got_output) {
                    /* write or send the frame */
                    av_free_packet(&outpkt);
                }
            }
        }
        av_free_packet(&pkt);
    }

    /* get the delayed frames: pass a NULL frame to drain the encoder */
    got_output = 1;
    while (got_output) {
        av_init_packet(&outpkt);
        outpkt.data = NULL;
        outpkt.size = 0;
        avcodec_encode_video2(enc_ctx, &outpkt, NULL, &got_output);
        if (got_output) {
            /* write or send the frame */
            av_free_packet(&outpkt);
        }
    }

    av_frame_free(&frm);
    avcodec_close(enc_ctx);
    av_free(enc_ctx);
}
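The /* write or send the frame */ step above is intentionally left open. For a raw H.264 elementary stream it can be as simple as appending each packet to a file; a minimal sketch, where out.h264 is an assumed output name:

FILE *outfile = fopen("out.h264", "wb");  /* open once, before the loop */

/* inside each got_output branch, before av_free_packet(&outpkt): */
fwrite(outpkt.data, 1, outpkt.size, outfile);

fclose(outfile);  /* after the encoder has been drained */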
Playback with FFmpeg
static AVFormatContext *fmt_ctx = NULL;

void play()
{
    AVStream *st;
    AVPacket pkt;
    AVFrame *frm = av_frame_alloc();
    int got_frame;

    while (av_read_frame(fmt_ctx, &pkt) >= 0) {
        st = fmt_ctx->streams[pkt.stream_index];
        avcodec_get_frame_defaults(frm);
        got_frame = 0; /* reset in case the stream is neither audio nor video */
        switch (st->codec->codec_type) {
        case AVMEDIA_TYPE_AUDIO:
            avcodec_decode_audio4(st->codec, frm, &got_frame, &pkt);
            break;
        case AVMEDIA_TYPE_VIDEO:
            avcodec_decode_video2(st->codec, frm, &got_frame, &pkt);
            break;
        default:
            break;
        }
        if (got_frame) {
            /* display or render the frame */
        }
        av_free_packet(&pkt);
    }
    av_frame_free(&frm);
}
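To pace rendering, the decoded frame's timestamp can be converted to seconds with the stream time base. A sketch for the inside of the got_frame branch (pkt_pts is the frame field of this API generation; newer code would use best_effort_timestamp):

if (frm->pkt_pts != AV_NOPTS_VALUE) {
    double pts_sec = frm->pkt_pts * av_q2d(st->time_base);
    /* schedule the frame for display at pts_sec */
}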
Opening a media stream with FFmpeg
static AVFormatContext *fmt_ctx = NULL;

void open_input_file(const char *filename)
{
    unsigned int i;

    avformat_open_input(&fmt_ctx, filename, NULL, NULL);
    avformat_find_stream_info(fmt_ctx, NULL);

    for (i = 0; i < fmt_ctx->nb_streams; i++) {
        AVStream *st = fmt_ctx->streams[i];
        /* look up the decoder for each stream before opening it */
        AVCodec *dec = avcodec_find_decoder(st->codec->codec_id);
        avcodec_open2(st->codec, dec, NULL);
    }
}

void close_input_file()
{
    unsigned int i;
    for (i = 0; i < fmt_ctx->nb_streams; i++) {
        AVStream *st = fmt_ctx->streams[i];
        avcodec_close(st->codec);
    }
    avformat_close_input(&fmt_ctx);
}
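After opening, av_dump_format prints the detected streams, which makes it easy to verify the setup; the file name here is only an example:

open_input_file("input.mp4");
av_dump_format(fmt_ctx, 0, "input.mp4", 0);  /* last arg 0: dump as input */
close_input_file();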
How to record (encode) video with FFmpeg
Favorite site
- libav-user: Encoding H.264 and AAC audio into MPEG2 TS
- blogs.gentoo.org - New AVCodec API
- converting images to mp4 using ffmpeg on iphone
- Image to Videofile
- A question about converting video (mp4) with FFmpeg
Encoding Tutorials
- Producing an MP4 file with the FFmpeg muxing.c example
- [Recommended] MPEG encoding method using FFmpeg's api-example 4
- Understanding FFmpeg Video Encoding
- How to write a video encoder with ffmpeg
- Writing "C" code to encode and decode H.264 by using libavcodec