一、iOS FFmpeg介绍
FFmpeg 是可以在 iOS 系统上使用的开源音视频处理库,支持 MP4、AVI、MOV 等各种封装格式的输入,可进行符合标准的音视频编解码、滤镜处理、转码、切割和拼接等操作。FFmpeg 开源、跨平台,可交叉编译为 iOS 可用的静态库或动态库,为移动端音视频开发提供了完整的基础能力。
二、iOS FFmpeg的安装与编译
准备工作:
1.下载iOS编译环境,如Xcode和Command Line Tools。
2.下载iOS FFmpeg源码,建议使用官网获取最新的稳定版本。
步骤:
1.进入下载的ffmpeg目录,执行命令:
./configure --disable-ffmpeg --disable-ffplay --disable-ffprobe --disable-doc --disable-symver
(注意:上述命令只会编译出当前 macOS 平台的库。若要生成真正可在 iOS 设备上链接的库,还需追加 --enable-cross-compile --target-os=darwin --arch=arm64 以及指向 iOS SDK 的 --cc/--sysroot 等交叉编译参数,或直接使用社区维护的 FFmpeg iOS 编译脚本。)
2.执行命令:
make
3.执行命令:
sudo make install
4.在工程中引用FFmpeg库,并且配置相关头文件和库文件路径。
三、iOS FFmpeg的使用
1.音频:录制、转换、编辑
使用苹果的AVFoundation库进行录音和播放,然后将音频文件导入到FFmpeg进行转换和编辑。
录制音频
// Record audio with AVAudioRecorder: bind the recorder to the target file
// URL and the desired format settings, then start capturing immediately.
AVAudioRecorder *audioRecorder = [[AVAudioRecorder alloc] initWithURL:url
                                                             settings:settings
                                                                error:&error];
[audioRecorder record];
播放音频
// Play back a local audio file with AVAudioPlayer.
AVAudioPlayer *audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:url
                                                                    error:&error];
[audioPlayer play];
转换音频
// Transcode one audio packet: decode it with the input codec context,
// re-encode the resulting frame with the output codec context, and write
// the encoded packet into the output container.
AVStream *in_audio_stream, *out_audio_stream;
AVCodecContext *outAudioCodecContext;

// Feed the compressed input packet to the decoder.
if (avcodec_send_packet(in_audio_codec_context, avPacket) < 0) {
    av_packet_unref(avPacket);
    return;
}

// BUG FIX: the original ignored the return value of avcodec_receive_frame()
// and used decoded_frame even when the decoder produced no frame
// (EAGAIN / EOF / error), reading stale or uninitialized data.
int ret = avcodec_receive_frame(in_audio_codec_context, decoded_frame);
if (ret < 0) {
    // EAGAIN/EOF simply mean "no frame available yet"; bail out either way.
    av_packet_unref(avPacket);
    return;
}

// Reconcile channel count and channel layout when only one of them is set.
if (decoded_frame->channels > 0 && decoded_frame->channel_layout == 0) {
    decoded_frame->channel_layout =
        av_get_default_channel_layout(decoded_frame->channels);
} else if (decoded_frame->channels == 0 && decoded_frame->channel_layout > 0) {
    decoded_frame->channels =
        av_get_channel_layout_nb_channels(decoded_frame->channel_layout);
}
av_frame_make_writable(decoded_frame);

// Mirror the decoded frame's audio parameters on the encoder context.
// NOTE(review): these fields normally must be set before avcodec_open2();
// this assumes the context is configured from the first decoded frame —
// confirm against the surrounding setup code.
outAudioCodecContext->channel_layout = decoded_frame->channel_layout;
outAudioCodecContext->channels =
    av_get_channel_layout_nb_channels(outAudioCodecContext->channel_layout);
outAudioCodecContext->sample_rate = decoded_frame->sample_rate;
outAudioCodecContext->frame_size = decoded_frame->nb_samples;
outAudioCodecContext->sample_fmt = outAudioCodecContext->codec->sample_fmts[0];

// Encode the frame and write the resulting packet to the output stream.
if (avcodec_send_frame(outAudioCodecContext, decoded_frame) < 0) {
    av_packet_unref(avPacket);
    return;
}
ret = avcodec_receive_packet(outAudioCodecContext, avPacket1);
if (ret < 0) {
    // BUG FIX: the original leaked avPacket on these early-return paths
    // (it only unreferenced avPacket1).
    av_packet_unref(avPacket);
    av_packet_unref(avPacket1);
    return;
}
avPacket1->stream_index = out_audio_stream->index;
av_write_frame(formatContext, avPacket1);
av_packet_unref(avPacket);
av_packet_unref(avPacket1);
编辑音频
// Build a minimal audio filter graph (abuffer -> abuffersink), push every
// decoded frame through it, and append the filtered PCM bytes to pcmData.
filter_graph = avfilter_graph_alloc();
AVFilter *src = avfilter_get_by_name("abuffer");
AVFilter *sink = avfilter_get_by_name("abuffersink");

// Describe the incoming audio for the abuffer source.
// BUG FIX: the original used "time_base=%d:%d", but abuffer expects the
// time base as a rational "num/den"; since ':' separates options, the
// second %d was parsed as a separate, invalid option.
char args[512];
snprintf(args, sizeof(args),
         "time_base=%d/%d:sample_rate=%d:sample_fmt=%s:channel_layout=0x%"PRIx64,
         pCodecCtx->time_base.num, pCodecCtx->time_base.den,
         pCodecCtx->sample_rate,
         av_get_sample_fmt_name(pCodecCtx->sample_fmt),
         pCodecCtx->channel_layout);

avfilter_graph_create_filter(&mosaic_filter, src, "mymosaic", args, NULL, filter_graph);
avfilter_graph_create_filter(&mosaic_sink, sink, "mymosaicout", NULL, NULL, filter_graph);
avfilter_link(mosaic_filter, 0, mosaic_sink, 0);

// Only run the demux/decode/filter loop if the graph is actually valid.
if (avfilter_graph_config(filter_graph, NULL) >= 0) {
    // BUG FIX: the original av_frame_alloc()'d a new frame on every sink
    // read but only called av_frame_unref(), leaking the AVFrame structs.
    // Reuse a single frame and free it once at the end.
    AVFrame *filtered = av_frame_alloc();
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        if (packet.stream_index == stream_index) {
            avcodec_send_packet(pCodecCtx, &packet);
            while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
                av_buffersrc_add_frame_flags(mosaic_filter, pFrame, 0);
                // Drain every frame the sink has buffered for this input.
                while (1) {
                    ret = av_buffersink_get_frame(mosaic_sink, filtered);
                    if (ret < 0) {
                        // EAGAIN/EOF = nothing more to read; other values are errors.
                        break;
                    }
                    // NOTE(review): linesize[0] may include alignment padding;
                    // av_samples_get_buffer_size() gives the exact payload size.
                    [pcmData appendBytes:filtered->data[0] length:filtered->linesize[0]];
                    av_frame_unref(filtered);
                }
            }
        }
        av_packet_unref(&packet);
    }
    av_frame_free(&filtered);
}
avfilter_graph_free(&filter_graph);
2.视频:录制、转换、编辑
使用苹果的AVFoundation库进行录制和播放,然后将视频文件导入到FFmpeg进行转换和编辑。
录制视频
// Show a live camera preview and record the capture session's video output
// to an MPEG-4 file with AVAssetWriter.
AVCaptureVideoPreviewLayer *preview = [AVCaptureVideoPreviewLayer layerWithSession:session];
[preview setFrame:self.view.bounds];
[self.view.layer addSublayer:preview];
// Fix the capture orientation on the video output's first connection.
// NOTE(review): keying landscape-left vs. landscape-right off isVideoMirrored
// assumes mirroring implies the front camera — confirm against the session setup.
AVCaptureConnection *connection = videoOutput.connections[0];
if (connection.isVideoMirrored) { connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft; } else { connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight; }
// Create the writer and a real-time video input for it.
_videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath: FilePath] fileType:AVFileTypeMPEG4 error:&error];
_videoWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:_videoCompressionSettings];
// Real-time capture: the input must not stall waiting for media data.
[_videoWriterInput setExpectsMediaDataInRealTime:YES];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:kCMTimeZero];
// NOTE(review): markAsFinished / finishWriting are invoked immediately after
// starting the session — in real code, sample buffers must be appended to
// _videoWriterInput between these two points or the output file is empty.
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{}];
播放视频
// Play a video with AVPlayer: attach an AVPlayerLayer that fills the
// current view, then start playback.
AVPlayer *videoPlayer = [AVPlayer playerWithURL:someURL];
AVPlayerLayer *videoLayer = [AVPlayerLayer playerLayerWithPlayer:videoPlayer];
videoLayer.frame = self.view.bounds;
[self.view.layer addSublayer:videoLayer];
[videoPlayer play];
转换视频
// Capture video frames from the device camera via the avfoundation input
// device, decode them, convert each frame to YUV420P with libswscale, and
// append the raw planar data to dst_fd.
AVFormatContext *pFormatCtx = NULL;
int videoindex = -1;
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodec = NULL;
AVFrame *pFrame = NULL, *pFrameYUV = NULL;
AVPacket *packet = NULL;
int y_size = 0;
int frameGot;
struct SwsContext *img_convert_ctx = NULL;

// Required only on FFmpeg < 4.0; harmless (deprecated no-ops) on newer releases.
avcodec_register_all();
av_register_all();

// Select the avfoundation capture device (":<index>" = video device index).
_av_input_format = av_find_input_format("avfoundation");
NSString *input_str = [NSString stringWithFormat:@":%d", DEVICE_VIDEO_POSITION_BACK];
NSString *video_size = @"720x1280"; // adjust to the required capture size
const char *sz_input_str = [input_str UTF8String];
const char *sz_video_size = [video_size UTF8String];
av_dict_set(&_av_dict, "video_size", sz_video_size, 0);
av_dict_set(&_av_dict, "framerate", "30", 0);

if (avformat_open_input(&pFormatCtx, sz_input_str, _av_input_format, &_av_dict) != 0) {
    printf("Couldn't open input stream.\n");
    return -1;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
    printf("Couldn't find stream information.\n");
    return -1;
}

// Locate the first video stream in the input.
for (int i = 0; i < pFormatCtx->nb_streams; i++) {
    if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        videoindex = i;
        break;
    }
}
if (videoindex == -1) {
    printf("Didn't find a video stream.\n");
    return -1;
}

// Set up the decoder from the stream's codec parameters.
pCodecCtx = avcodec_alloc_context3(NULL);
if (!pCodecCtx) {
    printf("Could not allocate AVCodecContext.\n");
    return -1;
}
avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoindex]->codecpar);
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (!pCodec) {
    printf("Failed to find codec.\n");
    return -1;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
    printf("Failed to open codec.\n");
    return -1;
}

// BUG FIX: the original never created the scaler and passed a NULL
// img_convert_ctx to sws_scale(), which crashes at the first frame.
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                 pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
                                 SWS_BICUBIC, NULL, NULL, NULL);
if (!img_convert_ctx) {
    printf("Failed to create scaling context.\n");
    return -1;
}

pFrame = av_frame_alloc();
pFrameYUV = av_frame_alloc();
y_size = pCodecCtx->width * pCodecCtx->height;
// BUG FIX: align=1 keeps the three planes tightly packed so the single
// contiguous fwrite below is exact; with align=16 the chroma rows of a
// 720-wide frame are padded and the written file contains garbage bytes.
av_image_alloc(pFrameYUV->data, pFrameYUV->linesize, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, 1);

// BUG FIX: av_malloc(sizeof(AVPacket)) leaves the packet fields
// uninitialized; av_packet_alloc() returns a properly initialized packet.
packet = av_packet_alloc();
while (av_read_frame(pFormatCtx, packet) >= 0) {
    if (packet->stream_index == videoindex) {
        avcodec_send_packet(pCodecCtx, packet);
        // Drain every frame the decoder produces for this packet.
        while (1) {
            frameGot = avcodec_receive_frame(pCodecCtx, pFrame);
            if (frameGot != 0) {
                break;
            }
            sws_scale(img_convert_ctx, (const uint8_t * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
            // Y plane (y_size) + U and V planes (y_size/4 each), contiguous.
            fwrite(pFrameYUV->data[0], 1, y_size * 3 / 2, dst_fd);
        }
    }
    av_packet_unref(packet);
}

// Cleanup. BUG FIX: the original leaked the sws context and the image
// buffer, and used the deprecated avcodec_close() without freeing the context.
sws_freeContext(img_convert_ctx);
av_freep(&pFrameYUV->data[0]);
av_packet_free(&packet);
av_frame_free(&pFrame);
av_frame_free(&pFrameYUV);
avcodec_free_context(&pCodecCtx);
avformat_close_input(&pFormatCtx);
av_dict_free(&_av_dict);
编辑视频
// Open an input movie, locate its video stream, and set up an MP4 output
// context with a matching encoder. (The snippet is truncated below — the
// remuxing loop that follows is not shown.)
AVFormatContext *inputFormatContext = avformat_alloc_context();
AVFormatContext *outputFormatContext = avformat_alloc_context();
AVOutputFormat *outputFormat = NULL;
AVStream *inputStream = NULL, *outputStream = NULL;;
AVCodecContext *inputCodecContext = NULL, *outputCodecContext = NULL;
AVCodec *inputCodec = NULL, *outputCodec = NULL;
AVPacket *criticalPacket = NULL, *outputPacket = NULL;
AVFrame *inputFrame = NULL, *outputFrame = NULL;
uint8_t *outputBuffer = NULL;
int frameFinished = 0, outputBufferSize, outputFrameCount = 0;
// Deprecated in FFmpeg >= 4.0 but still required on older releases.
av_register_all();
avformat_network_init();
// On every failure path below, the movie duration is reset and the capture
// connection is forced back to portrait before bailing out.
if (avformat_open_input(&inputFormatContext, [inputUrl.absoluteString UTF8String], NULL, NULL) < 0) { printf("Could not open input file.\n"); [self setMovieDuration:kCMTimeZero]; AVCaptureConnection *connection = _videoOutput.connections[0]; connection.videoOrientation = AVCaptureVideoOrientationPortrait; return; }
if (avformat_find_stream_info(inputFormatContext, NULL) < 0) { printf("Could not find stream information.\n"); [self setMovieDuration:kCMTimeZero]; AVCaptureConnection *connection = _videoOutput.connections[0]; connection.videoOrientation = AVCaptureVideoOrientationPortrait; return; }
// NOTE(review): av_find_best_stream() RETURNS the stream index; discarding
// the return value and then reading inputCodec->index is wrong — AVCodec has
// no stream-index field, so the next line indexes streams[] with bogus data.
if (av_find_best_stream(inputFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, &inputCodec, 0) < 0) { printf("Could not find any video stream.\n"); [self setMovieDuration:kCMTimeZero]; AVCaptureConnection *connection = _videoOutput.connections[0]; connection.videoOrientation = AVCaptureVideoOrientationPortrait; return; }
// NOTE(review): streams[i]->codec is deprecated; modern code should use
// codecpar + avcodec_alloc_context3/avcodec_parameters_to_context instead.
inputCodecContext = inputFormatContext->streams[inputCodec->index]->codec;
// NOTE(review): choosing stream 1 vs. 0 based on time_base.num == 0 looks
// arbitrary — the index returned by av_find_best_stream() should be used.
inputStream = inputFormatContext->streams[inputCodecContext->time_base.num == 0 ? 
1 : 0];
// Prepare the MP4 output: container format, context, stream, and encoder.
outputFormat = av_guess_format("mp4", "video.mp4", NULL);
if (outputFormat == NULL) { printf("Could not guess output format.\n"); return; }
if (avformat_alloc_output_context2(&outputFormatContext, outputFormat, NULL, NULL) < 0) { printf("Could not allocate output context.\n"); return; }
outputStream = avformat_new_stream(outputFormatContext, NULL);
if (outputStream == NULL) { printf("Could not create video stream.\n"); return; }
// Encode the output with the same codec the input used.
outputCodec = avcodec_find_encoder(inputCodec->id);
outputCodecContext = avcodec_alloc_context3(outputCodec);
if (outputCodecContext == NULL) { printf("Could not allocate codec context.\n"); return; }
// NOTE(review): outputStream->codec is deprecated; width/height/pix_fmt/
// time_base are never set on the encoder before avcodec_open2() — confirm
// this is handled in the omitted portion of the example.
outputStream->codec = outputCodecContext;
if (avcodec_open2(outputCodecContext, outputCodec, NULL) < 0) { printf("Could not open codec.\n"); return; }
// MP4 stores codec extradata globally rather than in-band.
if (outputFormatContext->oformat->flags & AVFMT_GLOBALHEADER) { outputCodecContext->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;