概述
上一篇博客介绍了关于流媒体的基础知识和一些学习书籍和网站,这一篇博客主要介绍FFmpeg的使用
一、属性以及作用的介绍:
AVFormatContext:保存需要读入的文件格式信息,比如流的个数以及流数据。
AVCodecContext:保存了相应流的详细编码信息,比如视频的宽、高,编码类型。
pCodec:真正的编解码器,其中有编解码需要调用的函数。
AVFrame:用于保存数据帧的数据结构,这里的两个帧分别是保存颜色转换前后的两个图像
AVPacket:解析文件时会将音视频帧读入到packet中
二、播放原理:
通过FFmpeg对视频进行解码,解码出每一帧图片,然后按一定的时间间隔依次显示每一帧图片。
三、FFMpeg的集成:
1、下载脚本
2、在终端执行脚本./build-ffmpeg.sh
执行脚本需要等待一段时间
.......
脚本已经执行完毕。
脚本执行完毕之后会在脚本文件中生成编译后的静态库:
将依赖库添加到项目中,添加以下依赖库:
TARGET -> Build Settings->Search Paths
ZRKFFmpegObject.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
@interface ZRKFFmpegObject : NSObject
/* Most recently decoded frame, converted to a UIImage. */
@property (nonatomic, strong, readonly) UIImage *currentImage;
/* Width and height of the source video frames, in pixels. */
@property (nonatomic, assign, readonly) int sourceWidth, sourceHeight;
/* Output image size. Defaults to the source size. */
@property (nonatomic,assign) int outputWidth, outputHeight;
/* Total video duration, in seconds. */
@property (nonatomic, assign, readonly) double duration;
/* Current playback position, in seconds. */
@property (nonatomic, assign, readonly) double currentTime;
/* Frame rate of the video. */
@property (nonatomic, assign, readonly) double fps;
/* Initializes the decoder with a video path or URL. Returns nil on failure. */
- (instancetype)initWithVideo:(NSString *)moviePath;
/* Switches decoding to a different video resource. */
- (void)replaceTheResources:(NSString *)moviePath;
/* Reopens the current resource so playback can restart ("replay"). */
- (void)redialPaly;
/* Reads the next frame from the video stream. Returns NO when no frame
 * could be read (end of video). */
- (BOOL)stepFrame;
/* Seeks to the nearest keyframe at the specified time, in seconds. */
- (void)seekTime:(double)seconds;
@end
ZRKFFmpegObject.m
#import "ZRKFFmpegObject.h"
@interface ZRKFFmpegObject()
/* Path of the currently opened resource, kept so -redialPaly can reopen it.
 * NOTE(review): the name looks like a typo for "currentPath" — renaming
 * would require touching every use, so it is only flagged here. */
@property (nonatomic, copy) NSString *cruutenPath;
@end
@implementation ZRKFFmpegObject
{
    AVFormatContext *XYQFormatCtx;   // demuxer context for the opened input
    AVCodecContext  *XYQCodecCtx;    // decoder context of the selected video stream
    AVFrame         *XYQFrame;       // most recently decoded frame (decoder's native pixel format)
    AVStream        *stream;         // the selected video stream
    AVPacket        packet;          // packet held across calls so -currentTime can read its pts
    AVPicture       picture;         // RGB24 destination buffer filled by sws_scale
    int             videoStream;     // index of the video stream within the input
    double          fps;             // detected frame rate (falls back to 30)
    BOOL            isReleaseResources; // YES once releaseResources has run
}
#pragma mark ------------------------------------
#pragma mark Initialization

/* Opens the movie at the given path/URL. Returns nil on failure. */
- (instancetype)initWithVideo:(NSString *)moviePath {
    if (!(self = [super init])) return nil;
    if ([self initializeResources:[moviePath UTF8String]]) {
        self.cruutenPath = [moviePath copy];
        return self;
    }
    return nil;
}

/* Opens the input, selects the best video stream and opens its decoder.
 * On failure a partially opened input is closed and NO is returned. */
- (BOOL)initializeResources:(const char *)filePath {
    isReleaseResources = NO;
    AVCodec *pCodec = NULL;
    // Register all demuxers/decoders (safe to call repeatedly; legacy
    // pre-4.x FFmpeg API, consistent with the rest of this file).
    avcodec_register_all();
    av_register_all();
    avformat_network_init();
    // Open the video file or network URL.
    if (avformat_open_input(&XYQFormatCtx, filePath, NULL, NULL) != 0) {
        NSLog(@"打开文件失败");
        goto initError;
    }
    // Probe the container for stream information.
    if (avformat_find_stream_info(XYQFormatCtx, NULL) < 0) {
        NSLog(@"检查数据流失败");
        goto initError;
    }
    // Select the best video stream.
    if ((videoStream = av_find_best_stream(XYQFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0)) < 0) {
        NSLog(@"没有找到第一个视频流");
        goto initError;
    }
    // Grab the stream and its codec context (legacy stream->codec API).
    stream = XYQFormatCtx->streams[videoStream];
    XYQCodecCtx = stream->codec;
#if DEBUG
    // Dump the stream details to the console for debugging.
    av_dump_format(XYQFormatCtx, videoStream, filePath, 0);
#endif
    // Prefer the container's average frame rate; fall back to 30 fps.
    if (stream->avg_frame_rate.den && stream->avg_frame_rate.num) {
        fps = av_q2d(stream->avg_frame_rate);
    } else {
        fps = 30;
    }
    // Find and open the decoder for this stream.
    pCodec = avcodec_find_decoder(XYQCodecCtx->codec_id);
    if (pCodec == NULL) {
        NSLog(@"没有找到解码器");
        goto initError;
    }
    if (avcodec_open2(XYQCodecCtx, pCodec, NULL) < 0) {
        NSLog(@"打开解码器失败");
        goto initError;
    }
    // Allocate the reusable decode frame and default the output size
    // to the source size.
    XYQFrame = av_frame_alloc();
    _outputWidth  = XYQCodecCtx->width;
    _outputHeight = XYQCodecCtx->height;
    return YES;
initError:
    // Fix: close a partially opened input so a failed init does not leak.
    if (XYQFormatCtx) avformat_close_input(&XYQFormatCtx);
    return NO;
}

/* Seeks to the nearest keyframe at or before the given time (seconds). */
- (void)seekTime:(double)seconds {
    AVRational timeBase = XYQFormatCtx->streams[videoStream]->time_base;
    // Convert seconds into the stream's time_base units.
    int64_t targetFrame = (int64_t)((double)timeBase.den / timeBase.num * seconds);
    // Fix: the timestamp above is in time_base units, not a frame number,
    // so AVSEEK_FLAG_FRAME was the wrong flag for this call.
    // AVSEEK_FLAG_BACKWARD lands on the nearest preceding keyframe.
    avformat_seek_file(XYQFormatCtx,
                       videoStream,
                       0,
                       targetFrame,
                       targetFrame,
                       AVSEEK_FLAG_BACKWARD);
    // Discard decoder state belonging to the old position.
    avcodec_flush_buffers(XYQCodecCtx);
}

/* Reads and decodes packets until one complete video frame is available.
 * Returns NO at end of stream, at which point resources are released
 * (matching the original behaviour). */
- (BOOL)stepFrame {
    int frameFinished = 0;
    // Fix: release the packet retained from the previous call (it is kept
    // alive so -currentTime can read its pts). Without this, one
    // compressed packet leaked per displayed frame.
    av_packet_unref(&packet);
    while (!frameFinished && av_read_frame(XYQFormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStream) {
            avcodec_decode_video2(XYQCodecCtx,
                                  XYQFrame,
                                  &frameFinished,
                                  &packet);
        }
        // Fix: packets from other streams, and video packets that did not
        // complete a frame, must be unreffed here or they leak as well.
        if (!frameFinished) av_packet_unref(&packet);
    }
    if (frameFinished == 0 && isReleaseResources == NO) {
        [self releaseResources];
    }
    return frameFinished != 0;
}

/* Switches decoding to a different resource. */
- (void)replaceTheResources:(NSString *)moviePath {
    if (!isReleaseResources) {
        [self releaseResources];
    }
    self.cruutenPath = [moviePath copy];
    [self initializeResources:[moviePath UTF8String]];
}

/* Reopens the current resource so playback can restart from the top. */
- (void)redialPaly {
    // Fix: release any still-open contexts first; the original re-opened
    // the input without releasing, leaking format/codec contexts on every
    // replay that happened before end-of-stream.
    if (!isReleaseResources) {
        [self releaseResources];
    }
    [self initializeResources:[self.cruutenPath UTF8String]];
}
#pragma mark ------------------------------------
#pragma mark Property accessors

- (void)setOutputWidth:(int)newValue {
    if (_outputWidth == newValue) return;
    _outputWidth = newValue;
}

- (void)setOutputHeight:(int)newValue {
    if (_outputHeight == newValue) return;
    _outputHeight = newValue;
}

/* Latest decoded frame converted to RGB as a UIImage, or nil. */
- (UIImage *)currentImage {
    // Fix: also guard against XYQFrame itself being NULL, which happens
    // after releaseResources has freed it.
    if (!XYQFrame || !XYQFrame->data[0]) return nil;
    return [self imageFromAVPicture];
}

/* Total duration in seconds (container duration is in AV_TIME_BASE units). */
- (double)duration {
    return (double)XYQFormatCtx->duration / AV_TIME_BASE;
}

/* Presentation time (seconds) of the packet that produced the current frame. */
- (double)currentTime {
    AVRational timeBase = XYQFormatCtx->streams[videoStream]->time_base;
    return packet.pts * (double)timeBase.num / timeBase.den;
}

- (int)sourceWidth {
    return XYQCodecCtx->width;
}

- (int)sourceHeight {
    return XYQCodecCtx->height;
}

- (double)fps {
    return fps;
}
#pragma mark --------------------------
#pragma mark - Internal helpers

/* Converts the current decoded frame to RGB24 with swscale and wraps the
 * pixel buffer in a UIImage. Returns nil if no scaler could be created. */
- (UIImage *)imageFromAVPicture
{
    // (Re)allocate the RGB destination buffer at the requested output size.
    avpicture_free(&picture);
    avpicture_alloc(&picture, AV_PIX_FMT_RGB24, _outputWidth, _outputHeight);
    // Fix: use the decoder's actual pixel format rather than hard-coding
    // AV_PIX_FMT_YUV420P — not every stream decodes to YUV420P.
    struct SwsContext *imgConvertCtx = sws_getContext(XYQFrame->width,
                                                      XYQFrame->height,
                                                      XYQCodecCtx->pix_fmt,
                                                      _outputWidth,
                                                      _outputHeight,
                                                      AV_PIX_FMT_RGB24,
                                                      SWS_FAST_BILINEAR,
                                                      NULL,
                                                      NULL,
                                                      NULL);
    if (imgConvertCtx == NULL) return nil;
    sws_scale(imgConvertCtx,
              (const uint8_t * const *)XYQFrame->data,
              XYQFrame->linesize,
              0,
              XYQFrame->height,
              picture.data,
              picture.linesize);
    sws_freeContext(imgConvertCtx);
    // Wrap the RGB buffer in a CGImage: 8 bits per channel, 24 per pixel.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
    CFDataRef data = CFDataCreate(kCFAllocatorDefault,
                                  picture.data[0],
                                  picture.linesize[0] * _outputHeight);
    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef cgImage = CGImageCreate(_outputWidth,
                                       _outputHeight,
                                       8,
                                       24,
                                       picture.linesize[0],
                                       colorSpace,
                                       bitmapInfo,
                                       provider,
                                       NULL,
                                       NO,
                                       kCGRenderingIntentDefault);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(provider);
    CFRelease(data);
    return image;
}
#pragma mark --------------------------
#pragma mark - Resource teardown

/* Releases every FFmpeg resource owned by this object. isReleaseResources
 * lets callers avoid releasing twice. */
- (void)releaseResources {
    NSLog(@"释放资源");
    isReleaseResources = YES;
    // Free the RGB conversion buffer.
    avpicture_free(&picture);
    // Drop the last packet held for -currentTime.
    av_packet_unref(&packet);
    // Fix: av_frame_free (rather than av_free) also releases the frame's
    // data buffers and nils the pointer, pairing with av_frame_alloc.
    av_frame_free(&XYQFrame);
    // Close the decoder.
    if (XYQCodecCtx) avcodec_close(XYQCodecCtx);
    // Close the input (also nils XYQFormatCtx).
    if (XYQFormatCtx) avformat_close_input(&XYQFormatCtx);
    avformat_network_deinit();
}
@end
#import "FFmpegController.h"
#import "ZRKFFmpegObject.h"
// Linear interpolation: blends A toward B by fraction C (0..1).
// Fix: parenthesize C in the expansion so expression arguments
// (e.g. LERP(a, b, t + 0.1)) expand with the intended precedence.
#define LERP(A,B,C) ((A)*(1.0-(C))+(B)*(C))
@interface FFmpegController ()
/* Shows the decoded video frames. */
@property (weak, nonatomic) IBOutlet UIImageView *ImageView;
/* Displays the smoothed frames-per-second readout. */
@property (weak, nonatomic) IBOutlet UILabel *fps;
/* Starts playback; disabled while a video is playing. */
@property (weak, nonatomic) IBOutlet UIButton *playBtn;
/* Triggers a replay once playback has finished. */
@property (weak, nonatomic) IBOutlet UIButton *TimerBtn;
/* Shows the elapsed time as HH:MM:SS. */
@property (weak, nonatomic) IBOutlet UILabel *TimerLabel;
/* The FFmpeg-backed decoder driving this screen. */
@property (nonatomic, strong) ZRKFFmpegObject *video;
/* Exponentially smoothed fps of the previous frames (-1 = not yet set). */
@property (nonatomic, assign) float lastFrameTime;
@end
@implementation FFmpegController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Open a network video. To play a local file instead, pass a file
    // path to -initWithVideo:.
    self.video = [[ZRKFFmpegObject alloc] initWithVideo:@"http://wvideo.spriteapp.cn/video/2016/0328/56f8ec01d9bfe_wpd.mp4"];
    // Decoded frames arrive upside-down relative to UIKit coordinates,
    // so rotate the image view by 180 degrees.
    // Fix: the original wrote the bare identifier `ImageView`, which does
    // not resolve — the property must be reached via self (its backing
    // ivar is `_ImageView`).
    [self.ImageView setTransform:CGAffineTransformMakeRotation(M_PI)];
}

/* Starts playback from the beginning, driven by a repeating timer that
 * fires once per frame interval. */
- (IBAction)PlayClick:(UIButton *)sender {
    [_playBtn setEnabled:NO];
    _lastFrameTime = -1;
    // Rewind to 0.0 seconds before playing.
    [_video seekTime:0.0];
    // NOTE(review): a repeating NSTimer retains its target; it is
    // invalidated in -displayNextFrame: when the stream ends.
    [NSTimer scheduledTimerWithTimeInterval:1 / _video.fps
                                     target:self
                                   selector:@selector(displayNextFrame:)
                                   userInfo:nil
                                    repeats:YES];
}

/* Restarts playback once the previous run has finished (the play button
 * is only re-enabled at end of stream). */
- (IBAction)TimerCilick:(id)sender {
    if (_playBtn.enabled) {
        [_video redialPaly];
        [self PlayClick:_playBtn];
    }
}

/* Timer callback: decodes and displays the next frame, updates the
 * elapsed-time label and a smoothed fps readout. Stops the timer at
 * end of stream. */
- (void)displayNextFrame:(NSTimer *)timer {
    NSTimeInterval startTime = [NSDate timeIntervalSinceReferenceDate];
    self.TimerLabel.text = [self dealTime:_video.currentTime];
    if (![_video stepFrame]) {
        // End of stream: stop the timer and re-enable the play button.
        [timer invalidate];
        [_playBtn setEnabled:YES];
        return;
    }
    _ImageView.image = _video.currentImage;
    // Instantaneous fps of this frame, exponentially smoothed (factor 0.8)
    // into the running estimate.
    float frameTime = 1.0 / ([NSDate timeIntervalSinceReferenceDate] - startTime);
    if (_lastFrameTime < 0) {
        _lastFrameTime = frameTime;
    } else {
        _lastFrameTime = LERP(frameTime, _lastFrameTime, 0.8);
    }
    [_fps setText:[NSString stringWithFormat:@"fps %.0f",_lastFrameTime]];
}

/* Formats a duration given in seconds as HH:MM:SS. */
- (NSString *)dealTime:(double)time {
    int totalSeconds = (int)time;
    int hours = totalSeconds / 3600;
    int minutes = (totalSeconds % 3600) / 60;
    int seconds = totalSeconds % 60;
    return [NSString stringWithFormat:@"%02d:%02d:%02d", hours, minutes, seconds];
}
@end
最后
以上就是缥缈项链为你收集整理的流媒体-FFmpeg的全部内容,希望文章能够帮你解决流媒体-FFmpeg所遇到的程序开发问题。
如果觉得靠谱客网站的内容还不错,欢迎将靠谱客网站推荐给程序员好友。
发表评论 取消回复