
Displaying Video on iOS with AVFoundation

2020-07-26 02:20:32

This article shares a working example of displaying live camera video on iOS with AVFoundation, for your reference. The complete code is as follows:

//
//  Capter2ViewController.m
//  IosTest
//
//  Created by garin on 13-7-19.
//  Copyright (c) 2013 garin. All rights reserved.
//

#import "Capter2ViewController.h"

@interface Capter2ViewController ()
// The ivars session, videoPreviewView and imgView are assumed to be declared
// in Capter2ViewController.h.
@end

@implementation Capter2ViewController

- (void)dealloc
{
    [session release];
    [super dealloc];
}

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // Custom initialization
    }
    return self;
}

- (void)viewDidLoad
{
    [super viewDidLoad];

    videoPreviewView = [[UIView alloc] initWithFrame:CGRectMake(10, 10, 320, 200)];
    [self.view addSubview:videoPreviewView];
    [videoPreviewView release];

    // Call the method below from viewDidLoad to start showing the camera feed.
    [self setupCaptureSession];

    // Optional image view for displaying individual frames; uncomment together
    // with the body of the capture delegate below.
    // imgView = [[UIImageView alloc] initWithFrame:CGRectMake(10, 230, 320, 100)];
    // imgView.backgroundColor = [UIColor grayColor];
    // [self.view addSubview:imgView];
    // [imgView release];

    UIButton *closeBtn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    closeBtn.frame = CGRectMake(10, 220, 300, 50);
    [closeBtn setTitle:@"Press" forState:UIControlStateNormal];
    [closeBtn addTarget:self
                 action:@selector(closeBtnClick:)
       forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:closeBtn];
}

- (void)closeBtnClick:(id)sender
{
    [session stopRunning];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session
    session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your
    // processing algorithm can cope. We'll specify low quality for the chosen
    // device.
    session.sessionPreset = AVCaptureSessionPresetLow;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (!input) {
        // Handle the error appropriately instead of adding nil to the session.
        NSLog(@"Failed to create device input: %@", error);
        return;
    }
    [session addInput:input];

    // Create a VideoDataOutput and add it to the session
    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [session addOutput:output];

    // Configure your output: deliver sample buffers to self on a serial queue.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Specify the pixel format
    output.videoSettings = [NSDictionary dictionaryWithObject:
                                             [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // If you wish to cap the frame rate to a known value, such as 15 fps, the
    // original article pointed at these (long-deprecated) APIs:
    // output.minFrameDuration = CMTimeMake(1, 15);
    // [connection setVideoMinFrameDuration:CMTimeMake(1, 15)];
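    // Editor's addition, not in the original article: both of the commented-out
    // calls above are deprecated. On iOS 7 and later the frame rate is capped
    // on the AVCaptureDevice itself. A minimal sketch, assuming the device's
    // active format actually supports 15 fps:
    if ([device lockForConfiguration:&error]) {
        device.activeVideoMinFrameDuration = CMTimeMake(1, 15); // at most 15 fps
        device.activeVideoMaxFrameDuration = CMTimeMake(1, 15); // at least 15 fps
        [device unlockForConfiguration];
    }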
    // Create a preview layer so the camera feed is rendered into
    // videoPreviewView (the UIView created in viewDidLoad).
    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:session];
    previewLayer.frame = videoPreviewView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // [previewLayer setOrientation:AVCaptureVideoOrientationLandscapeRight];
    // (deprecated; on iOS 6+ set previewLayer.connection.videoOrientation instead)

    [videoPreviewView.layer addSublayer:previewLayer];

    // Start the session running to start the flow of data. (The original code
    // called -startRunning twice; once is enough.)
    if (![session isRunning]) {
        [session startRunning];
    }

    // Assign session to an ivar.
    // [self setSession:session];
}

// Receive the video stream: called once for every captured frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Create a UIImage from the sample buffer data. (The original article
    // short-circuited this method with a stray `return;`, leaving the code
    // below unreachable; it is restored here.)
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];

    // The delegate runs on "myQueue"; UIKit may only be touched on the main
    // thread, so hop over before assigning the frame image.
    dispatch_async(dispatch_get_main_queue(), ^{
        imgView.image = image;
    });
}

// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}

@end
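One caveat the original (iOS 6-era) code does not cover: on current systems the session will not deliver frames until the user has granted camera access, and on iOS 10 and later the app's Info.plist must also contain an NSCameraUsageDescription entry. The sketch below is an editor's addition rather than part of the original article; the method name checkCameraAccessThenSetup is made up for illustration, and it assumes iOS 7+ for requestAccessForMediaType:completionHandler:.

// Editor's sketch: request camera authorization, then run the setup above.
- (void)checkCameraAccessThenSetup
{
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                             completionHandler:^(BOOL granted) {
        if (!granted) {
            NSLog(@"Camera access was denied by the user.");
            return;
        }
        // The completion handler may run on an arbitrary queue; hop to the
        // main thread before touching the view hierarchy.
        dispatch_async(dispatch_get_main_queue(), ^{
            [self setupCaptureSession];
        });
    }];
}

Called in place of the direct [self setupCaptureSession] in viewDidLoad, this keeps the original flow intact while satisfying the authorization requirement.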

That is the full content of this article. We hope it helps with your learning, and we hope you will keep supporting 武林网.
