iOS4: Getting Camera Data Directly

Introduction:

On iOS 4, AVFoundation lets you read raw frames straight off the camera: an AVCaptureSession pulls video from the device, an AVCaptureVideoDataOutput hands each frame to a delegate as a BGRA pixel buffer, and the sample below displays the result three ways at once (a hand-fed CALayer, a UIImageView, and an AVCaptureVideoPreviewLayer).

Frameworks to add: CoreMedia, CoreVideo, QuartzCore, AVFoundation
MyAVController.h:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <QuartzCore/QuartzCore.h> // needed for CALayer and CATransform3D below
 
@interface MyAVController : UIViewController
<AVCaptureVideoDataOutputSampleBufferDelegate> {
     AVCaptureSession *_captureSession;
     UIImageView *_imageView;                // frames converted to UIImage
     CALayer *_customLayer;                  // frames set as raw CGImage contents
     AVCaptureVideoPreviewLayer *_prevLayer; // AVFoundation's built-in live preview
}
 
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
- (void)initCapture;
 
@end
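
The header declares the capture session plus the three surfaces the frames end up on: a CALayer fed by hand from the delegate callback, a UIImageView fed the same way, and AVFoundation's own AVCaptureVideoPreviewLayer for comparison.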
 
MyAVController.m:
 
#import "MyAVController.h"
 
@implementation MyAVController
 
@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;
 
#pragma mark -
#pragma mark Initialization
- (id)init {
     self = [super init];
     if (self) {
         self.imageView = nil;
         self.prevLayer = nil;
         self.customLayer = nil;
     }
     return self;
}
 
- (void)viewDidLoad {
     [super viewDidLoad];
     [self initCapture];
}
 
- (void)initCapture {
     // Wrap the default video device (the camera) in a capture input.
     AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
         deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
         error:nil];
     AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
     // Drop frames the delegate is too slow to process instead of queuing them.
     captureOutput.alwaysDiscardsLateVideoFrames = YES;
     //captureOutput.minFrameDuration = CMTimeMake(1, 10);

     // Frames are delivered on this serial dispatch queue, not the main thread.
     dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
     [captureOutput setSampleBufferDelegate:self queue:queue];
     dispatch_release(queue);

     // Ask for BGRA pixel buffers so they can be handed to CoreGraphics directly.
     NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
     NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
     NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
     [captureOutput setVideoSettings:videoSettings];

     self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
     [self.captureSession addInput:captureInput];
     [self.captureSession addOutput:captureOutput];
     [captureOutput release]; // the session retains its outputs
     [self.captureSession startRunning];

     // Full-screen CALayer fed manually from the delegate callback.
     self.customLayer = [CALayer layer];
     self.customLayer.frame = self.view.bounds;
     // The camera delivers landscape frames; rotate 90 degrees for a portrait UI.
     self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity,
                                                      M_PI / 2.0f, 0, 0, 1);
     self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
     [self.view.layer addSublayer:self.customLayer];

     // Small UIImageView also fed from the delegate callback.
     self.imageView = [[[UIImageView alloc] init] autorelease];
     self.imageView.frame = CGRectMake(0, 0, 100, 100);
     [self.view addSubview:self.imageView];

     // AVFoundation's own preview layer, shown next to the UIImageView.
     self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
     self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
     self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
     [self.view.layer addSublayer:self.prevLayer];
}
 
#pragma mark -
#pragma mark AVCaptureVideoDataOutput delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
     // This callback runs on the camera queue, so it needs its own pool.
     NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

     // Lock the pixel buffer so its base address stays valid while we read it.
     CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
     CVPixelBufferLockBaseAddress(imageBuffer, 0);
     uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
     size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
     size_t width = CVPixelBufferGetWidth(imageBuffer);
     size_t height = CVPixelBufferGetHeight(imageBuffer);

     // Wrap the raw BGRA bytes in a bitmap context and snapshot it as a CGImage.
     CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
     CGContextRef newContext = CGBitmapContextCreate(baseAddress,
         width, height, 8, bytesPerRow, colorSpace,
         kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
     CGImageRef newImage = CGBitmapContextCreateImage(newContext);

     CGContextRelease(newContext);
     CGColorSpaceRelease(colorSpace);

     // UI updates must happen on the main thread; waitUntilDone:YES keeps
     // newImage alive until the layer has taken ownership of it.
     [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                        withObject:(id)newImage
                                     waitUntilDone:YES];

     UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0
                                    orientation:UIImageOrientationRight];

     CGImageRelease(newImage);

     [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                      withObject:image
                                   waitUntilDone:YES];

     CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

     [pool drain];
}
 
#pragma mark -
#pragma mark Memory management
 
- (void)viewDidUnload {
     [super viewDidUnload];
     self.imageView = nil;
     self.customLayer = nil;
     self.prevLayer = nil;
}
 
- (void)dealloc {
     // Release every retained ivar, not just the session.
     [_captureSession release];
     [_imageView release];
     [_customLayer release];
     [_prevLayer release];
     [super dealloc];
}
 
@end
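
To try the controller out, it can be installed from the app delegate. The sketch below is a minimal MRC-era example, not part of the original post: the MyAppDelegate class and its ivar names are illustrative assumptions. The ivar is what keeps the controller alive, since under manual reference counting the window only retains the controller's view.

#import "MyAVController.h"

// Hypothetical app delegate; the _avController ivar keeps the controller alive.
@interface MyAppDelegate : NSObject <UIApplicationDelegate> {
    UIWindow *_window;
    MyAVController *_avController;
}
@end

@implementation MyAppDelegate

- (BOOL)application:(UIApplication *)application
    didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
    _avController = [[MyAVController alloc] init];
    // iOS 4 era pattern: add the controller's view straight to the window.
    [_window addSubview:_avController.view];
    [_window makeKeyAndVisible];
    return YES;
}

- (void)dealloc {
    [_avController release];
    [_window release];
    [super dealloc];
}

@end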

This article was reposted from the 知识天地 blog on 博客园. Original post: IOS4直接获取摄像头数据. Please contact the original author before reprinting.
