iOS 4 直接获取摄像头数据

简介:

需要添加的 framework:CoreMedia、CoreVideo、QuartzCore、AVFoundation
MyAVController.h:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
 
// View controller that captures raw camera frames through
// AVCaptureVideoDataOutput and displays each frame three ways:
// a plain CALayer fed with CGImages, a UIImageView, and Apple's
// AVCaptureVideoPreviewLayer. Built for the pre-ARC (MRC) runtime,
// hence the `retain` property attributes.
@interface MyAVController : UIViewController <
AVCaptureVideoDataOutputSampleBufferDelegate> {
     AVCaptureSession *_captureSession;       // owns the camera input/output pipeline
     UIImageView *_imageView;                 // shows each frame converted to UIImage
     CALayer *_customLayer;                   // shows each frame as a raw CGImage
     AVCaptureVideoPreviewLayer *_prevLayer;  // framework-provided live preview
}
 
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
// Builds the capture session, wires up input/output, and adds the
// three display surfaces to the view hierarchy.
- (void)initCapture;
 
@end
 
MyAVController.m:
 
#import "MyAVController.h"
 
@implementation MyAVController

@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;

#pragma mark -
#pragma mark Initialization

// Designated initializer. +alloc already zeroes every ivar, so the
// explicit nil-assignments the original made here were redundant.
- (id)init {
    self = [super init];
    return self;
}

- (void)viewDidLoad {
    [super viewDidLoad];  // fix: lifecycle overrides must call super
    [self initCapture];
}

// Builds the capture pipeline: camera input -> BGRA video-data output,
// plus the three on-screen consumers (custom CALayer, UIImageView,
// AVCaptureVideoPreviewLayer).
- (void)initCapture {
    NSError *error = nil;
    AVCaptureDevice *device =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *captureInput =
        [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!captureInput) {
        // fix: original passed error:nil and added a possibly-nil input.
        // No camera (e.g. the simulator) or a permission failure lands here.
        NSLog(@"Could not create camera input: %@", error);
        return;
    }

    AVCaptureVideoDataOutput *captureOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    // Drop late frames instead of queueing them; keeps latency bounded.
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    //captureOutput.minFrameDuration = CMTimeMake(1, 10);

    // Deliver sample buffers on a private serial queue. The output retains
    // the queue, so our reference can be released immediately (MRC/GCD).
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Request 32BGRA so the pixel buffer can be handed straight to
    // CoreGraphics in the delegate callback.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value =
        [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings =
        [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    // fix: assigning [[... alloc] init] through a `retain` property leaks
    // one reference under MRC; balance the alloc with an explicit release.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    self.captureSession = session;
    [session release];

    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [captureOutput release];  // fix: the session retains the output; was leaked
    [self.captureSession startRunning];

    // Layer that receives raw CGImages; rotated 90° because the camera
    // delivers landscape-oriented frames.
    self.customLayer = [CALayer layer];
    self.customLayer.frame = self.view.bounds;
    self.customLayer.transform =
        CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];

    // fix: same alloc-through-retain-property leak as the session above.
    UIImageView *frameView = [[UIImageView alloc] init];
    frameView.frame = CGRectMake(0, 0, 100, 100);
    self.imageView = frameView;
    [frameView release];
    [self.view addSubview:self.imageView];

    self.prevLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];
}

#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Called on the private camera queue for every captured frame. Wraps the
// BGRA pixel buffer in a CGImage and pushes it to the UI on the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // Runs off the main thread, so keep a local autorelease pool (MRC).
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Little-endian 32-bit + premultiplied-first alpha matches the
    // kCVPixelFormatType_32BGRA layout requested in initCapture.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(
        baseAddress, width, height, 8, bytesPerRow, colorSpace,
        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    // waitUntilDone:YES keeps newImage alive until the layer has taken
    // ownership of it via setContents:.
    [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                       withObject:(id)newImage
                                    waitUntilDone:YES];

    UIImage *image = [UIImage imageWithCGImage:newImage
                                         scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(newImage);

    [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                     withObject:image
                                  waitUntilDone:YES];

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    [pool drain];
}

#pragma mark -
#pragma mark Memory management

- (void)viewDidUnload {
    [super viewDidUnload];  // fix: lifecycle overrides must call super
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
}

- (void)dealloc {
    // fix: release every retained ivar (the original leaked _imageView,
    // _customLayer and _prevLayer), and access ivars directly rather than
    // going through accessors in dealloc.
    [_captureSession release];
    [_imageView release];
    [_customLayer release];
    [_prevLayer release];
    [super dealloc];
}

@end

本文转自博客园知识天地的博客,原文链接:iOS 4 直接获取摄像头数据,如需转载请自行联系原博主。

相关文章
|
8月前
|
开发框架 前端开发 Android开发
Flutter 与原生模块(Android 和 iOS)之间的通信机制,包括方法调用、事件传递等,分析了通信的必要性、主要方式、数据传递、性能优化及错误处理,并通过实际案例展示了其应用效果,展望了未来的发展趋势
本文深入探讨了 Flutter 与原生模块(Android 和 iOS)之间的通信机制,包括方法调用、事件传递等,分析了通信的必要性、主要方式、数据传递、性能优化及错误处理,并通过实际案例展示了其应用效果,展望了未来的发展趋势。这对于实现高效的跨平台移动应用开发具有重要指导意义。
792 4
|
JSON JavaScript 安全
iOS应用程序数据保护:如何保护iOS应用程序中的图片、资源和敏感数据
iOS应用程序数据保护:如何保护iOS应用程序中的图片、资源和敏感数据
125 1
|
存储 iOS开发 开发者
使用克魔助手进行iOS数据抓包和HTTP抓包的方法详解
使用克魔助手进行iOS数据抓包和HTTP抓包的方法详解
239 0
|
11月前
|
iOS开发 开发者
iOS平台RTMP|RTSP播放器如何实时回调YUV数据
我们在做RTMP、RTSP播放器的时候,有开发者需要自己处理拉取到的YUV数据,做二次分析之用,为此,我们做了以下的设计:InitPlayer之后,再调用SmartPlayerStart()接口之前,设置yuv数据回调即可。
161 6
|
移动开发 小程序 API
uniapp通过蓝牙传输数据 (ios)
uniapp通过蓝牙传输数据 (ios)
619 1
|
Java iOS开发
iOS的数据序列化(又称持久化)的两类使用方式
iOS的数据序列化(又称持久化)的两类使用方式
121 0
|
JSON JavaScript 安全
iOS 应用程序数据保护:如何保护 iOS 应用程序中的图片、资源和敏感数据
iOS 应用程序数据保护:如何保护 iOS 应用程序中的图片、资源和敏感数据

热门文章

最新文章