我对Xcode中的Objective C有点陌生。我正在尝试加快我的应用程序中条形码的读取速度,该应用程序可以正常运行,但是读取条形码时有点慢。
有没有人对提高条形码读取速度有任何建议?我还是个新人,所以如果我问愚蠢的菜鸟问题请多多包涵。
感谢大家!这是我到目前为止的代码```#import "ScanViewController.h" #import "Utils.h" #import <AVFoundation/AVFoundation.h> #import <AudioToolbox/AudioToolbox.h>
@interface ScanViewController ()

// Capture pipeline.
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;

// Overlay UI drawn on top of the camera preview.
@property (nonatomic, strong) UIView *qrCodeFrameView;
@property (nonatomic, strong) UILabel *qrCodeTextView;

// Barcode symbologies the metadata output is allowed to report.
// `copy` on NSArray/NSString guards against mutable subclasses being
// mutated behind the controller's back.
@property (nonatomic, copy) NSArray *supportedCodeTypes;

// De-duplication state for repeated scans of the same code.
@property (nonatomic, assign) long long lastScanTime;
@property (nonatomic, copy) NSString *lastScanCode;

// Player for the scan-confirmation sound.
@property (nonatomic, strong) AVAudioPlayer *audioPlayer;

@end
@implementation ScanViewController
/// Configures the capture session, metadata output, preview layer, and the
/// scanning overlay UI.
- (void)viewDidLoad {
    [super viewDidLoad];

    self.captureSession = [[AVCaptureSession alloc] init];
    // NOTE(review): a smaller preset (e.g. AVCaptureSessionPreset1280x720)
    // usually speeds up barcode detection noticeably — the metadata scanner
    // has far fewer pixels to examine per frame than at the High preset.
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    // Keeping the symbology list short also keeps detection fast; only add
    // further types (QR, Code128, ...) when the app actually needs them.
    self.supportedCodeTypes = @[AVMetadataObjectTypeUPCECode,
                                AVMetadataObjectTypeEAN13Code,
                                AVMetadataObjectTypeEAN8Code];

    // Fix: the argument must be a media type (AVMediaTypeVideo). The original
    // passed a session-preset constant, which is not a valid media type.
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (captureDevice == nil) {
        NSLog(@"Failed to get the camera device");
        return;
    }

    // Check the NSError out-parameter instead of wrapping ordinary Cocoa calls
    // in @try/@catch — AVFoundation reports recoverable failures via NSError.
    NSError *inputError = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
                                                                        error:&inputError];
    if (input == nil) {
        NSLog(@"Failed to create camera input: %@", inputError);
        return;
    }
    if (![self.captureSession canAddInput:input]) {
        NSLog(@"Cannot add camera input to the capture session");
        return;
    }
    [self.captureSession addInput:input];

    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    if (![self.captureSession canAddOutput:captureMetadataOutput]) {
        NSLog(@"Cannot add metadata output to the capture session");
        return;
    }
    [self.captureSession addOutput:captureMetadataOutput];

    // Deliver detection callbacks on the main queue — the delegate updates UI.
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    // metadataObjectTypes must be set AFTER the output is added to the session.
    captureMetadataOutput.metadataObjectTypes = self.supportedCodeTypes;

    // Preview layer. Fix: videoGravity takes an AVLayerVideoGravity constant,
    // not the CALayer contentsGravity constant kCAGravityResizeAspectFill.
    self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.videoPreviewLayer.frame = self.view.layer.bounds;
    [self.view.layer addSublayer:self.videoPreviewLayer];

    // -startRunning blocks until the session is up; keep it off the main
    // thread so the UI appears immediately.
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        [self.captureSession startRunning];
    });

    // Move the result view and loading view to the front.
    [self.view bringSubviewToFront:self.resultView];
    [self.view bringSubviewToFront:self.loadingView];

    // Green rectangle that highlights the detected barcode.
    self.qrCodeFrameView = [[UIView alloc] init];
    self.qrCodeFrameView.layer.borderColor = UIColor.greenColor.CGColor;
    self.qrCodeFrameView.layer.borderWidth = 2;
    [self.view addSubview:self.qrCodeFrameView];
    [self.view bringSubviewToFront:self.qrCodeFrameView];

    // Label that shows the decoded string, nested inside the highlight frame.
    self.qrCodeTextView = [[UILabel alloc] init];
    self.qrCodeTextView.textColor = UIColor.greenColor;
    self.qrCodeTextView.font = [UIFont systemFontOfSize:20];
    [self.qrCodeFrameView addSubview:self.qrCodeTextView];

    [self rotateLoadingImage];
    [self setResultType:RESULT_TYPE_WORKING codeContent:@"Ready" price:0.00];
    [self.loadingView setHidden:YES];
}
/// Stops and releases the confirmation-sound player before the view goes away.
- (void)viewWillDisappear:(BOOL)animated {
    // Messaging nil is a harmless no-op in Objective-C, so no explicit
    // nil check is required before stopping the player.
    [self.audioPlayer stop];
    self.audioPlayer = nil;
    [super viewWillDisappear:animated];
}
/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little
// preparation before navigation.
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/
// Fix: the comment block above was left unterminated (a stray `/` instead of
// `*/`), which would swallow the following method.

/// Applies `orientation` to the preview connection and refits the preview
/// layer to the current view bounds.
/// Fix: the connection parameter must be an object pointer
/// (AVCaptureConnection *), not a bare struct-style type.
///
/// @param layer       The preview layer's capture connection to update.
/// @param orientation The video orientation to apply.
- (void)updatePreviewLayer:(AVCaptureConnection *)layer
               orientation:(AVCaptureVideoOrientation)orientation {
    layer.videoOrientation = orientation;
    self.videoPreviewLayer.frame = self.view.bounds;
}
/// Keeps the preview layer's video orientation in sync with the physical
/// device orientation whenever layout changes (e.g. on rotation).
- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];

    // Guard clauses: bail out unless there is a connection that supports
    // orientation changes.
    AVCaptureConnection *previewConnection = self.videoPreviewLayer.connection;
    if (previewConnection == nil || !previewConnection.isVideoOrientationSupported) {
        return;
    }

    // Map device orientation to capture orientation. Note the landscape cases
    // are mirrored: device landscape-right corresponds to capture
    // landscape-left, and vice versa.
    AVCaptureVideoOrientation videoOrientation;
    switch ([[UIDevice currentDevice] orientation]) {
        case UIDeviceOrientationLandscapeRight:
            videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIDeviceOrientationLandscapeLeft:
            videoOrientation = AVCaptureVideoOrientationLandscapeRight;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        case UIDeviceOrientationPortrait:
        default:
            // Face-up / face-down / unknown all fall back to portrait,
            // matching the original default branch.
            videoOrientation = AVCaptureVideoOrientationPortrait;
            break;
    }

    [self updatePreviewLayer:previewConnection orientation:videoOrientation];
}
/// AVCaptureMetadataOutputObjectsDelegate callback, delivered on the main
/// queue (configured in viewDidLoad) each time the metadata output detects
/// machine-readable codes. Clears the highlight when nothing is detected;
/// otherwise validates, normalizes, displays, and forwards the FIRST detected
/// code — additional codes in the same frame are ignored.
-(void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection { // Check if the metadataObjects array is not nil and it contains at least one object. if (metadataObjects.count == 0) { self.qrCodeFrameView.frame = CGRectZero; return; }
// Get the metadata object.
AVMetadataMachineReadableCodeObject *metadataObj = (AVMetadataMachineReadableCodeObject*)(metadataObjects[0]);
if ([self.supportedCodeTypes containsObject:metadataObj.type]) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
// Convert the code's bounds from capture coordinates to preview-layer
// coordinates so the highlight rectangle lines up with what the user sees.
AVMetadataObject *barCodeObject = [self.videoPreviewLayer transformedMetadataObjectForMetadataObject:metadataObj];
NSString *code = metadataObj.stringValue;
if (code != nil) {
// check upc a code
// Reject anything that fails the UPC-A check (-checkUpcACode:code:),
// clearing the label so stale text does not linger on screen.
if ([self checkUpcACode:metadataObj.type code:code] == NO) {
self.qrCodeTextView.text = @"";
return;
}
// Count the run of leading '0' characters in the payload...
int i=0;
for (i=0; i<code.length; i++) {
// -characterAtIndex: returns unichar; the narrowing to char is safe here
// because barcode payloads are ASCII digits.
char ch = [code characterAtIndex:i];
if (ch != '0') break;
}
// ...then back up one index so exactly ONE leading zero is kept, e.g.
// "0012345" -> "012345" while "0123456789012" stays unchanged.
// NOTE(review): for an EAN-13 rendering of UPC-A ("0" + 12 digits) this
// keeps the string as-is rather than stripping the single leading zero —
// confirm this normalization is intentional.
if (i>0) i--;
code = [code substringFromIndex:i];
// Position the highlight over the detected code and pin the text label to
// the bottom edge of the highlight frame (20pt tall, full width).
self.qrCodeFrameView.frame = barCodeObject.bounds;
[self.qrCodeTextView setText:code];
self.qrCodeTextView.frame = CGRectMake(0, self.qrCodeFrameView.frame.size.height-20, self.qrCodeFrameView.frame.size.width, 20);
NSLog(@"%@", code);
// Hand the normalized code to the app-specific handler (defined elsewhere).
[self handleBarcode:code];
} else {
// No decodable string payload: clear any previous label text.
self.qrCodeTextView.text = @"";
}
}
}
/// Returns YES only for an EAN-13 code whose first digit is '0' — i.e. a
/// UPC-A code in its EAN-13 representation. Every other symbology (UPC-E,
/// EAN-8, ...) is rejected.
///
/// @param type The AVFoundation symbology of the scanned code.
/// @param code The decoded payload string.
/// @return YES when the code should be treated as a valid UPC-A scan.
- (BOOL)checkUpcACode:(AVMetadataObjectType)type code:(NSString *)code {
    // Fix: AVMetadataObjectType is an NSString — compare by value with
    // isEqualToString:, not by pointer identity (==).
    if (![type isEqualToString:AVMetadataObjectTypeEAN13Code]) {
        return NO;
    }
    // hasPrefix: returns NO for an empty string, so the original's separate
    // length check is subsumed here.
    return [code hasPrefix:@"0"];
}
版权声明:本文内容由阿里云实名注册用户自发贡献,版权归原作者所有,阿里云开发者社区不拥有其著作权,亦不承担相应法律责任。具体规则请查看《阿里云开发者社区用户服务协议》和《阿里云开发者社区知识产权保护指引》。如果您发现本社区中有涉嫌抄袭的内容,填写侵权投诉表单进行举报,一经查实,本社区将立刻删除涉嫌侵权内容。