开发者社区> 问答> 正文

如何提高读取UPC-A条码速度?

我对 Xcode 中的 Objective-C 还有点陌生。我正在尝试加快我的应用程序中条形码的读取速度:应用程序可以正常运行,但是读取条形码时有点慢。

有没有人对提高条形码读取速度有任何建议?我还是个新人,所以如果我问愚蠢的菜鸟问题请多多包涵。

感谢大家!这是我到目前为止的代码:

```
#import "ScanViewController.h"
#import "Utils.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>

// Private class extension for ScanViewController.
// Declares the metadata-output delegate conformance so the
// -setMetadataObjectsDelegate:queue: call in -viewDidLoad type-checks.
@interface ScanViewController () <AVCaptureMetadataOutputObjectsDelegate>

// Capture session driving the camera pipeline.
@property (nonatomic, strong) AVCaptureSession *captureSession;
// Layer rendering the live camera feed into the view hierarchy.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
// Green rectangle overlaid on a detected barcode.
@property (nonatomic, strong) UIView *qrCodeFrameView;
// Label inside the highlight frame showing the decoded value.
@property (nonatomic, strong) UILabel *qrCodeTextView;
// Symbologies the metadata output should report. `copy` because NSArray has a
// mutable subclass that could otherwise mutate behind our back.
@property (nonatomic, copy) NSArray *supportedCodeTypes;
// NOTE(review): presumably a millisecond timestamp used to throttle duplicate
// scans — not read in the visible code, confirm against other callers.
@property (nonatomic, assign) long long lastScanTime;
// Last decoded payload. `copy` for the same NSString/NSMutableString reason.
// NOTE(review): not read in the visible code — confirm usage elsewhere.
@property (nonatomic, copy) NSString *lastScanCode;
// Player for the scan confirmation sound; stopped in -viewWillDisappear:.
@property (nonatomic, strong) AVAudioPlayer *audioPlayer;

@end

@implementation ScanViewController

  • (void)viewDidLoad { [super viewDidLoad]; // Do any additional setup after loading the view.

    self.captureSession = [AVCaptureSession new];

    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh; self.supportedCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code]; // AVMetadataObjectTypeCode39Code, // AVMetadataObjectTypeCode39Mod43Code, // AVMetadataObjectTypeCode93Code, // AVMetadataObjectTypeCode128Code, // AVMetadataObjectTypeAztecCode, // AVMetadataObjectTypePDF417Code, // AVMetadataObjectTypeITF14Code, // AVMetadataObjectTypeDataMatrixCode, // AVMetadataObjectTypeInterleaved2of5Code, // AVMetadataObjectTypeQRCode];

    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVCaptureSessionPre]; if(captureDevice == nil) { NSLog(@"Failed to get the camera device"); return; }

    @try { // Get an instance of the AVCaptureDeviceInput class using the previous device object. AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];

    // Set the input device on the capture session.
    [self.captureSession addInput:input];
    
    // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    [self.captureSession addOutput:captureMetadataOutput];
    
    // Set delegate and use the default dispatch queue to execute the call back
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    captureMetadataOutput.metadataObjectTypes = self.supportedCodeTypes;
    //            captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
    

    } @catch (NSException *error) { // If any error occurs, simply print it out and don't continue any more. NSLog(@"%@", error); return; }

    // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer. self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession]; self.videoPreviewLayer.videoGravity = kCAGravityResizeAspectFill; self.videoPreviewLayer.frame = self.view.layer.bounds; [self.view.layer addSublayer:self.videoPreviewLayer];

    // Start video capture. [self.captureSession startRunning];

    // Move the result view and loading view to the front [self.view bringSubviewToFront:self.resultView]; [self.view bringSubviewToFront:self.loadingView];

    // Initialize QR Code Frame to highlight the QR code self.qrCodeFrameView = [[UIView alloc] init]; if (self.qrCodeFrameView) { self.qrCodeFrameView.layer.borderColor = UIColor.greenColor.CGColor; self.qrCodeFrameView.layer.borderWidth = 2; [self.view addSubview:self.qrCodeFrameView]; [self.view bringSubviewToFront:self.qrCodeFrameView]; }

    self.qrCodeTextView = [[UILabel alloc] init]; if (self.qrCodeTextView) { [self.qrCodeTextView setTextColor:UIColor.greenColor]; [self.qrCodeTextView setFont:[UIFont systemFontOfSize:20]]; [self.qrCodeFrameView addSubview:self.qrCodeTextView]; }

    [self rotateLoadingImage]; [self setResultType:RESULT_TYPE_WORKING codeContent:@"Ready" price:0.00]; [self.loadingView setHidden:YES];

}

// Stops and releases the confirmation-sound player before the view goes away.
- (void)viewWillDisappear:(BOOL)animated {
    // Messaging nil is a no-op, so no explicit nil check is needed.
    [self.audioPlayer stop];
    self.audioPlayer = nil;

    [super viewWillDisappear:animated];
}

/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little
// preparation before navigation.
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/
// BUGFIX: the block comment above was terminated with a stray "/" instead of
// "*/", which swallowed the method below at compile time.

// Applies the given orientation to the preview connection and resizes the
// preview layer to fill the view.
// BUGFIX: the parameter was declared (AVCaptureConnection)layer — Objective-C
// objects are always pointers, so the "*" is required.
// @param layer       The preview layer's capture connection.
// @param orientation The video orientation to apply.
- (void)updatePreviewLayer:(AVCaptureConnection *)layer orientation:(AVCaptureVideoOrientation)orientation {
    layer.videoOrientation = orientation;
    self.videoPreviewLayer.frame = self.view.bounds;
}

// Keeps the video preview's orientation in sync with the physical device
// orientation whenever layout changes (e.g. on rotation).
- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];

    AVCaptureConnection *connection = self.videoPreviewLayer.connection;
    if (connection == nil || !connection.isVideoOrientationSupported) {
        return;
    }

    // Map the device orientation to the matching video orientation. Note the
    // deliberate left/right swap: device landscape-right corresponds to
    // camera landscape-left, and vice versa.
    AVCaptureVideoOrientation videoOrientation;
    switch ([[UIDevice currentDevice] orientation]) {
        case UIDeviceOrientationLandscapeRight:
            videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIDeviceOrientationLandscapeLeft:
            videoOrientation = AVCaptureVideoOrientationLandscapeRight;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        case UIDeviceOrientationPortrait:
        default:
            // Face-up/face-down/unknown all fall back to portrait.
            videoOrientation = AVCaptureVideoOrientationPortrait;
            break;
    }

    [self updatePreviewLayer:connection orientation:videoOrientation];
}

// AVCaptureMetadataOutputObjectsDelegate callback, delivered on the main
// queue (see -viewDidLoad): one call per frame in which the detector found
// at least one machine-readable code.
- (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    // No barcode in this frame: collapse the highlight rectangle and bail out.
    if (metadataObjects.count == 0) {
        self.qrCodeFrameView.frame = CGRectZero;
        return;
    }

    // Only the first detected object is considered.
    AVMetadataMachineReadableCodeObject *metadataObj =
        (AVMetadataMachineReadableCodeObject *)metadataObjects.firstObject;

    if (![self.supportedCodeTypes containsObject:metadataObj.type]) {
        return;
    }

    // Convert from metadata coordinate space to preview-layer coordinates so
    // the highlight rectangle lines up with what the user sees on screen.
    AVMetadataObject *barCodeObject =
        [self.videoPreviewLayer transformedMetadataObjectForMetadataObject:metadataObj];
    NSString *code = metadataObj.stringValue;

    if (code == nil) {
        self.qrCodeTextView.text = @"";
        return;
    }

    // Only accept EAN-13 payloads that encode a UPC-A code (leading "0").
    if (![self checkUpcACode:metadataObj.type code:code]) {
        self.qrCodeTextView.text = @"";
        return;
    }

    // Strip leading zeros but keep the final one before the first non-zero
    // digit, e.g. "0012345…" -> "012345…" (preserves the UPC-A leading zero).
    // BUGFIX: -characterAtIndex: returns unichar; storing it in a plain char
    // truncated the value (harmless for ASCII digits, but incorrect).
    NSUInteger firstNonZero = 0;
    while (firstNonZero < code.length && [code characterAtIndex:firstNonZero] == '0') {
        firstNonZero++;
    }
    if (firstNonZero > 0) {
        firstNonZero--;
    }
    code = [code substringFromIndex:firstNonZero];

    // Position the highlight over the barcode and show the decoded value
    // along the bottom edge of the frame.
    self.qrCodeFrameView.frame = barCodeObject.bounds;
    self.qrCodeTextView.text = code;
    self.qrCodeTextView.frame = CGRectMake(0,
                                           self.qrCodeFrameView.frame.size.height - 20,
                                           self.qrCodeFrameView.frame.size.width,
                                           20);
    NSLog(@"%@", code);

    [self handleBarcode:code];
}

// Returns YES when the scanned symbol encodes a UPC-A code. AVFoundation
// reports UPC-A as an EAN-13 symbol whose payload starts with "0".
// @param type The symbology reported by AVFoundation.
// @param code The decoded payload string.
- (BOOL)checkUpcACode:(AVMetadataObjectType)type code:(NSString *)code {
    // BUGFIX: AVMetadataObjectType is an NSString — compare by value with
    // -isEqualToString:, not by pointer identity with ==. The nil guard keeps
    // the original behavior (nil type -> NO) despite nil-messaging semantics.
    if (type == nil || ![type isEqualToString:AVMetadataObjectTypeEAN13Code]) {
        return NO;
    }
    // -hasPrefix: returns NO for an empty string, so the original's separate
    // (and mis-ordered) length check was redundant.
    return [code hasPrefix:@"0"];
}

展开
收起
南南唐语 2019-12-02 20:31:38 694 0
0 条回答
写回答
取消 提交回答
问答分类:
问答地址:
问答排行榜
最热
最新

相关电子书

更多
低代码开发师(初级)实战教程 立即下载
冬季实战营第三期:MySQL数据库进阶实战 立即下载
阿里巴巴DevOps 最佳实践手册 立即下载