Immer nur weißer Screenshot

Ich kann den Barcode lesen, aber keine Momentaufnahme des Bildschirms erhalten. Die Funktion getScreenImage erhält einen weißen Bildschirm. Wie bekomme ich den Screenshot mit dem Bildschirm, auf dem ich die Kameraansicht sehe? Danke.

// Private class extension: barcode scanner screen.
// Conforms to the metadata-output delegate (barcode detection callbacks) and
// the video-data delegate declared for frame access.
@interface igViewController () <AVCaptureMetadataOutputObjectsDelegate,AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureSession *_session;              // capture pipeline
    AVCaptureDevice *_device;                // default video (camera) device
    AVCaptureDeviceInput *_input;            // camera input attached to the session
    AVCaptureMetadataOutput *_output;        // machine-readable-code metadata output
    AVCaptureVideoPreviewLayer *_prevLayer;  // live camera preview

    UIView *_highlightView;                  // green border drawn over a detected code
    UILabel *_label;                         // shows the decoded string
    UIImage *img;                            // snapshot handed to MainViewController

    // FIX: `detectionString` is read and written in the delegate callback and
    // in -getScreenImage but was never declared anywhere, which does not
    // compile. Declare it as an ivar here.
    NSString *detectionString;
}
@end
// Builds the scanner UI (highlight rectangle + result label), configures the
// AVCapture pipeline for barcode metadata, and starts the session.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Green rectangle that is moved over a recognized barcode.
    _highlightView = [[UIView alloc] init];
    _highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin|UIViewAutoresizingFlexibleLeftMargin|UIViewAutoresizingFlexibleRightMargin|UIViewAutoresizingFlexibleBottomMargin;
    _highlightView.layer.borderColor = [UIColor greenColor].CGColor;
    _highlightView.layer.borderWidth = 3;
    [self.view addSubview:_highlightView];

    // Semi-transparent label pinned to the bottom edge for the decoded text.
    _label = [[UILabel alloc] init];
    _label.frame = CGRectMake(0, self.view.bounds.size.height -100, self.view.bounds.size.width, 100);
    _label.autoresizingMask = UIViewAutoresizingFlexibleTopMargin;
    _label.backgroundColor = [UIColor colorWithWhite:0.15 alpha:0.65];
    _label.textColor = [UIColor whiteColor];
    _label.textAlignment = NSTextAlignmentCenter;
    [self.view addSubview:_label];

    _session = [[AVCaptureSession alloc] init];
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;

    _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
    // FIX: guard with canAddInput:/canAddOutput: — addInput:/addOutput:
    // raise an exception when the session cannot accept them.
    if (_input && [_session canAddInput:_input]) {
        [_session addInput:_input];
    } else {
        NSLog(@"Error: %@", error);
    }

    _output = [[AVCaptureMetadataOutput alloc] init];
    // Deliver detection callbacks on the main queue (UI is touched directly
    // in the delegate method).
    [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    if ([_session canAddOutput:_output]) {
        [_session addOutput:_output];
    }

    // Must be set AFTER the output is attached to the session; before that,
    // availableMetadataObjectTypes is empty.
    _output.metadataObjectTypes = [_output availableMetadataObjectTypes];

    _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _prevLayer.frame = self.view.bounds;
    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:_prevLayer];

    // FIX: -startRunning is a blocking call; Apple's AVCaptureSession docs
    // say to invoke it off the main thread so UI setup is not stalled.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [_session startRunning];
    });

    // The preview layer was added last, so lift the overlay views back above it.
    [self.view bringSubviewToFront:_highlightView];
    [self.view bringSubviewToFront:_label];
}
// AVCaptureMetadataOutputObjectsDelegate — invoked on the main queue (see
// the delegate queue configured in viewDidLoad) whenever machine-readable
// codes are detected in the camera stream. Highlights the first recognized
// code, snapshots the screen, hands the result to MainViewController and
// dismisses this controller.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
            AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
            AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];

    for (AVMetadataObject *metadata in metadataObjects) {

        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type])
            {
                // Convert from the metadata coordinate space into the preview
                // layer's coordinate space so the highlight matches the screen.
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }

        _highlightView.frame = highlightViewRect;
        if (detectionString != nil)
        {
            // FIX: removed the unused `outputSettings` dictionary
            // (AVVideoCodecJPEG) that was allocated here and never read.
            [self getScreenImage];
            _label.text = detectionString;
            [self performSelector:@selector(changeText) withObject:nil afterDelay:2.0];
            break;
        }
    }

    if (detectionString != nil) {
        // FIX: stop the capture session BEFORE dismissing; otherwise this
        // delegate can keep firing while the controller is being torn down.
        [_session stopRunning];
        [MainViewController setResultTexts:detectionString img:img];
        detectionString = nil;
        [self dismissViewControllerAnimated:YES completion:nil];
    }
}
// WARNING: UIGetScreenImage is a PRIVATE UIKit function with no public
// header — shipping it will get the app rejected from the App Store. The
// prototype is declared here only so the call below compiles.
CGImageRef UIGetScreenImage(void);

// Snapshots the entire screen into the `img` ivar, but only once a barcode
// has been decoded (detectionString set by the metadata delegate callback).
// NOTE(review): the "white screenshot" symptom in the question is presumably
// because AVCaptureVideoPreviewLayer content is not rendered through the
// normal Core Animation snapshot path — confirm against a UIGraphics-based
// capture attempt.
-(void)getScreenImage{
    if(detectionString!=nil){
    CGImageRef screen = UIGetScreenImage();
    // imageWithCGImage: retains the CGImage, so releasing `screen` here is
    // correct and avoids leaking the Core Graphics image.
    img = [UIImage imageWithCGImage:screen];
    CGImageRelease(screen);
    }
}

Bearbeitet:

Das funktioniert. Sie müssen die Ausgabe der Sitzung jedoch schnell wechseln, weil die Sitzung umkonfiguriert wird, um ein Bild aufzunehmen: Der Bildschirm verschwindet für eine Sekunde, und ich bekomme nur einen Screenshot mit 50 % Deckkraft. Über den folgenden Link habe ich Hilfe bekommen. Wie bekomme ich jetzt einen Screenshot mit 100 % Deckkraft?

_highlightView.frame = highlightViewRect;
        if (detectionString != nil)
        {
            **[_session removeOutput:_output];
            [_session addOutput:_stillImageOutput];**
            _label.text = detectionString;
            **[self captureNow];**
            [self performSelector:@selector(changeText) withObject:nil afterDelay:1.0];
            break;
        }

// Requests a full-resolution still image from _stillImageOutput (which the
// snippet above swaps into the session in place of the metadata output) and
// stores the decoded UIImage in the `img` ivar.
-(void)captureNow {
    // Locate the connection that carries video frames.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in _stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo] )
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection)
        {
            break;
        }
    }

    // FIX: bail out instead of passing a nil connection —
    // captureStillImageAsynchronouslyFromConnection: raises an
    // NSInvalidArgumentException for a nil connection.
    if (!videoConnection) {
        NSLog(@"captureNow: no video connection available on %@", _stillImageOutput);
        return;
    }

    NSLog(@"about to request a capture from: %@", _stillImageOutput);
    [_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
     {
         // FIX: the original handler ignored `error` and a NULL buffer;
         // jpegStillImageNSDataRepresentation: cannot take a NULL sample
         // buffer, so guard before decoding.
         if (imageSampleBuffer == NULL) {
             NSLog(@"Still image capture failed: %@", error);
             return;
         }
         NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
         img = [[UIImage alloc] initWithData:imageData];
     }];

}

Antworten auf die Frage(2)

Ihre Antwort auf die Frage