
Swift Multipeer video app not reading data

I am trying to build an app that captures video on an iPhone and sends it to an iPad over Apple's Multipeer Connectivity framework.

// 
// ViewController.swift 
// Multipeer Video 
// 
// 


import UIKit 
import MultipeerConnectivity 
import CoreMotion 
import SceneKit 
import AVFoundation 
import AVKit 
import MediaPlayer 

class ViewController: UIViewController,  MCNearbyServiceAdvertiserDelegate, MCNearbyServiceBrowserDelegate, MCSessionDelegate, NSStreamDelegate, AVCaptureVideoDataOutputSampleBufferDelegate 
{ 
//Video 
let transferredSession = AVCaptureSession() 
let captureSession = AVCaptureSession() 
var transferredLayer : AVCaptureVideoPreviewLayer? 
var previewLayer : AVCaptureVideoPreviewLayer? 
var captureDevice : AVCaptureDevice? 
var videoDeviceOutput: AVCaptureVideoDataOutput! 
var sessionQueue: dispatch_queue_t! 
var data = NSData() 
var movieplayer = MPMoviePlayerController() 

//MultiPeer 
let label = UILabel() 
var displayLink: CADisplayLink? 
let serviceType = "motion-control" 
let peerID = MCPeerID(displayName: UIDevice.currentDevice().name) 
var serviceAdvertiser : MCNearbyServiceAdvertiser! 
var serviceBrowser : MCNearbyServiceBrowser! 
lazy var session : MCSession = 
{ 
    let session = MCSession(peer: self.peerID, securityIdentity: nil, encryptionPreference: MCEncryptionPreference.Required) 
    session.delegate = self 
    return session 
}() 


override func viewDidLoad() 
{ 
    super.viewDidLoad() 

    //Video 
    captureSession.sessionPreset = AVCaptureSessionPresetHigh 

    let devices = AVCaptureDevice.devices() 

    // Loop through all the capture devices on this phone 
    for device in devices { 
     // Make sure this particular device supports video 
     if (device.hasMediaType(AVMediaTypeVideo)) { 
      // Finally check the position and confirm we've got the back camera 
      if(device.position == AVCaptureDevicePosition.Back) { 
       captureDevice = device as? AVCaptureDevice 
       if captureDevice != nil { 
        print("Capture device found") 

       } 
      } 
     } 
    } 

    label.textAlignment = NSTextAlignment.Center 
    view.addSubview(label) 
    if UIDevice.currentDevice().userInterfaceIdiom == UIUserInterfaceIdiom.Pad 
    { 
     label.text = "iPad" 
     view.backgroundColor = UIColor.blackColor() 
     label.textColor = UIColor.whiteColor() 
     initialiseAdvertising() 
     //need to set up receiving video 
    } 
    else 
    { 
     label.text = "iPhone" 
     initialiseBrowsing() 
     beginVideoSession() 
    } 

} 

func beginVideoSession() { 

    configureDevice() 
    do { 
     //try captureSession.addInput(input: captureDevice) 
     try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice)) 
     updateDeviceSettings(0.0, isoValue: 0.0) 
    } catch { 
     //error message etc. 
     print("Capture device not initialisable") 
    } 
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
    self.view.layer.addSublayer(previewLayer!) 
    previewLayer?.frame = self.view.layer.frame 
    self.view.layer.insertSublayer(previewLayer!, atIndex: 0) 
    captureSession.startRunning() 
} 

func configureDevice() { 
    if let device = captureDevice { 
     do { 
      try device.lockForConfiguration() 
      device.focusMode = .Locked 
      device.unlockForConfiguration() 
     } catch { 
      //error message etc. 
      print("Capture device not configurable") 
     } 
    } 

} 

// set ISO 

func updateDeviceSettings(focusValue : Float, isoValue : Float) { 
    if let device = captureDevice { 
     do { 
      try device.lockForConfiguration() 
      device.setFocusModeLockedWithLensPosition(focusValue, completionHandler: { (time) -> Void in 
      }) 

      let minISO = device.activeFormat.minISO 
      let maxISO = device.activeFormat.maxISO 
      let clampedISO = isoValue * (maxISO - minISO) + minISO 

      device.setExposureModeCustomWithDuration(AVCaptureExposureDurationCurrent, ISO: clampedISO, completionHandler: { (time) -> Void in 
       // 
      }) 

      device.unlockForConfiguration() 
     } catch { 
      print("Can't update device settings") 
     } 

    } 
} 


// MARK: MCNearbyServiceBrowserDelegate (iPhone is browser) 

var streamTargetPeer: MCPeerID? 
var outputStream: NSOutputStream? 

func initialiseBrowsing() 
{ 
    serviceBrowser = MCNearbyServiceBrowser(peer: peerID, serviceType: serviceType) 
    serviceBrowser.delegate = self 
    serviceBrowser.startBrowsingForPeers() 
} 

func browser(browser: MCNearbyServiceBrowser, foundPeer peerID: MCPeerID, withDiscoveryInfo info: [String : String]?) 
{ 
    print("Found Peer! \(peerID)") 
    streamTargetPeer = peerID 
    browser.invitePeer(peerID, toSession: session, withContext: nil, timeout: 120) 

    displayLink = CADisplayLink(target: self, selector: #selector(ViewController.step)) 
    displayLink?.addToRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode) 
    print("displayLink") 
} 

//Initializes SampleBufferDelegate and videoDeviceOutput 
func addVideoOutput() { 
    videoDeviceOutput = AVCaptureVideoDataOutput() 
    videoDeviceOutput.alwaysDiscardsLateVideoFrames = true 
    self.sessionQueue = dispatch_queue_create("Camera Session", DISPATCH_QUEUE_SERIAL) 
    videoDeviceOutput.setSampleBufferDelegate(self, queue: sessionQueue) 
    if captureSession.canAddOutput(videoDeviceOutput) { 
     captureSession.addOutput(videoDeviceOutput) 
    } 
} 

//Grabbing frames from camera 
func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) { 
    print("frame received") 
    let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)! 
    CVPixelBufferLockBaseAddress(imageBuffer, 0) 
    let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer) 
    let height = CVPixelBufferGetHeight(imageBuffer) 
    let src_buff = CVPixelBufferGetBaseAddress(imageBuffer) 
    data = NSData(bytes: src_buff, length: bytesPerRow * height) 
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 

} 

func startStream() 
{ 
    guard let streamTargetPeer = streamTargetPeer where outputStream == nil else 
    { 
     return 
    } 
    do 
    { 
     print("stream started") 
     outputStream = try session.startStreamWithName("MotionControlStream", toPeer: streamTargetPeer) 
     outputStream?.scheduleInRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode) 
     outputStream?.open() 
    } 
    catch 
    { 
     print("unable to start stream!! \(error)") 
    } 
} 

func step() 
{ 
    startStream() 
    print("step") 
    guard let outputStream = outputStream else 
    { 
     print("no stream") 
     return 
    } 
    if outputStream.hasSpaceAvailable 
    { 
     print("writing to output") 
     outputStream.write(UnsafePointer<UInt8>(data.bytes), maxLength: data.length) 
    } 
    else 
    { 
     print("no space availale") 
    } 
} 

func browser(browser: MCNearbyServiceBrowser, lostPeer peerID: MCPeerID) 
{ 
    label.text = "Lost Peer!" 
} 

// MARK: MCNearbyServiceAdvertiserDelegate (iPad is advertiser) 

func initialiseAdvertising() 
{ 
    serviceAdvertiser = MCNearbyServiceAdvertiser(peer: peerID, discoveryInfo: nil, serviceType: serviceType) 
    serviceAdvertiser.delegate = self 
    serviceAdvertiser.startAdvertisingPeer() 
} 

// MARK: MCSessionDelegate 

func session(session: MCSession, peer peerID: MCPeerID, didChangeState state: MCSessionState) 
{ 
    let stateName:String 
    switch state 
    { 
    case MCSessionState.Connected: 
     stateName = "connected" 
    case MCSessionState.Connecting: 
     stateName = "connecting" 
    case MCSessionState.NotConnected: 
     stateName = "not connected" 
    } 

    let deviceName:String 
    switch UIDevice.currentDevice().userInterfaceIdiom 
    { 
    case UIUserInterfaceIdiom.Pad: 
     deviceName = "iPad" 
    case UIUserInterfaceIdiom.Phone: 
     deviceName = "iPhone" 
    default: 
     deviceName = "Unspecified" 
    } 

    dispatch_async(dispatch_get_main_queue()) 
    { 
     self.label.text = "\(deviceName) didChangeState: \(stateName)" 
    } 
} 


func session(_: MCSession, didReceiveStream stream: NSInputStream, withName streamName: String, fromPeer peerID: MCPeerID) 
{ 
    print("did recieve") 
    stream.scheduleInRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode) 
    stream.delegate = self 
    stream.open() 
} 

func stream(stream: NSStream, handleEvent eventCode: NSStreamEvent) 
{ 
    print(eventCode) 
    if eventCode == NSStreamEvent.EndEncountered{ 
     print("end") 
    } 
    print("stream started") 
    if let inputStream = stream as? NSInputStream //where eventCode == NSStreamEvent.HasBytesAvailable 
    { 
     print("Does this byte?") 
     /* let bufferSize = 1024 
     var buffer = [UInt8](count: bufferSize, repeatedValue: 0) 
     let bytesRead = inputStream.read(&buffer, maxLength: bufferSize) 
     print(bytesRead) 
     if bytesRead >= 0 { 
      let output = NSString(bytes: &buffer, length: bytesRead, encoding: NSUTF8StringEncoding) 
      print(output) 
     } 
     */ 

     var bytes = [UInt8](count:12, repeatedValue: 0) 
     inputStream.read(&bytes, maxLength: data.length) 


     let dataIn: NSData = "Stream".dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: true)! 
     let stream: NSInputStream = NSInputStream(data: dataIn) 
     var buffer = [UInt8](count: 8, repeatedValue: 0) 
     stream.open() 
     if stream.hasBytesAvailable { 
      print("stream has bytes!") 
      let result: Int = stream.read(&buffer, maxLength: buffer.count) 
      print("result: \(result)") 

      let dataString: NSString = NSString(data: dataIn, encoding: NSUTF8StringEncoding)! 
      print(dataString) 
      let movieURL = NSURL.init(string: dataString as String) 
      print(movieURL) 

     } else { 
     print("stream has no bytes") 
     } 
     dispatch_async(dispatch_get_main_queue()) 
     { 

     } 


     //*/ 
    } 
    if let outstream = stream as? NSOutputStream { 
     print("This is output") 
     print(outstream) 
    } 
} 

func session(session: MCSession, didFinishReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, atURL localURL: NSURL, withError error: NSError?) 
{ 
} 

func session(session: MCSession, didStartReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, withProgress progress: NSProgress) 
{ 
} 

// MARK: Layout 

func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID) 
{ 
} 

override func viewDidLayoutSubviews() 
{ 
    if UIDevice.currentDevice().userInterfaceIdiom == UIUserInterfaceIdiom.Pad 
    { 
     label.frame = CGRect(x: 0, y: topLayoutGuide.length, width: view.frame.width, height: label.intrinsicContentSize().height) 
    } 
    else 
    { 
     label.frame = view.bounds 
    } 
} 

func advertiser(advertiser: MCNearbyServiceAdvertiser, didReceiveInvitationFromPeer peerID: MCPeerID, withContext context: NSData?, invitationHandler: (Bool, MCSession) -> Void) 
{ 
    invitationHandler(true, self.session) 
} 
} 

It seems I am able to send data correctly from the iPhone, but I have not been able to figure out what I am doing wrong on the iPad side. I assume it has something to do with the run loop, but while debugging I noticed that the iPad receives an end-of-stream event code.

Thanks for your help!

Answer

There are (obviously) two main tasks: storing the base address of a pixel buffer in an NSData object, and then restoring that base address at the other end. The best way to do this with Apple's Multipeer Connectivity framework is not with streams, but with data objects.

Here is how to send the raw buffer data from a CVPixelBuffer from your iPhone:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { 
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    NSError *err; 
    // Package the frame's raw bytes into an NSData object and send it to every connected peer 
    [((ViewController *)self.parentViewController).session sendData:[self dataFromImageBuffer:imageBuffer withBytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer) withHeight:CVPixelBufferGetHeight(imageBuffer)] toPeers:((ViewController *)self.parentViewController).session.connectedPeers withMode:MCSessionSendDataReliable error:&err]; 
} 
  
- (NSData *)dataFromImageBuffer:(CVImageBufferRef)imageBuffer withBytesPerRow:(size_t)bytesPerRow withHeight:(NSInteger)height 
{ 
    NSMutableData *data = [NSMutableData new]; 
    if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess) 
    { 
        uint8_t *rawBuffer = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
        // Copy the whole frame; compute the length from the buffer's geometry instead of hard-coding it 
        [data appendBytes:rawBuffer length:bytesPerRow * height]; 
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 
    } 
    return data; 
} 
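
For reference, here is a minimal Swift 2 sketch of the same sending side, matching the question's delegate method; it assumes the session property declared in the question's view controller, and computes the length as bytesPerRow * height rather than hard-coding it:

func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) { 
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 
    CVPixelBufferLockBaseAddress(imageBuffer, 0) 
    let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer) 
    let height = CVPixelBufferGetHeight(imageBuffer) 
    // Copy the whole frame out of the pixel buffer while it is locked 
    let frameData = NSData(bytes: CVPixelBufferGetBaseAddress(imageBuffer), length: bytesPerRow * height) 
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0) 
    do { 
        try session.sendData(frameData, toPeers: session.connectedPeers, withMode: .Reliable) 
    } catch { 
        print("sendData failed: \(error)") 
    } 
} 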

Here is how to display it on your iPad:

- (void)session:(nonnull MCSession *)session didReceiveData:(nonnull NSData *)data fromPeer:(nonnull MCPeerID *)peerID { 
    dispatch_async(dispatch_get_main_queue(), ^{ 
        uint8_t *buffer = (uint8_t *)[data bytes]; 
        NSLog(@"received %lu bytes", (unsigned long)[data length]); 
        // These dimensions must match the sender's pixel format (here 640x480 BGRA, 2560 bytes per row) 
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
        CGContextRef newContext = CGBitmapContextCreate(buffer, 640, 480, 8, 2560, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
        CGImageRef newImage = CGBitmapContextCreateImage(newContext); 
         
        CGContextRelease(newContext); 
        CGColorSpaceRelease(colorSpace); 
         
        UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp]; 
        CGImageRelease(newImage); 
         
        if (image) { 
            NSLog(@"image size %f x %f", [image size].width, [image size].height); 
            [((ViewerViewController *)self.childViewControllers.lastObject).view.layer setContents:(__bridge id)image.CGImage]; 
        } 
    }); 
} 
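
And a rough Swift 2 translation of the receiving side, assuming the same 640 × 480 BGRA format (2,560 bytes per row) as above and drawing straight into the receiving view controller's own layer:

func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID) { 
    dispatch_async(dispatch_get_main_queue()) { 
        // These constants must match the sender's pixel format 
        let width = 640, height = 480, bytesPerRow = 2560 
        let colorSpace = CGColorSpaceCreateDeviceRGB() 
        let bitmapInfo = CGBitmapInfo.ByteOrder32Little.rawValue | CGImageAlphaInfo.PremultipliedFirst.rawValue 
        let context = CGBitmapContextCreate(UnsafeMutablePointer<Void>(data.bytes), width, height, 8, bytesPerRow, colorSpace, bitmapInfo) 
        if let image = CGBitmapContextCreateImage(context) { 
            self.view.layer.contents = image 
        } 
    } 
} 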

Mind you, this is the raw, uncompressed, and otherwise unmodified image data; accordingly, it is huge, and it takes far too long to transfer from one device to another for the product to be viable.
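
To put rough numbers on it: a 640 × 480 BGRA frame is 2,560 bytes per row × 480 rows ≈ 1.2 MB, so at 30 frames per second you would need to push roughly 35 MB/s through the session, far more than a Multipeer link can typically sustain.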

Here is one way to compress the data before sending in order to achieve real-time performance; it is lower quality than the solution I use now, but it is quick and easy.

On your iPhone:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    CVPixelBufferLockBaseAddress(imageBuffer,0); 
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
    CGImageRef newImage = CGBitmapContextCreateImage(newContext); 
    CGContextRelease(newContext); 
    CGColorSpaceRelease(colorSpace); 
    // Unlock only after the bitmap context has copied the pixels into an image 
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 
    UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp]; 
    CGImageRelease(newImage); 
    if (image) { 
     NSData *data = UIImageJPEGRepresentation(image, 0.7); 
     NSError *err; 
     [((ViewController *)self.parentViewController).session sendData:data toPeers:((ViewController *)self.parentViewController).session.connectedPeers withMode:MCSessionSendDataReliable error:&err]; 
    } 

}

On your iPad:

- (void)session:(nonnull MCSession *)session didReceiveData:(nonnull NSData *)data fromPeer:(nonnull MCPeerID *)peerID { 
    UIImage *image = [UIImage imageWithData:data]; 
    if (image) { 
        dispatch_async(dispatch_get_main_queue(), ^{ 
            NSLog(@"Displaying image..."); 
            [((ViewerViewController *)self.childViewControllers.lastObject).view.layer setContents:(__bridge id)image.CGImage]; 
        }); 
    } 
} 
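
And a minimal Swift 2 sketch of the same JPEG receive path, again assuming the image is shown in the receiving view controller's own layer rather than a child view controller:

func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID) { 
    // UIImage(data:) decodes the JPEG produced by UIImageJPEGRepresentation on the sender 
    if let image = UIImage(data: data) { 
        dispatch_async(dispatch_get_main_queue()) { 
            self.view.layer.contents = image.CGImage 
        } 
    } 
} 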