MonoMac.AVFoundation.AVCaptureSession Class
Coordinates a recording session.

See Also: AVCaptureSession Members

Syntax

[MonoMac.Foundation.Register("AVCaptureSession", true)]
public class AVCaptureSession : MonoMac.Foundation.NSObject

Remarks

The AVCaptureSession object coordinates the recording of video or audio input and passes the recorded information to one or more output objects. An iPhone 3, for example, has two input devices: one for the camera and one for the microphone. An iPhone 4 has three input devices: one for the microphone, one for the back camera, and one for the front camera.
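
For example, you can enumerate the capture devices available for a given media type before choosing one. A minimal sketch, assuming the binding exposes AVCaptureDevice.DevicesWithMediaType and the AVMediaType constants (check your binding version):

C# Example


// List every video capture device known to the system; each entry is a
// candidate source for an AVCaptureDeviceInput.
foreach (var device in AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video))
	Console.WriteLine (device.LocalizedName);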

To record, you must create instances of the MonoMac.AVFoundation.AVCaptureInput class, usually instances of the concrete MonoMac.AVFoundation.AVCaptureDeviceInput subclass, and add them to the session with AVCaptureSession.AddInput().

You can then configure one or more output ports for the captured data. These can be still frames, video frames with timing information, audio samples, or QuickTime movie files, or you can render the output directly to a CoreAnimation layer, as sketched below.
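
For example, to render the session directly to a layer you can attach an AVCaptureVideoPreviewLayer. A minimal sketch, where hostView is an illustrative view supplied by your application:

C# Example


// Render the live capture session into a CoreAnimation layer and add it
// to an existing view's layer tree.
var previewLayer = new AVCaptureVideoPreviewLayer (session) {
	Frame = hostView.Bounds
};
hostView.Layer.AddSublayer (previewLayer);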

Once you have configured the input and output components of your session, you start the actual processing by calling the AVCaptureSession.StartRunning() method, and stop it later by calling AVCaptureSession.StopRunning().

C# Example


bool SetupCapture ()
{
	// configure the capture session for low resolution; change this if your code
	// can cope with more data or volume
	session = new AVCaptureSession () {
	        SessionPreset = AVCaptureSession.PresetMedium
	};
	
	// create a device input and attach it to the session
	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null){
	        Console.WriteLine ("No video input device");
	        return false;
	}
	session.AddInput (input);
	
	// create a VideoDataOutput and add it to the session
	var output = new AVCaptureVideoDataOutput () {
	        VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
	
	        // cap the frame rate at a given speed; in this sample, 15 frames per second
	        MinFrameDuration = new CMTime (1, 15)
	};
	
	// configure the output
	queue = new MonoMac.CoreFoundation.DispatchQueue ("myQueue");
	outputRecorder = new OutputRecorder ();
	output.SetSampleBufferDelegateAndQueue (outputRecorder, queue);
	session.AddOutput (output);
	
	session.StartRunning ();
	return true;
}

public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate {
        public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
                try {
                        var image = ImageFromSampleBuffer (sampleBuffer);

                        // Do something with the image; here we just display it in our main view.
                        AppDelegate.ImageView.BeginInvokeOnMainThread (delegate {
                                AppDelegate.ImageView.Image = image;
                        });

                        //
                        // Although this looks innocent ("Oh, he is just optimizing this case away"),
                        // it is incredibly important to dispose the buffer in this callback:
                        // AVFoundation has a fixed number of sample buffers and, if it runs out
                        // of free buffers, it will stop delivering frames.
                        //
                        sampleBuffer.Dispose ();
                } catch (Exception e){
                        Console.WriteLine (e);
                }
        }

        UIImage ImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
        {
                // Get the CoreVideo image
                using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer){
                        // Lock the base address
                        pixelBuffer.Lock (0);
                        // Get the number of bytes per row for the pixel buffer
                        var baseAddress = pixelBuffer.BaseAddress;
                        int bytesPerRow = pixelBuffer.BytesPerRow;
                        int width = pixelBuffer.Width;
                        int height = pixelBuffer.Height;
                        var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;
                        // Create a CGImage on the RGB colorspace from the configured parameter above
                        using (var cs = CGColorSpace.CreateDeviceRGB ())
                        using (var context = new CGBitmapContext (baseAddress, width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo) flags))
                        using (var cgImage = context.ToImage ()){
                                pixelBuffer.Unlock (0);
                                return UIImage.FromImage (cgImage);
                        }
                }
        }
}
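
To change the topology or preset of a running session, bracket the changes with AVCaptureSession.BeginConfiguration() and AVCaptureSession.CommitConfiguration() so they are applied as a single atomic update, and call AVCaptureSession.StopRunning() when you are done capturing. A minimal sketch, reusing the session field from the example above:

C# Example


// Switch the running session to a higher-resolution preset atomically.
session.BeginConfiguration ();
session.SessionPreset = AVCaptureSession.PresetHigh;
session.CommitConfiguration ();

// ... later, once capture is finished, stop delivering buffers.
session.StopRunning ();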

Requirements

Namespace: MonoMac.AVFoundation
Assembly: XamMac (in XamMac.dll)
Assembly Versions: 0.0.0.0