Recently I needed to figure out how to create an iPad app in which you can record and play back video from the iPad’s camera, and then upload it to a server, using AIR for iOS. I wasn’t able to find much information on the subject, so I eventually pieced it together from various sources. I’ve decided to share the first part of the process here, and I might share the second part in a later post.

I will assume you have a button on the Flash stage called ‘recordbtn’, which the code below will use to fire up the device’s camera.
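
If you’d rather create that button in code than on the stage, a quick stand-in works fine too. Here’s a minimal sketch (the shape and colour are arbitrary, and ‘recordbtn’ would then be a plain Sprite rather than a stage instance):

// hypothetical stand-in for the stage button: a plain red rectangle
var recordbtn:Sprite = new Sprite();
recordbtn.graphics.beginFill(0xCC0000);
recordbtn.graphics.drawRect(0, 0, 120, 50);
recordbtn.graphics.endFill();
recordbtn.buttonMode = true;
addChild(recordbtn);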

First we have to set up some variables and check for StageVideo availability:

// use stage video to playback recording
var stageVid:StageVideo;

// variables for displaying the video
var videoFile:File;
var ns:NetStream;
var nc:NetConnection;

// create instance of CameraUI for showing native camera app
var deviceCameraApp:CameraUI = new CameraUI();

// we'll use StageVideo to display the recorded video, first we have to listen for availability
stage.addEventListener(StageVideoAvailabilityEvent.STAGE_VIDEO_AVAILABILITY, onStageVideoAvailability);

function onStageVideoAvailability(e:StageVideoAvailabilityEvent):void
{
  // if StageVideo available
  if (e.availability == StageVideoAvailability.AVAILABLE)
  {
    // get stage video instance
    stageVid = stage.stageVideos[0];

    // create a net connection for playing back the video
    nc = new NetConnection();
    nc.connect(null);

    // create a netstream for the net connection; setting its client to
    // 'this' lets the timeline receive callbacks such as onMetaData
    ns = new NetStream(nc);
    ns.client = this;

    // add click event to record button
    recordbtn.addEventListener(MouseEvent.CLICK, onRecord);

    // add event for stage video render state
    stageVid.addEventListener(StageVideoEvent.RENDER_STATE, onRender);
  }
}
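
StageVideo isn’t guaranteed to be available on every device or render mode, so you may want a fallback when the event reports UNAVAILABLE. This is just a sketch of one approach, not part of the original code: display the stream through a classic Video object instead (the useFallbackVideo name is mine).

// hypothetical fallback: call this when e.availability is UNAVAILABLE
function useFallbackVideo():void
{
  // same NetConnection/NetStream setup as the StageVideo branch
  nc = new NetConnection();
  nc.connect(null);
  ns = new NetStream(nc);
  ns.client = this;

  // a classic display-list Video object instead of StageVideo
  var fallbackVideo:Video = new Video(529, 397);
  fallbackVideo.x = 247;
  fallbackVideo.y = 43;
  fallbackVideo.attachNetStream(ns);
  addChild(fallbackVideo);

  recordbtn.addEventListener(MouseEvent.CLICK, onRecord);
}

If you go this route, videoCaptured would also need an else branch that simply calls ns.play(videoFile.url), since as written it only plays when stageVid exists.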

function onRender(e:StageVideoEvent):void
{
  // when the video is ready to play set the viewport size so we can see the video
  stageVid.viewPort = new Rectangle(247, 43, 529, 397);
}
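
The viewport rectangle above is hard-coded to fit my layout. Since ns.client is set to this, another option is to size the viewport from the clip’s own metadata once it arrives; here’s a sketch, assuming the recorded file reports width and height in its metadata (camera recordings normally do):

// NetStream calls onMetaData on its client when the file's metadata loads
function onMetaData(info:Object):void
{
  if (stageVid && info.width && info.height)
  {
    // hypothetical alternative: match the viewport to the clip's size
    stageVid.viewPort = new Rectangle(0, 0, info.width, info.height);
  }
}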

Now we can create the event handler for the record button:

function onRecord(e:MouseEvent):void
{
  if (CameraUI.isSupported)
  {
    // add various camera event listeners
    deviceCameraApp.addEventListener(MediaEvent.COMPLETE, videoCaptured);
    deviceCameraApp.addEventListener(Event.CANCEL, captureCanceled);
    deviceCameraApp.addEventListener(ErrorEvent.ERROR, cameraError);

    // launch the camera in video mode
    deviceCameraApp.launch(MediaType.VIDEO);
  }
}

Next we create the handlers for the events the camera will fire, such as successful capture, cancellation, and error:

function videoCaptured(e:MediaEvent):void
{
  // get the video file from the camera's media promise
  var videoPromise:MediaPromise = e.data;
  videoFile = videoPromise.file;

  // attach netstream to the stagevideo and then play it
  if (stageVid)
  {
    stageVid.attachNetStream(ns);
    ns.play(videoFile.url);
  }
}
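
One thing worth knowing: on iOS the captured clip ends up in a temporary location, so if you plan to upload it later (the second part of this process) you may want to copy it somewhere under your control first. A sketch, assuming app storage is fine and using a file name of my own choosing:

// hypothetical: keep a copy of the capture in app storage so it
// is still around when we're ready to upload it
var savedFile:File = File.applicationStorageDirectory.resolvePath("capture.mov");
videoFile.copyTo(savedFile, true);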

function captureCanceled(event:Event):void
{
  // capture cancelled
}

function cameraError(error:ErrorEvent):void
{
  // camera error
}

And that should be all you need to record a video and have it play back within the app. Note that while the video will play, there are no playback controls, so you will have to create your own. That is the same process as for regular video, so I won’t go into it in detail here.
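
As a starting point, though, here’s a minimal sketch of a pause/resume toggle, assuming a second button on the stage called ‘playpausebtn’ (that button is my own addition, not part of the project above):

// hypothetical toggle button wired to the same NetStream
playpausebtn.addEventListener(MouseEvent.CLICK, onPlayPause);

function onPlayPause(e:MouseEvent):void
{
  // togglePause() pauses a playing stream and resumes a paused one
  ns.togglePause();
}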

I have put the full, uninterrupted source code below:

import flash.events.ErrorEvent;
import flash.events.Event;
import flash.events.MediaEvent;
import flash.events.MouseEvent;
import flash.events.StageVideoAvailabilityEvent;
import flash.events.StageVideoEvent;
import flash.filesystem.File;
import flash.geom.Rectangle;
import flash.media.CameraUI;
import flash.media.MediaPromise;
import flash.media.MediaType;
import flash.media.StageVideo;
import flash.media.StageVideoAvailability;
import flash.net.NetConnection;
import flash.net.NetStream;

// use stage video to playback recording
var stageVid:StageVideo;

// variables for displaying the video
var videoFile:File;
var ns:NetStream;
var nc:NetConnection;

// create instance of CameraUI for showing native camera app
var deviceCameraApp:CameraUI = new CameraUI();

// we'll use StageVideo to display the recorded video, first we have to listen for availability
stage.addEventListener(StageVideoAvailabilityEvent.STAGE_VIDEO_AVAILABILITY, onStageVideoAvailability);

function onStageVideoAvailability(e:StageVideoAvailabilityEvent):void
{
  // if StageVideo available
  if (e.availability == StageVideoAvailability.AVAILABLE)
  {
    // get stage video instance
    stageVid = stage.stageVideos[0];

    // create a net connection for playing back the video
    nc = new NetConnection();
    nc.connect(null);

    // create a netstream for the net connection; setting its client to
    // 'this' lets the timeline receive callbacks such as onMetaData
    ns = new NetStream(nc);
    ns.client = this;

    // add click event to record button
    recordbtn.addEventListener(MouseEvent.CLICK, onRecord);

    // add event for stage video render state
    stageVid.addEventListener(StageVideoEvent.RENDER_STATE, onRender);
  }
}

function onRender(e:StageVideoEvent):void
{
  // when the video is ready to play set the viewport size so we can see the video
  stageVid.viewPort = new Rectangle(247, 43, 529, 397);
}

function onRecord(e:MouseEvent):void
{
  if (CameraUI.isSupported)
  {
    // add various camera events
    deviceCameraApp.addEventListener(MediaEvent.COMPLETE, videoCaptured);
    deviceCameraApp.addEventListener(Event.CANCEL, captureCanceled);
    deviceCameraApp.addEventListener(ErrorEvent.ERROR, cameraError);

    // launch the camera in video mode
    deviceCameraApp.launch(MediaType.VIDEO);
  }
}

function videoCaptured(e:MediaEvent):void
{
  // get the video file from the camera's media promise
  var videoPromise:MediaPromise = e.data;
  videoFile = videoPromise.file;

  // attach netstream to the stagevideo and then play it
  if (stageVid)
  {
    stageVid.attachNetStream(ns);
    ns.play(videoFile.url);
  }
}

function captureCanceled(event:Event):void
{
  // capture cancelled
}

function cameraError(error:ErrorEvent):void
{
  // camera error
}