Thursday, December 22, 2011

Notes on Windows 8 - episode 14

Metro apps in JavaScript. Audio: selecting the language version, playback control, Bluetooth devices. Video: playback, selecting the output device, selecting the audio language, the realtime option, DRM, the stabilization effect, stereo video (3D), rendering optimization, scaling the picture to the frame size, rendering with a horizontally mirrored picture, capturing video and audio, recording video to a file: controlling picture and sound settings, rotating the picture when the camera is rotated, mirroring the picture (e.g. in the camera preview during a video conference), the stabilization effect and other effects.

Audio

(function () {

    function id(elementId) {

        return document.getElementById(elementId);

    }

 

    function scenario1DoSomething() {

        // Get a reference to the media control.

        var mediaControls = Windows.Media.Devices.MediaControl;

 

        // Add an event listener to listen for common media commands from the hardware

        // buttons. 

        mediaControls.addEventListener("playpausetoggleselected", playpause, false);

        mediaControls.addEventListener("playselected", play, false);

        mediaControls.addEventListener("stopselected", stop, false);

        mediaControls.addEventListener("pauseselected", pause, false);

        mediaControls.addEventListener("nexttrackselected", nexttrack, false);

        mediaControls.addEventListener("previoustrackselected", prevtrack, false);

        mediaControls.addEventListener("fastforwardselected", fastforward, false);

        mediaControls.addEventListener("rewindselected", rewind, false);

        mediaControls.addEventListener("channelupselected", channelup, false);

        mediaControls.addEventListener("channeldownselected", channeldown, false);

        mediaControls.addEventListener("recordselected", record, false);

    }

})();

You can configure the hardware media keys on a keyboard and then use them to control an audio/video (AV) stream: play, pause, stop, and so on.
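The handler functions registered above (playpause, play, stop, and the rest) are not part of this excerpt. A minimal sketch of what a few of them might look like, assuming a global <audio id="audtag"> element (the element id and handler bodies are illustrative, not the SDK sample's own):

function playpause() {
    // Toggle between playing and paused.
    var audtag = document.getElementById("audtag");
    if (audtag) {
        if (audtag.paused) {
            audtag.play();
        } else {
            audtag.pause();
        }
    }
}

function play() {
    var audtag = document.getElementById("audtag");
    if (audtag) { audtag.play(); }
}

function pause() {
    var audtag = document.getElementById("audtag");
    if (audtag) { audtag.pause(); }
}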

(function () {

    var audtag = null;

    var mediaControl;

    var isPlaying; 

     

    function scenario1DoSomething() {

        // Create new audio tag for "media" class

        if(!audtag) {

          audtag = document.createElement('audio');

          audtag.setAttribute("id", "audtag");

          audtag.setAttribute("controls", "true");

          audtag.setAttribute("msAudioCategory", "media");

          audtag.setAttribute("src", "folk_rock.mp3");

          document.getElementById("scenario1Output").appendChild(audtag);

          audtag.load();

        }

    }


    function scenario2DoSomething() {

        // Create new audio tag for "communication" class

        if(!audtag) {

          audtag = document.createElement('audio');

          audtag.setAttribute("id", "audtag");

          audtag.setAttribute("controls", "true");

          audtag.setAttribute("msAudioDeviceType", "communications");

          audtag.setAttribute("msAudioCategory", "communications");

          audtag.setAttribute("src", "folk_rock.mp3");

          document.getElementById("scenario2Output").appendChild(audtag);

          audtag.load();

        }

    }


    function initialize() {

        // Add any initialization code here

        id("scenario1Open").addEventListener("click", scenario1DoSomething, false);

        id("scenario2Open").addEventListener("click", scenario2DoSomething, false);

        id("scenarios").addEventListener("change", onScenarioChanged, false);

 

        // Get a reference to the media control.

        mediaControl = Windows.Media.Devices.MediaControl;

        // Add event listeners for PBM notifications to illustrate app is

        // losing/gaining focus and pass the audio tag along to the function

        mediaControl.addEventListener("mediafocuslost", focusLost, false);

        mediaControl.addEventListener("mediafocusreceived", focusReceived, false);

    }

                   …

    function focusLost() {

        // Handle the lost media focus event by pausing our audio stream.


        if (audtag) {

            if (!audtag.paused) {

                isPlaying = true;

                audtag.pause();

            } else {

                isPlaying = false;

            }

        }

    }

 

    function focusReceived() {

        // Handle the received media focus event by playing our audio stream.


        if (audtag) {

            if (isPlaying) {

                audtag.play();

            }

        }

    }

})();

Manage calls on the default Bluetooth communications device

<audio id="audiotag" src="folk_rock.mp3" msaudiocategory="communications"

       msaudiodevicetype="communications" loop>

</audio>


(function () {

 

    // Initialize the call control object here using the default bluetooth communications device

    var callControls = null;

    var callToken;

    var audiotag; 

     

    function initDevice() {

        if (!callControls) {

            try {

                callControls = Windows.Media.Devices.CallControl.getDefault();

 

                if (callControls) {

                    // Add the event listener to listen for the various button presses

                    callControls.addEventListener("answerrequested", answerButton, false);

                    callControls.addEventListener("hanguprequested", hangupButton, false);

                    callControls.addEventListener("audiotransferrequested", audiotransferButton, false);

                    callControls.addEventListener("redialrequested", redialButton, false);

                    callControls.addEventListener("dialrequested", dialButton, false);

 

                    sdkSample.displayStatus("Call Controls Initialized");

                    id("scenario1Ring").disabled = false;

                } else {

                    sdkSample.displayError("No Bluetooth device detected.");

                }

            }

            catch (e) {               

                sdkSample.displayError("No Bluetooth device detected.");

            }

        }

    }

 

    function newIncomingCall() {

        // Indicate a new incoming call and ring the headset.

        callToken = callControls.indicateNewIncomingCall(true, "5555555555");

        sdkSample.displayStatus("Call Token: " + callToken);

    }

 

    function answerButton() {

        // When the answer button is pressed indicate to the device that the call was answered

        // and start a song on the headset (this is done by streaming music to the bluetooth

        // device in this sample)

        sdkSample.displayStatus("Answer requested: " + callToken);

        callControls.indicateActiveCall(callToken);

        audiotag = document.getElementById("audiotag");

        audiotag.play();

    }

 

    function hangupButton() {

        // Hang up request received.  The application should end the active call and stop

        // streaming to the headset

        sdkSample.displayStatus("Hangup requested");

        callControls.endCall(callToken);

        audiotag = document.getElementById("audiotag");

        audiotag.pause();

    }

 

    function audiotransferButton() {

        // Handle the audio transfer request here

        sdkSample.displayStatus("Audio Transfer requested");

    }

 

    function redialButton(redialRequestedEventArgs) {

        // Handle the redial request here.  Indicate to the device that the request was handled.

        sdkSample.displayStatus("Redial requested");

        redialRequestedEventArgs.handled = true;

    }

 

    function dialButton(dialRequestedEventArgs) {

        // A device may send a dial request by either sending a URI or if it is a speed dial,

        // an integer with the number to dial.

        if (typeof(dialRequestedEventArgs.contact) === "number") {

            sdkSample.displayStatus("Dial requested: " + dialRequestedEventArgs.contact);

            dialRequestedEventArgs.handled = true;

        }

        else {

            sdkSample.displayStatus("Dial requested: " + dialRequestedEventArgs.contact.schemeName + ":"

            + dialRequestedEventArgs.contact.path);

            dialRequestedEventArgs.handled = true;

        }

    }

})();

Some devices require an audio stream to be opened first, so that an audio file can then start playing to the default communications device.
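For example, a hedged sketch that opens the stream before ringing the headset, reusing the <audio id="audiotag"> element shown above (whether load() is enough to open the stream on a given device is an assumption):

// Open the audio stream on the default communications device first.
var audiotag = document.getElementById("audiotag");
audiotag.load();

// Only then indicate the incoming call and ring the headset.
callToken = callControls.indicateNewIncomingCall(true, "5555555555");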

Only use the msAudioCategory assignment if you need audio to play in the background. Audio playback drains the battery, so unless there is a clear need for background audio (for example, media playback designed for longer-term listening), do not declare an audio category, or use the "Other" category. Otherwise, your application will be muted and then suspended.

All categories other than "Communications" will be muted and suspended in the background if the low-latency option in the audio pipeline is also selected. Low-latency buffers use significantly more battery power and are reserved for foreground applications on which the user is focused.

If the user chooses to listen to the game audio, you should programmatically begin playback of a new audio stream with its msAudioCategory value set to "Media". This will mute and suspend the audio stream that is being played by the current app, and begin playback of the new game audio soundtrack. If the user chooses to listen to the existing playlist or music stream, then do nothing. Game sounds will mix with existing audio.
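A minimal sketch of starting such a stream (the file name is illustrative):

// Start a new soundtrack stream in the "Media" category.
var soundtrack = document.createElement("audio");
soundtrack.setAttribute("msAudioCategory", "Media"); // set before src
soundtrack.setAttribute("src", "soundtrack.mp3");    // illustrative file
document.body.appendChild(soundtrack);
soundtrack.play();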

Multimedia

Playing audio and video

Playing video

Internet Client Capability - this capability gives an application outbound access to the Internet, which is necessary to play video from an Internet URL.

<video src="http://example.com/clip1.mp4" controls></video>

The video element provides a set of built-in playback controls that enable the user to start or pause the video, seek to a new position, and adjust the volume. The playback controls are not visible by default. To enable them, just add the controls attribute, as shown. The controls appear when the user hovers the mouse over the video. If you know in advance the dimensions of the video, it is a good idea to set the width and height attributes on the video element. The width and height attributes always specify CSS pixels, and do not take a unit. If the width and height attributes don’t match the intrinsic size of the video, the video control stretches the video, while preserving the aspect ratio by using letterboxes if needed. However, it’s best to avoid stretching the video.
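The same setup can also be done from script; a small sketch (element sizes are illustrative):

var video = document.createElement("video");
video.src = "http://example.com/clip1.mp4";
video.controls = true;  // show the built-in playback controls
video.width = 640;      // CSS pixels, no unit
video.height = 360;     // match the intrinsic size to avoid stretching
document.body.appendChild(video);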

Play a local media file

function playVideo() {
    // Let the user pick a video file, then play it.
    var openPicker = new Windows.Storage.Pickers.FileOpenPicker();
    openPicker.suggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.videosLibrary;
    openPicker.fileTypeFilter.append(".mp4");
    openPicker.pickSingleFileAsync().then(function (file) {
        if (file) {
            var video = document.getElementById('myvideo');
            video.src = URL.createObjectURL(file);
            video.play();
        }
    });
}

Capabilities: Check the box for Video Library Access or Music Library Access

You can ensure smooth transitions between video or audio clips by using at least two media elements and alternating between them. The media tag in the foreground plays the current stream, while the other tag preloads the next stream in the background.

If the media clips are part of a playlist, a web app needs to manage the playlist: parse it and pass each individual source to the video or audio tag for playback, as sketched below.
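A minimal sketch of both ideas combined, assuming two <video> elements with ids video1 and video2 and a plain array as the playlist (all names illustrative):

var playlist = ["clip1.mp4", "clip2.mp4", "clip3.mp4"];
var players = [document.getElementById("video1"),
               document.getElementById("video2")];
var current = 0;

function playNext() {
    if (current >= playlist.length) {
        return; // playlist finished
    }
    var foreground = players[current % 2];
    var background = players[(current + 1) % 2];
    foreground.play();
    if (current + 1 < playlist.length) {
        // Preload the next clip in the background element.
        background.src = playlist[current + 1];
        background.load();
    }
    foreground.onended = function () {
        current++;
        playNext();
    };
}

players[0].src = playlist[0];
players[0].load();
playNext();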

Select audio in the user's language

Windows Developer Preview provides an HTML5 extension that lets apps select audio tracks in the user's language. It is available both to Metro style apps using JavaScript and to the IE10 browser.

The msAudioTracks attribute on the <video> and <audio> elements allows apps to access separate audio tracks in different languages.

<script type="text/javascript">
    var myVideo = document.getElementById("videoTag1");
    if (myVideo.msAudioTracks.length > 1) {
        for (var i = 0; i < myVideo.msAudioTracks.length; i++) {
            if (myVideo.msAudioTracks.language(i) == "en-gb") {
                myVideo.msAudioTracks.selectedTrack = i;
                break;
            }
        }
    }
    myVideo.play();
</script>

Select an audio output device

Windows allows apps to send the audio output signal to various output devices using the msAudioDeviceType attribute (DOMString). With this extension, Metro style apps can choose to send the audio output to the default audio output device for multimedia, console, or communications. The attribute can only be set explicitly on the media tag and can't be accessed through Windows Runtime functions.

The default value is "multimedia". Setting msAudioCategory to "communications" will force msRealtime to "true" and also set msAudioDeviceType to "communications" if the msAudioDeviceType attribute is not set explicitly.

<audio src="GameEffect.avi" msRealtime="true"

       msAudioDeviceType="console" msAudioCategory="soundeffects"></audio>

Use pluggable DRM

You can enable DRM-protected content by using the Windows Runtime MediaProtectionManager API.

var mediaProtectionManager = new Windows.Media.Protection.MediaProtectionManager();

Set the Properties property. Currently, three properties can be set:

  • Windows.Media.ContentProtection.VideoFrameAccessCertificate - a UINT8 array that represents the application certificate for accessing frames in frame server mode.
  • Windows.Media.ContentProtection.ContentProtectionSystemId - a GUID that represents the protection system ID. This value determines which third-party ProtectionSystem should be used to handle protected content in a WMDRM ASF file.
  • Windows.Media.ContentProtection.ContentProtectionSystemContext - a UINT8 array that represents additional data that the application wants to pass to the protection system for the current content.

mediaProtectionManager.properties["Windows.Media.Protection.MediaProtectionSystemId"] = '{F4637010-03C3-42CD-B932-B48ADF3A6A54}'; // Microsoft.Media.PlayReadyClient.PlayReadyStatics.mediaProtectionSystemId;

Set the MediaProtectionManager on the HTML video tag or the XAML MediaElement by passing it to the element's msSetMediaProtectionManager method:

video.msSetMediaProtectionManager(mediaProtectionManager);

Attach listeners to receive the events and implement the delegates to handle the protection tasks.

ComponentLoadFailed - fired when the load of binary data fails.

RebootNeeded - fired when a reboot is needed after the component is renewed.

ServiceRequested - fired when the content protection system encounters an issue and needs the application's help.

mediaProtectionManager.addEventListener( "componentloadfailed", componentLoadFailed, false );

mediaProtectionManager.addEventListener( "servicerequested", serviceRequested, false );

videoPlayer.msSetMediaProtectionManager( mediaProtectionManager );

mediaFilename = 'pr_006_1_01_v4_0_wmv.pyv.asf';

videoPlayer.src = mediaFilename;

videoPlayer.play();

The MediaError interface is extended to include an msExtendedCode attribute so developers can get feedback during implementation.

myVideo.addEventListener('error', function onError() {
    var error = myVideo.error.msExtendedCode;
    // ...
}, false);

Share media using PlayTo

You can use PlayTo to enable users to easily stream audio, video, or images from their computer to devices in their home network. For example, a user that is watching a video in your application can stream that video to their TV for everyone in the room to view.
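A minimal sketch using the Windows.Media.PlayTo API (the event and property names here are my assumption about the preview API, not taken from this note):

// Offer the playing <video> element when the user picks a PlayTo target.
var playToManager = Windows.Media.PlayTo.PlayToManager.getForCurrentView();
playToManager.addEventListener("sourcerequested", function (e) {
    var video = document.getElementById("videoTag1");
    e.sourceRequest.setSource(video.msPlayToSource);
}, false);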

Enable effects in a <video> tag

You can enable video and audio effects through the Win8 media plug-in model.

The following example applies the VideoStabilization effect to a <video> element. To apply a different effect, replace the first parameter with the activatable ClassID of your media extension.

var myVideo = document.getElementById("videoTag1");
myVideo.msAddVideoEffect(
    "Windows.Media.VideoEffects.VideoStabilization", // ClassID
    true,   // effectRequired
    null);  // config
myVideo.play();

Enable low-latency playback

The msRealtime attribute (Boolean), when set to true, enables the media element to reduce initial playback latency as much as possible. This is critical for two-way communication apps and can also apply to some gaming scenarios. The attribute can only be set explicitly on the media tag and can't be set from JavaScript.

<audio src="GameEffect.avi" msRealtime="true"

       msAudioDeviceType="console" msAudioCategory="soundeffects"></audio>

Enable stereo video playback

When a video element is in the optimized rendering mode, stereo video can be enabled if:

  • the system is stereo-capable,
  • the display setting is turned on for stereo rendering, and
  • the video element has been extended to provide the interfaces for Metro style apps using JavaScript to include the stereo video experience.

The read-only (Boolean) attribute msIsStereo3D indicates whether the video source of the video element is stereo 3D. The msStereo3DPackingMode attribute indicates the frame packing mode of the video. The value is none for regular 2D video. Other possible values include topbottom and sidebyside. The value of this attribute can be adjusted when there is no standard metadata in the video stream to indicate the stereo property of the video. When the system display setting is set to enable stereo display (the system is stereo-capable), apps can turn on the stereo display mode by setting the msStereo3DRenderMode attribute to stereo. The default value of the attribute is always mono.

       var myVideo = document.getElementById("videoTag1");
       if (myVideo.msIsStereo3D &&
           myVideo.msIsLayoutOptimalForPlayback &&
           Windows.Graphics.DisplayProperties.stereoEnabled) {
           myVideo.msStereo3DRenderMode = "stereo";
       }
       myVideo.play();

The stereo display setting status is available through the Windows.Graphics.DisplayProperties.StereoEnabled property (Boolean). The setting can only be changed directly in the display control panel. Metro style apps using JavaScript should also register for the Windows.Graphics.DisplayProperties.StereoEnabledChanged event to track the actual system setting.
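A hedged sketch of registering for that notification (the JavaScript event name is assumed from the property name):

Windows.Graphics.DisplayProperties.addEventListener("stereoenabledchanged",
    function () {
        // Fall back to mono rendering if stereo support went away.
        if (!Windows.Graphics.DisplayProperties.stereoEnabled) {
            myVideo.msStereo3DRenderMode = "mono";
        }
    }, false);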

Optimize video rendering

Windows Runtime introduces several optimizations for the HTML5 video element, based on a GPU-accelerated composition engine.

The read-only boolean attribute msIsLayoutOptimalForPlayback provides information to web developers as to whether each video element in the web app is arranged in a way that can benefit from the optimized rendering option in the composition engine.

To benefit from the optimized rendering option, place the video element in a full stacking context as defined in Appendix E of the World Wide Web Consortium (W3C) Cascading Style Sheets (CSS) standard. An element is in a full stacking context if any of the following applies:

  • It is positioned with a z-index value other than auto
  • It has an opacity value set
  • It has a 2D or 3D transform applied

In addition, the Win8 composition engine also allows positioned elements with z-index:auto to be rendered through the optimized rendering path.

<style>

  video{ position: relative; z-index: 1; }

</style>

<style>

  video{ position: relative; z-index: auto; }

</style>

<style>

  video{ Opacity:0.5; }

</style>

<style>

  video{ -ms-transform: translateX(100px); }

</style>

While in the optimized rendering mode (msIsLayoutOptimalForPlayback is true), further performance optimization is available when the video is rendered in fullscreen mode. To benefit from it, no other web element may be rendered above the video element.

Please note that a plain <video> tag is not considered to be in the stacking context and is not included in the optimized rendering path. The optimized rendering option is also disabled when any of the following options are applied to the video element or to any element above it in the stacking order.

  • A negative z-index
  • An outline-color of invert
  • A parent element with a "dependent" composition effect or 3D Transform
  • Embedded in an SVG element
  • DirectDraw based Built-in and Third Party Binary Painters (such as invert, shadow, wave, etc.) as implemented in Internet Explorer 4.0 to 5.5 Filters
  • Windowless (GDI-based) ActiveX controls, see MSDN ActiveX Controls, which can make use of its container's window rather than having a window of its own

The msZoom attribute, when set to true, allows the video frame to fit into the video tag's display space by trimming off part of the video picture at the top/bottom or left/right.

       var myVideo = document.getElementById("videoTag1");

       myVideo.msZoom = true;

       myVideo.play();

The msHorizontalMirror attribute, when set to true, makes the video element render the video flipped horizontally. This is useful when the video tag is used for webcam preview, where the image in the preview window should move left or right together with the user rather than in the opposite direction.

       var myVideo = document.getElementById("videoTag1");

       myVideo.msHorizontalMirror = true;

       myVideo.play();

Capturing photos or videos

You can use the Windows.Media.Capture.CameraCaptureUI API to launch a full-screen dialog that presents the camera UI and lets you capture a photo or video with as little as one method call.

If you want to create your own user interface rather than using the one the Windows.Media.Capture.CameraCaptureUI class provides, use Windows.Media.Capture.MediaCapture instead.

var captureUI = new Windows.Media.Capture.CameraCaptureUI();

captureUI.captureFileAsync(Windows.Media.Capture.CameraCaptureUIMode.photo).then(function (capturedItem) {
    if (capturedItem) {
        document.getElementById("message").innerHTML = "User captured a photo.";
    }
    else {
        document.getElementById("message").innerHTML = "User didn't capture a photo.";
    }
});

Declare the webcam capability.

You just took a picture using the default format. You can also specify a different format, an aspect ratio, or record a video.

To capture a photo with an aspect ratio of 4:3 in the PNG format:

captureUI.photoSettings.format = Windows.Media.Capture.CameraCaptureUIPhotoFormat.png;

captureUI.photoSettings.croppedAspectRatio = { width: 4, height: 3 };

Users can crop their captured photos and trim their captured videos before returning them to the calling application. In addition, users can also adjust some of the camera settings, such as brightness, contrast, and exposure, before capturing a photo or a video. The camera dialog is intended for live photo and video capture.
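To record a video instead of a photo, pass the video mode (a sketch; the video format setting is an assumption modeled on the photo settings above):

captureUI.videoSettings.format = Windows.Media.Capture.CameraCaptureUIVideoFormat.mp4;

captureUI.captureFileAsync(Windows.Media.Capture.CameraCaptureUIMode.video).then(function (capturedItem) {
    if (capturedItem) {
        document.getElementById("message").innerHTML = "User captured a video.";
    }
});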

You can capture video to a file using the Windows.Media.Capture API: control the asynchronous capture operation, select an encoding profile, and send the resulting video to a file.

var oMediaCapture = new Windows.Media.Capture.MediaCapture();

Calling the MediaCapture.InitializeAsync method returns an InitializeOperation object that is used to control the capture operation. By default, InitializeAsync uses the default video capture device and will capture either audio or video. If you want to modify these settings, you can create and initialize your own MediaCaptureInitializationSettings object and pass it to the InitializeAsync method.
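A minimal sketch of passing custom settings (the property and enumeration names follow the MediaCaptureInitializationSettings class; treat the details as an assumption):

// Capture audio only instead of the default audio-and-video.
var settings = new Windows.Media.Capture.MediaCaptureInitializationSettings();
settings.streamingCaptureMode = Windows.Media.Capture.StreamingCaptureMode.audio;

var initializeOperation = oMediaCapture.initializeAsync(settings);
initializeOperation.Completed = initializeHandler;
initializeOperation.Start();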

The encoding profile contains all of the settings for how the destination file will be encoded. The Windows.Media.Capture API provides several options for creating a MediaEncodingProfile object.

The Windows.Media.Capture namespace provides a set of predefined encoding profiles:

  • AAC audio (M4A)
  • MP3 audio
  • Windows Media Audio (WMA)
  • MP4 video (H.264 video plus AAC audio)
  • Windows Media Video (WMV)

profile = Windows.Media.Capture.MediaEncodingProfile.createMp4(

        Windows.Media.Capture.VideoEncodingQuality.HD720p );

Alternatively, you can create a profile that matches an existing media file by using the Windows.Media.Capture.MediaEncodingProfile.CreateFromFileAsync method. Or, if you know the exact encoding settings that you want, you can create a new Windows.Media.Capture.MediaEncodingProfile object and fill in all of the profile details.
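A sketch of the file-based variant, following the operation pattern used in this note ('sampleFile' is an assumed StorageFile):

var createProfileOperation =
    Windows.Media.Capture.MediaEncodingProfile.createFromFileAsync(sampleFile);

createProfileOperation.Completed = function (op) {
    // Use the profile of the existing file for the new recording.
    profile = op.GetResults();
    op.Close();
};
createProfileOperation.Start();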

To start capturing video to a file, create a StartRecordOperation object by calling the StartRecordToStorageFileAsync method and passing in the MediaEncodingProfile and the destination storage file.

You can use the handler for the StartRecordOperation to signal the progress of the capture operation, and to call the handler for the StopRecordOperation when the capture is completed.

To stop capturing video to a file, create a StopRecordOperation object by calling the StopRecordAsync method. You can use the handler for the StopRecordOperation to signal the completion of the capture operation.

var oMediaCapture = new Windows.Media.Capture.MediaCapture();

var profile;

 

function createProfile()

{

    profile = Windows.Media.Capture.MediaEncodingProfile.createMp4(

        Windows.Media.Capture.VideoEncodingQuality.HD720p );

}

 

function initializeHandler(initializeOperation)

{

    var result = initializeOperation.GetResults();

 

    if (result == 0)

    {

        createProfile();

 

        // 'file' is a Windows.Storage.StorageFile created earlier
        // (for example, with a file picker).
        var startCaptureOperation = oMediaCapture.startRecordToStorageFileAsync(
          profile,
          file

        );

 

        startCaptureOperation.Completed = OnStartCaptureComplete;

        startCaptureOperation.Start();

    }

    

    initializeOperation.Close();

}

 

function OnStartCaptureComplete(startCaptureOperation)

{

    var result = startCaptureOperation.GetResults();

 

    if (result == 0)

    {

        var stopCaptureOperation = oMediaCapture.StopRecordAsync();

 

        stopCaptureOperation.Completed = OnStopCaptureComplete;

        stopCaptureOperation.Start();

    }

   

    startCaptureOperation.Close();

}

 

function OnStopCaptureComplete(stopCaptureOperation)

{

    var result = stopCaptureOperation.GetResults();

 

    if (result == 0)

    {

        writeln("Successfully finished capturing");

    }

    

    stopCaptureOperation.Close();

}

 

function StartMediaCaptureSession()

{

    var initializeOperation = oMediaCapture.initializeAsync();

 

    initializeOperation.Completed = initializeHandler;

    initializeOperation.Start();

}

Preview from a webcam in a web app

     // Declared at module scope so both functions can use it.
     var mediarec;

     function init() {
       mediarec = new Windows.Media.MediaRecorder();
       var opInitializeRecorder = mediarec.initializeAsync(null);
       opInitializeRecorder.start();
     }

     function preview() {
       var myVideo = document.getElementById("videoTag1");
       myVideo.src = URL.createObjectURL(mediarec);
       myVideo.play();
     }

Record audio or video

var oMediaCapture = new Windows.Media.Capture.MediaCapture();

Calling the MediaCapture.InitializeAsync method returns an InitializeOperation object used to control the record operation. As described above, InitializeAsync uses the default video capture device unless you create and pass your own MediaCaptureInitializationSettings object.

Camera or microphone settings

You can adjust various settings on the camera or microphone, such as brightness, contrast, focus (on the camera) or volume (on the microphone). In the Windows.Media.Capture API, this is done by retrieving a Windows.Media.Devices.VideoDeviceController or Windows.Media.Devices.AudioDeviceController object and setting the properties of the object.

// Create the media capture object.

var oMediaCapture = new Windows.Media.Capture.MediaCapture();

 

// Retrieve a video device controller.

var videoDeviceController = oMediaCapture.videoDeviceController;

 

// Retrieve an audio device controller.

var audioDeviceController = oMediaCapture.audioDeviceController;

The VideoDeviceController property returns a Windows.Media.Devices.VideoDeviceController object. The properties of this object, such as brightness, contrast, or focus, each return a MediaDeviceControl object with a capabilities property that returns a MediaDeviceControlCapabilities object. That object has properties and methods that let you determine whether a property is supported on this camera, what its minimum and maximum values are, and get and set its value.

var brightnessCapabilities = videoDeviceController.brightness.capabilities;

if (brightnessCapabilities.supported)
{
  var brightness;

  if (brightnessCapabilities.tryGetValue( brightness ))
  {
    var min = brightnessCapabilities.min;
    var max = brightnessCapabilities.max;
    var step = brightnessCapabilities.step;

    if( (brightness + step) <= max )
    {
       if( brightnessCapabilities.trySetValue( brightness + step ) )
       {
          // The brightness was increased by one step.
       }
    }
  }
  else
  {
    // The brightness value couldn't be retrieved.
  }
}
else
{
  // Setting the brightness value is not supported on this camera.
}

The AudioDeviceController property returns a Windows.Media.Devices.AudioDeviceController object. Its properties, such as muted and volumePercent, can be used to adjust the microphone settings directly.

// Mute the microphone.

audioDeviceController.muted = true;

 

// Un-mute the microphone.

audioDeviceController.muted = false;

 

// Get the current volume setting.

var currentVolume = audioDeviceController.volumePercent;

 

// Increase the volume by 10 percent.

audioDeviceController.volumePercent = (currentVolume + 10);

You can also launch a dialog for adjusting camera settings by calling Windows.Media.Capture.CameraOptionsUI.Show, and passing a MediaCapture object as a parameter.
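For example:

// Launch the built-in camera options dialog for an initialized capture object.
Windows.Media.Capture.CameraOptionsUI.show(oMediaCapture);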

Rotate the video

If the camera is oriented at a 90, 180, or 270 degree angle, you can rotate the video image. In the Windows.Media.Capture API, this is done by setting a video rotation value before you begin recording or previewing.

var oMediaCapture = new Windows.Media.Capture.MediaCapture();

oMediaCapture.setRecordRotation( Windows.Media.Capture.VideoRotation.clockwise90Degrees );

var videoRotation = oMediaCapture.getRecordRotation();

The rotation of the video image that will be used when recording, is set by passing in a Windows.Media.Capture.VideoRotation enumeration value to the SetRecordRotation method of the MediaCapture object. The VideoRotation enumeration contains values for 0, 90, 180, and 270 degrees, and is used to specify the amount by which to rotate the video.

var oMediaCapture = new Windows.Media.Capture.MediaCapture();

 

function StartMediaRecordingSession()

{

    //

    // Set the video rotation value.

    //

    oMediaCapture.setRecordRotation( Windows.Media.Capture.VideoRotation.clockwise90Degrees );

 

    var initializeOperation = oMediaCapture.initializeAsync();

 

    initializeOperation.Completed = initializeHandler;

    initializeOperation.Start();

}

Mirror the video

You can mirror the preview image (that is, flip it horizontally). This is useful for video conferencing/video chat, because typically the user wants to see a reflected image of themselves.

The MediaCapture object provides methods for getting and setting a Boolean value that indicates whether the preview image is mirrored horizontally. The getPreviewMirroring method returns the current value, and the setPreviewMirroring method enables or disables horizontal mirroring of the video preview stream.

var oMediaCapture = new Windows.Media.Capture.MediaCapture();

 

function turnMirroringOn()

{

   if( !oMediaCapture.getPreviewMirroring() )

      oMediaCapture.setPreviewMirroring( true );

}

 

function OnStartPreviewComplete(startPreviewOperation)

{

    // This method does not return a value.

    //

    startPreviewOperation.GetResults();

 

    // Set the handler for the StopPreviewOperation.

    //

    var stopPreviewOperation = oMediaCapture.stopPreviewAsync();

 

    stopPreviewOperation.Completed = OnStopPreviewComplete;

    stopPreviewOperation.Start();

 

    startPreviewOperation.Close();

}

 

function OnStopPreviewComplete(stopPreviewOperation)

{

    // This method does not return a value.

    //

    stopPreviewOperation.GetResults();

 

    stopPreviewOperation.Close();

}

 

function StartPreview()

{

    // Enable horizontal mirroring.

    //

    turnMirroringOn();

 

    // Start Previewing

    var startPreviewOperation = oMediaCapture.startPreviewAsync();

 

    startPreviewOperation.Completed = OnStartPreviewComplete;

    startPreviewOperation.Start();

}

Video stabilization

Windows provides a video stabilization effect, which can help reduce shakiness from a hand-held camera. This effect can be inserted during live capture, or added as a post-processing step during transcoding.

To add the stabilization effect during live capture, create an asynchronous AddEffectOperation object by calling the Windows.Media.Capture.AddEffectAsync method. This method takes the following parameters:

  • MediaStreamType - One of the values of the MediaStreamType enumeration that specifies whether the stream is for video recording, video preview, audio, or photo.
  • effectActivationID - The class identifier of the activatable runtime class that implements the effect. The runtime class must implement the IMediaExtension interface. The Windows.Media.VideoEffects namespace provides a VideoStabilization class.
  • effectSettings - An IPropertySet that contains additional configuration parameters for the effect. If no additional configuration is needed for the effect, then this parameter should be null.

The following code adds a video stabilization effect to the chain of effects that are attached to the source stream coming out of the device source. You can call this method multiple times to add several effects, and call the ClearEffectsAsync method to clear all of the effects from the stream.

//

// Create a Media Capture object and add VideoStabilization.

//

var oMediaCapture = new Windows.Media.Capture.MediaCapture();

 

var addEffectOperation = oMediaCapture.addEffectAsync(

   Windows.Media.Capture.MediaStreamType.videoRecord,

   "Windows.Media.VideoEffects.VideoStabilization",

   null

);

 

 

//

// To clear all the effects from this stream type, use ClearEffectsAsync.

//

var clearEffectsOperation = oMediaCapture.clearEffectsAsync( Windows.Media.Capture.MediaStreamType.videoRecord );

 

To add the stabilization effect during transcoding, use the MediaTranscoder.AddVideoEffect method and provide the class identifier of the activatable runtime class that implements the effect.

The following code adds the Windows.Media.VideoEffects.VideoStabilization effect to the transcoder object. You can call this method multiple times to add several effects, and call the ClearEffects method to clear all of the effects from the transcoder.

//

// Create a Transcoder object and add VideoStabilization.

// 

var oTranscoder = new Windows.Media.Transcoding.MediaTranscoder();

 

oTranscoder.addVideoEffect( "Windows.Media.VideoEffects.VideoStabilization" );

 

 

//

// To clear all the effects from this stream type, use ClearEffects.

//

oTranscoder.clearEffects();

Applying settings and effects to captured video

Effects are typically Media Foundation Transforms (MFTs) that are passed to Windows.Media.Capture.AddEffectAsync to add an effect when capturing a photo or video.

The AddEffectAsync method returns an AddEffectOperation object that is used to control the asynchronous operation. The application can call this method multiple times to add several effects.

var addEffectOperation = mediaCapture.addEffectAsync(
  mediaStreamType,      // one of the MediaStreamType values
  effectActivationID,   // class ID of the effect's runtime class
  effectSettings        // IPropertySet of extra configuration, or null
);

To be continued.
