Issue video scrubber for AIR

I have a code setup to play fullscreen video with play/pause, close, and a scrubber/seek bar. However, there are some irregularities when publishing for either AIR for Android or AIR for iOS.
Publish for Android gives me an inconsistent scrubber ( in portrait, sometimes it scrubs/seeks, most of the time it does not detect a drag and thus does not scrub). Landscape seems to do a better job of adjusting the scrubber.
Using the same file and codes, when publishing for iOS, the scrubber does not move forward. When I tap and drag the scrubber, the video either restarts or goes back a few frames, but never goes forward.
Is this a bug on Adobe/AIR or do I need to make adjustments to the code to implement for iOS? What is causing these inconsistencies?
Please see the code below and feel free to make suggestions. Any help is greatly appreciated.
From file AIRMobileVideo.as :
package
      import flash.display.MovieClip;
      import flash.desktop.NativeApplication;
      import flash.desktop.SystemIdleMode;
      import flash.display.Stage;
      import flash.display.StageAlign;
      import flash.display.StageScaleMode;
      import flash.events.Event;
      import flash.events.StageVideoAvailabilityEvent;
      import flash.geom.Rectangle;
      import flash.media.StageVideo;
      import flash.media.Video;
      import flash.media.StageVideoAvailability;
      import flash.net.NetConnection;
      import flash.net.NetStream;
      import flash.events.NetStatusEvent;
      import flash.events.MouseEvent;
      import flash.events.TouchEvent;
      import flash.geom.Point;
      import flash.utils.Timer;
      import flash.events.TimerEvent;
      // Fullscreen mobile video player with play/pause and a touch scrubber.
      // NOTE(review): the package/class braces appear to have been lost when
      // this listing was pasted; code is left byte-identical, comments only.
      public class AIRMobileVideo
            // NetStream that plays the current file
            protected var stream:NetStream;
            // Hardware-accelerated path (declared but never attached in this version)
            protected var stageVideo:StageVideo;
            // Software-rendered fallback that is actually used for display
            protected var softwareVideo:Video;
            // Application stage; used for fullscreen metrics and stage-level touch events
            public var stageref:Stage;
            // Timeline clip that owns mcVideoControls and mcProgressShow
            public var timelinetarget:MovieClip;
            // true while the looping filler clip ("blank.mp4") is playing
            public var playingBlank:Boolean;
            // Drives updateDisplay(); interval is set in the constructor
            public var tmrDisplay:Timer;
            // object holds all meta data (duration feeds the scrubber math)
            public var objInfo:Object;
            // NetStream client object; routes onMetaData to metaDataHandler
            public var customClient:Object;
            // true while the user is dragging the progress scrubber
            public var bolProgressScrub:Boolean  = false;
            // last seek target in seconds, recorded while scrubbing
            public var postime:Number;
            public function AIRMobileVideo(stage:Stage, X:int, Y:int, width:int, height:int, target:MovieClip)
             stageref = stage;
             timelinetarget = target;
             playingBlank = false;
             var nc:NetConnection = new NetConnection();
             nc.connect(null);
             customClient = new Object();
             customClient.onMetaData = metaDataHandler;
             //stream.client = customClient;
             stream = new NetStream(nc);
             //stream.client = this ;
             stream.client = customClient;
             var vidWidth:int;// = stage.fullScreenWidth;//1028;
             var vidHeight:int;// = int((480*vidWidth)/640);//764;
             var Ycal;// = (stage.fullScreenHeight/2) - (vidHeight/2)
             var X;
             if(stageref.fullScreenWidth>stageref.fullScreenHeight){
              //landscape
              vidHeight = stageref.fullScreenHeight;//
              vidWidth  = int((640*vidHeight)/480);
              Ycal = 0;//
              X = (stageref.fullScreenWidth/2) - (vidWidth/2);
             } else {
              //portrait
              vidWidth  = stageref.fullScreenWidth;//1028;
              vidHeight = int((480*vidWidth)/640);//764;
              Ycal = (stageref.fullScreenHeight/2) - (vidHeight/2);
              X=0;
             trace("width:"+vidWidth);
             trace("height:"+vidHeight);
             stream.addEventListener(NetStatusEvent.NET_STATUS, statusHandler);
             // software fallback
                   softwareVideo = new Video(vidWidth, vidHeight);
                   softwareVideo.x = X;
                   softwareVideo.y = Ycal;
                   timelinetarget.addChild(softwareVideo);
                   var pt:Point = new Point(timelinetarget.mcVideoControls.mcProgressScrubber.x,   timelinetarget.mcVideoControls.mcProgressScrubber.y);
                   pt = timelinetarget.mcVideoControls.mcProgressScrubber.parent.localToGlobal(pt);
                   timelinetarget.mcProgressShow.x = pt.x;
                   timelinetarget.mcProgressShow.y = pt.y;
                   timelinetarget.addcontrols();
                   timelinetarget.mcVideoControls.mcProgressScrubber.addEventListener(TouchEvent.TOUCH_BEGIN , onTouchBegin);
                   tmrDisplay = new Timer(10);
                   tmrDisplay.addEventListener(TimerEvent.TIMER, updateDisplay);
                   trace("AIRMobileVideo: using software fallback");
function onTouchBegin(event:TouchEvent) {
          // Enter scrub mode: suspend the timer-driven display updates so they
          // cannot fight the user's drag, then track the touch at stage level
          // so the drag survives the finger leaving the scrubber itself.
          bolProgressScrub = true;
          timelinetarget.killControlFadeout();
          //stream.pause();
          tmrDisplay.removeEventListener(TimerEvent.TIMER, updateDisplay);
          stageref.addEventListener(TouchEvent.TOUCH_END, onTouchEnd);
          stageref.addEventListener(TouchEvent.TOUCH_MOVE, onTouchMove);
          // Constrain the drag to the 400 px track, y locked at 4.
          timelinetarget.mcVideoControls.mcProgressScrubber.startDrag(false, new Rectangle(0, 4, 400, 0));
}
function onTouchMove(event:TouchEvent) {
          // FIX for the reported iOS/Android scrubbing bug: the original issued
          // stream.seek() on EVERY TOUCH_MOVE. On mobile, NetStream.seek()
          // snaps to the nearest keyframe, so a flood of seeks keeps pulling
          // the playhead back to the previous keyframe — the video appears to
          // restart or never move forward. While dragging we now mostly update
          // the visuals, seeking at most when the target moves by >= 1 s; the
          // final authoritative seek happens on release (onTouchEnd).
          var targetTime:Number = timelinetarget.mcVideoControls.mcProgressScrubber.x * objInfo.duration / 400;
          if (isNaN(postime) || Math.abs(targetTime - postime) >= 1) {
                    postime = targetTime;
                    stream.seek(Math.round(targetTime));
          }
          // Red fill tracks the scrubber; grey fill shows load progress.
          timelinetarget.mcVideoControls.mcProgressFill.mcFillRed.width = timelinetarget.mcVideoControls.mcProgressScrubber.x + 5;
          timelinetarget.mcVideoControls.mcProgressFill.mcFillGrey.width = stream.bytesLoaded * 406 / stream.bytesTotal;
          // Keep mcProgressShow glued to the scrubber (global coordinates).
          var pt:Point = new Point(timelinetarget.mcVideoControls.mcProgressScrubber.x, timelinetarget.mcVideoControls.mcProgressScrubber.y);
          pt = timelinetarget.mcVideoControls.mcProgressScrubber.parent.localToGlobal(pt);
          timelinetarget.mcProgressShow.x = pt.x;
          timelinetarget.mcProgressShow.y = pt.y;
}
function onTouchEnd(event:TouchEvent) {
          // Leave scrub mode and perform ONE final, authoritative seek to the
          // released scrubber position — the pattern mobile NetStream handles
          // reliably (per-move seeking snaps to keyframes and stutters).
          bolProgressScrub = false;
          timelinetarget.mcVideoControls.mcProgressScrubber.stopDrag();
          stream.seek(Math.round(timelinetarget.mcVideoControls.mcProgressScrubber.x * objInfo.duration / 400));
          // update progress fill to match the final scrubber position
          timelinetarget.mcVideoControls.mcProgressFill.mcFillRed.width = timelinetarget.mcVideoControls.mcProgressScrubber.x + 5;
          stageref.removeEventListener(TouchEvent.TOUCH_END, onTouchEnd);
          stageref.removeEventListener(TouchEvent.TOUCH_MOVE, onTouchMove);
          // BUG FIX: this re-subscription was commented out in the original,
          // so after the first scrub the timer never updated the scrubber
          // again and it appeared stuck.
          tmrDisplay.addEventListener(TimerEvent.TIMER, updateDisplay);
          timelinetarget.addControlFadeout();
          event.updateAfterEvent();
}
  public function updateDisplay(e:TimerEvent):void {
             // checks, if user is scrubbing. if so, seek in the video
             // if not, just update the position of the scrubber according
             // to the current time
             if(bolProgressScrub){
              //postime = Math.round(timelinetarget.mcVideoControls.mcProgressScrubber.x * objInfo.duration / 364);
              stream.seek(Math.round(timelinetarget.mcVideoControls.mcProgressScrubber.x * objInfo.duration / 400));
             } else {
              timelinetarget.mcVideoControls.mcProgressScrubber.x = stream.time * 400 / objInfo.duration;
             //timelinetarget.mcVideoControls.mcProgressScrubber.scaleX = timelinetarget.mcVideoControls.scaleX;
             // update the width from the progress bar. the grey one displays
             // the loading progress
             timelinetarget.mcVideoControls.mcProgressFill.mcFillRed.width = timelinetarget.mcVideoControls.mcProgressScrubber.x + 5;
             timelinetarget.mcVideoControls.mcProgressFill.mcFillGrey.width = stream.bytesLoaded * 406 / stream.bytesTotal;
             var pt:Point = new Point(timelinetarget.mcVideoControls.mcProgressScrubber.x, timelinetarget.mcVideoControls.mcProgressScrubber.y);
             pt = timelinetarget.mcVideoControls.mcProgressScrubber.parent.localToGlobal(pt);
             timelinetarget.mcProgressShow.x = pt.x;
             timelinetarget.mcProgressShow.y = pt.y;
             //trace("loaded:"+stream.bytesLoaded + " total:"+stream.bytesTotal);
  public function resizeVid ():void {
             var vidWidth:int;
             var vidHeight:int;
             var Ycal;
             var X;
   if(stageref.fullScreenWidth>stageref.fullScreenHeight){
              //landscape
              vidHeight = stageref.fullScreenHeight;//
              vidWidth  = int((640*vidHeight)/480);
              Ycal = 0;
              X = (stageref.fullScreenWidth/2) - (vidWidth/2);
   } else {
              //portrait
              vidWidth  = stageref.fullScreenWidth;//1028;
              vidHeight = int((480*vidWidth)/640);//764;
              Ycal = (stageref.fullScreenHeight/2) - (vidHeight/2);
              X=0;
              // software fallback
              //softwareVideo = new Video(vidWidth, vidHeight);
              softwareVideo.x = X;
              softwareVideo.y = Ycal;
              softwareVideo.width = vidWidth;
              softwareVideo.height = vidHeight;
              timelinetarget.addcontrols();
              trace("AIRMobileVideo: using software fallback");
public function playVideo(url:String):void {
             // Stop whatever is playing, wipe the last rendered frame, then
             // start the requested file on the software Video.
             stream.close();
             softwareVideo.clear();
             softwareVideo.attachNetStream(stream);
             playingBlank = false;
             stream.play(url);
}
public function pauseVideo():void {
             // Pause playback without tearing anything down.
             stream.pause();
}
            public function resumeVideo():void
             stream.resume();
public function stopVideo():void {
             // Full teardown: rewind, close the stream, clear the last frame,
             // and remove the video surface and backing from the timeline.
             trace("stopVideo()");
             playingBlank = false;
             zeroVideo();
             stream.close();
             softwareVideo.clear();
             timelinetarget.removeChild(softwareVideo);
             timelinetarget.removeBlackBack();
}
  public function playBlankVideo():void
             trace("playBlankVideo()");
             playingBlank = true;
             timelinetarget.removeControls();
             stream.play("blank.mp4");
  public function zeroVideo():void
             stream.pause();
             stream.seek(0);
public function metaDataHandler(infoObject:Object):void {
             // NetStream onMetaData callback (wired via customClient). Keeps
             // the metadata — duration feeds the scrubber math — and starts
             // the display-update timer once playback info is available.
             objInfo = infoObject;
             tmrDisplay.start();
}
  // App came to the foreground: keep the screen awake during playback.
  private function handleActivate(event:Event):void
             NativeApplication.nativeApplication.systemIdleMode = SystemIdleMode.KEEP_AWAKE;
  // App went to the background: terminate outright.
  // NOTE(review): exiting on every deactivate is drastic (incoming calls,
  // notifications) — confirm this is really the intended kiosk behavior.
  private function handleDeactivate(event:Event):void
             NativeApplication.nativeApplication.exit();
  private function statusHandler(event:NetStatusEvent):void
             trace(event.info.code)
             if(event.info.code=="NetStream.Play.Stop"){
              trace("Video stopped");
              if(playingBlank == true){
               trace("stopVideo()");
               stopVideo();
         } else {
               trace("playBlankVideo()");
               playBlankVideo();
   if(event.info.code=="NetStream.Play.Start"){
         trace("Remove loading");

Have you checked if its a file specific problem?
With a 764 height resolution your video is non IOS-Standard (height+width should be divisible by 16)
Here are some limitations you should consider when encoding for IOS:
http://blog.zencoder.com/2012/01/24/encoding-settings-for-perfect-ipadiphone-video/
Try a standard video and report back.

Similar Messages

  • Redraw issues when compile for Air

    This is about redraw issues which occur in Flash, when I try to compile a file in Air 2.6. The initial files were built in inDesign CS5.5, saved as a FLA, and opened in Flash CS5.5.
    I created a test file in inDesign to be used as an Air App, as I saw discussed and done on several videos on Adobe.tv. The file has 3 pages. The pages are pretty simple. Though there are some  clipping paths and animation.
    Here are the steps:
    1.
    I ported the file to Flash, as an FLA.
    2.
    I opened the FLA in Flash, and added some more animation.
    I added simple code so the file will go from frame to frame, (what were the pages in inDesisgn), on mouse click in the frame.
    3.
    If I compile it in Flash, it works.
    4.
    If I compile it in Air, which I want to do, when it gets to the last page, it starts to have redraw-issues.
    When  the user clicks on page/frame 3, (the last page), there is AS3 code,  a gotoAndStop(), which normally  takes the user back to frame 1. But in Air, the user gets to page one,  but only sees page 3, thinking they are on page 3, as the redraw of page 1 never occurs. I tested this very  carefully, by rebuilding the whole thing and removing every element and all code and starting over piece by piece to isolate the issue.
    5.
    The  issue seems to be, if I remove all the pagespreads from inDesign, which the FLA transformed into "movieclips" in Flash, it works fine without redraw issues. If I  add any of the pagespread movieclips to a frame in the Flash file, as they were when  imported, the redraw issue occurs, but only when I compile in Air.
    Have  you heard of this before? I was wondering if there might be some  settting in Air that is conflicting with the inDesign created pages which are now movieclips in Flash. I am hoping there is something that I can do to  fix it, as I would like to continue to create the spreads in inDesign.
    I  am working on a PC in Windows Vista. I wondered if this was the issue  as well. I can share the file if you want to see it, compiled in flash,  and compiled in Air or the FLA.
    Thanks very much,
    e

    I fixed it. It was not the code; it was the use of text. This time when exporting to an FLA from InDesign, I turned the TLF text to pixels, and now it compiles fine. I isolated the problem by dismantling each item in the InDesign spread and seeing when the problem occurred. Once I removed the type, it had no issues. I was using hanging indents — maybe that was the issue. All I know is that once I turned off the TLF text option, there are no problems. It is sad to lose that feature, but at least it works, and I would rather have my type look well formatted with hanging indents and correct kerning, even if I have to rasterize it first.
    geez. that was fun.

  • How to record a time-limited video with Adobe AIR for iOS

    I am trying to record a time-limited video with Adobe AIR for iOS.
    For example, I want to implement the following function. Start a one-minute timer before launching CameraUI to record video. When the timeout event happens after one minute, stop recording video, close the CameraUI view and obtain the video data so far.
      I have several questions related to that.
      1. How to stop recording video from outside the CameraUI view(in this case, from the timeout event handler) and then close the CemeraUI view? As far as I know, to close the CameraUI view, the only way is to press the [Use Video] button or the [Cancel] button from inside the CameraUI view. Is it possible to close it from outside?
      2. Even if the first problem mentioned above is solved, then how can I get the video data so far(in this case, the video data before the timeout). I know that normally we can get a MediaPromise object from MediaEvent parameter of the  complete handler, and read the video data from the MediaPromise object. But obviously in this case, we can not access the MediaPromise object just because the complete handler itself will not be executed since the [Use Video] button is not pressed.
      3. Is it possible to add a stopwatch to show possible remaining recording time when CameraUI view is open? It seems that the CameraUI automatically uses the full screen of iOS device(in my case, iPad) and there is no extra space to show the stopwatch.
      Are there any solutions or workarounds about the three problem above? I really appreciate it if anyone has any idea about this. Thanks in advance.

    You'd have more control by using the Camera object, showing the camera on a video object inside a Sprite, and capturing that. Then you could put whatever graphics alongside it on the stage.. I've used FlashyWrappers in a test to capture the video to the library.  It took some work, but the test worked well...
    Flash/AIR record videos of your apps and games: Rainbow Creatures

  • IOS 7 Issues | Flash Professional update for Air 3.6?

    Is anyone having any issues with iOS 7?
    I am using Flash Professional to publish my apps and have  had a few rejected becuase of:
    1) Microphone request - App requests to use the microphone although it does not use the microphone
    https://bugbase.adobe.com/index.cfm?event=bug&id=3609631
    This is was apparently updated for the Air SDK, but not for Air in Flash Professional?
    "Fixed in the latest AIR SDK, please update:
    http://helpx.adobe.com/en/flash-player/release-note/fp_118_air_38_rele ase_notes.html"
    2) Movie Clip is not displaying in the correct place - I had an app rejected because a movie clip is not displaying in the correct place on the screen:
    "10.1- We found that your app does not comply with the Apple iOS Human Interface Guidelines, as required by the App Store Review Guidelines. After tapping on the info button, the content of the app is displayed on the top left of the screen."
    I used the following simple lines of code but the image is being displayed in the top left of the screen:
    addChildAt(bgAbout,0);
    bgAbout.x=stage.stageWidth/2;
    bgAbout.y=stage.stageHeight/2;
    Is there any ETA on the update iOS?
    Is Adobe aware of this?
    I hope these issues are fixed soon.

    You can use the 3.9 AIR SDK that is available on labs and add this in Flash Pro using Manage AIR SDK.
    Download AIR from:
    http://labs.adobe.com/downloads/air.html
    -Sujai

  • Video Conferencing in Air 3 for Android/IOS

    Hi,
    We are in the process of creating a Video Conferencing application for Android and IOS devices, the app would be published on Android and IOS market.  I have some quires related to using Air 3.0 for the development:
    For Android devices Front facing camera is supported only in Air 3.0, but can we assume Air 3.0 would be officially available by end of this year?
    Would this feature available for all Android 2.2 and above devices?
    Can we use Flash Media Server/any other streaming server for video conferencing?
    I have read in some of the post that Apple store do not allow video streaming with RTMP, is this true? Is there any work-around to use streaming video from within Air app for IOS?
    Thanks in advance!
    Sangram

    I used milkmangames ANE. (50$), because i didnt want to fork the code for each os.
    http://www.milkmangames.com/blog/tools/
    It was "easy" to set up (quotes: as easy as something complicate can be).
    Consumables, or One-time-purchases worked out of the box, but restore, subscriptions and auto subscriptions (like newspapers) had bugs like hell and were unusable.
    Also check out here for some free stuff (which probably works as well as the commercial, "supported" stuff, stupid-me)...
    https://github.com/freshplanet/ANE-In-App-Purchase
    More ANEs:
    http://sleepydesign.blogspot.de/2012/07/ane-free-air-native-extensions.html

  • Video settings for iPad air 2

    What are the video settings for the iPad air 2 with 128 GB?

    I'm not sure what you mean by the video settings. You can read the iPad Air 2 technical specs here.
    https://www.apple.com/ipad-air-2/specs/

  • How can I make a "Youtube" style video gallery for offline, '.air' use?

    I work for a language company and we've put together a set of videos for use in computers, not online.  Currently we're using Apple's Keynote to present our videos to to the student.  This has the advantage of allowing us to put videos in a sequential order, with absolutely no work in the development department, and most importantly was easy for any student to control and navigate.
    Things have now changed.
    Keynote can no longer hold the size of our videos and we are constantly adding and changing videos all the time.  Now that CS5 is out, I would like to build a stand alone flash application for our system modeled after the same look and functionality of youtube. 
    Key features would be:
    • Main video player that links to our resident video files
    • Something that generates and displays a title for the current video
    • A text box below the main video player for notes and instructions
    • a side bar of thumbnails for all related videos, with titles displayed
    Future features would be an easy way for us to change videos in and out of the library (by "uploading" and "downloading" using a form for the videos, although not to the net, but again, on the resident computer).
    It looks like CS5 would make this possible.  My plan is to make a youtube-like site using flash catalyst.  I think I can do it myself since the first version only needs basic elements and design, but I know I'm going to run into some problems because I'm not sure the limitations of catalyst for this style of interactive video gallery. 
    What I'm looking for is someone to just quickly guide, tell, or yell me in the right direction, and I'm really keen (hell-bent?) on trying it out using Flash Catalyst... is this the way?
    Keeping in mind that my first draft only has to be a simple working version of the system, and I can play with the rest later, is any of what I said possible using flash catalyst?
    Thanks in advance to whoever can help me out!

    I doubt MU can do this.  It's pretty limited in scope &  I don't think it supports server-side code which you would need to parse feeds.  But feel free to post your question in the MU forum.  Maybe somebody there has a workaround.
    http://forums.adobe.com/community/muse/help_with_using_adobe_muse
    Nancy O.

  • OS X 10.8.5 broke Video on MacBook Air

    If you got problem for camera to work at Skype, QQ or Line by OS X10.8.5, below solution may help you to solve this issue,
    http://community.skype.com/t5/Mac/OS-X-10-8-5-broke-Video-on-MacBook-Air/td-p/18 91729/page/4

    Yup so does many people I heard.
    Im just waiting for the update to patch this

  • Integrate native WebRTC audio/video on all AIR platforms including Adobe Media Server

    Hey everybody,
    It is apparent that Adobe is very busy trying to keep up with improving and fixing video and audio bugs on all platforms. Thankfully, in the past month Adobe finally made a H264 video with Nellymoser audio stream work on Androids! That took a while to make that work. But even though that is working, AIR on Androids still cannot transmit H264 video. Also AIR on iOS cannot view live video, but instead video has to be wrapped inside a Apple HLS (HTTP Live Streaming) format which introduces way too much latency for live audio/video streaming. Also AIR cannot transmit AAC Audio, and echo cancellation with Nellymoser just doesnt make the cut.
    Everybody is aware that Flash Player can only stream video/audio smoothly for 1 in 10 users. There is just way too much for Adobe to do to get audio/video to work again and to work for everybody on every device.
    So because WebRTC has much more development effort going into it, and because it is being promoted as free source to try to make the proprietary licensing world to rethink H264 and AAC audio, perhaps Adobe should just focus their efforts on implementing the existing and mostly working WebRTC libraries into Adobe AIR.
    Since these libraries are becoming so popular, Adobe could also integrate support for WebRTC into the Adobe Media Server for recording, peer-to-peer negotiation, and firewall hole punching.
    See my feature request here to integrate native WebRTC audio/video on all platforms
    https://bugbase.adobe.com/index.cfm?event=bug&id=3728399
    So will you vote with me to get WebRTC into Adobe AIR and Adobe Media Server?
    Adobe could essentially deprecate many existing features requests and bug fixes related to audio video and solve many problems with WebRTC such as:
    Implement Opus Codec:
    https://bugbase.adobe.com/index.cfm?event=selectBug&CFGRIDKEY=3016518
    https://bugbase.adobe.com/index.cfm?event=selectBug&CFGRIDKEY=3331640
    https://bugbase.adobe.com/index.cfm?event=selectBug&CFGRIDKEY=3633142
    Fix Enhanced Microphone issues:
    https://bugbase.adobe.com/index.cfm?event=selectBug&CFGRIDKEY=3711062
    https://bugbase.adobe.com/index.cfm?event=selectBug&CFGRIDKEY=3521224
    Add support to stream AAC HE-AAC v2 audio, allowing iOS to be an endpoint that can receive audio and video:
    https://bugbase.adobe.com/index.cfm?event=selectBug&CFGRIDKEY=3694393
    Add support to transmit H264 video from an Android:
    https://bugbase.adobe.com/index.cfm?event=selectBug&CFGRIDKEY=3648262
    Decreased video latency and more performance on Androids regardless of the 32-bit/64-bit ARM Intel processors
    https://bugbase.adobe.com/index.cfm?event=selectBug&CFGRIDKEY=3648268
    Improve video quality and bandwidth:
    HEVC, H265, VP9

    No there has been no update nor comment from Adobe regarding adding support for WebRTC. According to this year's roadmap they are booked! So it appears that we will not be seeing WebRTC in the ActionScript platform this year unfortunately. Back in March I made some predictions as to what might be considered higher priority for Adobe to be working on. It appears that list is still being worked on by the Adobe AIR/Flash team. They still have not finished support for iOS 8, and they still have several months to get hardware accelerated video finished. And VideoTexture is still in beta and will probably be in beta until AIR version 19 I would guess. Then 64-bit AIR runtime will be completed 3rd quarter along with HTML5 improvments for the 4th quarter. It looks like Adobe is completely booked.

  • Final Cut Timeline Feed as video source for iChat?

    I was at the Apple NAB presentation in Vegas a few weeks ago, and one of the things they showcased in regard to iChat AV was the ability for an editor to videoconference with several other people at once and one of the video feeds in the chat window was actually the output from the editor's Final Cut Pro timeline. As he made changes in the FCP timeline, he could play it out in real time and all the people in the chat could see the video feed and make comments on the edit.
    OK- so we were excited because that's a workflow issue we have here. So, we bought some iSight cameras and Tiger and started to do some testing here. The only problem is that it doesn't seem very easy to set up. At first I just thought, "well, they are just playing the timeline out of the firewire port on the FCP machine and another computer is ingesting that firewire stream as a video source for iChat." except when you connect 2 Macs together with a firewire cable, the FCP system doesn't detect a DV video device to output to and the iChat system doesn't detect the incoming firewire stream as a "camera". I spoke to tech support and they said it's not something they support so no luck there.
    I did find an article on Creative Cow about someone doing something similar, but it involved exporting the FCP timeline out through a Kona analog video card into an external tape deck that does analog to DV conversion which then can be imported back into iChat as a DV stream, but last time I checked, a Kona card would run me around 1500 bucks.
    Anyone have any other ideas?

    HI Jeremy,
    Welcome to the Apple Discussion Pages.
    Based on the auto-responding iChat accounts that play films, I would guess they were either using an AppleScript to import FCP as the video source or using an add-on like iChatUSBCam that also allows you to change the video source to the desktop.
    iChatUSBcam has a new beta for Tiger http://www.ecamm.com/mac/ichatusbcam/
    Ralph

  • Dual Monitor video cards for HP Pro 3400

    I am looking to add a 2nd monitor to my HP Pro 3400 MT and want to make sure I get a video card that will not have any compatibility issues.  I'm not doing any gaming so I don't think I need anything super fancy and I would prefer not to have to upgrade the power supply.
    So my questions are:
    Is the HP Pro 3400 MT compatible witha PCI Express 3.0 card?  2.1 ?
    How much power draw from the video card would require me to upgrade the power supply (my PC is stock, no added peripherals).  Is there a way for me to determine how much is being currently (no pun intended) drawn?
    Are there any drawbacks to using the built-in video card for 1 monitor and a new, single-output card for the other (vs a dual-output card)?
    Anything else I need to consider when deciding on a new video card?
    Thanks.

    Hi,
    You might get better assistance on the HP Enterprise Business Forum since you have a business class PC.
    HP DV9700, t9300, Nvidia 8600, 4GB, Crucial C300 128GB SSD
    HP Photosmart Premium C309G, HP Photosmart 6520
    HP Touchpad, HP Chromebook 11
    Custom i7-4770k,Z-87, 8GB, Vertex 3 SSD, Samsung EVO SSD, Corsair HX650,GTX 760
    Custom i7-4790k,Z-97, 16GB, Vertex 3 SSD, Plextor M.2 SSD, Samsung EVO SSD, Corsair HX650, GTX 660TI
    Windows 7/8 UEFI/Legacy mode, MBR/GPT

  • I do video productions for a small local TV station. I use Final Cut Express to edit. I need a new video camera but am at a loss as to what to buy. I know I will get a digital camera but do not know the difference between just digital and digital HD.

    I do video productions for a small local TV station. I use Final Cut Express to edit. I need a new video camera but am at a loss as to what to buy. I know I will get a digital camera but do not know the difference between just digital and digital HD. Also, I can not afford an expensive camera and need some advice on which of the available cameras would be best and also work well with a Mac. One last issue, I currently use a Panasonic #CCD camera that takes a tape. When I load video to Final Cut the audio and video are out of sinc. Go figure. Can anyone help with these questions. Karen

    Hello Karen,
    If you are using Final Cut Express, then look for camcorders that are AVCHD camcorders.  Look especially for the AVCHD logo and specific mention of AVCHD in the specs.  Most of the major manufacturers produce good quality camdcorders - Canon, Sony, etc.
    Also, be aware that there are a lot of "not quite AVCHD" camcorders on the market (sometimes they say they record video as MPEG4-H.264/AVC) - buyer beware!
    Most everything you are going to find on the market today is HD, which stands for "high-def".
    Regarding your question about the Panasonic camcorder, it's best if you post that as a separate question, and please identify the specific model camcorder and the Easy Setup you are using in FCE.

  • Android Video play via AIR application is jerky

    Hi All,
      We are developing an AIR mobile application targetted for Android 2.2 or above. One of the requirement is to play H.264 videos on the mobile from the Assets folder (local file system). We have run in to a show stopper while trying to implement this feature, here are the details -
    1. We are using OSMF that comes bundled with Flex Builder 4.6
    2. Using Adobe AIR 3.1 and Flex SDK 4.6
    When video of quality H.264 MPEG4 1280*720 30FPS with input bit rate 2000 is being played on the mobile device (android), the video frame rate is choppy however on the desktop it plays out fine... StageVideo is something I would like to try however, at this moment I have no access to Android 3.0 which is required for StageVideo...
    What are we missing? Is AIR 3 not capable to play H.264 videos on android, though the Android native video player plays it out just fine?
    Regards
    Baliga

    Hi,
    Video encoding is very important.
    For example, use baseline profile level 3.1 for H264 for mobile
    (and not High profile level 4.1 recommended for desktop).
    For more information, see the MAX session of Fabio Sonnati:
    "Encoding for Performance on Multiple Devices"
    And if you have an Android phone/tablet, there is my AIR application
    to watch Adobe MAX 2011 videos :
    https://market.android.com/details?id=air.fr.inway.maxVideos2011
    Search "Sonnati" for this session's video.
    (For info, i use a video player based on OSMF 1.6)
    The pdf presentation is available on Sonnati's blog:
    http://sonnati.wordpress.com/
    Philippe

  • Controller/Scrubber for animation with scenes

    Hello,
    I would like to make a player controller with a scrubber for an animation I am creating. I do not want to control a video (mp3, flv, or other). I want to control the animation on the timeline.
    My Flash file has scenes.
    Is there a tutorial for applying a controller for this kind of file?
    Thanks!
    Jerry T

    there's not enough material for a tutorial.  just define your parameters and scrub:
    // Map the scrubber's x-position range [scrubberMin, scrubberMax] linearly
    // onto the timeline frame range [1, _root._totalframes] (y = m*x + b).
    parametersF(scrubberMC, scrubberMin, 1, scrubberMax, _root._totalframes);

    scrubberMC.onPress = function() {
        // Constrain dragging to the horizontal axis (y is pinned to this._y).
        this.startDrag(false, scrubberMin, this._y, scrubberMax, this._y); // for a horiz scrubber
        this.onEnterFrame = scrubF; // update the timeline every frame while dragging
    };
    scrubberMC.onRelease = function() {
        this.stopDrag();
        _root.play();
        delete this.onEnterFrame; // stop scrubbing updates once released
    };
    // Jump the main timeline to the frame corresponding to the scrubber's x.
    function scrubF() {
        _root.gotoAndStop(Math.round(this.m * this._x + this.b));
    }
    // Precompute slope (m) and intercept (b) of the line through (x1,y1)-(x2,y2)
    // and store them on the scrubber clip for use during scrubbing.
    function parametersF(mc, x1, y1, x2, y2) {
        mc.m = (y1 - y2) / (x1 - x2);
        mc.b = y1 - mc.m * x1;
    }

  • Why is the iPod Touch (4th Gen) missing the video setting for TV Out?

    Has anyone else noticed that in the iPod Touch (4th Gen) that there isn't a video setting for TV Out? It's not on the new iPhone either. I wanted to attach an AV Composite cord from my iPod to my TV. It won't work because of the missing video setting. You would think that since this is new technology, it would have that setting. After all, my iPod from 2005 has that feature and works just fine with the $39 AV Composite cable from Apple. I was hoping to display what I have on my iPod Touch to my TV, but discovered that I couldn't. When I called Apple, they thought at first I had a defective cable. But later told me that iOS 5 left out the video out feature as if by mistake. Could anyone elaborate on this issue? I would appreciate it. Thanks!!

    I am well aware that setting was on the iPod before iOS 5! My question is why is it missing? Because right now, Apple is selling the AV Composite cable claiming it's compatible with the iPod Touch (4th Gen) and it's not. Thank you!

Maybe you are looking for

  • OS 10.8.5 won't let me open mail on thumb drive

    Lots of people have posted at various times in the discussion groups that they have had problems accessing their mail, getting the message "You can't use this version of the application 'Mail' with this version of OS X.'" I have mail from my old Mac

  • Need help launching WMP shareware for Mac OSX

    Anyone please help! I downloaded Windows Media Player v 10 for Mac but it won't run without StuffIt Expander because it's in .sitx format, but the StuffIt Expander shareware is also, so I'm utterly confused, wmp won't open and the app that should dec

  • How To Get rid of Exponential format in datagridview when the number is very large

    When the number is very large like :290754232, I got 2.907542E +08. in datagridview cell I using vb.net , framework 2.0. how can I get rid of this format? Thanks in advance

  • "64 bit Win 7 not found on disc or drive"

    Hi I very recently bought an 27-inch iMac (late 2013), that I want to bootcamp. I have 'bootcamped' macs before, a couple of them, and I am familiar with the process. However... After opening bootcamp assistant and all that, I get a message when I p

  • Can I add new user to drive 2?

    I have two drives on a MacPro. I cannot boot drive two because I have forgotten the password. Can I add a new user for this drive (and boot into this new user)? I am fairly competent with Unix but need explicit instructions. Thanks in advance. Lewis