Basic demo of handling seeking with NetStream AppendBytes using AS3 (Flash) - by VC:One (vcone.co.uk)
package
{
import flash.display.*; import flash.media.*; import flash.text.*;
import flash.geom.*; import flash.net.*; import flash.system.*;
import flash.events.*; import flash.errors.*; import flash.utils.*;
import flash.system.ApplicationDomain;
/*
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
::::: ABOUT ::::: © 2015 vcone.co.uk
WORKING DEMO: http://vcone.co.uk/labs/video/append_seek_01/index.html
NOTE: This is not copy-and-paste, production-ready code; it's meant for other programmers to study and learn from.
No one else on the internet is showing how to do this, so I put it up.
TO COMMISSION A PROFESSIONAL/COMPLETE VERSION FOR YOUR PROJECT CONTACT: [email protected]
The code shown works fine but is intended for FLV with H.264 video and AAC/MP3 audio.
The code could be extended to include (tested) :
> MP4 loading and frame extracts
> Real-time preview thumbnails in seek bar
> AAC or MP3 extraction from video file
> Bitmap effects ( real-time or pre-processing on frame-by-frame for export as new file)
> many other options....
/////////////////////////////////////////////////////////////////////////////////////////
::::: INTRO :::::
A Basic Demo of : Handling Seeking with appendBytes ( ActionScript 3.0 )
> Shows how to convert a seek-bar width (pixels) to represent total file bytes
> Shows how to seek within bytes
> Shows how to check tag timestamps and search up/down for the keyframe nearest to the user-selected time
> Shows how to do frame-by-frame stepping (feed/append one video frame at a time)
The main functions are..
::: public function Append_SEEK ( offset : int ) ::: //handles searching through the bytes for a video keyframe
::: public function get_frame_TAG ( v_offset:int, v_size:int ) ::: //handles extracting and appending audio/video tags
*/
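/*
::::: FLV TAG LAYOUT ::::: ( reference for the byte checks used throughout this class )
Every FLV body tag is laid out as:
[ TagType 1 byte : 0x08 = audio, 0x09 = video, 0x12 = script/metadata ]
[ DataSize 3 bytes ] [ Timestamp 3 bytes ] [ TimestampExtended 1 byte ] [ StreamID 3 bytes ]
[ Data : DataSize bytes ] ...followed by a 4-byte PreviousTagSize field before the next tag,
so a whole tag spans 11 (header) + DataSize + 4 (PreviousTagSize) bytes.
The file itself starts with a 9-byte FLV header ("FLV", version, flags, header size) plus a
4-byte PreviousTagSize of zero, which is why the metadata tag is expected at offset 13.
For video tags the first Data byte (offset +11 from the tag start) is FrameType|CodecID
( 0x17 = AVC keyframe, 0x27 = AVC inter-frame ) and the second (offset +12) is AVCPacketType
( 0x00 = decoder config, 0x01 = picture NALU, 0x02 = end of sequence ).
For audio tags the first Data byte identifies the format ( 0xAF = AAC, 0x2F = MP3 ).
*/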
//set SWF size & frame rate
[ SWF(width="550", height="400", frameRate="30", backgroundColor="#000000") ]
public class AppendBytes_Seeking_Demo extends MovieClip
{
public var URL_String :String = "";
public var loader_SIZE:URLLoader = new URLLoader();
public var stream:URLStream = new URLStream;
public var videoByteArray : ByteArray = new ByteArray; //Source file bytes
public var temp_BA : ByteArray = new ByteArray;
public var temp_BA_FLV_Header : ByteArray;
public var data_BytesLoaded : int = 0;
public var data_BytesTotal : int = 0;
private var nc:NetConnection; private var ns:NetStream;
public var video:Video = new Video; public var vid_T:Video = new Video;
public var metaListener : Object;
public var got_MetaData : Boolean = false;
public var can_append_first_frame : Boolean = false;
public var frame_data_available : Boolean = false;
public var await_first_frame : Boolean = true;
public var need_totalBytes:Boolean = true;
public var check_FLV_Header : Boolean = true;
public var data_isFLV : Boolean = false;
public var meta_is_Processed:Boolean = false;
public var temp_Num:Number = 0; public var temp_UInt:uint = 0; public var temp_Int:int = -1;
public var temp_Int_1:int = -1; public var temp_Int_2:int = -1;
public var temp_Int_3:int = -1; public var temp_Int_4:int = -1;
public var temp_String : String = "";
public var size_MEDIA_CONFIG : int = 0; //means all data up to first ACTUAL picture/audio frame
public var progress_Count : int = 0;
public var first_frame_offset : int = -1;
public var first_frame_size : int = -1;
public var current_offset_idx : int = 0;
public var xPos:int = -1; //pos within bytes
public var TAG_timestamp : int = 0; public var last_Timestamp : int = 0;
public var TAG_offset : int = -1;
public var TAG_size : int = -1;
public var TAG_type : int = -1;
public var size_SeekBar : int = 0; //
public var seek_Width : int = 0;
public var seek_bytesOffset : int = 0;
public var got_SEEK_KEYFRAME : Boolean = false;
public var got_TAG_Data : Boolean = false;
public var timer_Append : Timer;
public var timer_FirstFrame : Timer;
public var FLV_framerate : Number = 0;
public var FLV_duration : Number = 0;
public var vid_canvas: Sprite = new Sprite();
public var mc_Progress: MovieClip = new MovieClip();
public var fill_progress: Sprite = new Sprite();
public var mc_btn_loop : MovieClip = new MovieClip();
public var has_MPEG_AUDIO:Boolean=false, has_MPEG_VIDEO:Boolean=false, got_MPEG_CONFIG_Bytes:Boolean=false;
public var need_PICTURE_AUDIO:Boolean=false, got_PICTURE_AUDIO:Boolean=false;
public var mode_PlayBack : String = "";
public var vid_is_Playing:Boolean=false, from_Seeking:Boolean=false, vidPause_state:uint=1; //initially 1 == true (paused) before any playback
public var prog_Dot:Shape = new Shape; //progress dot (white marker shown on the seek bar)
public var buffer_lastKeyframePos:int=0, buffer_currentFramePos:int=0, time_Elapsed:Number=0;
public var _stageW:uint=0, _stageH:uint=0, _videoW:uint=0, _videoH:uint=0;
public var _aspectH:uint=0, Aspect_num:Number=0;
public var aspect_F_W:uint=0, aspect_F_H:uint=0, aspect_C_W:uint=0, aspect_C_H:uint=0;
public var data_Width:int=0, data_Height:int=0, stage_state_int:uint=0;
public var got_seeking_TAG_start:Boolean=false;
public var has_VIDEO:Boolean=false, has_AUDIO:Boolean=false;
public var nowSecs:Number=0, totalSecs:Number=0, displayHours:Boolean = true;
public var last_decodedFrames:int=0, current_decodedFrames:int=0;
public var timeNUM:Number=0;
public var txt_BufferLen : TextField = new TextField();
public var txt_keyShortcuts : TextField = new TextField();
public var txt_ByteSelected : TextField = new TextField();
public var txt_BytesTAG : TextField = new TextField();
public var txt_TimeElapsed : TextField = new TextField();
public var txt_TimeSelected : TextField = new TextField();
public var txt_KeytimeSelected : TextField = new TextField();
public var txt_NStimeSelected : TextField = new TextField();
public var txt_fmt_black:TextFormat = new TextFormat();
public var txt_fmt_white:TextFormat = new TextFormat();
public var temp_Count:int=0, MPEG_CONFIG_count:int=0, MPEG_CONFIG_expected:int=0;
public var in_PLAYING_mode : Boolean = false;
public var NS_timeOffset:int = 0;
public var btn_Browse : Sprite = new Sprite();
public var btn_Play : Sprite = new Sprite();
public var btn_Step : Sprite = new Sprite();
public var btn_FS : Sprite = new Sprite();
public var txt_btn_FS : TextField = new TextField();
public var txt_btn_Browse : TextField = new TextField();
public var txt_btn_Play : TextField = new TextField();
public var txt_btn_Step : TextField = new TextField();
public var fileRef : FileReference; // = new FileReference;
public var rect_Fill_BG : Shape;
public var _mYpos:int=0, _mXpos:int=0, _mXwidth:int=0, _mXtime:int = 0;
public var tempNum:Number, minutes:String, seconds:String, hours:String, currentTimeConverted:String;
///////////////////////////////////////////////////////////////
////// Main Program Code
///////////////////////////////////////////////////////////////
public function AppendBytes_Seeking_Demo()
{
// constructor code
if (stage) { Append_Init(); }
else { addEventListener( Event.ADDED_TO_STAGE, Append_Init ); }
}
public function do_Resets() : void
{
//resets for each new file browse/load
//# if timer is running..
if ( timer_Append != null && timer_Append.running == true) { timer_Append.stop(); txt_btn_Play.text = "PLAY"; }
//# clear the byte arrays for this new file data
videoByteArray.clear(); temp_BA.clear();
//# reset the "-1" vars
first_frame_offset = first_frame_size = -1;
//# reset the "0" vars
//if ( data_BytesTotal > 0 ) { data_BytesTotal = data_BytesTotal }
//data_BytesLoaded =
last_decodedFrames = current_decodedFrames = 0;
temp_Num = temp_UInt = temp_Count = MPEG_CONFIG_count = 0;
size_MEDIA_CONFIG = xPos = current_offset_idx = NS_timeOffset = 0;
//# reset the "1" vars
vidPause_state = 1; //assume initially paused
//# reset the "true" vars
check_FLV_Header = need_totalBytes = await_first_frame = true;
//# reset the "false" vars
frame_data_available = meta_is_Processed = got_TAG_Data = vid_is_Playing = false;
can_append_first_frame = false;
size_SeekBar = 530; //the pixel width of seek bar (for calculations)
/*
if ( timer_FirstFrame.running == true )
{
timer_FirstFrame.removeEventListener( TimerEvent.TIMER, append_Timer_firstFrame_Handler ); timer_FirstFrame.stop();
timer_FirstFrame.addEventListener( TimerEvent.TIMER, append_Timer_firstFrame_Handler ); //timer_first_Frame_check
}
*/
//timer_FirstFrame.start(); trace( " timer_FirstFrame was started... " );
}
public function Append_Init ( e:Event = null ) : void
{
//initial setup (the optional event parameter lets this also act as the ADDED_TO_STAGE handler)
do_Resets(); setup_Video(); setup_UI();
removeEventListener(Event.ADDED_TO_STAGE, Append_Init);
stage.align = StageAlign.TOP_LEFT;
stage.scaleMode = StageScaleMode.EXACT_FIT;
//# load a starting video file
//URL_String = "https://vcone.co.uk/labs/_assets/Star_Wars_7_trailer2_MP3_720.flv";
URL_String = ""; //or use this instead if you dont want an initial starting video
//# Get file size via URL Loader
loader_SIZE.addEventListener(ProgressEvent.PROGRESS, Handle_Size_Progress);
loader_SIZE.addEventListener( IOErrorEvent.IO_ERROR, onIOErrorHandler );
//# Get Video File Bytes... (URLStream)
stream.addEventListener( ProgressEvent.PROGRESS, Handle_Stream_Progress );
stream.addEventListener( Event.COMPLETE, Handle_Stream_Complete );
stream.addEventListener( IOErrorEvent.IO_ERROR, onIOErrorHandler );
//URL_String = URL_String.replace(/ /g, ""); //remove spaces
//trace( "URL_String : " + URL_String );
//if ( URL_String.length > 2 ) { stream.load( new URLRequest( URL_String ) ); }
//if already have one open and playing
if ( stream.connected == true ) { stream.close(); }
stream = new URLStream();
stream.addEventListener( ProgressEvent.PROGRESS, Handle_Stream_Progress );
stream.addEventListener( Event.COMPLETE, Handle_Stream_Complete );
stream.addEventListener( IOErrorEvent.IO_ERROR, onIOErrorHandler );
if ( URL_String.length > 2 ) { stream.load( new URLRequest( URL_String ) ); } //skip the initial load when no starting URL is set
//timer_FirstFrame = new Timer( 333 ); //check 3 times per second if first frame bytes available
}
public function Handle_Size_Progress(e:ProgressEvent):void
{
if( e.bytesTotal > 0 )
{ data_BytesTotal = 4 + e.bytesTotal; need_totalBytes = false; }
if (data_BytesTotal > 0 )
{
try //# clean up
{
loader_SIZE.removeEventListener(ProgressEvent.PROGRESS, Handle_Size_Progress);
loader_SIZE.close(); loader_SIZE = null; need_totalBytes = false;
}
catch(e:Error)
{ trace("URLloader: An error occurred ::: " + e.toString()); }
}
}
public function onIOErrorHandler(evt:IOErrorEvent):void
{
//continue();
}
public function Handle_Stream_Complete (evt:Event) : void
{
txt_BytesTAG.text = URL_String;
if ( data_BytesLoaded > data_BytesTotal )
{
data_BytesTotal = data_BytesLoaded; need_totalBytes = false;
seek_Width = fill_progress.width = ( ( data_BytesLoaded / data_BytesTotal ) * size_SeekBar );
}
}
//# for ONLINE loading (for LOCAL loading see.. ::: function Handle_FileRef_Stream_Complete :::
public function Handle_Stream_Progress (evt:Event) : void
{
//update a count of how many times Progress Event was fired...
progress_Count += 1; data_BytesLoaded += (evt.target.bytesAvailable);
//get file total bytes size
//if ( need_totalBytes == true && data_BytesTotal > 0 ) { need_totalBytes = false; }
//# Update progress bar = size_SeekBar (as maximum width)
if ( data_BytesLoaded > 0 ) { seek_Width = fill_progress.width = ( ( data_BytesLoaded / data_BytesTotal ) * size_SeekBar ); }
//# Copy incoming data as it streams past the bytes reader
if ( stream.bytesAvailable > 0 ) //!= 0
{
//# For each firing of Progress Event this reads maximum 65536 bytes INTO videoByteArray...
evt.target.readBytes(videoByteArray, videoByteArray.length, evt.target.bytesAvailable);
if ( check_FLV_Header == true ) //this will be true at first fire of Progress Event
{
temp_BA.clear();
//# get front 1000 bytes //out of 65535 from the first progressEvent firing
temp_BA.writeBytes(videoByteArray, 0, 1000); //higher num gives bigger search area but slower
//# convert bytes into a string ( for quick search via String functions)
temp_String = bytes_toString ( temp_BA );
//# check for "F-L-V" ( first 3 bytes == 0x46 0x4C 0x56 )
if ( temp_String.indexOf("464C56") != -1 ) // -1 means does not exist
{
//# Is valid FLV file so extract FLV Header and FLV MetaData
trace ("File Format is FLV... ");
data_isFLV = true; check_FLV_Header = false; //set false to avoid later multiple checks
META_extract_from_FLV(); //begin extracting FLV Metadata for first Append
} //END "if temp_String IS NOT -1" check
} //END check FLV header bytes == true..
} //END if ( stream.bytesAvailable > 0 )
//# Check if we have enough bytes to display first frame
if ( await_first_frame == true ) //remains true until bytes-loaded is bigger than expected total for first frame
{
if ( data_BytesLoaded > ( size_MEDIA_CONFIG + TAG_offset + TAG_size ) )
//if true then "function Append_PLAY" will respond next (timer event) & append first frame
{ can_append_first_frame = true; await_first_frame = false; }
} //END if ( await_first_frame == true )
} //end Handle_Stream_Progress function
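//# Illustrative helper (a sketch, not used by the demo flow above): the FLV signature can also be
//# checked directly against the first three raw bytes instead of searching a hex string.
public function is_FLV_signature ( ba:ByteArray ) : Boolean
{
//# "F" "L" "V" == 0x46 0x4C 0x56
if ( ba.length < 3 ) { return false; }
return ( ba[0] == 0x46 && ba[1] == 0x4C && ba[2] == 0x56 );
}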
public function META_extract_from_FLV() : void
{
//# add meta bytes to temp_BA's FLV header bytes
trace("DOING ::: function META_extract_from_FLV() ");
temp_BA.clear(); xPos = 13; //usual start pos of metadata in FLV format
TAG_size = get_TAG_size ( videoByteArray, xPos );
trace ("META :: TAG_size : " + TAG_size );
xPos = size_MEDIA_CONFIG = ( TAG_size + 13); //set all to same amount
//# if MPEG Video then also get MPEG Decoder Config for Audio/Video
if ( bytes_toInt(videoByteArray[xPos]) == 0x09 ) //Video tag
{
//MPEG Decoder Config for Video
if ( bytes_toInt(videoByteArray[xPos+11]) == 0x17 && bytes_toInt(videoByteArray[xPos+12]) == 0x00 )
{
trace ("@@ AVCDECODER CONFIG :: got MPEG video decoder config");
TAG_size = get_TAG_size ( videoByteArray, xPos );
size_MEDIA_CONFIG += TAG_size;
has_MPEG_VIDEO = true; has_VIDEO = true;
}
xPos = size_MEDIA_CONFIG; //increment for audio tag if available
//trace ("xPos check :::: " + xPos);
}
if ( bytes_toInt(videoByteArray[xPos]) == 0x08 ) //Audio tag
{
//MPEG Decoder Config for Audio
if ( bytes_toInt(videoByteArray[xPos+11]) == 0xAF || bytes_toInt(videoByteArray[xPos+11]) == 0x2F )
{
trace ("@@ AVCDECODER CONFIG :: got MPEG audio decoder config");
TAG_size = get_TAG_size ( videoByteArray, xPos );
size_MEDIA_CONFIG += TAG_size;
has_MPEG_AUDIO = true; has_AUDIO = true;
}
xPos = size_MEDIA_CONFIG;
}
trace("META EXTRACT ::: size_MEDIA_CONFIG : " + size_MEDIA_CONFIG );
TAG_offset = size_MEDIA_CONFIG;
//# To include sound tags along with video tag frame extractions
need_PICTURE_AUDIO = true;
//# append META DATA & MPEG CONFIG BYTES
temp_BA.writeBytes(videoByteArray, 0, size_MEDIA_CONFIG );
//# Append metadata
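//# Data Generation Mode requires this order: ns.play(null) first, then RESET_BEGIN, then the FLV
//# header + onMetaData tag + decoder-config tags, all before any real picture/audio frame tags.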
ns.play(null); ns.appendBytesAction( NetStreamAppendBytesAction.RESET_BEGIN );
ns.appendBytes(temp_BA);
}
public function append_Timer_firstFrame_Handler (e:TimerEvent) : void
{
if ( size_MEDIA_CONFIG > 0 )
{
if ( data_BytesLoaded >= ( size_MEDIA_CONFIG + TAG_offset + TAG_size ) )
//if true then "function Append_PLAY" will respond next (timer event) & append first frame
{ can_append_first_frame = true; await_first_frame = false; }
}
//# add first Frame
if ( can_append_first_frame == true)
{
//# stop Timer checking this condition
timer_FirstFrame.removeEventListener( TimerEvent.TIMER, append_Timer_firstFrame_Handler );
trace ( "Ready to Append First Frame... " );
//update position according to size of Metadata
xPos = TAG_offset = size_MEDIA_CONFIG;
// get size of tag (from offset of xPos) for first frame
TAG_size = get_TAG_size ( videoByteArray, xPos );
get_First_Frame ( TAG_offset, TAG_size ); ns.appendBytesAction("endSequence");
can_append_first_frame = false; timer_FirstFrame.stop();
}
}
public function handler_Mouse_Seek (evt:Event) : void
{
if( evt.type == "click" )
{
if (timer_Append.running == true) { timer_Append.stop(); }
ns.seek(0); ns.pause();
vidPause_state = 1; ns.appendBytesAction( NetStreamAppendBytesAction.RESET_SEEK );
from_Seeking = true; got_SEEK_KEYFRAME = false; //reset for search
mode_PlayBack = "PLAY"; //trace( "Got a click on a seek bar..." );
Append_SEEK ( seek_bytesOffset );
vidPause_state = 0; //auto play after seeking
}
}
public function Append_SEEK ( offset : int ) : void
{
xPos = current_offset_idx = offset;
//# reset for this function run
from_Seeking = got_seeking_TAG_start = got_SEEK_KEYFRAME = false;
temp_BA.clear();
//# search through bytes using While loop
while ( got_SEEK_KEYFRAME == false )
{
if ( got_SEEK_KEYFRAME == true )
{
trace( "> v1.. got_SEEK_KEYFRAME == true ::: BREAKING " );
//xPos -= TAG_size;
break;
}
//We want video frame image but if we find audio tag we can do fast skip of bytes
//use audio tag size to skip instead of reading every single byte
//# skip Audio tags
if ( bytes_toInt(videoByteArray[xPos]) == 0x08 )
{
//audio ::: 0x2F = MP3, 0xAF = AAC :::
if ( bytes_toInt(videoByteArray[xPos+11]) == 0xAF || bytes_toInt(videoByteArray[xPos+11]) == 0x2F )
{
//# if MP3
if ( bytes_toInt(videoByteArray[xPos+11]) == 0x2F )
{
if ( bytes_toInt(videoByteArray[xPos+12]) == 0xFF && bytes_toInt(videoByteArray[xPos+13]) == 0xFB )
{
TAG_size = get_TAG_size ( videoByteArray, xPos ); got_seeking_TAG_start = true;
//# do other stuff here with audio tag...
//example: can grab MP3 audio to a separate byteArray
}
}
//# if AAC
if ( bytes_toInt(videoByteArray[xPos+11]) == 0xAF )
{
if ( bytes_toInt(videoByteArray[xPos+12]) == 0x01 && bytes_toInt(videoByteArray[xPos+13]) == 0x21 )
{
TAG_size = get_TAG_size ( videoByteArray, xPos ); got_seeking_TAG_start = true;
//# do other stuff here with audio tag...
//example: can grab AAC audio to a separate byteArray
}
}
}
}
//# skip P-Frame Video tags
if ( bytes_toInt(videoByteArray[xPos]) == 0x09 )
{
//check for codec type here too
if ( bytes_toInt(videoByteArray[xPos+11]) == 0x17 || bytes_toInt(videoByteArray[xPos+11]) == 0x27 )
{
//your code
}
//# if p-frame candidate
if ( bytes_toInt(videoByteArray[xPos+11]) == 0x27 && bytes_toInt(videoByteArray[xPos+12]) == 0x01 )
{ TAG_size = get_TAG_size ( videoByteArray, xPos ); got_seeking_TAG_start = true; }
if ( bytes_toInt(videoByteArray[xPos+11]) == 0x17 && bytes_toInt(videoByteArray[xPos+12]) == 0x01 )
{
//trace ( "Got \"09\" I-Frame TAG Byte at : " + xPos );
//# get tag time-stamp in milliseconds
TAG_timestamp = get_TAG_timestamp ( videoByteArray, xPos );
NS_timeOffset = ( TAG_timestamp / 1000 );
txt_KeytimeSelected.text = "Keyframe Time : " + videoTimeConvert( NS_timeOffset );
//# Search for NEAR-er timestamp
temp_Num = 0;
if ( NS_timeOffset > timeNUM )
{
// use label "backSeek" for this While loop
backSeek: while ( true )
{
//# read prev 4 bytes ( Previous TAGsize )
TAG_size = bytes_toInt( videoByteArray[xPos-4], videoByteArray[xPos-3], videoByteArray[xPos-2], videoByteArray[xPos-1] );
TAG_size += 4; //account for 4 bytes of Previous TAGsize
//# move back by size to start of previous tag
xPos -= TAG_size;
//# check timestamps
temp_Num = get_TAG_timestamp ( videoByteArray, xPos );
temp_Num /= 1000;
//# check type
if ( bytes_toInt(videoByteArray[xPos]) == 0x09 &&
bytes_toInt(videoByteArray[xPos+11]) == 0x17 && bytes_toInt(videoByteArray[xPos+12]) == 0x01
)
{
/*
trace ("### Found backSeek KEYFRAME at : " + xPos );
trace ("### AA expected timestamp : " + videoTimeConvert( timeNUM ) );
trace ("### AA found timestamp : " + videoTimeConvert( temp_Num ) );
*/
TAG_timestamp = get_TAG_timestamp ( videoByteArray, xPos );
last_Timestamp = TAG_timestamp;
NS_timeOffset = time_Elapsed = ( TAG_timestamp / 1000 );
txt_KeytimeSelected.text = "Keyframe Time : " + videoTimeConvert( NS_timeOffset );
}
if ( bytes_toInt(videoByteArray[xPos]) == 0x09 &&
bytes_toInt(videoByteArray[xPos+11]) == 0x17 && bytes_toInt(videoByteArray[xPos+12]) == 0x01
) // handle if keyframe bytes
{
if ( temp_Num <= timeNUM )
{
TAG_timestamp = TAG_timestamp; //trace ("### Found backSeek nearest timestamp : BREAKING ::: backSeek .... ");
break backSeek;
}
}
} //end "backSeek" While loop
} //end IF keyframe time larger than selected time
///// End search
TAG_offset = current_offset_idx = xPos;
has_MPEG_VIDEO = true; has_VIDEO = true;
got_SEEK_KEYFRAME = true;
from_Seeking = true;
break;
}
}
if ( got_seeking_TAG_start == true && got_SEEK_KEYFRAME == false )
{ xPos += TAG_size; }
else
{ xPos++; }
//# avoid END OF FILE error
if ( xPos >= ( data_BytesTotal - 10 ) )
{
stage.addEventListener(Event.ENTER_FRAME, handler_enterFrame );
got_SEEK_KEYFRAME = got_seeking_TAG_start = false;
break;
}
} // end while loop
if ( got_SEEK_KEYFRAME == true )
{
//# reset for new keyframe data
temp_BA.clear(); got_SEEK_KEYFRAME = false; from_Seeking = true;
TAG_offset = xPos; TAG_size = get_TAG_size ( videoByteArray, xPos );
if ( first_frame_offset <= 0 )
{ first_frame_offset = TAG_offset; first_frame_size = TAG_size; }
else
{
if ( has_VIDEO == true) //has video keyframe
{ temp_BA.writeBytes( videoByteArray, TAG_offset, TAG_size ); }
else if ( has_AUDIO == true && has_VIDEO == false) //audio only file
{ temp_BA.writeBytes(videoByteArray, TAG_offset, TAG_size); }
}
//# clear the buffer of any "ahead seconds" decoded frames
ns.seek(0); ns.appendBytesAction( NetStreamAppendBytesAction.RESET_SEEK );
//# update picture immediately
ns.appendBytes( temp_BA ); ns.appendBytesAction( NetStreamAppendBytesAction.END_SEQUENCE );
ns.appendBytes( temp_BA ); ns.resume();
if ( mode_PlayBack == "PLAY" ) { timer_Append.start(); }
//if ( from_Seeking == true ) //append_seek
if ( got_SEEK_KEYFRAME == true ) { got_SEEK_KEYFRAME = false; }
//# Updates for next frame
TAG_offset = ( xPos + TAG_size );
xPos = TAG_offset; TAG_size = get_TAG_size ( videoByteArray, xPos );
got_SEEK_KEYFRAME = false;
} //end if ( got_SEEK_KEYFRAME == true )
stage.addEventListener(Event.ENTER_FRAME, handler_enterFrame );
from_Seeking = true; Append_PLAY();
}
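//# Illustrative helper (a sketch, not called by the demo): true if the tag starting at "pos" looks
//# like an H.264 keyframe video tag, i.e. the same byte checks Append_SEEK uses while scanning
//# ( TagType 0x09, FrameType/CodecID byte 0x17, AVCPacketType 0x01 ).
public function is_AVC_keyframe_TAG ( ba:ByteArray, pos:int ) : Boolean
{
if ( pos < 0 || ( pos + 12 ) >= ba.length ) { return false; }
return ( ba[pos] == 0x09 && ba[pos+11] == 0x17 && ba[pos+12] == 0x01 );
}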
public function handler_enterFrame (evt:Event) : void
{
//# SEEK BAR LOGIC
nowSecs = Math.ceil( ( NS_timeOffset + ns.time) / 1000 );
totalSecs = FLV_duration; //could be set at once in metadata instead of here
_mYpos = fill_progress.mouseY;
_mXwidth = ( size_SeekBar );
//#for seek offset
//fill_progress starts as a 1px-wide shape scaled up to the bar width, so its local mouseX runs 0..1
_mXpos = int ( ( fill_progress.mouseX ) * data_BytesTotal ); //fraction of the bar * total file bytes
seek_bytesOffset = _mXpos; //set as seek offset
//#for mousePos to time
_mXtime = ( ( rect_Fill_BG.mouseX ) ); //rect_Fill_BG is drawn at full bar width, so this is in pixels (0..size_SeekBar)
timeNUM = (totalSecs / size_SeekBar ) * _mXtime;
if ( seek_bytesOffset > data_BytesTotal ) { seek_bytesOffset = data_BytesTotal; }
//# if Mouse is moved within seek bar area ( to fix properly )
if ( _mXpos <= fill_progress.width && _mYpos <= fill_progress.height && _mYpos >= fill_progress.y )
{ txt_TimeSelected.text = "Time Selected : " + videoTimeConvert( timeNUM ); }
else
{ if ( _mYpos < fill_progress.y || _mYpos > fill_progress.height ) { /* do if needed */ } }
//# text updates
txt_BufferLen.text = "Buffer Length (secs) : " + String( ns.bufferLength ) ;
txt_TimeSelected.text = "Time Selected : " + videoTimeConvert( timeNUM );
txt_BytesTAG.text = "TAG Bytes Offset (decoded) : " + String( buffer_currentFramePos );
txt_NStimeSelected.text = "Frame Time (calculated) : " + videoTimeConvert( NS_timeOffset + ns.time);
txt_TimeElapsed.text = "TAG TimeStamp (decoded) : " + videoTimeConvert( TAG_timestamp / 1000 );
txt_ByteSelected.text = "Selected Bytes Offset : " + String( seek_bytesOffset );
//# progress dot (white dot)
prog_Dot.x = ( ( NS_timeOffset + ns.time ) * size_SeekBar ) / totalSecs;
if ( _mXpos >= fill_progress.x && _mXpos <= (fill_progress.width * size_SeekBar ) )
{
if ( _mYpos >= fill_progress.y && _mYpos <= ( fill_progress.y + fill_progress.height) )
{
}
}
if ( _mXpos <= fill_progress.width && _mYpos <= fill_progress.height && _mYpos >= fill_progress.y )
{
}
else
{
if ( _mYpos < fill_progress.y || _mYpos > fill_progress.height )
{
}
}
}
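//# Illustrative helper (a sketch, not called by the demo): the seek-bar maths written out as a plain
//# pixels-to-bytes mapping, assuming pixelX runs 0..barWidth across the bar.
public function pixels_to_byteOffset ( pixelX:Number, barWidth:Number, totalBytes:Number ) : int
{
if ( pixelX < 0 ) { pixelX = 0; }
if ( pixelX > barWidth ) { pixelX = barWidth; }
//# linear mapping: fraction of the bar width multiplied by the total file bytes
return int( ( pixelX / barWidth ) * totalBytes );
}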
public function get_First_Frame ( v_offset:int, v_size:int ) : void
{
TAG_offset = first_frame_offset = current_offset_idx = v_offset;
TAG_size = first_frame_size = v_size;
//# if within bytes range for frame data
if( ( current_offset_idx + TAG_size ) <= videoByteArray.length ) { frame_data_available = true; }
else { frame_data_available = false; }
//# if available then extract & append frame
if ( frame_data_available == true)
{
//Get first frame here
temp_BA.clear(); ns.resume();
temp_BA.writeBytes(videoByteArray, TAG_offset, TAG_size);
ns.seek(0); ns.appendBytesAction(NetStreamAppendBytesAction.RESET_SEEK);
ns.appendBytes(temp_BA); ns.appendBytesAction(NetStreamAppendBytesAction.END_SEQUENCE);
//ns.appendBytes(temp_BA);
temp_BA.writeBytes(videoByteArray, TAG_offset, TAG_size);
ns.appendBytes(temp_BA);
//# update next TAG pos + size
TAG_offset = ( TAG_offset + TAG_size ); xPos = TAG_offset + 1;
TAG_size = get_TAG_size ( videoByteArray, xPos );
TAG_type = bytes_toInt( videoByteArray[xPos-1] );
TAG_timestamp = get_TAG_timestamp ( videoByteArray, xPos );
/*
trace ("expected next TAG_offset :::: (after FIRST FRAME) : " + TAG_offset );
trace ("expected next TAG_size :::: (after FIRST FRAME) : " + TAG_size );
trace ("expected next TAG_type :::: (after FIRST FRAME) : " + TAG_type );
*/
//# Pause at first frame (ready for play or frame steps)
ns.appendBytesAction(NetStreamAppendBytesAction.END_SEQUENCE);
vidPause_state = 1; vid_is_Playing = false;
} // end if ( frame_data_available == true)
//# add listeners for mouse
stage.addEventListener(KeyboardEvent.KEY_DOWN, handler_UI_Keys);
btn_Play.addEventListener(MouseEvent.CLICK, handler_UI_Mouse );
btn_Step.addEventListener(MouseEvent.CLICK, handler_UI_Mouse );
}
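//# Example: videoTimeConvert( 125 ) returns "00:02:05" (with displayHours == true)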
public function videoTimeConvert ( inputNum:Number ) : String
{
tempNum = inputNum;
minutes = String ( Math.floor( tempNum / 60 ) );
if (displayHours == true) { hours = String( Math.floor( int(minutes) / 60) ); }
seconds = String ( Math.round( tempNum - ( int(minutes) * 60) ) );
if (displayHours == true) { minutes = String( int(minutes) % 60 ); } //keep minutes within 0-59 once hours are shown
if ( int(seconds) < 10 ) { seconds = "0" + seconds; }
if ( int(minutes) < 10 ) { minutes = "0" + minutes; }
if (displayHours == true) { if ( int(hours) < 10) { hours = "0" + hours; } }
currentTimeConverted = hours + ":" + minutes + ":" + seconds;
return currentTimeConverted;
}
private function Append_PLAY ( evt:Event = null ) : void
{
if ( vidPause_state == 1 ) { txt_btn_Play.text = "PLAY"; }
else { txt_btn_Play.text = "PAUSE" }
if ( timer_Append != null && timer_Append.running == true ) { timer_Append.stop(); }
ns.resume(); vid_is_Playing = true;
get_frame_TAG (TAG_offset, TAG_size );
timer_Append.start();
}
private function append_STEP ( evt:Event = null ) : void
{
timer_Append.stop();
//# If mode is not set to "PLAY" then assume we want to frame "STEP"
if ( mode_PlayBack != "PLAY" )
{ mode_PlayBack = "STEP"; }
if ( from_Seeking == true )
{ from_Seeking = false; }
get_frame_TAG (TAG_offset, TAG_size); //extract new frame
}
public function get_TAG_size ( input_BA :ByteArray, at_xPos : int ) : int
{
temp_Int = 0;
temp_Int = bytes_toInt( input_BA[at_xPos+1], input_BA[at_xPos+2], input_BA[at_xPos+3] );
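//# the 3-byte DataSize counts only the tag payload, so add the 11-byte tag header + 4-byte PreviousTagSize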
temp_Int += 15;
return temp_Int;
}
public function get_TAG_timestamp ( input_BA :ByteArray, at_xPos : int ) : int
{
if ( last_Timestamp <= 16777215 )
{
temp_Int = bytes_toInt( input_BA[at_xPos+4], input_BA[at_xPos+5], input_BA[at_xPos+6] );
}
else
{
//# NOTE : This "ELSE" branch is untested //////////////////
//# Per the FLV spec the TimestampExtended byte at [at_xPos+7] holds the upper 8 bits of the
//# 32-bit timestamp, so method A (extended byte placed first) should be the correct assembly.
//method A: [xD] [xA] [xB] [xC] //fourth byte xDD is placed first : [DD AA BB CC]
temp_Int = bytes_toInt( input_BA[at_xPos+7], input_BA[at_xPos+4], input_BA[at_xPos+5], input_BA[at_xPos+6] );
//method B: [xA] [xB] [xC] [xD] //bytes kept in their on-disk order : [AA BB CC DD] (not the spec layout)
//temp_Int = bytes_toInt( input_BA[at_xPos+4], input_BA[at_xPos+5], input_BA[at_xPos+6], input_BA[at_xPos+7] );
}
return temp_Int;
}
public function get_frame_TAG ( v_offset:int, v_size:int ) : void
{
stage.removeEventListener(KeyboardEvent.KEY_DOWN, handler_UI_Keys);
btn_Play.removeEventListener(MouseEvent.CLICK, handler_UI_Mouse );
btn_Step.removeEventListener(MouseEvent.CLICK, handler_UI_Mouse );
if ( from_Seeking == true )
{
//do whatever //no end_seq
from_Seeking = false;
}
temp_BA.clear(); got_TAG_Data = false; //reset for new A/V frames to get
temp_UInt = temp_Count = 0; MPEG_CONFIG_count = 0;
xPos = current_offset_idx = v_offset;
//# Check if "step mode then skip audio bytes else get all
if ( mode_PlayBack == "STEP" ) { need_PICTURE_AUDIO = false; /* # skip audio bytes to next video tag */ }
else if ( mode_PlayBack == "PLAY" ) { if ( has_AUDIO == true ) { need_PICTURE_AUDIO = true; } /* # include audio bytes */ }
if ( in_PLAYING_mode == true ) { if ( has_AUDIO == true ) { need_PICTURE_AUDIO = true; } /* force test */ }
//# extract tag data
while ( got_TAG_Data == false )
{
if( ( (current_offset_idx + v_size) ) > videoByteArray.length )
{
trace ("NOT ENOUGH DATA... breaking :::: ");
break;
}
//# AUDIO Tag handler
if ( bytes_toInt(videoByteArray[xPos]) == 0x08 )
{
// Extract INT Value from 11 bytes ahead
temp_UInt = bytes_toInt( videoByteArray[xPos+11] );
// Check if Value is xAF (AAC) or x2F (MP3) or x3F (PCM)
if ( temp_UInt == 0xAF || temp_UInt == 0x2F || temp_UInt == 0x3F )
{
//# get tag size from "videoByteArray" starting at tag "xPos"
TAG_size = get_TAG_size ( videoByteArray, xPos );
//# handle time
TAG_timestamp = get_TAG_timestamp ( videoByteArray, xPos );
last_Timestamp = TAG_timestamp; //new update
time_Elapsed = (TAG_timestamp / 1000.0);
if ( need_PICTURE_AUDIO == true )
{
temp_Count++; temp_BA.position = temp_BA.length;
temp_BA.writeBytes( videoByteArray, xPos, (TAG_size) );
}//end IF need_PICTURE_AUDIO check
}//end IF Temp_Uint Format check
//# Skip using Audio tag size
xPos += TAG_size;
}
//# VIDEO Tag handler
if ( bytes_toInt(videoByteArray[xPos]) == 0x09 )
{
if ( has_MPEG_VIDEO == true ) //# for MPEG Video
{
if ( bytes_toInt(videoByteArray[xPos+11]) == 0x17 && bytes_toInt(videoByteArray[xPos+12]) == 0x02 )
{
//### MPEG End-Frame tag code here... ###//
timer_Append.stop(); //no need to try extract any more
//# get tag size from "videoByteArray" starting at tag "xPos"
TAG_size = get_TAG_size ( videoByteArray, xPos );
//# get tag time-stamp in milliseconds
TAG_timestamp = get_TAG_timestamp ( videoByteArray, xPos );
last_Timestamp = TAG_timestamp; //update last known as this new one
//# since is last tag in file we can safely clear any old data held by temp_BA
temp_BA.clear(); temp_BA.writeBytes( videoByteArray, xPos, TAG_size );
ns.appendBytes( temp_BA ); ns.appendBytesAction(NetStreamAppendBytesAction.END_SEQUENCE);
//update to first frame pos in case of replay
xPos = TAG_offset = first_frame_offset; TAG_size = first_frame_size;
}
//if the 11th byte is either 0x17 OR 0x27 ( ie: potential MPEG video tag )
if ( bytes_toInt(videoByteArray[xPos+11]) == 0x17 || bytes_toInt(videoByteArray[xPos+11]) == 0x27 )
{
//# if the 12th byte is 0x01... this confirms it's an MPEG video tag...
if ( bytes_toInt(videoByteArray[xPos+12]) == 0x01 )
{
//# and if the 11th byte is 0x17... This is a Video Keyframe tag
if ( bytes_toInt(videoByteArray[xPos+11]) == 0x17 )
{
//# Mark this keyframe position (if you need it)
buffer_lastKeyframePos = xPos;
if ( ns.bufferLength > 0.002 )
{
}
} //end IF this tag specifically has a video keyframe...
//### handle rest of tag data regardless of status Keyframe or P-frame ###//
buffer_currentFramePos = xPos;
//# get tag size from "videoByteArray" starting at tag "xPos"
TAG_size = get_TAG_size ( videoByteArray, xPos );
//# get tag time-stamp in milliseconds
TAG_timestamp = get_TAG_timestamp ( videoByteArray, xPos );
last_Timestamp = TAG_timestamp; //update last known as this new one
time_Elapsed = (TAG_timestamp / 1000.0);
temp_BA.position = temp_BA.length; temp_BA.writeBytes( videoByteArray, xPos, TAG_size );
TAG_offset = xPos + TAG_size; //increment for next time
got_TAG_Data = true;
break;
}
}
}
else if ( has_MPEG_AUDIO == true && has_MPEG_VIDEO == false ) // FLV with AAC/MP3 only
{
//# handle this later like a boss...
}
}
//# avoid END OF FILE error
if ( xPos >= (videoByteArray.length-10) )
{
trace( "reached end of bytes for FRAME TAG search :: BREAKING " );
trace( "selected xPos : " + xPos );
trace( "videoByteArray.length : " + videoByteArray.length );
got_SEEK_KEYFRAME = false;
break
}
}//end while got_TAG_Data == false
temp_BA.position = 0; ns.appendBytes( temp_BA );
temp_BA.clear(); from_Seeking = false; //reset
stage.addEventListener(KeyboardEvent.KEY_DOWN, handler_UI_Keys);
btn_Play.addEventListener(MouseEvent.CLICK, handler_UI_Mouse );
btn_Step.addEventListener(MouseEvent.CLICK, handler_UI_Mouse );
}
public function ns_statusHandler(evt:NetStatusEvent):void
{
//trace( " ## ns_statusHandler : " + evt.info.code);
if (evt.info.code == 'NetStream.Play.Stop')
{
//trace('the video has ended');
}
if (evt.info.code == "NetStream.Buffer.Flush")
{
//ns.resume();
}
if (evt.info.code == "NetStream.Buffer.Empty")
{
//your required commands here...
}
if (evt.info.code == "NetStream.SeekStart.Notify")
{
//your required commands here...
}
if (evt.info.code == "NetStream.Seek.Notify")
{
//your required commands here...
}
}
public function received_Meta (data:Object):void
{
trace ( "## MetaData is now sent to NetStream... " );
got_MetaData = true; meta_is_Processed = true; //means we can now do "first frame" function
FLV_framerate = data.framerate;
FLV_duration = data.duration;
_stageW = stage.stageWidth; _stageH = stage.stageHeight;
//aspect ratio
if (data.width > 0) //if there is Picture Size in metadata
{
data_Width = data.width; data_Height = data.height;
Aspect_num = data_Width / data_Height;
aspect_F_W = stage.stageWidth; aspect_F_H = stage.stageHeight;
aspect_C_W = stage.stageWidth; aspect_C_H = aspect_C_W / Aspect_num;
video.width = aspect_F_W; //_videoW;
video.height = aspect_F_H; //_videoH;
trace ( "aspect_F_W : " + aspect_F_W );
trace ( "new video width : " + video.width );
}
//## Timer for auto appending one frame per tick (Timer delay is in milliseconds)
timer_Append = new Timer( 1000 / FLV_framerate ); //eg: 30fps = ~33ms per tick (multiply the delay for slow motion)
//# add listeners for timers
timer_Append.addEventListener( TimerEvent.TIMER, Append_PLAY );
//add seek bar enterframe
stage.addEventListener(Event.ENTER_FRAME, handler_enterFrame );
fill_progress.addEventListener(MouseEvent.MOUSE_OVER, handler_Mouse_Seek );
fill_progress.addEventListener(MouseEvent.MOUSE_MOVE, handler_Mouse_Seek );
fill_progress.addEventListener(MouseEvent.CLICK, handler_Mouse_Seek );
}
public function handler_UI_Play() : void
{
//# handle play/pause mode
mode_PlayBack = "PLAY"; in_PLAYING_mode = true;
if ( vidPause_state == 1 ) //#if video pause is.. 1 == true or 0 == false..
{
vidPause_state = 0; txt_btn_Play.text = "PAUSE";
Append_PLAY(); ns.resume();
}
else { vidPause_state = 1; ns.pause(); txt_btn_Play.text = "PLAY"; timer_Append.stop(); }
}
public function handler_UI_Step() : void
{
//# handle frame step mode
timer_Append.stop(); ns.pause(); vidPause_state = 1; in_PLAYING_mode = false;
txt_btn_Play.text = "PLAY"; mode_PlayBack = "STEP";
append_STEP(); ns.resume();
}
public function handler_UI_Keys ( evt:KeyboardEvent ) : void
{
//# RIGHT ARROW or numpad 6 (arrow)
if ( evt.keyCode == 39 || evt.keyCode == 54 || evt.keyCode == 102) { handler_UI_Step(); }
//# SPACE BAR
if ( evt.keyCode == 32) { handler_UI_Play(); }
}
public function handler_UI_Mouse ( evt:Event ) : void
{
//# STEP button
if ( evt.target.name == "btnStep" ) { handler_UI_Step(); }
//# PLAY button
if ( evt.target.name == "btnPlay" ) { handler_UI_Play(); }
}
public function setup_UI() : void
{
//# Fullscreen button
var rect_FS:Shape = new Shape; // shape for the Fullscreen button background
rect_FS.graphics.beginFill(0xFFFFFF); // white fill
rect_FS.graphics.drawRect(0, 0, 80, 20); // (x spacing, y spacing, width, height)
rect_FS.graphics.endFill(); // not always needed but I like to put it in to end the fill
btn_FS.addChild( rect_FS );
//# Fill progress background ( grey )
rect_Fill_BG = new Shape; rect_Fill_BG.graphics.beginFill(0x888888);
rect_Fill_BG.graphics.drawRect(0, 0, size_SeekBar,10); rect_Fill_BG.graphics.endFill();
//# Fill progress bar ( blue )
var rect_Fill:Shape = new Shape; rect_Fill.graphics.beginFill(0x3300FF);
rect_Fill.graphics.drawRect(0, 0, 1,10); rect_Fill.graphics.endFill();
fill_progress.addChild( rect_Fill );
//# button Browse
var rect_Browse:Shape = new Shape; rect_Browse.graphics.beginFill(0xFFFFFF);
rect_Browse.graphics.drawRect(0, 0, 80, 20); rect_Browse.graphics.endFill();
btn_Browse.addChild( rect_Browse );
//# button Play
var rect_Play:Shape = new Shape; rect_Play.graphics.beginFill(0xFFFFFF);
rect_Play.graphics.drawRect(0, 0, 50,20); rect_Play.graphics.endFill();
btn_Play.addChild( rect_Play );
//# button Step
var rect_Step:Shape = new Shape; rect_Step.graphics.beginFill(0xFFFFFF);
rect_Step.graphics.drawRect(0, 0, 50,20); rect_Step.graphics.endFill();
btn_Step.addChild( rect_Step );
//# Progress Dot
prog_Dot.graphics.beginFill(0xFFFFFF); prog_Dot.graphics.drawRect(0, 0, 8, 14); // (x spacing, y spacing, width, height)
prog_Dot.graphics.endFill();
btn_FS.x = 440; btn_FS.y = 14;
btn_Browse.x = 10; btn_Browse.y = 14;
btn_Play.x = 10; btn_Play.y = 230;
btn_Step.x = 70; btn_Step.y = 230;
mc_Progress.x = 10; mc_Progress.y = 290;
//# add texts
txt_fmt_black.color = 0x000000; txt_fmt_white.color = 0xFFFFFF;
//UI buttons + labels;
txt_btn_FS.defaultTextFormat = txt_btn_Browse.defaultTextFormat = txt_btn_Play.defaultTextFormat = txt_btn_Step.defaultTextFormat = txt_fmt_black;
txt_TimeElapsed.defaultTextFormat = txt_NStimeSelected.defaultTextFormat = txt_TimeSelected.defaultTextFormat = txt_fmt_white;
txt_BytesTAG.defaultTextFormat = txt_ByteSelected.defaultTextFormat = txt_KeytimeSelected.defaultTextFormat = txt_fmt_white;
txt_BufferLen.defaultTextFormat = txt_keyShortcuts.defaultTextFormat = txt_fmt_white;
txt_NStimeSelected.selectable = txt_TimeElapsed.selectable = txt_TimeSelected.selectable = txt_ByteSelected.selectable = false;
txt_KeytimeSelected.selectable = txt_BytesTAG.selectable = txt_NStimeSelected.selectable = txt_BufferLen.selectable = false;
txt_btn_Browse.selectable = txt_btn_Play.selectable = txt_btn_Step.selectable = txt_keyShortcuts.selectable = false;
//# shows text for buttons
txt_btn_FS.x = 4; txt_btn_FS.height = 20; txt_btn_FS.width = 80;
txt_btn_FS.text = "Full Screen";
//btn_FS.buttonMode = true; btn_FS.mouseChildren = false; btn_FS.addEventListener(MouseEvent.CLICK, toggle_FullScreen );
btn_FS.buttonMode = true; btn_FS.mouseChildren = false; btn_FS.addEventListener(MouseEvent.CLICK, toggle_FullScreen );
btn_FS.addChild( txt_btn_FS );
txt_btn_Browse.x = 4; txt_btn_Browse.height = 20; txt_btn_Browse.width = 80;
txt_btn_Browse.text = "Open FLV file";
btn_Browse.buttonMode = true; btn_Browse.mouseChildren = false; btn_Browse.addEventListener(MouseEvent.CLICK, browse_File );
btn_Browse.addChild( txt_btn_Browse );
txt_btn_Play.x = 4; txt_btn_Play.height = 20; txt_btn_Play.width = 50;
txt_btn_Play.text = "PLAY"; btn_Play.name = "btnPlay";
btn_Play.buttonMode = true; btn_Play.mouseChildren = false;
//btn_Play.addEventListener(MouseEvent.CLICK, handler_UI_Mouse );
btn_Play.addChild( txt_btn_Play );
txt_btn_Step.x = 10; txt_btn_Step.height = 20; txt_btn_Step.width = 50;
txt_btn_Step.text = "STEP"; btn_Step.name = "btnStep";
btn_Step.buttonMode = true; btn_Step.mouseChildren = false;
//btn_Step.addEventListener(MouseEvent.CLICK, handler_UI_Mouse );
btn_Step.addChild( txt_btn_Step );
//# texts...
txt_keyShortcuts.x = (btn_Browse.x + 90); txt_keyShortcuts.y = (btn_Browse.y + 2); txt_keyShortcuts.width = 300; txt_keyShortcuts.height = 20;
txt_keyShortcuts.text = "Keyboard : PLAY = [Space-Bar] || STEP = [Right-Arrow]";
//# text "TAG Bytes Offset"... shows Decoded frame time (via "get_frame_tag" function
txt_BytesTAG.x = 10; txt_BytesTAG.y = mc_Progress.y - 22; txt_BytesTAG.width = 190; txt_BytesTAG.height = 20;
//# text "NS Buffer Length"... shows Decoded frame time (via "get_frame_tag" function
txt_BufferLen.x = 340; txt_BufferLen.y = mc_Progress.y - 54; txt_BufferLen.width = 190; txt_BufferLen.height = 20;
txt_BufferLen.text = "Buffer Length (secs) : " + "0"; //initial text
//# text "Selected Bytes Offset"... shows Decoded frame time (via "get_frame_tag" function
txt_ByteSelected.x = 10; txt_ByteSelected.y = mc_Progress.y + 18; txt_ByteSelected.height = 20; txt_ByteSelected.width = 210;
//# text "Frame Time (calculated)"... shows Net Stream time (calculation is... Frame Time = last_seeked_keyframe_tag_timestamp + NS_timeOffset )
txt_NStimeSelected.x = 340; txt_NStimeSelected.y = mc_Progress.y - 38; txt_NStimeSelected.height = 20; txt_NStimeSelected.width = 210;
txt_NStimeSelected.text = "NS Time (calculated) : " + "0"; //initial text
//# text "TAG TimeStamp (decoded)"... shows Decoded frame time (via "get_frame_tag" function)
txt_TimeElapsed.x = 340; txt_TimeElapsed.y = mc_Progress.y - 22; txt_TimeElapsed.width = 210; txt_TimeElapsed.height = 20;
//# text "Time Selected"... shows Decoded frame time (via "get_frame_tag" function
txt_TimeSelected.x = 340; txt_TimeSelected.y = mc_Progress.y + 18; txt_TimeSelected.height = 20; txt_TimeSelected.width = 180;
//# text "Keyframe Time"... shows time from keyframe found during seek bar click
txt_KeytimeSelected.x = 340; txt_KeytimeSelected.y = mc_Progress.y + 34; txt_KeytimeSelected.height = 20; txt_KeytimeSelected.width = 180;
txt_KeytimeSelected.text = "Keyframe Time : " + "0"; //initial text
//add UI items to stage
stage.addChild( txt_keyShortcuts ); stage.addChild( btn_FS );
stage.addChild( btn_Browse ); stage.addChild( btn_Play ); stage.addChild( btn_Step ); stage.addChild( txt_BufferLen );
stage.addChild( txt_BytesTAG ); stage.addChild( txt_NStimeSelected ); stage.addChild( txt_TimeElapsed );
stage.addChild( txt_TimeSelected ); stage.addChild( txt_ByteSelected ); stage.addChild( txt_KeytimeSelected );
stage.addChild( mc_Progress ); mc_Progress.addChild( rect_Fill_BG );
mc_Progress.addChild( fill_progress); mc_Progress.addChild( prog_Dot );
//# add keyboard shortcuts
stage.addEventListener(KeyboardEvent.KEY_DOWN, handler_UI_Keys);
btn_Play.addEventListener(MouseEvent.CLICK, handler_UI_Mouse );
btn_Step.addEventListener(MouseEvent.CLICK, handler_UI_Mouse );
}
public function setup_Video() : void
{
if( ns != null ) { ns.close(); trace("NStream was closed.. Ready for this new file.."); }
nc = new NetConnection(); nc.connect(null);
metaListener = new Object(); metaListener = { onMetaData: received_Meta };
ns = new NetStream(nc); ns.client = metaListener; //use above object called metaListener
ns.addEventListener(NetStatusEvent.NET_STATUS, ns_statusHandler);
video = new Video(10, 10); video.attachNetStream(ns);
video.smoothing = true; video.deblocking = 1;
vid_canvas.addChild( video ); stage.addChild ( vid_canvas );
ns.bufferTime = 0.001; ns.bufferTimeMax = 0.005; ns.maxPauseBufferTime = 0.005;
vidPause_state = 0;
}
private function browse_File ( event:MouseEvent ):void
{
fileRef = new FileReference();
fileRef.addEventListener(Event.SELECT, onSelectFile);
fileRef.addEventListener(ProgressEvent.PROGRESS, Handle_FileRef_Stream_Progress);
fileRef.addEventListener(Event.COMPLETE, Handle_FileRef_Stream_Complete );
trace("..Trying new file load...");
try {
var fileFilter:FileFilter = new FileFilter("FLV Media Container", "*.flv");
fileRef.browse([fileFilter]);
}
catch(e:Error) { trace(e.message); }
}
private function onSelectFile(event:Event):void
{
do_Resets();
fileRef.load(); URL_String = fileRef.name;
}
public function Handle_FileRef_Stream_Complete (evt:Event) : void
{
//# Copy incoming data as it streams past the bytes reader
evt.target.data.readBytes( videoByteArray, 0, evt.target.data.length );
//size_SeekBar = 530;
data_BytesLoaded = data_BytesTotal = evt.target.data.length; trace( "data_BytesLoaded .... ::: " + data_BytesLoaded );
if ( data_BytesLoaded > 0 ) { seek_Width = fill_progress.width = ( ( data_BytesLoaded / data_BytesTotal ) * size_SeekBar ); }
if ( check_FLV_Header == true ) //this will be true at first fire of Progress Event
{
temp_BA.clear();
//# get front 1000 bytes //out of 65535 from the first progressEvent firing
temp_BA.writeBytes(videoByteArray, 0, 1000); //higher num gives bigger search area but slower
//# convert bytes into a string ( for quick search via String functions)
temp_String = bytes_toString ( temp_BA );
//# check for "F-L-V" ( first 3 bytes == 0x46 0x4C 0x56 )
if ( temp_String.indexOf("464C56") != -1 ) // -1 means does not exist
{
//# Is valid FLV file so extract FLV Header and FLV MetaData
trace ("File Format is FLV... ");
data_isFLV = true; check_FLV_Header = false; //set false to avoid later multiple checks
META_extract_from_FLV(); //begin extracting FLV Metadata for first Append
} //END "if temp_String IS NOT -1" check
} //END check FLV header bytes == true..
//# Check if we have enough bytes to display first frame
if ( await_first_frame == true ) //remains true until bytes-loaded is bigger than expected total for first frame
{
if ( data_BytesLoaded > ( size_MEDIA_CONFIG + TAG_offset + TAG_size ) )
//if true then "function Append_PLAY" will respond next (timer event) & append first frame
{ can_append_first_frame = true; await_first_frame = false; }
} //END if ( await_first_frame == true )
}
public function Handle_FileRef_Stream_Progress (evt:Event) : void
{
//your code...
}
private function toggle_FullScreen(evt:Event):void
{
//toggle Full Screen on/off
if( stage.displayState == StageDisplayState.NORMAL )
{
stage.displayState = StageDisplayState.FULL_SCREEN;
//stage.displayState = StageDisplayState.FULL_SCREEN_INTERACTIVE;
btn_Browse.visible = txt_btn_Browse.visible = false;
txt_btn_FS.text = "Exit Full Screen";
}
else
{
stage.displayState = StageDisplayState.NORMAL; txt_btn_FS.text = "Full Screen";
btn_Browse.visible = txt_btn_Browse.visible = true;
}
}
//// UTILS CODE - utility functions /////
public function string_toBytes( hex:String ) : ByteArray
{
// CONVERT STRING OF HEX CODES INTO A BYTEARRAY //
/////////////////////////////////////////////////
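//# Example: string_toBytes( "464C5601" ) returns a 4-byte ByteArray holding 0x46 0x4C 0x56 0x01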
//hex.replace(" ", ""); //remove white spaces in bytes string
var ba:ByteArray = new ByteArray(); ba.position = ba.length;
var len:uint = hex.length; //trace("writeBytes. len: " + len);
for (var i:uint = 0; i < len; i += 2)
{
var byte:uint = uint("0x" + hex.substr(i, 2));
ba.writeByte(byte);
}
return ba;
}
public function bytes_toString ( ba:ByteArray ) : String
{
// PUT HEX OF BYTES INTO A TEXT STRING //
////////////////////////////////////////
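//# Example: passing the first 3 bytes of an FLV file returns the string "464C56"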
var str_Hex:String = ""; var len:uint = ba.length;
ba.position = 0;
//trace("<HEX> readBytes. len: " + len);
for (var i:uint = 0; i < len; i++)
{
var n:String=ba.readUnsignedByte().toString(16);
if ( n.length < 2 ) { n = "0" + n; } //padding
str_Hex += n;
}
return str_Hex.toUpperCase();
}
//# Convert a value into hex written over 3 bytes
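//# Example: writeUInt24( ba, 0x123456 ) appends the three bytes 0x12 0x34 0x56 to ba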
public function writeUInt24( input_BA:ByteArray, val:uint ) : void
{
temp_Int_1 = val >> 16;
temp_Int_2 = val >> 8 & 0xff;
temp_Int_3 = val & 0xff;
input_BA.writeByte(temp_Int_1); input_BA.writeByte(temp_Int_2);
input_BA.writeByte(temp_Int_3);
}
//# Convert values of x-amount of bytes to an integer amount
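//# Example: bytes_toInt( 0x00, 0x01, 0xFF ) returns 511 ( == 0x0001FF )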
public function bytes_toInt( ...args ) : int
{
var temp_conv_ba : ByteArray = new ByteArray();
for (var i:uint = 0; i < args.length; i++)
{
temp_conv_ba[i] = args[i];
}
var int_fromBytes:int = int("0x" + bytes_toString(temp_conv_ba) );
return int_fromBytes;
}
} //end public class
} //end package