Is there a way to prevent buffering when using NetStream.appendBytes with H264 NALs?

Time: 2016-03-02 18:29:41

Tags: actionscript-3 flash h.264 netstream

I'm streaming H264 NALs from a server, wrapping them as FLV tags and passing them into a NetStream with appendBytes (data generation mode). The video plays fine, but the stream is delayed by roughly a second.
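
For context, a data generation mode setup along these lines is assumed (a minimal sketch; nc, video and flvHeaderBytes are illustrative names, not taken from the code below):

var nc : NetConnection = new NetConnection();
nc.connect(null); // a null connection is required for appendBytes use
var stream : NetStream = new NetStream(nc);
stream.client = {};
stream.play(null); // switches the NetStream into data generation mode
stream.appendBytesAction(NetStreamAppendBytesAction.RESET_BEGIN);
stream.appendBytes(flvHeaderBytes); // 9-byte FLV header plus PreviousTagSize0
var video : Video = new Video();
video.attachNetStream(stream);
addChild(video);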

I've tried setting bufferTime and bufferTimeMax, but had no luck stopping the buffering.
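
The buffer settings tried were presumably along these lines (values are illustrative only; neither prevented the initial delay):

stream.bufferTime = 0;      // start playback as soon as any data is buffered
stream.bufferTimeMax = 0.1; // cap live catch-up buffering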

I've also tried various combinations of NetStream.seek() and NetStream.appendBytesAction() with RESET_SEEK and END_SEQUENCE, again to no avail.
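
One of those combinations looked roughly like the sketch below (an illustration of the attempt, not a working fix):

// flush the decoder FIFO and restart appending from a fresh keyframe
stream.appendBytesAction(NetStreamAppendBytesAction.END_SEQUENCE);
stream.seek(0); // any seek discards previously appended bytes in data generation mode
stream.appendBytesAction(NetStreamAppendBytesAction.RESET_SEEK);
// ...then re-append the AVC sequence header and tags starting at a keyframe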

Is there a trick here that I'm not aware of? Is there any way to prevent this delay?

Interestingly, I don't see any delay on the audio I'm passing in (PCMU), so I end up with a lip-sync problem.

Update: still stuck, so posting the code I'm using:

        var timestamp : uint = networkPayload.readUnsignedInt();
        if (videoTimestampBase == 0) {
            videoTimestampBase = timestamp;
        }
        timestamp = timestamp - videoTimestampBase;
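        // the incoming timestamp is presumably on a 90 kHz RTP clock; dividing by 90 yields milliseconds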
        timestamp = timestamp / 90.0;

        // skip 7 bytes of marker
        networkPayload.position = 7;
        var nalType : int = networkPayload.readByte();
        nalType &= 0x1F;
        networkPayload.position = 7;

        // reformat Annex B bitstream encoding, to Mp4 - remove timestamp and bitstream marker (3 bytes)
        var mp4Payload : ByteArray = new ByteArray();
        var mp4PayloadLength : int = networkPayload.bytesAvailable;
        mp4Payload.writeUnsignedInt(mp4PayloadLength);
        mp4Payload.writeBytes(networkPayload, 7, mp4PayloadLength);
        mp4Payload.position = 0;

        if (nalType == 8) {
            // PPS
            ppsNAL = new ByteArray();
            // special case for PPS/SPS - don't length encode
            ppsLength = mp4Payload.bytesAvailable - 4;
            ppsNAL.writeBytes(mp4Payload, 4, mp4Payload.bytesAvailable - 4);
            if (spsNAL == null) {
                return;
            }
        } else if (nalType == 7) {
            // SPS
            spsNAL = new ByteArray();
            // special case for PPS/SPS - don't length encode
            spsLength = mp4Payload.bytesAvailable - 4;
            spsNAL.writeBytes(mp4Payload, 4, mp4Payload.bytesAvailable - 4);

            if (ppsNAL == null) {
                return;
            }
        }

        if ((spsNAL != null) && (ppsNAL != null)) {
            Log.debug(TAG, "Writing sequence header: " + spsLength + "," + ppsLength + "," + timestamp);

            var sequenceHeaderTag : FLVTagVideo = new FLVTagVideo();
            sequenceHeaderTag.codecID = FLVTagVideo.CODEC_ID_AVC;
            sequenceHeaderTag.frameType = FLVTagVideo.FRAME_TYPE_KEYFRAME;
            sequenceHeaderTag.timestamp = timestamp;
            sequenceHeaderTag.avcPacketType = FLVTagVideo.AVC_PACKET_TYPE_SEQUENCE_HEADER;

            spsNAL.position = 1;
            var profile : int = spsNAL.readByte();
            var compatibility : int = spsNAL.readByte();
            var level : int = spsNAL.readByte();
            Log.debug(TAG, profile + "," + compatibility + "," + level + "," + spsLength);

            var avcc : ByteArray = new ByteArray();
            avcc.writeByte(0x01); // avcC version 1
            // profile, compatibility, level
            avcc.writeByte(profile);
            avcc.writeByte(compatibility);
            avcc.writeByte(0x20); //level);
            avcc.writeByte(0xff); // 111111 + 2 bit NAL size - 1
            avcc.writeByte(0xe1); // number of SPS
            avcc.writeByte(spsLength >> 8); // 16-bit SPS byte count
            avcc.writeByte(spsLength);
            avcc.writeBytes(spsNAL, 0, spsLength); // the SPS
            avcc.writeByte(0x01); // number of PPS
            avcc.writeByte(ppsLength >> 8); // 16-bit PPS byte count
            avcc.writeByte(ppsLength);
            avcc.writeBytes(ppsNAL, 0, ppsLength);
            sequenceHeaderTag.data = avcc;

            var bytes : ByteArray = new ByteArray();
            sequenceHeaderTag.write(bytes);

            stream.appendBytes(bytes);

            // clear the PPS/SPS until the next sequence header
            ppsNAL = null;
            spsNAL = null;
        } else {
            if ((timestamp != currentTimestamp) || (currentVideoTag == null)) {
                if (currentVideoTag != null) {
                    currentVideoTag.data = currentSegment;

                    var tagData : ByteArray = new ByteArray();
                    currentVideoTag.write(tagData);

                    stream.appendBytes(tagData);
                }

                currentVideoTag = new FLVTagVideo();
                currentVideoTag.codecID = FLVTagVideo.CODEC_ID_AVC;
                currentVideoTag.frameType = FLVTagVideo.FRAME_TYPE_INTER;
                if (nalType == 5) {
                    currentVideoTag.frameType = FLVTagVideo.FRAME_TYPE_KEYFRAME;
                }
                lastNalType = nalType;
                currentVideoTag.avcPacketType = FLVTagVideo.AVC_PACKET_TYPE_NALU;
                currentVideoTag.timestamp = timestamp;
                currentVideoTag.avcCompositionTimeOffset = 0;

                currentSegment = new ByteArray();
                currentTimestamp = timestamp;
            }

            mp4Payload.position = 0;
            currentSegment.writeBytes(mp4Payload);
        }

Update, more detail - these are the timestamps being passed in:

DEBUG: StreamPlayback: 66,-32,20,19
DEBUG: StreamPlayback: Timestamp: 0
DEBUG: StreamPlayback: Timestamp: 63
DEBUG: StreamPlayback: stream status update: netStatus NetStream.Buffer.Full
DEBUG: StreamPlayback: Timestamp: 137
DEBUG: StreamPlayback: Timestamp: 200
DEBUG: StreamPlayback: Timestamp: 264
DEBUG: StreamPlayback: Timestamp: 328
DEBUG: StreamPlayback: Timestamp: 403
DEBUG: StreamPlayback: Timestamp: 467
DEBUG: StreamPlayback: Timestamp: 531
DEBUG: StreamPlayback: Timestamp: 595
DEBUG: StreamPlayback: Timestamp: 659
DEBUG: StreamPlayback: Timestamp: 723
DEBUG: StreamPlayback: Timestamp: 830
DEBUG: StreamPlayback: Timestamp: 894
DEBUG: StreamPlayback: Timestamp: 958
DEBUG: StreamPlayback: Timestamp: 1021
DEBUG: StreamPlayback: Timestamp: 1086
DEBUG: StreamPlayback: Timestamp: 1161
DEBUG: StreamPlayback: Timestamp: 1225
DEBUG: StreamPlayback: Timestamp: 1289
DEBUG: StreamPlayback: Timestamp: 1353
DEBUG: StreamPlayback: Timestamp: 1418
DEBUG: StreamPlayback: Timestamp: 1491
DEBUG: StreamPlayback: Timestamp: 1556
DEBUG: StreamPlayback: Timestamp: 1633
DEBUG: StreamPlayback: Timestamp: 1684
DEBUG: StreamPlayback: Timestamp: 1747
DEBUG: StreamPlayback: stream status update: netStatus NetStream.Video.DimensionChange
DEBUG: StreamPlayback: Timestamp: 1811

Cheers,

keV

1 Answer:

Answer 0 (score: 0):

Solution One:

"The media doesn't actually start playing until NetStream.Video.DimensionChange is triggered."

Why not pause the NetStream before beginning any appends? Then append tags until the NetStream confirms the "dimension change". In the net status handler, unpause the NetStream; hopefully it will then play in sync, since in the paused state neither the audio nor the video playhead moves.

stream.addEventListener(NetStatusEvent.NET_STATUS, stream_StatusHandler);

stream.play(null);
stream.appendBytesAction(NetStreamAppendBytesAction.RESET_BEGIN);
stream.pause(); //# pause before beginning FLV A/V Tag appends

public function stream_StatusHandler (evt:NetStatusEvent) : void
{
    trace("DEBUG: StreamPlayback : NEW evt.info.code : " + evt.info.code );

    switch (evt.info.code) 
    {
        //# in case its "NetStream.Buffer.Full"
        case "NetStream.Buffer.Full"  :     
        trace("DEBUG: StreamPlayback : NetStream.Buffer.Full...");
        break;

        //# in case its "NetStream.Video.DimensionChange" :
        case "NetStream.Video.DimensionChange"  :
        trace("DEBUG: StreamPlayback : #### Video Dimensions have changed...");         
        trace("DEBUG: StreamPlayback : #### NEW Detected video.videoHeight : " + video.videoHeight ); 
        stream.resume(); //# resume playback
        //# or use :  stream.togglePause();
        break;

    }
}

If that doesn't work, then you can try...

Solution Two:

"I don't know how to make the dimension change event happen sooner though... is there a way to fake it?"

Use BitmapData to create a dynamic video frame consisting of just a simple colour block. The block has a different resolution to that of your video stream. You append the block first, and the difference from your own video frames will trigger the dimension change.

Note: if your video triggers too late (i.e. A/V is not in sync), it means you sent too many audio tags up front (perhaps incorrect timestamps running ahead of the video time?)... Try checking the timestamps. Audio always precedes video, and must not exceed the timestamp of the related video tag.
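
A minimal sketch of that ordering check, assuming hypothetical helpers (audioTagQueue, lastVideoTimestamp and appendTag are not part of the answer's code):

// Hold audio tags back until the video timestamps have caught up to them.
while (audioTagQueue.length > 0 && audioTagQueue[0].timestamp <= lastVideoTimestamp)
{
    appendTag(audioTagQueue.shift()); // writes the FLV audio tag and calls stream.appendBytes()
}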

The example code below generates a 100-wide x 50-high video frame (the BitmapData is encoded in the Screen-Video format and appended as a video tag).

// 1) ## Setup Video Object + Append FLV header + Append Metadata etc
// 2) ## Run function below before appending your first Video tag...

force_Dimension_Adjust();

// 3) ## Do your usual appends...

Here is the relevant code for force_Dimension_Adjust():

public var BA_BMD_Frame : ByteArray = new ByteArray();
public var BA_Temp : ByteArray = new ByteArray();

public function force_Dimension_Adjust() : void
{
    trace("DEBUG: #### doing function : force_Dimension_Adjust");

    //create BMD frame for dimension change
    generate_Frame_BMPdata(); //# Puts result video tag into BA_BMD_Frame

    BA_BMD_Frame.position = 0;
    stream.appendBytes( BA_BMD_Frame ); //should trigger "dimesion change" for video picture size
    trace("DEBUG: StreamPlayback : #### APPENDED :::: BA_BMD_Frame : " );

}

public function generate_Frame_BMPdata() : void
{
    //## Simple colour block as video frame content 
    //## (pW = Picture Width, pH = Picture Height)

    var pW : int = 100; var pH : int = 50; 
    var temp_BMD : BitmapData = new BitmapData(pW, pH, false, 0x5500AA ); //R-G-B 5500AA = purple 
    var temp_BMP : Bitmap = new Bitmap(temp_BMD);

    // 1) #### encode BitmapData to codec Screen Video 
    BA_BMD_Frame.clear(); BA_Temp.clear(); BA_Temp.position = 0; //# Resets 
    encode_SCREENVIDEO (BA_Temp, temp_BMD); //# Put encoded BMD into a temp ByteArray

    // 2) #### Create Video Frame TAG to hold encoded frame 
    BA_BMD_Frame.writeByte(0x09); //# is video TAG

    writeUInt24( BA_BMD_Frame, BA_Temp.length ); //# Write 3 bytes  : size of BMD bytes length 

    BA_BMD_Frame.writeUnsignedInt(0x00); //# Write  4 byte timestamp : 0x00 0x00 0x00 0x00

    writeUInt24( BA_BMD_Frame, 0x00 ); //# Write 3 bytes (stream ID etc) : 0x00 0x00 0x00

    BA_BMD_Frame.position = BA_BMD_Frame.length;
    BA_BMD_Frame.writeBytes( BA_Temp ); //# Write encoded BMD bytes here

    BA_BMD_Frame.position = BA_BMD_Frame.length;
    BA_BMD_Frame.writeUnsignedInt( BA_BMD_Frame.length - 4 ); //# Close : total size of this byteArray (TAG) length minus 4

    BA_BMD_Frame.position = 0; //# Reset position

}


public function encode_SCREENVIDEO ( input_BA : ByteArray , input_BMD : BitmapData ) : void //ByteArray
{
    var w:int = input_BMD.width; var h:int = input_BMD.height;

    //# Video Type = 1 (Keyframe) |&&| Codec ID = 3 (Screen-Video)
    input_BA.writeByte(0x13); 

    //# Block dimensions - declared before the header that encodes them
    const BLOCK_WIDTH:int = input_BMD.width; //# is 100;
    const BLOCK_HEIGHT:int = input_BMD.height; //# is 50;

    //# SCREENVIDEOPACKET 'header'
    writeUI4_12( input_BA, int( (BLOCK_WIDTH  /16) - 1 ),  w ); //12 bits for width
    writeUI4_12( input_BA, int( (BLOCK_HEIGHT /16) - 1 ),  h ); //12 bits for height

    //# Create IMAGEBLOCKS

    var rowMax:int = int(h / BLOCK_HEIGHT);
    var rowRemainder:int = h % BLOCK_HEIGHT; 
    if (rowRemainder > 0) rowMax += 1;

    var colMax:int = int(w / BLOCK_WIDTH);
    var colRemainder:int = w % BLOCK_WIDTH;             
    if (colRemainder > 0) colMax += 1;

    var block:ByteArray = new ByteArray();
    block.endian = Endian.LITTLE_ENDIAN;

    for (var row:int = 0; row < rowMax; row++)
    {
        for (var col:int = 0; col < colMax; col++) 
        {
            var xStart:uint = col * BLOCK_WIDTH;
            var xLimit:int = (colRemainder > 0 && col + 1 == colMax) ? colRemainder : BLOCK_WIDTH;
            var xEnd:int = xStart + xLimit;

            var yStart:uint = h - (row * BLOCK_HEIGHT); //# Read BMP Data from bottom to top
            var yLimit:int = (rowRemainder > 0 && row + 1 == rowMax) ? rowRemainder : BLOCK_HEIGHT; 
            var yEnd:int = yStart - yLimit;

            block.clear(); //# re-use ByteArray

            for (var y:int = yStart-1; y >= yEnd; y--) //# FLV stores Bitmap Data from bottom to top)
            {
                for (var x:int = xStart; x < xEnd; x++) 
                {
                    var p:uint = input_BMD.getPixel(x, y);
                    writeUInt24( block, p ); //# write B-G-R pixel values 
                }
            }

            block.compress();

            input_BA.writeShort(block.length); // write block length (2 bytes == 16 bits)
            input_BA.writeBytes( block ); // write block
        }
    }

    block.length = 0; block = null;
    input_BA.position = input_BA.length;
}

//// Supporting functions

public function writeUInt24( input_BA:ByteArray, val:uint ) : void
{
    var temp_Int_1:int = val >> 16;
    var temp_Int_2:int = val >> 8 & 0xff;
    var temp_Int_3:int = val & 0xff;

    input_BA.writeByte(temp_Int_1); input_BA.writeByte(temp_Int_2);
    input_BA.writeByte(temp_Int_3);
}

public function writeUI4_12(input_BA:ByteArray, p1:uint, p2:uint):void
{
    // writes a 4-bit value followed by a 12-bit value in a total of 16 bits (2 bytes)

    var byte1a:int = p1 << 4;
    var byte1b:int = p2 >> 8;
    var byte1:int = byte1a + byte1b;
    var byte2:int = p2 & 0xff;

    input_BA.writeByte(byte1);  input_BA.writeByte(byte2);
}   