博客> IOS 基于ffmpeg VR播放器
IOS 基于ffmpeg VR播放器
2017-05-23 00:02 评论:0 阅读:143 sunny_zhang
ios 音视频同步

使用场景 支持rtsp,rtmp等的实时流播放的全景播放器 开源的全景播放器HTY360Player有一个问题就是用的是系统的播放器,所以无法支持rtsp或rtmp协议,所以解码部分使用Kxmovie,openGL部分使用HTY360Player,完美解决 HTY360Player 下载地址:https://github.com/hanton/HTY360Player Kxmovie 下载地址:https://github.com/kolyvan/kxmovie 首先需要修改 HTY360Player的openGL部分代码,由于苹果的yuv格式是NV12,NV12和NV21属于YUV420格式,是一种two-plane模式,即Y和UV分为两个Plane,但是UV(CbCr)为交错存储,而不是分为三个plane。 shader.fsh修改为 precision mediump float;

uniform sampler2D SamplerY; //uniform sampler2D SamplerUV; uniform sampler2D SamplerU; uniform sampler2D SamplerV;

varying mediump vec2 v_textureCoordinate;

//uniform mat3 colorConversionMatrix;

// Fragment shader: converts three-plane YUV420p (one texture per plane,
// as uploaded by refreshTexture) to RGB. The commented-out lines are the
// original two-plane (NV12) path kept for reference.
void main() {
    // mediump vec3 yuv;
    // lowp vec3 rgb;
    // yuv.x = texture2D(SamplerY, v_textureCoordinate).r - (16.0/255.0);
    // yuv.yz = texture2D(SamplerUV, v_textureCoordinate).rg - vec2(0.5, 0.5);
    // rgb = colorConversionMatrix * yuv;
    // gl_FragColor = vec4(rgb, 1);

    // Sample each plane; chroma is stored biased by 0.5.
    highp float y = texture2D(SamplerY, v_textureCoordinate).r;
    highp float u = texture2D(SamplerU, v_textureCoordinate).r - 0.5;
    highp float v = texture2D(SamplerV, v_textureCoordinate).r - 0.5;

    // BT.601 full-range YUV -> RGB conversion coefficients.
    highp float r = y + 1.402 * v;
    highp float g = y - 0.344 * u - 0.714 * v;
    highp float b = y + 1.772 * u;

    gl_FragColor = vec4(r, g, b, 1.0);
}

HTYGLKVC.m需修改 uniforms[UNIFORM_Y] = [self.program uniformIndex:@"SamplerY"]; // uniforms[UNIFORM_UV] = [self.program uniformIndex:@"SamplerUV"]; uniforms[UNIFORM_U] = [self.program uniformIndex:@"SamplerU"]; uniforms[UNIFORM_V] = [self.program uniformIndex:@"SamplerV"];

解码使用kxmovie,修改KxmovieViewController.m //KxMovieGLView *_glView; 改为 HTYGLKVC *_glView; 显示使用HTYGLKVC,给HTYGLKVC.m添加方法- (void)render:(KxVideoFrame *)frame;

  • (void) render: (KxVideoFrame *) frame { [frameArray addObject:frame]; return; }

重写HTYGLKVC.m的- (void)refreshTexture;方法

  • (void)refreshTexture {

    CVReturn err; //CVPixelBufferRef pixelBuffer = [self.videoPlayerController retrievePixelBufferToDraw];

    CVPixelBufferRef pixelBuffer =nil; if (frameArray.count >0) { KxVideoFrame frame = [frameArrayobjectAtIndex:0]; if (frame) { KxVideoFrameYUV yuvFrame = (KxVideoFrameYUV *)frame;

        assert(yuvFrame.luma.length == yuvFrame.width * yuvFrame.height);
        assert(yuvFrame.chromaB.length == (yuvFrame.width * yuvFrame.height) / 4);
        assert(yuvFrame.chromaR.length == (yuvFrame.width * yuvFrame.height) / 4);
    
        constNSUInteger frameWidth = frame.width;
        constNSUInteger frameHeight = frame.height;
    
        glPixelStorei(GL_UNPACK_ALIGNMENT,1);
    
        if (0 ==_textures[0])
            glGenTextures(3,_textures);
    
        constUInt8 *pixels[3] = { yuvFrame.luma.bytes, yuvFrame.chromaB.bytes, yuvFrame.chromaR.bytes };
        constNSUInteger widths[3]  = { frameWidth, frameWidth /2, frameWidth / 2 };
        constNSUInteger heights[3] = { frameHeight, frameHeight /2, frameHeight / 2 };
    
        for (int i =0; i < 3; ++i) {
    
            glBindTexture(GL_TEXTURE_2D,_textures[i]);
    
            glTexImage2D(GL_TEXTURE_2D,
                         0,
                         GL_LUMINANCE,
                         widths[i],
                         heights[i],
                         0,
                         GL_LUMINANCE,
                         GL_UNSIGNED_BYTE,
                         pixels[i]);
    
            glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        }     
    
        for (int i =0; i < 3; ++i) {
            glActiveTexture(GL_TEXTURE0 + i);
            glBindTexture(GL_TEXTURE_2D,_textures[i]);
            glUniform1i(uniforms[i+1], i);
        }
    
        [frameArrayremoveObjectAtIndex:0];
    }

    } }

OK,大功告成。

收藏
1
sina weixin mail 回到顶部