ChinaFFmpeg

 找回密码
 立即注册

QQ登录

只需一步,快速开始

查看: 12020|回复: 0

[Android] opengl 渲染yuv数据

[复制链接]
发表于 2015-1-27 18:27:54 | 显示全部楼层 |阅读模式
先贴一下代码
[Java] 纯文本查看 复制代码
public class VideoView extends GLSurfaceView implements GLSurfaceView.Renderer
{
        // Last known surface size in pixels, forwarded to the native renderer.
        private int w, h;

        public VideoView(Context context, AttributeSet attrs)
        {
        super(context, attrs);
        init();
        }

        public VideoView(Context context, AttributeSet attrs, int defStyle)
        {
        // BUG FIX: this constructor previously called super(context), dropping
        // the XML attributes and skipping all renderer setup, so a view
        // inflated through this path would never render. Delegate instead
        // (GLSurfaceView has no 3-arg constructor, so defStyle is unused).
        this(context, attrs);
        }

        // GL setup shared by all constructors.
        private void init()
        {
        setEGLContextClientVersion(2);          // request an OpenGL ES 2.0 context
        setRenderer(this);                      // this view is its own renderer
        getHolder().setFormat(PixelFormat.RGB_565);
        setRenderMode(RENDERMODE_WHEN_DIRTY);   // render only on requestRender()
        // NOTE(review): getWidth()/getHeight() return 0 here because layout
        // has not run yet; the real size arrives in onSurfaceChanged().
        w = getWidth();
        h = getHeight();
        }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config)
        {
                // Initialize the native GL pipeline (shaders, program).
                int nRe = H264Decoder.GetObj().InitGl(0, 0, w, h);
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height)
        {
                w = width;
                h = height;
                Log.d("==========", "width:" + w + "   height:" + h);
                // Rebuild viewport, VBOs and textures for the new size.
                H264Decoder.GetObj().ChangedGl(0, 0, w, h);
        }

        @Override
        public void onDrawFrame(GL10 gl)
        {
                // Invoked after requestRender(); draws the latest decoded frame.
                H264Decoder.GetObj().Draw(0, 0, w, h);
        }
}



[C++] 纯文本查看 复制代码
// Copy one image plane from the decoder frame into a tightly packed buffer,
// row by row. FFmpeg pads each row to `stride` (linesize) bytes, so copying
// the whole plane with a single memcpy would drag the padding along.
static void CopyPlane(unsigned char* dst, const unsigned char* src,
		int width, int height, int stride)
{
	for (int i = 0; i < height; ++i)
	{
		memcpy(dst, src, width);
		dst += width;
		src += stride;
	}
}

// Copy the current decoded frame (param.pFrame, assumed YUV420P) into the
// global g_YuvBuff planes, which Draw() later uploads as textures.
// Buffers are allocated lazily on first use and reused across frames.
static void MemCpyYuv()
{
	int w = param.pCodecCtx->width;
	int h = param.pCodecCtx->height;
	int uvw = w >> 1;	// chroma planes are quarter size for 4:2:0
	int uvh = h >> 1;

	if (g_YuvBuff.pszY == NULL)
		g_YuvBuff.pszY = (unsigned char*)malloc(w * h);
	if (g_YuvBuff.pszU == NULL)
		g_YuvBuff.pszU = (unsigned char*)malloc(uvw * uvh);
	if (g_YuvBuff.pszV == NULL)
		g_YuvBuff.pszV = (unsigned char*)malloc(uvw * uvh);

	if (g_YuvBuff.pszY)
		CopyPlane(g_YuvBuff.pszY, param.pFrame->data[0], w, h,
				param.pFrame->linesize[0]);
	if (g_YuvBuff.pszU)
		CopyPlane(g_YuvBuff.pszU, param.pFrame->data[1], uvw, uvh,
				param.pFrame->linesize[1]);
	// BUG FIX: the V plane previously used linesize[1]; use its own
	// linesize[2] (usually equal for YUV420P, but not guaranteed).
	if (g_YuvBuff.pszV)
		CopyPlane(g_YuvBuff.pszV, param.pFrame->data[2], uvw, uvh,
				param.pFrame->linesize[2]);
}

// Pack the three g_YuvBuff planes into the contiguous global pszYuv buffer
// (planar I420 layout: Y, then U, then V) and notify the Java side via its
// SaveImage() callback so it can persist the frame.
void SaveImage(JNIEnv * env, jobject obj)
{
	int y = g_nWidth * g_nHeight;
	int uv = (g_nWidth >> 1) * (g_nHeight >> 1);

	// BUG FIX: pszYuv was overwritten with a fresh malloc on every call,
	// leaking the previous buffer. free(NULL) is a no-op, so this is safe
	// on the first call too. Assumes the Java callback consumed the buffer
	// synchronously during the previous call — TODO confirm.
	free(pszYuv);
	pszYuv = (unsigned char*) malloc(y + uv + uv);	// w * h * 3 / 2 bytes total
	if (!pszYuv)
		return ;

	memcpy(pszYuv, g_YuvBuff.pszY, y);
	memcpy(pszYuv + y, g_YuvBuff.pszU, uv);
	memcpy(pszYuv + y + uv, g_YuvBuff.pszV, uv);

	jclass cls = env->GetObjectClass(obj);
	jmethodID callback = env->GetMethodID(cls, "SaveImage", "()V");
	// BUG FIX: calling CallVoidMethod with a pending NoSuchMethodError
	// (GetMethodID returned NULL) is undefined behavior in JNI.
	if (callback != NULL)
		env->CallVoidMethod(obj, callback);
	env->DeleteLocalRef(cls);
}

// Feed a chunk of raw H.264 bytes to the parser/decoder. For every complete
// frame: publish the video size to Java (first frame only), copy the YUV
// planes, optionally trigger SaveImage, and request a redraw.
// Always returns 0.
jint decoder(JNIEnv * env, jobject obj, jbyteArray buffer, jint length, jint nOffset, jint nDPI, int nSaveImage)
{
	// Warm-up frame dropping: depending on the resolution profile (nDPI)
	// the first few packets are skipped. Once g_nFrameDrop reaches the
	// threshold, every later call decodes normally.
	switch (nDPI)
	{
	case 0:
		if (g_nFrameDrop < 8)
		{
			++ g_nFrameDrop;
			return 0;
		}
	break;
	case 1:
		if (g_nFrameDrop < 5)
		{
			++ g_nFrameDrop;
			return 0;
		}
	break;
	case 2:
		if (g_nFrameDrop < 3)
		{
			++ g_nFrameDrop;
			return 0;
		}
	break;
	case 3:
	default:
	break;
	}

	jbyte* jbyteArrayTemp = env->GetByteArrayElements(buffer, NULL);
	if (jbyteArrayTemp == NULL)	// pinning/allocation failed; nothing to release
		return 0;
	uint8_t* pDataFlag = (uint8_t *) jbyteArrayTemp + nOffset;
	// BUG FIX: bufSize was declared uint16_t, silently truncating any input
	// longer than 65535 bytes and corrupting the parse loop below.
	int bufSize = (int) length;

	while (bufSize > 0)
	{
		int size = 0;
		uint8_t * pBuf = NULL;
		// Split the byte stream into complete packets; nLen = bytes consumed.
		int nLen = av_parser_parse2(param.pCodecParser,
				param.pCodecCtx, &pBuf, &size, (uint8_t*) pDataFlag,
				bufSize, AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);

		// BUG FIX: a parser error (nLen < 0) or a no-progress iteration
		// previously spun this loop forever.
		if (nLen < 0 || (nLen == 0 && size == 0))
			break;

		bufSize -= nLen;
		pDataFlag += nLen;

		if (size > 0)
		{
			param.pkt.data = pBuf;
			param.pkt.size = size;
			int nFrameFinished = 0;
			avcodec_decode_video2(param.pCodecCtx, param.pFrame, &nFrameFinished, &param.pkt);

			if (nFrameFinished > 0)
			{
				// First decoded frame: publish the stream dimensions so the
				// Java side can create its output bitmap.
				if (param.bFirst == 0)
				{
					g_nWidth = param.pCodecCtx->width;
					g_nHeight = param.pCodecCtx->height;
					callbackCreateBitmap(env, obj, g_nWidth, g_nHeight);
					param.bFirst = 1;
				}

				// Copy the frame's planes into the tightly packed buffers.
				MemCpyYuv();

				if (1 == nSaveImage)
				{
					SaveImage(env, obj);
				}

				// Mark a fresh frame and ask the GL view to redraw.
				g_bIsDraw = 0;
				callbackInvalidate(env, obj);
			}
			av_free_packet(&param.pkt);
		}
	}

	env->ReleaseByteArrayElements(buffer, jbyteArrayTemp, 0);
	return 0;
}

GLuint g_ShaderPro; // handle of the linked shader program

// Texture object ids for the three YUV planes.
// NOTE(review): GLuint is unsigned, so -1 wraps to UINT_MAX; it is used
// throughout as a "not yet created" sentinel (compared via `-1 != ...`).
static GLuint g_nTextureY = -1;
static GLuint g_nTextureU = -1;
static GLuint g_nTextureV = -1;

// Uniform locations of the three sampler2D uniforms in the fragment shader
static GLuint g_dwSamplerY;
static GLuint g_dwSamplerU;
static GLuint g_dwSamplerV;

// Vertex-attribute locations
static GLuint g_dwPosition;			// vertex position attribute
static GLuint g_dwTexCoordIn;		// texture coordinate attribute

static GLuint g_nV = -1;		// VBO holding the quad's vertex positions
static GLuint g_nC = -1;		// VBO holding the quad's texture coordinates

// Current video frame size in pixels (set on the first decoded frame)
static int g_nWidth;
static int g_nHeight;

// 0 = a new frame is ready to draw; 1 = already drawn.
// Set in Draw(), cleared in decoder() when fresh data arrives.
static int g_bIsDraw = 0;

// Fragment shader source: samples the three single-channel plane textures and
// converts YUV to RGB (BT.601-style coefficients).
// NOTE(review): the original comment called this the "vertex shader converting
// YUV to RGB565" — it is the FRAGMENT shader, and the RGB565 packing happens
// in the surface format, not here.
#define FRAGMENT_SHADER	"varying lowp vec2 TexCoordOut;\n" \
					  	"uniform sampler2D SamplerY;\n" \
					  	"uniform sampler2D SamplerU;\n" \
					  	"uniform sampler2D SamplerV;\n" \
					  	"void main()\n" \
					  	"{\n" \
					  	"mediump vec3 yuv;\n" \
					  	"lowp vec3 rgb;\n" \
					  	"yuv.x = texture2D(SamplerY, TexCoordOut).r;\n" \
					  	"yuv.y = texture2D(SamplerU, TexCoordOut).r - 0.5;\n" \
					  	"yuv.z = texture2D(SamplerV, TexCoordOut).r - 0.5;\n" \
					  	"rgb = mat3(1, 1, 1, 0, -0.39465, 2.03211, 1.13983, -0.58060, 0) * yuv;\n" \
						"gl_FragColor = vec4(rgb, 1);\n" \
						"}\n"

// Vertex shader source: passes the vertex position through unchanged and
// forwards the texture coordinate to the fragment shader.
// (Note: the macro name's "SHWDER" typo is kept — InitGl references it.)
#define VERTEX_SHWDER	"attribute vec4 Position;\n" \
						"attribute vec2 TexCoordIn;\n" \
						"varying vec2 TexCoordOut;\n" \
						"void main()\n" \
						"{\n" \
						"gl_Position = Position;\n" \
						"TexCoordOut = TexCoordIn;\n" \
						"}\n"


// Create, source and compile one shader object of the given type
// (GL_VERTEX_SHADER or GL_FRAGMENT_SHADER).
// Returns the shader handle on success, -1 on failure (error is logged).
static int LoadShader(int nType, const char* pszShader)
{
	int s = glCreateShader(nType);
	// BUG FIX: glCreateShader returns 0 on failure (e.g. no current context);
	// that case was previously not checked.
	if (s == 0)
		return -1;

	glShaderSource(s, 1, &pszShader, NULL);
	glCompileShader(s);

	int nCompStatus = 0;
	glGetShaderiv(s, GL_COMPILE_STATUS, &nCompStatus);

	if (nCompStatus != GL_TRUE)
	{
		char szBuf[256];
		// BUG FIX: the log length was previously written into the status
		// variable; use a dedicated variable.
		int nLogLen = 0;
		glGetShaderInfoLog(s, sizeof(szBuf), &nLogLen, szBuf);
		LOGE("[error]  %s", szBuf);
		glDeleteShader(s);
		return -1;
	}
	return s;
}

// Build and link the YUV->RGB shader program and cache the attribute
// locations. Called once from Java's onSurfaceCreated on the GL thread.
// Returns 0 on success, -1 on failure.
jint InitGl(JNIEnv * env, jobject obj, jint x, jint y, jint w, jint h)
{
	LOGI("================== %s ==========", __FUNCTION__);
	glViewport(x, y, w, h);				// initial viewport

	// Compile both shader stages.
	int v = LoadShader(GL_VERTEX_SHADER, VERTEX_SHWDER);
	if (v == -1)
	{
		LOGE("[error] %s build vertex failed %d ===\n%s", __FUNCTION__, v, VERTEX_SHWDER);
		return -1;
	}
	int f = LoadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
	if (f == -1)
	{
		LOGE("[error] %s build fragment failed %d ===\n%s", __FUNCTION__, f, FRAGMENT_SHADER);
		glDeleteShader(v);	// BUG FIX: don't leak the vertex shader on this path
		return -1;
	}

	g_ShaderPro = glCreateProgram();		// create the program object
	glAttachShader(g_ShaderPro, v);			// attach the vertex shader
	glAttachShader(g_ShaderPro, f);			// attach the fragment shader
	glLinkProgram(g_ShaderPro);				// link

	// BUG FIX: the shader objects were leaked; once attached and linked they
	// can be flagged for deletion (freed when detached/program deleted).
	glDeleteShader(v);
	glDeleteShader(f);

	int nLinkStatus;
	glGetProgramiv(g_ShaderPro, GL_LINK_STATUS, &nLinkStatus);
	if (nLinkStatus != GL_TRUE)
	{
		char szBuf[256];
		glGetProgramInfoLog(g_ShaderPro, 256, &nLinkStatus, szBuf);
		LOGE("[error] %s build link program failed %s", __FUNCTION__, szBuf);
		glDeleteProgram(g_ShaderPro);	// BUG FIX: was glDeleteShader on a program handle
		return -1;
	}

	// BUG FIX: the vertex shader declares the attribute "Position" (see
	// VERTEX_SHWDER); querying "aPosition" returned -1, which broke
	// glVertexAttribPointer(g_dwPosition, ...) in Draw().
	g_dwPosition = glGetAttribLocation(g_ShaderPro, "Position");
	g_dwTexCoordIn = glGetAttribLocation(g_ShaderPro, "TexCoordIn");
	LOGI("******************* %s ************** g_dwPosition = %d  g_dwTexCoordIn=%d", __FUNCTION__, g_dwPosition, g_dwTexCoordIn);
	return 0;
}

// Called when the surface/window size changes: looks up the sampler uniform
// locations, (re)creates the vertex/texcoord VBOs and the three plane
// textures, and resets the viewport. Must run on the GL thread.
void ChangedGl(JNIEnv * env, jobject obj, jint left, jint top, jint right, jint bottom)
{
	LOGI("================== %s ==========", __FUNCTION__);
	g_dwSamplerY = glGetUniformLocation(g_ShaderPro, "SamplerY");// uniform locations of the plane samplers
	g_dwSamplerU = glGetUniformLocation(g_ShaderPro, "SamplerU");
	g_dwSamplerV = glGetUniformLocation(g_ShaderPro, "SamplerV");

	LOGI("******************* %s ---------- g_dwSamplerY = %d  g_dwSamplerU = %d g_dwSamplerV = %d", __FUNCTION__, g_dwSamplerY, g_dwSamplerU, g_dwSamplerV);

        // NOTE(review): the vertex corners and texture corners are deliberately
        // listed in different orders — per the original author this compensates
        // for the image's vertical orientation. TODO confirm the flip is wanted.
	float fV[][2] = 
	{
		{-1.0f, -1.0f}, 	// bottom-left
		{1.0f, -1.0f}, 		// bottom-right
		{-1.0f, 1.0f}, 		// top-left
		{1.0f, 1.0f},		// top-right
	};

	float fC[][2] = 
	{
		{0.0f, 1.0f}, 		// top-left
		{1.0f, 1.0f}, 		// top-right
		{0.0f, 0.0f}, 		// bottom-left
		{1.0f, 0.0f}		// bottom-right
	};

	// (Re)create the vertex-position VBO
	if (-1 != g_nV)
		glDeleteBuffers(1, &g_nV);
	glGenBuffers(1, &g_nV);  // allocate a buffer object on the GPU
	glBindBuffer(GL_ARRAY_BUFFER, g_nV); // bind it so the following calls target it
	glBufferData(GL_ARRAY_BUFFER, sizeof(fV), fV, GL_STATIC_DRAW);// upload the vertex data

        // (Re)create the texture-coordinate VBO the same way
	if (-1 != g_nC)
		glDeleteBuffers(1, &g_nC);
	glGenBuffers(1, &g_nC); // 
	glBindBuffer(GL_ARRAY_BUFFER, g_nC);
	glBufferData(GL_ARRAY_BUFFER, sizeof(fC), fC, GL_STATIC_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, 0);

        // (Re)create the three plane texture objects; pixel data is uploaded
        // later, every frame, in Draw().
	if (-1 != g_nTextureY)
		glDeleteTextures(1, &g_nTextureY);
	glGenTextures(1, &g_nTextureY);

	if (-1 != g_nTextureU)
		glDeleteTextures(1, &g_nTextureU);
	glGenTextures(1, &g_nTextureU);

	if (-1 != g_nTextureV)
		glDeleteTextures(1, &g_nTextureV);
	glGenTextures(1, &g_nTextureV);
       // NOTE(review): glViewport takes (x, y, width, height); the Java side
       // passes (0, 0, w, h) so this works, but the parameter names mislead.
	glViewport(left, top, right, bottom);
	LOGI("******************* %s ************** g_nTextureY = %d  g_nTextureU = %d g_nTextureV = %d", __FUNCTION__, g_nTextureY, g_nTextureU, g_nTextureV);
}

// Upload the latest YUV planes as three GL_LUMINANCE textures and draw a
// full-screen quad through the YUV->RGB shader. Reached via Java's
// requestRender() -> onDrawFrame() -> here, on the GL thread.
void Draw(JNIEnv * env, jobject obj, jint left, jint top, jint right, jint bottom)
{
	// Skip until the first frame has been decoded, and don't redraw the same
	// frame twice (g_bIsDraw is cleared by decoder() when new data arrives).
	if (0 == param.bFirst || 1 == g_bIsDraw)
		return ;

	g_bIsDraw = 1;
	glViewport(left, top, right, bottom);
	glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glUseProgram(g_ShaderPro); // activate the shader program

	int uvw = g_nWidth >> 1;	// chroma planes are quarter size (4:2:0)
	int uvh = g_nHeight >> 1;

	// BUG FIX: each plane was previously uploaded twice per frame —
	// glTexImage2D immediately followed by glTexSubImage2D with the same
	// data. glTexImage2D alone both (re)allocates the texture storage and
	// uploads the pixels, so the glTexSubImage2D calls were pure waste.

	// Y plane on texture unit 0. GL_LUMINANCE stores one byte per texel,
	// which the fragment shader reads back through the .r channel.
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, g_nTextureY);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, g_nWidth, g_nHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, g_YuvBuff.pszY);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glUniform1i(g_dwSamplerY, 0);

	// U plane on texture unit 1.
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, g_nTextureU);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, uvw, uvh, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, g_YuvBuff.pszU);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glUniform1i(g_dwSamplerU, 1);

	// V plane on texture unit 2.
	glActiveTexture(GL_TEXTURE2);
	glBindTexture(GL_TEXTURE_2D, g_nTextureV);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, uvw, uvh, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, g_YuvBuff.pszV);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glUniform1i(g_dwSamplerV, 2);

	// Point the shader attributes at the VBOs filled in ChangedGl().
	glBindBuffer(GL_ARRAY_BUFFER, g_nV);
	glVertexAttribPointer(g_dwPosition, 2, GL_FLOAT, GL_FALSE, 0, 0);
	glEnableVertexAttribArray(g_dwPosition);

	glBindBuffer(GL_ARRAY_BUFFER, g_nC);
	glVertexAttribPointer(g_dwTexCoordIn, 2, GL_FLOAT, GL_FALSE, 0, 0);
	glEnableVertexAttribArray(g_dwTexCoordIn);

	// Draw the quad as a 4-vertex triangle strip.
	glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
	glBindBuffer(GL_ARRAY_BUFFER, 0);
}






这里没有什么好说的,看代码应该也明白了。我碰到的问题是
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, g_nTextureV);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, uvw, uvh, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, g_YuvBuff.pszV);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

这一部分代码放到窗口变化时只想做一次操作,但在绘制时glTexSubImage2D函数出错说没有在之前调用过glTexImage2D函数对g_nTextureV做处理。


如上面有说不对的请大神指点。


回复

使用道具 举报

您需要登录后才可以回帖 登录 | 立即注册

本版积分规则

手机版|Archiver|ChinaFFmpeg

GMT+8, 2024-12-27 06:37 , Processed in 0.052061 second(s), 15 queries .

Powered by Discuz! X3.4

Copyright © 2001-2021, Tencent Cloud.

快速回复 返回顶部 返回列表