在Android上将视频纹理绑定到OpenGL球体上

我正试图在Android上创建一个360度视频球体(就像Google Cardboard那样)。我已经用静态图片实现了这一点:在OpenGL ES 1.0中渲染一个球体,并把纹理附加到它上面。之后我可以使用传感器值来旋转球体。

但是,我无法弄清楚如何把静态图片换成视频。我已经尝试用texSubImage2D()逐帧上传来渲染,但这非常慢。我的视频分辨率可能要达到4K左右,因为即使每次只显示其中一小部分,我也需要较好的画质。

我已经阅读了一些关于应该如何实现的理论资料(例如帧缓冲区、外部纹理、同步等),但找不到任何相关的示例代码,所以如果能提供一些示例代码将非常感谢。

这是我如何渲染球体,绘制它并附加纹理(即我的球体类)…

import rapid.decoder.BitmapDecoder; public class Sphere { /** Buffer holding the vertices. */ private final List mVertexBuffer = new ArrayList(); /** The vertices for the sphere. */ private final List mVertices = new ArrayList(); /** Buffer holding the texture coordinates. */ private final List mTextureBuffer = new ArrayList(); /** Mapping texture coordinates for the vertices. */ private final List mTexture = new ArrayList(); /** The texture pointer. */ private final int[] mTextures = new int[1]; /** Total number of strips for the given depth. */ private final int mTotalNumStrips; public Sphere(final int depth, final float radius) { // Calculate basic values for the sphere. this.mTotalNumStrips = Maths.power(2, depth - 1) * 5; //last 5 is related to properties of a icosahedron final int numVerticesPerStrip = Maths.power(2, depth) * 3; final double altitudeStepAngle = Maths.rad120 / Maths.power(2, depth); final double azimuthStepAngle = Maths.rad360 / this.mTotalNumStrips; double x, y, z, h, altitude, azimuth; Log.e("mTotalNumStrips", ""+mTotalNumStrips); Log.e("numVerticesPerStrip", ""+numVerticesPerStrip); for (int stripNum = 0; stripNum < this.mTotalNumStrips; stripNum++) { // Setup arrays to hold the points for this strip. final float[] vertices = new float[numVerticesPerStrip * 3]; // x,y,z final float[] texturePoints = new float[numVerticesPerStrip * 2]; // 2d texture int vertexPos = 0; int texturePos = 0; // Calculate position of the first vertex in this strip. altitude = Maths.rad90; azimuth = stripNum * azimuthStepAngle; // Draw the rest of this strip. for (int vertexNum = 0; vertexNum < numVerticesPerStrip; vertexNum += 2) { // First point - Vertex. y = radius * Math.sin(altitude); h = radius * Math.cos(altitude); z = h * Math.sin(azimuth); x = h * Math.cos(azimuth); vertices[vertexPos++] = (float) x; vertices[vertexPos++] = (float) y; vertices[vertexPos++] = (float) z; // First point - Texture. 
texturePoints[texturePos++] = (float) (1 + azimuth / Maths.rad360); texturePoints[texturePos++] = (float) (1 - (altitude + Maths.rad90) / Maths.rad180); // Second point - Vertex. altitude -= altitudeStepAngle; azimuth -= azimuthStepAngle / 2.0; y = radius * Math.sin(altitude); h = radius * Math.cos(altitude); z = h * Math.sin(azimuth); x = h * Math.cos(azimuth); vertices[vertexPos++] = (float) x; vertices[vertexPos++] = (float) y; vertices[vertexPos++] = (float) z; // Second point - Texture. texturePoints[texturePos++] = (float) (1 + azimuth / Maths.rad360); texturePoints[texturePos++] = (float) (1 - (altitude + Maths.rad90) / Maths.rad180); azimuth += azimuthStepAngle; } this.mVertices.add(vertices); this.mTexture.add(texturePoints); ByteBuffer byteBuffer = ByteBuffer.allocateDirect(numVerticesPerStrip * 3 * Float.SIZE); byteBuffer.order(ByteOrder.nativeOrder()); FloatBuffer fb = byteBuffer.asFloatBuffer(); fb.put(this.mVertices.get(stripNum)); fb.position(0); this.mVertexBuffer.add(fb); // Setup texture. byteBuffer = ByteBuffer.allocateDirect(numVerticesPerStrip * 2 * Float.SIZE); byteBuffer.order(ByteOrder.nativeOrder()); fb = byteBuffer.asFloatBuffer(); fb.put(this.mTexture.get(stripNum)); fb.position(0); this.mTextureBuffer.add(fb); } } public void loadGLTexture(final GL10 gl, final Context context, final int texture) { Bitmap bitmap = BitmapDecoder.from(context.getResources(), texture) .scale(4048, 2024) .decode(); // Generate one texture pointer, and bind it to the texture array. gl.glGenTextures(1, this.mTextures, 0); gl.glBindTexture(GL10.GL_TEXTURE_2D, this.mTextures[0]); // Create nearest filtered texture. gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST); gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR); // Use Android GLUtils to specify a two-dimensional texture image from our bitmap. GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0); // Tide up. 
bitmap.recycle(); } /** * The draw method for the square with the GL context. * * @param gl Graphics handle. */ public void draw(final GL10 gl) { // bind the previously generated texture. gl.glBindTexture(GL10.GL_TEXTURE_2D, this.mTextures[0]); // Point to our buffers. gl.glEnableClientState(GL10.GL_VERTEX_ARRAY); gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY); // Set the face rotation, clockwise in this case. gl.glFrontFace(GL10.GL_CW); // Point to our vertex buffer. for (int i = 0; i < this.mTotalNumStrips; i++) { gl.glVertexPointer(3, GL10.GL_FLOAT, 0, this.mVertexBuffer.get(i)); gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, this.mTextureBuffer.get(i)); // Draw the vertices as triangle strip. gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, this.mVertices.get(i).length / 3); } // Disable the client state before leaving. gl.glDisableClientState(GL10.GL_VERTEX_ARRAY); gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY); } 

}

这是我的渲染器代码……

 @Override public void onDrawFrame(final GL10 gl) { zvector = new float[] {0,0,1,0}; resultvector = new float[] {0,0,1,0}; gl.glMatrixMode(GL10.GL_MODELVIEW); gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT); gl.glLoadIdentity(); float radiansX = (float) Math.toRadians(gyro_angle[1]); float radiansY = (float) Math.toRadians(-gyro_angle[0]); float radiansZ = (float) Math.toRadians(-gyro_angle[2]); // Finds the Sin and Cosin for the half angle. float sinX =(float) Math.sin(radiansX * 0.5); float cosX =(float) Math.cos(radiansX * 0.5); float sinY =(float) Math.sin(radiansY * 0.5); float cosY =(float) Math.cos(radiansY * 0.5); float sinZ =(float) Math.sin(radiansZ * 0.5); float cosZ =(float) Math.cos(radiansZ * 0.5); // Formula to construct a new Quaternion based on direction and angle. quatX[0] = cosX; quatX[1] = 1 * sinX; quatX[2] = 0 * sinX; quatX[3] = 0 * sinX; quatY[0] = cosY; quatY[1] = 0 * sinY; quatY[2] = 1 * sinY; quatY[3] = 0 * sinY; quatZ[0] = cosZ; quatZ[1] = 0 * sinZ; quatZ[2] = 0 * sinZ; quatZ[3] = 1 * sinZ; quat1 = multiplyQuat(quatX, quatY); quat2 = multiplyQuat(quat1, quatZ); mMatrix = getMatrixfromQuat(quat1); gl.glLoadMatrixf(mMatrix, 0); this.mSphere.draw(gl); } @Override public void onSurfaceChanged(final GL10 gl, final int width, final int height) { final float aspectRatio = (float) width / (float) (height == 0 ? 
1 : height); gl.glViewport(0, 0, width, height); gl.glMatrixMode(GL10.GL_PROJECTION); gl.glLoadIdentity(); GLU.gluPerspective(gl, 45.0f, aspectRatio, 0.1f, 100.0f); gl.glMatrixMode(GL10.GL_MODELVIEW); gl.glLoadIdentity(); } @Override public void onSurfaceCreated(final GL10 gl, final EGLConfig config) { this.mSphere.loadGLTexture(gl, this.mContext, R.drawable.pic360); gl.glEnable(GL10.GL_TEXTURE_2D); gl.glShadeModel(GL10.GL_SMOOTH); gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); gl.glClearDepthf(1.0f); gl.glEnable(GL10.GL_DEPTH_TEST); gl.glDepthFunc(GL10.GL_LEQUAL); gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST); } //CONSTRUCTER public GlRenderer(final Context context) { this.mContext = context; this.mSphere = new Sphere(5, 2); sensorManager = (SensorManager) this.mContext.getSystemService(this.mContext.SENSOR_SERVICE); sensorGyroscope = sensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE); sensorAccelerometer = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); sensorMagneticField = sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); valuesAccelerometer = new float[3]; valuesMagneticField = new float[3]; matrixR = new float[9]; matrixI = new float[9]; matrixValues = new float[3]; sensorManager.registerListener(this, sensorGyroscope, SensorManager.SENSOR_DELAY_FASTEST); sensorManager.registerListener(this, sensorAccelerometer, SensorManager.SENSOR_DELAY_FASTEST); sensorManager.registerListener(this, sensorMagneticField, SensorManager.SENSOR_DELAY_FASTEST); } //HERE GOES SOME CURRENTLY IRRELEVANT STUFF ABOUT THE SENSORS AND QUATERNIONS 

我也遇到过这类视频纹理的问题。我最初使用ffmpeg进行视频解码,但性能很差(和你一样——逐帧提取)。为了提高性能,我改用了Android自带的MediaPlayer。你可以用SurfaceTexture来创建OpenGL表面(球体、圆柱体、立方体等),然后把对应的Surface设置给MediaPlayer:

// Wrap the SurfaceTexture in a Surface and hand it to MediaPlayer, so decoded
// video frames are delivered straight into the GL texture instead of being
// uploaded frame-by-frame from the CPU.
Surface surface = new Surface(mSurface); // mSurface is your SurfaceTexture
mMediaPlayer.setSurface(surface);
mMediaPlayer.setScreenOnWhilePlaying(true); // keep the screen on during playback

这只是其中一种实现方式。我是为一些商业闭源项目做的,所以不能直接分享代码。我希望不久之后能在GitHub上发布一份开源版本。