I created an app which records 10 seconds of video from the camera, without sound. This is part of the program code:
...
// --- Encoder setup: H.264 ("video/avc") encoder fed through an EGL window surface. ---
MediaCodec mMediaCodec = MediaCodec.createEncoderByType("video/avc");
// mMediaFormat is defined elsewhere in the class — presumably carries width/height,
// bitrate, frame rate, I-frame interval and COLOR_FormatSurface; verify against caller.
mMediaCodec.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// The input surface must be created after configure() and before start().
Surface mSurface = mMediaCodec.createInputSurface();
// --- EGL setup: an ES2 context whose window surface is the encoder's input surface,
// so everything drawn with GL below becomes encoder input frames. ---
EGLDisplay mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
int[] e = new int[2];
// e receives the EGL major/minor version; the values are not used afterwards.
EGL14.eglInitialize(mEGLDisplay, e, 0, e, 1);
EGLConfig[] mEGLConfig = new EGLConfig[1];
// 12610 is EGL_RECORDABLE_ANDROID (no named constant in EGL14 on API 18) — it selects
// a config whose surfaces can feed a media recorder/encoder.
EGL14.eglChooseConfig(mEGLDisplay, new int[]{EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8, EGL14.EGL_BLUE_SIZE, 8, EGL14.EGL_ALPHA_SIZE, 8, EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, 12610, 1, EGL14.EGL_NONE}, 0, mEGLConfig, 0, 1, new int[1], 0);
EGLContext mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mEGLConfig[0], EGL14.EGL_NO_CONTEXT, new int[]{EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE}, 0);
EGLSurface mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig[0], mSurface, new int[]{EGL14.EGL_NONE}, 0);
mMediaCodec.start();
// Muxer writes the encoded H.264 stream into an MP4 on external storage.
// NOTE(review): writing to getExternalStorageDirectory() requires WRITE_EXTERNAL_STORAGE.
MediaMuxer mMediaMuxer = new MediaMuxer(new File(Environment.getExternalStorageDirectory(), "ipcamera.mp4").getPath(), OutputFormat.MUXER_OUTPUT_MPEG_4);
EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
// Full-screen quad: 80 bytes = 20 floats = 4 vertices of interleaved (x, y, z, u, v);
// stride 20 bytes is used in the glVertexAttribPointer calls below.
FloatBuffer mFloatBuffer = ByteBuffer.allocateDirect(80).order(ByteOrder.nativeOrder()).asFloatBuffer();
mFloatBuffer.put(new float[]{-1, -1, 0, 0, 0, 1, -1, 0, 1, 0, -1, 1, 0, 0, 1, 1, 1, 0, 1, 1}).position(0);
// sm1: texture transform from SurfaceTexture; sm2: MVP matrix (identity each frame).
float[] sm1 = new float[16], sm2 = new float[16];
Matrix.setIdentityM(sm1, 0); int program = GLES20.glCreateProgram(), f = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER), params[] = new int[1];
// NOTE(review): the trailing "n" on each GLSL line looks like "\n" with the backslash
// lost in the paste — as literally written these shader sources will NOT compile.
// The compile status fetched into params[] below is never actually checked either.
GLES20.glShaderSource(f, "uniform mat4 uMVPMatrix;n" + "uniform mat4 uSTMatrix;n" + "attribute vec4 aPosition;n" + "attribute vec4 aTextureCoord;n" + "varying vec2 vTextureCoord;n" + "void main() {n" + " gl_Position = uMVPMatrix * aPosition;n" + " vTextureCoord = (uSTMatrix * aTextureCoord).xy;n" + "}n"); GLES20.glCompileShader(f);
// Fragment shader samples the external (camera) texture via samplerExternalOES.
GLES20.glGetShaderiv(f, GLES20.GL_COMPILE_STATUS, params, 0); GLES20.glAttachShader(program, f); GLES20.glShaderSource(f = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER), "#extension GL_OES_EGL_image_external : requiren" + "precision mediump float;n" + "varying vec2 vTextureCoord;n" + "uniform samplerExternalOES sTexture;n" + "void main() {n" + " gl_FragColor = texture2D(sTexture, vTextureCoord);n" + "}n");
GLES20.glCompileShader(f);
GLES20.glGetShaderiv(f, GLES20.GL_COMPILE_STATUS, params, 0); GLES20.glAttachShader(program, f);
GLES20.glLinkProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, params, 0);
// On link failure the program is deleted but execution continues with the dead handle.
if (params[0] != GLES20.GL_TRUE) GLES20.glDeleteProgram(program);
int maPositionHandle = GLES20.glGetAttribLocation(program, "aPosition"), maTextureHandle = GLES20.glGetAttribLocation(program, "aTextureCoord"), muMVPMatrixHandle = GLES20.glGetUniformLocation(program, "uMVPMatrix"), muSTMatrixHandle = GLES20.glGetUniformLocation(program, "uSTMatrix"), texName[] = new int[1];
// External OES texture that will receive camera frames via SurfaceTexture.
GLES20.glGenTextures(1, texName, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texName[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// Camera preview is routed into the OES texture; onFrameAvailable (below) sets the
// shared flag VideoRecording.this.b when a new frame is ready.
(mSurfaceTexture = new SurfaceTexture(texName[0])).setOnFrameAvailableListener(this);
mCamera.setPreviewTexture(mSurfaceTexture);
mCamera.startPreview();
// a: recording start time; b: keep-running flag; c: dequeue result; d: muxer track id.
long a = System.currentTimeMillis();
BufferInfo mBufferInfo = new BufferInfo();
boolean b = true;
int c, d = 0;
do {
// NOTE(review): synchronizing on VideoRecording.this.b is a bug-in-waiting — the field
// is a reassigned Boolean, so the lock object changes identity between frames (and
// autoboxed Booleans are interned/shared). Use a dedicated `final Object` lock instead.
// The `continue` also makes this a busy-spin until a frame arrives; CameraToMpegTest
// uses wait()/notifyAll() here. TODO confirm against the original Grafika pattern.
synchronized (VideoRecording.this.b) {
if (!VideoRecording.this.b) continue; else VideoRecording.this.b = false;
}
// Latch the newest camera frame into the OES texture and fetch its transform.
mSurfaceTexture.updateTexImage();
mSurfaceTexture.getTransformMatrix(sm1);
GLES20.glClearColor(0, 0, 0, 1);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(program);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texName[0]);
// Position: first 3 floats of each 5-float (20-byte) vertex record.
mFloatBuffer.position(0);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
GLES20.glEnableVertexAttribArray(maPositionHandle);
// Texture coords: last 2 floats of each record (offset 3 floats).
mFloatBuffer.position(3);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
GLES20.glEnableVertexAttribArray(maTextureHandle);
Matrix.setIdentityM(sm2, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, sm2, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, sm1, 0);
// Draw the camera frame as a full-screen quad into the encoder surface.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
// Swapping buffers submits the frame to MediaCodec.
EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
// Stop after 10 s of wall-clock time; EOS tells the encoder to flush.
if (!(b = System.currentTimeMillis() - a < 10000)) mMediaCodec.signalEndOfInputStream();
// Drain the encoder: while recording, drain whatever is ready; after EOS (!b),
// keep draining until BUFFER_FLAG_END_OF_STREAM breaks out.
while ((c = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10000)) != MediaCodec.INFO_TRY_AGAIN_LATER || !b) {
if (c == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// The muxer may only be started once the actual output format (with SPS/PPS) is known.
d = mMediaMuxer.addTrack(mMediaCodec.getOutputFormat());
mMediaMuxer.start();
} else if (c >= 0) {
// Codec-config buffers are already carried in the track format; don't write them twice.
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) mBufferInfo.size = 0; else mMediaMuxer.writeSampleData(d, (ByteBuffer) mMediaCodec.getOutputBuffers()[c].position(mBufferInfo.offset).limit(mBufferInfo.offset + mBufferInfo.size), mBufferInfo);
mMediaCodec.releaseOutputBuffer(c, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
}
}
} while (b);
// --- Teardown: release codec, EGL state and the encoder surface.
// NOTE(review): mMediaMuxer.stop()/release() and mCamera.stopPreview() are not visible
// here — presumably done in the elided code; verify, otherwise the MP4 may be unplayable.
mMediaCodec.stop();
mMediaCodec.release();
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
mSurface.release();
...
@Override
// SurfaceTexture callback: a new camera frame is ready. Sets the shared flag that the
// render loop polls before calling updateTexImage().
// NOTE(review): locking on VideoRecording.this.b (a reassigned, autoboxed Boolean) does
// not guarantee mutual exclusion with the render loop — the lock object changes identity
// on every assignment and interned Booleans may be shared. A dedicated `final Object`
// lock (or wait/notify as in CameraToMpegTest) should be used on both sides.
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (VideoRecording.this.b) {
VideoRecording.this.b = true;
}
}
Practically, I imported all the code from the popular CameraToMpegTest.java and tried to make it as simple as possible by collapsing a lot of code into one block (shown above). I have been programming in Java for 3 years, but this is my first time using Android's OpenGL libraries. I have already read a lot of tutorials on this topic, but I found very little information about recording video via MediaMuxer and the built-in OpenGL libraries. Only the Grafika project contains something useful. How can I add a watermark (for example, R.mipmap.ic_launcher) to the video at specified coordinates? On the internet I found very little information about it; I saw this code on some forum:
// Load the watermark bitmap and upload it into a standard 2D GL texture.
// NOTE(review): this snippet uses the GL10 (OpenGL ES 1.x) wrapper object `gl`, while
// the recording pipeline above uses GLES20 static methods — port these calls to
// GLES20.* before mixing them into the encoder's EGL context.
Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_launcher);
//Generate one texture pointer...
gl.glGenTextures(1, textures, 0);
//...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
//Create Nearest Filtered Texture
// (This call was fused into the comment above in the original paste; restored here —
// without a MIN_FILTER setting the texture is incomplete on non-mipmapped setups.)
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
//Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
//Use the Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
//Clean up — the pixel data now lives in the GL texture, the Bitmap is no longer needed.
bitmap.recycle();
But I really have no idea where to put this code in my pipeline... I tried putting it practically everywhere, but either nothing happened or my video came out corrupted. Also, in one Stack Overflow question (I lost the link) programmers asserted that this requires two GLES20 programs... Please show me the correct code for adding a watermark to the video, and where to put it in my pipeline. Or is it perhaps possible to do this without OpenGL at all, using only MediaMuxer and MediaCodec?
Please don't suggest external libraries that are not built in, such as FFmpeg. I must do this using only built-in Android libraries. The minimum required API level for my app is 18 (Android 4.3.1).
@NizaSiwale, this is what I am currently getting:
but I want this:

