I'm trying to implement seeking through a video file with a MediaCodec decoder that outputs to a GLSurfaceView. The solution is based on the Bigflake examples and fadden's comments.
It works with a SurfaceView, but I'm running into trouble with GLSurfaceView: the rendered frame is always black.
View implementation:
class GLVideoView @JvmOverloads constructor(
    context: Context,
    attrs: AttributeSet? = null
) : GLSurfaceView(context, attrs),
    SurfaceTexture.OnFrameAvailableListener {

    private var outputSurface: OutputSurface? = null
    private var videoPlayer: VideoPlayer? = null
    private var videoFilePath: String? = null
    private var videoDuration: Int = 0
    private var videoWidth = 0
    private var videoHeight = 0

    private val renderer: Renderer

    init {
        setEGLContextClientVersion(2)
        renderer = VideoRender()
        setRenderer(renderer)
        renderMode = RENDERMODE_WHEN_DIRTY
    }

    // region Public API
    fun setVideoSource(videoFilePath: String) {
        this.videoFilePath = videoFilePath
        val metadataRetriever = MediaMetadataRetriever().apply { setDataSource(videoFilePath) }
        videoDuration = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION).toInt()
        videoWidth = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH).toInt()
        videoHeight = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT).toInt()
        try {
            val rotation = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION).toInt()
            if (rotation == 90 || rotation == 270) {
                val temp = videoWidth
                videoWidth = videoHeight
                videoHeight = temp
            }
        } catch (e: Exception) {
            // ignore
        }
    }

    override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
        ...
    }

    override fun onFrameAvailable(st: SurfaceTexture) {
        L.debug { "onFrameAvailable()" }
        outputSurface?.updateTextureImage()
        requestRender()
    }
    // endregion

    // region Private API
    private fun initVideoPlayer() {
        val filePath = videoFilePath ?: throw IllegalStateException("No video source!")
        outputSurface = OutputSurface(this)
        val surface = outputSurface?.surface ?: throw IllegalStateException("No surface created!")
        videoPlayer = VideoPlayer(filePath, outputSurface!!).apply { initialize(surface) }
    }
    // endregion

    companion object {
        private val L = Logger()
    }

    private inner class VideoRender : Renderer {

        override fun onDrawFrame(gl: GL10?) {
            L.debug { "onDrawFrame()" }
            outputSurface?.drawImage()
        }

        override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
            GLES20.glViewport(0, 0, width, height)
        }

        override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
            if (videoPlayer == null) {
                initVideoPlayer()
            }
        }
    }
}
OutputSurface comes from Bigflake, as does TextureRenderer (link).
Here is the basic decoder implementation:
internal class GLSyncVideoDecoder(
    private val mediaExtractor: VideoExtractor,
    private val outputSurface: OutputSurface
) : VideoFrameDecoder {

    private lateinit var mediaCodec: MediaCodec
    private lateinit var taskHandler: Handler
    private val uiHandler: Handler = Handler(Looper.getMainLooper())

    @Volatile
    private var isRunning = false

    @Throws(IOException::class)
    override fun initCodec(
        outSurface: Surface,
        inputFormat: MediaFormat,
        handlerThread: HandlerThread
    ): Boolean {
        taskHandler = Handler(handlerThread.looper)
        val mime = inputFormat.getString(MediaFormat.KEY_MIME) ?: return false
        mediaCodec = MediaCodec.createDecoderByType(mime).apply {
            configure(inputFormat, outSurface, null, 0)
            start()
        }
        return true
    }

    override fun decodeFrameAt(timeUs: Long) {
        if (isRunning) {
            L.debug { "!@# Skip 'seekTo()' at time: $timeUs" }
            return
        }
        isRunning = true
        taskHandler.post {
            mediaCodec.flush()
            seekTo(timeUs, mediaCodec)
            isRunning = false
        }
    }

    private fun seekTo(timeUs: Long, decoder: MediaCodec) {
        var outputDone = false
        var inputDone = false
        mediaExtractor.seekTo(timeUs, MediaExtractor.SEEK_TO_PREVIOUS_SYNC)
        val bufferInfo = MediaCodec.BufferInfo()
        outerloop@ while (true) {
            var ptUs = 0L
            // Feed more data to the decoder.
            if (!inputDone) {
                val inputBufIndex = decoder.dequeueInputBuffer(1000)
                if (inputBufIndex >= 0) {
                    val inputBuf = decoder.getInputBuffer(inputBufIndex)
                    val chunkSize = mediaExtractor.readSampleData(inputBuf!!, 0)
                    if (chunkSize < 0) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(
                            inputBufIndex,
                            0,
                            0,
                            0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM
                        )
                        inputDone = true
                    } else {
                        val presentationTimeUs = mediaExtractor.sampleTime
                        val flags = mediaExtractor.sampleFlags
                        ptUs = presentationTimeUs
                        decoder.queueInputBuffer(
                            inputBufIndex,
                            0,
                            chunkSize,
                            presentationTimeUs,
                            flags
                        )
                        mediaExtractor.advance()
                    }
                }
            }
            if (!outputDone) {
                val decoderStatus = decoder.dequeueOutputBuffer(bufferInfo, 1000)
                when {
                    decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER -> { }
                    decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED -> { }
                    decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> { }
                    decoderStatus < 0 -> throw RuntimeException("unexpected result from decoder.dequeueOutputBuffer: $decoderStatus")
                    else -> { // decoderStatus >= 0
                        if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            outputDone = true
                            break@outerloop
                        }
                        val presentationTimeUs = bufferInfo.presentationTimeUs
                        val validFrame = presentationTimeUs >= timeUs
                        val doRender = (bufferInfo.size != 0) && validFrame
                        decoder.releaseOutputBuffer(decoderStatus, doRender)
                        if (doRender) {
                            break@outerloop
                        }
                    }
                }
            }
        }
    }

    ...
}
How do I make TextureRenderer draw properly to the GLSurfaceView? What am I doing wrong? Is my OpenGL drawing incorrect, or is the GLSurfaceView not linked to the MediaCodec output surface?
Finally, I found the answer to my question: I followed the code from VideoSurfaceView.java. The fix is to keep OutputSurface on the Renderer thread and update the SurfaceTexture's texImage from the Renderer's onDrawFrame(), rather than from onFrameAvailable() as in the question code above.
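For illustration, here is a rough sketch of what that arrangement could look like. It reuses the OutputSurface, initVideoPlayer() and requestRender() names from the question code above; the frameAvailable flag (java.util.concurrent.atomic.AtomicBoolean) is my addition and not part of the original classes:

private inner class VideoRender : Renderer {

    // Set from onFrameAvailable() (which may run on an arbitrary thread),
    // consumed on the GL thread in onDrawFrame().
    private val frameAvailable = AtomicBoolean(false)

    fun notifyFrameAvailable() {
        frameAvailable.set(true)
    }

    override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
        // OutputSurface is created from here (via initVideoPlayer()),
        // so its SurfaceTexture belongs to the renderer thread's GL context.
        if (videoPlayer == null) {
            initVideoPlayer()
        }
    }

    override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
        GLES20.glViewport(0, 0, width, height)
    }

    override fun onDrawFrame(gl: GL10?) {
        // updateTexImage() may only be called on the thread that owns the GL
        // context containing the texture, i.e. here, not in onFrameAvailable().
        if (frameAvailable.compareAndSet(true, false)) {
            outputSurface?.updateTextureImage()
        }
        outputSurface?.drawImage()
    }
}

// onFrameAvailable() no longer touches the texture; it only schedules a redraw.
override fun onFrameAvailable(st: SurfaceTexture) {
    (renderer as VideoRender).notifyFrameAvailable()
    requestRender()
}

The onFrameAvailable() callback is not guaranteed to arrive on the GL thread, so it should only flag that a new frame is pending and request a render; the actual texImage update has to happen inside onDrawFrame().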
Hope it helps someone in the future. Thanks for your attention :)