Skip to content

Commit

Permalink
fix(*): improve orientation management for the various sources
Browse files Browse the repository at this point in the history
  • Loading branch information
ThibaultBee committed Nov 10, 2023
1 parent 64f2bb8 commit 5ae4592
Show file tree
Hide file tree
Showing 45 changed files with 496 additions and 439 deletions.
14 changes: 7 additions & 7 deletions DEVELOPER_README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
### Definitions

`Source`:
A class that represents an audio or video source. For example, a camera (`CameraCapture`), or a
microphone (`AudioCapture`).
A class that represents an audio or video source. For example, a camera (`CameraSource`), or a
microphone (`AudioSource`).

`Encoder`:
A class that represents an audio or video encoder. Only Android MediaCodec API is used (
Expand Down Expand Up @@ -76,15 +76,15 @@ Then these base streamers are specialized for a File or for a Live:
There are 2 types of sources:

- frames are captured in a `ByteBuffer`: such as a microphone. `ByteBuffer` sources
implement `IFrameCapture`.
implement `IFrameSource`.
- frames are passed to the encoder surface (video only): when the video source can write to
a `Surface`. Its purpose is to improve encoder performance. For example, it suits camera and
screen recorder. `Surface` sources implement `ISurfaceCapture`.
screen recorder. `Surface` sources implement `ISurfaceSource`.

To create a new audio source, implements a `IAudioCapture`. It inherits from `IFrameCapture`.
To create a new audio source, implement an `IAudioSource`. It inherits from `IFrameSource`.

To create a new video source, implements a `IVideoCapture`. It inherits from both `IFrameCapture`
and `ISurfaceCapture`. Always prefer to use a video source as a `Surface` source if it is possible.
To create a new video source, implement an `IVideoSource`. It inherits from both `IFrameSource`
and `ISurfaceSource`. Always prefer to use a video source as a `Surface` source if it is possible.

If your video source is a `Surface` source, set:

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ class TsAudioOnlyStreamerTest : AudioOnlyStreamerTestCase() {
class FlvAudioOnlyStreamerTest : AudioOnlyStreamerTestCase() {
override val streamer = BaseAudioOnlyStreamer(
context,
FlvMuxer(context, writeToFile = false),
FlvMuxer(writeToFile = false),
FakeEndpoint(),
)
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,12 +31,11 @@ import android.media.MediaFormat
import android.os.Build
import android.util.Size
import io.github.thibaultbee.streampack.internal.encoders.MediaCodecHelper
import io.github.thibaultbee.streampack.internal.utils.extensions.deviceOrientation
import io.github.thibaultbee.streampack.internal.utils.extensions.isDevicePortrait
import io.github.thibaultbee.streampack.internal.utils.extensions.isVideo
import io.github.thibaultbee.streampack.internal.utils.extensions.landscapize
import io.github.thibaultbee.streampack.internal.utils.extensions.portraitize
import io.github.thibaultbee.streampack.streamers.bases.BaseStreamer
import io.github.thibaultbee.streampack.utils.OrientationUtils
import java.security.InvalidParameterException
import kotlin.math.roundToInt

Expand Down Expand Up @@ -139,17 +138,8 @@ class VideoConfig(
* @param context activity context
* @return oriented resolution
*/
fun getDeviceOrientedResolution(context: Context) =
getOrientedResolution(context.deviceOrientation)

/**
* Get resolution according to orientation provided
*
* @param orientation the orientation
* @return oriented resolution
*/
fun getOrientedResolution(orientation: Int): Size {
return if (OrientationUtils.isPortrait(orientation)) {
fun getDeviceOrientedResolution(context: Context): Size {
return if (context.isDevicePortrait) {
resolution.portraitize()
} else {
resolution.landscapize()
Expand Down

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -25,23 +25,24 @@ import android.util.Size
import android.view.Surface
import io.github.thibaultbee.streampack.data.Config
import io.github.thibaultbee.streampack.data.VideoConfig
import io.github.thibaultbee.streampack.internal.gl.EGlSurface
import io.github.thibaultbee.streampack.internal.gl.EglWindowSurface
import io.github.thibaultbee.streampack.internal.gl.FullFrameRect
import io.github.thibaultbee.streampack.internal.gl.Texture2DProgram
import io.github.thibaultbee.streampack.internal.interfaces.IOrientationProvider
import io.github.thibaultbee.streampack.internal.interfaces.ISourceOrientationProvider
import io.github.thibaultbee.streampack.listeners.OnErrorListener
import java.util.concurrent.Executors

/**
* Encoder for video using MediaCodec.
*
 * @param useSurfaceMode to get video frames. If true, the encoder will use Surface mode, else Buffer mode with [IEncoderListener.onInputFrame].
* @param orientationProvider to get the orientation of the source. If null, the source will keep its original dimensions.
*/
class VideoMediaCodecEncoder(
encoderListener: IEncoderListener,
override val onInternalErrorListener: OnErrorListener,
private val useSurfaceMode: Boolean,
private val orientationProvider: IOrientationProvider
private val orientationProvider: ISourceOrientationProvider?
) :
MediaCodecEncoder<VideoConfig>(encoderListener) {
var codecSurface = if (useSurfaceMode) {
Expand All @@ -63,7 +64,7 @@ class VideoMediaCodecEncoder(

override fun onNewMediaCodec() {
mediaCodec?.let {
codecSurface?.surface = it.createInputSurface()
codecSurface?.outputSurface = it.createInputSurface()
}
}

Expand All @@ -86,10 +87,12 @@ class VideoMediaCodecEncoder(

override fun extendMediaFormat(config: Config, format: MediaFormat) {
val videoConfig = config as VideoConfig
orientationProvider.orientedSize(videoConfig.resolution).apply {
// Override previous format
format.setInteger(MediaFormat.KEY_WIDTH, width)
format.setInteger(MediaFormat.KEY_HEIGHT, height)
orientationProvider?.let {
it.getOrientedSize(videoConfig.resolution).apply {
// Override previous format
format.setInteger(MediaFormat.KEY_WIDTH, width)
format.setInteger(MediaFormat.KEY_HEIGHT, height)
}
}
}

Expand All @@ -107,10 +110,10 @@ class VideoMediaCodecEncoder(
get() = codecSurface?.inputSurface

class CodecSurface(
private val orientationProvider: IOrientationProvider
private val orientationProvider: ISourceOrientationProvider?
) :
SurfaceTexture.OnFrameAvailableListener {
private var eglSurface: EGlSurface? = null
private var eglSurface: EglWindowSurface? = null
private var fullFrameRect: FullFrameRect? = null
private var textureId = -1
private val executor = Executors.newSingleThreadExecutor()
Expand All @@ -119,9 +122,8 @@ class VideoMediaCodecEncoder(
val inputSurface: Surface?
get() = surfaceTexture?.let { Surface(surfaceTexture) }

var surface: Surface? = null
var outputSurface: Surface? = null
set(value) {

/**
* When surface is called twice without the stopStream(). When configure() is
* called twice for example,
Expand All @@ -141,19 +143,24 @@ class VideoMediaCodecEncoder(
}

private fun initOrUpdateSurfaceTexture(surface: Surface) {
eglSurface = ensureGlContext(EGlSurface(surface)) {
eglSurface = ensureGlContext(EglWindowSurface(surface)) {
val width = it.getWidth()
val height = it.getHeight()
val size =
orientationProvider?.getOrientedSize(Size(width, height)) ?: Size(width, height)
val orientation = orientationProvider?.orientation ?: 0
fullFrameRect = FullFrameRect(Texture2DProgram()).apply {
textureId = createTextureObject()
setMVPMatrixAndViewPort(
orientationProvider.orientation.toFloat(),
Size(width, height)
orientation.toFloat(),
size
)
}

val defaultBufferSize =
orientationProvider?.getDefaultBufferSize(size) ?: Size(width, height)
surfaceTexture = attachOrBuildSurfaceTexture(surfaceTexture).apply {
setDefaultBufferSize(maxOf(height, width), minOf(height, width))
setDefaultBufferSize(defaultBufferSize.width, defaultBufferSize.height)
setOnFrameAvailableListener(this@CodecSurface)
}
}
Expand All @@ -170,9 +177,9 @@ class VideoMediaCodecEncoder(
}

private fun ensureGlContext(
surface: EGlSurface?,
action: (EGlSurface) -> Unit
): EGlSurface? {
surface: EglWindowSurface?,
action: (EglWindowSurface) -> Unit
): EglWindowSurface? {
surface?.let {
it.makeCurrent()
action(it)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,15 +30,12 @@ import java.util.*
* (Contains mostly code borrowed from CameraX)
*/

class EGlSurface(private val surface: Surface) {
class EglWindowSurface(private val surface: Surface) {
private var eglDisplay: EGLDisplay = EGL14.EGL_NO_DISPLAY
private var eglContext: EGLContext = EGL14.EGL_NO_CONTEXT
private var eglSurface: EGLSurface = EGL14.EGL_NO_SURFACE
private val configs = arrayOfNulls<EGLConfig>(1)

private var width = 0
private var height = 0

companion object {
private const val EGL_RECORDABLE_ANDROID = 0x3142
}
Expand Down Expand Up @@ -92,8 +89,6 @@ class EGlSurface(private val surface: Surface) {

// Create a window surface, and attach it to the Surface we received.
createEGLSurface()
width = getWidth()
height = getHeight()
}

private fun createEGLSurface() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,15 @@
*/
package io.github.thibaultbee.streampack.internal.interfaces

import android.graphics.SurfaceTexture
import android.util.Size

/**
* Interface to get the orientation of the capture surface.
 * This information is used to rotate the frames in the codec surface if the source needs to be rotated.
* It might not be the case for certain sources.
*/
interface IOrientationProvider {
interface ISourceOrientationProvider {
/**
* Orientation in degrees of the surface.
* Expected values: 0, 90, 180, 270.
Expand All @@ -29,6 +32,15 @@ interface IOrientationProvider {

/**
* Return the size with the correct orientation.
* If orientation is portrait, it returns a portrait size.
* Example:
* - Size = 1920x1080, if orientation is portrait, it returns 1080x1920.
*/
fun orientedSize(size: Size): Size
fun getOrientedSize(size: Size): Size

/**
* Return the size for [SurfaceTexture.setDefaultBufferSize].
* Override this method if the image is stretched.
*/
fun getDefaultBufferSize(size: Size) = size
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,13 @@ package io.github.thibaultbee.streampack.internal.muxers

import io.github.thibaultbee.streampack.data.Config
import io.github.thibaultbee.streampack.internal.data.Frame
import io.github.thibaultbee.streampack.internal.interfaces.IOrientationProvider
import io.github.thibaultbee.streampack.internal.interfaces.ISourceOrientationProvider
import io.github.thibaultbee.streampack.internal.interfaces.Streamable

interface IMuxer: Streamable<Unit> {
interface IMuxer : Streamable<Unit> {
val helper: IMuxerHelper

var orientationProvider: IOrientationProvider
var sourceOrientationProvider: ISourceOrientationProvider?
var listener: IMuxerListener?

fun encode(frame: Frame, streamPid: Int)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,11 @@
*/
package io.github.thibaultbee.streampack.internal.muxers.flv

import android.content.Context
import io.github.thibaultbee.streampack.data.Config
import io.github.thibaultbee.streampack.internal.data.Frame
import io.github.thibaultbee.streampack.internal.data.Packet
import io.github.thibaultbee.streampack.internal.data.PacketType
import io.github.thibaultbee.streampack.internal.interfaces.IOrientationProvider
import io.github.thibaultbee.streampack.internal.interfaces.ISourceOrientationProvider
import io.github.thibaultbee.streampack.internal.muxers.IMuxer
import io.github.thibaultbee.streampack.internal.muxers.IMuxerListener
import io.github.thibaultbee.streampack.internal.muxers.flv.tags.AVTagsFactory
Expand All @@ -31,7 +30,6 @@ import io.github.thibaultbee.streampack.internal.utils.extensions.isAudio
import io.github.thibaultbee.streampack.internal.utils.extensions.isVideo

class FlvMuxer(
private val context: Context,
override var listener: IMuxerListener? = null,
initialStreams: List<Config>? = null,
private val writeToFile: Boolean,
Expand All @@ -49,7 +47,7 @@ class FlvMuxer(
initialStreams?.let { streams.addAll(it) }
}

override lateinit var orientationProvider: IOrientationProvider
override var sourceOrientationProvider: ISourceOrientationProvider? = null

override fun encode(frame: Frame, streamPid: Int) {
if (!hasFirstFrame) {
Expand All @@ -60,6 +58,7 @@ class FlvMuxer(
startUpTime = frame.pts
hasFirstFrame = true
} else {
// Drop
return
}
} else {
Expand Down Expand Up @@ -113,7 +112,7 @@ class FlvMuxer(
// Metadata
listener?.onOutputFrame(
Packet(
OnMetadata(orientationProvider, streams).write(),
OnMetadata.fromConfigs(streams, sourceOrientationProvider).write(),
TimeUtils.currentTime()
)
)
Expand Down
Loading

0 comments on commit 5ae4592

Please sign in to comment.