Skip to content

feat: AndroidUrlMediaSource, Tests, Comments and Refactoring for MediaSourceConfiguration classes. #3642

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -28,51 +28,61 @@
import android.util.Log;
import android.view.Surface;

import com.amazonaws.kinesisvideo.client.mediasource.CameraMediaSourceConfiguration;
import com.amazonaws.kinesisvideo.client.mediasource.AbstractMediaSourceConfiguration;

/**
* Factory class to create and configure encoders based on a given media configuration.
*/
public class EncoderFactory {
private static final String TAG = EncoderFactory.class.getSimpleName();
private static final Surface NULL_SURFACE = null;
private static final MediaCrypto NULL_CRYPTO = null;
private static final int IFRAME_EVERY_2_SEC = 2;

/**
* Creates and configure emcoders based on a given media configuration.
* @param mediaSourceConfiguration The MediaSourceConfiguration to be used to configure the encoder.
* @return The encoder MediaCodec object.
*/
public static MediaCodec createConfiguredEncoder(
final CameraMediaSourceConfiguration mediaSourceConfiguration) {
final AbstractMediaSourceConfiguration mediaSourceConfiguration) {

return createMediaCodec(mediaSourceConfiguration);
}

private static MediaCodec createMediaCodec(final CameraMediaSourceConfiguration mediaSourceConfiguration) {
/**
* Helper fucntion to create a MediaCodec and configure it based on the given MediaSourceConfiguration.
* @param mediaSourceConfiguration The MediaSourceConfiguration to be used to configure the encoder.
* @return The encoder MediaCodec object.
*/
private static MediaCodec createMediaCodec(final AbstractMediaSourceConfiguration mediaSourceConfiguration) {
try {
final MediaCodec encoder = MediaCodec.createEncoderByType(mediaSourceConfiguration.getEncoderMimeType());
try {
encoder.configure(
configureMediaFormat(mediaSourceConfiguration,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar),
NULL_SURFACE,
NULL_CRYPTO,
MediaCodec.CONFIGURE_FLAG_ENCODE);
logSupportedColorFormats(encoder, mediaSourceConfiguration);
} catch (MediaCodec.CodecException e) {
Log.d(TAG, "Failed configuring MediaCodec with Semi-planar pixel format, falling back to planar");

encoder.configure(
configureMediaFormat(mediaSourceConfiguration,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar),
NULL_SURFACE,
NULL_CRYPTO,
MediaCodec.CONFIGURE_FLAG_ENCODE);
logSupportedColorFormats(encoder, mediaSourceConfiguration);
}

// Use YUV420Flexible to be able to support a wide range of devices and scenarios.
encoder.configure(
configureMediaFormat(mediaSourceConfiguration,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible),
NULL_SURFACE,
NULL_CRYPTO,
MediaCodec.CONFIGURE_FLAG_ENCODE);

logSupportedColorFormats(encoder, mediaSourceConfiguration);
return encoder;

} catch (final IOException e) {
throw new RuntimeException("unable to create encoder", e);
}
}

/**
* Helper function to create and prepare a MediaFormat matching the provided MediaSourceConfiguration.
* @param mediaSourceConfiguration The MediaSourceConfiguration to be used to configure the encoder.
* @param colorFormat The MediaFormat object based on the provided configuration.
* @return
*/
private static MediaFormat configureMediaFormat(
final CameraMediaSourceConfiguration mediaSourceConfiguration,
final AbstractMediaSourceConfiguration mediaSourceConfiguration,
final int colorFormat) {

Log.d(TAG, mediaSourceConfiguration.getEncoderMimeType() + " output "
Expand All @@ -98,9 +108,14 @@ private static MediaFormat configureMediaFormat(
return format;
}

/**
* Debugging helper function to log all supported color formats of a given encoder.
* @param encoder The MediaCodec encoder to be inspected.
* @param mediaSourceConfiguration The MediaSourceConfiguration used to configure the encoder.
*/
private static void logSupportedColorFormats(
final MediaCodec encoder,
final CameraMediaSourceConfiguration mediaSourceConfiguration) {
final AbstractMediaSourceConfiguration mediaSourceConfiguration) {

final MediaCodecInfo.CodecCapabilities capabilities =
encoder.getCodecInfo().getCapabilitiesForType(mediaSourceConfiguration.getEncoderMimeType());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,22 +21,20 @@
import android.media.MediaCodec;
import android.util.Log;

import com.amazonaws.kinesisvideo.client.mediasource.CameraMediaSourceConfiguration;
import com.amazonaws.kinesisvideo.client.mediasource.AbstractMediaSourceConfiguration;
import com.amazonaws.kinesisvideo.producer.KinesisVideoFrame;
import com.amazonaws.mobileconnectors.kinesisvideo.util.FrameUtility;

import java.nio.ByteBuffer;

/**
* Wrapper class around MediaCodec.
* Accepts raw frame data in YUV420 format as an input, sends it to the encoder,
* notifies the listeners when encoding is complete.
* All happens on the same thread
* Wrapper class around MediaCodec. It accepts raw frame data in YUV420 format as an input, sends it
* to the encoder, and notifies the listeners when encoding is complete. All happens on the same thread.
*/
public class EncoderWrapper {
private static final String TAG = EncoderWrapper.class.getSimpleName();
private static final int TIMEOUT_USEC = 10000;
private final CameraMediaSourceConfiguration mMediaSourceConfiguration;
private final AbstractMediaSourceConfiguration mMediaSourceConfiguration;
private MediaCodec mEncoder;
private EncoderFrameSubmitter mEncoderFrameSubmitter;
private long mLastRecordedFrameTimestamp = 0;
Expand All @@ -47,56 +45,90 @@ public class EncoderWrapper {
private int mFrameIndex;
private long mFragmentStart = 0;

/**
* Listener interface for receiving encoded frames.
*/
public interface FrameAvailableListener {

/**
* Called when a new encoded frame is available.
* @param frame The newly available frame.
*/
void onFrameAvailable(final KinesisVideoFrame frame);
}
/**
 * Interface for codec private data listeners.
 */
public interface CodecPrivateDataAvailableListener {

    /**
     * Called when new codec private data is available.
     * @param privateData The codec private data bytes.
     */
    void onCodecPrivateDataAvailable(final byte[] privateData);
}
/**
 * Creates a new EncoderWrapper using the given MediaSourceConfiguration.
 * The encoder is created, configured, and started immediately.
 * @param mediaSourceConfiguration MediaSourceConfiguration to use.
 */
public EncoderWrapper(final AbstractMediaSourceConfiguration mediaSourceConfiguration) {
    mMediaSourceConfiguration = mediaSourceConfiguration;
    initEncoder();
}

/**
* Helper function to initialize the EncoderWrapper using the MediaSourceConfiguration.
* It creates, configures and starts an encoder.
*/
private void initEncoder() {
mBufferInfo = new MediaCodec.BufferInfo();
mEncoder = EncoderFactory.createConfiguredEncoder(mMediaSourceConfiguration);
mEncoderFrameSubmitter = new EncoderFrameSubmitter(mEncoder);
mEncoder.start();
}

public void setCodecPrivateDataAvailableListener(
final CodecPrivateDataAvailableListener listener) {

/**
* Sets a listener for codec private data.
* @param listener The listener object.
*/
public void setCodecPrivateDataAvailableListener(final CodecPrivateDataAvailableListener listener) {
mCodecPrivateDataListener = listener;
}

/**
* Sets the listener to be notified each time an encoded frame is available.
* @param listener The listener object.
*/
public void setEncodedFrameAvailableListener(final FrameAvailableListener listener) {
mFrameAvailableListener = listener;
}

public void encodeFrame(final Image frameImageYUV420,
final boolean endOfStream) {

/**
* Encodes a frame using the available encoder, and processes the output of the encoder as well.
* @param frameImageYUV420 The frame to be encoded.
* @param endOfStream True if this is the last frame.
*/
public void encodeFrame(final Image frameImageYUV420, final boolean endOfStream) {
// Edge case.
if (mIsStopped) {
Log.w(TAG, "received a frame to encode after already stopped. returning");
return;
}

// Submit the frame to encoder using EncoderFrameSubmitter.
Log.d(TAG, "encoding frame" + threadId());

mEncoderFrameSubmitter.submitFrameToEncoder(frameImageYUV420, endOfStream);

Log.d(TAG, "frame sent to encoder" + threadId());

// Process output from encoder.
getDataFromEncoder(endOfStream);

Log.d(TAG, "frame encoded" + threadId());
}


/**
* Dequeues an output buffer from the encoder and processes it based on the return value from dequeueOutputBuffer.
* @param endOfStream True if the stream has ended.
*/
private void getDataFromEncoder(final boolean endOfStream) {
boolean stopReadingFromEncoder = false;
while(!stopReadingFromEncoder) {
Expand All @@ -110,80 +142,121 @@ private void getDataFromEncoder(final boolean endOfStream) {
}
stopReadingFromEncoder = true;
break;

case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
break;

default:
if (outputBufferId < 0) {
Log.w(TAG, "unexpected encoder output buffer id: " + outputBufferId);
break;
}

// Process output buffer.
processEncoderOutputBuffer(outputBufferId);

// Handle end of stream.
if (isEndOfStream()) {
stopReadingFromEncoder = true;
}

// Break after processing one buffer.
break;
}
}
}

/**
 * Processes a single output buffer returned by the encoder, then releases it back to the codec.
 * @param outputBufferId Output buffer ID as returned by dequeueOutputBuffer.
 * @throws RuntimeException if the encoder returns a null buffer for the given ID.
 */
private void processEncoderOutputBuffer(final int outputBufferId) {
    // Zero-length buffers carry no data; just hand the buffer back.
    if (mBufferInfo.size == 0) {
        Log.w(TAG, "empty buffer " + outputBufferId);
        mEncoder.releaseOutputBuffer(outputBufferId, false);
        return;
    }

    final ByteBuffer outputData = mEncoder.getOutputBuffer(outputBufferId);
    if (outputData == null) {
        throw new RuntimeException("encoder output buffer " + outputBufferId + " is null");
    }

    // Hand the contents off for processing, then return the buffer to the codec.
    processEncodedData(outputData);
    mEncoder.releaseOutputBuffer(outputBufferId, false);
}

/**
 * Processes the data from the output buffer: positions the buffer over the valid
 * region, then dispatches it as codec private data, end of stream, or a regular frame.
 * @param encodedData Data from the output buffer.
 */
private void processEncodedData(final ByteBuffer encodedData) {
    // Position the buffer. (Single call suffices; the adjustment is idempotent.)
    adjustEncodedDataPosition(encodedData);

    // Handle codec private data.
    if (isCodecPrivateData()) {
        notifyCodecPrivateDataAvailable(encodedData);
        return;
    }

    // Handle end of stream.
    if (isEndOfStream()) {
        Log.d(TAG, "end of stream reached");
        return;
    }

    // Send the data to KVS producer SDK.
    sendEncodedFrameToProducerSDK(encodedData);
}

/**
* Helper function to position a given buffer over the valid data region
* described by mBufferInfo (offset to offset + size).
* @param encodedData The buffer to be adjusted.
*/
private void adjustEncodedDataPosition(final ByteBuffer encodedData) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
}

/**
* Helper function to check the current buffer info for the end-of-stream flag.
* @return True if BUFFER_FLAG_END_OF_STREAM is set in mBufferInfo.flags.
*/
private boolean isEndOfStream() {
return (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
}

/**
* Helper function to check the current buffer info for the codec-config flag.
* @return True if BUFFER_FLAG_CODEC_CONFIG is set in mBufferInfo.flags.
*/
private boolean isCodecPrivateData() {
return (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
}

/**
 * Helper function to notify the codec private data listener.
 * @param codecPrivateDataBuffer The data to be passed to the listener.
 */
private void notifyCodecPrivateDataAvailable(final ByteBuffer codecPrivateDataBuffer) {
    Log.d(TAG, "got codec private data");
    final byte[] codecPrivateDataArray = convertToArray(codecPrivateDataBuffer);
    mCodecPrivateDataListener.onCodecPrivateDataAvailable(codecPrivateDataArray);
}

/**
* Helper function to notify frame listener. It uses the data and system time to create a Frame
* to be sent to the listener.
* @param encodedData The frame data to be passed to the listener.
*/
private void sendEncodedFrameToProducerSDK(final ByteBuffer encodedData) {
final long currentTime = System.currentTimeMillis();
Log.d(TAG, "time between frames: " + (currentTime - mLastRecordedFrameTimestamp) + "ms");
Expand All @@ -203,23 +276,39 @@ private void sendEncodedFrameToProducerSDK(final ByteBuffer encodedData) {
frameData));
}

/**
* Stops and releases the encoder. Also updates the stopped flag so that
* frames received afterwards are ignored by encodeFrame.
*/
public void stop() {
Log.d(TAG, "stopping encoder");
mIsStopped = true;
mEncoder.stop();
mEncoder.release();
}

/**
 * Helper function to copy the remaining bytes of a ByteBuffer into a new byte array.
 * Note that reading the buffer advances its position to its limit.
 * @param byteBuffer The ByteBuffer to be converted.
 * @return The byte[] after conversion.
 */
private byte[] convertToArray(final ByteBuffer byteBuffer) {
    final byte[] bytes = new byte[byteBuffer.remaining()];
    byteBuffer.get(bytes);
    return bytes;
}

/**
 * Helper function producing a log suffix containing the calling thread's ID.
 * @return A string of the form " | threadId=&lt;id&gt;".
 */
private static String threadId() {
    final long currentThreadId = Thread.currentThread().getId();
    return " | threadId=" + currentThreadId;
}

/**
* Helper function to sleep for the given millis.
* @param ms Time in millis to sleep.
*/
private static void sleep(final int ms) {
try {
Thread.sleep(ms);
Expand Down
Loading