Skip to content

Commit d0881a3

Browse files
committed
fix: conflict
2 parents b68a832 + 2e2d097 commit d0881a3

File tree

6 files changed

+250
-7
lines changed

6 files changed

+250
-7
lines changed

android/build.gradle

+6-3
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ android {
4242
}
4343

4444
defaultConfig {
45-
minSdkVersion 21
45+
minSdkVersion 23
4646
testInstrumentationRunner 'androidx.test.runner.AndroidJUnitRunner'
4747
consumerProguardFiles 'proguard-rules.pro'
4848

@@ -69,13 +69,16 @@ android {
6969
}
7070

7171
dependencies {
72-
implementation 'io.github.webrtc-sdk:android:114.5735.02'
72+
implementation 'io.github.webrtc-sdk:android:114.5735.02'
7373
implementation 'com.twilio:audioswitch:1.1.8'
7474
implementation 'androidx.annotation:annotation:1.1.0'
7575
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
7676

7777
// ML Kit
78-
implementation 'com.google.mlkit:segmentation-selfie:16.0.0-beta4'
78+
implementation 'com.google.mlkit:segmentation-selfie:16.0.0-beta3'
7979

80+
// Libyuv
81+
implementation "io.github.crow-misia.libyuv:libyuv-android:0.28.0"
82+
implementation 'androidx.camera:camera-core:1.0.2'
8083
// implementation files('libwebrtc.aar')
8184
}

android/local.properties

+2-2
Original file line numberDiff line numberDiff line change
@@ -4,5 +4,5 @@
44
# Location of the SDK. This is only used by Gradle.
55
# For customization when using a Version Control System, please read the
66
# header note.
7-
#Sat May 20 23:50:57 ICT 2023
8-
sdk.dir=/home/lambiengcode/Android/Sdk
7+
#Tue Jul 18 10:35:26 ICT 2023
8+
sdk.dir=/Users/lambiengcode/Library/Android/sdk

android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java

+239
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@
88
import android.content.Context;
99
import android.content.Intent;
1010
import android.content.pm.PackageManager;
11+
import android.graphics.Bitmap;
12+
import android.graphics.ImageFormat;
1113
import android.hardware.Camera;
1214
import android.hardware.Camera.Parameters;
1315
import android.hardware.camera2.CameraAccessException;
@@ -35,6 +37,7 @@
3537
import android.view.Surface;
3638
import android.view.WindowManager;
3739

40+
import androidx.annotation.NonNull;
3841
import androidx.annotation.Nullable;
3942
import androidx.annotation.RequiresApi;
4043

@@ -51,6 +54,14 @@
5154
import com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils;
5255
import com.cloudwebrtc.webrtc.utils.ObjectType;
5356
import com.cloudwebrtc.webrtc.utils.PermissionUtils;
57+
import com.google.android.gms.tasks.OnFailureListener;
58+
import com.google.android.gms.tasks.OnSuccessListener;
59+
import com.google.mlkit.common.MlKitException;
60+
import com.google.mlkit.vision.common.InputImage;
61+
import com.google.mlkit.vision.segmentation.Segmentation;
62+
import com.google.mlkit.vision.segmentation.SegmentationMask;
63+
import com.google.mlkit.vision.segmentation.Segmenter;
64+
import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions;
5465

5566
import org.webrtc.AudioSource;
5667
import org.webrtc.AudioTrack;
@@ -61,14 +72,19 @@
6172
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
6273
import org.webrtc.CameraEnumerator;
6374
import org.webrtc.CameraVideoCapturer;
75+
import org.webrtc.JavaI420Buffer;
6476
import org.webrtc.MediaConstraints;
6577
import org.webrtc.MediaStream;
6678
import org.webrtc.MediaStreamTrack;
6779
import org.webrtc.PeerConnectionFactory;
6880
import org.webrtc.SurfaceTextureHelper;
6981
import org.webrtc.VideoCapturer;
82+
import org.webrtc.VideoFrame;
83+
import org.webrtc.VideoProcessor;
84+
import org.webrtc.VideoSink;
7085
import org.webrtc.VideoSource;
7186
import org.webrtc.VideoTrack;
87+
import org.webrtc.YuvHelper;
7288
import org.webrtc.audio.JavaAudioDeviceModule;
7389
import org.webrtc.audio.WebRtcAudioTrackUtils;
7490

@@ -83,6 +99,14 @@
8399

84100
import io.flutter.plugin.common.MethodChannel.Result;
85101

102+
import android.graphics.Bitmap;
103+
import android.graphics.BitmapFactory;
104+
import android.graphics.Canvas;
105+
import android.graphics.PorterDuff;
106+
import android.media.Image;
107+
import android.util.Log;
108+
import androidx.camera.core.ImageProxy;
109+
86110
/**
87111
* The implementation of {@code getUserMedia} extracted into a separate file in order to reduce
88112
* complexity and to (somewhat) separate concerns.
@@ -830,6 +854,221 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi
830854
return trackParams;
831855
}
832856

857+
void setVirtualBackground() {
858+
vbVideoSource.setVideoProcessor(new VideoProcessor() {
859+
@Override
860+
public void onCapturerStarted(boolean success) {
861+
// Xử lý khi bắt đầu capture video
862+
}
863+
864+
@Override
865+
public void onCapturerStopped() {
866+
// Xử lý khi dừng capture video
867+
}
868+
869+
@Override
870+
public void onFrameCaptured(VideoFrame frame) {
871+
// Chuyển đổi frame thành bitmap
872+
Bitmap bitmap = videoFrameToBitmap(frame);
873+
874+
// Xử lý segment với bitmap
875+
processSegmentation(bitmap);
876+
}
877+
878+
@Override
879+
public void setSink(VideoSink sink) {
880+
// Lưu sink để gửi frame đã được cập nhật trở lại WebRTC
881+
// Sink sẽ được sử dụng sau khi xử lý segment
882+
vbVideoSink = sink;
883+
}
884+
});
885+
}
886+
887+
public Bitmap videoFrameToBitmap(VideoFrame videoFrame) {
888+
VideoFrame.Buffer buffer = videoFrame.getBuffer();
889+
int width = buffer.getWidth();
890+
int height = buffer.getHeight();
891+
892+
if (buffer instanceof VideoFrame.TextureBuffer) {
893+
// Không hỗ trợ trực tiếp chuyển đổi từ TextureBuffer sang Bitmap
894+
return null;
895+
} else if (buffer instanceof VideoFrame.I420Buffer) {
896+
VideoFrame.I420Buffer i420Buffer = (VideoFrame.I420Buffer) buffer;
897+
898+
int ySize = width * height;
899+
int uvSize = width * height / 4;
900+
901+
ByteBuffer dataY = i420Buffer.getDataY();
902+
ByteBuffer dataU = i420Buffer.getDataU();
903+
ByteBuffer dataV = i420Buffer.getDataV();
904+
905+
byte[] dataYArray = new byte[ySize];
906+
byte[] dataUArray = new byte[uvSize];
907+
byte[] dataVArray = new byte[uvSize];
908+
909+
dataY.get(dataYArray);
910+
dataU.get(dataUArray);
911+
dataV.get(dataVArray);
912+
913+
// Chuyển đổi từ YUV sang RGB
914+
int[] rgbData = convertYUVtoRGB(dataYArray, dataUArray, dataVArray, width, height);
915+
916+
// Tạo Bitmap từ dữ liệu RGB
917+
Bitmap bitmap = Bitmap.createBitmap(rgbData, width, height, Bitmap.Config.ARGB_8888);
918+
919+
return bitmap;
920+
}
921+
922+
return null;
923+
}
924+
925+
private int[] convertYUVtoRGB(byte[] yData, byte[] uData, byte[] vData, int width, int height) {
926+
int[] rgbData = new int[width * height];
927+
int uvIndex = 0;
928+
int yOffset = 0;
929+
930+
for (int y = 0; y < height; y++) {
931+
int uvRowStart = uvIndex;
932+
int uvRowOffset = y >> 1;
933+
934+
for (int x = 0; x < width; x++) {
935+
int yIndex = yOffset + x;
936+
int uvIndexOffset = uvRowStart + (x >> 1);
937+
938+
int yValue = yData[yIndex] & 0xFF;
939+
int uValue = uData[uvIndexOffset] & 0xFF;
940+
int vValue = vData[uvIndexOffset] & 0xFF;
941+
942+
int r = yValue + (int) (1.370705f * (vValue - 128));
943+
int g = yValue - (int) (0.698001f * (vValue - 128)) - (int) (0.337633f * (uValue - 128));
944+
int b = yValue + (int) (1.732446f * (uValue - 128));
945+
946+
r = Math.max(0, Math.min(255, r));
947+
g = Math.max(0, Math.min(255, g));
948+
b = Math.max(0, Math.min(255, b));
949+
950+
int pixelColor = 0xFF000000 | (r << 16) | (g << 8) | b;
951+
rgbData[y * width + x] = pixelColor;
952+
}
953+
954+
if (y % 2 == 1) {
955+
uvIndex = uvRowStart + width / 2;
956+
yOffset += width;
957+
}
958+
}
959+
960+
return rgbData;
961+
}
962+
963+
private void processSegmentation(Bitmap bitmap) {
964+
// Tạo InputImage từ bitmap
965+
InputImage inputImage = InputImage.fromBitmap(bitmap, 0);
966+
967+
// Xử lý phân đoạn
968+
segmenter.process(inputImage)
969+
.addOnSuccessListener(new OnSuccessListener<SegmentationMask>() {
970+
@Override
971+
public void onSuccess(@NonNull SegmentationMask segmentationMask) {
972+
// Xử lý khi phân đoạn thành công
973+
ByteBuffer mask = segmentationMask.getBuffer();
974+
int maskWidth = segmentationMask.getWidth();
975+
int maskHeight = segmentationMask.getHeight();
976+
mask.rewind();
977+
978+
// Chuyển đổi buffer thành mảng màu
979+
int[] colors = maskColorsFromByteBuffer(mask, maskWidth, maskHeight);
980+
981+
// Tạo bitmap đã được phân đoạn từ mảng màu
982+
Bitmap segmentedBitmap = createBitmapFromColors(colors, maskWidth, maskHeight);
983+
984+
// Vẽ ảnh nền đã phân đoạn lên canvas
985+
Bitmap outputBitmap = drawSegmentedBackground(segmentedBitmap, segmentedBitmap);
986+
987+
// Tạo VideoFrame mới từ bitmap đã xử lý
988+
int frameRotation = 180; // Frame rotation angle (customize as needed)
989+
long frameTimestamp = System.nanoTime(); // Frame timestamp (customize as needed)
990+
VideoFrame outputVideoFrame = createVideoFrame(outputBitmap, frameRotation, frameTimestamp);
991+
992+
// Gửi frame đã được cập nhật trở lại WebRTC
993+
vbVideoSink.onFrame(outputVideoFrame);
994+
}
995+
})
996+
.addOnFailureListener(new OnFailureListener() {
997+
@Override
998+
public void onFailure(@NonNull Exception exception) {
999+
// Xử lý khi phân đoạn thất bại
1000+
Log.e(TAG, "Segmentation failed: " + exception.getMessage());
1001+
}
1002+
});
1003+
}
1004+
1005+
private Bitmap drawSegmentedBackground(Bitmap segmentedBitmap, Bitmap backgroundBitmap) {
1006+
Bitmap outputBitmap = Bitmap.createBitmap(
1007+
segmentedBitmap.getWidth(), segmentedBitmap.getHeight(), Bitmap.Config.ARGB_8888
1008+
);
1009+
Canvas canvas = new Canvas(outputBitmap);
1010+
1011+
// Vẽ ảnh nền đã phân đoạn lên canvas
1012+
canvas.drawBitmap(backgroundBitmap, 0, 0, null);
1013+
canvas.drawBitmap(segmentedBitmap, 0, 0, null);
1014+
1015+
return outputBitmap;
1016+
}
1017+
1018+
private VideoFrame createVideoFrame(Bitmap bitmap, int rotation, long timestampNs) {
1019+
ByteBuffer buffer = ByteBuffer.allocate(bitmap.getByteCount());
1020+
bitmap.copyPixelsToBuffer(buffer);
1021+
byte[] data = buffer.array();
1022+
1023+
int width = bitmap.getWidth();
1024+
int height = bitmap.getHeight();
1025+
int strideY = width;
1026+
int strideU = (width + 1) / 2;
1027+
int strideV = (width + 1) / 2;
1028+
1029+
byte[] dataU = new byte[width * height / 4];
1030+
byte[] dataV = new byte[width * height / 4];
1031+
for (int i = 0; i < width * height / 4; i++) {
1032+
dataU[i] = data[width * height + i];
1033+
dataV[i] = data[width * height + width * height / 4 + i];
1034+
}
1035+
1036+
Runnable releaseCallback = () -> {
1037+
// Thực hiện các thao tác giải phóng tài nguyên liên quan tại đây (nếu có)
1038+
};
1039+
1040+
VideoFrame.I420Buffer i420Buffer = JavaI420Buffer.wrap(
1041+
width,
1042+
height,
1043+
ByteBuffer.wrap(data),
1044+
strideY,
1045+
ByteBuffer.wrap(dataU),
1046+
strideU, ByteBuffer.wrap(dataV), strideV, releaseCallback
1047+
);
1048+
1049+
return new VideoFrame(i420Buffer, rotation, timestampNs);
1050+
}
1051+
1052+
1053+
// Hàm chuyển đổi buffer thành mảng màu
1054+
private int[] maskColorsFromByteBuffer(ByteBuffer buffer, int width, int height) {
1055+
// Chuyển đổi từ ByteBuffer thành mảng màu, tùy thuộc vào định dạng màu
1056+
// của buffer. Đảm bảo bạn sử dụng đúng định dạng màu tương ứng với
1057+
// phân đoạn của ML Kit.
1058+
// Trong ví dụ này, chúng tôi giả định rằng buffer có định dạng ARGB_8888.
1059+
1060+
// Ví dụ: chuyển đổi từ ByteBuffer thành mảng ARGB_8888
1061+
int[] colors = new int[width * height];
1062+
buffer.asIntBuffer().get(colors);
1063+
1064+
return colors;
1065+
}
1066+
1067+
// Hàm tạo bitmap từ mảng màu
1068+
private Bitmap createBitmapFromColors(int[] colors, int width, int height) {
1069+
return Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888);
1070+
}
1071+
8331072
void removeVideoCapturerSync(String id) {
8341073
synchronized (mVideoCapturers) {
8351074
// Dispose Virtual Background

example/android/build.gradle

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ buildscript {
66
}
77

88
dependencies {
9-
classpath 'com.android.tools.build:gradle:7.3.0'
9+
classpath 'com.android.tools.build:gradle:7.4.2'
1010
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
1111
}
1212
}

ios/flutter_webrtc.podspec

+1-1
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,6 @@ A new flutter plugin project.
1616
s.public_header_files = 'Classes/**/*.h'
1717
s.dependency 'Flutter'
1818
s.dependency 'WebRTC-lbc', '116.5845.02'
19-
s.ios.deployment_target = '10.0'
19+
s.ios.deployment_target = '11.0'
2020
s.static_framework = true
2121
end

pubspec.yaml

+1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ name: flutter_webrtc
22
description: Flutter WebRTC plugin for iOS/Android/Desktop/Web, based on GoogleWebRTC.
33
version: 0.9.36+2
44
homepage: https://github.com/cloudwebrtc/flutter-webrtc
5+
publish_to: none
56
environment:
67
sdk: '>=2.12.0 <4.0.0'
78
flutter: '>=1.22.0'

0 commit comments

Comments
 (0)