Trouble while publishing a VideoCapture feed using OpenCV: the camera gets blurred when displayed on meet.livekit.io/custom #170

Open
rubygobay opened this issue Feb 19, 2024 · 1 comment

@rubygobay

Here is my result:

(image: screenshot of the blurred camera feed)

Here is my code:

import asyncio
import logging

import cv2
import numpy as np
from livekit import rtc

# WIDTH and HEIGHT are defined elsewhere in the script (not shown in the original snippet).

async def main(room: rtc.Room):
    url = "LINK"
    logging.info("connecting to %s", url)
    try:
        await room.connect(url,
                           "TOKEN")
        logging.info("connected to room %s", room.name)
    except rtc.ConnectError as e:
        logging.error("failed to connect to the room: %s", e)
        return

    # publish a track
    source = rtc.VideoSource(WIDTH, HEIGHT)
    track = rtc.LocalVideoTrack.create_video_track("hue", source)
    options = rtc.TrackPublishOptions()
    options.source = rtc.TrackSource.SOURCE_CAMERA
    publication = await room.local_participant.publish_track(track, options)
    logging.info("published track %s", publication.sid)

    asyncio.ensure_future(capture_and_process_frame(source))


async def capture_and_process_frame(source: rtc.VideoSource):
    argb_frame = bytearray(WIDTH * HEIGHT * 4)
    cap = cv2.VideoCapture(0)  # Assuming device index 0 for your webcam
    arr = np.frombuffer(argb_frame, dtype=np.uint8)
    
    while True:
        start_time = asyncio.get_event_loop().time()

        ret_val, frame = cap.read()
        rgba = cv2.cvtColor(frame, cv2.COLOR_RGB2RGBA)

        argb_color = np.array(rgba, dtype=np.uint8)
        arr.flat[::4] = argb_color[0]
        arr.flat[1::4] = argb_color[1]
        arr.flat[2::4] = argb_color[2]
        arr.flat[3::4] = argb_color[3]

        frame = rtc.VideoFrame(WIDTH, HEIGHT, rtc.VideoBufferType.RGBA, argb_frame)
        source.capture_frame(frame)

        code_duration = asyncio.get_event_loop().time() - start_time
        await asyncio.sleep(1 / 30 - code_duration)
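
A note on the snippet above: argb_color[0] through argb_color[3] index the first four rows of the image, not its color channels, and cap.read() returns BGR rather than RGB, which is the likely cause of the distorted output. A minimal sketch of the same capture loop without the manual channel packing (assuming the WIDTH/HEIGHT constants and imports from the snippet above) could look like this:

async def capture_and_process_frame(source: rtc.VideoSource):
    cap = cv2.VideoCapture(0)
    while True:
        start_time = asyncio.get_event_loop().time()

        ret_val, frame = cap.read()
        if not ret_val:
            break

        # OpenCV delivers BGR; convert directly to RGBA and match the track size.
        rgba = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)
        rgba = cv2.resize(rgba, (WIDTH, HEIGHT))

        video_frame = rtc.VideoFrame(WIDTH, HEIGHT, rtc.VideoBufferType.RGBA, rgba.tobytes())
        source.capture_frame(video_frame)

        # Aim for roughly 30 fps, never sleeping a negative duration.
        code_duration = asyncio.get_event_loop().time() - start_time
        await asyncio.sleep(max(0.0, 1 / 30 - code_duration))

    cap.release()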

@maisonsmd

maisonsmd commented Dec 20, 2024

Hi @rubygobay

I was able to use my webcam.

Could you please print(frame.shape) to see whether your camera is producing 24-bit or 32-bit frames?
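
For reference, the last value of the shape is the channel count, so a quick check along these lines (assuming the default capture device) tells you the pixel depth:

import cv2

cap = cv2.VideoCapture(0)
ret, frame = cap.read()
if ret:
    # (height, width, channels): 3 channels -> 24-bit BGR, 4 -> 32-bit BGRA
    print(frame.shape)
cap.release()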

My code:

import asyncio
from time import perf_counter

import cv2
from livekit import rtc

WIDTH, HEIGHT = 1920, 1080
FPS = 30

async def draw_camera_frame(source: rtc.VideoSource):
    next_frame_time = perf_counter()

    cap = cv2.VideoCapture(0)
    while True:
        ret, frame = cap.read()
        if not ret:
            break

        print(frame.shape)

        frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        frame = cv2.resize(frame, (WIDTH, HEIGHT))

        rgb_frame = frame.tobytes()

        frame = rtc.VideoFrame(WIDTH, HEIGHT, rtc.VideoBufferType.RGB24, rgb_frame)
        source.capture_frame(frame)

        next_frame_time += 1 / FPS
        await asyncio.sleep(next_frame_time - perf_counter())

    cap.release()
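
For completeness, this coroutine slots into the publish flow from the original post in the same way; a sketch reusing the placeholders and publish options shown there:

async def main(room: rtc.Room):
    # "LINK" and "TOKEN" are placeholders, as in the original post.
    await room.connect("LINK", "TOKEN")

    source = rtc.VideoSource(WIDTH, HEIGHT)
    track = rtc.LocalVideoTrack.create_video_track("hue", source)
    options = rtc.TrackPublishOptions()
    options.source = rtc.TrackSource.SOURCE_CAMERA
    await room.local_participant.publish_track(track, options)

    asyncio.ensure_future(draw_camera_frame(source))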
