diff --git a/.bazelversion b/.bazelversion index f3b5af39e4..f22d756da3 100644 --- a/.bazelversion +++ b/.bazelversion @@ -1 +1 @@ -6.1.1 +6.5.0 diff --git a/.github/ISSUE_TEMPLATE/00-task-issue-template.yaml b/.github/ISSUE_TEMPLATE/00-task-issue-template.yaml index d6130edb6d..9576d40ad7 100644 --- a/.github/ISSUE_TEMPLATE/00-task-issue-template.yaml +++ b/.github/ISSUE_TEMPLATE/00-task-issue-template.yaml @@ -52,12 +52,11 @@ body: label: Describe the expected behaviour validations: required: true - - type: textarea + - type: input id: what-happened_model attributes: label: Standalone code/steps you may have used to try to get what you need description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem - render: shell validations: required: true - type: textarea diff --git a/.github/ISSUE_TEMPLATE/11-model-maker-issue-template.yaml b/.github/ISSUE_TEMPLATE/11-model-maker-issue-template.yaml index 7a6d92152f..dcc4d8aaa5 100644 --- a/.github/ISSUE_TEMPLATE/11-model-maker-issue-template.yaml +++ b/.github/ISSUE_TEMPLATE/11-model-maker-issue-template.yaml @@ -53,12 +53,11 @@ body: label: Describe the expected behaviour validations: required: true - - type: textarea + - type: input id: what-happened_model attributes: label: Standalone code/steps you may have used to try to get what you need description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem - render: shell validations: required: true - type: textarea diff --git a/.github/ISSUE_TEMPLATE/12-studio-issue-template.yaml b/.github/ISSUE_TEMPLATE/12-studio-issue-template.yaml index ffaa315f94..135e7b7810 100644 --- a/.github/ISSUE_TEMPLATE/12-studio-issue-template.yaml +++ b/.github/ISSUE_TEMPLATE/12-studio-issue-template.yaml @@ -43,12 +43,11 @@ body: label: Describe the expected behaviour validations: required: false - - type: textarea + - type: input id: what-happened_model attributes: label: Standalone code/steps you may have used to try to get what you need description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem - render: shell validations: required: false - type: textarea diff --git a/.github/ISSUE_TEMPLATE/15-build-install-issue-template.yaml b/.github/ISSUE_TEMPLATE/15-build-install-issue-template.yaml index ded9d09a63..040e859a84 100644 --- a/.github/ISSUE_TEMPLATE/15-build-install-issue-template.yaml +++ b/.github/ISSUE_TEMPLATE/15-build-install-issue-template.yaml @@ -5,7 +5,7 @@ body: - type: markdown id: link attributes: - value: Please make sure that this is a build/installation issue and also refer to the [troubleshooting](https://google.github.io/mediapipe/getting_started/troubleshooting.html) documentation before raising any issues. + value: Please make sure that this is a build/installation issue and also refer to the [troubleshooting](https://google-ai-edge.github.io/mediapipe/getting_started/troubleshooting.html) documentation before raising any issues. 
- type: input id: os attributes: @@ -91,7 +91,7 @@ body: id: what-happened attributes: label: Describe the problem - description: Provide the exact sequence of commands / steps that you executed before running into the [problem](https://google.github.io/mediapipe/getting_started/getting_started.html) + description: Provide the exact sequence of commands / steps that you executed before running into the [problem](https://google-ai-edge.github.io/mediapipe/getting_started/getting_started.html) placeholder: Tell us what you see! value: "A bug happened!" validations: diff --git a/.github/ISSUE_TEMPLATE/16-bug-issue-template.yaml b/.github/ISSUE_TEMPLATE/16-bug-issue-template.yaml index efa925b44c..7aeead912a 100644 --- a/.github/ISSUE_TEMPLATE/16-bug-issue-template.yaml +++ b/.github/ISSUE_TEMPLATE/16-bug-issue-template.yaml @@ -5,7 +5,7 @@ body: - type: markdown id: link attributes: - value: Please make sure that this is a bug and also refer to the [troubleshooting](https://google.github.io/mediapipe/getting_started/troubleshooting.html), FAQ documentation before raising any issues. + value: Please make sure that this is a bug and also refer to the [troubleshooting](https://google-ai-edge.github.io/mediapipe/getting_started/troubleshooting.html), FAQ documentation before raising any issues. - type: dropdown id: customcode_model attributes: @@ -92,12 +92,11 @@ body: label: Describe the expected behaviour validations: required: true - - type: textarea + - type: input id: what-happened_model attributes: label: Standalone code/steps you may have used to try to get what you need description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem - render: shell validations: required: true - type: textarea diff --git a/.github/ISSUE_TEMPLATE/18-solution-legacy-issue-template.yaml b/.github/ISSUE_TEMPLATE/18-solution-legacy-issue-template.yaml index acb0f5b891..8c51a29b97 100644 --- a/.github/ISSUE_TEMPLATE/18-solution-legacy-issue-template.yaml +++ b/.github/ISSUE_TEMPLATE/18-solution-legacy-issue-template.yaml @@ -1,11 +1,11 @@ name: Solution(Legacy) Issue -description: Use this template for assistance with a specific Mediapipe solution (google.github.io/mediapipe/solutions) such as "Pose", including inference model usage/training, solution-specific calculators etc. +description: Use this template for assistance with a specific Mediapipe solution (google-ai-edge.github.io/mediapipe/solutions) such as "Pose", including inference model usage/training, solution-specific calculators etc. labels: 'type:support' body: - type: markdown id: linkmodel attributes: - value: Please make sure that this is a [solution](https://google.github.io/mediapipe/solutions/solutions.html) issue. + value: Please make sure that this is a [solution](https://google-ai-edge.github.io/mediapipe/solutions/solutions.html) issue. - type: dropdown id: customcode_model attributes: @@ -60,12 +60,11 @@ body: label: Describe the expected behaviour validations: required: false - - type: textarea + - type: input id: what-happened_model attributes: label: Standalone code/steps you may have used to try to get what you need description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. 
If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem - render: shell validations: required: false - type: textarea diff --git a/.github/bot_config.yml b/.github/bot_config.yml index 74a60e4b9b..8049a79100 100644 --- a/.github/bot_config.yml +++ b/.github/bot_config.yml @@ -16,4 +16,4 @@ # A list of assignees assignees: - kuaashish - - ayushgdev + - kalyan2789g diff --git a/Dockerfile b/Dockerfile index 3ff6a5a33a..46b9cd4dcd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -67,7 +67,7 @@ RUN pip3 install tf_slim RUN ln -s /usr/bin/python3 /usr/bin/python # Install bazel -ARG BAZEL_VERSION=6.1.1 +ARG BAZEL_VERSION=6.5.0 RUN mkdir /bazel && \ wget --no-check-certificate -O /bazel/installer.sh "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/b\ azel-${BAZEL_VERSION}-installer-linux-x86_64.sh" && \ diff --git a/WORKSPACE b/WORKSPACE index 67866ed18c..a1d06c46ec 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -16,23 +16,27 @@ http_archive( "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz", ], ) + load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace") + bazel_skylib_workspace() + load("@bazel_skylib//lib:versions.bzl", "versions") + versions.check(minimum_bazel_version = "3.7.2") # ABSL on 2023-10-18 http_archive( name = "com_google_absl", - urls = ["https://github.com/abseil/abseil-cpp/archive//9687a8ea750bfcddf790372093245a1d041b21a3.tar.gz"], - patches = [ - "@//third_party:com_google_absl_windows_patch.diff" - ], patch_args = [ "-p1", ], - strip_prefix = "abseil-cpp-9687a8ea750bfcddf790372093245a1d041b21a3", + patches = [ + "@//third_party:com_google_absl_windows_patch.diff", + ], sha256 = "f841f78243f179326f2a80b719f2887c38fe226d288ecdc46e2aa091e6aa43bc", + strip_prefix = "abseil-cpp-9687a8ea750bfcddf790372093245a1d041b21a3", + urls = ["https://github.com/abseil/abseil-cpp/archive//9687a8ea750bfcddf790372093245a1d041b21a3.tar.gz"], ) http_archive( @@ -41,7 +45,6 @@ http_archive( url = "https://github.com/bazelbuild/rules_java/releases/download/5.3.5/rules_java-5.3.5.tar.gz", ) - http_archive( name = "rules_android_ndk", sha256 = "d230a980e0d3a42b85d5fce2cb17ec3ac52b88d2cff5aaf86bae0f05b48adc55", @@ -53,40 +56,39 @@ load("@rules_android_ndk//:rules.bzl", "android_ndk_repository") http_archive( name = "build_bazel_rules_apple", - sha256 = "3e2c7ae0ddd181c4053b6491dad1d01ae29011bc322ca87eea45957c76d3a0c3", - url = "https://github.com/bazelbuild/rules_apple/releases/download/2.1.0/rules_apple.2.1.0.tar.gz", + patch_args = [ + "-p1", + ], patches = [ # Bypass checking ios unit test runner when building MP ios applications. 
"@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff", # https://github.com/bazelbuild/rules_apple/commit/95b1305255dc29874cacc3dc7fdc017f16d8dbe8 - "@//third_party:build_bazel_rules_apple_multi_arch_split_with_new_transition.diff" - ], - patch_args = [ - "-p1", + "@//third_party:build_bazel_rules_apple_multi_arch_split_with_new_transition.diff", ], + sha256 = "3e2c7ae0ddd181c4053b6491dad1d01ae29011bc322ca87eea45957c76d3a0c3", + url = "https://github.com/bazelbuild/rules_apple/releases/download/2.1.0/rules_apple.2.1.0.tar.gz", ) http_archive( name = "com_google_protobuf", - sha256 = "87407cd28e7a9c95d9f61a098a53cf031109d451a7763e7dd1253abf8b4df422", - strip_prefix = "protobuf-3.19.1", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"], - patches = [ - "@//third_party:com_google_protobuf_fixes.diff" - ], patch_args = [ "-p1", ], + patches = [ + "@//third_party:com_google_protobuf_fixes.diff", + ], + sha256 = "87407cd28e7a9c95d9f61a098a53cf031109d451a7763e7dd1253abf8b4df422", + strip_prefix = "protobuf-3.19.1", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"], ) - # GoogleTest/GoogleMock framework. Used by most unit-tests. # Last updated 2021-07-02. http_archive( name = "com_google_googletest", - urls = ["https://github.com/google/googletest/archive/4ec4cd23f486bf70efcc5d2caa40f24368f752e3.zip"], - strip_prefix = "googletest-4ec4cd23f486bf70efcc5d2caa40f24368f752e3", sha256 = "de682ea824bfffba05b4e33b67431c247397d6175962534305136aa06f92e049", + strip_prefix = "googletest-4ec4cd23f486bf70efcc5d2caa40f24368f752e3", + urls = ["https://github.com/google/googletest/archive/4ec4cd23f486bf70efcc5d2caa40f24368f752e3.zip"], ) # Load Zlib before initializing TensorFlow and the iOS build rules to guarantee @@ -94,48 +96,48 @@ http_archive( http_archive( name = "zlib", build_file = "@//third_party:zlib.BUILD", - sha256 = "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30", - strip_prefix = "zlib-1.2.13", - url = "http://zlib.net/fossils/zlib-1.2.13.tar.gz", - patches = [ - "@//third_party:zlib.diff", - ], patch_args = [ "-p1", ], + patches = [ + "@//third_party:zlib.diff", + ], + sha256 = "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30", + strip_prefix = "zlib-1.2.13", + url = "http://zlib.net/fossils/zlib-1.2.13.tar.gz", ) - # gflags needed by glog http_archive( name = "com_github_gflags_gflags", - strip_prefix = "gflags-2.2.2", sha256 = "19713a36c9f32b33df59d1c79b4958434cb005b5b47dc5400a7a4b078111d9b5", + strip_prefix = "gflags-2.2.2", url = "https://github.com/gflags/gflags/archive/v2.2.2.zip", ) # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0.6.0", sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-0.6.0", urls = [ "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], ) + http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0.6.0", - sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", build_file = "@//third_party:glog_no_gflags.BUILD", - urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + patch_args = [ + "-p1", ], patches = [ "@//third_party:com_github_glog_glog.diff", ], - patch_args = [ - "-p1", + sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-0.6.0", + urls = [ + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], ) @@ -144,28 +146,29 @@ http_archive( # crashes on some 
Android devices. http_archive( name = "com_github_glog_glog_windows", - strip_prefix = "glog-3a0d4d22c5ae0b9a2216988411cfa6bf860cc372", - sha256 = "170d08f80210b82d95563f4723a15095eff1aad1863000e8eeb569c96a98fefb", - urls = [ - "https://github.com/google/glog/archive/3a0d4d22c5ae0b9a2216988411cfa6bf860cc372.zip", + patch_args = [ + "-p1", ], patches = [ "@//third_party:com_github_glog_glog.diff", "@//third_party:com_github_glog_glog_windows_patch.diff", ], - patch_args = [ - "-p1", + sha256 = "170d08f80210b82d95563f4723a15095eff1aad1863000e8eeb569c96a98fefb", + strip_prefix = "glog-3a0d4d22c5ae0b9a2216988411cfa6bf860cc372", + urls = [ + "https://github.com/google/glog/archive/3a0d4d22c5ae0b9a2216988411cfa6bf860cc372.zip", ], ) # Maven dependencies. RULES_JVM_EXTERNAL_TAG = "4.0" + RULES_JVM_EXTERNAL_SHA = "31701ad93dbfe544d597dbe62c9a1fdd76d81d8a9150c2bf1ecf928ecdf97169" http_archive( name = "rules_jvm_external", - strip_prefix = "rules_jvm_external-%s" % RULES_JVM_EXTERNAL_TAG, sha256 = RULES_JVM_EXTERNAL_SHA, + strip_prefix = "rules_jvm_external-%s" % RULES_JVM_EXTERNAL_TAG, url = "https://github.com/bazelbuild/rules_jvm_external/archive/%s.zip" % RULES_JVM_EXTERNAL_TAG, ) @@ -204,13 +207,13 @@ maven_install( "junit:junit:4.12", "org.hamcrest:hamcrest-library:1.3", ], + fetch_sources = True, repositories = [ "https://maven.google.com", "https://dl.google.com/dl/android/maven2", "https://repo1.maven.org/maven2", "https://jcenter.bintray.com", ], - fetch_sources = True, version_conflict_policy = "pinned", ) @@ -225,32 +228,32 @@ http_archive( ], ) -# XNNPACK on 2024-09-24 +# XNNPACK on 2024-11-18 http_archive( name = "XNNPACK", # `curl -L | shasum -a 256` - sha256 = "feecde71526d955a0125f7ddd28b9f2d282cd6fca6c1c6bde48f29f86365dd0b", - strip_prefix = "XNNPACK-9007aa93227010168e615f9c6552035040c94a15", - url = "https://github.com/google/XNNPACK/archive/9007aa93227010168e615f9c6552035040c94a15.zip", + sha256 = "af30fe2b301330a7e19cd422acf22991de3c1f5d91dda58e9ee67544d608fa51", + strip_prefix = "XNNPACK-dc1549a7141c7a9496ae160bb27b8700f0f6e1f1", + url = "https://github.com/google/XNNPACK/archive/dc1549a7141c7a9496ae160bb27b8700f0f6e1f1.zip", ) # 2020-07-09 http_archive( name = "pybind11_bazel", + sha256 = "75922da3a1bdb417d820398eb03d4e9bd067c4905a4246d35a44c01d62154d91", strip_prefix = "pybind11_bazel-203508e14aab7309892a1c5f7dd05debda22d9a5", urls = ["https://github.com/pybind/pybind11_bazel/archive/203508e14aab7309892a1c5f7dd05debda22d9a5.zip"], - sha256 = "75922da3a1bdb417d820398eb03d4e9bd067c4905a4246d35a44c01d62154d91", ) # 2022-10-20 http_archive( name = "pybind11", + build_file = "@pybind11_bazel//:pybind11.BUILD", + sha256 = "fcf94065efcfd0a7a828bacf118fa11c43f6390d0c805e3e6342ac119f2e9976", + strip_prefix = "pybind11-2.10.1", urls = [ "https://github.com/pybind/pybind11/archive/v2.10.1.zip", ], - sha256 = "fcf94065efcfd0a7a828bacf118fa11c43f6390d0c805e3e6342ac119f2e9976", - strip_prefix = "pybind11-2.10.1", - build_file = "@pybind11_bazel//:pybind11.BUILD", ) http_archive( @@ -262,6 +265,25 @@ http_archive( ], ) +# KleidiAI is needed to get the best possible performance out of XNNPack +http_archive( + name = "KleidiAI", + sha256 = "ad37707084a6d4ff41be10cbe8540c75bea057ba79d0de6c367c1bfac6ba0852", + strip_prefix = "kleidiai-40a926833857fb64786e02f97703e42b1537cb57", + urls = [ + "https://gitlab.arm.com/kleidi/kleidiai/-/archive/40a926833857fb64786e02f97703e42b1537cb57/kleidiai-40a926833857fb64786e02f97703e42b1537cb57.zip" + ], +) + +http_archive( + name = "cpuinfo", + sha256 = 
"e2bd8049d29dfbed675a0bc7c01947f8b8bd3f17f706b827d3f6c1e5c64dd8c3", + strip_prefix = "cpuinfo-8df44962d437a0477f07ba6b8843d0b6a48646a4", + urls = [ + "https://github.com/pytorch/cpuinfo/archive/8df44962d437a0477f07ba6b8843d0b6a48646a4.zip", + ], +) + # TF on 2024-09-24 _TENSORFLOW_GIT_COMMIT = "5329ec8dd396487982ef3e743f98c0195af39a6b" @@ -270,8 +292,8 @@ _TENSORFLOW_SHA256 = "eb1f8d740d59ea3dee91108ab1fc19d91c4e9ac2fd17d9ab86d865c3c4 http_archive( name = "org_tensorflow", - urls = [ - "https://github.com/tensorflow/tensorflow/archive/%s.tar.gz" % _TENSORFLOW_GIT_COMMIT, + patch_args = [ + "-p1", ], patches = [ "@//third_party:org_tensorflow_c_api_experimental.diff", @@ -281,59 +303,72 @@ http_archive( # See https://github.com/bazelbuild/bazel/issues/19912 "@//third_party:org_tensorflow_objc_build_fixes.diff", ], - patch_args = [ - "-p1", - ], - strip_prefix = "tensorflow-%s" % _TENSORFLOW_GIT_COMMIT, sha256 = _TENSORFLOW_SHA256, + strip_prefix = "tensorflow-%s" % _TENSORFLOW_GIT_COMMIT, + urls = [ + "https://github.com/tensorflow/tensorflow/archive/%s.tar.gz" % _TENSORFLOW_GIT_COMMIT, + ], ) load("@org_tensorflow//tensorflow:workspace3.bzl", "tf_workspace3") + tf_workspace3() # Initialize hermetic Python load("@org_tensorflow//third_party/xla/third_party/py:python_init_rules.bzl", "python_init_rules") + python_init_rules() load("@org_tensorflow//third_party/xla/third_party/py:python_init_repositories.bzl", "python_init_repositories") + python_init_repositories( default_python_version = "system", local_wheel_dist_folder = "dist", + local_wheel_inclusion_list = ["mediapipe*"], + local_wheel_workspaces = ["//:WORKSPACE"], requirements = { "3.9": "//:requirements_lock.txt", "3.10": "//:requirements_lock_3_10.txt", "3.11": "//:requirements_lock_3_11.txt", "3.12": "//:requirements_lock_3_12.txt", }, - local_wheel_inclusion_list = ["mediapipe*"], - local_wheel_workspaces = ["//:WORKSPACE"], ) load("@org_tensorflow//third_party/xla/third_party/py:python_init_toolchains.bzl", "python_init_toolchains") + python_init_toolchains() load("@org_tensorflow//third_party/xla/third_party/py:python_init_pip.bzl", "python_init_pip") + python_init_pip() load("@pypi//:requirements.bzl", "install_deps") + install_deps() # End hermetic Python initialization load("@org_tensorflow//tensorflow:workspace2.bzl", "tf_workspace2") + tf_workspace2() load("@rules_python//python:pip.bzl", "pip_parse") + pip_parse( name = "mediapipe_pip_deps", requirements_lock = "@//:requirements_lock.txt", ) + load("@mediapipe_pip_deps//:requirements.bzl", mp_install_deps = "install_deps") + mp_install_deps() + pip_parse( name = "model_maker_pip_deps", requirements_lock = "@//mediapipe/model_maker:requirements_lock.txt", ) + load("@model_maker_pip_deps//:requirements.bzl", mm_install_deps = "install_deps") + mm_install_deps() http_archive( @@ -344,29 +379,12 @@ http_archive( ) load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies") + rules_foreign_cc_dependencies() load("@bazel_features//:deps.bzl", "bazel_features_deps") -bazel_features_deps() - -http_archive( - name = "cpuinfo", - sha256 = "2bf2b62eb86e2d2eaf862d0b9683a6c467a4d69fb2f7f1dc47c799809148608f", - strip_prefix = "cpuinfo-fa1c679da8d19e1d87f20175ae1ec10995cd3dd3", - urls = [ - "https://github.com/pytorch/cpuinfo/archive/fa1c679da8d19e1d87f20175ae1ec10995cd3dd3.zip", - ], -) -# KleidiAI is needed to get the best possible performance out of XNNPack -http_archive( - name = "KleidiAI", - sha256 = 
"88233e427be6579560073267575f00f3b5fc370a31a43bbdd87a1810bd4bf1b6", - strip_prefix = "kleidiai-cddf991af5de49fd34949fa39690e4e906e04074", - urls = [ - "https://gitlab.arm.com/kleidi/kleidiai/-/archive/cddf991af5de49fd34949fa39690e4e906e04074/kleidiai-cddf991af5de49fd34949fa39690e4e906e04074.zip", - ], -) +bazel_features_deps() # TODO: This is an are indirect dependency. We should factor it out. http_archive( @@ -380,53 +398,56 @@ load( "@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies", ) + apple_rules_dependencies() load( "@build_bazel_rules_swift//swift:repositories.bzl", "swift_rules_dependencies", ) + swift_rules_dependencies() load( "@build_bazel_rules_swift//swift:extras.bzl", "swift_rules_extra_dependencies", ) + swift_rules_extra_dependencies() load( "@build_bazel_apple_support//lib:repositories.bzl", "apple_support_dependencies", ) + apple_support_dependencies() # This is used to select all contents of the archives for CMake-based packages to give CMake access to them. all_content = """filegroup(name = "all", srcs = glob(["**"]), visibility = ["//visibility:public"])""" - # Google Benchmark library v1.6.1 released on 2022-01-10. http_archive( name = "com_google_benchmark", - urls = ["https://github.com/google/benchmark/archive/refs/tags/v1.6.1.tar.gz"], - strip_prefix = "benchmark-1.6.1", - sha256 = "6132883bc8c9b0df5375b16ab520fac1a85dc9e4cf5be59480448ece74b278d4", build_file = "@//third_party:benchmark.BUILD", + sha256 = "6132883bc8c9b0df5375b16ab520fac1a85dc9e4cf5be59480448ece74b278d4", + strip_prefix = "benchmark-1.6.1", + urls = ["https://github.com/google/benchmark/archive/refs/tags/v1.6.1.tar.gz"], ) # easyexif http_archive( name = "easyexif", - url = "https://github.com/mayanklahiri/easyexif/archive/master.zip", - strip_prefix = "easyexif-master", build_file = "@//third_party:easyexif.BUILD", + strip_prefix = "easyexif-master", + url = "https://github.com/mayanklahiri/easyexif/archive/master.zip", ) # libyuv http_archive( name = "libyuv", + build_file = "@//third_party:libyuv.BUILD", # Error: operand type mismatch for `vbroadcastss' caused by commit 8a13626e42f7fdcf3a6acbb0316760ee54cda7d8. urls = ["https://chromium.googlesource.com/libyuv/libyuv/+archive/2525698acba9bf9b701ba6b4d9584291a1f62257.tar.gz"], - build_file = "@//third_party:libyuv.BUILD", ) # Note: protobuf-javalite is no longer released as a separate download, it's included in the main Java download. 
@@ -439,38 +460,43 @@ http_archive( ) load("@//third_party/flatbuffers:workspace.bzl", flatbuffers = "repo") + flatbuffers() http_archive( name = "com_google_audio_tools", - strip_prefix = "multichannel-audio-tools-1f6b1319f13282eda6ff1317be13de67f4723860", - urls = ["https://github.com/google/multichannel-audio-tools/archive/1f6b1319f13282eda6ff1317be13de67f4723860.zip"], - sha256 = "fe346e1aee4f5069c4cbccb88706a9a2b2b4cf98aeb91ec1319be77e07dd7435", - repo_mapping = {"@com_github_glog_glog" : "@com_github_glog_glog_no_gflags"}, + patch_args = ["-p1"], # TODO: Fix this in AudioTools directly patches = ["@//third_party:com_google_audio_tools_fixes.diff"], - patch_args = ["-p1"] + repo_mapping = {"@com_github_glog_glog": "@com_github_glog_glog_no_gflags"}, + sha256 = "fe346e1aee4f5069c4cbccb88706a9a2b2b4cf98aeb91ec1319be77e07dd7435", + strip_prefix = "multichannel-audio-tools-1f6b1319f13282eda6ff1317be13de67f4723860", + urls = ["https://github.com/google/multichannel-audio-tools/archive/1f6b1319f13282eda6ff1317be13de67f4723860.zip"], ) http_archive( name = "pffft", + build_file = "@//third_party:pffft.BUILD", strip_prefix = "jpommier-pffft-7c3b5a7dc510", urls = ["https://bitbucket.org/jpommier/pffft/get/7c3b5a7dc510.zip"], - build_file = "@//third_party:pffft.BUILD", ) # Sentencepiece http_archive( name = "com_google_sentencepiece", - strip_prefix = "sentencepiece-0.1.96", add_prefix = "sentencepiece", + build_file = "@//third_party:sentencepiece.BUILD", + patch_args = [ + "-d", + "sentencepiece", + "-p1", + ], + patches = ["@//third_party:com_google_sentencepiece.diff"], sha256 = "8409b0126ebd62b256c685d5757150cf7fcb2b92a2f2b98efb3f38fc36719754", + strip_prefix = "sentencepiece-0.1.96", urls = [ - "https://github.com/google/sentencepiece/archive/refs/tags/v0.1.96.zip" + "https://github.com/google/sentencepiece/archive/refs/tags/v0.1.96.zip", ], - build_file = "@//third_party:sentencepiece.BUILD", - patches = ["@//third_party:com_google_sentencepiece.diff"], - patch_args = ["-d", "sentencepiece", "-p1"], ) http_archive( @@ -485,17 +511,17 @@ http_archive( http_archive( name = "org_tensorflow_text", - sha256 = "f64647276f7288d1b1fe4c89581d51404d0ce4ae97f2bcc4c19bd667549adca8", - strip_prefix = "text-2.2.0", - urls = [ - "https://github.com/tensorflow/text/archive/v2.2.0.zip", - ], + patch_args = ["-p1"], patches = [ "@//third_party:tensorflow_text_remove_tf_deps.diff", "@//third_party:tensorflow_text_a0f49e63.diff", ], - patch_args = ["-p1"], repo_mapping = {"@com_google_re2": "@com_googlesource_code_re2"}, + sha256 = "f64647276f7288d1b1fe4c89581d51404d0ce4ae97f2bcc4c19bd667549adca8", + strip_prefix = "text-2.2.0", + urls = [ + "https://github.com/tensorflow/text/archive/v2.2.0.zip", + ], ) http_archive( @@ -510,15 +536,15 @@ http_archive( # Point to the commit that deprecates the usage of Eigen::MappedSparseMatrix. 
http_archive( name = "ceres_solver", - url = "https://github.com/ceres-solver/ceres-solver/archive/123fba61cf2611a3c8bddc9d91416db26b10b558.zip", - patches = [ - "@//third_party:ceres_solver_compatibility_fixes.diff" - ], patch_args = [ "-p1", ], + patches = [ + "@//third_party:ceres_solver_compatibility_fixes.diff", + ], + sha256 = "8b7b16ceb363420e0fd499576daf73fa338adb0b1449f58bea7862766baa1ac7", strip_prefix = "ceres-solver-123fba61cf2611a3c8bddc9d91416db26b10b558", - sha256 = "8b7b16ceb363420e0fd499576daf73fa338adb0b1449f58bea7862766baa1ac7" + url = "https://github.com/ceres-solver/ceres-solver/archive/123fba61cf2611a3c8bddc9d91416db26b10b558.zip", ) http_archive( @@ -537,7 +563,7 @@ new_local_repository( new_local_repository( name = "linux_ffmpeg", build_file = "@//third_party:ffmpeg_linux.BUILD", - path = "/usr" + path = "/usr", ) new_local_repository( @@ -575,8 +601,8 @@ http_archive( # '-DBUILD_PROTOBUF=OFF -DBUILD_opencv_dnn=OFF'. http_archive( name = "ios_opencv", - sha256 = "7dd536d06f59e6e1156b546bd581523d8df92ce83440002885ec5abc06558de2", build_file = "@//third_party:opencv_ios.BUILD", + sha256 = "7dd536d06f59e6e1156b546bd581523d8df92ce83440002885ec5abc06558de2", type = "zip", url = "https://github.com/opencv/opencv/releases/download/3.2.0/opencv-3.2.0-ios-framework.zip", ) @@ -591,36 +617,35 @@ http_archive( # Task libraries are built. http_archive( name = "ios_opencv_source", - sha256 = "a61e7a4618d353140c857f25843f39b2abe5f451b018aab1604ef0bc34cd23d5", build_file = "@//third_party:opencv_ios_source.BUILD", + sha256 = "a61e7a4618d353140c857f25843f39b2abe5f451b018aab1604ef0bc34cd23d5", type = "zip", url = "https://github.com/opencv/opencv/archive/refs/tags/4.5.3.zip", ) http_archive( name = "stblib", - strip_prefix = "stb-b42009b3b9d4ca35bc703f5310eedc74f584be58", - sha256 = "13a99ad430e930907f5611325ec384168a958bf7610e63e60e2fd8e7b7379610", - urls = ["https://github.com/nothings/stb/archive/b42009b3b9d4ca35bc703f5310eedc74f584be58.tar.gz"], build_file = "@//third_party:stblib.BUILD", - patches = [ - "@//third_party:stb_image_impl.diff" - ], patch_args = [ "-p1", ], + patches = [ + "@//third_party:stb_image_impl.diff", + ], + sha256 = "13a99ad430e930907f5611325ec384168a958bf7610e63e60e2fd8e7b7379610", + strip_prefix = "stb-b42009b3b9d4ca35bc703f5310eedc74f584be58", + urls = ["https://github.com/nothings/stb/archive/b42009b3b9d4ca35bc703f5310eedc74f584be58.tar.gz"], ) -# More iOS deps. 
- http_archive( name = "google_toolbox_for_mac", - url = "https://github.com/google/google-toolbox-for-mac/archive/v2.2.1.zip", + build_file = "@//third_party:google_toolbox_for_mac.BUILD", sha256 = "e3ac053813c989a88703556df4dc4466e424e30d32108433ed6beaec76ba4fdc", strip_prefix = "google-toolbox-for-mac-2.2.1", - build_file = "@//third_party:google_toolbox_for_mac.BUILD", + url = "https://github.com/google/google-toolbox-for-mac/archive/v2.2.1.zip", ) +# Hermetic CUDA load( "@org_tensorflow//third_party/gpus/cuda/hermetic:cuda_json_init_repository.bzl", "cuda_json_init_repository", @@ -638,9 +663,11 @@ load( "cuda_redist_init_repositories", "cudnn_redist_init_repository", ) + cuda_redist_init_repositories( cuda_redistributions = CUDA_REDISTRIBUTIONS, ) + cudnn_redist_init_repository( cudnn_redistributions = CUDNN_REDISTRIBUTIONS, ) @@ -649,23 +676,26 @@ load( "@org_tensorflow//third_party/gpus/cuda/hermetic:cuda_configure.bzl", "cuda_configure", ) + cuda_configure(name = "local_config_cuda") # Edge TPU http_archive( - name = "libedgetpu", - sha256 = "14d5527a943a25bc648c28a9961f954f70ba4d79c0a9ca5ae226e1831d72fe80", - strip_prefix = "libedgetpu-3164995622300286ef2bb14d7fdc2792dae045b7", - urls = [ - "https://github.com/google-coral/libedgetpu/archive/3164995622300286ef2bb14d7fdc2792dae045b7.tar.gz" - ], + name = "libedgetpu", + sha256 = "14d5527a943a25bc648c28a9961f954f70ba4d79c0a9ca5ae226e1831d72fe80", + strip_prefix = "libedgetpu-3164995622300286ef2bb14d7fdc2792dae045b7", + urls = [ + "https://github.com/google-coral/libedgetpu/archive/3164995622300286ef2bb14d7fdc2792dae045b7.tar.gz", + ], ) + load("@libedgetpu//:workspace.bzl", "libedgetpu_dependencies") + libedgetpu_dependencies() load("@coral_crosstool//:configure.bzl", "cc_crosstool") -cc_crosstool(name = "crosstool") +cc_crosstool(name = "crosstool") # Node dependencies http_archive( @@ -675,11 +705,14 @@ http_archive( ) load("@build_bazel_rules_nodejs//:repositories.bzl", "build_bazel_rules_nodejs_dependencies") + build_bazel_rules_nodejs_dependencies() # fetches nodejs, npm, and yarn load("@build_bazel_rules_nodejs//:index.bzl", "node_repositories", "yarn_install") + node_repositories() + yarn_install( name = "npm", package_json = "@//:package.json", @@ -701,18 +734,24 @@ http_archive( urls = ["https://github.com/protocolbuffers/protobuf-javascript/archive/refs/tags/v3.21.2.tar.gz"], ) -load("@rules_proto_grpc//:repositories.bzl", "rules_proto_grpc_toolchains", "rules_proto_grpc_repos") +load("@rules_proto_grpc//:repositories.bzl", "rules_proto_grpc_repos", "rules_proto_grpc_toolchains") + rules_proto_grpc_toolchains() + rules_proto_grpc_repos() load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_proto_toolchains") + rules_proto_dependencies() + rules_proto_toolchains() load("@//third_party:external_files.bzl", "external_files") + external_files() load("@//third_party:wasm_files.bzl", "wasm_files") + wasm_files() # Halide @@ -720,39 +759,39 @@ wasm_files() new_local_repository( name = "halide", build_file = "@//third_party/halide:BUILD.bazel", - path = "third_party/halide" + path = "third_party/halide", ) http_archive( name = "linux_halide", + build_file = "@//third_party:halide.BUILD", sha256 = "d290fadf3f358c94aacf43c883de6468bb98883e26116920afd491ec0e440cd2", strip_prefix = "Halide-15.0.1-x86-64-linux", urls = ["https://github.com/halide/Halide/releases/download/v15.0.1/Halide-15.0.1-x86-64-linux-4c63f1befa1063184c5982b11b6a2cc17d4e5815.tar.gz"], - build_file = "@//third_party:halide.BUILD", ) 
http_archive( name = "macos_x86_64_halide", + build_file = "@//third_party:halide.BUILD", sha256 = "48ff073ac1aee5c4aca941a4f043cac64b38ba236cdca12567e09d803594a61c", strip_prefix = "Halide-15.0.1-x86-64-osx", urls = ["https://github.com/halide/Halide/releases/download/v15.0.1/Halide-15.0.1-x86-64-osx-4c63f1befa1063184c5982b11b6a2cc17d4e5815.tar.gz"], - build_file = "@//third_party:halide.BUILD", ) http_archive( name = "macos_arm_64_halide", + build_file = "@//third_party:halide.BUILD", sha256 = "db5d20d75fa7463490fcbc79c89f0abec9c23991f787c8e3e831fff411d5395c", strip_prefix = "Halide-15.0.1-arm-64-osx", urls = ["https://github.com/halide/Halide/releases/download/v15.0.1/Halide-15.0.1-arm-64-osx-4c63f1befa1063184c5982b11b6a2cc17d4e5815.tar.gz"], - build_file = "@//third_party:halide.BUILD", ) http_archive( name = "windows_halide", + build_file = "@//third_party:halide.BUILD", sha256 = "61fd049bd75ee918ac6c30d0693aac6048f63f8d1fc4db31001573e58eae8dae", strip_prefix = "Halide-15.0.1-x86-64-windows", urls = ["https://github.com/halide/Halide/releases/download/v15.0.1/Halide-15.0.1-x86-64-windows-4c63f1befa1063184c5982b11b6a2cc17d4e5815.zip"], - build_file = "@//third_party:halide.BUILD", ) http_archive( @@ -764,9 +803,9 @@ http_archive( http_archive( name = "com_github_nlohmann_json", + build_file = "@//third_party:nlohmann.BUILD", sha256 = "6bea5877b1541d353bd77bdfbdb2696333ae5ed8f9e8cc22df657192218cad91", urls = ["https://github.com/nlohmann/json/releases/download/v3.9.1/include.zip"], - build_file = "@//third_party:nlohmann.BUILD", ) http_archive( @@ -775,3 +814,17 @@ http_archive( strip_prefix = "abseil-py-1.4.0", urls = ["https://github.com/abseil/abseil-py/archive/refs/tags/v1.4.0.tar.gz"], ) + +http_archive( + name = "skia", + sha256 = "038d4a21f9c72d71ab49e3a7d7677b39585329465d093a4260b6c73d2f3984d6", + strip_prefix = "skia-ac75382cb971d2f5465b4608a74561ecb68599c5", + urls = ["https://github.com/google/skia/archive/ac75382cb971d2f5465b4608a74561ecb68599c5.zip"], +) + +http_archive( + name = "skia_user_config", + sha256 = "038d4a21f9c72d71ab49e3a7d7677b39585329465d093a4260b6c73d2f3984d6", + strip_prefix = "skia-ac75382cb971d2f5465b4608a74561ecb68599c5/include/config", + urls = ["https://github.com/google/skia/archive/ac75382cb971d2f5465b4608a74561ecb68599c5.zip"], +) diff --git a/docs/framework_concepts/calculators.md b/docs/framework_concepts/calculators.md index 3a3661dd4a..3923a0ada4 100644 --- a/docs/framework_concepts/calculators.md +++ b/docs/framework_concepts/calculators.md @@ -29,7 +29,7 @@ packets and produces zero or more output streams and/or side packets. ## CalculatorBase A calculator is created by defining a new sub-class of the -[`CalculatorBase`](https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_base.cc) +[`CalculatorBase`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_base.cc) class, implementing a number of methods, and registering the new sub-class with Mediapipe. At a minimum, a new calculator must implement the below four methods @@ -65,7 +65,7 @@ Mediapipe. At a minimum, a new calculator must implement the below four methods soon as the graph finishes running. The following are code snippets from -[CalculatorBase.h](https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_base.h). +[CalculatorBase.h](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_base.h). ```c++ class CalculatorBase { @@ -140,7 +140,7 @@ write outputs. 
After Close returns, the calculator is destroyed. Calculators with no inputs are referred to as sources. A source calculator continues to have `Process()` called as long as it returns an `Ok` status. A source calculator indicates that it is exhausted by returning a stop status -(i.e. [`mediaPipe::tool::StatusStop()`](https://github.com/google/mediapipe/tree/master/mediapipe/framework/tool/status_util.cc).). +(i.e. [`mediaPipe::tool::StatusStop()`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/tool/status_util.cc).). ## Identifying inputs and outputs diff --git a/docs/framework_concepts/synchronization.md b/docs/framework_concepts/synchronization.md index 8a0a907c5a..57f0940b5d 100644 --- a/docs/framework_concepts/synchronization.md +++ b/docs/framework_concepts/synchronization.md @@ -174,9 +174,9 @@ This calculator-based approach gives the graph author control of where packets can be dropped, and allows flexibility in adapting and customizing the graph’s behavior depending on resource constraints. -[`CalculatorBase`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_base.h -[`DefaultInputStreamHandler`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/stream_handler/default_input_stream_handler.h -[`SyncSetInputStreamHandler`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc -[`ImmediateInputStreamHandler`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc -[`CalculatorGraphConfig::max_queue_size`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto -[`FlowLimiterCalculator`]: https://github.com/google/mediapipe/tree/master/mediapipe/calculators/core/flow_limiter_calculator.cc +[`CalculatorBase`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_base.h +[`DefaultInputStreamHandler`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/stream_handler/default_input_stream_handler.h +[`SyncSetInputStreamHandler`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc +[`ImmediateInputStreamHandler`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc +[`CalculatorGraphConfig::max_queue_size`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator.proto +[`FlowLimiterCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/core/flow_limiter_calculator.cc diff --git a/docs/getting_started/android.md b/docs/getting_started/android.md index 83fbd1c930..08db85c4e3 100644 --- a/docs/getting_started/android.md +++ b/docs/getting_started/android.md @@ -77,7 +77,7 @@ to build (and install) all MediaPipe Android example apps. `android_binary` build target. 
For instance, for [MediaPipe Hands](../solutions/hands.md) the target is `handtrackinggpu` in the - [BUILD](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD) + [BUILD](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD) file: Note: To reduce the binary size, consider appending `--linkopt="-s"` to the diff --git a/docs/getting_started/android_solutions.md b/docs/getting_started/android_solutions.md index 159d1358df..3477af5425 100644 --- a/docs/getting_started/android_solutions.md +++ b/docs/getting_started/android_solutions.md @@ -53,7 +53,7 @@ following these [instructions](./android_archive_library.md). ## Building solution example apps Detailed usage examples of the Android Solution APIs can be found in the -[source code](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/solutions) +[source code](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/solutions) of the solution example apps. To build these apps: diff --git a/docs/getting_started/faq.md b/docs/getting_started/faq.md index db84fe2b9e..ac223037fe 100644 --- a/docs/getting_started/faq.md +++ b/docs/getting_started/faq.md @@ -135,19 +135,19 @@ MacOS, Android, and iOS. The core of MediaPipe framework is a C++ library conforming to the C++11 standard, so it is relatively easy to port to additional platforms. -[`object_detection_mobile_cpu.pbtxt`]: https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt -[`ImageFrame`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/formats/image_frame.h -[`GpuBuffer`]: https://github.com/google/mediapipe/tree/master/mediapipe/gpu/gpu_buffer.h -[`GpuBufferToImageFrameCalculator`]: https://github.com/google/mediapipe/tree/master/mediapipe/gpu/gpu_buffer_to_image_frame_calculator.cc -[`ImageFrameToGpuBufferCalculator`]: https://github.com/google/mediapipe/tree/master/mediapipe/gpu/image_frame_to_gpu_buffer_calculator.cc -[`AnnotationOverlayCalculator`]: https://github.com/google/mediapipe/tree/master/mediapipe/calculators/util/annotation_overlay_calculator.cc -[`face_detection_mobile_gpu.pbtxt`]: https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt -[`CalculatorBase::Process`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_base.h -[`max_in_flight`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto -[`RoundRobinDemuxCalculator`]: https://github.com/google/mediapipe/tree/master//mediapipe/calculators/core/round_robin_demux_calculator.cc -[`ScaleImageCalculator`]: https://github.com/google/mediapipe/tree/master/mediapipe/calculators/image/scale_image_calculator.cc -[`ImmediateInputStreamHandler`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc -[`CalculatorGraphConfig`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto -[`FlowLimiterCalculator`]: https://github.com/google/mediapipe/tree/master/mediapipe/calculators/core/flow_limiter_calculator.cc -[`PacketClonerCalculator`]: https://github.com/google/mediapipe/tree/master/mediapipe/calculators/core/packet_cloner_calculator.cc -[`MakePairCalculator`]: 
https://github.com/google/mediapipe/tree/master/mediapipe/calculators/core/make_pair_calculator.cc +[`object_detection_mobile_cpu.pbtxt`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt +[`ImageFrame`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/formats/image_frame.h +[`GpuBuffer`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/gpu/gpu_buffer.h +[`GpuBufferToImageFrameCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/gpu/gpu_buffer_to_image_frame_calculator.cc +[`ImageFrameToGpuBufferCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/gpu/image_frame_to_gpu_buffer_calculator.cc +[`AnnotationOverlayCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/util/annotation_overlay_calculator.cc +[`face_detection_mobile_gpu.pbtxt`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt +[`CalculatorBase::Process`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_base.h +[`max_in_flight`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator.proto +[`RoundRobinDemuxCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master//mediapipe/calculators/core/round_robin_demux_calculator.cc +[`ScaleImageCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/image/scale_image_calculator.cc +[`ImmediateInputStreamHandler`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc +[`CalculatorGraphConfig`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator.proto +[`FlowLimiterCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/core/flow_limiter_calculator.cc +[`PacketClonerCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/core/packet_cloner_calculator.cc +[`MakePairCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/core/make_pair_calculator.cc diff --git a/docs/getting_started/hello_world_android.md b/docs/getting_started/hello_world_android.md index 210565665c..75c9392697 100644 --- a/docs/getting_started/hello_world_android.md +++ b/docs/getting_started/hello_world_android.md @@ -763,21 +763,21 @@ feed! Congrats! ![edge_detection_android_gpu_gif](https://mediapipe.dev/images/mobile/edge_detection_android_gpu.gif) If you ran into any issues, please see the full code of the tutorial -[here](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic). +[here](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic). 
[`ApplicationInfo`]:https://developer.android.com/reference/android/content/pm/ApplicationInfo -[`AndroidAssetUtil`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/framework/AndroidAssetUtil.java +[`AndroidAssetUtil`]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/framework/AndroidAssetUtil.java [Bazel]:https://bazel.build/ -[`CameraHelper`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/CameraHelper.java +[`CameraHelper`]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/CameraHelper.java [CameraX]:https://developer.android.com/training/camerax -[`CameraXPreviewHelper`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/CameraXPreviewHelper.java +[`CameraXPreviewHelper`]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/CameraXPreviewHelper.java [developer options]:https://developer.android.com/studio/debug/dev-options -[`edge_detection_mobile_gpu.pbtxt`]:https://github.com/google/mediapipe/tree/master/mediapipe/graphs/edge_detection/edge_detection_mobile_gpu.pbtxt -[`EglManager`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/glutil/EglManager.java -[`ExternalTextureConverter`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/ExternalTextureConverter.java +[`edge_detection_mobile_gpu.pbtxt`]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/edge_detection/edge_detection_mobile_gpu.pbtxt +[`EglManager`]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/glutil/EglManager.java +[`ExternalTextureConverter`]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/ExternalTextureConverter.java [`FrameLayout`]:https://developer.android.com/reference/android/widget/FrameLayout -[`FrameProcessor`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/FrameProcessor.java -[`PermissionHelper`]: https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java +[`FrameProcessor`]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/FrameProcessor.java +[`PermissionHelper`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java [`SurfaceHolder.Callback`]:https://developer.android.com/reference/android/view/SurfaceHolder.Callback.html [`SurfaceView`]:https://developer.android.com/reference/android/view/SurfaceView [`SurfaceView`]:https://developer.android.com/reference/android/view/SurfaceView diff --git a/docs/getting_started/hello_world_cpp.md b/docs/getting_started/hello_world_cpp.md index f0c7ff0f94..98eb242f34 100644 --- a/docs/getting_started/hello_world_cpp.md +++ b/docs/getting_started/hello_world_cpp.md @@ -130,8 +130,8 @@ as the primary developer documentation site for MediaPipe as of April 3, 2023.* } ``` -[`hello world`]: https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/hello_world/hello_world.cc -[`CalculatorGraphConfig`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto -[`PassThroughCalculator`]: 
https://github.com/google/mediapipe/tree/master/mediapipe/calculators/core/pass_through_calculator.cc -[`MakePacket`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/packet.h -[`StartRun`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_graph.h +[`hello world`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/hello_world/hello_world.cc +[`CalculatorGraphConfig`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator.proto +[`PassThroughCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/core/pass_through_calculator.cc +[`MakePacket`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/packet.h +[`StartRun`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_graph.h diff --git a/docs/getting_started/hello_world_ios.md b/docs/getting_started/hello_world_ios.md index 118b9a05b4..bd3dd9220f 100644 --- a/docs/getting_started/hello_world_ios.md +++ b/docs/getting_started/hello_world_ios.md @@ -594,6 +594,6 @@ this tutorial is used in the [common] template app. The [helloworld] app has the appropriate `BUILD` file dependencies for the edge detection graph. [Bazel]:https://bazel.build/ -[`edge_detection_mobile_gpu.pbtxt`]:https://github.com/google/mediapipe/tree/master/mediapipe/graphs/edge_detection/edge_detection_mobile_gpu.pbtxt -[common]:https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/common -[helloworld]:https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/helloworld +[`edge_detection_mobile_gpu.pbtxt`]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/edge_detection/edge_detection_mobile_gpu.pbtxt +[common]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/common +[helloworld]:https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/helloworld diff --git a/docs/getting_started/install.md b/docs/getting_started/install.md index b30284779a..a00b29a573 100644 --- a/docs/getting_started/install.md +++ b/docs/getting_started/install.md @@ -583,7 +583,7 @@ next section. Option 1. Follow [the official Bazel documentation](https://docs.bazel.build/versions/master/install-windows.html) - to install Bazel 6.1.1 or higher. + to install Bazel 6.5.0 or higher. Option 2. Follow the official [Bazel documentation](https://docs.bazel.build/versions/master/install-bazelisk.html) diff --git a/docs/getting_started/python_framework.md b/docs/getting_started/python_framework.md index 60f3878402..fed8d1af94 100644 --- a/docs/getting_started/python_framework.md +++ b/docs/getting_started/python_framework.md @@ -38,10 +38,10 @@ The packet is the basic data flow unit in MediaPipe. A packet consists of a numeric timestamp and a shared pointer to an immutable payload. In Python, a MediaPipe packet can be created by calling one of the packet creator methods in the -[`mp.packet_creator`](https://github.com/google/mediapipe/tree/master/mediapipe/python/pybind/packet_creator.cc) +[`mp.packet_creator`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/python/pybind/packet_creator.cc) module. 
Correspondingly, the packet payload can be retrieved by using one of the packet getter methods in the -[`mp.packet_getter`](https://github.com/google/mediapipe/tree/master/mediapipe/python/pybind/packet_getter.cc) +[`mp.packet_getter`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/python/pybind/packet_getter.cc) module. Note that the packet payload becomes **immutable** after packet creation. Thus, the modification of the retrieved packet content doesn't affect the actual payload in the packet. MediaPipe framework Python API supports the @@ -176,17 +176,17 @@ the Packet API provides a convenience method `packet.at()` to define the numeric timestamp of a packet. More generally, `packet.timestamp` is the packet class property for accessing the underlying timestamp. To convert an Unix epoch to a MediaPipe timestamp, -[the Timestamp API](https://github.com/google/mediapipe/tree/master/mediapipe/python/pybind/timestamp.cc) +[the Timestamp API](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/python/pybind/timestamp.cc) offers a method `mp.Timestamp.from_seconds()` for this purpose. ### ImageFrame ImageFrame is the container for storing an image or a video frame. Formats supported by ImageFrame are listed in -[the ImageFormat enum](https://github.com/google/mediapipe/tree/master/mediapipe/python/pybind/image_frame.cc#l=170). +[the ImageFormat enum](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/python/pybind/image_frame.cc#l=170). Pixels are encoded row-major with interleaved color components, and ImageFrame supports uint8, uint16, and float as its data types. MediaPipe provides -[an ImageFrame Python API](https://github.com/google/mediapipe/tree/master/mediapipe/python/pybind/image_frame.cc) +[an ImageFrame Python API](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/python/pybind/image_frame.cc) to access the ImageFrame C++ class. In Python, the easiest way to retrieve the pixel data is to call `image_frame.numpy_view()` to get a numpy ndarray. Note that the returned numpy ndarray, a reference to the internal pixel data, is @@ -200,7 +200,7 @@ contiguous when it's returned to the Python side. In MediaPipe, all processing takes places within the context of a CalculatorGraph. -[The CalculatorGraph Python API](https://github.com/google/mediapipe/tree/master/mediapipe/python/pybind/calculator_graph.cc) +[The CalculatorGraph Python API](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/python/pybind/calculator_graph.cc) is a direct binding to the C++ CalculatorGraph class. The major difference is the CalculatorGraph Python API raises a Python error instead of returning a non-OK Status when an error occurs. 
Therefore, as a Python user, you can handle
diff --git a/docs/getting_started/troubleshooting.md b/docs/getting_started/troubleshooting.md
index e7dff332cd..ca6f226534 100644
--- a/docs/getting_started/troubleshooting.md
+++ b/docs/getting_started/troubleshooting.md
@@ -17,7 +17,7 @@ nav_order: 10
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
-----
+--------------------------------------------------------------------------------
## Missing Python binary path
@@ -113,9 +113,10 @@ ERROR: Could not find a version that satisfies the requirement mediapipe
ERROR: No matching distribution found for mediapipe
```
-after running `pip install mediapipe` usually indicates that there is no qualified MediaPipe Python for your system.
-Please note that MediaPipe Python PyPI officially supports the **64-bit**
-version of Python 3.7 to 3.10 on the following OS:
+after running `pip install mediapipe` usually indicates that there is no
+qualified MediaPipe Python for your system. Please note that MediaPipe Python
+PyPI officially supports the **64-bit** version of Python 3.7 to 3.10 on the
+following OS:
- x86_64 Linux
- x86_64 macOS 10.15+
@@ -270,15 +271,112 @@ calculators designed specifically for this purpose such as
[`FlowLimiterCalculator`] as described in
[`How to process realtime input streams`].
-[`CalculatorGraphConfig`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto
-[`CalculatorGraphConfig::max_queue_size`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto
-[`CalculatorGraphConfig::report_deadlock`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto
-[`REGISTER_CALCULATOR`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_registry.h
-[`registration.h`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/deps/registration.h
-[`CalculatorGraph::CloseAllPacketSources`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_graph.h
-[`CalculatorGraph::Cancel`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_graph.h
-[`CalculatorGraph::WaitUntilDone`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_graph.h
-[`Timestamp::Done`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/timestamp.h
-[`CalculatorBase::Close`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator_base.h
-[`FlowLimiterCalculator`]: https://github.com/google/mediapipe/tree/master/mediapipe/calculators/core/flow_limiter_calculator.cc
+## Monitor calculator inputs and timestamp settlements
+
+Debugging MediaPipe calculators often requires a deep understanding of the data
+flow and timestamp synchronization. Incoming packets to calculators are first
+buffered in input queues per stream to be synchronized by the assigned
+`InputStreamHandler`. The `InputStreamHandler`'s job is to determine the input
+packet set for a settled timestamp, which puts the calculator into a “ready”
+state, followed by triggering a Calculator::Process call with the determined
+packet set as input.
+
+The `DebugInputStreamHandler` can be used to track incoming packets and
+timestamp settlements in real time in the application's LOG(INFO) output. It can
It can +be assigned to specific calculators via the calculator's input_stream_handler field, or graph-wide via the `CalculatorGraphConfig`'s input_stream_handler field. + +During graph execution, incoming packets generate LOG messages that reveal +the timestamp and type of the packet, followed by the current state of all input +queues: + +``` +[INFO] SomeCalculator: Adding packet (ts:2, type:int) to stream INPUT_B:0:input_b +[INFO] SomeCalculator: INPUT_A:0:input_a num_packets: 0 min_ts: 2 +[INFO] SomeCalculator: INPUT_B:0:input_b num_packets: 1 min_ts: 2 +``` + +In addition, it enables the monitoring of timestamp settlement events (when +the `DefaultInputStreamHandler` is applied). This can help to reveal an +unexpected timestamp bound increase on input streams, resulting in a +Calculator::Process call with an incomplete input set and empty packets +on (potentially required) input streams. + +*Example scenario:* + +``` +node { + calculator: "SomeCalculator" + input_stream: "INPUT_A:a" + input_stream: "INPUT_B:b" + ... +} +``` + +Consider a calculator with two inputs that receives an incoming packet with timestamp +1 on stream A, followed by an input packet with timestamp 2 on stream B. The +timestamp bound increase to 2 on stream B, with a pending input packet on stream A +at timestamp 1, triggers a Calculator::Process call with an incomplete input +set for timestamp 1. In this case, the `DefaultInputStreamHandler` outputs: + +``` +[INFO] SomeCalculator: Filled input set at ts: 1 with MISSING packets in input streams: INPUT_B:0:input_b. +``` + +## VLOG is your friend + +MediaPipe uses `VLOG` in many places to log important events for debugging +purposes, while not affecting performance if logging is not enabled. + +See more about `VLOG` at [abseil `VLOG`]. + +Mind that `VLOG` can be spammy if you enable it globally, e.g. using the `--v` +flag. The solution is the `--vmodule` flag, which allows different levels to be set for +different source files. + +In cases when `--v` / `--vmodule` cannot be used (e.g. running an Android app), +MediaPipe allows you to set `VLOG` `--v` / `--vmodule` flag overrides for debugging +purposes, which are applied when the `CalculatorGraph` is created. + +Overrides: + +- `MEDIAPIPE_VLOG_V`: define it with the value you would pass to `--v` +- `MEDIAPIPE_VLOG_VMODULE`: define it with the value you would pass to + `--vmodule` + +You can set overrides by adding: +`--copt=-DMEDIAPIPE_VLOG_VMODULE=\"*calculator*=5\"` + +with your desired module patterns and `VLOG` levels (see more details for +`--vmodule` at [abseil `VLOG`]) to your build command. + +IMPORTANT: mind that adding the above to your build command will trigger a rebuild +of the whole binary, including dependencies. Since `VLOG` overrides +exist for debugging purposes only, it is often faster to simply modify +[`vlog_overrides.cc`], defining `MEDIAPIPE_VLOG_V`/`MEDIAPIPE_VLOG_VMODULE` at the very top.
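For reference, a minimal sketch of attaching the `DebugInputStreamHandler` to the example node from the scenario above; the calculator and stream names are the illustrative ones from that scenario, and setting the same field at the top level of the `CalculatorGraphConfig` would apply it graph-wide:

```
node {
  calculator: "SomeCalculator"
  input_stream: "INPUT_A:a"
  input_stream: "INPUT_B:b"
  # Logs every incoming packet and timestamp settlement for this node.
  input_stream_handler {
    input_stream_handler: "DebugInputStreamHandler"
  }
}
```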
+ + [`CalculatorGraphConfig`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator.proto +[`CalculatorGraphConfig::max_queue_size`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator.proto +[`CalculatorGraphConfig::report_deadlock`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator.proto +[`REGISTER_CALCULATOR`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_registry.h +[`registration.h`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/deps/registration.h +[`CalculatorGraph::CloseAllPacketSources`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_graph.h +[`CalculatorGraph::Cancel`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_graph.h +[`CalculatorGraph::WaitUntilDone`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_graph.h +[`Timestamp::Done`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/timestamp.h +[`CalculatorBase::Close`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator_base.h +[`FlowLimiterCalculator`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/core/flow_limiter_calculator.cc [`How to process realtime input streams`]: faq.md#how-to-process-realtime-input-streams +[`vlog_overrides.cc`]: https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/vlog_overrides.cc +[abseil `VLOG`]: https://abseil.io/docs/cpp/guides/logging#VLOG + +## Unsupported flags during build + +If you are using Clang 18 or older, you may have to disable some compiler +optimizations in our CPU backend. + +To disable support for `avxvnniint8`, add the following to your `.bazelrc`: + +``` +build --define=xnn_enable_avxvnniint8=false +``` diff --git a/docs/solutions/box_tracking.md b/docs/solutions/box_tracking.md index 537916ac43..480a869605 100644 --- a/docs/solutions/box_tracking.md +++ b/docs/solutions/box_tracking.md @@ -44,13 +44,13 @@ Our solution consists of three main components: a motion analysis component, a flow packager component, and a box tracking component. Each component is encapsulated as a MediaPipe calculator, and the box tracking solution as a whole is represented as a MediaPipe -[subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt). +[subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt). Note: To visualize a graph, copy the graph and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/). In the -[box tracking subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt), +[box tracking subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt), the MotionAnalysis calculator extracts features (e.g. high-gradient corners) across the image, tracks those features over time, classifies them into foreground and background features, and estimates both local motion vectors and @@ -94,25 +94,25 @@ frame (e.g., [MediaPipe Object Detection](./object_detection.md)): *Fig 1.
Box tracking paired with ML-based object detection.* | The object detection and tracking pipeline can be implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt), +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt), which internally utilizes an -[object detection subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/object_detection_gpu.pbtxt), +[object detection subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/object_detection_gpu.pbtxt), an -[object tracking subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/object_tracking_gpu.pbtxt), +[object tracking subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/object_tracking_gpu.pbtxt), and a -[renderer subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/renderer_gpu.pbtxt). +[renderer subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/renderer_gpu.pbtxt). In general, the object detection subgraph (which performs ML model inference internally) runs only upon request, e.g. at an arbitrary frame rate or triggered by specific signals. More specifically, in this particular -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt) +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt) a PacketResampler calculator temporally subsamples the incoming video frames to 0.5 fps before they are passed into the object detection subgraph. This frame rate can be configured differently as an option in PacketResampler. The object tracking subgraph runs in real-time on every incoming frame to track the detected objects. It expands the -[box tracking subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt) +[box tracking subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt) with additional functionality: when new detections arrive it uses IoU (Intersection over Union) to associate the current tracked objects/boxes with new detections to remove obsolete or duplicated boxes. @@ -133,10 +133,10 @@ to visualize its associated subgraphs, please see Note: Object detection is using TensorFlow Lite on GPU while tracking is on CPU. 
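As a sketch of the PacketResampler frame-rate option mentioned above (the stream names here are illustrative; the actual graph defines its own), the resampling node looks roughly like:

```
node {
  calculator: "PacketResamplerCalculator"
  input_stream: "input_video"
  output_stream: "sampled_input_video"
  node_options: {
    [type.googleapis.com/mediapipe.PacketResamplerCalculatorOptions] {
      # Forward frames to the object detection subgraph at 0.5 fps.
      frame_rate: 0.5
    }
  }
}
```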
* Graph: - [`mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt) + [`mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1UXL9jX4Wpp34TsiVogugV3J3T9_C5UK-) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objecttrackinggpu:objecttrackinggpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objecttrackinggpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objecttrackinggpu:objecttrackinggpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objecttrackinggpu/BUILD) * iOS target: Not available ### Desktop @@ -144,9 +144,9 @@ Note: Object detection is using TensorFlow Lite on GPU while tracking is on CPU. * Running on CPU (both for object detection using TensorFlow Lite and tracking): * Graph: - [`mediapipe/graphs/tracking/object_detection_tracking_desktop_live.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/tracking/object_detection_tracking_desktop_live.pbtxt) + [`mediapipe/graphs/tracking/object_detection_tracking_desktop_live.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/tracking/object_detection_tracking_desktop_live.pbtxt) * Target: - [`mediapipe/examples/desktop/object_tracking:object_tracking_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/object_tracking/BUILD) + [`mediapipe/examples/desktop/object_tracking:object_tracking_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/object_tracking/BUILD) * Running on GPU: Not available ## Resources diff --git a/docs/solutions/face_detection.md b/docs/solutions/face_detection.md index 93f239c373..b5764c91c0 100644 --- a/docs/solutions/face_detection.md +++ b/docs/solutions/face_detection.md @@ -249,7 +249,7 @@ camera.start(); Please first follow general [instructions](../getting_started/android_solutions.md) to add MediaPipe Gradle dependencies and try the Android Solution API in the companion -[example Android Studio project](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/solutions/facedetection), +[example Android Studio project](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/solutions/facedetection), and learn more in the usage example below. 
Supported configuration options: @@ -468,12 +468,12 @@ to visualize its associated subgraphs, please see #### GPU Pipeline * Graph: - [`mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt) + [`mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1DZTCy1gp238kkMnu4fUkwI3IrF77Mhy5) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu:facedetectiongpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu:facedetectiongpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu/BUILD) * iOS target: - [`mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/facedetectiongpu/BUILD) + [`mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/facedetectiongpu/BUILD) #### CPU Pipeline @@ -483,30 +483,30 @@ image transfer respectively. As a result, the rest of the graph, which shares the same configuration as the GPU pipeline, runs entirely on CPU. * Graph: - [`mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt) + [`mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1npiZY47jbO5m2YaL63o5QoCQs40JC6C7) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu:facedetectioncpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu:facedetectioncpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu/BUILD) * iOS target: - [`mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/facedetectioncpu/BUILD) + [`mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/facedetectioncpu/BUILD) ### Desktop * Running on CPU: * Graph: - [`mediapipe/graphs/face_detection/face_detection_desktop_live.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_desktop_live.pbtxt) + [`mediapipe/graphs/face_detection/face_detection_desktop_live.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_desktop_live.pbtxt) * Target: - [`mediapipe/examples/desktop/face_detection:face_detection_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/face_detection/BUILD)
+ [`mediapipe/examples/desktop/face_detection:face_detection_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/face_detection/BUILD) * Running on GPU * Graph: - [`mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt) + [`mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt) * Target: - [`mediapipe/examples/desktop/face_detection:face_detection_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/face_detection/BUILD) + [`mediapipe/examples/desktop/face_detection:face_detection_gpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/face_detection/BUILD) ### Coral Please refer to -[these instructions](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md) +[these instructions](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/coral/README.md) to cross-compile and run MediaPipe examples on the [Coral Dev Board](https://coral.ai/products/dev-board). diff --git a/docs/solutions/face_mesh.md b/docs/solutions/face_mesh.md index a859bafaab..7fac8f2c7d 100644 --- a/docs/solutions/face_mesh.md +++ b/docs/solutions/face_mesh.md @@ -68,19 +68,19 @@ employed in our [MediaPipe Hands](./hands.md) solution, which uses a palm detector together with a hand landmark model. The pipeline is implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt) +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt) that uses a -[face landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt) +[face landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt) from the -[face landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark), +[face landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark), and renders using a dedicated -[face renderer subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_mesh/subgraphs/face_renderer_gpu.pbtxt). +[face renderer subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_mesh/subgraphs/face_renderer_gpu.pbtxt). The -[face landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt) +[face landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt) internally uses a -[face_detection_subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt) +[face_detection_subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt) from the -[face detection module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection). +[face detection module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_detection). 
Note: To visualize a graph, copy the graph and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/). For more information on how @@ -149,7 +149,7 @@ enable the full spectrum of augmented reality (AR) features like aligning a virtual 3D object with a detected face. The -[Face Transform module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry) +[Face Transform module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry) moves away from the screen coordinate space towards a metric 3D space and provides necessary primitives to handle a detected face as a regular 3D object. By design, you'll be able to use a perspective camera to project the final 3D @@ -183,7 +183,7 @@ functions: - **Defines metric units**: the scale of the canonical face model defines the metric units of the Metric 3D space. A metric unit used by the - [default canonical face model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model.fbx) + [default canonical face model](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model.fbx) is a centimeter; - **Bridges static and runtime spaces**: the face pose transformation matrix is - in fact - a linear map from the canonical face model into the runtime @@ -209,12 +209,12 @@ the following steps are executed in the given order: triangular topology are inherited from the canonical face model. The transform pipeline is implemented as a MediaPipe -[calculator](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc). +[calculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc). For your convenience, this calculator is bundled together with corresponding metadata into a unified MediaPipe -[subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt). +[subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt). The face transform format is defined as a Protocol Buffer -[message](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/face_geometry.proto). +[message](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/face_geometry.proto). #### Effect Renderer @@ -232,7 +232,7 @@ into the depth buffer. This step helps to create a more believable effect via hiding invisible elements behind the face surface. The effect renderer is implemented as a MediaPipe -[calculator](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/effect_renderer_calculator.cc). +[calculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/effect_renderer_calculator.cc). 
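To make the linear-map role of the face pose transformation matrix (described in the Face Transform module section above) concrete, here is a small numpy sketch; the matrix and vertex values are placeholders, not actual MediaPipe outputs:

```python
import numpy as np

# A few canonical face model vertices (N x 3); placeholder values in cm.
canonical = np.array([[0.0, 0.0, 0.0],
                      [1.0, 0.0, 0.0],
                      [0.0, 1.0, 0.0]])

# 4x4 face pose transformation matrix: identity rotation plus a translation
# placing the face 50 cm in front of the camera (placeholder values).
pose = np.eye(4)
pose[:3, 3] = [0.0, 0.0, -50.0]

# Map canonical-space vertices into the runtime metric 3D space.
homogeneous = np.hstack([canonical, np.ones((len(canonical), 1))])
runtime = (pose @ homogeneous.T).T[:, :3]
```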
| ![face_geometry_renderer.gif](https://mediapipe.dev/images/face_geometry_renderer.gif) | | :---------------------------------------------------------------------: | @@ -498,7 +498,7 @@ camera.start(); Please first follow general [instructions](../getting_started/android_solutions.md) to add MediaPipe Gradle dependencies and try the Android Solution API in the companion -[example Android Studio project](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/solutions/facemesh), +[example Android Studio project](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/solutions/facemesh), and learn more in the usage example below. Supported configuration options: @@ -708,31 +708,31 @@ detection. For visual reference, please refer to *Fig. 2*. #### Mobile * Graph: - [`mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt) + [`mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1pUmd7CXCL_onYMbsZo5p91cH0oNnR4gi) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu:facemeshgpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu:facemeshgpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu/BUILD) * iOS target: [`mediapipe/examples/ios/facemeshgpu:FaceMeshGpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/facemeshgpu/BUILD) Tip: Maximum number of faces to detect/process is set to 1 by default. To change it, for Android modify `NUM_FACES` in -[MainActivity.java](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu/MainActivity.java), +[MainActivity.java](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu/MainActivity.java), and for iOS modify `kNumFaces` in -[FaceMeshGpuViewController.mm](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/facemeshgpu/FaceMeshGpuViewController.mm). +[FaceMeshGpuViewController.mm](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/facemeshgpu/FaceMeshGpuViewController.mm).
#### Desktop * Running on CPU * Graph: - [`mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt) + [`mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt) * Target: - [`mediapipe/examples/desktop/face_mesh:face_mesh_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/face_mesh/BUILD) + [`mediapipe/examples/desktop/face_mesh:face_mesh_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/face_mesh/BUILD) * Running on GPU * Graph: - [`mediapipe/graphs/face_mesh/face_mesh_desktop_live_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_mesh/face_mesh_desktop_live_gpu.pbtxt) + [`mediapipe/graphs/face_mesh/face_mesh_desktop_live_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_mesh/face_mesh_desktop_live_gpu.pbtxt) * Target: - [`mediapipe/examples/desktop/face_mesh:face_mesh_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/face_mesh/BUILD) + [`mediapipe/examples/desktop/face_mesh:face_mesh_gpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/face_mesh/BUILD) Tip: Maximum number of faces to detect/process is set to 1 by default. To change it, in the graph file modify the option of `ConstantSidePacketCalculator`. @@ -746,10 +746,10 @@ only works for a single face. For visual reference, please refer to *Fig. 4*. #### Mobile * Graph: - [`mediapipe/graphs/face_effect/face_effect_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/face_effect/face_effect_gpu.pbtxt) + [`mediapipe/graphs/face_effect/face_effect_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/face_effect/face_effect_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/file/d/1ccnaDnffEuIXriBZr2SK_Eu4FpO7K44s) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/faceeffect`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/faceeffect/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/faceeffect`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/faceeffect/BUILD) * iOS target: [`mediapipe/examples/ios/faceeffect`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/faceeffect/BUILD) @@ -765,9 +765,9 @@ only works for a single face. For visual reference, please refer to *Fig. 4*.
[Real-time Facial Surface Geometry from Monocular Video on Mobile GPUs](https://arxiv.org/abs/1907.06724) ([poster](https://docs.google.com/presentation/d/1-LWwOMO9TzEVdrZ1CS1ndJzciRHfYDJfbSxH_ke_JRg/present?slide=id.g5986dd4b4c_4_212)) * Canonical face model: - [FBX](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model.fbx), - [OBJ](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model.obj), - [UV visualization](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model_uv_visualization.png) + [FBX](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model.fbx), + [OBJ](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model.obj), + [UV visualization](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model_uv_visualization.png) * [Models and model cards](./models.md#face_mesh) * [Web demo](https://code.mediapipe.dev/codepen/face_mesh) * [Python Colab](https://mediapipe.page.link/face_mesh_py_colab) diff --git a/docs/solutions/hair_segmentation.md b/docs/solutions/hair_segmentation.md index feb40f9c00..a7db32d035 100644 --- a/docs/solutions/hair_segmentation.md +++ b/docs/solutions/hair_segmentation.md @@ -43,10 +43,10 @@ to visualize its associated subgraphs, please see ### Mobile * Graph: - [`mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt) + [`mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1mmLtyL8IRfCUbqqu0-E-Hgjr_e6P3XAy) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu:hairsegmentationgpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu:hairsegmentationgpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD) * iOS target: Not available ### Desktop @@ -54,9 +54,9 @@ to visualize its associated subgraphs, please see * Running on CPU: Not available * Running on GPU * Graph: - [`mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt) + [`mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt) * Target: - [`mediapipe/examples/desktop/hair_segmentation:hair_segmentation_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/hair_segmentation/BUILD) + [`mediapipe/examples/desktop/hair_segmentation:hair_segmentation_gpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/hair_segmentation/BUILD) ### Web diff --git a/docs/solutions/hands.md b/docs/solutions/hands.md 
index 6cf2264ed9..45ab776aa5 100644 --- a/docs/solutions/hands.md +++ b/docs/solutions/hands.md @@ -70,21 +70,21 @@ frame, and only when the landmark model could no longer identify hand presence is palm detection invoked to relocalize the hand. The pipeline is implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt) +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt) that uses a -[hand landmark tracking subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt) +[hand landmark tracking subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt) from the -[hand landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark), +[hand landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark), and renders using a dedicated -[hand renderer subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/subgraphs/hand_renderer_gpu.pbtxt). +[hand renderer subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/hand_tracking/subgraphs/hand_renderer_gpu.pbtxt). The -[hand landmark tracking subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt) +[hand landmark tracking subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt) internally uses a -[hand landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_gpu.pbtxt) +[hand landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_gpu.pbtxt) from the same module and a -[palm detection subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_gpu.pbtxt) +[palm detection subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_gpu.pbtxt) from the -[palm detection module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection). +[palm detection module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/palm_detection). Note: To visualize a graph, copy the graph and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/). For more information on how @@ -405,7 +405,7 @@ camera.start(); Please first follow general [instructions](../getting_started/android_solutions.md) to add MediaPipe Gradle dependencies and try the Android Solution API in the companion -[example Android Studio project](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/solutions/hands), +[example Android Studio project](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/solutions/hands), and learn more in the usage example below. 
Supported configuration options: @@ -617,41 +617,41 @@ to visualize its associated subgraphs, please see #### Main Example * Graph: - [`mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt) + [`mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1uCjS0y0O0dTDItsMh8x2cf4-l3uHW1vE) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu:handtrackinggpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu:handtrackinggpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD) * iOS target: - [`mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/handtrackinggpu/BUILD) + [`mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/handtrackinggpu/BUILD) Tip: Maximum number of hands to detect/process is set to 2 by default. To change it, for Android modify `NUM_HANDS` in -[MainActivity.java](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/MainActivity.java), +[MainActivity.java](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/MainActivity.java), and for iOS modify `kNumHands` in -[HandTrackingViewController.mm](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/handtrackinggpu/HandTrackingViewController.mm). +[HandTrackingViewController.mm](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/handtrackinggpu/HandTrackingViewController.mm). 
#### Palm/Hand Detection Only (no landmarks) * Graph: - [`mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt) + [`mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1qUlTtH7Ydg-wl_H6VVL8vueu2UCTu37E) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu:handdetectiongpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu:handdetectiongpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu/BUILD) * iOS target: - [`mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/handdetectiongpu/BUILD) + [`mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/handdetectiongpu/BUILD) ### Desktop * Running on CPU * Graph: - [`mediapipe/graphs/hand_tracking/hand_tracking_desktop_live.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live.pbtxt) + [`mediapipe/graphs/hand_tracking/hand_tracking_desktop_live.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live.pbtxt) * Target: - [`mediapipe/examples/desktop/hand_tracking:hand_tracking_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/hand_tracking/BUILD) + [`mediapipe/examples/desktop/hand_tracking:hand_tracking_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/hand_tracking/BUILD) * Running on GPU * Graph: - [`mediapipe/graphs/hand_tracking/hand_tracking_desktop_live_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_tracking_desktop_gpu.pbtxt) + [`mediapipe/graphs/hand_tracking/hand_tracking_desktop_live_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_tracking_desktop_gpu.pbtxt) * Target: - [`mediapipe/examples/desktop/hand_tracking:hand_tracking_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/hand_tracking/BUILD) + [`mediapipe/examples/desktop/hand_tracking:hand_tracking_gpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/hand_tracking/BUILD) Tip: Maximum number of hands to detect/process is set to 2 by default. To change it, in the graph file modify the option of `ConstantSidePacketCalculator`. diff --git a/docs/solutions/holistic.md b/docs/solutions/holistic.md index 6909096fe0..f192b4811e 100644 --- a/docs/solutions/holistic.md +++ b/docs/solutions/holistic.md @@ -89,21 +89,21 @@ models that play the role of corresponding model's inference time. 
The pipeline is implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt) +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt) that uses a -[holistic landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt) +[holistic landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt) from the -[holistic landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/holistic_landmark) +[holistic landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/holistic_landmark) and renders using a dedicated -[holistic renderer subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_to_render_data.pbtxt). +[holistic renderer subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_to_render_data.pbtxt). The -[holistic landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt) +[holistic landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt) internally uses a -[pose landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark) +[pose landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark) , -[hand landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark) +[hand landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark) and -[face landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/). +[face landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/). Please check them for implementation details. 
Note: To visualize a graph, copy the graph and paste it into @@ -470,10 +470,10 @@ to visualize its associated subgraphs, please see ### Mobile * Graph: - [`mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt) + [`mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/file/d/1o-Trp2GIRitA0OvmZWUQjVMa476xpfgK/view?usp=sharing) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/holistictrackinggpu:holistictrackinggpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/holistictrackinggpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/holistictrackinggpu:holistictrackinggpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/holistictrackinggpu/BUILD) * iOS target: [`mediapipe/examples/ios/holistictrackinggpu:HolisticTrackingGpuApp`](http:/mediapipe/examples/ios/holistictrackinggpu/BUILD) @@ -484,14 +484,14 @@ on how to build MediaPipe examples. * Running on CPU * Graph: - [`mediapipe/graphs/holistic_tracking/holistic_tracking_cpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_cpu.pbtxt) + [`mediapipe/graphs/holistic_tracking/holistic_tracking_cpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_cpu.pbtxt) * Target: - [`mediapipe/examples/desktop/holistic_tracking:holistic_tracking_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/holistic_tracking/BUILD) + [`mediapipe/examples/desktop/holistic_tracking:holistic_tracking_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/holistic_tracking/BUILD) * Running on GPU * Graph: - [`mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt) + [`mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt) * Target: - [`mediapipe/examples/desktop/holistic_tracking:holistic_tracking_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/holistic_tracking/BUILD) + [`mediapipe/examples/desktop/holistic_tracking:holistic_tracking_gpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/holistic_tracking/BUILD) ## Resources diff --git a/docs/solutions/instant_motion_tracking.md b/docs/solutions/instant_motion_tracking.md index 361bc91ff8..21c422a49e 100644 --- a/docs/solutions/instant_motion_tracking.md +++ b/docs/solutions/instant_motion_tracking.md @@ -46,32 +46,32 @@ surfaces, allowing them to seamlessly interact with the real-world environment. 
## Pipeline The Instant Motion Tracking pipeline is implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt), +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt), which internally utilizes a -[RegionTrackingSubgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/subgraphs/region_tracking.pbtxt) +[RegionTrackingSubgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/subgraphs/region_tracking.pbtxt) in order to perform anchor tracking for each individual 3D sticker. We first use a -[StickerManagerCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/calculators/sticker_manager_calculator.cc) +[StickerManagerCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/calculators/sticker_manager_calculator.cc) to prepare the individual sticker data for the rest of the application. This information is then sent to the -[RegionTrackingSubgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/subgraphs/region_tracking.pbtxt) +[RegionTrackingSubgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/subgraphs/region_tracking.pbtxt) that performs 3D region tracking for sticker placement and rendering. Once acquired, our tracked sticker regions are sent with user transformations (i.e. gestures from the user to rotate and zoom the sticker) and IMU data to the -[MatricesManagerCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc), +[MatricesManagerCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc), which turns all our sticker transformation data into a set of model matrices. This data is handled directly by our -[GlAnimationOverlayCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) +[GlAnimationOverlayCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) as an input stream, which will render the provided texture and object file using our matrix specifications. The output of -[GlAnimationOverlayCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) +[GlAnimationOverlayCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) is a video stream depicting the virtual 3D content rendered on top of the real world, creating immersive AR experiences for users. 
## Using Instant Motion Tracking -With the Instant Motion Tracking MediaPipe [graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt), +With the Instant Motion Tracking MediaPipe [graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt), an application can create an interactive and realistic AR experience by specifying the required input streams, side packets, and output streams. The input streams are the following: @@ -80,7 +80,7 @@ The input streams are the following: * Rotation Matrix (9-element Float Array): The 3x3 row-major rotation matrix from the device IMU to determine proper orientation of the device. * Sticker Proto String (String): A string representing the -serialized [sticker buffer protobuf message](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/calculators/sticker_buffer.proto), +serialized [sticker buffer protobuf message](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/calculators/sticker_buffer.proto), containing a list of all stickers and their attributes. * Each sticker in the Protobuffer has a unique ID to find associated anchors and transforms, an initial anchor placement in a normalized [0.0, 1.0] @@ -100,12 +100,12 @@ to provide device-specific information for the rendering system: (this ratio corresponds to the image frames themselves, not necessarily the screen bounds). * Object Asset (String): The - [GlAnimationOverlayCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) + [GlAnimationOverlayCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) must be provided with an associated asset file name pointing to the 3D model to render in the viewfinder. * (Optional) Texture (ImageFrame on Android, GpuBuffer on iOS): Textures for the - [GlAnimationOverlayCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) + [GlAnimationOverlayCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) can be provided either via an input stream (dynamic texturing) or as a side packet (unchanging texture). @@ -121,12 +121,12 @@ and connected camera. Please first see general instructions for [Android](../getting_started/android.md) on how to build MediaPipe examples. 
-* Graph: [mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt) +* Graph: [mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt) * Android target (or download prebuilt [ARM64 APK](https://drive.google.com/file/d/1KnaBBoKpCHR73nOBJ4fL_YdWVTAcwe6L/view?usp=sharing)): -[`mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking:instantmotiontracking`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking/BUILD) +[`mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking:instantmotiontracking`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking/BUILD) -* Assets rendered by the [GlAnimationOverlayCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) must be preprocessed into an OpenGL-ready custom .uuu format. This can be done +* Assets rendered by the [GlAnimationOverlayCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) must be preprocessed into an OpenGL-ready custom .uuu format. This can be done for user assets as follows: > First run > @@ -143,7 +143,7 @@ for user assets as follows: > > Note: ObjParser combines all .obj files found in the given directory into a > single .uuu animation file, using the order given by sorting the filenames alphanumerically. Also the ObjParser directory inputs must be given as -> absolute paths, not relative paths. See parser utility library at [`mediapipe/graphs/object_detection_3d/obj_parser/`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/obj_parser/) for more details. +> absolute paths, not relative paths. See parser utility library at [`mediapipe/graphs/object_detection_3d/obj_parser/`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/obj_parser/) for more details. ## Resources diff --git a/docs/solutions/iris.md b/docs/solutions/iris.md index c0af4342c7..8cd0de5cf7 100644 --- a/docs/solutions/iris.md +++ b/docs/solutions/iris.md @@ -64,23 +64,23 @@ isolate the eye region in the original image for use in the subsequent iris tracking step. 
The pipeline is implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt) +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt) that uses a -[face landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt) +[face landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt) from the -[face landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark), +[face landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark), an -[iris landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_gpu.pbtxt) +[iris landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_gpu.pbtxt) from the -[iris landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark), +[iris landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/iris_landmark), and renders using a dedicated -[iris-and-depth renderer subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/iris_tracking/subgraphs/iris_and_depth_renderer_gpu.pbtxt). +[iris-and-depth renderer subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/iris_tracking/subgraphs/iris_and_depth_renderer_gpu.pbtxt). The -[face landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt) +[face landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt) internally uses a -[face detection subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt) +[face detection subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt) from the -[face detection module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection). +[face detection module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_detection). Note: To visualize a graph, copy the graph and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/). 
For more information on how @@ -142,10 +142,10 @@ to visualize its associated subgraphs, please see ### Mobile * Graph: - [`mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt) + [`mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/file/d/1cywcNtqk764TlZf1lvSTV4F3NGB2aL1R/view?usp=sharing) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu:iristrackinggpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu:iristrackinggpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu/BUILD) * iOS target: [`mediapipe/examples/ios/iristrackinggpu:IrisTrackingGpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/iristrackinggpu/BUILD) @@ -158,14 +158,14 @@ on how to build MediaPipe examples. * Running on CPU * Graph: - [`mediapipe/graphs/iris_tracking/iris_tracking_cpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/iris_tracking/iris_tracking_cpu.pbtxt) + [`mediapipe/graphs/iris_tracking/iris_tracking_cpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/iris_tracking/iris_tracking_cpu.pbtxt) * Target: - [`mediapipe/examples/desktop/iris_tracking:iris_tracking_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/iris_tracking/BUILD) + [`mediapipe/examples/desktop/iris_tracking:iris_tracking_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/iris_tracking/BUILD) * Running on GPU * Graph: - [`mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt) + [`mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt) * Target: - [`mediapipe/examples/desktop/iris_tracking:iris_tracking_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/iris_tracking/BUILD) + [`mediapipe/examples/desktop/iris_tracking:iris_tracking_gpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/iris_tracking/BUILD) #### Video File Input diff --git a/docs/solutions/knift.md b/docs/solutions/knift.md index 19e04cb5e5..68e3f084d9 100644 --- a/docs/solutions/knift.md +++ b/docs/solutions/knift.md @@ -74,7 +74,7 @@ in Google Developers Blog. ### Matching US Dollar Bills In MediaPipe, we've already provided an -[index file](https://github.com/google/mediapipe/tree/master/mediapipe/models/knift_index.pb) +[index file](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/models/knift_index.pb) pre-computed from the 3 template images (of US dollar bills) shown below. If you'd like to use your own template images, see [Matching Your Own Template Images](#matching-your-own-template-images). @@ -90,10 +90,10 @@ to visualize its associated subgraphs, please see [visualizer documentation](../tools/visualizer.md).
* Graph: - [`mediapipe/graphs/template_matching/template_matching_mobile_cpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/template_matching/template_matching_mobile_cpu.pbtxt) + [`mediapipe/graphs/template_matching/template_matching_mobile_cpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/template_matching/template_matching_mobile_cpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1tSWRfes9rAM4NrzmJBplguNQQvaeBZSa) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/templatematchingcpu:templatematchingcpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/templatematchingcpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/templatematchingcpu:templatematchingcpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/templatematchingcpu/BUILD) Note: MediaPipe uses OpenCV 3 by default. However, because of [issues](https://github.com/opencv/opencv/issues/11488) between NDK 17+ and @@ -119,7 +119,7 @@ sed -i -e 's:libopencv_java4:libopencv_java3:g' third_party/opencv_android.BUILD Tip: The example uses the TFLite [XNNPACK delegate](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/delegates/xnnpack) by default for faster inference. Users can change the -[option in TfLiteInferenceCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/calculators/tflite/tflite_inference_calculator.proto) +[option in TfLiteInferenceCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/tflite/tflite_inference_calculator.proto) to run regular TFLite inference. ### Matching Your Own Template Images @@ -142,9 +142,9 @@ to run regular TFLite inference. The output index file includes the extracted KNIFT features. * Step 3: Replace - [mediapipe/models/knift_index.pb](https://github.com/google/mediapipe/tree/master/mediapipe/models/knift_index.pb) + [mediapipe/models/knift_index.pb](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/models/knift_index.pb) with the index file you generated, and update - [mediapipe/models/knift_labelmap.txt](https://github.com/google/mediapipe/tree/master/mediapipe/models/knift_labelmap.txt) + [mediapipe/models/knift_labelmap.txt](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/models/knift_labelmap.txt) with your own template names. * Step 4: Build and run the app using the same instructions in diff --git a/docs/solutions/media_sequence.md b/docs/solutions/media_sequence.md index 5224dd3713..3eaa34d7c6 100644 --- a/docs/solutions/media_sequence.md +++ b/docs/solutions/media_sequence.md @@ -43,7 +43,7 @@ TensorFlow.Examples. The goal of MediaSequence is to simplify working with SequenceExamples and to automate common preparation tasks. Much more information is available about the MediaSequence pipeline, including how to use it to process new data sets, in the documentation of -[MediaSequence](https://github.com/google/mediapipe/tree/master/mediapipe/util/sequence). +[MediaSequence](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/util/sequence). 
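Since the MediaSequence pipeline revolves around annotating `tensorflow::SequenceExample` protos, a small sketch of the C++ metadata API (the same `mpms` namespace used elsewhere in this change) may help; the media id and path below are placeholders:

```c++
#include "mediapipe/util/sequence/media_sequence.h"
#include "tensorflow/core/example/example.pb.h"

namespace mpms = mediapipe::mediasequence;

// Sketch: seed a SequenceExample with clip metadata before running the
// MediaSequence feature-extraction graph. Values are placeholders.
tensorflow::SequenceExample MakeAnnotatedSequence() {
  tensorflow::SequenceExample sequence;
  mpms::SetClipMediaId("my_video_id", &sequence);
  mpms::SetClipDataPath("/path/to/my_video.mp4", &sequence);
  return sequence;
}
```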
## Preparing an example data set @@ -108,7 +108,7 @@ process new data sets, in the documentation of ``` and then you can import the data set in Python using - [read_demo_dataset.py](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/media_sequence/read_demo_dataset.py) + [read_demo_dataset.py](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/media_sequence/read_demo_dataset.py) ## Preparing a practical data set @@ -136,7 +136,7 @@ python -m mediapipe.examples.desktop.media_sequence.charades_dataset \ ## Preparing your own data set The process for preparing your own data set is described in the -[MediaSequence documentation](https://github.com/google/mediapipe/tree/master/mediapipe/util/sequence). +[MediaSequence documentation](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/util/sequence). The Python code for Charades can easily be modified to process most annotations, but the MediaPipe processing warrants further discussion. MediaSequence uses MediaPipe graphs to extract features related to the metadata or previously diff --git a/docs/solutions/models.md b/docs/solutions/models.md index 0af91eb489..a6afe79bc7 100644 --- a/docs/solutions/models.md +++ b/docs/solutions/models.md @@ -25,11 +25,11 @@ We encourage you to check out the new MediaPipe Solutions at: ---- -### [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) +### [Face Detection](https://google-ai-edge.github.io/mediapipe/solutions/face_detection) * Short-range model (best for faces within 2 meters from the camera): [TFLite model](https://storage.googleapis.com/mediapipe-assets/face_detection_short_range.tflite), - [TFLite model quantized for EdgeTPU/Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/models/face-detector-quantized_edgetpu.tflite), + [TFLite model quantized for EdgeTPU/Coral](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/coral/models/face-detector-quantized_edgetpu.tflite), [Model card](https://mediapipe.page.link/blazeface-mc) * Full-range model (dense, best for faces within 5 meters from the camera): [TFLite model](https://storage.googleapis.com/mediapipe-assets/face_detection_full_range.tflite), @@ -49,7 +49,7 @@ sparse model is ~30% faster when executing on CPU via demonstrate comparable latencies. Depending on your application, you may prefer one over the other. -### [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) +### [Face Mesh](https://google-ai-edge.github.io/mediapipe/solutions/face_mesh) * Face landmark model: [TFLite model](https://storage.googleapis.com/mediapipe-assets/face_landmark.tflite), @@ -59,13 +59,13 @@ one over the other. * [Model card](https://mediapipe.page.link/facemesh-mc), [Model card (w/ attention)](https://mediapipe.page.link/attentionmesh-mc) -### [Iris](https://google.github.io/mediapipe/solutions/iris) +### [Iris](https://google-ai-edge.github.io/mediapipe/solutions/iris) * Iris landmark model: [TFLite model](https://storage.googleapis.com/mediapipe-assets/iris_landmark.tflite) * [Model card](https://mediapipe.page.link/iris-mc) -### [Hands](https://google.github.io/mediapipe/solutions/hands) +### [Hands](https://google-ai-edge.github.io/mediapipe/solutions/hands) * Palm detection model: [TFLite model (lite)](https://storage.googleapis.com/mediapipe-assets/palm_detection_lite.tflite), @@ -77,7 +77,7 @@ one over the other. 
[TF.js model](https://tfhub.dev/mediapipe/handskeleton/1) * [Model card](https://mediapipe.page.link/handmc) -### [Pose](https://google.github.io/mediapipe/solutions/pose) +### [Pose](https://google-ai-edge.github.io/mediapipe/solutions/pose) * Pose detection model: [TFLite model](https://storage.googleapis.com/mediapipe-assets/pose_detection.tflite) @@ -87,30 +87,30 @@ one over the other. [TFLite model (heavy)](https://storage.googleapis.com/mediapipe-assets/pose_landmark_heavy.tflite) * [Model card](https://mediapipe.page.link/blazepose-mc) -### [Holistic](https://google.github.io/mediapipe/solutions/holistic) +### [Holistic](https://google-ai-edge.github.io/mediapipe/solutions/holistic) * Hand recrop model: [TFLite model](https://storage.googleapis.com/mediapipe-assets/hand_recrop.tflite) -### [Selfie Segmentation](https://google.github.io/mediapipe/solutions/selfie_segmentation) +### [Selfie Segmentation](https://google-ai-edge.github.io/mediapipe/solutions/selfie_segmentation) * [TFLite model (general)](https://storage.googleapis.com/mediapipe-assets/selfie_segmentation.tflite) * [TFLite model (landscape)](https://storage.googleapis.com/mediapipe-assets/selfie_segmentation_landscape.tflite) * [Model card](https://mediapipe.page.link/selfiesegmentation-mc) -### [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) +### [Hair Segmentation](https://google-ai-edge.github.io/mediapipe/solutions/hair_segmentation) * [TFLite model](https://storage.googleapis.com/mediapipe-assets/hair_segmentation.tflite) * [Model card](https://mediapipe.page.link/hairsegmentation-mc) -### [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) +### [Object Detection](https://google-ai-edge.github.io/mediapipe/solutions/object_detection) * [TFLite model](https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection.tflite) -* [TFLite model quantized for EdgeTPU/Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/models/object-detector-quantized_edgetpu.tflite) +* [TFLite model quantized for EdgeTPU/Coral](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/coral/models/object-detector-quantized_edgetpu.tflite) * [TensorFlow model](https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/archive.zip) * [Model information](https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/README.md) -### [Objectron](https://google.github.io/mediapipe/solutions/objectron) +### [Objectron](https://google-ai-edge.github.io/mediapipe/solutions/objectron) * [TFLite model for shoes](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_sneakers.tflite) * [TFLite model for chairs](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_chair.tflite) @@ -120,7 +120,7 @@ one over the other. 
* [Single-stage TFLite model for chairs](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_chair_1stage.tflite) * [Model card](https://mediapipe.page.link/objectron-mc) -### [KNIFT](https://google.github.io/mediapipe/solutions/knift) +### [KNIFT](https://google-ai-edge.github.io/mediapipe/solutions/knift) * [TFLite model for up to 200 keypoints](https://storage.googleapis.com/mediapipe-assets/knift_float.tflite) * [TFLite model for up to 400 keypoints](https://storage.googleapis.com/mediapipe-assets/knift_float_400.tflite) diff --git a/docs/solutions/object_detection.md b/docs/solutions/object_detection.md index efa2e5266e..59fce0cb9e 100644 --- a/docs/solutions/object_detection.md +++ b/docs/solutions/object_detection.md @@ -45,12 +45,12 @@ how to build MediaPipe examples. #### GPU Pipeline * Graph: - [`mediapipe/graphs/object_detection/object_detection_mobile_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_gpu.pbtxt) + [`mediapipe/graphs/object_detection/object_detection_mobile_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1di2ywCA_acf3y5rIcJHngWHAUNsUHAGz) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectiongpu:objectdetectiongpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectiongpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectiongpu:objectdetectiongpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectiongpu/BUILD) * iOS target: - [`mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/objectdetectiongpu/BUILD) + [`mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/objectdetectiongpu/BUILD) #### CPU Pipeline @@ -60,12 +60,12 @@ image transfer respectively. As a result, the rest of graph, which shares the same configuration as the GPU pipeline, runs entirely on CPU. 
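The GPU/CPU image transfer mentioned above is done with dedicated converter calculators. A sketch of the pattern follows; the converter calculator names exist in MediaPipe, but the stream names and node ordering here are illustrative assumptions, not taken from `object_detection_mobile_cpu.pbtxt`:

```c++
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

// Sketch: wrap a CPU graph with GPU<->CPU converters so the camera's
// GpuBuffer input and the renderer's GpuBuffer output are preserved.
mediapipe::CalculatorGraphConfig CpuPipelineSketch() {
  return mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
    input_stream: "input_video"
    node {
      calculator: "GpuBufferToImageFrameCalculator"
      input_stream: "input_video"
      output_stream: "input_video_cpu"
    }
    # ... detection nodes identical to the GPU pipeline, running on CPU ...
    node {
      calculator: "ImageFrameToGpuBufferCalculator"
      input_stream: "output_video_cpu"
      output_stream: "output_video"
    }
  )pb");
}
```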
*   Graph:
-   [`mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt))
+   [`mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt)
*   Android target:
    [(or download prebuilt ARM64 APK)](https://drive.google.com/open?id=1eRBK6V5Qd1LCRwexitR2OXgrBBXbOfZ5)
-   [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectioncpu:objectdetectioncpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectioncpu/BUILD)
+   [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectioncpu:objectdetectioncpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectioncpu/BUILD)
*   iOS target:
-   [`mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/objectdetectioncpu/BUILD)
+   [`mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/objectdetectioncpu/BUILD)

### Desktop

@@ -75,9 +75,9 @@ Please first see general instructions for [desktop](../getting_started/cpp.md)
on how to build MediaPipe examples.

*   Graph:
-   [`mediapipe/graphs/object_detection/object_detection_desktop_live.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_desktop_live.pbtxt)
+   [`mediapipe/graphs/object_detection/object_detection_desktop_live.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_desktop_live.pbtxt)
*   Target:
-   [`mediapipe/examples/desktop/object_detection:object_detection_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/object_detection/BUILD)
+   [`mediapipe/examples/desktop/object_detection:object_detection_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/object_detection/BUILD)

#### Video File Input

@@ -86,12 +86,12 @@ on how to build MediaPipe examples.

    This uses the same
    [TFLite model](https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection.tflite)
    (see also
-   [model info](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model/README.md))
+   [model info](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/models/object_detection_saved_model/README.md))
    as in [Live Camera Input](#live-camera-input) above.
The pipeline is implemented in this - [graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_desktop_tflite_graph.pbtxt), + [graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_desktop_tflite_graph.pbtxt), which differs from the live-camera-input CPU-based pipeline - [graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt) + [graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt) simply by the additional `OpenCvVideoDecoderCalculator` and `OpenCvVideoEncoderCalculator` at the beginning and the end of the graph respectively. @@ -121,7 +121,7 @@ on how to build MediaPipe examples. ( see also [model info](https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/README.md)), and the pipeline is implemented in this - [graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt). + [graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt). Note: The following runs TensorFlow inference on CPU. If you would like to run inference on GPU (Linux only), please follow @@ -153,7 +153,7 @@ on how to build MediaPipe examples. ### Coral Please refer to -[these instructions](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md) +[these instructions](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/coral/README.md) to cross-compile and run MediaPipe examples on the [Coral Dev Board](https://coral.ai/products/dev-board). diff --git a/docs/solutions/objectron.md b/docs/solutions/objectron.md index 09f8028bcf..03e2e9cbb7 100644 --- a/docs/solutions/objectron.md +++ b/docs/solutions/objectron.md @@ -171,11 +171,11 @@ temporally consistent, reducing the jitter. The Objectron 3D object detection and tracking pipeline is implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/object_occlusion_tracking_1stage.pbtxt), +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/object_occlusion_tracking_1stage.pbtxt), which internally uses a -[detection subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/objectron_detection_1stage_gpu.pbtxt) +[detection subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/objectron/objectron_detection_1stage_gpu.pbtxt) and a -[tracking subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/objectron_tracking_1stage_gpu.pbtxt). +[tracking subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/objectron/objectron_tracking_1stage_gpu.pbtxt). The detection subgraph performs ML inference only once every few frames to reduce computation load, and decodes the output tensor to a FrameAnnotation that contains nine keypoints: the 3D bounding box's center and its eight vertices. 
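The FrameAnnotation layout described above (one box center plus eight vertices, projected into nine 2D keypoints) can be pictured as follows; the index convention is an illustrative assumption, not the decoder's documented ordering:

```c++
#include <array>
#include <utility>

// Sketch of the nine keypoints per detected object: the 3D bounding box's
// center and its eight vertices, each projected into the image plane.
// The index convention below is an illustrative assumption.
struct ProjectedBoxKeypoints {
  // keypoints[0]   -> box center
  // keypoints[1-8] -> the eight box vertices (cuboid corners)
  std::array<std::pair<float, float>, 9> keypoints;
};
```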
@@ -468,10 +468,10 @@ to visualize its associated subgraphs, please see #### Two-stage Objectron * Graph: - [`mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt) + [`mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt) * Android target: - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d:objectdetection3d`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d/BUILD). + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d:objectdetection3d`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d/BUILD). Build for **shoes** (default) with: [(or download prebuilt ARM64 APK)](https://drive.google.com/file/d/1ANW9WDOCb8QO1r8gDC03A4UgrPkICdPP/view?usp=sharing) @@ -506,10 +506,10 @@ to visualize its associated subgraphs, please see #### Single-stage Objectron * Graph: - [`mediapipe/graphs/object_detection_3d/object_occlusion_tracking_1stage.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt) + [`mediapipe/graphs/object_detection_3d/object_occlusion_tracking_1stage.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt) * Android target: - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d:objectdetection3d`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d/BUILD). + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d:objectdetection3d`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d/BUILD). Build with **single-stage** model for **shoes** with: [(or download prebuilt ARM64 APK)](https://drive.google.com/file/d/1MvaEg4dkvKN8jAU1Z2GtudyXi1rQHYsE/view?usp=sharing) @@ -529,7 +529,7 @@ to visualize its associated subgraphs, please see #### Assets -Example app bounding boxes are rendered with [GlAnimationOverlayCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) using a parsing of the sequenced .obj file +Example app bounding boxes are rendered with [GlAnimationOverlayCalculator](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) using a parsing of the sequenced .obj file format into a custom .uuu format. This can be done for user assets as follows: > First run > @@ -546,7 +546,7 @@ Example app bounding boxes are rendered with [GlAnimationOverlayCalculator](http > > Note: ObjParser combines all .obj files found in the given directory into a > single .uuu animation file, using the order given by sorting the filenames alphanumerically. Also the ObjParser directory inputs must be given as -> absolute paths, not relative paths. 
See parser utility library at [`mediapipe/graphs/object_detection_3d/obj_parser/`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/obj_parser/) for more details. +> absolute paths, not relative paths. See parser utility library at [`mediapipe/graphs/object_detection_3d/obj_parser/`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/obj_parser/) for more details. ### Desktop diff --git a/docs/solutions/pose.md b/docs/solutions/pose.md index 09c313b5e6..d7390981f4 100644 --- a/docs/solutions/pose.md +++ b/docs/solutions/pose.md @@ -67,19 +67,19 @@ other frames the pipeline simply derives the ROI from the previous frame’s pos landmarks. The pipeline is implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt) +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt) that uses a -[pose landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt) +[pose landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt) from the -[pose landmark module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark) +[pose landmark module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark) and renders using a dedicated -[pose renderer subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/pose_tracking/subgraphs/pose_renderer_gpu.pbtxt). +[pose renderer subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/pose_tracking/subgraphs/pose_renderer_gpu.pbtxt). The -[pose landmark subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt) +[pose landmark subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt) internally uses a -[pose detection subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection_gpu.pbtxt) +[pose detection subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection_gpu.pbtxt) from the -[pose detection module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_detection). +[pose detection module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_detection). Note: To visualize a graph, copy the graph and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/). 
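All of the graphs referenced in these docs are driven through the same `CalculatorGraph` API. A minimal sketch, assuming a config that declares `input_stream: "input_video"` and `output_stream: "output_video"` and a caller-supplied packet:

```c++
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/status.h"

// Sketch: run a graph on a single input packet and drain its output.
// Stream names are assumptions about the loaded config.
absl::Status RunGraphOnce(const mediapipe::CalculatorGraphConfig& config,
                          mediapipe::Packet frame_packet) {
  mediapipe::CalculatorGraph graph;
  MP_RETURN_IF_ERROR(graph.Initialize(config));
  MP_ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
                      graph.AddOutputStreamPoller("output_video"));
  MP_RETURN_IF_ERROR(graph.StartRun({}));
  MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
      "input_video", frame_packet.At(mediapipe::Timestamp(0))));
  MP_RETURN_IF_ERROR(graph.CloseInputStream("input_video"));
  mediapipe::Packet output;
  while (poller.Next(&output)) {
    // Consume output packets here (e.g. hand them to a renderer).
  }
  return graph.WaitUntilDone();
}
```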
For more information on how @@ -463,10 +463,10 @@ to visualize its associated subgraphs, please see #### Main Example * Graph: - [`mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt) + [`mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/file/d/17GFIrqEJS6W8UHKXlYevTtSCLxN9pWlY/view?usp=sharing) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/posetrackinggpu:posetrackinggpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/posetrackinggpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/posetrackinggpu:posetrackinggpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/posetrackinggpu/BUILD) * iOS target: [`mediapipe/examples/ios/posetrackinggpu:PoseTrackingGpuApp`](http:/mediapipe/examples/ios/posetrackinggpu/BUILD) @@ -479,14 +479,14 @@ on how to build MediaPipe examples. * Running on CPU * Graph: - [`mediapipe/graphs/pose_tracking/pose_tracking_cpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/pose_tracking/pose_tracking_cpu.pbtxt) + [`mediapipe/graphs/pose_tracking/pose_tracking_cpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/pose_tracking/pose_tracking_cpu.pbtxt) * Target: - [`mediapipe/examples/desktop/pose_tracking:pose_tracking_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/pose_tracking/BUILD) + [`mediapipe/examples/desktop/pose_tracking:pose_tracking_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/pose_tracking/BUILD) * Running on GPU * Graph: - [`mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt) + [`mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt) * Target: - [`mediapipe/examples/desktop/pose_tracking:pose_tracking_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/pose_tracking/BUILD) + [`mediapipe/examples/desktop/pose_tracking:pose_tracking_gpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/pose_tracking/BUILD) ## Resources diff --git a/docs/solutions/selfie_segmentation.md b/docs/solutions/selfie_segmentation.md index 17e6fc2525..d9aa52d9dc 100644 --- a/docs/solutions/selfie_segmentation.md +++ b/docs/solutions/selfie_segmentation.md @@ -61,11 +61,11 @@ Please find more detail about the models in the ## ML Pipeline The pipeline is implemented as a MediaPipe -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt) +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt) that uses a -[selfie segmentation subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu.pbtxt) +[selfie segmentation 
subgraph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu.pbtxt) from the -[selfie segmentation module](https://github.com/google/mediapipe/tree/master/mediapipe/modules/selfie_segmentation). +[selfie segmentation module](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/selfie_segmentation). Note: To visualize a graph, copy the graph and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/). For more information on how @@ -266,12 +266,12 @@ to visualize its associated subgraphs, please see ### Mobile * Graph: - [`mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt) + [`mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt) * Android target: [(or download prebuilt ARM64 APK)](https://drive.google.com/file/d/1DoeyGzMmWUsjfVgZfGGecrn7GKzYcEAo/view?usp=sharing) - [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/selfiesegmentationgpu:selfiesegmentationgpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/selfiesegmentationgpu/BUILD) + [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/selfiesegmentationgpu:selfiesegmentationgpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/selfiesegmentationgpu/BUILD) * iOS target: - [`mediapipe/examples/ios/selfiesegmentationgpu:SelfieSegmentationGpuApp`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/selfiesegmentationgpu/BUILD) + [`mediapipe/examples/ios/selfiesegmentationgpu:SelfieSegmentationGpuApp`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/ios/selfiesegmentationgpu/BUILD) ### Desktop @@ -280,14 +280,14 @@ on how to build MediaPipe examples. 
* Running on CPU * Graph: - [`mediapipe/graphs/selfie_segmentation/selfie_segmentation_cpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/selfie_segmentation/selfie_segmentation_cpu.pbtxt) + [`mediapipe/graphs/selfie_segmentation/selfie_segmentation_cpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/selfie_segmentation/selfie_segmentation_cpu.pbtxt) * Target: - [`mediapipe/examples/desktop/selfie_segmentation:selfie_segmentation_cpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/selfie_segmentation/BUILD) + [`mediapipe/examples/desktop/selfie_segmentation:selfie_segmentation_cpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/selfie_segmentation/BUILD) * Running on GPU * Graph: - [`mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt) + [`mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt) * Target: - [`mediapipe/examples/desktop/selfie_segmentation:selfie_segmentation_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/selfie_segmentation/BUILD) + [`mediapipe/examples/desktop/selfie_segmentation:selfie_segmentation_gpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/selfie_segmentation/BUILD) ## Resources diff --git a/docs/solutions/solutions.md b/docs/solutions/solutions.md index 10551b7c90..5fe5f134e7 100644 --- a/docs/solutions/solutions.md +++ b/docs/solutions/solutions.md @@ -33,25 +33,25 @@ and streaming media. 
-[]() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md) +[]() | [Android](https://google-ai-edge.github.io/mediapipe/getting_started/android) | [iOS](https://google-ai-edge.github.io/mediapipe/getting_started/ios) | [C++](https://google-ai-edge.github.io/mediapipe/getting_started/cpp) | [Python](https://google-ai-edge.github.io/mediapipe/getting_started/python) | [JS](https://google-ai-edge.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/coral/README.md) :---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------: -[Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ -[Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ | -[Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | | -[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ | -[Pose](https://google.github.io/mediapipe/solutions/pose) | ✅ | ✅ | ✅ | ✅ | ✅ | -[Holistic](https://google.github.io/mediapipe/solutions/holistic) | ✅ | ✅ | ✅ | ✅ | ✅ | -[Selfie Segmentation](https://google.github.io/mediapipe/solutions/selfie_segmentation) | ✅ | ✅ | ✅ | ✅ | ✅ | -[Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | | | -[Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅ -[Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | | -[Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | | -[Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | ✅ | ✅ | ✅ | -[KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | | -[AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | | -[MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | | -[YouTube 8M](https://google.github.io/mediapipe/solutions/youtube_8m) | | | ✅ | | | +[Face Detection](https://google-ai-edge.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ +[Face Mesh](https://google-ai-edge.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ | +[Iris](https://google-ai-edge.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | | +[Hands](https://google-ai-edge.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ | +[Pose](https://google-ai-edge.github.io/mediapipe/solutions/pose) | ✅ | ✅ | ✅ | ✅ | ✅ | +[Holistic](https://google-ai-edge.github.io/mediapipe/solutions/holistic) | ✅ | ✅ | ✅ | ✅ | ✅ | +[Selfie Segmentation](https://google-ai-edge.github.io/mediapipe/solutions/selfie_segmentation) | ✅ | ✅ | ✅ | ✅ | ✅ 
| +[Hair Segmentation](https://google-ai-edge.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | | | +[Object Detection](https://google-ai-edge.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅ +[Box Tracking](https://google-ai-edge.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | | +[Instant Motion Tracking](https://google-ai-edge.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | | +[Objectron](https://google-ai-edge.github.io/mediapipe/solutions/objectron) | ✅ | | ✅ | ✅ | ✅ | +[KNIFT](https://google-ai-edge.github.io/mediapipe/solutions/knift) | ✅ | | | | | +[AutoFlip](https://google-ai-edge.github.io/mediapipe/solutions/autoflip) | | | ✅ | | | +[MediaSequence](https://google-ai-edge.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | | +[YouTube 8M](https://google-ai-edge.github.io/mediapipe/solutions/youtube_8m) | | | ✅ | | | See also -[MediaPipe Models and Model Cards](https://google.github.io/mediapipe/solutions/models) +[MediaPipe Models and Model Cards](https://google-ai-edge.github.io/mediapipe/solutions/models) for ML models released in MediaPipe. diff --git a/docs/solutions/youtube_8m.md b/docs/solutions/youtube_8m.md index 80fb9d9a62..b3aeb3a6e1 100644 --- a/docs/solutions/youtube_8m.md +++ b/docs/solutions/youtube_8m.md @@ -49,7 +49,7 @@ and to extract features from video and audio respectively. To visualize the -[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/youtube8m/feature_extraction.pbtxt), +[graph](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/youtube8m/feature_extraction.pbtxt), copy the text specification of the graph and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/). The feature extraction pipeline is highly customizable. You are welcome to add new calculators or use @@ -130,9 +130,9 @@ videos. MediaPipe can help you do model inference for YouTube-8M Challenge with both local videos and the YouTube-8M dataset. To visualize -[the graph for local videos](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/youtube8m/local_video_model_inference.pbtxt) +[the graph for local videos](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/youtube8m/local_video_model_inference.pbtxt) and -[the graph for the YouTube-8M dataset](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/youtube8m/yt8m_dataset_model_inference.pbtxt), +[the graph for the YouTube-8M dataset](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/graphs/youtube8m/yt8m_dataset_model_inference.pbtxt), copy the text specification of the graph and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/). We use the baseline model [(model card)](https://drive.google.com/file/d/1xTCi9-Nm9dt2KIk8WR0dDFrIssWawyXy/view) @@ -171,7 +171,7 @@ the inference for both local videos and the dataset [Here](https://drive.google.com/file/d/19GSvdAAuAlACpBhHOaqMWZ_9p8bLUYKh/view?usp=sharing) is a demo video showing the steps to use this web application. Also please read - [youtube8m/README.md](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/youtube8m/README.md) + [youtube8m/README.md](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/examples/desktop/youtube8m/README.md) if you prefer to run the underlying model_inference binary in command line. 
### Steps to run the YouTube-8M model inference graph with a local video

diff --git a/docs/tools/tracing_and_profiling.md b/docs/tools/tracing_and_profiling.md
index 0ed6f57ab5..ce82c4af50 100644
--- a/docs/tools/tracing_and_profiling.md
+++ b/docs/tools/tracing_and_profiling.md
@@ -39,7 +39,7 @@ option: `--define MEDIAPIPE_PROFILING=0`. For other platforms, you can use the
bazel command line option `--define MEDIAPIPE_PROFILING=1` to link it.

To enable tracing and profiling, the `CalculatorGraphConfig` (in
-[calculator.proto](https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto))
+[calculator.proto](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/framework/calculator.proto))
representing the graph must have a `profiler_config` message at its root. Here
is a simple setup that turns on tracing and keeps 100 seconds of timing events:

diff --git a/mediapipe/calculators/audio/BUILD b/mediapipe/calculators/audio/BUILD
index 12223f609a..d1c69a2ce6 100644
--- a/mediapipe/calculators/audio/BUILD
+++ b/mediapipe/calculators/audio/BUILD
@@ -14,6 +14,7 @@
 load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library", "mediapipe_proto_library")
 # Placeholder: load py_proto_library
+# Placeholder: load proto_library

 licenses(["notice"])

diff --git a/mediapipe/calculators/core/packet_resampler_calculator.cc b/mediapipe/calculators/core/packet_resampler_calculator.cc
index 81a68f03f9..06bfa0b4d1 100644
--- a/mediapipe/calculators/core/packet_resampler_calculator.cc
+++ b/mediapipe/calculators/core/packet_resampler_calculator.cc
@@ -14,10 +14,12 @@

 #include "mediapipe/calculators/core/packet_resampler_calculator.h"

+#include <algorithm>
 #include

 #include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
+#include "mediapipe/framework/port/ret_check.h"

 namespace {
 // Reflect an integer against the lower and upper bound of an interval.
@@ -76,6 +78,8 @@ absl::Status PacketResamplerCalculator::GetContract(CalculatorContract* cc) {
   }
   cc->Outputs().Get(output_data_id).SetSameAs(&cc->Inputs().Get(input_data_id));
   if (cc->Outputs().HasTag(kVideoHeaderTag)) {
+    RET_CHECK(resampler_options.max_frame_rate() <= 0)
+        << "VideoHeader output is not supported with max_frame_rate.";
     cc->Outputs().Tag(kVideoHeaderTag).Set<VideoHeader>();
   }
@@ -88,24 +92,13 @@ absl::Status PacketResamplerCalculator::GetContract(CalculatorContract* cc) {
   return absl::OkStatus();
 }

-absl::Status PacketResamplerCalculator::Open(CalculatorContext* cc) {
-  const auto resampler_options =
-      tool::RetrieveOptions(cc->Options<PacketResamplerCalculatorOptions>(),
-                            cc->InputSidePackets(), "OPTIONS");
-
-  flush_last_packet_ = resampler_options.flush_last_packet();
-  jitter_ = resampler_options.jitter();
-
-  input_data_id_ = cc->Inputs().GetId("DATA", 0);
-  if (!input_data_id_.IsValid()) {
-    input_data_id_ = cc->Inputs().GetId("", 0);
+absl::Status PacketResamplerCalculator::UpdateFrameRate(
+    const PacketResamplerCalculatorOptions& resampler_options,
+    double frame_rate) {
+  frame_rate_ = frame_rate;
+  if (resampler_options.max_frame_rate() > 0) {
+    frame_rate_ = std::min(frame_rate_, resampler_options.max_frame_rate());
   }
-  output_data_id_ = cc->Outputs().GetId("DATA", 0);
-  if (!output_data_id_.IsValid()) {
-    output_data_id_ = cc->Outputs().GetId("", 0);
-  }
-
-  frame_rate_ = resampler_options.frame_rate();
   start_time_ = resampler_options.has_start_time()
                     ? Timestamp(resampler_options.start_time())
                     : Timestamp::Min();
@@ -125,6 +118,28 @@ absl::Status PacketResamplerCalculator::Open(CalculatorContext* cc) {
   RET_CHECK_LE(jitter_usec_, frame_time_usec_);

   video_header_.frame_rate = frame_rate_;
+  return absl::OkStatus();
+}
+
+absl::Status PacketResamplerCalculator::Open(CalculatorContext* cc) {
+  const auto resampler_options =
+      tool::RetrieveOptions(cc->Options<PacketResamplerCalculatorOptions>(),
+                            cc->InputSidePackets(), "OPTIONS");
+
+  flush_last_packet_ = resampler_options.flush_last_packet();
+  jitter_ = resampler_options.jitter();
+
+  input_data_id_ = cc->Inputs().GetId("DATA", 0);
+  if (!input_data_id_.IsValid()) {
+    input_data_id_ = cc->Inputs().GetId("", 0);
+  }
+  output_data_id_ = cc->Outputs().GetId("DATA", 0);
+  if (!output_data_id_.IsValid()) {
+    output_data_id_ = cc->Outputs().GetId("", 0);
+  }
+
+  RET_CHECK_OK(
+      UpdateFrameRate(resampler_options, resampler_options.frame_rate()));

   if (resampler_options.output_header() !=
           PacketResamplerCalculatorOptions::NONE &&
@@ -150,10 +165,18 @@ absl::Status PacketResamplerCalculator::Open(CalculatorContext* cc) {
 }

 absl::Status PacketResamplerCalculator::Process(CalculatorContext* cc) {
+  const auto resampler_options =
+      tool::RetrieveOptions(cc->Options<PacketResamplerCalculatorOptions>(),
+                            cc->InputSidePackets(), "OPTIONS");
+
   if (cc->InputTimestamp() == Timestamp::PreStream() &&
       cc->Inputs().UsesTags() && cc->Inputs().HasTag(kVideoHeaderTag) &&
       !cc->Inputs().Tag(kVideoHeaderTag).IsEmpty()) {
     video_header_ = cc->Inputs().Tag(kVideoHeaderTag).Get<VideoHeader>();
+    if (resampler_options.use_input_frame_rate()) {
+      RET_CHECK_OK(
+          UpdateFrameRate(resampler_options, video_header_.frame_rate));
+    }
     video_header_.frame_rate = frame_rate_;
     if (cc->Inputs().Get(input_data_id_).IsEmpty()) {
       return absl::OkStatus();

diff --git a/mediapipe/calculators/core/packet_resampler_calculator.h b/mediapipe/calculators/core/packet_resampler_calculator.h
index 1cf425b5e5..5635d1fb81 100644
--- a/mediapipe/calculators/core/packet_resampler_calculator.h
+++ b/mediapipe/calculators/core/packet_resampler_calculator.h
@@ -146,6 +146,14 @@ class PacketResamplerCalculator : public CalculatorBase {
       const mediapipe::PacketResamplerCalculatorOptions& options);

  private:
+  // Updates the calculator's frame rate.
+  //
+  // The new frame rate applies from this point forward; packets that have
+  // already been processed are unaffected.
+  absl::Status UpdateFrameRate(
+      const mediapipe::PacketResamplerCalculatorOptions& resampler_options,
+      double frame_rate);
+
   std::unique_ptr strategy_;

   // The timestamp of the first packet received.

diff --git a/mediapipe/calculators/core/packet_resampler_calculator.proto b/mediapipe/calculators/core/packet_resampler_calculator.proto
index 29ca8082a0..97f717adc7 100644
--- a/mediapipe/calculators/core/packet_resampler_calculator.proto
+++ b/mediapipe/calculators/core/packet_resampler_calculator.proto
@@ -108,4 +108,17 @@ message PacketResamplerCalculatorOptions {
   // are included in the output, even if the nearest timestamp is not
   // between start_time and end_time.
   optional bool round_limits = 8 [default = false];
+
+  // If set, the output frame rate is the same as the input frame rate.
+  // The frame rate of the input images must be provided in the video
+  // header (input stream or input side packet).
+  // When combined with max_frame_rate, the input frame rate is used
+  // unless it is higher than max_frame_rate.
+  optional bool use_input_frame_rate = 11 [default = false];
+
+  // If set, limits the output frame rate to this value. The frame rate of
+  // the input images must be provided in the video header (input stream or
+  // input side packet).
+  optional double max_frame_rate = 12 [default = -1.0];
 }

diff --git a/mediapipe/calculators/core/packet_resampler_calculator_test.cc b/mediapipe/calculators/core/packet_resampler_calculator_test.cc
index d80793da4a..ab74fa5463 100644
--- a/mediapipe/calculators/core/packet_resampler_calculator_test.cc
+++ b/mediapipe/calculators/core/packet_resampler_calculator_test.cc
@@ -14,6 +14,7 @@

 #include "mediapipe/calculators/core/packet_resampler_calculator.h"

+#include <cstdint>
 #include
 #include <string>
 #include <vector>

@@ -271,6 +272,150 @@ TEST(PacketResamplerCalculatorTest, TwoPacketsInStream) {
   }
 }

+TEST(PacketResamplerCalculatorTest, UseInputFrameRate_HeaderHasSameFramerate) {
+  CalculatorRunner runner(ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
+    calculator: "PacketResamplerCalculator"
+    input_stream: "DATA:in_data"
+    input_stream: "VIDEO_HEADER:in_video_header"
+    output_stream: "DATA:out_data"
+    options {
+      [mediapipe.PacketResamplerCalculatorOptions.ext] {
+        use_input_frame_rate: true
+        frame_rate: 1000.0
+      }
+    }
+  )pb"));
+
+  for (const int64_t ts : {0, 5000, 10010, 15001, 19990}) {
+    runner.MutableInputs()->Tag(kDataTag).packets.push_back(
+        Adopt(new std::string(absl::StrCat("Frame #", ts))).At(Timestamp(ts)));
+  }
+  VideoHeader video_header_in;
+  video_header_in.width = 10;
+  video_header_in.height = 100;
+  video_header_in.frame_rate = 200.0;
+  video_header_in.duration = 1.0;
+  video_header_in.format = ImageFormat::SRGB;
+  runner.MutableInputs()
+      ->Tag(kVideoHeaderTag)
+      .packets.push_back(
+          Adopt(new VideoHeader(video_header_in)).At(Timestamp::PreStream()));
+  MP_ASSERT_OK(runner.Run());
+
+  std::vector<int64_t> expected_frames = {0, 5000, 10010, 15001, 19990};
+  std::vector<int64_t> expected_timestamps = {0, 5000, 10000, 15000, 20000};
+  EXPECT_EQ(expected_frames.size(),
+            runner.Outputs().Tag(kDataTag).packets.size());
+  EXPECT_EQ(expected_timestamps.size(),
+            runner.Outputs().Tag(kDataTag).packets.size());
+
+  int count = 0;
+  for (const Packet& packet : runner.Outputs().Tag(kDataTag).packets) {
+    EXPECT_EQ(Timestamp(expected_timestamps[count]), packet.Timestamp());
+    const std::string& packet_contents = packet.Get<std::string>();
+    EXPECT_EQ(std::string(absl::StrCat("Frame #", expected_frames[count])),
+              packet_contents);
+    ++count;
+  }
+}
+
+TEST(PacketResamplerCalculatorTest,
+     UseInputFrameRate_HeaderHasSmallerFramerate) {
+  CalculatorRunner runner(ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
+    calculator: "PacketResamplerCalculator"
+    input_stream: "DATA:in_data"
+    input_stream: "VIDEO_HEADER:in_video_header"
+    output_stream: "DATA:out_data"
+    options {
+      [mediapipe.PacketResamplerCalculatorOptions.ext] {
+        use_input_frame_rate: true
+        frame_rate: 1000.0
+      }
+    }
+  )pb"));
+
+  for (const int64_t ts : {0, 5000, 10010, 15001}) {
+    runner.MutableInputs()->Tag(kDataTag).packets.push_back(
+        Adopt(new std::string(absl::StrCat("Frame #", ts))).At(Timestamp(ts)));
+  }
+  VideoHeader video_header_in;
+  video_header_in.width = 10;
+  video_header_in.height = 100;
+  video_header_in.frame_rate = 100.0;
+  video_header_in.duration = 1.0;
+  video_header_in.format = ImageFormat::SRGB;
+  runner.MutableInputs()
+      ->Tag(kVideoHeaderTag)
+      .packets.push_back(
+          Adopt(new VideoHeader(video_header_in)).At(Timestamp::PreStream()));
+  MP_ASSERT_OK(runner.Run());
+
+  std::vector<int64_t> expected_frames = {0, 10010, 15001};
+  std::vector<int64_t> expected_timestamps = {0, 10000, 20000};
+  EXPECT_EQ(expected_frames.size(),
+            runner.Outputs().Tag(kDataTag).packets.size());
+  EXPECT_EQ(expected_timestamps.size(),
+            runner.Outputs().Tag(kDataTag).packets.size());
+
+  int count = 0;
+  for (const Packet& packet : runner.Outputs().Tag(kDataTag).packets) {
+    EXPECT_EQ(Timestamp(expected_timestamps[count]), packet.Timestamp());
+    const std::string& packet_contents = packet.Get<std::string>();
+    EXPECT_EQ(std::string(absl::StrCat("Frame #", expected_frames[count])),
+              packet_contents);
+    ++count;
+  }
+}
+
+TEST(PacketResamplerCalculatorTest,
+     UseInputFrameRate_MaxFrameRateSmallerThanInput) {
+  CalculatorRunner runner(ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
+    calculator: "PacketResamplerCalculator"
+    input_stream: "DATA:in_data"
+    input_stream: "VIDEO_HEADER:in_video_header"
+    output_stream: "DATA:out_data"
+    options {
+      [mediapipe.PacketResamplerCalculatorOptions.ext] {
+        use_input_frame_rate: true
+        frame_rate: 1000.0
+        max_frame_rate: 50.0
+      }
+    }
+  )pb"));
+
+  for (const int64_t ts : {0, 5000, 10010, 15001, 20010}) {
+    runner.MutableInputs()->Tag(kDataTag).packets.push_back(
+        Adopt(new std::string(absl::StrCat("Frame #", ts))).At(Timestamp(ts)));
+  }
+  VideoHeader video_header_in;
+  video_header_in.width = 10;
+  video_header_in.height = 200;
+  video_header_in.frame_rate = 100.0;
+  video_header_in.duration = 1.0;
+  video_header_in.format = ImageFormat::SRGB;
+  runner.MutableInputs()
+      ->Tag(kVideoHeaderTag)
+      .packets.push_back(
+          Adopt(new VideoHeader(video_header_in)).At(Timestamp::PreStream()));
+  MP_ASSERT_OK(runner.Run());
+
+  std::vector<int64_t> expected_frames = {0, 20010};
+  std::vector<int64_t> expected_timestamps = {0, 20000};
+  EXPECT_EQ(expected_frames.size(),
+            runner.Outputs().Tag(kDataTag).packets.size());
+  EXPECT_EQ(expected_timestamps.size(),
+            runner.Outputs().Tag(kDataTag).packets.size());
+
+  int count = 0;
+  for (const Packet& packet : runner.Outputs().Tag(kDataTag).packets) {
+    EXPECT_EQ(Timestamp(expected_timestamps[count]), packet.Timestamp());
+    const std::string& packet_contents = packet.Get<std::string>();
+    EXPECT_EQ(std::string(absl::StrCat("Frame #", expected_frames[count])),
+              packet_contents);
+    ++count;
+  }
+}
+
 TEST(PacketResamplerCalculatorTest, InputAtExactFrequencyMiddlepoints) {
   SimpleRunner runner(
       "[mediapipe.PacketResamplerCalculatorOptions.ext]: "

diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD
index a6717c4319..c336c13d98 100644
--- a/mediapipe/calculators/tensor/BUILD
+++ b/mediapipe/calculators/tensor/BUILD
@@ -454,6 +454,7 @@ cc_library_with_tflite(
         "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
         "//mediapipe/framework/tool:subgraph_expansion",
         "//mediapipe/util:cpu_util",
+        "@com_google_absl//absl/log:absl_check",
         "@com_google_absl//absl/memory",
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/status:statusor",
@@ -632,6 +633,7 @@ cc_library(
         ":inference_io_mapper",
         ":inference_on_disk_cache_helper",
         ":tensor_span",
+        "//mediapipe/calculators/tensor:inference_runner",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework:mediapipe_profiling",
         "//mediapipe/framework/api2:packet",

diff --git a/mediapipe/calculators/tensor/inference_calculator.cc b/mediapipe/calculators/tensor/inference_calculator.cc
index bf60ea8e65..a29e0fb7ec 100644
--- a/mediapipe/calculators/tensor/inference_calculator.cc
+++ b/mediapipe/calculators/tensor/inference_calculator.cc
@@ -19,6 +19,7 @@
 #include
 #include

+#include "absl/log/absl_check.h"
 #include "absl/status/status.h"
 #include "absl/status/statusor.h"
"absl/strings/str_cat.h" @@ -118,8 +119,16 @@ absl::StatusOr> InferenceCalculator::GetModelAsPacket( CalculatorContext* cc) { const auto& options = cc->Options(); if (!options.model_path().empty()) { - return TfLiteModelLoader::LoadFromPath( - cc->GetResources(), options.model_path(), options.try_mmap_model()); + MP_ASSIGN_OR_RETURN( + auto model, TfLiteModelLoader::LoadFromPath(cc->GetResources(), + options.model_path(), + options.try_mmap_model())); + ABSL_CHECK(!model.IsEmpty()); + VLOG(1) << absl::StrFormat( + "GetModelAsPacket successfully loaded model " + "(path: %s, size: %ld bytes)", + options.model_path(), model.Get()->allocation()->bytes()); + return model; } if (!kSideInModel(cc).IsEmpty()) return kSideInModel(cc); return absl::Status(absl::StatusCode::kNotFound, diff --git a/mediapipe/calculators/tensor/inference_calculator.proto b/mediapipe/calculators/tensor/inference_calculator.proto index 2ce82a6a0c..0be14b7f58 100644 --- a/mediapipe/calculators/tensor/inference_calculator.proto +++ b/mediapipe/calculators/tensor/inference_calculator.proto @@ -283,7 +283,7 @@ message InferenceCalculatorOptions { TensorNamesMap input_tensor_names_map = 3; } - // Maps the output tensors of the TfLite model to the the output tensors of + // Maps the output tensors of the TfLite model to the output tensors of // the InferenceCalculator. Values are model I/O tensor indices or tensor // names. oneof OutputTensorMap { diff --git a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc index 8661e742c4..07b23af0c1 100644 --- a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc +++ b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc @@ -25,6 +25,7 @@ #include "mediapipe/calculators/tensor/inference_calculator.h" #include "mediapipe/calculators/tensor/inference_io_mapper.h" #include "mediapipe/calculators/tensor/inference_on_disk_cache_helper.h" +#include "mediapipe/calculators/tensor/inference_runner.h" #include "mediapipe/calculators/tensor/tensor_span.h" #include "mediapipe/framework/api2/packet.h" #include "mediapipe/framework/calculator_framework.h" @@ -63,17 +64,17 @@ class InferenceCalculatorGlAdvancedImpl private: // Helper class that wraps everything related to GPU inference acceleration. 
-  class GpuInferenceRunner {
+  class GpuInferenceRunner : public InferenceRunner {
   public:
    ~GpuInferenceRunner();

    absl::Status Init(CalculatorContext* cc,
                      std::shared_ptr<GlContext> gl_context);

-    absl::StatusOr<std::vector<Tensor>> Process(
-        CalculatorContext* cc, const TensorSpan& input_tensors);
+    absl::StatusOr<std::vector<Tensor>> Run(
+        CalculatorContext* cc, const TensorSpan& input_tensors) override;

-    const InputOutputTensorNames& GetInputOutputTensorNames() const;
+    const InputOutputTensorNames& GetInputOutputTensorNames() const override;

   private:
    absl::Status InitTFLiteGPURunner(
@@ -99,7 +100,7 @@ class InferenceCalculatorGlAdvancedImpl
  absl::StatusOr<std::unique_ptr<InferenceRunner>> CreateInferenceRunner(
      CalculatorContext* cc);

-  std::unique_ptr<GpuInferenceRunner> gpu_inference_runner_;
+  std::unique_ptr<InferenceRunner> inference_runner_;
  mediapipe::GlCalculatorHelper gpu_helper_;
};
@@ -141,7 +142,7 @@ absl::Status InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Init(
}

absl::StatusOr<std::vector<Tensor>>
-InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Process(
+InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Run(
    CalculatorContext* cc, const TensorSpan& input_tensors) {
  std::vector<Tensor> output_tensors;
  for (int i = 0; i < input_tensors.size(); ++i) {
@@ -267,11 +268,10 @@ absl::Status InferenceCalculatorGlAdvancedImpl::UpdateContract(

absl::Status InferenceCalculatorGlAdvancedImpl::Open(CalculatorContext* cc) {
  MP_RETURN_IF_ERROR(gpu_helper_.Open(cc));
-  gpu_inference_runner_ = std::make_unique<GpuInferenceRunner>();
-  MP_RETURN_IF_ERROR(
-      gpu_inference_runner_->Init(cc, gpu_helper_.GetSharedGlContext()));
+
+  MP_ASSIGN_OR_RETURN(inference_runner_, CreateInferenceRunner(cc));
  return InferenceCalculatorNodeImpl::UpdateIoMapping(
-      cc, gpu_inference_runner_->GetInputOutputTensorNames());
+      cc, inference_runner_->GetInputOutputTensorNames());
}

absl::StatusOr<std::vector<Tensor>> InferenceCalculatorGlAdvancedImpl::Process(
@@ -279,14 +279,14 @@ absl::StatusOr<std::vector<Tensor>> InferenceCalculatorGlAdvancedImpl::Process(
  std::vector<Tensor> output_tensors;
  MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext([&]() -> absl::Status {
    MP_ASSIGN_OR_RETURN(output_tensors,
-                        gpu_inference_runner_->Process(cc, tensor_span));
+                        inference_runner_->Run(cc, tensor_span));
    return absl::OkStatus();
  }));
  return output_tensors;
}

absl::Status InferenceCalculatorGlAdvancedImpl::Close(CalculatorContext* cc) {
-  gpu_inference_runner_.reset();
+  inference_runner_.reset();
  return absl::OkStatus();
}

diff --git a/mediapipe/calculators/tensor/inference_calculator_utils.cc b/mediapipe/calculators/tensor/inference_calculator_utils.cc
index 829bc676e5..8d7761f6ab 100644
--- a/mediapipe/calculators/tensor/inference_calculator_utils.cc
+++ b/mediapipe/calculators/tensor/inference_calculator_utils.cc
@@ -240,7 +240,11 @@ absl::Status CopyCpuInputIntoTfLiteTensor(const Tensor& input_tensor,
  const Tensor::ElementType input_tensor_type = input_tensor.element_type();
  RET_CHECK(input_tensor_type == interpreter_tensor_type)
          .SetCode(absl::StatusCode::kInvalidArgument)
-      << "Input and interpreter tensor type do not match.";
+      << absl::StrFormat(
+             "Input and interpreter tensor type do not match: Input tensor "
+             "type %s vs interpreter tensor type %s.",
+             GetTensorTypeString(input_tensor_type),
+             TfLiteTypeGetName(interpreter_tensor_type));
  switch (interpreter_tensor_type) {
    case TfLiteType::kTfLiteFloat16:
    case TfLiteType::kTfLiteFloat32: {

diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD
index 5b8c48ce7e..80a4d89eef 100644
--- a/mediapipe/calculators/tensorflow/BUILD
+++ b/mediapipe/calculators/tensorflow/BUILD
@@ -15,6 +15,7 @@
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library", "mediapipe_proto_library") # Placeholder: load py_proto_library +# Placeholder: load proto_library licenses(["notice"]) diff --git a/mediapipe/calculators/tensorflow/lapped_tensor_buffer_calculator.cc b/mediapipe/calculators/tensorflow/lapped_tensor_buffer_calculator.cc index a07b95ccc6..58bc60aeb3 100644 --- a/mediapipe/calculators/tensorflow/lapped_tensor_buffer_calculator.cc +++ b/mediapipe/calculators/tensorflow/lapped_tensor_buffer_calculator.cc @@ -236,7 +236,7 @@ absl::Status LappedTensorBufferCalculator::AddBatchDimension( absl::Status LappedTensorBufferCalculator::ProcessBuffer( CalculatorContext* cc) { auto concatenated = ::absl::make_unique(); - const tf::Status concat_status = tf::tensor::Concat( + const absl::Status concat_status = tf::tensor::Concat( std::vector(buffer_->begin(), buffer_->end()), concatenated.get()); RET_CHECK(concat_status.ok()) << concat_status.ToString(); diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc index 6e27542775..c373cd2236 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc @@ -13,6 +13,7 @@ // limitations under the License. #include +#include #include #include #include @@ -104,6 +105,8 @@ namespace mpms = mediapipe::mediasequence; // } // } namespace { +constexpr int kMaxProtoBytes = std::numeric_limits::max(); + uint8_t ConvertFloatToByte(const float float_value) { float clamped_value = std::clamp(0.0f, 1.0f, float_value); return static_cast(clamped_value * 255.0 + .5f); @@ -360,14 +363,21 @@ class PackMediaSequenceCalculator : public CalculatorBase { } } - absl::Status VerifySize() { - const int64_t MAX_PROTO_BYTES = 1073741823; + absl::Status VerifySize(const PackMediaSequenceCalculatorOptions& options) { + if (!options.skip_large_sequences()) { + return absl::OkStatus(); + } + + const int max_bytes = (options.max_sequence_bytes() > 0) + ? options.max_sequence_bytes() + : kMaxProtoBytes; + std::string id = mpms::HasExampleId(*sequence_) ? mpms::GetExampleId(*sequence_) : "example"; - RET_CHECK_LT(sequence_->ByteSizeLong(), MAX_PROTO_BYTES) - << "sequence '" << id - << "' would be too many bytes to serialize after adding features."; + RET_CHECK_LT(sequence_->ByteSizeLong(), max_bytes) + << "sequence '" << id << "' with " << sequence_->ByteSizeLong() + << " bytes would be more than " << max_bytes << " bytes."; return absl::OkStatus(); } @@ -379,9 +389,7 @@ class PackMediaSequenceCalculator : public CalculatorBase { options.reconcile_region_annotations(), sequence_.get())); } - if (options.skip_large_sequences()) { - RET_CHECK_OK(VerifySize()); - } + RET_CHECK_OK(VerifySize(options)); if (options.output_only_if_all_present()) { absl::Status status = VerifySequence(); if (!status.ok()) { diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.proto b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.proto index cc6c2ffda4..a5c7bbbdf7 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.proto +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.proto @@ -65,6 +65,9 @@ message PackMediaSequenceCalculatorOptions { // If true, will return an error status if an output sequence would be too // many bytes to serialize. 
optional bool skip_large_sequences = 7 [default = true]; + // If > 0, will return an error status if an output sequence would be too + // many bytes to serialize. Otherwise uses int max. + optional int32 max_sequence_bytes = 10 [default = -1]; // If true/false, outputs the SequenceExample at timestamp 0/PostStream. optional bool output_as_zero_timestamp = 8 [default = false]; diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc index d8fbc94d53..fee2a8b48c 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc @@ -85,8 +85,9 @@ class PackMediaSequenceCalculatorTest : public ::testing::Test { const bool replace_instead_of_append, const bool output_as_zero_timestamp = false, const bool add_empty_labels = false, - const std::vector& input_side_packets = { - "SEQUENCE_EXAMPLE:input_sequence"}) { + const std::vector& input_side_packets = + {"SEQUENCE_EXAMPLE:input_sequence"}, + const int32_t max_sequence_bytes = -1) { CalculatorGraphConfig::Node config; config.set_calculator("PackMediaSequenceCalculator"); for (const std::string& side_packet : input_side_packets) { @@ -103,6 +104,7 @@ class PackMediaSequenceCalculatorTest : public ::testing::Test { options->set_replace_data_instead_of_append(replace_instead_of_append); options->set_output_as_zero_timestamp(output_as_zero_timestamp); options->set_add_empty_labels(add_empty_labels); + options->set_max_sequence_bytes(max_sequence_bytes); runner_ = ::absl::make_unique(config); } @@ -1987,5 +1989,37 @@ TEST_F(PackMediaSequenceCalculatorTest, TestTooLargeInputFailsSoftly) { ASSERT_FALSE(runner_->Run().ok()); } +TEST_F(PackMediaSequenceCalculatorTest, SkipLargeSequence) { + SetUpCalculator({"IMAGE:images"}, {}, false, true, false, false, + {"SEQUENCE_EXAMPLE:input_sequence"}, + /*max_sequence_bytes=*/10); + auto input_sequence = ::absl::make_unique(); + std::string test_video_id = "test_video_id"; + mpms::SetClipMediaId(test_video_id, input_sequence.get()); + cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); + std::vector bytes; + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); + OpenCvImageEncoderCalculatorResults encoded_image; + encoded_image.set_encoded_image(bytes.data(), bytes.size()); + encoded_image.set_width(2); + encoded_image.set_height(1); + + int num_images = 2; + for (int i = 0; i < num_images; ++i) { + auto image_ptr = + ::absl::make_unique(encoded_image); + runner_->MutableInputs()->Tag(kImageTag).packets.push_back( + Adopt(image_ptr.release()).At(Timestamp(i))); + } + + runner_->MutableSidePackets()->Tag(kSequenceExampleTag) = + Adopt(input_sequence.release()); + + absl::Status status = runner_->Run(); + EXPECT_THAT(status.ToString(), + ::testing::HasSubstr("bytes would be more than 10 bytes")); +} + } // namespace } // namespace mediapipe diff --git a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc index c750b61b8d..d673895aae 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc @@ -515,7 +515,7 @@ class TensorFlowInferenceCalculator : public CalculatorBase { keyed_tensors.second.end(), keyed_tensors.second[0]); } tf::Tensor concated; - const tf::Status concat_status = + const absl::Status 
concat_status = tf::tensor::Concat(keyed_tensors.second, &concated); ABSL_CHECK(concat_status.ok()) << concat_status.ToString(); input_tensors.emplace_back(tag_to_tensor_map_[keyed_tensors.first], @@ -547,7 +547,7 @@ class TensorFlowInferenceCalculator : public CalculatorBase { session_run_throttle->Acquire(1); } const int64_t run_start_time = absl::ToUnixMicros(clock_->TimeNow()); - tf::Status tf_status; + absl::Status tf_status; { #if !defined(MEDIAPIPE_MOBILE) && !defined(__APPLE__) tsl::profiler::TraceMe trace(absl::string_view(cc->NodeName())); @@ -597,7 +597,7 @@ class TensorFlowInferenceCalculator : public CalculatorBase { } } else { std::vector split_tensors; - const tf::Status split_status = + const absl::Status split_status = tf::tensor::Split(outputs[i], split_vector, &split_tensors); ABSL_CHECK(split_status.ok()) << split_status.ToString(); // Loop over timestamps so that we don't copy the padding. diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.cc index 358b50cd31..5b3be901ff 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.cc @@ -140,14 +140,14 @@ class TensorFlowSessionFromFrozenGraphCalculator : public CalculatorBase { SetPreferredDevice(&graph_def, options.preferred_device_id()); } - const tf::Status tf_status = session->session->Create(graph_def); + const absl::Status tf_status = session->session->Create(graph_def); RET_CHECK(tf_status.ok()) << "Create failed: " << tf_status.ToString(); for (const auto& key_value : options.tag_to_tensor_names()) { session->tag_to_tensor_map[key_value.first] = key_value.second; } if (!initialization_op_names.empty()) { - const tf::Status tf_status = + const absl::Status tf_status = session->session->Run({}, {}, initialization_op_names, {}); // RET_CHECK on the tf::Status object itself in order to print an // informative error message. diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc index e340a098b5..b83dd632c8 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc @@ -139,14 +139,14 @@ class TensorFlowSessionFromFrozenGraphGenerator : public PacketGenerator { SetPreferredDevice(&graph_def, options.preferred_device_id()); } - const tf::Status tf_status = session->session->Create(graph_def); + const absl::Status tf_status = session->session->Create(graph_def); RET_CHECK(tf_status.ok()) << "Create failed: " << tf_status.ToString(); for (const auto& key_value : options.tag_to_tensor_names()) { session->tag_to_tensor_map[key_value.first] = key_value.second; } if (!initialization_op_names.empty()) { - const tf::Status tf_status = + const absl::Status tf_status = session->session->Run({}, {}, initialization_op_names, {}); // RET_CHECK on the tf::Status object itself in order to print an // informative error message. 
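Stepping back to the PackMediaSequenceCalculator change above: a minimal sketch (not part of this patch) of a graph node that sets the new max_sequence_bytes option through the usual MediaPipe options extension. The stream and side packet names are illustrative, and the 1 MiB limit is an arbitrary example value.

#include "mediapipe/framework/calculator.pb.h"
#include "mediapipe/framework/port/parse_text_proto.h"

// Hypothetical config: cap serialized SequenceExamples at 1 MiB instead of
// the kMaxProtoBytes fallback used when max_sequence_bytes is unset (<= 0).
mediapipe::CalculatorGraphConfig::Node node =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig::Node>(R"pb(
      calculator: "PackMediaSequenceCalculator"
      input_stream: "IMAGE:images"
      input_side_packet: "SEQUENCE_EXAMPLE:input_sequence"
      output_stream: "SEQUENCE_EXAMPLE:output_sequence"
      options {
        [mediapipe.PackMediaSequenceCalculatorOptions.ext] {
          skip_large_sequences: true
          max_sequence_bytes: 1048576
        }
      }
    )pb");

Note that the reworked VerifySize() enforces the limit only when skip_large_sequences is true; otherwise it returns OkStatus immediately.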
diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.cc index 4ca4cb8d6e..18523b32af 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.cc @@ -143,7 +143,7 @@ class TensorFlowSessionFromSavedModelCalculator : public CalculatorBase { tensorflow::SessionOptions session_options; session_options.config = options.session_config(); auto saved_model = absl::make_unique(); - ::tensorflow::Status status = tensorflow::LoadSavedModel( + absl::Status status = tensorflow::LoadSavedModel( session_options, run_options, path, tags_set, saved_model.get()); if (!status.ok()) { return absl::Status(static_cast(status.code()), diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.cc index 9596224470..208f4b066b 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.cc @@ -145,7 +145,7 @@ class TensorFlowSessionFromSavedModelGenerator : public PacketGenerator { tensorflow::SessionOptions session_options; session_options.config = options.session_config(); auto saved_model = absl::make_unique(); - ::tensorflow::Status status = tensorflow::LoadSavedModel( + absl::Status status = tensorflow::LoadSavedModel( session_options, run_options, path, tags_set, saved_model.get()); if (!status.ok()) { return absl::Status(static_cast(status.code()), diff --git a/mediapipe/calculators/util/detections_deduplicate_calculator.cc b/mediapipe/calculators/util/detections_deduplicate_calculator.cc index a31585b883..0c13317d99 100644 --- a/mediapipe/calculators/util/detections_deduplicate_calculator.cc +++ b/mediapipe/calculators/util/detections_deduplicate_calculator.cc @@ -14,6 +14,7 @@ limitations under the License. ==============================================================================*/ #include +#include #include #include #include @@ -47,7 +48,7 @@ struct BoundingBoxEq { } // namespace -// This Calculator deduplicates the bunding boxes with exactly the same +// This Calculator deduplicates the bounding boxes with exactly the same // coordinates, and folds the labels into a single Detection proto. Note // non-maximum-suppression remove the overlapping bounding boxes within a class, // while the deduplication operation merges bounding boxes from different @@ -73,7 +74,9 @@ class DetectionsDeduplicateCalculator : public Node { absl::Status Process(mediapipe::CalculatorContext* cc) { const std::vector& raw_detections = kIn(cc).Get(); - absl::flat_hash_map bbox_to_detections; std::vector deduplicated_detections; @@ -87,8 +90,8 @@ class DetectionsDeduplicateCalculator : public Node { detection.location_data().bounding_box())) { // The bbox location already exists. Merge the detection labels into // the existing detection proto. 
- Detection& deduplicated_detection = - *bbox_to_detections[detection.location_data().bounding_box()]; + Detection& deduplicated_detection = deduplicated_detections + [bbox_to_detections[detection.location_data().bounding_box()]]; deduplicated_detection.mutable_score()->MergeFrom(detection.score()); deduplicated_detection.mutable_label()->MergeFrom(detection.label()); deduplicated_detection.mutable_label_id()->MergeFrom( @@ -100,7 +103,7 @@ class DetectionsDeduplicateCalculator : public Node { // detection vector. deduplicated_detections.push_back(detection); bbox_to_detections[detection.location_data().bounding_box()] = - &deduplicated_detections.back(); + deduplicated_detections.size() - 1; } } kOut(cc).Send(std::move(deduplicated_detections)); diff --git a/mediapipe/calculators/video/tool/BUILD b/mediapipe/calculators/video/tool/BUILD index 2a32c680cf..d72aab0c31 100644 --- a/mediapipe/calculators/video/tool/BUILD +++ b/mediapipe/calculators/video/tool/BUILD @@ -14,6 +14,7 @@ # limitations under the License. load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library") +# Placeholder: load proto_library licenses(["notice"]) diff --git a/mediapipe/docs/autoflip.md b/mediapipe/docs/autoflip.md index 4d7367810d..1765595f3b 100644 --- a/mediapipe/docs/autoflip.md +++ b/mediapipe/docs/autoflip.md @@ -1,2 +1,2 @@ Content moved to -[AutoFlip: Saliency-aware Video Cropping](https://google.github.io/mediapipe/solutions/autoflip) +[AutoFlip: Saliency-aware Video Cropping](https://google-ai-edge.github.io/mediapipe/solutions/autoflip) diff --git a/mediapipe/docs/face_detection_desktop.md b/mediapipe/docs/face_detection_desktop.md index 8377e8df1d..42a8aab7ca 100644 --- a/mediapipe/docs/face_detection_desktop.md +++ b/mediapipe/docs/face_detection_desktop.md @@ -1,2 +1,2 @@ Content moved to -[MediaPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection) +[MediaPipe Face Detection](https://google-ai-edge.github.io/mediapipe/solutions/face_detection) diff --git a/mediapipe/docs/face_detection_mobile_gpu.md b/mediapipe/docs/face_detection_mobile_gpu.md index 8377e8df1d..42a8aab7ca 100644 --- a/mediapipe/docs/face_detection_mobile_gpu.md +++ b/mediapipe/docs/face_detection_mobile_gpu.md @@ -1,2 +1,2 @@ Content moved to -[MediaPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection) +[MediaPipe Face Detection](https://google-ai-edge.github.io/mediapipe/solutions/face_detection) diff --git a/mediapipe/docs/hair_segmentation_mobile_gpu.md b/mediapipe/docs/hair_segmentation_mobile_gpu.md index 43116a4f65..08236d7f80 100644 --- a/mediapipe/docs/hair_segmentation_mobile_gpu.md +++ b/mediapipe/docs/hair_segmentation_mobile_gpu.md @@ -1,2 +1,2 @@ Content moved to -[MediaPipe Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) +[MediaPipe Hair Segmentation](https://google-ai-edge.github.io/mediapipe/solutions/hair_segmentation) diff --git a/mediapipe/docs/hand_tracking_desktop.md b/mediapipe/docs/hand_tracking_desktop.md index 02bb1312cc..f9dc45399c 100644 --- a/mediapipe/docs/hand_tracking_desktop.md +++ b/mediapipe/docs/hand_tracking_desktop.md @@ -1 +1 @@ -Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands) +Content moved to [MediaPipe Hands](https://google-ai-edge.github.io/mediapipe/solutions/hands) diff --git a/mediapipe/docs/hand_tracking_mobile_gpu.md b/mediapipe/docs/hand_tracking_mobile_gpu.md index 02bb1312cc..f9dc45399c 100644 --- 
a/mediapipe/docs/hand_tracking_mobile_gpu.md +++ b/mediapipe/docs/hand_tracking_mobile_gpu.md @@ -1 +1 @@ -Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands) +Content moved to [MediaPipe Hands](https://google-ai-edge.github.io/mediapipe/solutions/hands) diff --git a/mediapipe/docs/multi_hand_tracking_mobile_gpu.md b/mediapipe/docs/multi_hand_tracking_mobile_gpu.md index 02bb1312cc..f9dc45399c 100644 --- a/mediapipe/docs/multi_hand_tracking_mobile_gpu.md +++ b/mediapipe/docs/multi_hand_tracking_mobile_gpu.md @@ -1 +1 @@ -Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands) +Content moved to [MediaPipe Hands](https://google-ai-edge.github.io/mediapipe/solutions/hands) diff --git a/mediapipe/docs/object_detection_desktop.md b/mediapipe/docs/object_detection_desktop.md index 2e565cefda..3313fd0781 100644 --- a/mediapipe/docs/object_detection_desktop.md +++ b/mediapipe/docs/object_detection_desktop.md @@ -1,2 +1,2 @@ Content moved to -[MediaPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection) +[MediaPipe Object Detection](https://google-ai-edge.github.io/mediapipe/solutions/object_detection) diff --git a/mediapipe/docs/object_detection_mobile_gpu.md b/mediapipe/docs/object_detection_mobile_gpu.md index 2e565cefda..3313fd0781 100644 --- a/mediapipe/docs/object_detection_mobile_gpu.md +++ b/mediapipe/docs/object_detection_mobile_gpu.md @@ -1,2 +1,2 @@ Content moved to -[MediaPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection) +[MediaPipe Object Detection](https://google-ai-edge.github.io/mediapipe/solutions/object_detection) diff --git a/mediapipe/docs/object_tracking_mobile_gpu.md b/mediapipe/docs/object_tracking_mobile_gpu.md index c74d942f6a..89a8ff7840 100644 --- a/mediapipe/docs/object_tracking_mobile_gpu.md +++ b/mediapipe/docs/object_tracking_mobile_gpu.md @@ -1,2 +1,2 @@ Content moved to -[MediaPipe Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) +[MediaPipe Box Tracking](https://google-ai-edge.github.io/mediapipe/solutions/box_tracking) diff --git a/mediapipe/docs/objectron_mobile_gpu.md b/mediapipe/docs/objectron_mobile_gpu.md index 231fc512ca..19e02b9c57 100644 --- a/mediapipe/docs/objectron_mobile_gpu.md +++ b/mediapipe/docs/objectron_mobile_gpu.md @@ -1,2 +1,2 @@ Content moved to -[MediaPipe Objectron](https://google.github.io/mediapipe/solutions/objectron) +[MediaPipe Objectron](https://google-ai-edge.github.io/mediapipe/solutions/objectron) diff --git a/mediapipe/docs/template_matching_mobile_cpu.md b/mediapipe/docs/template_matching_mobile_cpu.md index 02150175cc..a47f5ea716 100644 --- a/mediapipe/docs/template_matching_mobile_cpu.md +++ b/mediapipe/docs/template_matching_mobile_cpu.md @@ -1,2 +1,2 @@ Content moved to -[MediaPipe KNIFT](https://google.github.io/mediapipe/solutions/knift) +[MediaPipe KNIFT](https://google-ai-edge.github.io/mediapipe/solutions/knift) diff --git a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar index 7f93135c49..d64cd49177 100644 Binary files a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar and b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar differ diff --git a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties index 3fa8f862f7..1af9e0930b 
100644 --- a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties +++ b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic/BUILD index ae4652dba9..9c75207c0a 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic/BUILD @@ -80,5 +80,6 @@ android_binary( deps = [ ":basic_lib", ":mediapipe_jni_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu/BUILD index cf71048b5e..e26e228b3b 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu/BUILD @@ -56,5 +56,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectionfullrangegpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectionfullrangegpu/BUILD index 03891439da..6d1188bd93 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectionfullrangegpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectionfullrangegpu/BUILD @@ -56,5 +56,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu/BUILD index 9c60d5a573..1c31f71f6c 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu/BUILD @@ -56,5 +56,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/faceeffect/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/faceeffect/BUILD index becef5246a..72800a0609 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/faceeffect/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/faceeffect/BUILD @@ -67,5 +67,6 @@ android_binary( "//mediapipe/framework/formats:matrix_data_java_proto_lite", "//mediapipe/java/com/google/mediapipe/framework:android_framework", "//mediapipe/modules/face_geometry/protos:face_geometry_java_proto_lite", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu/BUILD 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu/BUILD index edef0b8604..78a0a0f341 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facemeshgpu/BUILD @@ -59,5 +59,6 @@ android_binary( "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", "//mediapipe/framework/formats:landmark_java_proto_lite", "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD index df58f27131..f8753045f3 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD @@ -56,5 +56,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu/BUILD index 550d61ec05..f1c405f0c7 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu/BUILD @@ -56,5 +56,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD index b79fc5ba1e..4f3c10681b 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD @@ -63,5 +63,6 @@ android_binary( "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", "//mediapipe/framework/formats:landmark_java_proto_lite", "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/holistictrackinggpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/holistictrackinggpu/BUILD index e7e6cfb975..3263afe889 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/holistictrackinggpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/holistictrackinggpu/BUILD @@ -65,5 +65,6 @@ android_binary( "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", "//mediapipe/framework/formats:landmark_java_proto_lite", "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking/BUILD index 3dea64053e..b0f0df7d0f 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking/BUILD +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking/BUILD @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Placeholder: load java_lite_proto_library + licenses(["notice"]) package(default_visibility = ["//visibility:private"]) @@ -96,5 +98,6 @@ android_binary( ":instantmotiontracking_lib", ":mediapipe_jni_lib", "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu/BUILD index 482932b193..3812673b33 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu/BUILD @@ -60,6 +60,7 @@ android_binary( "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", "//mediapipe/framework/formats:landmark_java_proto_lite", "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//third_party:libc++_shared_lib", "@com_google_protobuf//:protobuf_javalite", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d/BUILD index 783ae200e2..a45a6a8c31 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetection3d/BUILD @@ -186,5 +186,6 @@ android_binary( "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", "//mediapipe/framework/formats:landmark_java_proto_lite", "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectioncpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectioncpu/BUILD index 9bb0549364..3656d2617b 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectioncpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectioncpu/BUILD @@ -57,5 +57,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectiongpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectiongpu/BUILD index 81f2ed3e69..5abb518bce 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectiongpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objectdetectiongpu/BUILD @@ -57,5 +57,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objecttrackinggpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objecttrackinggpu/BUILD index 50ea70f898..095f4b27bb 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objecttrackinggpu/BUILD +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/objecttrackinggpu/BUILD @@ -57,5 +57,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/posetrackinggpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/posetrackinggpu/BUILD index d1c45345fd..b21488eb60 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/posetrackinggpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/posetrackinggpu/BUILD @@ -59,6 +59,7 @@ android_binary( "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", "//mediapipe/framework/formats:landmark_java_proto_lite", "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//third_party:libc++_shared_lib", "@com_google_protobuf//:protobuf_javalite", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/selfiesegmentationgpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/selfiesegmentationgpu/BUILD index 6bfcf34c17..68efaae0f4 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/selfiesegmentationgpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/selfiesegmentationgpu/BUILD @@ -56,5 +56,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/templatematchingcpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/templatematchingcpu/BUILD index ed3a63a70e..51152c567d 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/templatematchingcpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/templatematchingcpu/BUILD @@ -58,5 +58,6 @@ android_binary( deps = [ ":mediapipe_jni_lib", "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//third_party:libc++_shared_lib", ], ) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 2930cf68d7..9067b6da3f 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -14,6 +14,7 @@ load("@bazel_skylib//:bzl_library.bzl", "bzl_library") load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") +# Placeholder: load proto_library licenses(["notice"]) @@ -334,6 +335,7 @@ cc_library( ":delegating_executor", ":executor", ":graph_output_stream", + ":graph_runtime_info_cc_proto", ":graph_service", ":graph_service_manager", ":input_stream_manager", @@ -358,14 +360,16 @@ cc_library( ":thread_pool_executor_cc_proto", ":timestamp", ":validated_graph_config", + ":vlog_overrides", + "//mediapipe/framework/deps:clock", "//mediapipe/framework/port:core_proto", - "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:map_util", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:fill_packet_set", + "//mediapipe/framework/tool:graph_runtime_info_logger", "//mediapipe/framework/tool:packet_generator_wrapper_calculator", "//mediapipe/framework/tool:status_util", "//mediapipe/framework/tool:tag_map", @@ -386,6 +390,7 @@ cc_library( 
"@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/synchronization", + "@com_google_absl//absl/time", ], ) @@ -424,6 +429,7 @@ cc_library( ":calculator_context_manager", ":calculator_state", ":counter_factory", + ":graph_runtime_info_cc_proto", ":graph_service_manager", ":input_side_packet_handler", ":input_stream_handler", @@ -440,6 +446,7 @@ cc_library( ":stream_handler_cc_proto", ":timestamp", ":validated_graph_config", + "//mediapipe/framework/deps:clock", "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", @@ -453,12 +460,14 @@ cc_library( "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/cleanup", "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", + "@com_google_absl//absl/time", ], ) @@ -646,6 +655,12 @@ cc_library( ], ) +mediapipe_proto_library( + name = "graph_runtime_info_proto", + srcs = ["graph_runtime_info.proto"], + visibility = ["//visibility:public"], +) + cc_library( name = "graph_service", hdrs = ["graph_service.h"], @@ -666,9 +681,15 @@ cc_library( hdrs = ["resources.h"], visibility = ["//visibility:public"], deps = [ + "//mediapipe/framework/deps:mlock_helpers", + "//mediapipe/framework/deps:mmapped_file", + "//mediapipe/framework/port:file_helpers", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", "//mediapipe/util:resource_util", "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", @@ -784,7 +805,9 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:tag_map", + "@com_google_absl//absl/log", "@com_google_absl//absl/log:absl_check", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -1166,12 +1189,12 @@ cc_library( ":calculator_node", ":executor", "//mediapipe/framework/deps:clock", - "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", - "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/log:absl_check", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/strings:string_view", "@com_google_absl//absl/synchronization", ], ) @@ -1333,7 +1356,6 @@ cc_library( ":legacy_calculator_support", ":packet_generator", ":packet_generator_cc_proto", - ":packet_set", ":packet_type", ":port", ":status_handler", @@ -1341,8 +1363,9 @@ cc_library( ":stream_handler_cc_proto", ":subgraph", ":thread_pool_executor_cc_proto", + ":vlog_utils", "//mediapipe/framework/port:core_proto", - "//mediapipe/framework/port:integral_types", + "//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:map_util", "//mediapipe/framework/port:ret_check", @@ -1352,12 +1375,13 @@ cc_library( "//mediapipe/framework/tool:name_util", "//mediapipe/framework/tool:status_util", "//mediapipe/framework/tool:subgraph_expansion", - "//mediapipe/framework/tool:validate", "//mediapipe/framework/tool:validate_name", 
"@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log", "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", "@com_google_absl//absl/strings", "@com_google_protobuf//:protobuf", ], @@ -1618,6 +1642,7 @@ cc_test( "//mediapipe/calculators/core:counting_source_calculator", "//mediapipe/calculators/core:mux_calculator", "//mediapipe/calculators/core:pass_through_calculator", + "//mediapipe/framework/deps:clock", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:ret_check", @@ -1926,3 +1951,33 @@ cc_library( ":memory_manager", ], ) + +cc_library( + name = "vlog_overrides", + srcs = ["vlog_overrides.cc"], + hdrs = ["vlog_overrides.h"], + visibility = ["//visibility:private"], + deps = [ + "//mediapipe/framework/deps:no_destructor", + "@com_google_absl//absl/log:absl_check", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:globals", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/strings:string_view", + ], +) + +cc_library( + name = "vlog_utils", + srcs = ["vlog_utils.cc"], + hdrs = ["vlog_utils.h"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:string_view", + ], +) diff --git a/mediapipe/framework/api2/BUILD b/mediapipe/framework/api2/BUILD index f95f7f9f3d..61806f7331 100644 --- a/mediapipe/framework/api2/BUILD +++ b/mediapipe/framework/api2/BUILD @@ -25,10 +25,14 @@ cc_library( "//mediapipe/framework:stream_handler_cc_proto", "//mediapipe/framework/port:any_proto", "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:source_location", + "//mediapipe/framework/port:status", "@com_google_absl//absl/container:btree", "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", "@com_google_protobuf//:protobuf", ], ) @@ -139,15 +143,19 @@ cc_library( hdrs = ["packet.h"], deps = [ ":tuple", + "//mediapipe/framework:legacy_calculator_support", "//mediapipe/framework:packet", + "//mediapipe/framework:port", "//mediapipe/framework:timestamp", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/log:absl_check", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/meta:type_traits", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/framework/api2/README.md b/mediapipe/framework/api2/README.md index 849a5f4c42..85c1c4a38c 100644 --- a/mediapipe/framework/api2/README.md +++ b/mediapipe/framework/api2/README.md @@ -99,8 +99,8 @@ MEDIAPIPE_NODE_CONTRACT(kMain, kLoop, kPrevLoop, ``` Several calculators in -[`calculators/core`](https://github.com/google/mediapipe/tree/master/mediapipe/calculators/core) and -[`calculators/tensor`](https://github.com/google/mediapipe/tree/master/mediapipe/calculators/tensor) +[`calculators/core`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/core) and 
+[`calculators/tensor`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/calculators/tensor) have been updated to use this API. Reference them for more examples. More complete documentation will be provided in the future. diff --git a/mediapipe/framework/api2/builder.h b/mediapipe/framework/api2/builder.h index 0b307326ad..b2d7feb22e 100644 --- a/mediapipe/framework/api2/builder.h +++ b/mediapipe/framework/api2/builder.h @@ -13,6 +13,7 @@ #include "absl/container/btree_map.h" #include "absl/log/absl_check.h" #include "absl/memory/memory.h" +#include "absl/status/status.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "google/protobuf/message_lite.h" @@ -23,6 +24,8 @@ #include "mediapipe/framework/mediapipe_options.pb.h" #include "mediapipe/framework/port/any_proto.h" #include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_builder.h" +#include "mediapipe/framework/port/status_macros.h" #include "mediapipe/framework/stream_handler.pb.h" namespace mediapipe { @@ -62,26 +65,25 @@ class TagIndexMap { return map_[tag]; } - void Visit(std::function fun) const { - for (const auto& tagged : map_) { - TagIndexLocation loc{tagged.first, 0, tagged.second.size()}; - for (const auto& item : tagged.second) { - fun(loc, *item); - ++loc.index; - } - } - } - - void Visit(std::function fun) { + absl::Status Visit( + std::function fun) const { for (auto& tagged : map_) { TagIndexLocation loc{tagged.first, 0, tagged.second.size()}; - for (auto& item : tagged.second) { - fun(loc, item.get()); - ++loc.index; + for (int i = 0; i < tagged.second.size(); ++i) { + auto* item = tagged.second[i].get(); + loc.index = i; + // If the item is nullptr, it means that the connection vector for + // current tag grew by a GetWithAutoGrow() request but hasn't been + // populated yet. + if (item != nullptr) { + MP_RETURN_IF_ERROR(fun(loc, *item)); + } } } + return absl::OkStatus(); } + private: // Note: entries are held by a unique_ptr to ensure pointers remain valid. // Should use absl::flat_hash_map but ordering keys for now. 
absl::btree_map>> map_; @@ -230,6 +232,8 @@ class SourceImpl { return !(*this == other); } + const std::string& Name() const { return base_->name_; } + Src& SetName(const char* name) { base_->name_ = std::string(name); return *this; @@ -857,7 +861,7 @@ class Graph { } } - FixUnnamedConnections(); + ABSL_CHECK_OK(FixUnnamedConnections()); ABSL_CHECK_OK(UpdateBoundaryConfig(&config)); for (const std::unique_ptr& node : nodes_) { auto* out_node = config.add_node(); @@ -871,32 +875,41 @@ class Graph { } private: - void FixUnnamedConnections(NodeBase* node, int* unnamed_count) { - node->out_streams_.Visit([&](const TagIndexLocation&, SourceBase* source) { - if (source->name_.empty()) { - source->name_ = absl::StrCat("__stream_", (*unnamed_count)++); - } - }); - node->out_sides_.Visit([&](const TagIndexLocation&, SourceBase* source) { - if (source->name_.empty()) { - source->name_ = absl::StrCat("__side_packet_", (*unnamed_count)++); - } - }); + absl::Status FixUnnamedConnections(NodeBase* node, int* unnamed_count) { + MP_RETURN_IF_ERROR(node->out_streams_.Visit( + [&](const TagIndexLocation& loc, SourceBase& source) -> absl::Status { + if (source.name_.empty()) { + source.name_ = absl::StrCat("__stream_", (*unnamed_count)++); + } + return absl::OkStatus(); + })); + + MP_RETURN_IF_ERROR(node->out_sides_.Visit( + [&](const TagIndexLocation& loc, SourceBase& source) -> absl::Status { + if (source.name_.empty()) { + source.name_ = absl::StrCat("__side_packet_", (*unnamed_count)++); + } + return absl::OkStatus(); + })); + return absl::OkStatus(); } - void FixUnnamedConnections() { + absl::Status FixUnnamedConnections() { int unnamed_count = 0; - FixUnnamedConnections(&graph_boundary_, &unnamed_count); + MP_RETURN_IF_ERROR(FixUnnamedConnections(&graph_boundary_, &unnamed_count)); for (std::unique_ptr& node : nodes_) { - FixUnnamedConnections(node.get(), &unnamed_count); + MP_RETURN_IF_ERROR(FixUnnamedConnections(node.get(), &unnamed_count)); } for (std::unique_ptr& node : packet_gens_) { - node->out_sides_.Visit([&](const TagIndexLocation&, SourceBase* source) { - if (source->name_.empty()) { - source->name_ = absl::StrCat("__side_packet_", unnamed_count++); - } - }); + MP_RETURN_IF_ERROR(node->out_sides_.Visit( + [&](const TagIndexLocation& loc, SourceBase& source) -> absl::Status { + if (source.name_.empty()) { + source.name_ = absl::StrCat("__side_packet_", unnamed_count++); + } + return absl::OkStatus(); + })); } + return absl::OkStatus(); } std::string TaggedName(const TagIndexLocation& loc, absl::string_view name) { @@ -917,24 +930,39 @@ class Graph { absl::Status UpdateNodeConfig(const NodeBase& node, CalculatorGraphConfig::Node* config) { config->set_calculator(node.type_); - node.in_streams_.Visit( - [&](const TagIndexLocation& loc, const DestinationBase& endpoint) { - ABSL_CHECK(endpoint.source != nullptr); + MP_RETURN_IF_ERROR(node.in_streams_.Visit( + [&](const TagIndexLocation& loc, + const DestinationBase& endpoint) -> absl::Status { + RET_CHECK(endpoint.source != nullptr) + << node.type_ << ": Missing source for input stream with tag " + << (loc.tag.empty() ? 
"(empty)" : loc.tag) << " at index " + << loc.index; config->add_input_stream(TaggedName(loc, endpoint.source->name_)); - }); - node.out_streams_.Visit( - [&](const TagIndexLocation& loc, const SourceBase& endpoint) { + return absl::OkStatus(); + })); + MP_RETURN_IF_ERROR(node.out_streams_.Visit( + [&](const TagIndexLocation& loc, + const SourceBase& endpoint) -> absl::Status { config->add_output_stream(TaggedName(loc, endpoint.name_)); - }); - node.in_sides_.Visit([&](const TagIndexLocation& loc, - const DestinationBase& endpoint) { - ABSL_CHECK(endpoint.source != nullptr); - config->add_input_side_packet(TaggedName(loc, endpoint.source->name_)); - }); - node.out_sides_.Visit( - [&](const TagIndexLocation& loc, const SourceBase& endpoint) { + return absl::OkStatus(); + })); + MP_RETURN_IF_ERROR(node.in_sides_.Visit( + [&](const TagIndexLocation& loc, + const DestinationBase& endpoint) -> absl::Status { + RET_CHECK(endpoint.source != nullptr) + << node.type_ + << ": Missing source for input side packet stream with tag " + << loc.tag << " at index " << loc.index; + config->add_input_side_packet( + TaggedName(loc, endpoint.source->name_)); + return absl::OkStatus(); + })); + MP_RETURN_IF_ERROR( + node.out_sides_.Visit([&](const TagIndexLocation& loc, + const SourceBase& endpoint) -> absl::Status { config->add_output_side_packet(TaggedName(loc, endpoint.name_)); - }); + return absl::OkStatus(); + })); if (node.calculator_option_.has_value()) { *config->mutable_options() = *node.calculator_option_; } @@ -966,15 +994,24 @@ class Graph { absl::Status UpdateNodeConfig(const PacketGenerator& node, PacketGeneratorConfig* config) { config->set_packet_generator(node.type_); - node.in_sides_.Visit([&](const TagIndexLocation& loc, - const DestinationBase& endpoint) { - ABSL_CHECK(endpoint.source != nullptr); - config->add_input_side_packet(TaggedName(loc, endpoint.source->name_)); - }); - node.out_sides_.Visit( - [&](const TagIndexLocation& loc, const SourceBase& endpoint) { + MP_RETURN_IF_ERROR(node.in_sides_.Visit( + [&](const TagIndexLocation& loc, + const DestinationBase& endpoint) -> absl::Status { + RET_CHECK(endpoint.source != nullptr) + << node.type_ + << ": Missing source for input side packet stream with tag " + << (loc.tag.empty() ? "(empty)" : loc.tag) << " at index " + << loc.index; + config->add_input_side_packet( + TaggedName(loc, endpoint.source->name_)); + return absl::OkStatus(); + })); + MP_RETURN_IF_ERROR( + node.out_sides_.Visit([&](const TagIndexLocation& loc, + const SourceBase& endpoint) -> absl::Status { config->add_output_side_packet(TaggedName(loc, endpoint.name_)); - }); + return absl::OkStatus(); + })); if (node.options_used_) { *config->mutable_options() = node.options_; } @@ -983,25 +1020,43 @@ class Graph { // For special boundary node. absl::Status UpdateBoundaryConfig(CalculatorGraphConfig* config) { - graph_boundary_.in_streams_.Visit( - [&](const TagIndexLocation& loc, const DestinationBase& endpoint) { - ABSL_CHECK(endpoint.source != nullptr); + MP_RETURN_IF_ERROR(graph_boundary_.in_streams_.Visit( + [&](const TagIndexLocation& loc, + const DestinationBase& endpoint) -> absl::Status { + RET_CHECK(endpoint.source != nullptr) + << type_ << ": Missing source for graph output stream with tag " + << (loc.tag.empty() ? 
"(empty)" : loc.tag) << " at index " + << loc.index; config->add_output_stream(TaggedName(loc, endpoint.source->name_)); - }); - graph_boundary_.out_streams_.Visit( - [&](const TagIndexLocation& loc, const SourceBase& endpoint) { + return absl::OkStatus(); + })); + MP_RETURN_IF_ERROR(graph_boundary_.out_streams_.Visit( + [&](const TagIndexLocation& loc, + const SourceBase& endpoint) -> absl::Status { config->add_input_stream(TaggedName(loc, endpoint.name_)); - }); - graph_boundary_.in_sides_.Visit([&](const TagIndexLocation& loc, - const DestinationBase& endpoint) { - ABSL_CHECK(endpoint.source != nullptr); - config->add_output_side_packet(TaggedName(loc, endpoint.source->name_)); - }); - graph_boundary_.out_sides_.Visit( - [&](const TagIndexLocation& loc, const SourceBase& endpoint) { + return absl::OkStatus(); + })); + MP_RETURN_IF_ERROR(graph_boundary_.in_sides_.Visit( + [&](const TagIndexLocation& loc, + const DestinationBase& endpoint) -> absl::Status { + RET_CHECK(endpoint.source != nullptr) + << type_ + << ": Missing source for graph output side packet stream with " + "tag " + << (loc.tag.empty() ? "(empty)" : loc.tag) << " at index " + << loc.index; + config->add_output_side_packet( + TaggedName(loc, endpoint.source->name_)); + return absl::OkStatus(); + })); + MP_RETURN_IF_ERROR(graph_boundary_.out_sides_.Visit( + [&](const TagIndexLocation& loc, + const SourceBase& endpoint) -> absl::Status { config->add_input_side_packet(TaggedName(loc, endpoint.name_)); - }); - return {}; + + return absl::OkStatus(); + })); + return absl::OkStatus(); } std::string type_; diff --git a/mediapipe/framework/api2/builder_test.cc b/mediapipe/framework/api2/builder_test.cc index da3d7a82ad..c282b84643 100644 --- a/mediapipe/framework/api2/builder_test.cc +++ b/mediapipe/framework/api2/builder_test.cc @@ -1,6 +1,6 @@ #include "mediapipe/framework/api2/builder.h" -#include +#include #include "absl/strings/string_view.h" #include "absl/strings/substitute.h" @@ -903,5 +903,19 @@ TEST(CastTest, FromAnyToAny) { [[maybe_unused]] Stream int_dest = any_inp.Cast(); } +TEST(BuilderTest, CrashWithUsefulMessageIfSkippingInputSource) { + Graph graph; + + auto& multi_node = graph.AddNode("MultiInputsOutputs"); + Stream base = graph.In("IN").SetName("base"); + // We only connect to the second input. Missing source for input stream at + // index 0. 
+ base >> multi_node.In(1); + + EXPECT_DEATH(graph.GetConfig(), + testing::HasSubstr("MultiInputsOutputs: Missing source for " + "input stream with tag (empty) at index 0")); +} + } // namespace } // namespace mediapipe::api2::builder diff --git a/mediapipe/framework/api2/packet.h b/mediapipe/framework/api2/packet.h index 750ef80327..9abf879793 100644 --- a/mediapipe/framework/api2/packet.h +++ b/mediapipe/framework/api2/packet.h @@ -11,16 +11,20 @@ #define MEDIAPIPE_FRAMEWORK_API2_PACKET_H_ #include +#include #include #include #include "absl/base/attributes.h" #include "absl/base/optimization.h" #include "absl/log/absl_check.h" +#include "absl/log/absl_log.h" #include "absl/meta/type_traits.h" #include "absl/status/status.h" #include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" #include "mediapipe/framework/api2/tuple.h" +#include "mediapipe/framework/legacy_calculator_support.h" #include "mediapipe/framework/packet.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/status_macros.h" @@ -263,7 +267,21 @@ class Packet : public Packet<internal::Generic> { Packet<T> At(Timestamp timestamp) &&; const T& Get() const { - ABSL_CHECK(payload_); + if (!payload_) { + // TODO - Remove this check once stack trace symbolization + // works on Android non-apk execution. + const CalculatorContext* calculator_context = + LegacyCalculatorSupport::Scoped<CalculatorContext>::current(); + if (calculator_context) { + ABSL_LOG(FATAL) << absl::StrCat("Get() called for type ", + MediaPipeTypeStringOrDemangled<T>(), + " on empty packet during execution of ", + calculator_context->NodeName(), "."); + } + ABSL_LOG(FATAL) << absl::StrCat("Get() called for type ", + MediaPipeTypeStringOrDemangled<T>(), + " on empty packet."); + } const packet_internal::Holder<T>* typed_payload = payload_->As<T>(); ABSL_CHECK(typed_payload); return typed_payload->data(); diff --git a/mediapipe/framework/calculator.proto b/mediapipe/framework/calculator.proto index 9984ee2505..daa2d594e6 100644 --- a/mediapipe/framework/calculator.proto +++ b/mediapipe/framework/calculator.proto @@ -217,6 +217,19 @@ message ProfilerConfig { string calculator_filter = 18; } +// Configuration for the runtime info logger. It collects runtime information +// and statistics about calculators and their input streams at the configured +// capture rate and writes them to LOG(INFO). It can be used to inspect a +// stalled graph by understanding which calculators are waiting for input +// packets to trigger their Process() method. +message GraphRuntimeInfoConfig { + // If true, the runtime info logger is enabled and runs in the background. + bool enable_graph_runtime_info = 1; + // The period in milliseconds at which the runtime info logger is updated. + // The default value is 10 seconds. + uint32 capture_period_msec = 2; +} + // Describes the topology and function of a MediaPipe Graph. The graph of // Nodes must be a Directed Acyclic Graph (DAG) except as annotated by // "back_edge" in InputStreamInfo. Use a mediapipe::CalculatorGraph object to @@ -392,6 +405,9 @@ message CalculatorGraphConfig { // calculators from running. If false, max_queue_size for an input stream // is adjusted when throttling prevents all calculators from running. bool report_deadlock = 21; + // Enable the collection of runtime information and statistics about + // calculators and their input streams. + GraphRuntimeInfoConfig runtime_info = 22; // Config for this graph's InputStreamHandler. // If unspecified, the framework will automatically install the default // handler, which works as follows.
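The new GraphRuntimeInfoConfig above is wired into CalculatorGraph below. As a minimal sketch (using only the fields introduced in this patch; the pass-through graph itself is illustrative), enabling the background runtime info logger looks like:

#include "absl/log/absl_check.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

mediapipe::CalculatorGraphConfig config =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
      input_stream: "in"
      node {
        calculator: "PassThroughCalculator"
        input_stream: "in"
        output_stream: "out"
      }
      runtime_info {
        enable_graph_runtime_info: true
        capture_period_msec: 10000  # capture roughly every 10 seconds
      }
    )pb");

mediapipe::CalculatorGraph graph;
ABSL_CHECK_OK(graph.Initialize(config));
// While the graph runs, calculator and input stream state is written to
// LOG(INFO) at the configured period; GetGraphRuntimeInfo() (added below)
// returns the same snapshot on demand. Per the Initialize() change below,
// the background logger is not available under Emscripten.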
diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc index a64c295ac9..dfaf0230c7 100644 --- a/mediapipe/framework/calculator_graph.cc +++ b/mediapipe/framework/calculator_graph.cc @@ -37,10 +37,12 @@ #include "absl/strings/string_view.h" #include "absl/strings/substitute.h" #include "absl/synchronization/mutex.h" +#include "absl/time/time.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_base.h" #include "mediapipe/framework/counter_factory.h" #include "mediapipe/framework/delegating_executor.h" +#include "mediapipe/framework/deps/clock.h" #include "mediapipe/framework/executor.h" #include "mediapipe/framework/graph_output_stream.h" #include "mediapipe/framework/graph_service_manager.h" @@ -76,6 +78,7 @@ #include "mediapipe/framework/tool/validate.h" #include "mediapipe/framework/tool/validate_name.h" #include "mediapipe/framework/validated_graph_config.h" +#include "mediapipe/framework/vlog_overrides.h" #include "mediapipe/gpu/gpu_service.h" #include "mediapipe/gpu/graph_support.h" #include "mediapipe/util/cpu_util.h" @@ -145,6 +148,7 @@ CalculatorGraph::CalculatorGraph(CalculatorContext* cc) // TODO b/368015341- Use factory method to avoid CHECK in constructor. ABSL_CHECK_OK(DisallowServiceDefaultInitialization()); } + SetVLogOverrides(); } CalculatorGraph::CalculatorGraph(CalculatorGraphConfig config) @@ -391,6 +395,11 @@ absl::Status CalculatorGraph::InitializeExecutors() { MEDIAPIPE_CHECK_OK(SetExecutorInternal( executor_config.name(), std::shared_ptr(executor))); } +#ifdef __EMSCRIPTEN__ + // Emscripten runs the application single threaded and therefore requires to + // use the application thread. + use_application_thread = true; +#endif // __EMSCRIPTEN__ if (!mediapipe::ContainsKey(executors_, "")) { MP_RETURN_IF_ERROR(InitializeDefaultExecutor(default_executor_options, @@ -404,7 +413,9 @@ absl::Status CalculatorGraph::InitializeDefaultExecutor( const ThreadPoolExecutorOptions* default_executor_options, bool use_application_thread) { #ifdef __EMSCRIPTEN__ - use_application_thread = true; + // Emscripten runs the application single threaded and therefore requires to + // use the application thread. + RET_CHECK(use_application_thread); #endif // __EMSCRIPTEN__ // If specified, run synchronously on the calling thread. if (use_application_thread) { @@ -456,6 +467,25 @@ absl::Status CalculatorGraph::Initialize( #endif initialized_ = true; + +#if !defined(__EMSCRIPTEN__) + // Emscripten only supports single threaded applications. + const auto& runtime_info_logger_config = + validated_graph_->Config().runtime_info(); + if (runtime_info_logger_config.enable_graph_runtime_info()) { + MP_RETURN_IF_ERROR(graph_runtime_info_logger_.StartInBackground( + runtime_info_logger_config, + [this]() { return GetGraphRuntimeInfo(); })); + } +#else + const auto& runtime_info_logger_config = + validated_graph_->Config().runtime_info(); + // TODO - remove once graph runtime infos are supported in + // Emscripten. 
+ if (runtime_info_logger_config.enable_graph_runtime_info()) { + ABSL_LOG(WARNING) << "Graph runtime info is not supported in Emscripten."; + } +#endif // !defined(__EMSCRIPTEN__) return absl::OkStatus(); } @@ -912,6 +942,17 @@ absl::Status CalculatorGraph::WaitForObservedOutput() { return scheduler_.WaitForObservedOutput(); } +absl::StatusOr CalculatorGraph::GetGraphRuntimeInfo() { + RET_CHECK(initialized_); + GraphRuntimeInfo info; + for (const auto& node : nodes_) { + *info.add_calculator_infos() = node->GetStreamMonitoringInfo(); + } + const absl::Time time_now = mediapipe::Clock::RealClock()->TimeNow(); + info.set_capture_time_unix_us(absl::ToUnixMicros(time_now)); + return info; +} + absl::Status CalculatorGraph::AddPacketToInputStream( absl::string_view stream_name, const Packet& packet) { return AddPacketToInputStreamInternal(stream_name, packet); diff --git a/mediapipe/framework/calculator_graph.h b/mediapipe/framework/calculator_graph.h index 19032fbc2f..bc11ae9fdc 100644 --- a/mediapipe/framework/calculator_graph.h +++ b/mediapipe/framework/calculator_graph.h @@ -40,6 +40,7 @@ #include "mediapipe/framework/counter_factory.h" #include "mediapipe/framework/executor.h" #include "mediapipe/framework/graph_output_stream.h" +#include "mediapipe/framework/graph_runtime_info.pb.h" #include "mediapipe/framework/graph_service.h" #include "mediapipe/framework/graph_service_manager.h" #include "mediapipe/framework/mediapipe_profiling.h" @@ -57,6 +58,10 @@ #include "mediapipe/framework/timestamp.h" #include "mediapipe/framework/validated_graph_config.h" +#if !defined(__EMSCRIPTEN__) +#include "mediapipe/framework/tool/graph_runtime_info_logger.h" +#endif // !defined(__EMSCRIPTEN__) + namespace mediapipe { #if !MEDIAPIPE_DISABLE_GPU @@ -257,6 +262,11 @@ class CalculatorGraph { // Quick non-locking means of checking if the graph has encountered an error. bool HasError() const { return has_error_; } + // Returns debugging information about the graph's transient state, including + // information about all input streams and their timestamp bounds. This + // method is thread-safe and can be called from any thread. + absl::StatusOr GetGraphRuntimeInfo(); + // Add a Packet to a graph input stream based on the graph input stream add // mode. If the mode is ADD_IF_NOT_FULL, the packet will not be added if any // queue exceeds max_queue_size specified by the graph config and will return @@ -764,6 +774,11 @@ class CalculatorGraph { std::shared_ptr profiler_; internal::Scheduler scheduler_; + +#if !defined(__EMSCRIPTEN__) + // Collects runtime information about the graph in the background.
+ tool::GraphRuntimeInfoLogger graph_runtime_info_logger_; +#endif // !defined(__EMSCRIPTEN__) }; } // namespace mediapipe diff --git a/mediapipe/framework/calculator_graph_test.cc b/mediapipe/framework/calculator_graph_test.cc index 745271e53b..ef59bf6388 100644 --- a/mediapipe/framework/calculator_graph_test.cc +++ b/mediapipe/framework/calculator_graph_test.cc @@ -43,6 +43,7 @@ #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/collection_item_id.h" #include "mediapipe/framework/counter_factory.h" +#include "mediapipe/framework/deps/clock.h" #include "mediapipe/framework/executor.h" #include "mediapipe/framework/input_stream_handler.h" #include "mediapipe/framework/lifetime_tracker.h" @@ -70,7 +71,6 @@ #include "mediapipe/gpu/gpu_service.h" namespace mediapipe { - namespace { constexpr char kCounter2Tag[] = "COUNTER2"; @@ -83,8 +83,9 @@ constexpr char kOutputTag[] = "OUTPUT"; constexpr char kInputTag[] = "INPUT"; constexpr char kSelectTag[] = "SELECT"; -using testing::ElementsAre; -using testing::HasSubstr; +using ::mediapipe::Clock; +using ::testing::ElementsAre; +using ::testing::HasSubstr; // Pass packets through. Note that it calls SetOffset() in Process() // instead of Open(). diff --git a/mediapipe/framework/calculator_node.cc b/mediapipe/framework/calculator_node.cc index 6767cb874d..2a7d56c934 100644 --- a/mediapipe/framework/calculator_node.cc +++ b/mediapipe/framework/calculator_node.cc @@ -17,22 +17,28 @@ #include #include #include +#include #include #include +#include +#include "absl/cleanup/cleanup.h" #include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" +#include "absl/strings/str_split.h" #include "absl/strings/string_view.h" #include "absl/strings/substitute.h" #include "absl/synchronization/mutex.h" +#include "absl/time/time.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_base.h" #include "mediapipe/framework/calculator_state.h" #include "mediapipe/framework/counter_factory.h" +#include "mediapipe/framework/deps/clock.h" #include "mediapipe/framework/graph_service_manager.h" #include "mediapipe/framework/input_stream_manager.h" #include "mediapipe/framework/mediapipe_profiling.h" @@ -54,6 +60,7 @@ namespace mediapipe { +using ::mediapipe::Clock; namespace { const PacketType* GetPacketType(const PacketTypeSet& packet_type_set, @@ -116,7 +123,11 @@ std::unique_ptr RemoveOmittedPacketTypes( } // namespace -CalculatorNode::CalculatorNode() {} +CalculatorNode::CalculatorNode() { + absl::Time now = Clock::RealClock()->TimeNow(); + last_process_start_ts_ = now; + last_process_finish_ts_ = now; +} Timestamp CalculatorNode::SourceProcessOrder( const CalculatorContext* cc) const { @@ -218,6 +229,29 @@ absl::Status CalculatorNode::Initialize( return InitializeInputStreams(input_stream_managers, output_stream_managers); } +CalculatorRuntimeInfo CalculatorNode::GetStreamMonitoringInfo() const { + CalculatorRuntimeInfo calculator_info; + calculator_info.set_calculator_name(DebugName()); + { + absl::MutexLock lock(&runtime_info_mutex_); + calculator_info.set_last_process_start_unix_us( + absl::ToUnixMicros(last_process_start_ts_)); + calculator_info.set_last_process_finish_unix_us( + absl::ToUnixMicros(last_process_finish_ts_)); + } + const auto monitoring_info = input_stream_handler_->GetMonitoringInfo(); + for (const auto& [stream_name, queue_size, num_packets_added, minimum_timestamp_or_bound] : monitoring_info) { + auto* stream_info = calculator_info.add_input_stream_infos(); + stream_info->set_stream_name(stream_name); + stream_info->set_queue_size(queue_size); + stream_info->set_number_of_packets_added(num_packets_added); + stream_info->set_minimum_timestamp_or_bound( + minimum_timestamp_or_bound.Value()); + } + return calculator_info; +} + absl::Status CalculatorNode::InitializeOutputSidePackets( const PacketTypeSet& output_side_packet_types, OutputSidePacketImpl* output_side_packets) { @@ -669,14 +703,14 @@ void CalculatorNode::SchedulingLoop() { max_allowance = max_in_flight_ - current_in_flight_; } while (true) { - Timestamp input_bound; - // input_bound is set to a meaningful value iff the latest readiness of the - // node is kNotReady when ScheduleInvocations() returns. - input_stream_handler_->ScheduleInvocations(max_allowance, &input_bound); - if (input_bound != Timestamp::Unset()) { + // last_timestamp_bound_ is set to a meaningful value iff the latest + // readiness of the node is kNotReady when ScheduleInvocations() returns. + input_stream_handler_->ScheduleInvocations(max_allowance, + &last_timestamp_bound_); + if (last_timestamp_bound_ != Timestamp::Unset()) { // Updates the minimum timestamp for which a new packet could possibly // arrive. - output_stream_handler_->UpdateTaskTimestampBound(input_bound); + output_stream_handler_->UpdateTaskTimestampBound(last_timestamp_bound_); } { @@ -805,6 +839,18 @@ std::string CalculatorNode::DebugName() const { // TODO: Split this function. absl::Status CalculatorNode::ProcessNode( CalculatorContext* calculator_context) { + // Update calculator runtime info. + { + absl::MutexLock lock(&runtime_info_mutex_); + last_process_start_ts_ = Clock::RealClock()->TimeNow(); + } + absl::Cleanup last_process_finish_ts_cleanup([this]() { + { + absl::MutexLock lock(&runtime_info_mutex_); + last_process_finish_ts_ = Clock::RealClock()->TimeNow(); + } + }); + if (IsSource()) { // This is a source Calculator. if (Closed()) { diff --git a/mediapipe/framework/calculator_node.h b/mediapipe/framework/calculator_node.h index 1340461169..26b644dec1 100644 --- a/mediapipe/framework/calculator_node.h +++ b/mediapipe/framework/calculator_node.h @@ -28,13 +28,16 @@ #include #include "absl/base/macros.h" +#include "absl/base/thread_annotations.h" #include "absl/status/status.h" #include "absl/synchronization/mutex.h" +#include "absl/time/time.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_base.h" #include "mediapipe/framework/calculator_context.h" #include "mediapipe/framework/calculator_context_manager.h" #include "mediapipe/framework/calculator_state.h" +#include "mediapipe/framework/graph_runtime_info.pb.h" #include "mediapipe/framework/graph_service_manager.h" #include "mediapipe/framework/input_side_packet_handler.h" #include "mediapipe/framework/input_stream_handler.h" @@ -239,6 +242,14 @@ class CalculatorNode { return node_type_info_->Contract(); } + // Returns the last timestamp bound used to schedule this node. + Timestamp GetLastTimestampBound() const { return last_timestamp_bound_; } + + // Returns the runtime monitoring info for this node: the last Process() + // start/finish times and, for each input stream, its name, queue size, + // number of packets added, and minimum timestamp or bound. + CalculatorRuntimeInfo GetStreamMonitoringInfo() const; + private: // Sets up the output side packets from the main flat array.
absl::Status InitializeOutputSidePackets( @@ -376,6 +387,16 @@ class CalculatorNode { const ValidatedGraphConfig* validated_graph_ = nullptr; const NodeTypeInfo* node_type_info_ = nullptr; + + // Keeps track of the latest timestamp bound used to schedule this node. + Timestamp last_timestamp_bound_ = Timestamp::Unset(); + + // Keeps track of the runtime info for this node. + mutable absl::Mutex runtime_info_mutex_; + absl::Time last_process_start_ts_ ABSL_GUARDED_BY(runtime_info_mutex_) = + absl::InfinitePast(); + absl::Time last_process_finish_ts_ ABSL_GUARDED_BY(runtime_info_mutex_) = + absl::InfinitePast(); }; } // namespace mediapipe diff --git a/mediapipe/framework/deps/BUILD b/mediapipe/framework/deps/BUILD index 615f7f1d5a..2dc600ada7 100644 --- a/mediapipe/framework/deps/BUILD +++ b/mediapipe/framework/deps/BUILD @@ -110,6 +110,22 @@ cc_library( ], ) +cc_library( + name = "platform_strings", + srcs = ["platform_strings.cc"], + hdrs = ["platform_strings.h"], + visibility = ["//visibility:public"], +) + +cc_library( + name = "mmapped_file", + hdrs = ["mmapped_file.h"], + deps = [ + "//mediapipe/framework/port:logging", + "@com_google_absl//absl/status", + ], +) + cc_library( name = "file_helpers", srcs = ["file_helpers.cc"], @@ -117,6 +133,27 @@ cc_library( visibility = ["//visibility:public"], deps = [ ":file_path", + ":mmapped_file", + ":platform_strings", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/base:config", + "@com_google_absl//absl/cleanup", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ] + select({ + "//mediapipe:windows": [], + "//conditions:default": ["//mediapipe/framework/formats:unique_fd"], + }), +) + +cc_library( + name = "mlock_helpers", + srcs = ["mlock_helpers.cc"], + hdrs = ["mlock_helpers.h"], + visibility = ["//visibility:public"], + deps = [ + ":platform_strings", "//mediapipe/framework/port:status", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", diff --git a/mediapipe/framework/deps/file_helpers.cc b/mediapipe/framework/deps/file_helpers.cc index 5cfaf09e70..a20a129aa6 100644 --- a/mediapipe/framework/deps/file_helpers.cc +++ b/mediapipe/framework/deps/file_helpers.cc @@ -14,34 +14,44 @@ #include "mediapipe/framework/deps/file_helpers.h" -#include "absl/strings/str_cat.h" - #ifdef _WIN32 #include #include - -#include -#include +#include #else #include +#include +#include #endif // _WIN32 #include #include #include #include +#include #include +#include +#include "absl/base/config.h" +#include "absl/cleanup/cleanup.h" // IWYU pragma: keep #include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" #include "mediapipe/framework/deps/file_path.h" -#include "mediapipe/framework/port/canonical_errors.h" -#include "mediapipe/framework/port/status.h" -#include "mediapipe/framework/port/status_builder.h" +#include "mediapipe/framework/deps/mmapped_file.h" +#include "mediapipe/framework/deps/platform_strings.h" // IWYU pragma: keep +#ifndef _WIN32 +#include "mediapipe/framework/formats/unique_fd.h" +#endif // !_WIN32 #include "mediapipe/framework/port/status_macros.h" namespace mediapipe { namespace file { namespace { +size_t RoundUp(size_t size, size_t align) { + return (size + align - 1) & ~(align - 1); +} // Helper class that returns all entries (files, directories) in a directory, // except "." and "..". 
Example usage: @@ -93,31 +103,12 @@ class DirectoryListing { struct dirent* next_entry_ = nullptr; }; #else -#if defined(UNICODE) -using PathString = std::wstring; - -PathString Utf8ToNative(const std::string& string) { - std::wstring_convert, wchar_t> converter; - return converter.from_bytes(string.data(), string.data() + string.size()); -} -std::string NativeToUtf8(const PathString& string) { - std::wstring_convert, wchar_t> converter; - return converter.to_bytes(string.data(), string.data() + string.size()); -} -#define FILE_PATH_LITERAL_INTERNAL(x) L##x -#define FILE_PATH_LITERAL(x) FILE_PATH_LITERAL_INTERNAL(x) -#else -using PathString = std::string; -PathString Utf8ToNative(const std::string& string) { return string; } -std::string NativeToUtf8(const PathString& string) { return string; } -#define FILE_PATH_LITERAL(x) x -#endif - class DirectoryListing { public: explicit DirectoryListing(const std::string& directory) : directory_(Utf8ToNative(directory)) { - PathString search_string = directory_ + Utf8ToNative("\\*.*"); + PlatformString search_string = + directory_ + PLATFORM_STRING_LITERAL("\\*.*"); find_handle_ = FindFirstFile(search_string.c_str(), &find_data_); } @@ -134,8 +125,8 @@ class DirectoryListing { // after the one that is returned, if it exists. std::string NextEntry() { if (HasNextEntry()) { - PathString result = - directory_ + Utf8ToNative("\\") + PathString(find_data_.cFileName); + PlatformString result = directory_ + PLATFORM_STRING_LITERAL("\\") + + PlatformString(find_data_.cFileName); ReadNextEntry(); return NativeToUtf8(result); } else { @@ -146,9 +137,10 @@ class DirectoryListing { private: void ReadNextEntry() { int find_result = FindNextFile(find_handle_, &find_data_); - while (find_result != 0 && - (PathString(find_data_.cFileName) == FILE_PATH_LITERAL(".") || - PathString(find_data_.cFileName) == FILE_PATH_LITERAL(".."))) { + while (find_result != 0 && (PlatformString(find_data_.cFileName) == + PLATFORM_STRING_LITERAL(".") || + PlatformString(find_data_.cFileName) == + PLATFORM_STRING_LITERAL(".."))) { find_result = FindNextFile(find_handle_, &find_data_); } @@ -158,7 +150,7 @@ class DirectoryListing { } } - const PathString directory_; + const PlatformString directory_; HANDLE find_handle_ = INVALID_HANDLE_VALUE; WIN32_FIND_DATA find_data_; }; @@ -166,12 +158,11 @@ class DirectoryListing { } // namespace -absl::Status GetContents(absl::string_view file_name, std::string* output, +absl::Status GetContents(absl::string_view path, std::string* output, bool read_as_binary) { - FILE* fp = fopen(file_name.data(), read_as_binary ? "rb" : "r"); + FILE* fp = fopen(std::string(path).c_str(), read_as_binary ? 
"rb" : "r"); if (fp == NULL) { - return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) - << "Can't find file: " << file_name; + return absl::NotFoundError(absl::StrCat("Can't find file: ", path)); } output->clear(); @@ -179,8 +170,8 @@ absl::Status GetContents(absl::string_view file_name, std::string* output, char buf[4096]; size_t ret = fread(buf, 1, 4096, fp); if (ret == 0 && ferror(fp)) { - return mediapipe::InternalErrorBuilder(MEDIAPIPE_LOC) - << "Error while reading file: " << file_name; + return absl::UnavailableError( + absl::StrCat("Error while reading file: ", path)); } output->append(std::string(buf, ret)); } @@ -188,41 +179,217 @@ absl::Status GetContents(absl::string_view file_name, std::string* output, return absl::OkStatus(); } -absl::Status SetContents(absl::string_view file_name, - absl::string_view content) { - FILE* fp = fopen(file_name.data(), "wb"); +absl::Status SetContents(absl::string_view path, absl::string_view content) { + FILE* fp = fopen(std::string(path).c_str(), "wb"); if (fp == NULL) { - return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) - << "Can't open file: " << file_name; + return absl::InvalidArgumentError(absl::StrCat("Can't open file: ", path)); } fwrite(content.data(), sizeof(char), content.size(), fp); size_t write_error = ferror(fp); if (fclose(fp) != 0 || write_error) { - return mediapipe::InternalErrorBuilder(MEDIAPIPE_LOC) - << "Error while writing file: " << file_name - << ". Error message: " << strerror(write_error); + return absl::UnavailableError( + absl::StrCat("Error while writing file: ", path, + ". Error message: ", strerror(write_error))); } return absl::OkStatus(); } -absl::Status AppendStringToFile(absl::string_view file_name, +absl::Status AppendStringToFile(absl::string_view path, absl::string_view contents) { - FILE* fp = fopen(file_name.data(), "ab"); + FILE* fp = fopen(std::string(path).c_str(), "ab"); if (!fp) { - return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) - << "Can't open file: " << file_name; + return absl::InvalidArgumentError(absl::StrCat("Can't open file: ", path)); } fwrite(contents.data(), sizeof(char), contents.size(), fp); size_t write_error = ferror(fp); if (fclose(fp) != 0 || write_error) { - return mediapipe::InternalErrorBuilder(MEDIAPIPE_LOC) - << "Error while writing file: " << file_name - << ". Error message: " << strerror(write_error); + return absl::UnavailableError( + absl::StrCat("Error while writing file: ", path, + ". 
Error message: ", strerror(write_error))); + } + return absl::OkStatus(); +} + +#ifdef _WIN32 +class WindowsMMap : public MemoryMappedFile { + public: + WindowsMMap(std::string path, const void* base_address, size_t length, + HANDLE file_handle, HANDLE mapping_handle) + : MemoryMappedFile(std::move(path), base_address, length), + file_handle_(file_handle), + mapping_handle_(mapping_handle) {} + + virtual absl::Status Close() override; + + private: + const HANDLE file_handle_; + const HANDLE mapping_handle_; +}; + +absl::StatusOr> MMapFile( + absl::string_view path) { + std::string name_string = std::string(path); + const HANDLE file_handle = CreateFile( + /*lpFileName=*/Utf8ToNative(name_string).c_str(), + /*dwDesiredAccess=*/GENERIC_READ, + /*dwShareMode=*/FILE_SHARE_READ, + /*lpSecurityAttributes=*/NULL, + /*dwCreationDisposition=*/OPEN_EXISTING, + /*dwFlagsAndAttributes=*/FILE_ATTRIBUTE_NORMAL, + /*hTemplateFile=*/NULL); + if (file_handle == INVALID_HANDLE_VALUE) { + return absl::UnavailableError( + absl::StrCat("Failed to open the file '", path, + "' for reading: ", FormatLastError())); + } + absl::Cleanup file_closer = [file_handle] { CloseHandle(file_handle); }; + + // We're calling `CreateFileMappingA` regardless of `UNICODE` because we don't + // pass the `lpName` string parameter. + const HANDLE mapping_handle = CreateFileMappingA( + /*hFile=*/file_handle, + /*lpFileMappingAttributes=*/NULL, + /*flProtect=*/PAGE_READONLY, + /*dwMaximumSizeHigh=*/0, // If `dwMaximumSize{Low,High} are zero, + /*dwMaximumSizeLow=*/0, // the maximum mapping size is the file size. + /*lpName=*/NULL); + if (mapping_handle == INVALID_HANDLE_VALUE) { + return absl::UnavailableError( + absl::StrCat("Failed to create a memory mapping for the file '", path, + "': ", FormatLastError())); + } + absl::Cleanup mapping_closer = [mapping_handle] { + CloseHandle(mapping_handle); + }; + + const LPVOID base_address = MapViewOfFile( + /*hFileMappingObject=*/mapping_handle, + /*dwDesiredAccess=*/FILE_MAP_READ, + /*dwFileOffsetHigh=*/0, + /*dwFileOffsetLow=*/0, + /*dwNumberOfBytesToMap=*/0 // Extends to the file end. 
+ ); + if (base_address == NULL) { + return absl::UnavailableError(absl::StrCat( + "Failed to memory-map the file '", path, "': ", FormatLastError())); + } + + LARGE_INTEGER large_length; + const BOOL success = GetFileSizeEx(file_handle, &large_length); + if (!success) { + return absl::UnavailableError( + absl::StrCat("Failed to determine the size of the file '", path, + "': ", FormatLastError())); + } + const size_t length = static_cast(large_length.QuadPart); + + std::move(file_closer).Cancel(); + std::move(mapping_closer).Cancel(); + + return std::make_unique(std::move(name_string), base_address, + length, file_handle, mapping_handle); +} + +absl::Status WindowsMMap::Close() { + BOOL success = UnmapViewOfFile(BaseAddress()); + if (!success) { + return absl::UnavailableError(absl::StrCat( + "Failed to unmap the file '", Path(), "': ", FormatLastError())); + } + success = CloseHandle(mapping_handle_); + if (!success) { + return absl::UnavailableError( + absl::StrCat("Failed to close the memory mapping for file '", Path(), + "': ", FormatLastError())); + } + success = CloseHandle(file_handle_); + if (!success) { + return absl::UnavailableError(absl::StrCat( + "Failed to close the file '", Path(), "': ", FormatLastError())); } return absl::OkStatus(); } +#elif ABSL_HAVE_MMAP +class PosixMMap : public MemoryMappedFile { + public: + PosixMMap(std::string path, const void* base_address, size_t length, + UniqueFd&& fd) + : MemoryMappedFile(path, base_address, length), + unique_fd_(std::move(fd)) {} + + absl::Status Close() override; + + private: + UniqueFd unique_fd_; +}; + +absl::StatusOr> MMapFile( + absl::string_view path) { + std::string name_string = std::string(path); + const int fd = open(name_string.c_str(), O_RDONLY); + if (fd < 0) { + return absl::UnavailableError(absl::StrCat( + "Couldn't open file '", path, "' for reading: ", FormatLastError())); + } + UniqueFd unique_fd(fd); + + struct stat file_stat; + const int status = fstat(unique_fd.Get(), &file_stat); + if (status < 0) { + return absl::UnavailableError( + absl::StrCat("Couldn't stat file '", path, "': ", FormatLastError())); + } + size_t length = file_stat.st_size; + + const void* base_address = + mmap(nullptr, length, PROT_READ, /*flags=*/MAP_SHARED, unique_fd.Get(), + /*offset=*/0); + if (base_address == MAP_FAILED) { + return absl::UnavailableError(absl::StrCat( + "Couldn't map file '", path, "' into memory: ", FormatLastError())); + } + + return std::make_unique(std::move(name_string), base_address, + length, std::move(unique_fd)); +} + +absl::Status PosixMMap::Close() { + // `munmap` length should be a multiple of page size. 
+ const int page_size = sysconf(_SC_PAGESIZE); + const size_t aligned_length = + RoundUp(Length(), static_cast(page_size)); + int status = munmap(const_cast(BaseAddress()), aligned_length); + if (status < 0) { + return absl::UnavailableError(absl::StrCat( + "Couldn't unmap file '", Path(), "' from memory: ", FormatLastError())); + } + + status = close(unique_fd_.Release()); + if (status < 0) { + return absl::UnavailableError(absl::StrCat("Couldn't close file '", Path(), + "': ", FormatLastError())); + } + return absl::OkStatus(); +} +#else // _WIN32 / ABSL_HAVE_MMAP +absl::StatusOr> MMapFile( + absl::string_view path) { + return absl::UnavailableError(absl::StrCat( + "No supported memory-mapping mechanism is provided for file '", path, + "'")); +} + +absl::Status LockMemory(const void* base_address, size_t length) { + return absl::UnavailableError("Locking memory unsupported"); +} + +absl::Status UnlockMemory(const void* base_address, size_t length) { + return absl::UnavailableError( + "Shouldn't attempt unlocking memory where locking is not supported"); +} +#endif // _WIN32 / ABSL_HAVE_MMAP absl::Status MatchInTopSubdirectories(const std::string& parent_directory, const std::string& file_name, @@ -272,7 +439,7 @@ absl::Status Exists(absl::string_view file_name) { } switch (errno) { case EACCES: - return mediapipe::PermissionDeniedError("Insufficient permissions."); + return absl::PermissionDeniedError("Insufficient permissions."); default: return absl::NotFoundError( absl::StrCat("The path does not exist: ", file_name)); @@ -314,7 +481,7 @@ absl::Status RecursivelyCreateDir(absl::string_view path) { if (mkdir(std::string(path)) != 0) { switch (errno) { case EACCES: - return mediapipe::PermissionDeniedError("Insufficient permissions."); + return absl::PermissionDeniedError("Insufficient permissions."); default: return absl::UnavailableError("Failed to create directory."); } diff --git a/mediapipe/framework/deps/file_helpers.h b/mediapipe/framework/deps/file_helpers.h index 2725d6c1d2..6724fa5427 100644 --- a/mediapipe/framework/deps/file_helpers.h +++ b/mediapipe/framework/deps/file_helpers.h @@ -12,11 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. 
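A minimal usage sketch for the memory-mapping helpers above, assuming a readable file at an illustrative path; note that the mapping must be released with an explicit Close() call, since MemoryMappedFile's destructor does not unmap:

  #include <memory>
  #include <string>

  #include "absl/status/status.h"
  #include "mediapipe/framework/deps/file_helpers.h"
  #include "mediapipe/framework/port/status_macros.h"

  // Copies a file's bytes out of a read-only memory mapping.
  absl::Status ReadViaMMap(std::string* contents) {
    MP_ASSIGN_OR_RETURN(std::unique_ptr<mediapipe::file::MemoryMappedFile> mapped,
                        mediapipe::file::MMapFile("/tmp/example.bin"));
    contents->assign(static_cast<const char*>(mapped->BaseAddress()),
                     mapped->Length());
    return mapped->Close();
  }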
-#ifndef MEDIAPIPE_DEPS_FILE_HELPERS_H_ -#define MEDIAPIPE_DEPS_FILE_HELPERS_H_ +#ifndef MEDIAPIPE_FRAMEWORK_DEPS_FILE_HELPERS_H_ +#define MEDIAPIPE_FRAMEWORK_DEPS_FILE_HELPERS_H_ +#include "absl/status/status.h" +#include "absl/status/statusor.h" #include "absl/strings/match.h" -#include "mediapipe/framework/port/status.h" +#include "mediapipe/framework/deps/mmapped_file.h" namespace mediapipe { namespace file { @@ -29,6 +31,12 @@ absl::Status SetContents(absl::string_view file_name, absl::Status AppendStringToFile(absl::string_view file_name, absl::string_view contents); +absl::StatusOr> MMapFile( + absl::string_view path); + +absl::Status LockMemory(const void* base_address, size_t length); +absl::Status UnlockMemory(const void* base_address, size_t length); + absl::Status MatchInTopSubdirectories(const std::string& parent_directory, const std::string& file_name, std::vector* results); @@ -46,4 +54,4 @@ absl::Status RecursivelyCreateDir(absl::string_view path); } // namespace file } // namespace mediapipe -#endif // MEDIAPIPE_DEPS_FILE_HELPERS_H_ +#endif // MEDIAPIPE_FRAMEWORK_DEPS_FILE_HELPERS_H_ diff --git a/mediapipe/framework/deps/mlock_helpers.cc b/mediapipe/framework/deps/mlock_helpers.cc new file mode 100644 index 0000000000..0d335f5698 --- /dev/null +++ b/mediapipe/framework/deps/mlock_helpers.cc @@ -0,0 +1,56 @@ +#include "mediapipe/framework/deps/mlock_helpers.h" + +#include + +#ifdef _WIN32 +// clang-format off +#include // Must come before other Windows headers. +// clang-format on +#include +#else +#include +#endif + +#include "absl/status/status.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/deps/platform_strings.h" + +namespace mediapipe { +#ifdef _WIN32 +absl::Status LockMemory(const void* base_address, size_t length) { + BOOL status = VirtualLock(const_cast(base_address), length); + if (!status) { + return absl::UnavailableError( + absl::StrCat("Failed to lock pages in memory: ", FormatLastError())); + } + return absl::OkStatus(); +} + +absl::Status UnlockMemory(const void* base_address, size_t length) { + BOOL status = VirtualUnlock(const_cast(base_address), length); + if (!status) { + return absl::UnavailableError( + absl::StrCat("Failed to unlock memory pages: ", FormatLastError())); + } + return absl::OkStatus(); +} +#else // _WIN32 +absl::Status LockMemory(const void* base_address, size_t length) { + int status = mlock(base_address, length); + if (status < 0) { + return absl::UnavailableError( + absl::StrCat("Failed to lock pages in memory: ", FormatLastError())); + } + return absl::OkStatus(); +} + +absl::Status UnlockMemory(const void* base_address, size_t length) { + int status = munlock(base_address, length); + if (status < 0) { + return absl::UnavailableError( + absl::StrCat("Failed to unlock memory pages: ", FormatLastError())); + } + return absl::OkStatus(); +} +#endif // _WIN32 +} // namespace mediapipe diff --git a/mediapipe/framework/deps/mlock_helpers.h b/mediapipe/framework/deps/mlock_helpers.h new file mode 100644 index 0000000000..1f4a1ab585 --- /dev/null +++ b/mediapipe/framework/deps/mlock_helpers.h @@ -0,0 +1,11 @@ +#ifndef MEDIAPIPE_FRAMEWORK_DEPS_MLOCK_HELPERS_H_ +#define MEDIAPIPE_FRAMEWORK_DEPS_MLOCK_HELPERS_H_ +#include "absl/status/status.h" + +namespace mediapipe { +// Uses `mlock`/`VirtualLock` to pin memory pages. +absl::Status LockMemory(const void* base_address, size_t length); +// Unlocks a previously locked memory region. 
+absl::Status UnlockMemory(const void* base_address, size_t length); +} // namespace mediapipe +#endif // MEDIAPIPE_FRAMEWORK_DEPS_MLOCK_HELPERS_H_ diff --git a/mediapipe/framework/deps/mmapped_file.h b/mediapipe/framework/deps/mmapped_file.h new file mode 100644 index 0000000000..3409762f9f --- /dev/null +++ b/mediapipe/framework/deps/mmapped_file.h @@ -0,0 +1,46 @@ +// Copyright 2024 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include + +#include "absl/status/status.h" +#include "mediapipe/framework/port/logging.h" + +#ifndef MEDIAPIPE_FRAMEWORK_DEPS_MMAPPED_FILE_H_ +#define MEDIAPIPE_FRAMEWORK_DEPS_MMAPPED_FILE_H_ +namespace mediapipe { +namespace file { +class MemoryMappedFile { + public: + MemoryMappedFile(std::string path, const void* base_address, size_t length) + : path_(std::move(path)), base_address_(base_address), length_(length) {} + + virtual absl::Status Close() = 0; + + virtual ~MemoryMappedFile() = default; + + const std::string& Path() const { return path_; } + const void* BaseAddress() const { return base_address_; } + size_t Length() const { return length_; } + + private: + std::string path_; + const void* base_address_; + size_t length_; +}; +} // namespace file +} // namespace mediapipe +#endif // MEDIAPIPE_FRAMEWORK_DEPS_MMAPPED_FILE_H_ diff --git a/mediapipe/framework/deps/platform_strings.cc b/mediapipe/framework/deps/platform_strings.cc new file mode 100644 index 0000000000..fa8f3c791f --- /dev/null +++ b/mediapipe/framework/deps/platform_strings.cc @@ -0,0 +1,53 @@ +// Copyright 2024 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
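A sketch of how the mlock helpers combine with MemoryMappedFile: pin the mapped pages so they stay resident while the mapping is in active use, then unpin them. The function name is illustrative and the unlock error handling is simplified:

  #include "absl/status/status.h"
  #include "mediapipe/framework/deps/mlock_helpers.h"
  #include "mediapipe/framework/deps/mmapped_file.h"
  #include "mediapipe/framework/port/status_macros.h"

  // Locks the mapped region into RAM for the duration of its use.
  absl::Status PinWhileInUse(mediapipe::file::MemoryMappedFile& mapped) {
    MP_RETURN_IF_ERROR(
        mediapipe::LockMemory(mapped.BaseAddress(), mapped.Length()));
    // ... read from mapped.BaseAddress() without page-fault latency ...
    return mediapipe::UnlockMemory(mapped.BaseAddress(), mapped.Length());
  }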
+ +#include "mediapipe/framework/deps/platform_strings.h" + +#include + +namespace mediapipe { +#ifdef _WIN32 +#include + +std::string FormatLastError() { + DWORD message_id = GetLastError(); + if (message_id == 0) { + return std::string("(no error reported)"); + } + + LPSTR message_buffer = nullptr; + DWORD size = FormatMessage( + /*dwFlags=*/(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | + FORMAT_MESSAGE_IGNORE_INSERTS), + /*lpSource=*/NULL, + /*dwMessageId=*/message_id, + /*dwLanguageId=*/MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + /*lpBuffer=*/(LPSTR)&message_buffer, + /*nSize=*/0, + /*Arguments=*/NULL); + if (size == 0) { + return "(error while trying to format the error message)"; + } + + std::string message(message_buffer, size); + LocalFree(message_buffer); + return NativeToUtf8(message); +} +#else +#include +#include + +std::string FormatLastError() { return strerror(errno); } +#endif // _WIN32 +} // namespace mediapipe diff --git a/mediapipe/framework/deps/platform_strings.h b/mediapipe/framework/deps/platform_strings.h new file mode 100644 index 0000000000..6ff4fdbb2f --- /dev/null +++ b/mediapipe/framework/deps/platform_strings.h @@ -0,0 +1,48 @@ +// Copyright 2024 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_DEPS_PLATFORM_STRINGS_H_ +#define MEDIAPIPE_DEPS_PLATFORM_STRINGS_H_ +#include +#include +#include + +namespace mediapipe { +// `PlatformString` represents a native string type on the platform. +// `Utf8ToNative`/`NativeToUtf8` convert between UTF-8 and that type. +#if defined(_WIN32) && defined(UNICODE) +using PlatformString = std::wstring; + +inline PlatformString Utf8ToNative(const std::string& string) { + std::wstring_convert, wchar_t> converter; + return converter.from_bytes(string.data(), string.data() + string.size()); +} +inline std::string NativeToUtf8(const PlatformString& string) { + std::wstring_convert, wchar_t> converter; + return converter.to_bytes(string.data(), string.data() + string.size()); +} +#define PLATFORM_STRING_LITERAL_INTERNAL(x) L##x +#define PLATFORM_STRING_LITERAL(x) PLATFORM_STRING_LITERAL_INTERNAL(x) +#else +using PlatformString = std::string; + +inline PlatformString Utf8ToNative(const std::string& string) { return string; } +inline std::string NativeToUtf8(const PlatformString& string) { return string; } +#define PLATFORM_STRING_LITERAL(x) x +#endif + +// Produces a human-readable message about the last OS error. +std::string FormatLastError(); +} // namespace mediapipe +#endif // MEDIAPIPE_DEPS_PLATFORM_STRINGS_H_ diff --git a/mediapipe/framework/deps/platform_strings_test.cc b/mediapipe/framework/deps/platform_strings_test.cc new file mode 100644 index 0000000000..4f324b29d9 --- /dev/null +++ b/mediapipe/framework/deps/platform_strings_test.cc @@ -0,0 +1,32 @@ +// Copyright 2024 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/framework/deps/platform_strings.h" + +#include + +#include "mediapipe/framework/port/gtest.h" + +namespace mediapipe { +namespace { + +TEST(PlatformStrings, ThereAndBack) { + const std::string source = "Шчучыншчына"; + const std::string result = NativeToUtf8(Utf8ToNative(source)); + EXPECT_EQ(result, source); +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/framework/formats/BUILD b/mediapipe/framework/formats/BUILD index 3ee0472434..596f72f0c3 100644 --- a/mediapipe/framework/formats/BUILD +++ b/mediapipe/framework/formats/BUILD @@ -130,6 +130,7 @@ cc_library( deps = [ ":hardware_buffer", "//mediapipe/framework:port", + "//mediapipe/framework/formats:shared_fd", "//mediapipe/gpu:gpu_buffer_storage", ], ) @@ -300,10 +301,14 @@ cc_test( cc_library( name = "unique_fd", + srcs = ["unique_fd.cc"], hdrs = ["unique_fd.h"], deps = [ + "//mediapipe/framework/port:ret_check", "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", ], ) @@ -313,6 +318,29 @@ cc_test( deps = [ ":unique_fd", "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:status_matchers", + "//mediapipe/util:fd_test_util", ], ) + +cc_library( + name = "shared_fd", + hdrs = ["shared_fd.h"], + deps = [ + ":unique_fd", + "@com_google_absl//absl/status:statusor", + ], +) + +cc_test( + name = "shared_fd_test", + srcs = ["shared_fd_test.cc"], + deps = [ + ":shared_fd", + ":unique_fd", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:status_matchers", + "//mediapipe/util:fd_test_util", ], ) @@ -660,3 +688,13 @@ cc_test( "//mediapipe/framework/port:status", ], ) + +cc_test( + name = "yuv_image_test", + srcs = ["yuv_image_test.cc"], + deps = [ + ":yuv_image", + "//mediapipe/framework/port:gtest_main", + "@libyuv", + ], +) diff --git a/mediapipe/framework/formats/ahwb_view.h b/mediapipe/framework/formats/ahwb_view.h index 9438e185ed..249a7d91ad 100644 --- a/mediapipe/framework/formats/ahwb_view.h +++ b/mediapipe/framework/formats/ahwb_view.h @@ -2,8 +2,16 @@ #define MEDIAPIPE_FRAMEWORK_FORMATS_AHWB_VIEW_H_ #include "mediapipe/framework/port.h" + #ifdef MEDIAPIPE_GPU_BUFFER_USE_AHWB + +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/status/status.h" #include "mediapipe/framework/formats/hardware_buffer.h" +#include "mediapipe/framework/formats/shared_fd.h" #include "mediapipe/gpu/gpu_buffer_storage.h" namespace mediapipe { @@ -16,25 +24,58 @@ namespace mediapipe { // own usage. // The AHWB abstractions in GpuBuffer and Tensor are likely more suitable for // other CPU/GPU uses of AHWBs.
+ class AhwbView { public: - explicit AhwbView(HardwareBuffer* ahwb) : ahwb_(ahwb) {} + explicit AhwbView( + HardwareBuffer* ahwb, int width_step_bytes, + absl::AnyInvocable set_usage_fence_fn) + : ahwb_(ahwb), + width_step_bytes_(width_step_bytes), + set_usage_fence_fn_(std::move(set_usage_fence_fn)) {} // Non-copyable AhwbView(const AhwbView&) = delete; AhwbView& operator=(const AhwbView&) = delete; // Non-movable AhwbView(AhwbView&&) = delete; - // Only supports synchronous usage. All users of GetHandle must finish + // Supports only synchronous read usage - all users of GetHandle must finish // accessing the buffer before this view object is destroyed to avoid race - // conditions. - // TODO: Support asynchronous usage. + // conditions. + // + // Supports async write usage - the user must provide a usage fence that is + // signaled when the write is complete. See more details in `SetUsageFence`. + // TODO: Support full async usage. const AHardwareBuffer* GetHandle() const { return ahwb_->GetAHardwareBuffer(); } + int GetWidthStepBytes() const { return width_step_bytes_; } + + // Sets usage fence for this AHWB: + // - fence is not signaled => AHWB is still in use + // - fence is signaled => AHWB is not in use anymore + // + // Example use case: + // - Calculator gets AhwbView for writing, where the write is done + // asynchronously and a fence is created to indicate write completion. + // (E.g. a TPU/DSP delegate is used and can provide a completion fence.) + // - Calculator schedules the async write, retrieves the completion fence, + // and sets it using `SetUsageFence`. + // - Calculator sends corresponding `GpuBuffer` to a downstream calculator. + // - The downstream calculator gets `GlBufferView` for reading; `GpuBuffer` + // automatically imports and inserts the fence as a GL fence sync, ensuring + // subsequent GL operations wait for write completion. + // + // TODO: b/376753887 - replace with a dedicated type (MP's Fence) + absl::Status SetUsageFence(SharedFd fence) { + return set_usage_fence_fn_(std::move(fence)); + } + private: const HardwareBuffer* ahwb_; + const int width_step_bytes_; + absl::AnyInvocable set_usage_fence_fn_; }; namespace internal { diff --git a/mediapipe/framework/formats/shared_fd.h b/mediapipe/framework/formats/shared_fd.h new file mode 100644 index 0000000000..1c6ed105b1 --- /dev/null +++ b/mediapipe/framework/formats/shared_fd.h @@ -0,0 +1,66 @@ +#ifndef MEDIAPIPE_FRAMEWORK_FORMATS_SHARED_FD_H_ +#define MEDIAPIPE_FRAMEWORK_FORMATS_SHARED_FD_H_ + +#include +#include +#include + +#include "absl/status/statusor.h" +#include "mediapipe/framework/formats/unique_fd.h" + +namespace mediapipe { + +// Provides shared ownership of a file descriptor. +// +// The file descriptor is closed as soon as the last SharedFd is destroyed. +// (Uses `std::shared_ptr` internally and can be used in the same way: copy, +// move, assign/compare with nullptr, use in conditional statements.) +class SharedFd { + public: + // `fd` must be a valid file descriptor. + explicit SharedFd(UniqueFd fd) + : fd_(std::make_shared(std::move(fd))) {} + + // Constructs empty SharedFd (fd == nullptr evaluates to true). + SharedFd() = default; + + ~SharedFd() = default; + + // Copyable + SharedFd(const SharedFd&) = default; + SharedFd& operator=(const SharedFd&) = default; + + // Moveable + SharedFd(SharedFd&& other) = default; + SharedFd& operator=(SharedFd&& other) = default; + + // Resets this SharedFd object (fd == nullptr will evaluate to true).
+ SharedFd& operator=(std::nullptr_t other) { + fd_ = other; + return *this; + } + + bool operator==(std::nullptr_t other) const { return fd_ == other; } + bool operator!=(std::nullptr_t other) const { return !operator==(other); } + + // SharedFd can be used in conditional statements: + // ``` + // if (fd) { + // int raw_fd = fd.Get(); + // } + // ``` + explicit operator bool() const { return operator!=(nullptr); } + + // Gets raw file descriptor for read purposes. + int Get() const { return fd_->Get(); } + + // Duplicates file descriptor. + absl::StatusOr Dup() const { return fd_->Dup(); } + + private: + std::shared_ptr fd_; +}; + +} // namespace mediapipe + +#endif // MEDIAPIPE_FRAMEWORK_FORMATS_SHARED_FD_H_ diff --git a/mediapipe/framework/formats/shared_fd_test.cc b/mediapipe/framework/formats/shared_fd_test.cc new file mode 100644 index 0000000000..0d8d22e6a8 --- /dev/null +++ b/mediapipe/framework/formats/shared_fd_test.cc @@ -0,0 +1,64 @@ +#include "mediapipe/framework/formats/shared_fd.h" + +#include + +#include "mediapipe/framework/formats/unique_fd.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/util/fd_test_util.h" + +namespace mediapipe { +namespace { + +TEST(SharedFdTest, CanCreateFromUniqueFd) { + int raw_fd = GetValidFd(); + { + auto fd = SharedFd(UniqueFd(raw_fd)); + EXPECT_TRUE(IsFdValid(fd.Get())); + } + EXPECT_FALSE(IsFdValid(raw_fd)); +} + +TEST(SharedFdTest, CanCopyAndMoveFd) { + int raw_fd = GetValidFd(); + auto fd = SharedFd(UniqueFd(raw_fd)); + { + SharedFd copied_fd = fd; + EXPECT_TRUE(IsFdValid(copied_fd.Get())); + } + EXPECT_TRUE(IsFdValid(fd.Get())); + + { + SharedFd moved_fd = std::move(fd); + EXPECT_TRUE(IsFdValid(moved_fd.Get())); + } + EXPECT_FALSE(IsFdValid(raw_fd)); +} + +TEST(SharedFdTest, CanBeAssignedAndComparedWithNullptr) { + SharedFd fd; + EXPECT_FALSE(fd); + EXPECT_EQ(fd, nullptr); + + int raw_fd = GetValidFd(); + fd = SharedFd(UniqueFd(raw_fd)); + + EXPECT_NE(fd, nullptr); + EXPECT_TRUE(fd); + + fd = nullptr; + EXPECT_FALSE(IsFdValid(raw_fd)); + EXPECT_EQ(fd, nullptr); + EXPECT_FALSE(fd); +} + +TEST(SharedFdTest, CanDup) { + int raw_fd = GetValidFd(); + auto fd = SharedFd(UniqueFd(raw_fd)); + MP_ASSERT_OK_AND_ASSIGN(UniqueFd dup_fd, fd.Dup()); + EXPECT_NE(dup_fd.Get(), raw_fd); + EXPECT_TRUE(IsFdValid(dup_fd.Get())); +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/framework/formats/tensor.cc b/mediapipe/framework/formats/tensor.cc index d38e75d542..f0f6aa9a60 100644 --- a/mediapipe/framework/formats/tensor.cc +++ b/mediapipe/framework/formats/tensor.cc @@ -407,7 +407,7 @@ Tensor::OpenGlBufferView Tensor::GetOpenGlBufferWriteView( } AllocateOpenGlBuffer(); if (valid_ != 0) { - ABSL_LOG(ERROR) + ABSL_LOG_FIRST_N(ERROR, 1) << "Tensors are designed for single writes. 
Multiple writes to a " "Tensor instance are not supported and may lead to undefined " "behavior due to lack of synchronization."; diff --git a/mediapipe/framework/formats/unique_fd.cc b/mediapipe/framework/formats/unique_fd.cc new file mode 100644 index 0000000000..38f2aed338 --- /dev/null +++ b/mediapipe/framework/formats/unique_fd.cc @@ -0,0 +1,119 @@ +#include "mediapipe/framework/formats/unique_fd.h" + +#include + +#include "absl/base/attributes.h" +#include "absl/log/absl_log.h" +#include "absl/status/statusor.h" +#include "mediapipe/framework/port/ret_check.h" + +#if (__ANDROID_API__ >= 29) && defined(__BIONIC__) && !defined(NDEBUG) +#define MEDIAPIPE_UNIQUE_FD_USE_FDSAN 1 + +#include + +#include + +#endif // (__ANDROID_API__ >= 29) && defined(__BIONIC__) && !defined(NDEBUG) + +namespace mediapipe { + +namespace { + +#if defined(MEDIAPIPE_UNIQUE_FD_USE_FDSAN) +// Address of the object is used as tag. +uint64_t Tag(UniqueFd* fd) { return reinterpret_cast(fd); } + +// These functions are marked with __attribute__((weak)), so that their +// availability can be determined at runtime. These wrappers will use them +// if available, and fall back to no-ops or regular close on devices older +// than API level 29 or non-bionic or non-production builds. +void FdsanExchangeTag(int fd, uint64_t old_tag, uint64_t new_tag) { + if (android_fdsan_exchange_owner_tag) { + android_fdsan_exchange_owner_tag(fd, old_tag, new_tag); + } +} + +void FdsanClose(int fd, uint64_t tag) { + if (android_fdsan_close_with_tag) { + if (android_fdsan_close_with_tag(fd, tag) != 0) { + ABSL_LOG(ERROR) << "Failed to close fd: " << fd; + } + return; + } + if (::close(fd) != 0) { + ABSL_LOG(ERROR) << "Failed to close fd: " << fd; + } +} +#endif // MEDIAPIPE_UNIQUE_FD_USE_FDSAN + +} // namespace + +UniqueFd& UniqueFd::operator=(UniqueFd&& move) { + if (this == &move) { + return *this; + } + + Reset(); + + if (move.fd_ != -1) { + fd_ = move.fd_; + move.fd_ = -1; +#if defined(MEDIAPIPE_UNIQUE_FD_USE_FDSAN) + // Acquire ownership from the moved-from object. + FdsanExchangeTag(fd_, Tag(&move), Tag(this)); +#endif // MEDIAPIPE_UNIQUE_FD_USE_FDSAN + } + + return *this; +} + +absl::StatusOr UniqueFd::Dup() const { + RET_CHECK(IsValid()); + int dup_fd = dup(Get()); + RET_CHECK_GE(dup_fd, 0) << "Failed to duplicate fd."; + return UniqueFd(dup_fd); +} + +// Releases ownership of the file descriptor and returns it. +ABSL_MUST_USE_RESULT int UniqueFd::Release() { + if (!IsValid()) { + return -1; + } + + int fd = fd_; + fd_ = -1; +#if defined(MEDIAPIPE_UNIQUE_FD_USE_FDSAN) + // Release ownership. + FdsanExchangeTag(fd, Tag(this), 0); +#endif // MEDIAPIPE_UNIQUE_FD_USE_FDSAN + return fd; +} + +// Closes a wrapped file descriptor and resets the wrapper. +void UniqueFd::Reset(int new_fd) { + if (IsValid()) { +#if defined(MEDIAPIPE_UNIQUE_FD_USE_FDSAN) + FdsanClose(fd_, Tag(this)); +#else + if (::close(fd_) != 0) { + ABSL_LOG(ERROR) << "Failed to close fd: " << fd_; + } +#endif // MEDIAPIPE_UNIQUE_FD_USE_FDSAN + fd_ = -1; + } + + if (new_fd != -1) { + fd_ = new_fd; +#if defined(MEDIAPIPE_UNIQUE_FD_USE_FDSAN) + // Acquire ownership of the presumably unowned fd. 
+ FdsanExchangeTag(fd_, 0, Tag(this)); +#endif // MEDIAPIPE_UNIQUE_FD_USE_FDSAN + } +} + +} // namespace mediapipe + +#ifdef MEDIAPIPE_UNIQUE_FD_USE_FDSAN +#undef MEDIAPIPE_UNIQUE_FD_USE_FDSAN +#endif diff --git a/mediapipe/framework/formats/unique_fd.h b/mediapipe/framework/formats/unique_fd.h index 666584919e..3f1c63d7c1 100644 --- a/mediapipe/framework/formats/unique_fd.h +++ b/mediapipe/framework/formats/unique_fd.h @@ -1,17 +1,10 @@ #ifndef MEDIAPIPE_FRAMEWORK_FORMATS_ANDROID_UNIQUE_FD_H_ #define MEDIAPIPE_FRAMEWORK_FORMATS_ANDROID_UNIQUE_FD_H_ -#include - #include #include "absl/base/attributes.h" -#include "absl/log/absl_log.h" - -#if (__ANDROID_API__ >= 29) && defined(__BIONIC__) && !defined(NDEBUG) -#define MEDIAPIPE_USE_FDSAN 1 -#include -#endif // (__ANDROID_API__ >= 29) && defined(__BIONIC__) && !defined(NDEBUG) +#include "absl/status/statusor.h" namespace mediapipe { @@ -36,97 +29,23 @@ class UniqueFd { ~UniqueFd() { Reset(); } UniqueFd& operator=(const UniqueFd& copy) = delete; - UniqueFd& operator=(UniqueFd&& move) { - if (this == &move) { - return *this; - } - - Reset(); - - if (move.fd_ != -1) { - fd_ = move.fd_; - move.fd_ = -1; -#if defined(MEDIAPIPE_USE_FDSAN) - // Acquire ownership from the moved-from object. - FdsanExchangeTag(fd_, move.Tag(), Tag()); -#endif // MEDIAPIPE_USE_FDSAN - } - - return *this; - } - + UniqueFd& operator=(UniqueFd&& move); // Returns a non-owned file descriptor. - int Get() { return fd_; } + int Get() const { return fd_; } // Checks if a valid file descriptor is wrapped. bool IsValid() const { return fd_ >= 0; } - // Releases ownership of the file descriptor and returns it. - ABSL_MUST_USE_RESULT int Release() { - if (!IsValid()) { - return -1; - } + absl::StatusOr Dup() const; - int fd = fd_; - fd_ = -1; -#if defined(MEDIAPIPE_USE_FDSAN) - // Release ownership. - FdsanExchangeTag(fd, Tag(), 0); -#endif // MEDIAPIPE_USE_FDSAN - return fd; - } + // Releases ownership of the file descriptor and returns it. + ABSL_MUST_USE_RESULT int Release(); // Closes a wrapped file descriptor and resets the wrapper. - void Reset(int new_fd = -1) { - if (IsValid()) { -#if defined(MEDIAPIPE_USE_FDSAN) - FdsanClose(fd_, Tag()); -#else - if (::close(fd_) != 0) { - ABSL_LOG(ERROR) << "Failed to close fd: " << fd_; - } -#endif // MEDIAPIPE_USE_FDSAN - fd_ = -1; - } - - if (new_fd != -1) { - fd_ = new_fd; -#if defined(MEDIAPIPE_USE_FDSAN) - // Acquire ownership of the presumably unowned fd. - FdsanExchangeTag(fd_, 0, Tag()); -#endif // MEDIAPIPE_USE_FDSAN - } - } + void Reset(int new_fd = -1); private: int fd_ = -1; - -#if defined(MEDIAPIPE_USE_FDSAN) - // Address of the object is used as tag. - uint64_t Tag() { return reinterpret_cast(this); } - - // These functions are marked with __attribute__((weak)), so that their - // availability can be determined at runtime. These wrappers will use them - // if available, and fall back to no-ops or regular close on devices older - // than API level 29 or non-bionic or non-production builds. 
- static void FdsanExchangeTag(int fd, uint64_t old_tag, uint64_t new_tag) { - if (android_fdsan_exchange_owner_tag) { - android_fdsan_exchange_owner_tag(fd, old_tag, new_tag); - } - } - - static void FdsanClose(int fd, uint64_t tag) { - if (android_fdsan_close_with_tag) { - if (android_fdsan_close_with_tag(fd, tag) != 0) { - ABSL_LOG(ERROR) << "Failed to close fd: " << fd; - } - return; - } - if (::close(fd) != 0) { - ABSL_LOG(ERROR) << "Failed to close fd: " << fd; - } - } -#endif // MEDIAPIPE_USE_FDSAN }; } // namespace mediapipe diff --git a/mediapipe/framework/formats/unique_fd_test.cc b/mediapipe/framework/formats/unique_fd_test.cc index 885b5199eb..a900b53be9 100644 --- a/mediapipe/framework/formats/unique_fd_test.cc +++ b/mediapipe/framework/formats/unique_fd_test.cc @@ -1,22 +1,15 @@ #include "mediapipe/framework/formats/unique_fd.h" -#include -#include - #include #include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/util/fd_test_util.h" namespace mediapipe { namespace { -// Returns a valid system file descriptor. -int GetValidFd() { return dup(STDOUT_FILENO); } - -// Helper function to check if the file descriptor is valid (still open). -int IsFdValid(int fd) { return fcntl(fd, F_GETFD) != -1; } - TEST(UniqueFdTest, ShouldInitializeInvalidFd) { UniqueFd unique_fd; EXPECT_FALSE(unique_fd.IsValid()); @@ -60,6 +53,15 @@ TEST(UniqueFdTest, ShouldCreateValidFd) { EXPECT_FALSE(unique_fd.IsValid()); } +TEST(UniqueFdTest, ShouldDupValidFd) { + UniqueFd unique_fd(GetValidFd()); + + MP_ASSERT_OK_AND_ASSIGN(UniqueFd dup_unique_fd, unique_fd.Dup()); + + EXPECT_TRUE(dup_unique_fd.IsValid()); + EXPECT_NE(dup_unique_fd.Get(), unique_fd.Get()); +} + TEST(UniqueFdTest, ShouldReleaseValidFd) { UniqueFd unique_fd(GetValidFd()); EXPECT_TRUE(unique_fd.IsValid()); diff --git a/mediapipe/framework/formats/yuv_image.h b/mediapipe/framework/formats/yuv_image.h index 4aeb75d6b0..1c516c083e 100644 --- a/mediapipe/framework/formats/yuv_image.h +++ b/mediapipe/framework/formats/yuv_image.h @@ -15,9 +15,11 @@ #ifndef MEDIAPIPE_FRAMEWORK_FORMATS_YUV_IMAGE_H_ #define MEDIAPIPE_FRAMEWORK_FORMATS_YUV_IMAGE_H_ +#include #include #include #include +#include #include "libyuv/video_common.h" @@ -118,6 +120,28 @@ class YUVImage { YUVImage() = default; ~YUVImage() { Clear(); } + // YUVImage is move-only. 
+ YUVImage(const YUVImage&) = delete; + YUVImage& operator=(const YUVImage&) = delete; + YUVImage(YUVImage&& b) { *this = std::move(b); } + + YUVImage& operator=(YUVImage&& b) { + if (this != &b) { + Clear(); + deallocation_function_ = std::exchange(b.deallocation_function_, nullptr); + fourcc_ = std::exchange(b.fourcc_, libyuv::FOURCC_ANY); + std::swap_ranges(data_, data_ + kMaxNumPlanes, b.data_); + std::swap_ranges(stride_, stride_ + kMaxNumPlanes, b.stride_); + width_ = std::exchange(b.width_, 0); + height_ = std::exchange(b.height_, 0); + bit_depth_ = std::exchange(b.bit_depth_, 0); + matrix_coefficients_ = std::exchange( + b.matrix_coefficients_, COLOR_MATRIX_COEFFICIENTS_UNSPECIFIED); + full_range_ = std::exchange(b.full_range_, false); + } + return *this; + } + // Convenience constructor YUVImage(libyuv::FourCC fourcc, // std::unique_ptr data_location, // diff --git a/mediapipe/framework/formats/yuv_image_test.cc b/mediapipe/framework/formats/yuv_image_test.cc new file mode 100644 index 0000000000..ded9f0458a --- /dev/null +++ b/mediapipe/framework/formats/yuv_image_test.cc @@ -0,0 +1,171 @@ +#include "mediapipe/framework/formats/yuv_image.h" + +#include +#include + +#include "libyuv/video_common.h" +#include "mediapipe/framework/port/gtest.h" + +namespace mediapipe { +namespace { + +// See: +// https://clang.llvm.org/extra/clang-tidy/checks/bugprone/use-after-move.html +template +void SILENCE_USE_AFTER_MOVE(T&) {} + +TEST(YUVImageTest, TestInitializeAndDestruct) { + uint8_t data0 = 0, data1 = 1, data2 = 2; + const libyuv::FourCC fourcc = libyuv::FOURCC_I420; + const int stride0 = 100, stride1 = 50, stride2 = 50; + const int width = 100, height = 60; + const int bit_depth = 4; + int deallocation_counter = 0; + auto deallocation_function = [&deallocation_counter] { + ++deallocation_counter; + }; + { + YUVImage yuv_image; + yuv_image.Initialize(fourcc, deallocation_function, // + &data0, stride0, // + &data1, stride1, // + &data2, stride2, // + width, height, bit_depth); + + EXPECT_EQ(yuv_image.fourcc(), fourcc); + EXPECT_EQ(yuv_image.data(0), &data0); + EXPECT_EQ(yuv_image.data(1), &data1); + EXPECT_EQ(yuv_image.data(2), &data2); + EXPECT_EQ(yuv_image.stride(0), stride0); + EXPECT_EQ(yuv_image.stride(1), stride1); + EXPECT_EQ(yuv_image.stride(2), stride2); + EXPECT_EQ(yuv_image.width(), width); + EXPECT_EQ(yuv_image.height(), height); + EXPECT_EQ(yuv_image.bit_depth(), bit_depth); + } + EXPECT_EQ(deallocation_counter, 1); +} + +TEST(YUVImageTest, TestMoveConstructor) { + uint8_t data0 = 0, data1 = 1, data2 = 2; + const libyuv::FourCC fourcc = libyuv::FOURCC_I420; + const int stride0 = 100, stride1 = 50, stride2 = 50; + const int width = 100, height = 60; + const int bit_depth = 4; + int deallocation_counter = 0; + auto deallocation_function = [&deallocation_counter] { + ++deallocation_counter; + }; + { + YUVImage yuv_image; + yuv_image.Initialize(fourcc, deallocation_function, // + &data0, stride0, // + &data1, stride1, // + &data2, stride2, // + width, height, bit_depth); + + EXPECT_EQ(yuv_image.fourcc(), fourcc); + EXPECT_EQ(yuv_image.data(0), &data0); + EXPECT_EQ(yuv_image.data(1), &data1); + EXPECT_EQ(yuv_image.data(2), &data2); + EXPECT_EQ(yuv_image.stride(0), stride0); + EXPECT_EQ(yuv_image.stride(1), stride1); + EXPECT_EQ(yuv_image.stride(2), stride2); + EXPECT_EQ(yuv_image.width(), width); + EXPECT_EQ(yuv_image.height(), height); + EXPECT_EQ(yuv_image.bit_depth(), bit_depth); + + YUVImage yuv_image2(std::move(yuv_image)); + + // ClangTidy will complain about accessing 
yuv_image after it has been moved + // from. The C++ standard says that "moved-from objects shall be placed in a + // valid but unspecified state". These tests are here to ensure that. + SILENCE_USE_AFTER_MOVE(yuv_image); + EXPECT_EQ(yuv_image.fourcc(), libyuv::FOURCC_ANY); + EXPECT_EQ(yuv_image.data(0), nullptr); + EXPECT_EQ(yuv_image.data(1), nullptr); + EXPECT_EQ(yuv_image.data(2), nullptr); + EXPECT_EQ(yuv_image.stride(0), 0); + EXPECT_EQ(yuv_image.stride(1), 0); + EXPECT_EQ(yuv_image.stride(2), 0); + EXPECT_EQ(yuv_image.width(), 0); + EXPECT_EQ(yuv_image.height(), 0); + EXPECT_EQ(yuv_image.bit_depth(), 0); + + EXPECT_EQ(yuv_image2.fourcc(), fourcc); + EXPECT_EQ(yuv_image2.data(0), &data0); + EXPECT_EQ(yuv_image2.data(1), &data1); + EXPECT_EQ(yuv_image2.data(2), &data2); + EXPECT_EQ(yuv_image2.stride(0), stride0); + EXPECT_EQ(yuv_image2.stride(1), stride1); + EXPECT_EQ(yuv_image2.stride(2), stride2); + EXPECT_EQ(yuv_image2.width(), width); + EXPECT_EQ(yuv_image2.height(), height); + EXPECT_EQ(yuv_image2.bit_depth(), bit_depth); + } + EXPECT_EQ(deallocation_counter, 1); +} + +TEST(YUVImageTest, TestMoveAssignment) { + uint8_t data0 = 0, data1 = 1, data2 = 2; + const libyuv::FourCC fourcc = libyuv::FOURCC_I420; + const int stride0 = 100, stride1 = 50, stride2 = 50; + const int width = 100, height = 60; + const int bit_depth = 4; + int deallocation_counter = 0; + auto deallocation_function = [&deallocation_counter] { + ++deallocation_counter; + }; + { + YUVImage yuv_image; + yuv_image.Initialize(fourcc, deallocation_function, // + &data0, stride0, // + &data1, stride1, // + &data2, stride2, // + width, height, bit_depth); + + EXPECT_EQ(yuv_image.fourcc(), fourcc); + EXPECT_EQ(yuv_image.data(0), &data0); + EXPECT_EQ(yuv_image.data(1), &data1); + EXPECT_EQ(yuv_image.data(2), &data2); + EXPECT_EQ(yuv_image.stride(0), stride0); + EXPECT_EQ(yuv_image.stride(1), stride1); + EXPECT_EQ(yuv_image.stride(2), stride2); + EXPECT_EQ(yuv_image.width(), width); + EXPECT_EQ(yuv_image.height(), height); + EXPECT_EQ(yuv_image.bit_depth(), bit_depth); + + YUVImage yuv_image2; + yuv_image2 = std::move(yuv_image); + + // ClangTidy will complain about accessing yuv_image after it has been moved + // from. The C++ standard says that "moved-from objects shall be placed in a + // valid but unspecified state". These tests are here to ensure that. 
+    SILENCE_USE_AFTER_MOVE(yuv_image);
+    EXPECT_EQ(yuv_image.fourcc(), libyuv::FOURCC_ANY);
+    EXPECT_EQ(yuv_image.data(0), nullptr);
+    EXPECT_EQ(yuv_image.data(1), nullptr);
+    EXPECT_EQ(yuv_image.data(2), nullptr);
+    EXPECT_EQ(yuv_image.stride(0), 0);
+    EXPECT_EQ(yuv_image.stride(1), 0);
+    EXPECT_EQ(yuv_image.stride(2), 0);
+    EXPECT_EQ(yuv_image.width(), 0);
+    EXPECT_EQ(yuv_image.height(), 0);
+    EXPECT_EQ(yuv_image.bit_depth(), 0);
+
+    EXPECT_EQ(yuv_image2.fourcc(), fourcc);
+    EXPECT_EQ(yuv_image2.data(0), &data0);
+    EXPECT_EQ(yuv_image2.data(1), &data1);
+    EXPECT_EQ(yuv_image2.data(2), &data2);
+    EXPECT_EQ(yuv_image2.stride(0), stride0);
+    EXPECT_EQ(yuv_image2.stride(1), stride1);
+    EXPECT_EQ(yuv_image2.stride(2), stride2);
+    EXPECT_EQ(yuv_image2.width(), width);
+    EXPECT_EQ(yuv_image2.height(), height);
+    EXPECT_EQ(yuv_image2.bit_depth(), bit_depth);
+  }
+  EXPECT_EQ(deallocation_counter, 1);
+}
+
+}  // namespace
+}  // namespace mediapipe
diff --git a/mediapipe/framework/graph_output_stream.cc b/mediapipe/framework/graph_output_stream.cc
index e456c6535d..62956cdada 100644
--- a/mediapipe/framework/graph_output_stream.cc
+++ b/mediapipe/framework/graph_output_stream.cc
@@ -16,7 +16,7 @@
 #include "absl/log/absl_check.h"
 #include "absl/synchronization/mutex.h"
-#include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/status_macros.h"
 namespace mediapipe {
diff --git a/mediapipe/framework/graph_runtime_info.proto b/mediapipe/framework/graph_runtime_info.proto
new file mode 100644
index 0000000000..1488a33f7d
--- /dev/null
+++ b/mediapipe/framework/graph_runtime_info.proto
@@ -0,0 +1,62 @@
+// Copyright 2024 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package mediapipe;
+
+option java_package = "com.google.mediapipe.proto";
+option java_outer_classname = "GraphRuntimeInfoProto";
+
+// The runtime info for an input stream.
+message StreamRuntimeInfo {
+  // The name of the stream in the format "TAG:index:stream_name"
+  string stream_name = 1;
+
+  // The number of packets in the queue.
+  int32 queue_size = 2;
+
+  // The total number of packets added to the queue.
+  int32 number_of_packets_added = 3;
+
+  // The minimum timestamp or timestamp bound of the stream.
+  int64 minimum_timestamp_or_bound = 4;
+}
+
+// The runtime info for a calculator.
+message CalculatorRuntimeInfo {
+  // The name of the calculator.
+  string calculator_name = 1;
+
+  // The time when the last Calculator::Process call started.
+  int64 last_process_start_unix_us = 2;
+
+  // The time when the last Calculator::Process call finished.
+  int64 last_process_finish_unix_us = 3;
+
+  // The timestamp bound of the calculator.
+  int64 timestamp_bound = 4;
+
+  // The runtime info for each input stream of the calculator.
+  repeated StreamRuntimeInfo input_stream_infos = 5;
+}
+
+// The runtime info for the whole graph.
+message GraphRuntimeInfo {
+  // The time when the runtime info was captured.
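+  // Expressed in Unix-epoch microseconds, matching the `_unix_us` field name
+  // suffix used throughout this file.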
+  int64 capture_time_unix_us = 1;
+
+  // The runtime info for each calculator in the graph.
+  repeated CalculatorRuntimeInfo calculator_infos = 2;
+}
diff --git a/mediapipe/framework/input_stream_handler.cc b/mediapipe/framework/input_stream_handler.cc
index e222c2e6cc..ee4a86adf2 100644
--- a/mediapipe/framework/input_stream_handler.cc
+++ b/mediapipe/framework/input_stream_handler.cc
@@ -14,7 +14,16 @@
 #include "mediapipe/framework/input_stream_handler.h"
+#include <optional>
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
 #include "absl/log/absl_check.h"
+#include "absl/log/absl_log.h"
+#include "absl/log/log.h"
+#include "absl/strings/str_cat.h"
 #include "absl/strings/str_join.h"
 #include "absl/strings/substitute.h"
 #include "mediapipe/framework/collection_item_id.h"
@@ -22,8 +31,41 @@
 #include "mediapipe/framework/port/ret_check.h"
 namespace mediapipe {
+
+namespace {
 using SyncSet = InputStreamHandler::SyncSet;
+// Helper class to vlog the streams with missing packets during FillInputSet
+// calls.
+class FillInputSetLogger {
+ public:
+  FillInputSetLogger(const std::string& node_name, Timestamp timestamp)
+      : node_name_(node_name), timestamp_(timestamp) {}
+  ~FillInputSetLogger() { OutputLogs(); }
+
+  void AddMissingPacketStreamName(const std::string& stream_name) {
+    missing_streams_.push_back(stream_name);
+  }
+
+ private:
+  void OutputLogs() const {
+    if (!missing_streams_.empty()) {
+      VLOG(1) << absl::StrCat(
+          node_name_, ": Filled input set at ts: ", timestamp_.DebugString(),
+          " with MISSING packets in input streams: ",
+          absl::StrJoin(missing_streams_, ", "), ".");
+    } else {
+      VLOG(1) << absl::StrCat(
+          node_name_, ": Filled input set at ts: ", timestamp_.DebugString());
+    }
+  }
+
+  const std::string node_name_;
+  const Timestamp timestamp_;
+  std::vector<std::string> missing_streams_;
+};
+}  // namespace
+
 absl::Status InputStreamHandler::InitializeInputStreamManagers(
     InputStreamManager* flat_input_stream_managers) {
   for (CollectionItemId id = input_stream_managers_.BeginId();
@@ -55,13 +97,15 @@
 std::vector<std::tuple<std::string, int, int, Timestamp>>
 InputStreamHandler::GetMonitoringInfo() {
   std::vector<std::tuple<std::string, int, int, Timestamp>>
       monitoring_info_vector;
-  for (auto& stream : input_stream_managers_) {
+  for (CollectionItemId id = input_stream_managers_.BeginId();
+       id < input_stream_managers_.EndId(); ++id) {
+    const auto& stream = input_stream_managers_.Get(id);
     if (!stream) {
       continue;
     }
     monitoring_info_vector.emplace_back(
         std::tuple<std::string, int, int, Timestamp>(
-            stream->Name(), stream->QueueSize(), stream->NumPacketsAdded(),
+            DebugStreamName(id), stream->QueueSize(), stream->NumPacketsAdded(),
             stream->MinTimestampOrBound(nullptr)));
   }
   return monitoring_info_vector;
@@ -145,6 +189,19 @@ std::string InputStreamHandler::DebugStreamNames() const {
                       ">");
 }
+std::string InputStreamHandler::DebugStreamName(CollectionItemId id) const {
+  const auto tag_map = input_stream_managers_.TagMap();
+  const std::string& stream_name = tag_map->Names()[id.value()];
+  const auto& [stream_tag, stream_idx] = tag_map->TagAndIndexFromId(id);
+  return absl::StrCat(stream_tag, ":", stream_idx, ":", stream_name);
+}
+
+std::string InputStreamHandler::GetNodeName() const {
+  const auto* calculator_context =
+      calculator_context_manager_->GetDefaultCalculatorContext();
+  return calculator_context ?
calculator_context->NodeName() : "";
+}
+
 bool InputStreamHandler::ScheduleInvocations(int max_allowance,
                                              Timestamp* input_bound) {
   *input_bound = Timestamp::Unset();
@@ -409,12 +466,22 @@ void SyncSet::FillInputSet(Timestamp input_timestamp,
                            InputStreamShardSet* input_set) {
   ABSL_CHECK(input_timestamp.IsAllowedInStream());
   ABSL_CHECK(input_set);
+  std::optional<FillInputSetLogger> logger;
+  if (VLOG_IS_ON(1)) {
+    logger.emplace(input_stream_handler_->GetNodeName(), input_timestamp);
+  }
+  std::vector<std::string> streams_with_missing_packets;
   for (CollectionItemId id : stream_ids_) {
     const auto& stream = input_stream_handler_->input_stream_managers_.Get(id);
     int num_packets_dropped = 0;
     bool stream_is_done = false;
     Packet current_packet = stream->PopPacketAtTimestamp(
         input_timestamp, &num_packets_dropped, &stream_is_done);
+    if (current_packet.IsEmpty() && logger.has_value()) {
+      // Track the streams that have no packets at the current timestamp.
+      logger->AddMissingPacketStreamName(
+          input_stream_handler_->DebugStreamName(id));
+    }
     ABSL_CHECK_EQ(num_packets_dropped, 0)
         << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".",
                             num_packets_dropped, stream->Name());
diff --git a/mediapipe/framework/input_stream_handler.h b/mediapipe/framework/input_stream_handler.h
index bf74a63d26..cbf9e04db4 100644
--- a/mediapipe/framework/input_stream_handler.h
+++ b/mediapipe/framework/input_stream_handler.h
@@ -20,6 +20,7 @@
 #include
 #include
 #include
+#include
 #include
 #include
@@ -92,8 +93,8 @@ class InputStreamHandler {
   // Sets up the InputStreamShardSet by propagating data from the managers.
   absl::Status SetupInputShards(InputStreamShardSet* input_shards);
-  // Returns a vector of pairs of stream name and queue size for monitoring
-  // purpose.
+  // Returns a vector of tuples of stream name, queue size, number of packets
+  // added, and minimum timestamp or bound for monitoring purposes.
   std::vector<std::tuple<std::string, int, int, Timestamp>>
   GetMonitoringInfo();
   // Resets the input stream handler and its underlying input streams for
@@ -150,6 +151,13 @@
   // Returns a string that concatenates the stream names of all managed streams.
   std::string DebugStreamNames() const;
+  // Returns the stream name for an input stream in the format:
+  // stream_tag:stream_index:stream_name.
+  std::string DebugStreamName(CollectionItemId id) const;
+
+  // Returns the node name of the calculator node.
+  std::string GetNodeName() const;
+
   // Keeps scheduling new invocations until 1) the node is not ready or 2) the
   // max number of invocations that are allowed to be scheduled is reached.
   // Returns true if at least one invocation has been scheduled.
diff --git a/mediapipe/framework/input_stream_manager.cc b/mediapipe/framework/input_stream_manager.cc
index 5acd571e13..8cd7f6baa6 100644
--- a/mediapipe/framework/input_stream_manager.cc
+++ b/mediapipe/framework/input_stream_manager.cc
@@ -302,6 +302,7 @@ Packet InputStreamManager::PopPacketAtTimestamp(Timestamp timestamp,
     if (current_timestamp != timestamp) {
       // The timestamp bound reported when no packet is sent.
       Timestamp bound = MinTimestampOrBoundHelper();
+      // Generate empty packet at the timestamp bound.
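+      // Stamping it at bound.PreviousAllowedInStream() (just below the bound)
+      // lets downstream code observe the settled bound without mistaking the
+      // empty packet for real data at the bound itself.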
      packet = Packet().At(bound.PreviousAllowedInStream());
       ++(*num_packets_dropped);
     }
diff --git a/mediapipe/framework/input_stream_manager.h b/mediapipe/framework/input_stream_manager.h
index b97e11e7ab..27f148edbf 100644
--- a/mediapipe/framework/input_stream_manager.h
+++ b/mediapipe/framework/input_stream_manager.h
@@ -22,11 +22,10 @@
 #include
 #include "absl/base/thread_annotations.h"
+#include "absl/status/status.h"
 #include "absl/synchronization/mutex.h"
 #include "mediapipe/framework/packet.h"
 #include "mediapipe/framework/packet_type.h"
-#include "mediapipe/framework/port.h"
-#include "mediapipe/framework/port/status.h"
 #include "mediapipe/framework/timestamp.h"
 namespace mediapipe {
diff --git a/mediapipe/framework/port/BUILD b/mediapipe/framework/port/BUILD
index 31c8223044..985a4655f0 100644
--- a/mediapipe/framework/port/BUILD
+++ b/mediapipe/framework/port/BUILD
@@ -113,6 +113,7 @@ cc_library(
         ":status",
         "//mediapipe/framework/deps:file_helpers",
         "//mediapipe/framework/deps:file_path",
+        "//mediapipe/framework/deps:mmapped_file",
     ],
 )
diff --git a/mediapipe/framework/resources.cc b/mediapipe/framework/resources.cc
index e8daefa1e1..475646f7c8 100644
--- a/mediapipe/framework/resources.cc
+++ b/mediapipe/framework/resources.cc
@@ -6,10 +6,15 @@
 #include
 #include "absl/container/flat_hash_map.h"
+#include "absl/log/absl_log.h"
 #include "absl/status/status.h"
 #include "absl/status/statusor.h"
 #include "absl/strings/str_cat.h"
 #include "absl/strings/string_view.h"
+#include "mediapipe/framework/deps/mlock_helpers.h"
+#include "mediapipe/framework/deps/mmapped_file.h"
+#include "mediapipe/framework/port/file_helpers.h"
+#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/tool/status_util.h"
 #include "mediapipe/util/resource_util.h"
@@ -35,30 +40,75 @@ class NoCleanupResource : public Resource {
   NoCleanupResource(const void* data, size_t length) : Resource(data, length) {}
 };
+class MMapResource : public Resource {
+ public:
+  MMapResource(std::unique_ptr<file::MemoryMappedFile> mmapped_file,
+               bool mlocked)
+      : Resource(mmapped_file->BaseAddress(), mmapped_file->Length()),
+        mmapped_file_(std::move(mmapped_file)),
+        mlocked_(mlocked) {}
+
+  ~MMapResource() override {
+    if (mlocked_) {
+      auto status =
+          UnlockMemory(mmapped_file_->BaseAddress(), mmapped_file_->Length());
+      if (!status.ok()) {
+        ABSL_LOG(DFATAL) << status;
+      }
+    }
+    auto status = mmapped_file_->Close();
+    if (!status.ok()) {
+      ABSL_LOG(DFATAL) << status;
+    }
+  }
+
+ private:
+  std::unique_ptr<file::MemoryMappedFile> mmapped_file_;
+  bool mlocked_;
+};
+
 class DefaultResources : public Resources {
  public:
   absl::StatusOr<std::unique_ptr<Resource>> Get(
       absl::string_view resource_id, const Options& options) const final {
-    // First try to load resource as is.
-    std::string path(resource_id);
+    const std::string path(resource_id);
+    if (options.mmap_mode.has_value()) {
+      const MMapMode mode = options.mmap_mode.value();
+      // Try to resolve `resource_id` into a path.
+      const absl::StatusOr<std::string> resolved_path =
+          PathToResourceAsFile(path, /*shadow_copy=*/false);
+      if (resolved_path.ok()) {
+        auto status_or_mmap =
+            MakeMMapResource(path,
+                             /*mlock=*/mode == MMapMode::kMMapAndMLock);
+        if (status_or_mmap.ok() || mode != MMapMode::kMMapOrRead) {
+          return status_or_mmap;
+        }
+      } else if (mode != MMapMode::kMMapOrRead) {
+        return resolved_path.status();
+      }
+    }
+
+    // Try to load the resource as is.
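+    // (If `mmap_mode` was set, reaching this point means `kMMapOrRead` was
+    // requested and memory-mapping failed, so we fall back to a plain read.)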
    std::string output;
-    absl::Status status =
+    const absl::Status status =
         GetResourceContents(path, &output, options.read_as_binary);
     if (status.ok()) {
       return MakeStringResource(std::move(output));
     }
-    // Try to resolve resource_id.
-    absl::StatusOr<std::string> resolved_path = PathToResourceAsFile(path);
-    if (!resolved_path.ok() || resolved_path.value() == path) {
+    // Try the path resolution again, this time possibly with shadow copying.
+    const absl::StatusOr<std::string> resolved_path_maybe_shadow =
+        PathToResourceAsFile(path, /*shadow_copy=*/true);
+    if (!resolved_path_maybe_shadow.ok()) {
       return tool::CombinedStatus(
           absl::StrCat("Failed to load resource: ", resource_id),
-          {status, resolved_path.status()});
+          {status, resolved_path_maybe_shadow.status()});
     }
     // Try to load by resolved path.
     absl::Status status_for_resolved = GetResourceContents(
-        resolved_path.value(), &output, options.read_as_binary);
+        resolved_path_maybe_shadow.value(), &output, options.read_as_binary);
     if (status_for_resolved.ok()) {
       return MakeStringResource(std::move(output));
     }
@@ -104,6 +154,24 @@ std::unique_ptr<Resource> MakeNoCleanupResource(const void* data,
   return std::make_unique<NoCleanupResource>(data, length);
 }
+absl::StatusOr<std::unique_ptr<Resource>> MakeMMapResource(
+    absl::string_view path, bool mlock) {
+  auto mmap_or_error = file::MMapFile(path);
+  if (!mmap_or_error.ok()) {
+    return mmap_or_error.status();
+  }
+  std::unique_ptr<file::MemoryMappedFile> mmap = std::move(*mmap_or_error);
+
+  if (mlock) {
+    auto status = LockMemory(mmap->BaseAddress(), mmap->Length());
+    if (!status.ok()) {
+      return absl::UnavailableError(absl::StrCat("Locking memory for file '",
+                                                 path, "' failed: ", status));
+    }
+  }
+  return std::make_unique<MMapResource>(std::move(mmap), mlock);
+}
+
 std::unique_ptr<Resources> CreateDefaultResources() {
   return std::make_unique<DefaultResources>();
 }
diff --git a/mediapipe/framework/resources.h b/mediapipe/framework/resources.h
index 9124ca9fd8..0920236877 100644
--- a/mediapipe/framework/resources.h
+++ b/mediapipe/framework/resources.h
@@ -60,6 +60,21 @@ std::unique_ptr<Resource> MakeStringResource(std::string&& s);
 std::unique_ptr<Resource> MakeNoCleanupResource(const void* data,
                                                 size_t length);
+// Creates a resource by memory-mapping the file at `path`.
+absl::StatusOr<std::unique_ptr<Resource>> MakeMMapResource(
+    absl::string_view path, bool mlock);
+
+enum class MMapMode {
+  // Map the file contents into memory when supported, read otherwise.
+  kMMapOrRead,
+  // Fail if memory mapping is not available.
+  kMMap,
+  // Like `kMMap` with additional memory-locking of the mapped pages.
+  // This makes sure the data is resident in memory (never swapped) but comes
+  // with increased memory usage and takes time to perform the initial read.
+  kMMapAndMLock,
+};
+
 // Represents an interface to load resources in calculators and subgraphs.
 //
 // Should be accessed through `CalculatorContext::GetResources` and
@@ -71,6 +86,11 @@ class Resources {
  public:
   struct Options {
     bool read_as_binary = true;
+
+    // If specified, attempt memory-mapping file-based resources in the given
+    // mode. Otherwise the file contents are read into memory.
+    // Memory-mapped files are always `read_as_binary`.
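+    // For example, mirroring the usage in resources_test.cc:
+    //   Resources::Options options{.mmap_mode = MMapMode::kMMap};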
+    std::optional<MMapMode> mmap_mode;
   };
   virtual ~Resources() = default;
diff --git a/mediapipe/framework/resources_test.cc b/mediapipe/framework/resources_test.cc
index 83e922e69a..e0cb3a7e56 100644
--- a/mediapipe/framework/resources_test.cc
+++ b/mediapipe/framework/resources_test.cc
@@ -39,6 +39,28 @@ TEST(Resources, CanCreateDefaultResourcesAndReadFileContents) {
   EXPECT_EQ(resource->ToStringView(), "File system calculator contents\n");
 }
+TEST(Resources, CanCreateDefaultResourcesAndReadFileContentsWithMMap) {
+  std::unique_ptr<Resources> resources = CreateDefaultResources();
+
+  Resources::Options options{.mmap_mode = MMapMode::kMMap};
+  MP_ASSERT_OK_AND_ASSIGN(
+      std::unique_ptr<Resource> resource,
+      resources->Get("mediapipe/framework/testdata/resource_calculator.data",
+                     options));
+  EXPECT_EQ(resource->ToStringView(), "File system calculator contents\n");
+}
+
+TEST(Resources, CanCreateDefaultResourcesAndReadFileContentsWithMMapAndMLock) {
+  std::unique_ptr<Resources> resources = CreateDefaultResources();
+
+  Resources::Options options{.mmap_mode = MMapMode::kMMapAndMLock};
+  MP_ASSERT_OK_AND_ASSIGN(
+      std::unique_ptr<Resource> resource,
+      resources->Get("mediapipe/framework/testdata/resource_calculator.data",
+                     options));
+  EXPECT_EQ(resource->ToStringView(), "File system calculator contents\n");
+}
+
 TEST(Resources, CanReadFileContentsByUnresolvedId) {
   absl::SetFlag(&FLAGS_resource_root_dir, "mediapipe/framework/testdata");
   std::unique_ptr<Resources> resources = CreateDefaultResources();
diff --git a/mediapipe/framework/scheduler.cc b/mediapipe/framework/scheduler.cc
index 36effe0165..8e925d9156 100644
--- a/mediapipe/framework/scheduler.cc
+++ b/mediapipe/framework/scheduler.cc
@@ -14,6 +14,7 @@
 #include "mediapipe/framework/scheduler.h"
+#include
 #include
 #include
 #include
@@ -21,6 +22,7 @@
 #include "absl/log/absl_check.h"
 #include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
 #include "absl/synchronization/mutex.h"
 #include "mediapipe/framework/calculator_graph.h"
 #include "mediapipe/framework/executor.h"
@@ -36,8 +38,10 @@ namespace mediapipe {
 namespace internal {
+inline constexpr absl::string_view kDefaultQueueName = "default_queue";
+
 Scheduler::Scheduler(CalculatorGraph* graph)
-    : graph_(graph), shared_(), default_queue_(&shared_) {
+    : graph_(graph), shared_(), default_queue_(kDefaultQueueName, &shared_) {
   shared_.error_callback = std::bind(&CalculatorGraph::RecordError, graph_,
                                      std::placeholders::_1);
   default_queue_.SetIdleCallback(std::bind(&Scheduler::QueueIdleStateChanged,
@@ -90,7 +94,7 @@ absl::Status Scheduler::SetNonDefaultExecutor(const std::string& name,
          "be called after the scheduler "
          "has started";
   auto inserted = non_default_queues_.emplace(
-      name, absl::make_unique<SchedulerQueue>(&shared_));
+      name, absl::make_unique<SchedulerQueue>(name, &shared_));
   RET_CHECK(inserted.second)
       << "SetNonDefaultExecutor must be called only once for the executor \""
       << name << "\"";
diff --git a/mediapipe/framework/scheduler_queue.cc b/mediapipe/framework/scheduler_queue.cc
index 557d7e40e8..74dc2b1d3f 100644
--- a/mediapipe/framework/scheduler_queue.cc
+++ b/mediapipe/framework/scheduler_queue.cc
@@ -14,17 +14,15 @@
 #include "mediapipe/framework/scheduler_queue.h"
-#include
+#include
 #include
-#include
 #include "absl/log/absl_check.h"
+#include "absl/log/absl_log.h"
 #include "absl/synchronization/mutex.h"
 #include "mediapipe/framework/calculator_node.h"
 #include "mediapipe/framework/executor.h"
-#include "mediapipe/framework/port/canonical_errors.h"
 #include "mediapipe/framework/port/logging.h"
-#include "mediapipe/framework/port/status.h"
 #ifdef
__APPLE__ #define AUTORELEASEPOOL @autoreleasepool @@ -97,7 +95,7 @@ void SchedulerQueue::Reset() { void SchedulerQueue::SetExecutor(Executor* executor) { executor_ = executor; } bool SchedulerQueue::IsIdle() { - VLOG(3) << "Scheduler queue empty: " << queue_.empty() + VLOG(3) << "Scheduler queue (" << queue_name_ << ") empty: " << queue_.empty() << ", # of pending tasks: " << num_pending_tasks_; return queue_.empty() && num_pending_tasks_ == 0; } @@ -140,7 +138,8 @@ void SchedulerQueue::AddItemToQueue(Item&& item) { was_idle = IsIdle(); queue_.push(item); ++num_tasks_to_add_; - VLOG(4) << node->DebugName() << " was added to the scheduler queue."; + VLOG(4) << node->DebugName() << " was added to the scheduler queue (" + << queue_name_ << ")"; // Now grab the tasks to execute while still holding the lock. This will // gather any waiting tasks, in addition to the one we just added. @@ -235,13 +234,15 @@ void SchedulerQueue::RunNextTask() { void SchedulerQueue::RunCalculatorNode(CalculatorNode* node, CalculatorContext* cc) { - VLOG(3) << "Running " << node->DebugName(); + VLOG(3) << "Running " << node->DebugName() << " on queue (" << queue_name_ + << ")"; // If we are in the process of stopping the graph (due to tool::StatusStop() // from a non-source node or due to CalculatorGraph::CloseAllPacketSources), // we should not run any more sources. Close the node if it is a source. if (shared_->stopping && node->IsSource()) { - VLOG(4) << "Closing " << node->DebugName() << " due to StatusStop()."; + VLOG(4) << "Closing " << node->DebugName() + << " due to StatusStop() on queue (" << queue_name_ << ")."; int64_t start_time = shared_->timer.StartNode(); // It's OK to not reset/release the prepared CalculatorContext since a // source node always reuses the same CalculatorContext and Close() doesn't @@ -252,7 +253,8 @@ void SchedulerQueue::RunCalculatorNode(CalculatorNode* node, shared_->timer.EndNode(start_time); if (!result.ok()) { VLOG(3) << node->DebugName() - << " had an error while closing due to StatusStop()!"; + << " had an error while closing due to StatusStop()! on queue (" + << queue_name_ << ")"; shared_->error_callback(result); } } else { @@ -273,23 +275,27 @@ void SchedulerQueue::RunCalculatorNode(CalculatorNode* node, shared_->stopping = true; } else { // If we have an error in this calculator. 
- VLOG(3) << node->DebugName() << " had an error!"; + VLOG(3) << node->DebugName() << " had an error on queue (" + << queue_name_ << ")!"; shared_->error_callback(result); } } } - VLOG(4) << "Done running " << node->DebugName(); + VLOG(4) << "Done running " << node->DebugName() << " on queue (" + << queue_name_ << ")"; node->EndScheduling(); } void SchedulerQueue::OpenCalculatorNode(CalculatorNode* node) { - VLOG(3) << "Opening " << node->DebugName(); + VLOG(3) << "Opening " << node->DebugName() << " on queue (" << queue_name_ + << ")"; int64_t start_time = shared_->timer.StartNode(); const absl::Status result = node->OpenNode(); shared_->timer.EndNode(start_time); if (!result.ok()) { - VLOG(3) << node->DebugName() << " had an error!"; + VLOG(3) << node->DebugName() << " had an error on queue (" << queue_name_ + << ")!"; shared_->error_callback(result); return; } diff --git a/mediapipe/framework/scheduler_queue.h b/mediapipe/framework/scheduler_queue.h index 27b6829895..2268f81401 100644 --- a/mediapipe/framework/scheduler_queue.h +++ b/mediapipe/framework/scheduler_queue.h @@ -15,14 +15,14 @@ #ifndef MEDIAPIPE_FRAMEWORK_SCHEDULER_QUEUE_H_ #define MEDIAPIPE_FRAMEWORK_SCHEDULER_QUEUE_H_ -#include #include #include -#include #include +#include #include -#include "absl/base/macros.h" +#include "absl/base/thread_annotations.h" +#include "absl/strings/string_view.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_context.h" #include "mediapipe/framework/executor.h" @@ -76,7 +76,8 @@ class SchedulerQueue : public TaskQueue { bool is_open_node_ = false; // True if the task should run OpenNode(). }; - explicit SchedulerQueue(SchedulerShared* shared) : shared_(shared) {} + explicit SchedulerQueue(absl::string_view queue_name, SchedulerShared* shared) + : queue_name_(queue_name), shared_(shared) {} // Sets the executor that will run the nodes. Must be called before the // scheduler is started. @@ -145,6 +146,9 @@ class SchedulerQueue : public TaskQueue { // Checks whether the queue has no queued nodes or pending tasks. bool IsIdle() ABSL_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + // Queue name for logging purposes. 
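+  // It appears in the scheduler's VLOG output, e.g.
+  // "Running <node> on queue (default_queue)".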
+ const std::string queue_name_; + Executor* executor_ = nullptr; IdleCallback idle_callback_; diff --git a/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc b/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc index 3a7dd86783..7adf0fb75a 100644 --- a/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/stream_handler/early_close_input_stream_handler.h" #include +#include #include "absl/log/absl_check.h" #include "absl/strings/substitute.h" diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index 76d00abc60..3106fb90fa 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -978,6 +978,62 @@ cc_test( ], ) +cc_library( + name = "graph_runtime_info_logger", + srcs = ["graph_runtime_info_logger.cc"], + hdrs = ["graph_runtime_info_logger.h"], + visibility = ["//visibility:public"], + deps = [ + ":graph_runtime_info_utils", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework:graph_runtime_info_cc_proto", + "//mediapipe/framework:vlog_utils", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:threadpool", + "@com_google_absl//absl/base:log_severity", + "@com_google_absl//absl/functional:any_invocable", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_check", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings:string_view", + "@com_google_absl//absl/synchronization", + "@com_google_absl//absl/time", + ], +) + +cc_test( + name = "graph_runtime_info_logger_test", + size = "small", + srcs = ["graph_runtime_info_logger_test.cc"], + deps = [ + ":graph_runtime_info_logger", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:status_matchers", + "@com_google_absl//absl/synchronization", + "@com_google_absl//absl/time", + ], +) + +cc_library( + name = "graph_runtime_info_utils", + srcs = ["graph_runtime_info_utils.cc"], + hdrs = ["graph_runtime_info_utils.h"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:graph_runtime_info_cc_proto", + "//mediapipe/framework:timestamp", + "//mediapipe/framework:vlog_utils", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/time", + ], +) + exports_files( ["build_defs.bzl"], visibility = [ diff --git a/mediapipe/framework/tool/graph_runtime_info_logger.cc b/mediapipe/framework/tool/graph_runtime_info_logger.cc new file mode 100644 index 0000000000..6ed4704c46 --- /dev/null +++ b/mediapipe/framework/tool/graph_runtime_info_logger.cc @@ -0,0 +1,63 @@ +#include "mediapipe/framework/tool/graph_runtime_info_logger.h" + +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/log/absl_check.h" +#include "absl/log/absl_log.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/time/time.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/tool/graph_runtime_info_utils.h" +#include "mediapipe/framework/vlog_utils.h" + +namespace mediapipe::tool { + +constexpr absl::Duration kDefaultCaptureInterval = absl::Seconds(10); 
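+// kDefaultCaptureInterval is the fallback used by StartInBackground() below
+// when `config.capture_period_msec()` is unset or non-positive.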
+
+GraphRuntimeInfoLogger::GraphRuntimeInfoLogger()
+    : thread_pool_("GraphRuntimeInfoLogger", /*num_threads=*/1) {}
+
+GraphRuntimeInfoLogger::~GraphRuntimeInfoLogger() { Stop(); }
+
+absl::Status GraphRuntimeInfoLogger::StartInBackground(
+    const mediapipe::GraphRuntimeInfoConfig& config,
+    absl::AnyInvocable<absl::StatusOr<GraphRuntimeInfo>()>
+        get_runtime_info_fn) {
+  get_runtime_info_fn_ = std::move(get_runtime_info_fn);
+  RET_CHECK(!is_running_.HasBeenNotified());
+  ABSL_CHECK_EQ(thread_pool_.num_threads(), 1);
+  thread_pool_.StartWorkers();
+  absl::Duration interval =
+      config.capture_period_msec() > 0
+          ? absl::Milliseconds(config.capture_period_msec())
+          : kDefaultCaptureInterval;
+  thread_pool_.Schedule([this, interval]() mutable {
+    is_running_.Notify();
+    while (!shutdown_signal_.HasBeenNotified()) {
+      const auto runtime_info = get_runtime_info_fn_();
+      if (!runtime_info.ok()) {
+        ABSL_LOG(DFATAL) << "Failed to get graph runtime info: "
+                         << runtime_info.status();
+        return;
+      }
+      const auto runtime_info_str = GetGraphRuntimeInfoString(*runtime_info);
+      if (!runtime_info_str.ok()) {
+        ABSL_LOG(DFATAL) << "Failed to render graph runtime info: "
+                         << runtime_info_str.status();
+        return;
+      }
+      VlogLargeMessage(/*verbose_level=*/0, *runtime_info_str);
+      shutdown_signal_.WaitForNotificationWithTimeout(interval);
+    }
+  });
+  is_running_.WaitForNotification();
+  return absl::OkStatus();
+}
+
+void GraphRuntimeInfoLogger::Stop() { shutdown_signal_.Notify(); }
+
+}  // namespace mediapipe::tool
diff --git a/mediapipe/framework/tool/graph_runtime_info_logger.h b/mediapipe/framework/tool/graph_runtime_info_logger.h
new file mode 100644
index 0000000000..44594dfdf1
--- /dev/null
+++ b/mediapipe/framework/tool/graph_runtime_info_logger.h
@@ -0,0 +1,51 @@
+// Copyright 2024 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_TOOL_GRAPH_RUNTIME_INFO_LOGGER_H_
+#define MEDIAPIPE_FRAMEWORK_TOOL_GRAPH_RUNTIME_INFO_LOGGER_H_
+
+#include "absl/functional/any_invocable.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/synchronization/notification.h"
+#include "mediapipe/framework/calculator.pb.h"
+#include "mediapipe/framework/graph_runtime_info.pb.h"
+#include "mediapipe/framework/port/threadpool.h"
+
+namespace mediapipe::tool {
+
+// Periodically collects the graph runtime info and outputs it to LOG(INFO).
+class GraphRuntimeInfoLogger {
+ public:
+  GraphRuntimeInfoLogger();
+  ~GraphRuntimeInfoLogger();
+
+  // Starts the collector in the background. Can be called only once.
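+  // The capture interval is taken from `config.capture_period_msec()`,
+  // falling back to 10 seconds. `get_runtime_info_fn` is invoked once per
+  // interval and the rendered report is logged via VlogLargeMessage.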
+  absl::Status StartInBackground(
+      const mediapipe::GraphRuntimeInfoConfig& config,
+      absl::AnyInvocable<absl::StatusOr<GraphRuntimeInfo>()>
+          get_runtime_info_fn);
+
+ private:
+  void Stop();
+
+  absl::Notification shutdown_signal_;
+  absl::Notification is_running_;
+  absl::AnyInvocable<absl::StatusOr<GraphRuntimeInfo>()> get_runtime_info_fn_;
+  ThreadPool thread_pool_;
+};
+
+}  // namespace mediapipe::tool
+
+#endif  // MEDIAPIPE_FRAMEWORK_TOOL_GRAPH_RUNTIME_INFO_LOGGER_H_
diff --git a/mediapipe/framework/tool/graph_runtime_info_logger_test.cc b/mediapipe/framework/tool/graph_runtime_info_logger_test.cc
new file mode 100644
index 0000000000..d9b56b88a3
--- /dev/null
+++ b/mediapipe/framework/tool/graph_runtime_info_logger_test.cc
@@ -0,0 +1,42 @@
+// Copyright 2024 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/framework/tool/graph_runtime_info_logger.h"
+
+#include "absl/synchronization/notification.h"
+#include "absl/time/time.h"
+#include "mediapipe/framework/calculator.pb.h"
+#include "mediapipe/framework/port/gmock.h"
+#include "mediapipe/framework/port/gtest.h"
+#include "mediapipe/framework/port/status_matchers.h"
+
+namespace mediapipe::tool {
+namespace {
+
+TEST(GraphRuntimeInfoLoggerTest, ShouldCaptureRuntimeInfo) {
+  mediapipe::GraphRuntimeInfoConfig config;
+  config.set_enable_graph_runtime_info(true);
+
+  absl::Notification callback_called;
+  GraphRuntimeInfoLogger logger;
+  MP_ASSERT_OK(logger.StartInBackground(config, [&]() {
+    callback_called.Notify();
+    return GraphRuntimeInfo();
+  }));
+  EXPECT_TRUE(
+      callback_called.WaitForNotificationWithTimeout(absl::Seconds(10)));
+}
+
+}  // namespace
+}  // namespace mediapipe::tool
diff --git a/mediapipe/framework/tool/graph_runtime_info_utils.cc b/mediapipe/framework/tool/graph_runtime_info_utils.cc
new file mode 100644
index 0000000000..86f28d3183
--- /dev/null
+++ b/mediapipe/framework/tool/graph_runtime_info_utils.cc
@@ -0,0 +1,86 @@
+#include "mediapipe/framework/tool/graph_runtime_info_utils.h"
+
+#include <string>
+#include <vector>
+
+#include "absl/status/statusor.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_format.h"
+#include "absl/strings/str_join.h"
+#include "absl/time/time.h"
+#include "mediapipe/framework/timestamp.h"
+
+namespace mediapipe::tool {
+
+absl::StatusOr<std::string> GetGraphRuntimeInfoString(
+    const GraphRuntimeInfo& graph_runtime_info) {
+  const absl::Time capture_time =
+      absl::FromUnixMicros(graph_runtime_info.capture_time_unix_us());
+  std::string calculators_runtime_info_str;
+  std::vector<std::string> calculators_with_unprocessed_packets;
+  std::vector<std::string> running_calculators;
+  int num_packets_in_input_queues = 0;
+  for (const auto& calculator_info : graph_runtime_info.calculator_infos()) {
+    const bool is_idle = calculator_info.last_process_finish_unix_us() >=
+                         calculator_info.last_process_start_unix_us();
+    const std::string calculator_state_str =
+        is_idle ? absl::StrFormat(
+                      "idle for %.2fs",
+                      absl::ToDoubleSeconds(
+                          capture_time -
+                          absl::FromUnixMicros(
+                              calculator_info.last_process_finish_unix_us())))
+                : absl::StrFormat(
+                      "running for %.2fs",
+                      absl::ToDoubleSeconds(
+                          capture_time -
+                          absl::FromUnixMicros(
+                              calculator_info.last_process_start_unix_us())));
+    if (!is_idle) {
+      running_calculators.push_back(calculator_info.calculator_name());
+    }
+    absl::StrAppend(
+        &calculators_runtime_info_str,
+        absl::StrFormat(
+            "\n%s: (%s, ts bound : %s)", calculator_info.calculator_name(),
+            calculator_state_str,
+            Timestamp::CreateNoErrorChecking(calculator_info.timestamp_bound())
+                .DebugString()));
+    bool calculator_has_unprocessed_packets = false;
+    for (const auto& input_stream_info : calculator_info.input_stream_infos()) {
+      num_packets_in_input_queues += input_stream_info.queue_size();
+      calculator_has_unprocessed_packets |= input_stream_info.queue_size() > 0;
+      absl::StrAppend(
+          &calculators_runtime_info_str, " * ", input_stream_info.stream_name(),
+          " - queue size: ", input_stream_info.queue_size(),
+          ", total added: ", input_stream_info.number_of_packets_added(),
+          ", min ts: ",
+          Timestamp::CreateNoErrorChecking(
+              input_stream_info.minimum_timestamp_or_bound())
+              .DebugString(),
+          "\n");
+    }
+    if (calculator_has_unprocessed_packets) {
+      calculators_with_unprocessed_packets.push_back(
+          calculator_info.calculator_name());
+    }
+  }
+  const std::string calculators_with_unprocessed_packets_str =
+      num_packets_in_input_queues > 0
+          ? absl::StrCat(
+                " (in calculators: ",
+                absl::StrJoin(calculators_with_unprocessed_packets, ", "), ")")
+          : "";
+  const std::string running_calculators_str =
+      running_calculators.empty()
+          ? "None"
+          : absl::StrCat(" (running calculators: ",
+                         absl::StrJoin(running_calculators, ", "), ")");
+  return absl::StrFormat(
+      "Graph runtime info: \nRunning calculators: %s\nNum packets in input "
+      "queues: %d%s\n%s\n",
+      running_calculators_str, num_packets_in_input_queues,
+      calculators_with_unprocessed_packets_str, calculators_runtime_info_str);
+}
+
+}  // namespace mediapipe::tool
diff --git a/mediapipe/framework/tool/graph_runtime_info_utils.h b/mediapipe/framework/tool/graph_runtime_info_utils.h
new file mode 100644
index 0000000000..0418ffa152
--- /dev/null
+++ b/mediapipe/framework/tool/graph_runtime_info_utils.h
@@ -0,0 +1,31 @@
+// Copyright 2024 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_TOOL_GRAPH_RUNTIME_INFO_UTILS_H_
+#define MEDIAPIPE_FRAMEWORK_TOOL_GRAPH_RUNTIME_INFO_UTILS_H_
+
+#include <string>
+
+#include "absl/status/statusor.h"
+#include "mediapipe/framework/graph_runtime_info.pb.h"
+
+namespace mediapipe::tool {
+
+// Returns a human-readable representation of the graph runtime info.
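+// The report begins with "Graph runtime info:" followed by the running
+// calculators and the number of packets in input queues, then one
+// "<calculator>: (idle/running for N.NNs, ts bound : ...)" entry per
+// calculator with per-stream queue sizes, totals, and minimum timestamps.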
+absl::StatusOr<std::string> GetGraphRuntimeInfoString(
+    const GraphRuntimeInfo& graph_runtime_info);
+
+}  // namespace mediapipe::tool
+
+#endif  // MEDIAPIPE_FRAMEWORK_TOOL_GRAPH_RUNTIME_INFO_UTILS_H_
diff --git a/mediapipe/framework/tool/mediapipe_files.bzl b/mediapipe/framework/tool/mediapipe_files.bzl
index 2e0ec9b42b..53c3065787 100644
--- a/mediapipe/framework/tool/mediapipe_files.bzl
+++ b/mediapipe/framework/tool/mediapipe_files.bzl
@@ -13,7 +13,7 @@ def mediapipe_files(srcs):
     """
     for src in srcs:
-        archive_name = "com_google_mediapipe_%s" % src.replace("/", "_").replace(".", "_")
+        archive_name = "com_google_mediapipe_%s" % src.replace("/", "_").replace(".", "_").replace("+", "_")
         native.genrule(
             name = "%s_ln" % archive_name,
             srcs = ["@%s//file" % archive_name],
diff --git a/mediapipe/framework/tool/type_util.h b/mediapipe/framework/tool/type_util.h
index 9c955f2a36..aee708d8be 100644
--- a/mediapipe/framework/tool/type_util.h
+++ b/mediapipe/framework/tool/type_util.h
@@ -16,6 +16,7 @@
 #define MEDIAPIPE_FRAMEWORK_TOOL_TYPE_UTIL_H_
 #include
+#include <ostream>
 #include
 #include
 #include
@@ -35,6 +36,9 @@ class TypeId {
   std::string name() const { return impl_.name(); }
   bool operator==(const TypeId& other) const { return impl_ == other.impl_; }
   bool operator<(const TypeId& other) const { return impl_ < other.impl_; }
+  friend std::ostream& operator<<(std::ostream& stream, const TypeId& id) {
+    return stream << id.name();
+  }
   template <typename H>
   friend H AbslHashValue(H h, const TypeId& r) {
diff --git a/mediapipe/framework/validated_graph_config.cc b/mediapipe/framework/validated_graph_config.cc
index 370701d0c9..e5d44b1efe 100644
--- a/mediapipe/framework/validated_graph_config.cc
+++ b/mediapipe/framework/validated_graph_config.cc
@@ -20,7 +20,8 @@
 #include "absl/container/flat_hash_set.h"
 #include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
-#include "absl/memory/memory.h"
+#include "absl/log/log.h"
+#include "absl/status/status.h"
 #include "absl/strings/str_cat.h"
 #include "absl/strings/str_join.h"
 #include "absl/strings/substitute.h"
@@ -30,16 +31,14 @@
 #include "mediapipe/framework/legacy_calculator_support.h"
 #include "mediapipe/framework/packet_generator.h"
 #include "mediapipe/framework/packet_generator.pb.h"
-#include "mediapipe/framework/packet_set.h"
 #include "mediapipe/framework/packet_type.h"
 #include "mediapipe/framework/port.h"
-#include "mediapipe/framework/port/core_proto_inc.h"
 #include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/proto_ns.h"
 #include "mediapipe/framework/port/ret_check.h"
 #include "mediapipe/framework/port/source_location.h"
-#include "mediapipe/framework/port/status.h"
 #include "mediapipe/framework/port/status_builder.h"
+#include "mediapipe/framework/port/status_macros.h"
 #include "mediapipe/framework/port/topologicalsorter.h"
 #include "mediapipe/framework/status_handler.h"
 #include "mediapipe/framework/stream_handler.pb.h"
@@ -47,8 +46,8 @@
 #include "mediapipe/framework/tool/name_util.h"
 #include "mediapipe/framework/tool/status_util.h"
 #include "mediapipe/framework/tool/subgraph_expansion.h"
-#include "mediapipe/framework/tool/validate.h"
 #include "mediapipe/framework/tool/validate_name.h"
+#include "mediapipe/framework/vlog_utils.h"
 namespace mediapipe {
@@ -328,11 +327,12 @@ absl::Status ValidatedGraphConfig::Initialize(
     const GraphServiceManager* service_manager) {
   RET_CHECK(!initialized_)
       << "ValidatedGraphConfig can be initialized only once.";
-
-#if !defined(MEDIAPIPE_MOBILE)
-  VLOG(1) << "ValidatedGraphConfig::Initialize called 
with config:\n"
-          << input_config.DebugString();
-#endif
+  if (VLOG_IS_ON(1)) {
+    VlogLargeMessage(
+        /*verbose_level=*/1,
+        absl::StrCat("ValidatedGraphConfig::Initialize called with config:\n",
+                     input_config.DebugString()));
+  }
   config_ = std::move(input_config);
   MP_RETURN_IF_ERROR(
@@ -404,10 +404,13 @@ absl::Status ValidatedGraphConfig::Initialize(
   MP_RETURN_IF_ERROR(ValidateExecutors());
-#if !defined(MEDIAPIPE_MOBILE)
-  VLOG(1) << "ValidatedGraphConfig produced canonical config:\n"
-          << config_.DebugString();
-#endif
+  if (VLOG_IS_ON(1)) {
+    VlogLargeMessage(
+        /*verbose_level=*/1,
+        absl::StrCat("ValidatedGraphConfig produced canonical config:\n",
+                     config_.DebugString()));
+  }
+
   initialized_ = true;
   return absl::OkStatus();
 }
@@ -1087,7 +1090,7 @@ absl::Status ValidatedGraphConfig::ValidateRequiredSidePacketTypes(
   }
   if (!statuses.empty()) {
     return tool::CombinedStatus(
-        "ValidateRequiredSidePackets failed to validate: ", statuses);
+        "ValidateRequiredSidePacketTypes failed to validate: ", statuses);
   }
   return absl::OkStatus();
 }
diff --git a/mediapipe/framework/vlog_overrides.cc b/mediapipe/framework/vlog_overrides.cc
new file mode 100644
index 0000000000..169f276f47
--- /dev/null
+++ b/mediapipe/framework/vlog_overrides.cc
@@ -0,0 +1,55 @@
+#include "mediapipe/framework/vlog_overrides.h"
+
+// Template to temporarily enable VLOG overrides in code:
+// #define MEDIAPIPE_VLOG_VMODULE "calculator_graph*=5,southbound*=5"
+// #define MEDIAPIPE_VLOG_V 1
+
+#if defined(MEDIAPIPE_VLOG_V) || defined(MEDIAPIPE_VLOG_VMODULE)
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/log/absl_check.h"
+#include "absl/log/absl_log.h"
+#include "absl/log/globals.h"
+#include "absl/strings/str_format.h"
+#include "absl/strings/str_split.h"
+#include "absl/strings/string_view.h"
+#include "mediapipe/framework/deps/no_destructor.h"
+
+#endif  // defined(MEDIAPIPE_VLOG_V) || defined(MEDIAPIPE_VLOG_VMODULE)
+
+namespace mediapipe {
+
+void SetVLogOverrides() {
+#if defined(MEDIAPIPE_VLOG_V)
+  ABSL_LOG(INFO) << absl::StrFormat("Setting global VLOG level: %d",
+                                    MEDIAPIPE_VLOG_V);
+  absl::SetGlobalVLogLevel(MEDIAPIPE_VLOG_V);
+#endif  // defined(MEDIAPIPE_VLOG_V)
+
+#if defined(MEDIAPIPE_VLOG_VMODULE)
+  static NoDestructor<std::vector<std::pair<std::string, int>>> kVModuleMapping(
+      []() {
+        constexpr absl::string_view kVModule = MEDIAPIPE_VLOG_VMODULE;
+        std::vector<std::string> parts =
+            absl::StrSplit(kVModule, absl::ByAnyChar(",="));
+        ABSL_CHECK_EQ(parts.size() % 2, 0)
+            << "Invalid MEDIAPIPE_VLOG_VMODULE: " << kVModule;
+        std::vector<std::pair<std::string, int>> result;
+        for (int i = 0; i < parts.size(); i += 2) {
+          result.push_back({parts[i], std::stoi(parts[i + 1])});
+        }
+        return result;
+      }());
+
+  ABSL_LOG(INFO) << "Setting VLOG levels...";
+  for (const auto& [key, value] : *kVModuleMapping) {
+    ABSL_LOG(INFO) << absl::StrFormat("Setting [%s] to level: %d", key, value);
+    absl::SetVLogLevel(key, value);
+  }
+#endif  // defined(MEDIAPIPE_VLOG_VMODULE)
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/framework/vlog_overrides.h b/mediapipe/framework/vlog_overrides.h
new file mode 100644
index 0000000000..82168088a5
--- /dev/null
+++ b/mediapipe/framework/vlog_overrides.h
@@ -0,0 +1,32 @@
+#ifndef MEDIAPIPE_FRAMEWORK_VLOG_OVERRIDES_H_
+#define MEDIAPIPE_FRAMEWORK_VLOG_OVERRIDES_H_
+
+namespace mediapipe {
+
+// If possible, rely on --v / --vmodule to set VLOG level and modules.
+//
+// However, in cases when --v / --vmodule cannot be used (e.g. when running an
+// Android app where VLOGs cannot be enabled via flags), MediaPipe allows
+// setting VLOG --v / --vmodule overrides for debugging purposes; these are
+// applied when `CalculatorGraph` is created.
+//
+// Overrides:
+// - MEDIAPIPE_VLOG_V (define to the value you would pass to --v)
+// - MEDIAPIPE_VLOG_VMODULE (define to the value you would pass to --vmodule)
+//
+// You can set overrides by adding:
+// ```
+// --copt=-DMEDIAPIPE_VLOG_VMODULE=\"*calculator*=5\"
+// ```
+// to your build command, with your desired module patterns and VLOG levels
+// (see the --vmodule documentation for details).
+//
+// IMPORTANT: adding the above to your build command triggers a rebuild of the
+// whole binary, including dependencies. Since VLOG overrides exist for
+// debugging purposes only, it is usually faster to modify `vlog_overrides.cc`
+// directly, defining MEDIAPIPE_VLOG_V/VMODULE at the very top.
+void SetVLogOverrides();
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_FRAMEWORK_VLOG_OVERRIDES_H_
diff --git a/mediapipe/framework/vlog_utils.cc b/mediapipe/framework/vlog_utils.cc
new file mode 100644
index 0000000000..6faaabb3b0
--- /dev/null
+++ b/mediapipe/framework/vlog_utils.cc
@@ -0,0 +1,23 @@
+#include "mediapipe/framework/vlog_utils.h"
+
+#include "absl/log/absl_log.h"
+#include "absl/log/log.h"
+#include "absl/strings/str_split.h"  // IWYU pragma: keep
+#include "absl/strings/string_view.h"
+#include "mediapipe/framework/port/logging.h"
+
+namespace mediapipe {
+
+void VlogLargeMessage(int verbose_level, absl::string_view message) {
+#if defined(MEDIAPIPE_MOBILE)
+  if (message.size() > 4096) {
+    for (const auto& line : absl::StrSplit(message, '\n')) {
+      VLOG(verbose_level) << line;
+    }
+    return;
+  }
+#endif
+  VLOG(verbose_level) << message;
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/framework/vlog_utils.h b/mediapipe/framework/vlog_utils.h
new file mode 100644
index 0000000000..2d953963ff
--- /dev/null
+++ b/mediapipe/framework/vlog_utils.h
@@ -0,0 +1,22 @@
+#ifndef MEDIAPIPE_FRAMEWORK_VLOG_UTILS_H_
+#define MEDIAPIPE_FRAMEWORK_VLOG_UTILS_H_
+
+#include "absl/strings/string_view.h"
+
+namespace mediapipe {
+
+// Helper to log a message with a large number of lines on mobile (Android).
+//
+// On Android, logcat will truncate the log if the message is larger than
+// 4096 bytes. This function splits the message by new lines and logs each
+// line separately.
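For example, a `CalculatorGraphConfig` dump logged by ValidatedGraphConfig can easily exceed this limit.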
To ensure the log message is only generated when VLOG is +// turned on, use this function in a VLOG_IS_ON() block: +// if (VLOG_IS_ON(1)) { +// VlogLargeMessage( +// /*verbose_level=*/1, GenerateDebugString()); +// } +void VlogLargeMessage(int verbose_level, absl::string_view message); + +} // namespace mediapipe + +#endif // MEDIAPIPE_FRAMEWORK_VLOG_UTILS_H_ diff --git a/mediapipe/gpu/BUILD b/mediapipe/gpu/BUILD index ba6e7f7cef..c8fa2ad81e 100644 --- a/mediapipe/gpu/BUILD +++ b/mediapipe/gpu/BUILD @@ -125,6 +125,13 @@ cc_library( }), ) +cc_library( + name = "egl_base", + textual_hdrs = ["egl_base.h"], + visibility = ["//visibility:public"], + deps = [":gl_base"], +) + cc_library( name = "gl_base_hdr", hdrs = ["gl_base.h"], @@ -219,6 +226,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", "//mediapipe/framework/port:threadpool", + "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/base:dynamic_annotations", "@com_google_absl//absl/debugging:leak_check", "@com_google_absl//absl/log:absl_check", @@ -226,7 +234,9 @@ cc_library( "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/strings:string_view", "@com_google_absl//absl/synchronization", ] + select({ "//conditions:default": [], @@ -557,11 +567,26 @@ cc_library( "//mediapipe/framework/formats:ahwb_view", "//mediapipe/framework/formats:hardware_buffer", "//mediapipe/framework/formats:image_frame", + "//mediapipe/framework/formats:shared_fd", + "//mediapipe/framework/formats:unique_fd", "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/util:sync_wait", "//third_party/GL:EGL_headers", + "@com_google_absl//absl/base", "@com_google_absl//absl/log:absl_check", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings:str_format", - ], + "@com_google_absl//absl/time", + ] + select({ + "//conditions:default": [], + "//mediapipe:android": [ + ":egl_sync", + ":egl_sync_point", + ], + }), ) mediapipe_proto_library( @@ -841,6 +866,54 @@ cc_library( }), ) +cc_library( + name = "egl_errors", + srcs = ["egl_errors.cc"], + hdrs = ["egl_errors.h"], + visibility = ["//visibility:public"], + deps = [ + ":egl_base", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + ], +) + +cc_library( + name = "egl_sync", + srcs = ["egl_sync.cc"], + hdrs = ["egl_sync.h"], + visibility = ["//visibility:public"], + deps = [ + ":egl_base", + ":egl_errors", + "//mediapipe/framework/deps:no_destructor", + "//mediapipe/framework/formats:shared_fd", + "//mediapipe/framework/formats:unique_fd", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/cleanup", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ], +) + +cc_library( + name = "egl_sync_point", + srcs = ["egl_sync_point.cc"], + hdrs = ["egl_sync_point.h"], + visibility = ["//visibility:public"], + deps = [ + ":egl_base", + ":egl_sync", + ":gl_context", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + ], +) + cc_library( name = "gl_texture_util", srcs = ["gl_texture_util.cc"], @@ -1314,13 +1387,24 @@ mediapipe_cc_test( 
],
     requires_full_emulation = True,
     deps = [
+        ":egl_sync",
         ":gl_texture_buffer",
         ":gl_texture_util",
+        ":gl_texture_view",
         ":gpu_buffer_format",
         ":gpu_buffer_storage_ahwb",
         ":gpu_test_base",
+        "//mediapipe/framework:port",
+        "//mediapipe/framework/formats:ahwb_view",
+        "//mediapipe/framework/formats:hardware_buffer",
+        "//mediapipe/framework/formats:image_frame",
+        "//mediapipe/framework/formats:shared_fd",
+        "//mediapipe/framework/formats:unique_fd",
         "//mediapipe/framework/port:gtest_main",
         "//mediapipe/framework/tool:test_util",
+        "//mediapipe/util:sync_wait",
+        "@com_google_absl//absl/log:absl_log",
+        "@com_google_absl//absl/time",
     ],
 )
diff --git a/mediapipe/gpu/egl_base.h b/mediapipe/gpu/egl_base.h
new file mode 100644
index 0000000000..5192c9e457
--- /dev/null
+++ b/mediapipe/gpu/egl_base.h
@@ -0,0 +1,15 @@
+#ifndef MEDIAPIPE_GPU_EGL_BASE_H_
+#define MEDIAPIPE_GPU_EGL_BASE_H_
+
+#include "mediapipe/gpu/gl_base.h"
+
+#if defined(HAS_EGL)
+
+// TODO: b/377324183 - merge into gl_base.h
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+
+#endif  // defined(HAS_EGL)
+
+#endif  // MEDIAPIPE_GPU_EGL_BASE_H_
diff --git a/mediapipe/gpu/egl_errors.cc b/mediapipe/gpu/egl_errors.cc
new file mode 100644
index 0000000000..8b3de1205f
--- /dev/null
+++ b/mediapipe/gpu/egl_errors.cc
@@ -0,0 +1,72 @@
+#include "mediapipe/gpu/egl_errors.h"
+
+#include "absl/status/status.h"
+#include "absl/strings/str_cat.h"
+#include "mediapipe/gpu/egl_base.h"
+
+namespace mediapipe {
+
+absl::Status GetEglError() {
+  EGLint error = eglGetError();
+  switch (error) {
+    case EGL_SUCCESS:
+      return absl::OkStatus();
+    case EGL_NOT_INITIALIZED:
+      return absl::InternalError(
+          "EGL is not initialized, or could not be initialized, for the "
+          "specified EGL display connection.");
+    case EGL_BAD_ACCESS:
+      return absl::InternalError(
+          "EGL cannot access a requested resource (for example a context is "
+          "bound in another thread).");
+    case EGL_BAD_ALLOC:
+      return absl::InternalError(
+          "EGL failed to allocate resources for the requested operation.");
+    case EGL_BAD_ATTRIBUTE:
+      return absl::InternalError(
+          "An unrecognized attribute or attribute value was passed in the "
+          "attribute list.");
+    case EGL_BAD_CONTEXT:
+      return absl::InternalError(
+          "An EGLContext argument does not name a valid EGL rendering "
+          "context.");
+    case EGL_BAD_CONFIG:
+      return absl::InternalError(
+          "An EGLConfig argument does not name a valid EGL frame buffer "
+          "configuration.");
+    case EGL_BAD_CURRENT_SURFACE:
+      return absl::InternalError(
+          "The current surface of the calling thread is a window, pixel buffer "
+          "or pixmap that is no longer valid.");
+    case EGL_BAD_DISPLAY:
+      return absl::InternalError(
+          "An EGLDisplay argument does not name a valid EGL display "
+          "connection.");
+    case EGL_BAD_SURFACE:
+      return absl::InternalError(
+          "An EGLSurface argument does not name a valid surface (window, pixel "
+          "buffer or pixmap) configured for GL rendering.");
+    case EGL_BAD_MATCH:
+      return absl::InternalError(
+          "Arguments are inconsistent (for example, a valid context requires "
+          "buffers not supplied by a valid surface).");
+    case EGL_BAD_PARAMETER:
+      return absl::InternalError("One or more argument values are invalid.");
+    case EGL_BAD_NATIVE_PIXMAP:
+      return absl::InternalError(
+          "A NativePixmapType argument does not refer to a valid native "
+          "pixmap.");
+    case EGL_BAD_NATIVE_WINDOW:
+      return absl::InternalError(
+          "A NativeWindowType argument does not refer to a valid native "
+          "window.");
+    case EGL_CONTEXT_LOST:
+      return absl::InternalError(
+          "A power 
management event has occurred. The application must destroy "
+          "all contexts and reinitialize OpenGL ES state and objects to "
+          "continue rendering.");
+  }
+  return absl::UnknownError(absl::StrCat("EGL error: ", error));
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/gpu/egl_errors.h b/mediapipe/gpu/egl_errors.h
new file mode 100644
index 0000000000..9a14f0c79b
--- /dev/null
+++ b/mediapipe/gpu/egl_errors.h
@@ -0,0 +1,13 @@
+#ifndef MEDIAPIPE_GPU_EGL_ERRORS_H_
+#define MEDIAPIPE_GPU_EGL_ERRORS_H_
+
+#include "absl/status/status.h"
+
+namespace mediapipe {
+
+// Returns the error of the last called EGL function in the current thread.
+absl::Status GetEglError();
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_GPU_EGL_ERRORS_H_
diff --git a/mediapipe/gpu/egl_sync.cc b/mediapipe/gpu/egl_sync.cc
new file mode 100644
index 0000000000..d55c744b8c
--- /dev/null
+++ b/mediapipe/gpu/egl_sync.cc
@@ -0,0 +1,233 @@
+#include "mediapipe/gpu/egl_sync.h"
+
+#include <unistd.h>
+
+#include <cstring>
+#include <utility>
+
+#include "absl/cleanup/cleanup.h"
+#include "absl/log/absl_log.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/str_cat.h"
+#include "mediapipe/framework/deps/no_destructor.h"
+#include "mediapipe/framework/formats/shared_fd.h"
+#include "mediapipe/framework/formats/unique_fd.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/gpu/egl_base.h"
+#include "mediapipe/gpu/egl_errors.h"
+
+namespace mediapipe {
+
+namespace {
+
+PFNEGLCREATESYNCKHRPROC eglCreateSyncKHR;
+PFNEGLWAITSYNCKHRPROC eglWaitSyncKHR;
+PFNEGLCLIENTWAITSYNCKHRPROC eglClientWaitSyncKHR;
+PFNEGLDESTROYSYNCKHRPROC eglDestroySyncKHR;
+PFNEGLDUPNATIVEFENCEFDANDROIDPROC eglDupNativeFenceFDANDROID;
+PFNEGLGETSYNCATTRIBKHRPROC eglGetSyncAttribKHR;
+
+bool HasExtension(EGLDisplay display, const char* extension) {
+  const char* extensions = eglQueryString(display, EGL_EXTENSIONS);
+  return extensions && std::strstr(extensions, extension);
+}
+
+absl::Status CheckEglFenceSyncSupported(EGLDisplay display) {
+  static bool supported = HasExtension(display, "EGL_KHR_fence_sync");
+  if (supported) {
+    return absl::OkStatus();
+  }
+  return absl::UnavailableError("EGL_KHR_fence_sync unavailable.");
+}
+
+absl::Status CheckEglWaitSyncSupported(EGLDisplay display) {
+  static bool supported = HasExtension(display, "EGL_KHR_wait_sync");
+  if (supported) {
+    return absl::OkStatus();
+  }
+  return absl::UnavailableError("EGL_KHR_wait_sync unavailable.");
+}
+
+absl::Status CheckEglAndroidNativeSyncSupported(EGLDisplay display) {
+  static bool supported =
+      HasExtension(display, "EGL_ANDROID_native_fence_sync");
+  if (supported) {
+    return absl::OkStatus();
+  }
+  return absl::UnavailableError("EGL_ANDROID_native_fence_sync unavailable.");
+}
+
+absl::Status CheckEglSyncSupported(EGLDisplay egl_display) {
+  static NoDestructor<absl::Status> support_status([&]() -> absl::Status {
+    MP_RETURN_IF_ERROR(CheckEglFenceSyncSupported(egl_display));
+    MP_RETURN_IF_ERROR(CheckEglWaitSyncSupported(egl_display));
+
+    RET_CHECK(eglCreateSyncKHR = reinterpret_cast<PFNEGLCREATESYNCKHRPROC>(
+                  eglGetProcAddress("eglCreateSyncKHR")));
+    RET_CHECK(eglWaitSyncKHR = reinterpret_cast<PFNEGLWAITSYNCKHRPROC>(
+                  eglGetProcAddress("eglWaitSyncKHR")));
+    RET_CHECK(eglClientWaitSyncKHR =
+                  reinterpret_cast<PFNEGLCLIENTWAITSYNCKHRPROC>(
+                      eglGetProcAddress("eglClientWaitSyncKHR")));
+    RET_CHECK(eglDestroySyncKHR = reinterpret_cast<PFNEGLDESTROYSYNCKHRPROC>(
+                  eglGetProcAddress("eglDestroySyncKHR")));
+    RET_CHECK(eglGetSyncAttribKHR =
+                  reinterpret_cast<PFNEGLGETSYNCATTRIBKHRPROC>(
eglGetProcAddress("eglGetSyncAttribKHR"))); + return absl::OkStatus(); + }()); + return *support_status; +} + +absl::Status CheckEglNativeSyncSupported(EGLDisplay egl_display) { + static NoDestructor support_status([&]() -> absl::Status { + MP_RETURN_IF_ERROR(CheckEglAndroidNativeSyncSupported(egl_display)); + RET_CHECK(eglDupNativeFenceFDANDROID = + reinterpret_cast( + eglGetProcAddress("eglDupNativeFenceFDANDROID"))); + return absl::OkStatus(); + }()); + return *support_status; +} + +} // namespace + +absl::StatusOr EglSync::Create(EGLDisplay display) { + MP_RETURN_IF_ERROR(CheckEglSyncSupported(display)); + + const EGLSyncKHR egl_sync = + eglCreateSyncKHR(display, EGL_SYNC_FENCE_KHR, nullptr); + RET_CHECK_NE(egl_sync, EGL_NO_SYNC_KHR) + << "Create/eglCreateSyncKHR failed: " << GetEglError(); + return EglSync(display, egl_sync); +} + +absl::StatusOr EglSync::CreateNative(EGLDisplay display) { + MP_RETURN_IF_ERROR(CheckEglSyncSupported(display)); + MP_RETURN_IF_ERROR(CheckEglNativeSyncSupported(display)); + + const EGLSyncKHR egl_sync = + eglCreateSyncKHR(display, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr); + RET_CHECK_NE(egl_sync, EGL_NO_SYNC_KHR) + << "CreateNative/eglCreateSyncKHR failed: " << GetEglError(); + return EglSync(display, egl_sync); +} + +absl::StatusOr EglSync::CreateNative(EGLDisplay display, + int native_fence_fd) { + MP_RETURN_IF_ERROR(CheckEglSyncSupported(display)); + MP_RETURN_IF_ERROR(CheckEglNativeSyncSupported(display)); + + // NOTE: cannot use `UniqueFd`, as there's clashing on ownership of the FD + // when passing it to eglCreateSyncKHR (which takes the ownership of the FD) + // which makes `UniqueFd` to be in an invalid state and there are related + // fdsan issues, hence relying on absl::Cleanup. + const int fd = dup(native_fence_fd); + absl::Cleanup fd_cleanup = [fd]() { close(fd); }; + const EGLint sync_attribs[] = {EGL_SYNC_NATIVE_FENCE_FD_ANDROID, + static_cast(fd), EGL_NONE}; + const EGLSyncKHR egl_sync = + eglCreateSyncKHR(display, EGL_SYNC_NATIVE_FENCE_ANDROID, sync_attribs); + RET_CHECK_NE(egl_sync, EGL_NO_SYNC_KHR) << absl::StrCat( + "CreateNative/eglCreateSyncKHR with original FD: ", native_fence_fd, + " and dup FD: ", fd, " - failed: ", GetEglError()); + // EGL took ownership of the passed FD as eglCreateSyncKHR succeeded, so + // cancelling the cleanup. 
+  std::move(fd_cleanup).Cancel();
+
+  return EglSync(display, egl_sync);
+}
+
+absl::StatusOr<EglSync> EglSync::CreateNative(EGLDisplay display,
+                                              const UniqueFd& native_fence_fd) {
+  RET_CHECK(native_fence_fd.IsValid());
+  return CreateNative(display, native_fence_fd.Get());
+}
+
+absl::StatusOr<EglSync> EglSync::CreateNative(EGLDisplay display,
+                                              const SharedFd& native_fence_fd) {
+  RET_CHECK(native_fence_fd);
+  return CreateNative(display, native_fence_fd.Get());
+}
+
+bool EglSync::IsSupported(EGLDisplay display) {
+  return CheckEglSyncSupported(display).ok();
+}
+
+bool EglSync::IsNativeSupported(EGLDisplay display) {
+  return CheckEglNativeSyncSupported(display).ok();
+}
+
+EglSync::EglSync(EglSync&& sync) { *this = std::move(sync); }
+
+EglSync& EglSync::operator=(EglSync&& sync) {
+  if (this != &sync) {
+    Invalidate();
+
+    using std::swap;
+    sync_ = std::exchange(sync.sync_, EGL_NO_SYNC_KHR);
+    display_ = std::exchange(sync.display_, EGL_NO_DISPLAY);
+  }
+  return *this;
+}
+
+void EglSync::Invalidate() {
+  if (sync_ == EGL_NO_SYNC_KHR || display_ == EGL_NO_DISPLAY) {
+    return;
+  }
+
+  const absl::Status egl_sync_support = CheckEglSyncSupported(display_);
+  if (!egl_sync_support.ok()) {
+    ABSL_LOG(DFATAL) << "Attempt to destroy an EGL sync: " << egl_sync_support;
+    return;
+  }
+
+  // Needs extension: EGL_KHR_fence_sync (EGL) / GL_OES_EGL_sync (OpenGL ES).
+  // Note: we're doing nothing when the function pointer is nullptr, or the
+  // call returns EGL_FALSE.
+  const EGLBoolean result = eglDestroySyncKHR(display_, sync_);
+  if (result == EGL_FALSE) {
+    ABSL_LOG(DFATAL) << "eglDestroySyncKHR failed: " << GetEglError();
+  }
+  sync_ = EGL_NO_SYNC_KHR;
+}
+
+absl::Status EglSync::WaitOnGpu() {
+  MP_RETURN_IF_ERROR(CheckEglSyncSupported(display_));
+
+  const EGLint result = eglWaitSyncKHR(display_, sync_, 0);
+  RET_CHECK_EQ(result, EGL_TRUE) << "eglWaitSyncKHR failed: " << GetEglError();
+  return absl::OkStatus();
+}
+
+absl::Status EglSync::Wait() {
+  MP_RETURN_IF_ERROR(CheckEglSyncSupported(display_));
+
+  const EGLint result = eglClientWaitSyncKHR(
+      display_, sync_, EGL_SYNC_FLUSH_COMMANDS_BIT_KHR, EGL_FOREVER_KHR);
+  RET_CHECK_EQ(result, EGL_CONDITION_SATISFIED_KHR)
+      << "eglClientWaitSyncKHR failed: " << GetEglError();
+  return absl::OkStatus();
+}
+
+absl::StatusOr<UniqueFd> EglSync::DupNativeFd() {
+  MP_RETURN_IF_ERROR(CheckEglNativeSyncSupported(display_));
+
+  const int fd = eglDupNativeFenceFDANDROID(display_, sync_);
+  RET_CHECK_NE(fd, EGL_NO_NATIVE_FENCE_FD_ANDROID)
+      << "eglDupNativeFenceFDANDROID failed: " << GetEglError();
+  return UniqueFd(fd);
+}
+
+absl::StatusOr<bool> EglSync::IsSignaled() {
+  EGLint status;
+  const EGLBoolean success =
+      eglGetSyncAttribKHR(display_, sync_, EGL_SYNC_STATUS_KHR, &status);
+  RET_CHECK_EQ(success, EGL_TRUE)
+      << "eglGetSyncAttribKHR failed: " << GetEglError();
+  return status == EGL_SIGNALED_KHR;
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/gpu/egl_sync.h b/mediapipe/gpu/egl_sync.h
new file mode 100644
index 0000000000..81ae5e0701
--- /dev/null
+++ b/mediapipe/gpu/egl_sync.h
@@ -0,0 +1,80 @@
+#ifndef MEDIAPIPE_GPU_EGL_SYNC_H_
+#define MEDIAPIPE_GPU_EGL_SYNC_H_
+
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "mediapipe/framework/formats/shared_fd.h"
+#include "mediapipe/framework/formats/unique_fd.h"
+#include "mediapipe/gpu/egl_base.h"
+
+namespace mediapipe {
+
+// RAII wrapper for EGL sync object.
+class EglSync {
+ public:
+  // Creates a fence in OpenGL command stream. This sync is enqueued and *not*
+  // flushed.
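+  //
+  // A minimal usage sketch (illustrative only; assumes a valid `display`,
+  // a current GL context, and MediaPipe's status macros):
+  //
+  //   MP_ASSIGN_OR_RETURN(EglSync sync, EglSync::Create(display));
+  //   glFlush();  // The fence is enqueued but not flushed.
+  //   MP_RETURN_IF_ERROR(sync.Wait());  // Block until the fence signals.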
+  static absl::StatusOr<EglSync> Create(EGLDisplay display);
+
+  // Creates a native fence in OpenGL command stream. This sync is enqueued and
+  // *not* flushed.
+  static absl::StatusOr<EglSync> CreateNative(EGLDisplay display);
+
+  // Creates a native fence in OpenGL command stream based on a native fence FD.
+  static absl::StatusOr<EglSync> CreateNative(EGLDisplay display,
+                                              const UniqueFd& native_fence_fd);
+
+  // Creates a native fence in OpenGL command stream based on a native fence FD.
+  static absl::StatusOr<EglSync> CreateNative(EGLDisplay display,
+                                              const SharedFd& native_fence_fd);
+
+  static bool IsSupported(EGLDisplay display);
+  static bool IsNativeSupported(EGLDisplay display);
+
+  // Move-only
+  EglSync(EglSync&& sync);
+  EglSync& operator=(EglSync&& sync);
+
+  EglSync(const EglSync&) = delete;
+  EglSync& operator=(const EglSync&) = delete;
+
+  ~EglSync() { Invalidate(); }
+
+  // Causes GPU to block and wait until this sync has been signaled.
+  // This call does not block and returns immediately.
+  absl::Status WaitOnGpu();
+
+  // Causes CPU to block and wait until this sync has been signaled.
+  absl::Status Wait();
+
+  // Returns the EGLDisplay on which this instance was created.
+  EGLDisplay display() const { return display_; }
+
+  // Returns the EGLSyncKHR wrapped by this instance.
+  EGLSyncKHR sync() const { return sync_; }
+
+  // Returns true if this EGL sync is signaled.
+  absl::StatusOr<bool> IsSignaled();
+
+  // Duplicates the file descriptor stored in native EGL fence sync.
+  absl::StatusOr<UniqueFd> DupNativeFd();
+
+ private:
+  EglSync(EGLDisplay display, EGLSyncKHR sync)
+      : display_(display), sync_(sync) {}
+
+  // `native_fence_fd` - valid native fence FD.
+  // NOTE: this function duplicates `native_fence_fd` (it doesn't take
+  // ownership of it or modify it).
+  static absl::StatusOr<EglSync> CreateNative(EGLDisplay display,
+                                              int native_fence_fd);
+
+  void Invalidate();
+
+  EGLDisplay display_;
+  EGLSyncKHR sync_ = EGL_NO_SYNC_KHR;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_GPU_EGL_SYNC_H_
diff --git a/mediapipe/gpu/egl_sync_point.cc b/mediapipe/gpu/egl_sync_point.cc
new file mode 100644
index 0000000000..f11520d973
--- /dev/null
+++ b/mediapipe/gpu/egl_sync_point.cc
@@ -0,0 +1,88 @@
+#include "mediapipe/gpu/egl_sync_point.h"
+
+#include <memory>
+#include <utility>
+
+#include "absl/log/absl_log.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "mediapipe/gpu/egl_sync.h"
+#include "mediapipe/gpu/gl_context.h"
+
+namespace mediapipe {
+
+namespace {
+
+class EglFenceSyncPoint : public GlSyncPoint {
+ public:
+  explicit EglFenceSyncPoint(std::shared_ptr<GlContext> gl_context,
+                             EglSync egl_sync)
+      : GlSyncPoint(std::move(gl_context)), egl_sync_(std::move(egl_sync)) {}
+
+  ~EglFenceSyncPoint() override {
+    gl_context_->RunWithoutWaiting(
+        [ptr = new EglSync(std::move(egl_sync_))]() { delete ptr; });
+  }
+
+  EglFenceSyncPoint(const EglFenceSyncPoint&) = delete;
+  EglFenceSyncPoint& operator=(const EglFenceSyncPoint&) = delete;
+
+  void Wait() override {
+    if (GlContext::IsAnyContextCurrent()) {
+      WaitInternal();
+      return;
+    }
+    // Fall back to GL context used during sync creation.
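+    // GlContext::Run blocks until the callback has finished executing on the
+    // context's thread, so WaitInternal() runs with a current GL context.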
+ gl_context_->Run([this] { WaitInternal(); }); + } + + void WaitInternal() { + absl::Status result = egl_sync_.Wait(); + if (!result.ok()) { + ABSL_LOG(DFATAL) << "EGL sync Wait failed: " << result; + } + } + + void WaitOnGpu() override { + if (!GlContext::IsAnyContextCurrent()) { + ABSL_LOG(DFATAL) << "WaitOnGpu without current context."; + } + + absl::Status result = egl_sync_.WaitOnGpu(); + if (!result.ok()) { + ABSL_LOG(DFATAL) << "EGL sync WaitOnGpu failed: " << result; + } + } + + bool IsReady() override { + if (GlContext::IsAnyContextCurrent()) { + return IsReadyInternal(); + } + + // Fall back to GL context used during sync creation. + bool ready = false; + gl_context_->Run([this, &ready] { ready = IsReadyInternal(); }); + return ready; + } + + bool IsReadyInternal() { + absl::StatusOr is_ready = egl_sync_.IsSignaled(); + if (!is_ready.ok()) { + ABSL_LOG(DFATAL) << "EGL sync IsSignaled failed: " << is_ready.status(); + return false; + } + return *is_ready; + } + + private: + EglSync egl_sync_; +}; + +} // namespace + +absl::StatusOr> CreateEglSyncPoint( + std::shared_ptr gl_context, EglSync egl_sync) { + return std::make_unique(std::move(gl_context), + std::move(egl_sync)); +} + +} // namespace mediapipe diff --git a/mediapipe/gpu/egl_sync_point.h b/mediapipe/gpu/egl_sync_point.h new file mode 100644 index 0000000000..717d0cf41d --- /dev/null +++ b/mediapipe/gpu/egl_sync_point.h @@ -0,0 +1,17 @@ +#ifndef MEDIAPIPE_GPU_EGL_SYNC_POINT_H_ +#define MEDIAPIPE_GPU_EGL_SYNC_POINT_H_ + +#include + +#include "absl/status/statusor.h" +#include "mediapipe/gpu/egl_sync.h" +#include "mediapipe/gpu/gl_context.h" + +namespace mediapipe { + +absl::StatusOr> CreateEglSyncPoint( + std::shared_ptr gl_context, EglSync egl_sync); + +} // namespace mediapipe + +#endif // MEDIAPIPE_GPU_EGL_SYNC_POINT_H_ diff --git a/mediapipe/gpu/gl_base.h b/mediapipe/gpu/gl_base.h index 12a04e0bbf..a16bcffa3b 100644 --- a/mediapipe/gpu/gl_base.h +++ b/mediapipe/gpu/gl_base.h @@ -55,6 +55,7 @@ #define HAS_EGL 1 #include +// TODO: b/377324183 - add #include #include #if defined(__ANDROID__) diff --git a/mediapipe/gpu/gl_context.cc b/mediapipe/gpu/gl_context.cc index 69d0e7bbc4..0be605db55 100644 --- a/mediapipe/gpu/gl_context.cc +++ b/mediapipe/gpu/gl_context.cc @@ -16,22 +16,34 @@ #include -#include +#include +#include +#include +#include +#include +#include +#include +#include #include -#include #include +#include "absl/base/attributes.h" #include "absl/base/dynamic_annotations.h" +#include "absl/base/thread_annotations.h" #include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" +#include "absl/strings/numbers.h" #include "absl/strings/str_format.h" +#include "absl/strings/str_split.h" +#include "absl/strings/string_view.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/port.h" // IWYU pragma: keep #include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_builder.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/gpu/gl_base.h" #include "mediapipe/gpu/gl_context_internal.h" #include "mediapipe/gpu/gpu_buffer_format.h" diff --git a/mediapipe/gpu/gl_context.h b/mediapipe/gpu/gl_context.h index 19dbce2429..e491bb9b06 100644 --- a/mediapipe/gpu/gl_context.h +++ b/mediapipe/gpu/gl_context.h @@ -18,11 +18,18 @@ #include #include +#include #include #include +#include +#include "absl/base/attributes.h" +#include 
"absl/base/thread_annotations.h" #include "absl/container/flat_hash_map.h" #include "absl/log/absl_check.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/executor.h" #include "mediapipe/framework/mediapipe_profiling.h" diff --git a/mediapipe/gpu/gl_texture_buffer.cc b/mediapipe/gpu/gl_texture_buffer.cc index e835a1a55f..db61fb35e6 100644 --- a/mediapipe/gpu/gl_texture_buffer.cc +++ b/mediapipe/gpu/gl_texture_buffer.cc @@ -14,6 +14,7 @@ #include "mediapipe/gpu/gl_texture_buffer.h" +#include #include #include #include @@ -357,8 +358,8 @@ void GlTextureBuffer::ViewDoneWriting(const GlTextureView& view) { #endif // __ANDROID__ } -static void ReadTexture(GlContext& ctx, const GlTextureView& view, - GpuBufferFormat format, void* output, size_t size) { +void ReadTexture(GlContext& ctx, const GlTextureView& view, + GpuBufferFormat format, void* output, size_t size) { // TODO: check buffer size? We could use glReadnPixels where available // (OpenGL ES 3.2, i.e. nowhere). Note that, to fully check that the read // won't overflow the buffer with glReadPixels, we'd also need to check or diff --git a/mediapipe/gpu/gl_texture_buffer.h b/mediapipe/gpu/gl_texture_buffer.h index ab8154a85d..a8753889ef 100644 --- a/mediapipe/gpu/gl_texture_buffer.h +++ b/mediapipe/gpu/gl_texture_buffer.h @@ -18,6 +18,7 @@ #ifndef MEDIAPIPE_GPU_GL_TEXTURE_BUFFER_H_ #define MEDIAPIPE_GPU_GL_TEXTURE_BUFFER_H_ +#include #include #include @@ -175,6 +176,13 @@ class GlTextureBuffer std::shared_ptr producer_context_; }; +// Reads `texture_view` into `output`. +// NOTE: It's clients responsibility to allocate `output` properly and provide +// the right `size`. +// NOTE: Must be invoked on a thread with GL context. +void ReadTexture(GlContext& ctx, const GlTextureView& texture_view, + GpuBufferFormat format, void* output, size_t size); + using GlTextureBufferSharedPtr = std::shared_ptr; } // namespace mediapipe diff --git a/mediapipe/gpu/gpu_buffer_test.cc b/mediapipe/gpu/gpu_buffer_test.cc index 7be33e39c6..f166dd4777 100644 --- a/mediapipe/gpu/gpu_buffer_test.cc +++ b/mediapipe/gpu/gpu_buffer_test.cc @@ -22,8 +22,6 @@ #include "mediapipe/framework/tool/test_util.h" #include "mediapipe/gpu/gl_texture_buffer.h" #include "mediapipe/gpu/gl_texture_util.h" -#include "mediapipe/gpu/gpu_buffer_storage_ahwb.h" -#include "mediapipe/gpu/gpu_buffer_storage_image_frame.h" #include "mediapipe/gpu/gpu_test_base.h" #include "stb_image.h" #include "stb_image_write.h" diff --git a/mediapipe/gpu/gpu_shared_data_internal.cc b/mediapipe/gpu/gpu_shared_data_internal.cc index a6838d238a..bf8086b49f 100644 --- a/mediapipe/gpu/gpu_shared_data_internal.cc +++ b/mediapipe/gpu/gpu_shared_data_internal.cc @@ -42,6 +42,17 @@ namespace mediapipe { +namespace { + +inline constexpr char kGpuExecutorName[] = "__gpu"; + +// Returns the executor name from a context key . 
+std::string GetExecutorNameFromContextKey(const std::string& context_key) {
+  return absl::StrCat(kGpuExecutorName, "_", context_key);
+}
+
+}  // namespace
+
 #if __APPLE__
 static constexpr bool kGlContextUseDedicatedThread = false;
 #elif defined(__EMSCRIPTEN__)
@@ -130,7 +141,9 @@ GpuResources::GpuResources(std::shared_ptr<GlContext> gl_context,
 #endif  // MEDIAPIPE_GPU_BUFFER_USE_CV_PIXEL_BUFFER
 {
   gl_key_context_->insert({SharedContextKey(), gl_context});
-  named_executors_[kGpuExecutorName] =
+  const std::string executor_name =
+      GetExecutorNameFromContextKey(SharedContextKey());
+  named_executors_[executor_name] =
       std::make_shared<GlContextExecutor>(gl_context.get());
 #if __APPLE__
 #if MEDIAPIPE_GPU_BUFFER_USE_CV_PIXEL_BUFFER
@@ -144,7 +157,9 @@ absl::StatusOr<std::shared_ptr<Executor>>
 GpuResources::GetDefaultGpuExecutor() const {
-  const auto it = named_executors_.find(kGpuExecutorName);
+  const std::string executor_name =
+      GetExecutorNameFromContextKey(SharedContextKey());
+  const auto it = named_executors_.find(executor_name);
   RET_CHECK(it != named_executors_.end()) << "Can't find default gpu executor.";
   return it->second;
 }
@@ -209,8 +224,8 @@ absl::Status GpuResources::PrepareGpuNode(CalculatorNode* node) {
                       GetOrCreateGlContext(context_key));
   if (kGlContextUseDedicatedThread) {
-    std::string executor_name =
-        absl::StrCat(kGpuExecutorName, "_", context_key);
+    const std::string executor_name =
+        GetExecutorNameFromContextKey(context_key);
     node->SetExecutor(executor_name);
     if (!ContainsKey(named_executors_, executor_name)) {
       named_executors_.emplace(
diff --git a/mediapipe/gpu/graph_support.h b/mediapipe/gpu/graph_support.h
index df8c27dff6..df20ef829b 100644
--- a/mediapipe/gpu/graph_support.h
+++ b/mediapipe/gpu/graph_support.h
@@ -20,7 +20,6 @@ namespace mediapipe {
 inline constexpr char kGpuSharedTagName[] = "GPU_SHARED";
 inline constexpr char kGpuSharedSidePacketName[] = "gpu_shared";
-inline constexpr char kGpuExecutorName[] = "__gpu";
 }  // namespace mediapipe
diff --git a/mediapipe/gpu/webgpu/BUILD b/mediapipe/gpu/webgpu/BUILD
index 597b60da8f..797f2d4bd1 100644
--- a/mediapipe/gpu/webgpu/BUILD
+++ b/mediapipe/gpu/webgpu/BUILD
@@ -75,16 +75,17 @@ cc_library(
         "//mediapipe/framework:graph_service",
         "//mediapipe/framework/deps:no_destructor",
         "//mediapipe/gpu:attachments",
+        "//third_party/dawn:webgpu_headers",
         "@com_google_absl//absl/base:core_headers",
         "@com_google_absl//absl/container:flat_hash_map",
         "@com_google_absl//absl/log:absl_check",
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/status:statusor",
     ] + select({
-        "//mediapipe:emscripten": [],
+        "//mediapipe:emscripten": [
+        ],
         "//conditions:default": [
             ":webgpu_device_registration",
-            "//third_party/dawn:dawncpp_headers",
         ],
     }),
 )
@@ -127,6 +128,7 @@ cc_library(
     deps = [
         "//mediapipe/framework/port:status",
         "//mediapipe/web:jspi_check",
+        "//third_party/dawn:webgpu_headers",
         "@com_google_absl//absl/status:statusor",
     ],
 )
diff --git a/mediapipe/gpu/webgpu/webgpu_service.cc b/mediapipe/gpu/webgpu/webgpu_service.cc
index 9bd9a72114..c7633a25c0 100644
--- a/mediapipe/gpu/webgpu/webgpu_service.cc
+++ b/mediapipe/gpu/webgpu/webgpu_service.cc
@@ -14,12 +14,15 @@
 #include "mediapipe/gpu/webgpu/webgpu_service.h"
+#ifdef __EMSCRIPTEN__
+#include
+#endif  // __EMSCRIPTEN__
+
 #include "mediapipe/framework/graph_service.h"
 #ifdef __EMSCRIPTEN__
 #include
 #include
-#include
 #include
 EM_JS_DEPS(webgpu_service_deps, "$stringToNewUTF8")
@@ -65,10 +68,14 @@ EM_JS(char*, GetAdapterVendor, (), {
 WebGpuService::WebGpuService()
     :
canvas_selector_("canvas_webgpu"), device_(wgpu::Device::Acquire(emscripten_webgpu_get_device())) { - adapter_info_.architecture = GetAdapterArchitecture(); - adapter_info_.description = GetAdapterDescription(); - adapter_info_.device = GetAdapterDeviceName(); - adapter_info_.vendor = GetAdapterVendor(); + adapter_info_.architecture.data = GetAdapterArchitecture(); + adapter_info_.architecture.length = strlen(adapter_info_.architecture.data); + adapter_info_.description.data = GetAdapterDescription(); + adapter_info_.description.length = strlen(adapter_info_.description.data); + adapter_info_.device.data = GetAdapterDeviceName(); + adapter_info_.device.length = strlen(adapter_info_.device.data); + adapter_info_.vendor.data = GetAdapterVendor(); + adapter_info_.vendor.length = strlen(adapter_info_.vendor.data); } #else WebGpuService::WebGpuService() diff --git a/mediapipe/gpu/webgpu/webgpu_service.h b/mediapipe/gpu/webgpu/webgpu_service.h index 5afd6c5b27..268191e665 100644 --- a/mediapipe/gpu/webgpu/webgpu_service.h +++ b/mediapipe/gpu/webgpu/webgpu_service.h @@ -28,7 +28,6 @@ #include "mediapipe/gpu/webgpu/webgpu_check.h" #ifdef __EMSCRIPTEN__ -#include #include #else #include "mediapipe/gpu/webgpu/webgpu_device_registration.h" diff --git a/mediapipe/gpu/webgpu/webgpu_texture_buffer.cc b/mediapipe/gpu/webgpu/webgpu_texture_buffer.cc index 6e52d5f655..69bb0f7773 100644 --- a/mediapipe/gpu/webgpu/webgpu_texture_buffer.cc +++ b/mediapipe/gpu/webgpu/webgpu_texture_buffer.cc @@ -1,5 +1,6 @@ #include "mediapipe/gpu/webgpu/webgpu_texture_buffer.h" +#include #include #include diff --git a/mediapipe/gpu/webgpu/webgpu_utils.cc b/mediapipe/gpu/webgpu/webgpu_utils.cc index 10b61a4ee3..65ed1c43fc 100644 --- a/mediapipe/gpu/webgpu/webgpu_utils.cc +++ b/mediapipe/gpu/webgpu/webgpu_utils.cc @@ -23,7 +23,7 @@ namespace { EM_ASYNC_JS(void, mediapipe_map_buffer_jspi, (WGPUBuffer buffer_handle, uint8_t* data), { - const buffer = WebGPU.mgrBuffer.get(buffer_handle); + const buffer = WebGPU.getJsObject(buffer_handle); await buffer.mapAsync(GPUMapMode.READ); const mapped = buffer.getMappedRange(); HEAPU8.set(new Uint8Array(mapped), data); diff --git a/mediapipe/graphs/instant_motion_tracking/calculators/BUILD b/mediapipe/graphs/instant_motion_tracking/calculators/BUILD index cdfd911d49..50128d97fc 100644 --- a/mediapipe/graphs/instant_motion_tracking/calculators/BUILD +++ b/mediapipe/graphs/instant_motion_tracking/calculators/BUILD @@ -13,6 +13,7 @@ # limitations under the License. 
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library") +# Placeholder: load proto_library licenses(["notice"]) diff --git a/mediapipe/java/com/google/mediapipe/framework/AssetCache.java b/mediapipe/java/com/google/mediapipe/framework/AssetCache.java index 21fea061d7..21bbcd9aba 100644 --- a/mediapipe/java/com/google/mediapipe/framework/AssetCache.java +++ b/mediapipe/java/com/google/mediapipe/framework/AssetCache.java @@ -133,7 +133,7 @@ public synchronized String getAbsolutePathFromAsset(String assetPath) { inStream = assetManager.open(assetPath); writeStreamToFile(inStream, destinationFile); } catch (IOException ioe) { - logger.atSevere().log("Unable to unpack: %s", assetPath); + logger.atSevere().withCause(ioe).log("Unable to unpack: %s", assetPath); try { if (inStream != null) { inStream.close(); diff --git a/mediapipe/model_maker/python/core/data/classification_dataset_test.py b/mediapipe/model_maker/python/core/data/classification_dataset_test.py index dfcea7da6a..ad83651ba0 100644 --- a/mediapipe/model_maker/python/core/data/classification_dataset_test.py +++ b/mediapipe/model_maker/python/core/data/classification_dataset_test.py @@ -14,8 +14,6 @@ from typing import Any, List, Tuple, TypeVar -# Dependency imports - import tensorflow as tf from mediapipe.model_maker.python.core.data import classification_dataset diff --git a/mediapipe/model_maker/python/core/data/data_util_test.py b/mediapipe/model_maker/python/core/data/data_util_test.py index 8bed8ef7c6..fbf9a32dc8 100644 --- a/mediapipe/model_maker/python/core/data/data_util_test.py +++ b/mediapipe/model_maker/python/core/data/data_util_test.py @@ -18,8 +18,6 @@ import os -# Dependency imports - from absl import flags import tensorflow as tf diff --git a/mediapipe/model_maker/python/core/data/dataset.py b/mediapipe/model_maker/python/core/data/dataset.py index 30480995df..2db599aa8d 100644 --- a/mediapipe/model_maker/python/core/data/dataset.py +++ b/mediapipe/model_maker/python/core/data/dataset.py @@ -20,7 +20,6 @@ import functools from typing import Any, Callable, Optional, Tuple, TypeVar -# Dependency imports import tensorflow as tf _DatasetT = TypeVar('_DatasetT', bound='Dataset') diff --git a/mediapipe/model_maker/python/core/data/dataset_test.py b/mediapipe/model_maker/python/core/data/dataset_test.py index 7a3f75388f..f57342cded 100644 --- a/mediapipe/model_maker/python/core/data/dataset_test.py +++ b/mediapipe/model_maker/python/core/data/dataset_test.py @@ -16,8 +16,6 @@ from __future__ import division from __future__ import print_function -# Dependency imports - import numpy as np import tensorflow as tf diff --git a/mediapipe/model_maker/python/core/tasks/classifier_test.py b/mediapipe/model_maker/python/core/tasks/classifier_test.py index 2943825acf..ea88268e97 100644 --- a/mediapipe/model_maker/python/core/tasks/classifier_test.py +++ b/mediapipe/model_maker/python/core/tasks/classifier_test.py @@ -14,8 +14,6 @@ import os -# Dependency imports - import tensorflow as tf from mediapipe.model_maker.python.core.tasks import classifier diff --git a/mediapipe/model_maker/python/core/tasks/custom_model_test.py b/mediapipe/model_maker/python/core/tasks/custom_model_test.py index afb418c445..6641d8c649 100644 --- a/mediapipe/model_maker/python/core/tasks/custom_model_test.py +++ b/mediapipe/model_maker/python/core/tasks/custom_model_test.py @@ -18,8 +18,6 @@ import os -# Dependency imports - import tensorflow as tf from mediapipe.model_maker.python.core.tasks import custom_model diff --git 
a/mediapipe/model_maker/python/core/utils/BUILD b/mediapipe/model_maker/python/core/utils/BUILD index ef6e652845..4b8ff1bf97 100644 --- a/mediapipe/model_maker/python/core/utils/BUILD +++ b/mediapipe/model_maker/python/core/utils/BUILD @@ -44,7 +44,6 @@ py_library( deps = [ ":quantization", "//mediapipe/model_maker/python/core/data:dataset", - "@model_maker_pip_deps_ai_edge_litert_nightly//:pkg", "@model_maker_pip_deps_numpy//:pkg", "@model_maker_pip_deps_tensorflow//:pkg", ], @@ -173,7 +172,6 @@ py_test( ":quantization", ":test_util", "@model_maker_pip_deps_absl_py//:pkg", - "@model_maker_pip_deps_ai_edge_litert_nightly//:pkg", "@model_maker_pip_deps_tensorflow//:pkg", ], ) diff --git a/mediapipe/model_maker/python/core/utils/loss_functions.py b/mediapipe/model_maker/python/core/utils/loss_functions.py index c63c277163..476623bda2 100644 --- a/mediapipe/model_maker/python/core/utils/loss_functions.py +++ b/mediapipe/model_maker/python/core/utils/loss_functions.py @@ -165,7 +165,7 @@ def __call__(self, y_pred_rank = y_pred.shape.ndims if y_pred_rank - weight_rank == 1: sample_weight = tf.expand_dims(sample_weight, [-1]) - elif weight_rank != 0: + elif weight_rank != 0 and y_pred_rank != weight_rank: raise ValueError(f'Unexpected sample_weights, should be either a scalar' f'or a vector of batch_size:{batch_size.numpy()}') ce = -tf.math.log(y_pred) diff --git a/mediapipe/model_maker/python/core/utils/model_util.py b/mediapipe/model_maker/python/core/utils/model_util.py index 0a13095e6a..72271d71a8 100644 --- a/mediapipe/model_maker/python/core/utils/model_util.py +++ b/mediapipe/model_maker/python/core/utils/model_util.py @@ -21,14 +21,11 @@ import tempfile from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -# Dependency imports - import numpy as np import tensorflow as tf from mediapipe.model_maker.python.core.data import dataset from mediapipe.model_maker.python.core.utils import quantization -from ai_edge_litert import interpreter as tfl_interpreter DEFAULT_SCALE, DEFAULT_ZERO_POINT = 0, 0 ESTIMITED_STEPS_PER_EPOCH = 1000 @@ -274,7 +271,7 @@ def __init__(self, tflite_model: bytearray): Args: tflite_model: A valid flatbuffer representing the TFLite model. 
""" - self.interpreter = tfl_interpreter.Interpreter(model_content=tflite_model) + self.interpreter = tf.lite.Interpreter(model_content=tflite_model) self.interpreter.allocate_tensors() self.input_details = self.interpreter.get_input_details() self.output_details = self.interpreter.get_output_details() diff --git a/mediapipe/model_maker/python/core/utils/quantization.py b/mediapipe/model_maker/python/core/utils/quantization.py index 2a8d92244b..a58f0386ec 100644 --- a/mediapipe/model_maker/python/core/utils/quantization.py +++ b/mediapipe/model_maker/python/core/utils/quantization.py @@ -19,8 +19,6 @@ from typing import Any, Callable, List, Optional, Union -# Dependency imports - import tensorflow as tf from mediapipe.model_maker.python.core.data import dataset as ds diff --git a/mediapipe/model_maker/python/core/utils/quantization_test.py b/mediapipe/model_maker/python/core/utils/quantization_test.py index 0164d39bf8..57523d4056 100644 --- a/mediapipe/model_maker/python/core/utils/quantization_test.py +++ b/mediapipe/model_maker/python/core/utils/quantization_test.py @@ -17,7 +17,6 @@ from mediapipe.model_maker.python.core.utils import quantization from mediapipe.model_maker.python.core.utils import test_util -from ai_edge_litert import interpreter as tfl_interpreter class QuantizationTest(tf.test.TestCase, parameterized.TestCase): @@ -60,7 +59,7 @@ def test_set_converter_with_quantization_from_int8_config(self): self.assertEqual(config.supported_ops, [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]) tflite_model = converter.convert() - interpreter = tfl_interpreter.Interpreter(model_content=tflite_model) + interpreter = tf.lite.Interpreter(model_content=tflite_model) self.assertEqual(interpreter.get_input_details()[0]['dtype'], tf.uint8) self.assertEqual(interpreter.get_output_details()[0]['dtype'], tf.uint8) @@ -83,7 +82,7 @@ def test_set_converter_with_quantization_from_float16_config(self): converter = config.set_converter_with_quantization(converter=converter) self.assertEqual(config.supported_types, [tf.float16]) tflite_model = converter.convert() - interpreter = tfl_interpreter.Interpreter(model_content=tflite_model) + interpreter = tf.lite.Interpreter(model_content=tflite_model) # The input and output are expected to be set to float32 by default. 
self.assertEqual(interpreter.get_input_details()[0]['dtype'], tf.float32) self.assertEqual(interpreter.get_output_details()[0]['dtype'], tf.float32) diff --git a/mediapipe/model_maker/python/core/utils/test_util.py b/mediapipe/model_maker/python/core/utils/test_util.py index 72fb229c76..04a343777d 100644 --- a/mediapipe/model_maker/python/core/utils/test_util.py +++ b/mediapipe/model_maker/python/core/utils/test_util.py @@ -19,8 +19,6 @@ from typing import Sequence from typing import Dict, List, Union -# Dependency imports - import numpy as np import tensorflow as tf diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 03be858cfb..c5861ed2d5 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -65,7 +65,6 @@ py_library( deps = [ ":hyperparameters", ":model_options", - "//mediapipe/model_maker/python/core/utils:file_util", "//mediapipe/model_maker/python/text/core:bert_model_spec", ], ) diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec.py b/mediapipe/model_maker/python/text/text_classifier/model_spec.py index a34f14fd5c..c22454c43f 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec.py @@ -17,7 +17,6 @@ import enum import functools -from mediapipe.model_maker.python.core.utils import file_util from mediapipe.model_maker.python.text.core import bert_model_spec from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp from mediapipe.model_maker.python.text.text_classifier import model_options as mo diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 07811d8baa..a881562cf7 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -859,6 +859,7 @@ def export_model( self, model_name: str = "model.tflite", quantization_config: Optional[quantization.QuantizationConfig] = None, + batch_size: int | None = None, ): """Converts and saves the model to a TFLite file with metadata included. @@ -873,6 +874,8 @@ def export_model( model_name: File name to save TFLite model with metadata. The full export path is {self._hparams.export_dir}/{model_name}. quantization_config: The configuration for model quantization. + batch_size: Inference batch size to use for the TFlite model. Default is + None, which means the batch size is dynamic. 
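+        For example, batch_size=1 exports a model whose input tensors have a
+        fixed shape of (1, seq_len) instead of a dynamic batch dimension.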
""" tf.io.gfile.makedirs(self._hparams.export_dir) tflite_file = os.path.join(self._hparams.export_dir, model_name) @@ -883,16 +886,19 @@ def export_model( shape=(self._model_options.seq_len,), dtype=tf.int32, name="input_word_ids", + batch_size=batch_size, ), input_mask=tf.keras.layers.Input( shape=(self._model_options.seq_len,), dtype=tf.int32, name="input_mask", + batch_size=batch_size, ), input_type_ids=tf.keras.layers.Input( shape=(self._model_options.seq_len,), dtype=tf.int32, name="input_type_ids", + batch_size=batch_size, ), ) output = self._model(constant_len_inputs) diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py index b646a15ad9..6484e70ffb 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py @@ -16,8 +16,6 @@ import os import tempfile -# Dependency imports - from absl import app from absl import flags from absl import logging diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py index 62db36e455..c5c5c7fd3a 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py @@ -149,6 +149,7 @@ def test_create_and_train_bert(self, supported_model): batch_size=1, learning_rate=3e-5, distribution_strategy='off', + class_weights={0: 1.0, 1: 1.0}, ), ) bert_classifier = text_classifier.TextClassifier.create( diff --git a/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer_demo.py b/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer_demo.py index 0c1d57d2b7..47cc7b3b74 100644 --- a/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer_demo.py +++ b/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer_demo.py @@ -19,8 +19,6 @@ import os -# Dependency imports - from absl import app from absl import flags from absl import logging diff --git a/mediapipe/model_maker/python/vision/image_classifier/image_classifier_demo.py b/mediapipe/model_maker/python/vision/image_classifier/image_classifier_demo.py index 31b6e58768..d71c16ef25 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/image_classifier_demo.py +++ b/mediapipe/model_maker/python/vision/image_classifier/image_classifier_demo.py @@ -15,8 +15,6 @@ import os -# Dependency imports - from absl import app from absl import flags from absl import logging diff --git a/mediapipe/model_maker/python/vision/object_detector/model.py b/mediapipe/model_maker/python/vision/object_detector/model.py index ea78ca8c62..317d686885 100644 --- a/mediapipe/model_maker/python/vision/object_detector/model.py +++ b/mediapipe/model_maker/python/vision/object_detector/model.py @@ -256,7 +256,7 @@ def export_saved_model(self, save_path: str): # The remaining method overrides are used to train this object detector model # using model.fit(). 
- def call( + def call( # pytype: disable=annotation-type-mismatch self, images: Union[tf.Tensor, Sequence[tf.Tensor]], image_shape: Optional[tf.Tensor] = None, diff --git a/mediapipe/model_maker/python/vision/object_detector/object_detector_demo.py b/mediapipe/model_maker/python/vision/object_detector/object_detector_demo.py index 04820796f0..75929277f9 100644 --- a/mediapipe/model_maker/python/vision/object_detector/object_detector_demo.py +++ b/mediapipe/model_maker/python/vision/object_detector/object_detector_demo.py @@ -14,8 +14,6 @@ """Demo for making an object detector model by MediaPipe Model Maker.""" import os -# Dependency imports - from absl import app from absl import flags from absl import logging diff --git a/mediapipe/model_maker/requirements.txt b/mediapipe/model_maker/requirements.txt index 419d0166ca..3ce977b671 100644 --- a/mediapipe/model_maker/requirements.txt +++ b/mediapipe/model_maker/requirements.txt @@ -1,5 +1,4 @@ absl-py -ai-edge-litert-nightly mediapipe>=0.10.0 numpy<2 opencv-python diff --git a/mediapipe/model_maker/requirements_bazel.txt b/mediapipe/model_maker/requirements_bazel.txt index 003085ed07..fd6c421cf8 100644 --- a/mediapipe/model_maker/requirements_bazel.txt +++ b/mediapipe/model_maker/requirements_bazel.txt @@ -1,5 +1,4 @@ absl-py -ai-edge-litert-nightly numpy<2 opencv-python setuptools==70.3.0 # needed due to https://github.com/pypa/setuptools/issues/4487 diff --git a/mediapipe/model_maker/requirements_lock.txt b/mediapipe/model_maker/requirements_lock.txt index 62972c57da..83eb07a121 100644 --- a/mediapipe/model_maker/requirements_lock.txt +++ b/mediapipe/model_maker/requirements_lock.txt @@ -15,8 +15,6 @@ absl-py==1.4.0 # tensorflow-metadata # tensorflow-model-optimization # tf-slim -ai-edge-litert-nightly==1.0.1.dev20241022 - # via -r mediapipe/opensource_only/model_maker_requirements_bazel.txt array-record==0.5.1 # via tensorflow-datasets astunparse==1.6.3 @@ -50,9 +48,7 @@ etils[array-types,enp,epath,epy,etqdm,etree]==1.5.2 # array-record # tensorflow-datasets flatbuffers==24.3.25 - # via - # ai-edge-litert-nightly - # tensorflow + # via tensorflow fonttools==4.54.1 # via matplotlib fsspec==2024.9.0 @@ -124,7 +120,6 @@ ml-dtypes==0.3.2 numpy==1.26.4 # via # -r mediapipe/opensource_only/model_maker_requirements_bazel.txt - # ai-edge-litert-nightly # contourpy # etils # h5py diff --git a/mediapipe/modules/face_detection/README.md b/mediapipe/modules/face_detection/README.md index 17cf27b5f4..19c8e7212b 100644 --- a/mediapipe/modules/face_detection/README.md +++ b/mediapipe/modules/face_detection/README.md @@ -2,7 +2,7 @@ Subgraphs|Details :--- | :--- -[`FaceDetectionFullRangeCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_full_range_cpu.pbtxt)| Detects faces. Works best for faces within 5 meters from the camera. (CPU input, and inference is executed on CPU.) -[`FaceDetectionFullRangeGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_full_range_gpu.pbtxt)| Detects faces. Works best for faces within 5 meters from the camera. (GPU input, and inference is executed on GPU.) -[`FaceDetectionShortRangeCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_cpu.pbtxt)| Detects faces. Works best for faces within 2 meters from the camera. (CPU input, and inference is executed on CPU.) 
-[`FaceDetectionShortRangeGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt)| Detects faces. Works best for faces within 2 meters from the camera. (GPU input, and inference is executed on GPU.) +[`FaceDetectionFullRangeCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_full_range_cpu.pbtxt)| Detects faces. Works best for faces within 5 meters from the camera. (CPU input, and inference is executed on CPU.) +[`FaceDetectionFullRangeGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_full_range_gpu.pbtxt)| Detects faces. Works best for faces within 5 meters from the camera. (GPU input, and inference is executed on GPU.) +[`FaceDetectionShortRangeCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_cpu.pbtxt)| Detects faces. Works best for faces within 2 meters from the camera. (CPU input, and inference is executed on CPU.) +[`FaceDetectionShortRangeGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt)| Detects faces. Works best for faces within 2 meters from the camera. (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/modules/face_geometry/BUILD b/mediapipe/modules/face_geometry/BUILD index 710747e361..6cb45b2851 100644 --- a/mediapipe/modules/face_geometry/BUILD +++ b/mediapipe/modules/face_geometry/BUILD @@ -82,6 +82,7 @@ cc_library( "//mediapipe/util:resource_util", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", "@com_google_absl//absl/types:optional", ], alwayslink = 1, diff --git a/mediapipe/modules/face_geometry/README.md b/mediapipe/modules/face_geometry/README.md index 649d0a853c..cf04aadda6 100644 --- a/mediapipe/modules/face_geometry/README.md +++ b/mediapipe/modules/face_geometry/README.md @@ -2,19 +2,19 @@ Protos|Details :--- | :--- -[`face_geometry.Environment`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/environment.proto)| Describes an environment; includes the camera frame origin point location as well as virtual camera parameters. -[`face_geometry.GeometryPipelineMetadata`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.proto)| Describes metadata needed to estimate face 3D transform based on the face landmark module result. -[`face_geometry.FaceGeometry`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/face_geometry.proto)| Describes 3D transform data for a single face; includes a face mesh surface and a face pose in a given environment. -[`face_geometry.Mesh3d`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/mesh_3d.proto)| Describes a 3D mesh triangular surface. +[`face_geometry.Environment`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/environment.proto)| Describes an environment; includes the camera frame origin point location as well as virtual camera parameters. 
+[`face_geometry.GeometryPipelineMetadata`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.proto)| Describes metadata needed to estimate face 3D transform based on the face landmark module result. +[`face_geometry.FaceGeometry`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/face_geometry.proto)| Describes 3D transform data for a single face; includes a face mesh surface and a face pose in a given environment. +[`face_geometry.Mesh3d`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/mesh_3d.proto)| Describes a 3D mesh triangular surface. Calculators|Details :--- | :--- -[`FaceGeometryEnvGeneratorCalculator`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/env_generator_calculator.cc)| Generates an environment that describes a virtual scene. -[`FaceGeometryPipelineCalculator`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc)| Extracts face 3D transform for multiple faces from a vector of landmark lists. -[`FaceGeometryEffectRendererCalculator`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/effect_renderer_calculator.cc)| Renders a face effect. +[`FaceGeometryEnvGeneratorCalculator`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/env_generator_calculator.cc)| Generates an environment that describes a virtual scene. +[`FaceGeometryPipelineCalculator`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc)| Extracts face 3D transform for multiple faces from a vector of landmark lists. +[`FaceGeometryEffectRendererCalculator`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/effect_renderer_calculator.cc)| Renders a face effect. Subgraphs|Details :--- | :--- -[`FaceGeometryFromDetection`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_detection.pbtxt)| Extracts 3D transform from face detection for multiple faces. -[`FaceGeometryFromLandmarks`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt)| Extracts 3D transform from face landmarks for multiple faces. -[`FaceGeometry`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry.pbtxt)| Extracts 3D transform from face landmarks for multiple faces. Deprecated, please use `FaceGeometryFromLandmarks` in the new code. +[`FaceGeometryFromDetection`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_detection.pbtxt)| Extracts 3D transform from face detection for multiple faces. +[`FaceGeometryFromLandmarks`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt)| Extracts 3D transform from face landmarks for multiple faces. +[`FaceGeometry`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry.pbtxt)| Extracts 3D transform from face landmarks for multiple faces. Deprecated, please use `FaceGeometryFromLandmarks` in the new code. 
diff --git a/mediapipe/modules/face_geometry/effect_renderer_calculator.cc b/mediapipe/modules/face_geometry/effect_renderer_calculator.cc index f48e130498..3a7eebdbcb 100644 --- a/mediapipe/modules/face_geometry/effect_renderer_calculator.cc +++ b/mediapipe/modules/face_geometry/effect_renderer_calculator.cc @@ -20,6 +20,7 @@ #include "absl/status/status.h" #include "absl/status/statusor.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" @@ -256,9 +257,10 @@ class EffectRendererCalculator : public CalculatorBase { MP_ASSIGN_OR_RETURN(std::unique_ptr mesh_3d_blob, ReadContentBlobFromFile(cc, mesh_3d_path), _ << "Failed to read mesh 3D blob from file!"); + absl::string_view mesh_str = mesh_3d_blob->ToStringView(); face_geometry::Mesh3d mesh_3d; - RET_CHECK(mesh_3d.ParseFromString(mesh_3d_blob->ToStringView())) + RET_CHECK(mesh_3d.ParseFromArray(mesh_str.data(), mesh_str.size())) << "Failed to parse a mesh 3D proto from a binary blob!"; return mesh_3d; diff --git a/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc b/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc index 35efb005a7..cad22dd21f 100644 --- a/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc +++ b/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc @@ -169,7 +169,8 @@ class GeometryPipelineCalculator : public CalculatorBase { _ << "Failed to read a metadata blob from file!"); face_geometry::GeometryPipelineMetadata metadata; - RET_CHECK(metadata.ParseFromString(metadata_blob->ToStringView())) + absl::string_view metadata_str = metadata_blob->ToStringView(); + RET_CHECK(metadata.ParseFromArray(metadata_str.data(), metadata_str.size())) << "Failed to parse a metadata proto from a binary blob!"; return metadata; diff --git a/mediapipe/modules/face_landmark/README.md b/mediapipe/modules/face_landmark/README.md index eed21a2d7c..205c90a28a 100644 --- a/mediapipe/modules/face_landmark/README.md +++ b/mediapipe/modules/face_landmark/README.md @@ -2,8 +2,8 @@ Subgraphs|Details :--- | :--- -[`FaceLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_cpu.pbtxt)| Detects landmarks on a single face. (CPU input, and inference is executed on CPU.) -[`FaceLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_gpu.pbtxt)| Detects landmarks on a single face. (GPU input, and inference is executed on GPU) -[`FaceLandmarkFrontCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_cpu.pbtxt)| Detects and tracks landmarks on multiple faces. (CPU input, and inference is executed on CPU) -[`FaceLandmarkFrontGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt)| Detects and tracks landmarks on multiple faces. (GPU input, and inference is executed on GPU.) +[`FaceLandmarkCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_cpu.pbtxt)| Detects landmarks on a single face. (CPU input, and inference is executed on CPU.) +[`FaceLandmarkGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_gpu.pbtxt)| Detects landmarks on a single face. 
(GPU input, and inference is executed on GPU) +[`FaceLandmarkFrontCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_cpu.pbtxt)| Detects and tracks landmarks on multiple faces. (CPU input, and inference is executed on CPU) +[`FaceLandmarkFrontGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt)| Detects and tracks landmarks on multiple faces. (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/modules/hand_landmark/README.md b/mediapipe/modules/hand_landmark/README.md index 31fe6f720f..62867097fa 100644 --- a/mediapipe/modules/hand_landmark/README.md +++ b/mediapipe/modules/hand_landmark/README.md @@ -2,7 +2,7 @@ Subgraphs|Details :--- | :--- -[`HandLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_cpu.pbtxt)| Detects landmarks of a single hand. (CPU input.) -[`HandLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_gpu.pbtxt)| Detects landmarks of a single hand. (GPU input.) -[`HandLandmarkTrackingCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu.pbtxt)| Detects and tracks landmarks of multiple hands. (CPU input.) -[`HandLandmarkTrackingGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt)| Detects and tracks landmarks of multiple hands. (GPU input.) +[`HandLandmarkCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_cpu.pbtxt)| Detects landmarks of a single hand. (CPU input.) +[`HandLandmarkGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_gpu.pbtxt)| Detects landmarks of a single hand. (GPU input.) +[`HandLandmarkTrackingCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu.pbtxt)| Detects and tracks landmarks of multiple hands. (CPU input.) +[`HandLandmarkTrackingGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt)| Detects and tracks landmarks of multiple hands. (GPU input.) diff --git a/mediapipe/modules/holistic_landmark/README.md b/mediapipe/modules/holistic_landmark/README.md index d285f155a4..eebe5a5001 100644 --- a/mediapipe/modules/holistic_landmark/README.md +++ b/mediapipe/modules/holistic_landmark/README.md @@ -2,5 +2,5 @@ Subgraphs|Details :--- | :--- -[`HolisticLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_cpu.pbtxt)| Predicts pose + left/right hand + face landmarks. (CPU input) -[`HolisticLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt)| Predicts pose + left/right hand + face landmarks. (GPU input.) +[`HolisticLandmarkCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_cpu.pbtxt)| Predicts pose + left/right hand + face landmarks. (CPU input) +[`HolisticLandmarkGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt)| Predicts pose + left/right hand + face landmarks. (GPU input.) 
diff --git a/mediapipe/modules/iris_landmark/README.md b/mediapipe/modules/iris_landmark/README.md index f99fceef14..97e0af919d 100644 --- a/mediapipe/modules/iris_landmark/README.md +++ b/mediapipe/modules/iris_landmark/README.md @@ -2,7 +2,7 @@ Subgraphs|Details :--- | :--- -[`IrisLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_cpu.pbtxt)| Detects iris landmarks for left or right eye. (CPU input, and inference is executed on CPU.) -[`IrisLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_gpu.pbtxt)| Detects iris landmarks for left or right eye. (GPU input, and inference is executed on GPU) -[`IrisLandmarkLeftAndRightCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_cpu.pbtxt)| Detects iris landmarks for both left and right eyes. (CPU input, and inference is executed on CPU) -[`IrisLandmarkLeftAndRightGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_gpu.pbtxt)| Detects iris landmarks for both left and right eyes. (GPU input, and inference is executed on GPU.) +[`IrisLandmarkCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_cpu.pbtxt)| Detects iris landmarks for left or right eye. (CPU input, and inference is executed on CPU.) +[`IrisLandmarkGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_gpu.pbtxt)| Detects iris landmarks for left or right eye. (GPU input, and inference is executed on GPU) +[`IrisLandmarkLeftAndRightCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_cpu.pbtxt)| Detects iris landmarks for both left and right eyes. (CPU input, and inference is executed on CPU) +[`IrisLandmarkLeftAndRightGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_gpu.pbtxt)| Detects iris landmarks for both left and right eyes. (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/modules/objectron/README.md b/mediapipe/modules/objectron/README.md index 00883fe3fd..51b9375d21 100644 --- a/mediapipe/modules/objectron/README.md +++ b/mediapipe/modules/objectron/README.md @@ -2,5 +2,5 @@ Subgraphs|Details :--- | :--- -[`ObjectronCpuSubgraph`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/objectron_cpu.pbtxt)| Detects and tracks 3D bounding boxes for objects. (CPU input, and inference is executed on CPU.) -[`ObjectronGpuSubgraph`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/objectron_gpu.pbtxt)| Detects and tracks 3D bounding boxes for objects. (GPU input, and inference is executed on GPU.) +[`ObjectronCpuSubgraph`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/objectron/objectron_cpu.pbtxt)| Detects and tracks 3D bounding boxes for objects. (CPU input, and inference is executed on CPU.) +[`ObjectronGpuSubgraph`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/objectron/objectron_gpu.pbtxt)| Detects and tracks 3D bounding boxes for objects. (GPU input, and inference is executed on GPU.) 
diff --git a/mediapipe/modules/objectron/calculators/epnp.cc b/mediapipe/modules/objectron/calculators/epnp.cc index 03b78c7284..eed3883c23 100644 --- a/mediapipe/modules/objectron/calculators/epnp.cc +++ b/mediapipe/modules/objectron/calculators/epnp.cc @@ -99,7 +99,7 @@ absl::Status SolveEpnp(const float focal_x, const float focal_y, // Convert 2d point from `pixel coordinates` to `NDC coordinates`([-1, 1]) // following to the definitions in: - // https://google.github.io/mediapipe/solutions/objectron#ndc-space + // https://google-ai-edge.github.io/mediapipe/solutions/objectron#ndc-space // If portrait mode is been used, it's the caller's responsibility to // convert the input 2d points' coordinates. float x_ndc, y_ndc; diff --git a/mediapipe/modules/palm_detection/README.md b/mediapipe/modules/palm_detection/README.md index c7fd610604..4e1315d67e 100644 --- a/mediapipe/modules/palm_detection/README.md +++ b/mediapipe/modules/palm_detection/README.md @@ -2,6 +2,6 @@ Subgraphs|Details :--- | :--- -[`PalmDetectionCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_cpu.pbtxt)| Detects palms/hands. (CPU input.) -[`PalmDetectionGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_gpu.pbtxt)| Detects palms/hands. (GPU input.) +[`PalmDetectionCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_cpu.pbtxt)| Detects palms/hands. (CPU input.) +[`PalmDetectionGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_gpu.pbtxt)| Detects palms/hands. (GPU input.) diff --git a/mediapipe/modules/pose_detection/README.md b/mediapipe/modules/pose_detection/README.md index e2e3b2f24b..5024c22e81 100644 --- a/mediapipe/modules/pose_detection/README.md +++ b/mediapipe/modules/pose_detection/README.md @@ -2,6 +2,6 @@ Subgraphs|Details :--- | :--- -[`PoseDetectionCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection_cpu.pbtxt)| Detects poses. (CPU input, and inference is executed on CPU.) -[`PoseDetectionGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection_gpu.pbtxt)| Detects poses. (GPU input, and inference is executed on GPU.) +[`PoseDetectionCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection_cpu.pbtxt)| Detects poses. (CPU input, and inference is executed on CPU.) +[`PoseDetectionGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection_gpu.pbtxt)| Detects poses. (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/modules/pose_landmark/README.md b/mediapipe/modules/pose_landmark/README.md index 57528382ac..53804563c2 100644 --- a/mediapipe/modules/pose_landmark/README.md +++ b/mediapipe/modules/pose_landmark/README.md @@ -2,7 +2,7 @@ Subgraphs|Details :--- | :--- -[`PoseLandmarkByRoiCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_by_roi_cpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (CPU input, and inference is executed on CPU.) 
-[`PoseLandmarkByRoiGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_by_roi_gpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (GPU input, and inference is executed on GPU) -[`PoseLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_cpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (CPU input, and inference is executed on CPU) -[`PoseLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (GPU input, and inference is executed on GPU.) +[`PoseLandmarkByRoiCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_by_roi_cpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (CPU input, and inference is executed on CPU.) +[`PoseLandmarkByRoiGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_by_roi_gpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (GPU input, and inference is executed on GPU) +[`PoseLandmarkCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_cpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (CPU input, and inference is executed on CPU) +[`PoseLandmarkGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/modules/selfie_segmentation/README.md b/mediapipe/modules/selfie_segmentation/README.md index cd6c5e044f..0257b01be5 100644 --- a/mediapipe/modules/selfie_segmentation/README.md +++ b/mediapipe/modules/selfie_segmentation/README.md @@ -2,5 +2,5 @@ Subgraphs|Details :--- | :--- -[`SelfieSegmentationCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu.pbtxt)| Segments the person from background in a selfie image. (CPU input, and inference is executed on CPU.) -[`SelfieSegmentationGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu.pbtxt)| Segments the person from background in a selfie image. (GPU input, and inference is executed on GPU.) 
+[`SelfieSegmentationCpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu.pbtxt)| Segments the person from background in a selfie image. (CPU input, and inference is executed on CPU.) +[`SelfieSegmentationGpu`](https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu.pbtxt)| Segments the person from background in a selfie image. (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/python/packet_creator.py b/mediapipe/python/packet_creator.py index 4fff44be4d..1ad6ac42e3 100644 --- a/mediapipe/python/packet_creator.py +++ b/mediapipe/python/packet_creator.py @@ -51,7 +51,7 @@ create_matrix = _packet_creator.create_matrix -def create_image_frame(data: Union[image_frame.ImageFrame, np.ndarray], +def create_image_frame(data: Union[image_frame.ImageFrame, np.ndarray], # pytype: disable=annotation-type-mismatch *, image_format: image_frame.ImageFormat = None, copy: bool = None) -> packet.Packet: @@ -149,7 +149,7 @@ def create_image_frame(data: Union[image_frame.ImageFrame, np.ndarray], # pylint:enable=protected-access -def create_image(data: Union[image.Image, np.ndarray], +def create_image(data: Union[image.Image, np.ndarray], # pytype: disable=annotation-type-mismatch *, image_format: image_frame.ImageFormat = None, copy: bool = None) -> packet.Packet: diff --git a/mediapipe/tasks/c/text/text_embedder/text_embedder_test.cc b/mediapipe/tasks/c/text/text_embedder/text_embedder_test.cc index 951237896f..9701aa6c9d 100644 --- a/mediapipe/tasks/c/text/text_embedder/text_embedder_test.cc +++ b/mediapipe/tasks/c/text/text_embedder/text_embedder_test.cc @@ -91,7 +91,7 @@ TEST(TextEmbedderTest, SucceedsWithCosineSimilarity) { double similarity; text_embedder_cosine_similarity(&result0.embeddings[0], &result1.embeddings[0], &similarity, nullptr); - double expected_similarity = 0.98513 + 0.00512937; + double expected_similarity = 0.98103; EXPECT_LE(abs(similarity - expected_similarity), kPrecision); text_embedder_close_result(&result0); diff --git a/mediapipe/tasks/cc/genai/inference/c/BUILD b/mediapipe/tasks/cc/genai/inference/c/BUILD index 92f9f15216..3db359f473 100644 --- a/mediapipe/tasks/cc/genai/inference/c/BUILD +++ b/mediapipe/tasks/cc/genai/inference/c/BUILD @@ -28,12 +28,13 @@ cc_library( hdrs = ["llm_inference_engine.h"], tags = ["swift_module=MediaPipeTasksGenAIC"], deps = [ + "//mediapipe/framework/deps:file_path", "//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc/core:model_asset_bundle_resources", "//mediapipe/tasks/cc/genai/inference/proto:llm_params_cc_proto", "//mediapipe/tasks/cc/genai/inference/proto:transformer_params_cc_proto", - "//mediapipe/tasks/cc/genai/inference/utils/llm_utils:memory_mapped_file", "//mediapipe/tasks/cc/genai/inference/utils/llm_utils:metadata_utils", "//mediapipe/tasks/cc/genai/inference/utils/llm_utils:model_data", "//mediapipe/tasks/cc/genai/inference/utils/llm_utils:scoped_file", @@ -41,6 +42,8 @@ cc_library( "//mediapipe/tasks/cc/genai/inference/utils/xnn_utils:llm", "//mediapipe/tasks/cc/genai/inference/utils/xnn_utils:llm_builder_factory", "//mediapipe/tasks/cc/genai/inference/utils/xnn_utils:llm_weights", + "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", 
"@com_google_absl//absl/status", @@ -49,6 +52,10 @@ cc_library( "@com_google_absl//absl/strings:string_view", "@com_google_sentencepiece//:sentencepiece_processor", "@org_tensorflow//tensorflow/lite:framework_stable", + "@org_tensorflow//tensorflow/lite/c:common", + "@org_tensorflow//tensorflow/lite/delegates/xnnpack:xnnpack_delegate_hdrs_only", + "@org_tensorflow//tensorflow/lite/experimental/genai:genai_ops", + "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", ], ) diff --git a/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine.h b/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine.h index b4d33cb618..91f9a08c70 100644 --- a/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine.h +++ b/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine.h @@ -58,6 +58,12 @@ typedef struct { // Path to the model artifact. const char* model_path; + // Path to the vision encoder to use for vision modality. Optional. + const char* vision_encoder_path; + + // Path to the vision adapter to use for vision modality. Optional. + const char* vision_adapter_path; + // Directory path for storing model related tokenizer and cache weights. the // user is responsible for providing the directory that can be writable by the // program. @@ -121,6 +127,13 @@ typedef struct { // Path to the LoRA tflite flatbuffer file. Optional. // This is only compatible with GPU models. const char* lora_path; + + // Whether to configure the graph to include the token cost calculator, + // which allows users to only compute the cost of a prompt. + bool include_token_cost_calculator; + + // Whether to configure the graph to include the vision modality. + bool enable_vision_modality; } LlmSessionConfig; // LlmResponseContext is the return type for @@ -166,6 +179,11 @@ ODML_EXPORT void LlmInferenceEngine_Session_Delete( ODML_EXPORT int LlmInferenceEngine_Session_AddQueryChunk( LlmInferenceEngine_Session* session, const char* input, char** error_msg); +// Adds an SKBitmap to the session. +ODML_EXPORT int LlmInferenceEngine_Session_AddImage( + LlmInferenceEngine_Session* session, const void* sk_bitmap, + char** error_msg); + // Return the generated output based on the previously added query chunks in // sync mode. 
// Return the generated output based on the previously added query chunks in // sync mode. ODML_EXPORT LlmResponseContext diff --git a/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu.cc b/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu.cc index aaa7cba2b4..7c1e5bd233 100644 --- a/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu.cc +++ b/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu.cc @@ -14,27 +14,37 @@ #include +#include +#include #include #include +#include #include +#include #include #include #include +#include #include +#include "absl/container/flat_hash_map.h" +#include "absl/container/flat_hash_set.h" #include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" +#include "absl/strings/match.h" #include "absl/strings/str_cat.h" +#include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" +#include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/core/model_asset_bundle_resources.h" #include "mediapipe/tasks/cc/genai/inference/c/llm_inference_engine.h" #include "mediapipe/tasks/cc/genai/inference/proto/llm_params.pb.h" #include "mediapipe/tasks/cc/genai/inference/proto/transformer_params.pb.h" -#include "mediapipe/tasks/cc/genai/inference/utils/llm_utils/memory_mapped_file.h" #include "mediapipe/tasks/cc/genai/inference/utils/llm_utils/metadata_utils.h" #include "mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.h" #include "mediapipe/tasks/cc/genai/inference/utils/llm_utils/scoped_file.h" @@ -42,55 +52,162 @@ #include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm.h" #include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_builder_factory.h" #include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.h" -#include "sentencepiece/src/normalizer.h" // from @com_google_sentencepiece #include "sentencepiece/src/sentencepiece_processor.h" // from @com_google_sentencepiece +#include "sentencepiece/src/util.h" // from @com_google_sentencepiece +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/delegates/xnnpack/xnnpack_delegate.h" +#include "tensorflow/lite/experimental/genai/genai_ops.h" +#include "tensorflow/lite/interpreter.h" +#include "tensorflow/lite/interpreter_builder.h" +#include "tensorflow/lite/kernels/register.h" #include "tensorflow/lite/model_builder.h" namespace { constexpr int kCheckLastKChars = 10; +struct TfLiteLlm { + std::unique_ptr<tflite::Interpreter> interpreter; + std::unique_ptr<mediapipe::tasks::core::ModelAssetBundleResources> resources; +}; + struct LlmInferenceEngineCpu_Engine { - sentencepiece::SentencePieceProcessor* tokenizer; - sentencepiece::normalizer::Normalizer* normalizer; - mediapipe::tasks::genai::xnn_utils::Llm* llm; - int start_token_id; - std::vector<std::string> stop_tokens; - size_t max_num_tokens; + const sentencepiece::SentencePieceProcessor* tokenizer; + const absl::flat_hash_map<char, int>* bytes_to_unicode_mapper; + const absl::flat_hash_map<int, char>* unicode_to_bytes_mapper; + const std::variant<mediapipe::tasks::genai::xnn_utils::Llm*, TfLiteLlm*> llm; + const int start_token_id; + const std::vector<std::string> stop_tokens; + const size_t max_num_tokens; + ~LlmInferenceEngineCpu_Engine() { delete tokenizer; - if (normalizer != nullptr) { - delete normalizer; + delete bytes_to_unicode_mapper; + delete unicode_to_bytes_mapper; + if (std::holds_alternative<mediapipe::tasks::genai::xnn_utils::Llm*>(llm)) { + delete std::get<mediapipe::tasks::genai::xnn_utils::Llm*>(llm); + } else { + delete std::get<TfLiteLlm*>(llm); } - delete llm; }; }; struct LlmInferenceEngineCpu_Session { const LlmInferenceEngineCpu_Engine* engine; std::string prompt; - int max_num_output_tokens; - int response_count; + int timestep; std::string last_10_char; std::string final_output; std::function<void(std::string)> cpu_callback; bool early_stop; pthread_t work_id; + int next_token_id; ~LlmInferenceEngineCpu_Session() { pthread_join(work_id, nullptr); }; };
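// The bytes_to_unicode_mapper/unicode_to_bytes_mapper tables in the engine
// struct implement the GPT-2 byte-level BPE convention: every byte must
// round-trip through the tokenizer's unicode vocabulary. A minimal
// self-contained sketch of the same table (std::map is used here only to keep
// the sketch dependency-free):
#include <map>

std::map<int, int> BuildGpt2ByteToUnicode() {
  std::map<int, int> m;
  for (int i = 33; i <= 126; ++i) m[i] = i;   // printable "!".."~" keep their code point
  for (int i = 161; i <= 172; ++i) m[i] = i;  // "¡".."¬"
  for (int i = 174; i <= 255; ++i) m[i] = i;  // "®".."ÿ"
  int n = 0;
  for (int b = 0; b < 256; ++b)
    if (m.find(b) == m.end()) m[b] = 256 + n++;  // remaining bytes -> U+0100 + n
  return m;
}

// Spot checks: space (0x20) maps to U+0120 ('Ġ') and newline (0x0A) to U+010A
// ('Ċ'), the printable placeholders familiar from GPT-2 style vocabularies.
//   auto m = BuildGpt2ByteToUnicode();
//   assert(m[0x20] == 0x0120 && m[0x0A] == 0x010A);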
+absl::StatusOr<std::unique_ptr<absl::flat_hash_map<char, int>>> +CreateBytesToUnicodeMapper() { + auto bytes_to_unicode_mapper = + std::make_unique<absl::flat_hash_map<char, int>>(); + // "!" - "~" + for (int i = 33; i <= 126; i++) { + bytes_to_unicode_mapper->insert({static_cast<char>(i), i}); + } + // "¡" - "¬" + for (int i = 161; i <= 172; i++) { + bytes_to_unicode_mapper->insert({static_cast<char>(i), i}); + } + // "®" - "ÿ" + for (int i = 174; i < 256; i++) { + bytes_to_unicode_mapper->insert({static_cast<char>(i), i}); + } + int n = 0; + for (int b = 0; b < 256; b++) { + if (!bytes_to_unicode_mapper->contains(static_cast<char>(b))) { + bytes_to_unicode_mapper->insert({static_cast<char>(b), 256 + n}); + n += 1; + } + } + return bytes_to_unicode_mapper; +} + +absl::StatusOr<std::unique_ptr<absl::flat_hash_map<int, char>>> +CreateUnicodeToBytesMapper() { + MP_ASSIGN_OR_RETURN(auto bytes_to_unicode_mapper, + CreateBytesToUnicodeMapper()); + auto unicode_to_bytes_mapper = + std::make_unique<absl::flat_hash_map<int, char>>(); + for (const auto& [key, value] : *bytes_to_unicode_mapper) { + unicode_to_bytes_mapper->insert({value, key}); + } + return unicode_to_bytes_mapper; +} + +std::string MapBytesToUnicode( + absl::string_view prompt, + const absl::flat_hash_map<char, int>* bytes_to_unicode_mapper) { + std::string converted_prompt = ""; + for (const uint8_t byte : prompt) { + converted_prompt.append(sentencepiece::string_util::UnicodeCharToUTF8( + bytes_to_unicode_mapper->at(byte))); + } + return converted_prompt; +} + +std::string MapUnicodeToBytes( + absl::string_view output, + const absl::flat_hash_map<int, char>* unicode_to_bytes_mapper) { + sentencepiece::string_util::UnicodeText unicode_text = + sentencepiece::string_util::UTF8ToUnicodeText(output); + std::string converted_output = ""; + for (const int code_point : unicode_text) { + if (!unicode_to_bytes_mapper->contains(code_point)) { + converted_output += code_point; + } else { + converted_output += unicode_to_bytes_mapper->at(code_point); + } + } + return converted_output; +} + void* next_token_function(void* args) { struct LlmInferenceEngineCpu_Session* cpu_session = (struct LlmInferenceEngineCpu_Session*)args; - if (cpu_session->response_count++ < cpu_session->max_num_output_tokens) { + if (cpu_session->timestep < cpu_session->engine->max_num_tokens) { if (cpu_session->early_stop) { return nullptr; } auto token_ids_per_step = std::vector<int>(); - auto status = cpu_session->engine->llm->GetNextToken(&token_ids_per_step); - if (!status.ok()) { - ABSL_LOG(FATAL) << "Failed to generate output: " << status; + if (std::holds_alternative<mediapipe::tasks::genai::xnn_utils::Llm*>( + cpu_session->engine->llm)) { + auto status = std::get<mediapipe::tasks::genai::xnn_utils::Llm*>( + cpu_session->engine->llm) + ->GetNextToken(&token_ids_per_step); + if (!status.ok()) { + ABSL_LOG(FATAL) << "Failed to generate output: " << status; + } + } else { + auto llm = std::get<TfLiteLlm*>(cpu_session->engine->llm); + auto* decode_runner = llm->interpreter->GetSignatureRunner("decode"); + ABSL_CHECK_EQ(decode_runner->AllocateTensors(), kTfLiteOk); + TfLiteTensor* decode_input = decode_runner->input_tensor("args_0"); + TfLiteTensor* decode_input_pos = decode_runner->input_tensor("args_1"); + decode_input->data.i64[0] = + static_cast<int64_t>(cpu_session->next_token_id); + decode_input_pos->data.i64[0] = + static_cast<int64_t>(cpu_session->timestep); + + // logits->dims->data[0] = batch size + // logits->dims->data[1] = sequence length + // logits->dims->data[2] = vocab size + const TfLiteTensor* logits = decode_runner->output_tensor("output_0"); + + ABSL_CHECK_EQ(decode_runner->Invoke(), kTfLiteOk); + + auto max_logit_it = std::max_element( + logits->data.f, logits->data.f + logits->dims->data[2]); + token_ids_per_step.push_back(std::distance(logits->data.f, max_logit_it)); }
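// The decode step above is greedy: the next token is simply the arg-max over
// the last logits row, with no temperature or sampling on this CPU path. The
// same selection, distilled into a standalone helper (assumed layout:
// vocab_size contiguous floats):
#include <algorithm>
#include <cstddef>
#include <iterator>

int PickNextTokenGreedy(const float* logits, size_t vocab_size) {
  // The index of the largest logit is the chosen token id.
  return static_cast<int>(
      std::distance(logits, std::max_element(logits, logits + vocab_size)));
}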
TfLiteTensor* logits = decode_runner->output_tensor("output_0"); + + ABSL_CHECK_EQ(decode_runner->Invoke(), kTfLiteOk); + + auto max_logit_it = std::max_element( + logits->data.f, logits->data.f + logits->dims->data[2]); + token_ids_per_step.push_back(std::distance(logits->data.f, max_logit_it)); } // For future multithreading support. @@ -98,14 +215,19 @@ void* next_token_function(void* args) { return nullptr; } - if (cpu_session->response_count == cpu_session->max_num_output_tokens) { + if (cpu_session->timestep >= cpu_session->engine->max_num_tokens) { cpu_session->early_stop = true; } + cpu_session->next_token_id = token_ids_per_step[0]; + std::string token = cpu_session->engine->tokenizer->IdToPiece(token_ids_per_step[0]); - if (cpu_session->engine->normalizer != nullptr) { - token = cpu_session->engine->normalizer->Normalize(token); + if (cpu_session->engine->unicode_to_bytes_mapper != nullptr) { + token = MapUnicodeToBytes(token, + cpu_session->engine->unicode_to_bytes_mapper); + } else { + token = absl::StrReplaceAll(token, {{"▁", " "}}); } cpu_session->last_10_char.append(token); @@ -133,6 +255,8 @@ void* next_token_function(void* args) { cpu_session->cpu_callback(ready_char); + ++cpu_session->timestep; + next_token_function(args); } return nullptr; @@ -144,27 +268,55 @@ void* start_llm_function(void* args) { std::vector prompt_ids = {}; - auto status = - cpu_session->engine->tokenizer->Encode(cpu_session->prompt, &prompt_ids); + std::string prompt; + if (cpu_session->engine->bytes_to_unicode_mapper != nullptr) { + prompt = MapBytesToUnicode(cpu_session->prompt, + cpu_session->engine->bytes_to_unicode_mapper); + } else { + prompt = cpu_session->prompt; + } + + auto status = cpu_session->engine->tokenizer->Encode(prompt, &prompt_ids); if (!status.ok()) { ABSL_LOG(FATAL) << "Failed to encode input: " << status; } prompt_ids.insert(prompt_ids.begin(), cpu_session->engine->start_token_id); - ABSL_CHECK_OK(cpu_session->engine->llm->SeekTimeStep(0)); - ABSL_CHECK_OK(cpu_session->engine->llm->AddInputTokens({prompt_ids})); + if (std::holds_alternative( + cpu_session->engine->llm)) { + auto llm = std::get( + cpu_session->engine->llm); + ABSL_CHECK_OK(llm->SeekTimeStep(0)); + ABSL_CHECK_OK(llm->AddInputTokens({prompt_ids})); + } else { + auto llm = std::get(cpu_session->engine->llm); + auto* prefill_runner = llm->interpreter->GetSignatureRunner("prefill"); + + ABSL_CHECK_EQ(prefill_runner->AllocateTensors(), kTfLiteOk); + + TfLiteTensor* prefill_input = prefill_runner->input_tensor("args_0"); + TfLiteTensor* prefill_input_pos = prefill_runner->input_tensor("args_1"); + memset(prefill_input->data.data, 0, prefill_input->bytes); + memset(prefill_input_pos->data.data, 0, prefill_input_pos->bytes); + cpu_session->next_token_id = prompt_ids.back(); + prompt_ids.pop_back(); + for (int i = 0; i < prompt_ids.size(); ++i) { + prefill_input->data.i64[i] = static_cast(prompt_ids[i]); + prefill_input_pos->data.i64[i] = static_cast(i); + } + ABSL_CHECK_EQ(prefill_runner->Invoke(), kTfLiteOk); + } - cpu_session->max_num_output_tokens = - cpu_session->engine->max_num_tokens - prompt_ids.size(); + cpu_session->timestep = prompt_ids.size(); next_token_function(args); return nullptr; } -absl::StatusOr -LlmInferenceEngine_CreateEngine_Helper(const LlmModelSettings* model_settings) { +absl::StatusOr> +CreateXnnLlmCpuEngine(const LlmModelSettings* model_settings) { MP_ASSIGN_OR_RETURN(auto model_file, mediapipe::tasks::genai::llm_utils::ScopedFile::Open( model_settings->model_path)); @@ -199,7 +351,6 @@ 
-absl::StatusOr<LlmInferenceEngineCpu_Engine*> -LlmInferenceEngine_CreateEngine_Helper(const LlmModelSettings* model_settings) { +absl::StatusOr<std::unique_ptr<LlmInferenceEngineCpu_Engine>> +CreateXnnLlmCpuEngine(const LlmModelSettings* model_settings) { MP_ASSIGN_OR_RETURN(auto model_file, mediapipe::tasks::genai::llm_utils::ScopedFile::Open( model_settings->model_path)); @@ -199,7 +351,6 @@ LlmInferenceEngine_CreateEngine_Helper(const LlmModelSettings* model_settings) { MP_ASSIGN_OR_RETURN(auto spm_model_content, model_data->ReadMetadata("spm_vocab_model")); - model_data.reset(); llm_params.seq_size_T = model_settings->max_num_tokens; @@ -220,24 +371,24 @@ LlmInferenceEngine_CreateEngine_Helper(const LlmModelSettings* model_settings) { auto tokenizer = std::make_unique<sentencepiece::SentencePieceProcessor>(); MP_RETURN_IF_ERROR(tokenizer->LoadFromSerializedProto(spm_model_content)); - std::vector<int> prompt_ids; - auto status = tokenizer->Encode("hello", &prompt_ids); - - std::unique_ptr<sentencepiece::normalizer::Normalizer> normalizer; - if (tokenizer->model_proto().has_denormalizer_spec() && - tokenizer->model_proto().denormalizer_spec().has_precompiled_charsmap() && - !tokenizer->model_proto() - .denormalizer_spec() - .precompiled_charsmap() - .empty()) { - normalizer = std::make_unique<sentencepiece::normalizer::Normalizer>( - tokenizer->model_proto().denormalizer_spec()); + std::unique_ptr<absl::flat_hash_map<char, int>> + bytes_to_unicode_mapper; + std::unique_ptr<absl::flat_hash_map<int, char>> + unicode_to_bytes_mapper; + // These models use GPT-2 style unicode mapping, so an additional mapping is + // needed. + if (model_type == odml::infra::proto::LLM_MODEL_TYPE_STABLELM_4E1T_3B || + model_type == odml::infra::proto::LLM_MODEL_TYPE_FALCON_RW_1B || + model_type == odml::infra::proto::LLM_MODEL_TYPE_PHI_2) { + MP_ASSIGN_OR_RETURN(bytes_to_unicode_mapper, CreateBytesToUnicodeMapper()); + MP_ASSIGN_OR_RETURN(unicode_to_bytes_mapper, CreateUnicodeToBytesMapper()); } std::unique_ptr<LlmInferenceEngineCpu_Engine> engine( new LlmInferenceEngineCpu_Engine{ .tokenizer = tokenizer.release(), - .normalizer = normalizer.release(), + .bytes_to_unicode_mapper = bytes_to_unicode_mapper.release(), + .unicode_to_bytes_mapper = unicode_to_bytes_mapper.release(), .llm = llm.release(), .start_token_id = llm_params_proto.start_token_id(), .stop_tokens = @@ -246,6 +397,118 @@ LlmInferenceEngine_CreateEngine_Helper(const LlmModelSettings* model_settings) { .max_num_tokens = model_settings->max_num_tokens, }); + return engine; +} + +// Creates an inference engine from a *.task file. +// This method extracts the TF_LITE_PREFILL_DECODE, TOKENIZER_MODEL and METADATA +// files from the task bundle and initializes the TFLite XNNPack delegate.
+absl::StatusOr> +CreateTfliteLlmCpuEngine(const LlmModelSettings* model_settings) { + auto external_file = + std::make_unique(); + if (model_settings) { + external_file->set_file_name(model_settings->model_path); + } + MP_ASSIGN_OR_RETURN(auto resources, + mediapipe::tasks::core::ModelAssetBundleResources::Create( + "", std::move(external_file))); + const std::vector& files_list = resources->ListFiles(); + const absl::flat_hash_set files_set(files_list.begin(), + files_list.end()); + + std::unique_ptr interpreter; + if (!files_set.contains("TF_LITE_PREFILL_DECODE")) { + return absl::InvalidArgumentError("TF_LITE_PREFILL_DECODE not found."); + } + if (!files_set.contains("TOKENIZER_MODEL")) { + return absl::InvalidArgumentError("TOKENIZER_MODEL not found."); + } + if (!files_set.contains("METADATA")) { + return absl::InvalidArgumentError("METADATA not found."); + } + MP_ASSIGN_OR_RETURN(absl::string_view model_buffer, + resources->GetFile("TF_LITE_PREFILL_DECODE")); + MP_ASSIGN_OR_RETURN(absl::string_view tokenizer_buffer, + resources->GetFile("TOKENIZER_MODEL")); + MP_ASSIGN_OR_RETURN(absl::string_view params_buffer, + resources->GetFile("METADATA")); + auto model = tflite::FlatBufferModel::BuildFromBuffer(model_buffer.data(), + model_buffer.size()); + RET_CHECK(model) << "Failed to build TF_LITE_PREFILL_DECODE model."; + tflite::ops::builtin::BuiltinOpResolver resolver; + // NOTE: We need to manually register optimized OPs for KV-cache and + // Scaled Dot Product Attention (SDPA). + tflite::ops::custom::GenAIOpsRegisterer(&resolver); + tflite::InterpreterBuilder builder(*model, resolver); + RET_CHECK(model_settings); + builder(&interpreter); + RET_CHECK_NE(interpreter, nullptr); + + // RET_CHECK(model_settings->xnnpack_options.has_value()); + auto delegate_options = TfLiteXNNPackDelegateOptionsDefault(); + // Set the number of threads to 4 as default. + delegate_options.num_threads = 4; + // Compute the path for the cache file. 
+ std::string weight_cache_path = model_settings->cache_dir; + if (weight_cache_path != ":nocache") { + if (weight_cache_path.empty()) { + weight_cache_path = + absl::StrCat(model_settings->model_path, ".xnnpack_cache"); + } else { + weight_cache_path = mediapipe::file::JoinPath( + weight_cache_path, + absl::StrCat(mediapipe::file::Basename(model_settings->model_path), + ".xnnpack_cache")); + } + delegate_options.weight_cache_file_path = weight_cache_path.c_str(); + } + RET_CHECK_EQ(interpreter->ModifyGraphWithDelegate( + tflite::Interpreter::TfLiteDelegatePtr( + TfLiteXNNPackDelegateCreate(&delegate_options), + [](TfLiteDelegate* delegate) { + TfLiteXNNPackDelegateDelete(delegate); + })), + kTfLiteOk); + RET_CHECK_EQ(interpreter->SetNumThreads(4), kTfLiteOk); + + auto tflite_llm = std::make_unique( + TfLiteLlm{std::move(interpreter), std::move(resources)}); + + auto tokenizer = std::make_unique(); + MP_RETURN_IF_ERROR(tokenizer->LoadFromSerializedProto(tokenizer_buffer)); + + auto llm_parameters = odml::infra::proto::LlmParameters(); + RET_CHECK(llm_parameters.ParseFromArray(params_buffer.data(), + params_buffer.size())); + + auto start_token_id = tokenizer->PieceToId(llm_parameters.start_token()); + + std::unique_ptr engine( + new LlmInferenceEngineCpu_Engine{ + .tokenizer = tokenizer.release(), + .bytes_to_unicode_mapper = nullptr, + .unicode_to_bytes_mapper = nullptr, + .llm = tflite_llm.release(), + .start_token_id = start_token_id, + .stop_tokens = + std::vector(llm_parameters.stop_tokens().begin(), + llm_parameters.stop_tokens().end()), + .max_num_tokens = model_settings->max_num_tokens, + }); + + return engine; +} + +absl::StatusOr +LlmInferenceEngine_CreateEngine_Helper(const LlmModelSettings* model_settings) { + std::unique_ptr engine; + if (absl::EndsWith(model_settings->model_path, ".tflite")) { + MP_ASSIGN_OR_RETURN(engine, CreateXnnLlmCpuEngine(model_settings)); + } else { + MP_ASSIGN_OR_RETURN(engine, CreateTfliteLlmCpuEngine(model_settings)); + } + return engine.release(); } @@ -321,6 +584,13 @@ int LlmInferenceEngine_Session_AddQueryChunk( return 0; } +ODML_EXPORT int LlmInferenceEngine_Session_AddImage( + LlmInferenceEngine_Session* session, const void* sk_bitmap, + char** error_msg) { + *error_msg = strdup("Not implemented"); + return 12; +} + LlmResponseContext LlmInferenceEngine_Session_PredictSync( LlmInferenceEngine_Session* session) { LlmInferenceEngine_Session_PredictAsync( diff --git a/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu_main.cc b/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu_main.cc index a23468d868..3dc269d101 100644 --- a/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu_main.cc +++ b/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu_main.cc @@ -83,7 +83,7 @@ int main(int argc, char** argv) { absl::ParseCommandLine(argc, argv); ABSL_QCHECK(absl::GetFlag(FLAGS_model_path).has_value()) - << "--vocab_model is required."; + << "--model_path is required."; const std::string model_path = absl::GetFlag(FLAGS_model_path).value(); std::string cache_dir; if (absl::GetFlag(FLAGS_cache_dir).has_value()) { diff --git a/mediapipe/tasks/cc/genai/inference/calculators/llm_gpu_calculator.proto b/mediapipe/tasks/cc/genai/inference/calculators/llm_gpu_calculator.proto index 5c87657e69..5c18a5f3f2 100644 --- a/mediapipe/tasks/cc/genai/inference/calculators/llm_gpu_calculator.proto +++ b/mediapipe/tasks/cc/genai/inference/calculators/llm_gpu_calculator.proto @@ -67,4 +67,7 @@ message LlmGpuCalculatorOptions { 
reserved 30; proto.SamplerParameters sampler_params = 31; + + // Whether the audio model is loaded in streaming mode or not. + bool audio_streaming_enabled = 32; } diff --git a/mediapipe/tasks/cc/genai/inference/proto/llm_params.proto b/mediapipe/tasks/cc/genai/inference/proto/llm_params.proto index c5c268ddd5..e613198f50 100644 --- a/mediapipe/tasks/cc/genai/inference/proto/llm_params.proto +++ b/mediapipe/tasks/cc/genai/inference/proto/llm_params.proto @@ -27,7 +27,7 @@ enum LlmModelType { // Unknown LLM model type LLM_MODEL_TYPE_UNKNOWN = 0; - reserved 1, 2, 3, 4, 7, 9, 10, 13, 14, 15, 16, 17; + reserved 1, 2, 3, 4, 7, 9, 10, 13, 14, 15, 16, 17, 19; // FALCON RefinedWeb with 1B parameters. // https://huggingface.co/tiiuae/falcon-rw-1b @@ -94,4 +94,8 @@ message LlmParameters { optional string system_role_token = 12; optional string model_role_token = 13; optional string end_role_token = 14; + + // If this model includes a submodel, these params can be used to load the + // submodel. + optional TransformerParameters submodel_transformer_parameters = 16; } diff --git a/mediapipe/tasks/cc/genai/inference/proto/transformer_params.proto b/mediapipe/tasks/cc/genai/inference/proto/transformer_params.proto index 7b9bdf4d84..c87f758e82 100644 --- a/mediapipe/tasks/cc/genai/inference/proto/transformer_params.proto +++ b/mediapipe/tasks/cc/genai/inference/proto/transformer_params.proto @@ -194,4 +194,8 @@ message TransformerParameters { // Vision parameters int32 vision_tokens_num = 26; + + // The number of stacks that are treated as "extra", which may have slightly + // different loading behavior. + int32 num_extra_stacks = 27; } diff --git a/mediapipe/tasks/cc/genai/inference/utils/llm_utils/metadata_utils.h b/mediapipe/tasks/cc/genai/inference/utils/llm_utils/metadata_utils.h index f4e7abfd51..b8a421bae2 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/llm_utils/metadata_utils.h +++ b/mediapipe/tasks/cc/genai/inference/utils/llm_utils/metadata_utils.h @@ -27,6 +27,8 @@ constexpr absl::string_view kLlmModelTypeName = "odml.infra.LlmModelType"; constexpr absl::string_view kLlmBackendName = "backend"; constexpr absl::string_view kSpmVocabName = "spm_vocab_model"; constexpr absl::string_view kLoRARank = "lora_rank"; +constexpr absl::string_view kImageEncoder = "image_encoder"; +constexpr absl::string_view kImageAdapter = "image_adapter"; // Retrieve LlmModelType from tflite flatbuffer metadata. 
absl::StatusOr<odml::infra::proto::LlmModelType> GetLlmModelType( diff --git a/mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.cc b/mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.cc index 03f60f19f0..dc8472f50f 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.cc +++ b/mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.cc @@ -145,6 +145,20 @@ class TfliteModelData : public ModelData { return nullptr; } + absl::StatusOr<ModelWithData> ReadModel(absl::string_view name) override { + MP_ASSIGN_OR_RETURN(auto data, ReadTensor(name)); + if (!data) { + return ModelWithData{}; + } + auto model = tflite::FlatBufferModel::BuildFromBuffer( + reinterpret_cast<const char*>(data->GetData().data()), + data->GetData().size()); + return ModelWithData{ + .model = std::move(model), + .data = std::move(data), + }; + } + absl::Status InitLlmParameters() { MP_ASSIGN_OR_RETURN(std::string proto_str, ReadMetadata(llm_parameters_.GetTypeName())); diff --git a/mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.h b/mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.h index 69f616aa85..94371e7d50 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.h +++ b/mediapipe/tasks/cc/genai/inference/utils/llm_utils/model_data.h @@ -159,6 +159,14 @@ class ModelData { // Frees the underlying data. virtual void Clear() = 0; + + // Holds the tflite model as well as the backing data. + struct ModelWithData { + std::unique_ptr<tflite::FlatBufferModel> model; + std::unique_ptr<DataHolder<const char>> data; + }; + // Reads a tflite model from the main model. + virtual absl::StatusOr<ModelWithData> ReadModel(absl::string_view name) = 0; }; // Holds data referring to a set of LoRA weights. diff --git a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/BUILD b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/BUILD index de621718b9..b3fce26ff4 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/BUILD +++ b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/BUILD @@ -146,6 +146,8 @@ cc_library( "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", + "@org_tensorflow//tensorflow/compiler/mlir/lite/schema:schema_fbs", + "@org_tensorflow//tensorflow/lite:model_builder", ], ) diff --git a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.cc b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.cc index 384b1d9dcd..1d31e32e4e 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.cc +++ b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.cc @@ -484,11 +484,11 @@ absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::Slice( } absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::Slice( - std::shared_ptr<Tensor> input, size_t axis, size_t offset, size_t length) { + std::shared_ptr<Tensor> input, size_t axis, int64_t offset, size_t length) { const auto& input_dims = input->dims; - Tensor::DimsType offsets(input_dims.size(), 0); + std::vector<int64_t> offsets(input_dims.size(), 0); offsets[axis] = offset; - Tensor::DimsType output_dims = input_dims; + std::vector<size_t> output_dims = input_dims; output_dims[axis] = length; Tensor::DimsType inferrable_output_dims(input_dims.size(), 0); inferrable_output_dims[axis] = length; @@ -499,7 +499,7 @@ absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::Slice( build_steps_.push_back([input, output, offsets, inferrable_output_dims]( xnn_subgraph_t subgraph) -> absl::Status { RET_CHECK_EQ(xnn_status_success, - xnn_define_static_slice( + xnn_define_static_slice_v2( subgraph, offsets.size(), offsets.data(), inferrable_output_dims.data(), input->tensor_id(subgraph),
output->tensor_id(subgraph), /*flags=*/0)); @@ -1187,33 +1187,17 @@ absl::StatusOr> XnnGraphBuilder::Clamp( absl::StatusOr> XnnGraphBuilder::Gelu( std::shared_ptr input) { - // x^2 - MP_ASSIGN_OR_RETURN(auto sqr_out, Square(input)); - - // 0.044715 * x^2 - MP_ASSIGN_OR_RETURN(auto sqr_4471, ElementMul(sqr_out, 0.044715)); - - // 1 + 0.044715 * x^2 - MP_ASSIGN_OR_RETURN(auto sqr_4471_1, ElementAdd(sqr_4471, 1.0f)); - - // x + 0.044715 * x^3 - MP_ASSIGN_OR_RETURN(auto x_cube_4471, ElementMul(sqr_4471_1, input)); - - constexpr float sqrt_2_over_pi = 0.7978845608; - MP_ASSIGN_OR_RETURN(auto sqrt_2_over_pi_x_cube_4471, - ElementMul(x_cube_4471, sqrt_2_over_pi)); - - // tanh(x + 0.044715 * x^3) - MP_ASSIGN_OR_RETURN(auto tanh_x_cube_4471, Tanh(sqrt_2_over_pi_x_cube_4471)); - - // 1 + tanh(x + 0.044715 * x^3) - MP_ASSIGN_OR_RETURN(auto tanh_x_cube_4471_1, - ElementAdd(tanh_x_cube_4471, 1.0f)); - - // 0.5 * (1 + [tanh(x + 0.044715 * x^3)]) - MP_ASSIGN_OR_RETURN(auto cdf, ElementMul(tanh_x_cube_4471_1, 0.5)); - - return ElementMul(input, cdf); + MP_ASSIGN_OR_RETURN(auto output, + IntermediateTensor(input->dims, "gelu_output")); + build_steps_.push_back( + [output, input](xnn_subgraph_t subgraph) -> absl::Status { + RET_CHECK_EQ(xnn_status_success, + xnn_define_gelu(subgraph, input->tensor_id(subgraph), + output->tensor_id(subgraph), + /*flags=*/0)); + return absl::Status(); + }); + return output; } absl::StatusOr> XnnGraphBuilder::Sigmoid( diff --git a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.h b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.h index a7f1e0950f..909cae2d81 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.h +++ b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.h @@ -185,7 +185,7 @@ class XnnGraphBuilder { // dimensions unchanged. For instance, for input A = [B, M, N] and axis = 1, // the output slice would be [B, offset:offset+length, N]. absl::StatusOr> Slice(std::shared_ptr input, - size_t axis, size_t offset, + size_t axis, int64_t offset, size_t length); // Concatenate two input tensors along the provided axis. 
diff --git a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm.cc b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm.cc index 8ef3b92dce..e53197d288 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm.cc +++ b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm.cc @@ -17,6 +17,7 @@ #include #include #include +#include #include #include #include @@ -349,13 +350,6 @@ absl::Status Llm::AddInputTokens( transformer_input()->tensor_id(owned_subgraph_.get()), transformer_input()->dims.size(), transformer_input()->dims.data())); - logits_output()->Resize(Tensor::DimsType{ - batch_input_ids.size(), input_seq_len, llm_params_.voc_size_V}); - RET_CHECK_EQ( - xnn_status_success, - xnn_reshape_external_value( - runtime_.get(), logits_output()->tensor_id(owned_subgraph_.get()), - logits_output()->dims.size(), logits_output()->dims.data())); for (auto& kv_cache : kv_cache()) { auto key = kv_cache.k_cache; auto value = kv_cache.v_cache; @@ -373,6 +367,14 @@ value->dims.size(), value->dims.data())); } RET_CHECK_EQ(xnn_status_success, xnn_reshape_runtime(runtime_.get())); + size_t num_output_dims = 0; + std::vector<size_t> output_dims(3); + RET_CHECK_EQ( + xnn_status_success, + xnn_get_external_value_shape( + runtime_.get(), logits_output()->tensor_id(owned_subgraph_.get()), + &num_output_dims, output_dims.data())); + logits_output()->Resize(output_dims); } for (auto& kv_cache : kv_cache()) { @@ -662,9 +664,16 @@ absl::StatusOr<std::shared_ptr<Tensor>> LlmBuilder::PostProcess( ApplyNorm(transformer_out, weights.final_norm_weight, llm_params_.final_norm)); RET_CHECK(weights.softmax_linear); + + int64_t slice_size = llm_params_.draft_size_G + 1; + // The KV caches have been filled; we only need to compute the tokens that + // will be used for computation or output.
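// Effect of the slice introduced below, with illustrative numbers: without a
// draft model draft_size_G == 0, so slice_size == 1 and only the final
// position's hidden state reaches the softmax projection. For a prefill of
// T = 1024 positions, hidden width D and vocabulary V, the output matmul
// shrinks from [1, T, D] x [D, V] to [1, 1, D] x [D, V], roughly a 1024x
// reduction in that layer's work.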
+ MP_ASSIGN_OR_RETURN(auto slice, + Slice(transformer_out, /*axis=*/1, /*offset=*/-slice_size, + /*length=*/slice_size)); MP_ASSIGN_OR_RETURN( auto logits_output, - FullConn(transformer_out, weights.softmax_linear, weights.softmax_bias)); + FullConn(slice, weights.softmax_linear, weights.softmax_bias)); return logits_output; } diff --git a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.cc b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.cc index b4fec60a92..07259e62d1 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.cc +++ b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.cc @@ -17,6 +17,7 @@ #include #include +#include #include #include #include @@ -38,6 +39,8 @@ #include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/tflite_weight_accessor.h" #include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/utils.h" #include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/xnn_tensor.h" +#include "tensorflow/compiler/mlir/lite/schema/schema_generated.h" +#include "tensorflow/lite/model_builder.h" namespace mediapipe::tasks::genai::xnn_utils { @@ -486,8 +489,9 @@ absl::StatusOr LlmWeightsLoader::LoadWeights() { return result; } -DefaultLlmWeightsLoader::DefaultLlmWeightsLoader(absl::string_view weight_path, - const LlmParams& params) +DefaultLlmWeightsLoader::DefaultLlmWeightsLoader( + absl::string_view weight_path, const LlmParams& params, + std::shared_ptr flat_buffer_model) : LlmWeightsLoader(nullptr, params) { xnn_weights_cache_ = std::make_shared( params.cache_dir.empty() @@ -497,9 +501,20 @@ DefaultLlmWeightsLoader::DefaultLlmWeightsLoader(absl::string_view weight_path, absl::StrCat(mediapipe::file::Basename(weight_path), ".cache"))); ABSL_CHECK_OK(xnn_weights_cache_->Initialize()); - weight_accessor_ = std::make_unique( - std::make_shared(weight_path), - xnn_weights_cache_.get()); + if (flat_buffer_model != nullptr) { + const tflite::Model* model = flat_buffer_model->GetModel(); + std::shared_ptr tflite_model( + model, [](const tflite::Model*) { /* No deletion needed */ }); + char* data = const_cast( + reinterpret_cast(flat_buffer_model->allocation()->base())); + weight_accessor_ = std::make_unique( + std::make_shared(tflite_model, data), + xnn_weights_cache_.get()); + } else { + weight_accessor_ = std::make_unique( + std::make_shared(weight_path), + xnn_weights_cache_.get()); + } } } // namespace mediapipe::tasks::genai::xnn_utils diff --git a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.h b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.h index 56fba37047..ae5fd1b335 100644 --- a/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.h +++ b/mediapipe/tasks/cc/genai/inference/utils/xnn_utils/llm_weights.h @@ -28,9 +28,9 @@ #include "absl/status/statusor.h" #include "absl/strings/string_view.h" #include "mediapipe/tasks/cc/genai/inference/proto/llm_params.pb.h" -#include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/graph_builder.h" #include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/pack_weights_cache.h" #include "mediapipe/tasks/cc/genai/inference/utils/xnn_utils/xnn_tensor.h" +#include "tensorflow/lite/model_builder.h" namespace mediapipe::tasks::genai::xnn_utils { @@ -237,11 +237,6 @@ class LlmWeightsLoader { LlmParams& llm_params() { return params_; } const LlmParams& llm_params() const { return params_; } - // Returns the XnnWeightsCache that could work with weights loader, if any. 
- virtual std::shared_ptr GetXnnWeightsCache() { - return nullptr; - } - protected: virtual absl::StatusOr LoadSelfAttention( int layer_id); @@ -264,12 +259,9 @@ class DefaultLlmWeightsLoader : public LlmWeightsLoader { DefaultLlmWeightsLoader(std::unique_ptr weight_accessor, const LlmParams& params) : LlmWeightsLoader(std::move(weight_accessor), params) {} - DefaultLlmWeightsLoader(absl::string_view weight_path, - const LlmParams& params); - - std::shared_ptr GetXnnWeightsCache() override { - return xnn_weights_cache_; - } + DefaultLlmWeightsLoader( + absl::string_view weight_path, const LlmParams& params, + std::shared_ptr flat_buffer_model = nullptr); private: std::shared_ptr xnn_weights_cache_; diff --git a/mediapipe/tasks/cc/text/custom_ops/sentencepiece/optimized_encoder_test.cc b/mediapipe/tasks/cc/text/custom_ops/sentencepiece/optimized_encoder_test.cc index f2bc27144c..9803e61b43 100644 --- a/mediapipe/tasks/cc/text/custom_ops/sentencepiece/optimized_encoder_test.cc +++ b/mediapipe/tasks/cc/text/custom_ops/sentencepiece/optimized_encoder_test.cc @@ -34,8 +34,8 @@ namespace mediapipe::tflite_operations::sentencepiece { namespace internal { -tensorflow::Status TFReadFileToString(const std::string& filepath, - std::string* data) { +absl::Status TFReadFileToString(const std::string& filepath, + std::string* data) { return tensorflow::ReadFileToString(tensorflow::Env::Default(), filepath, data); } diff --git a/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc b/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc index c4f63aab63..18b8e8ae31 100644 --- a/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc +++ b/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc @@ -155,7 +155,7 @@ TEST_F(TextClassifierTest, TextClassifierWithBert) { /*head_index=*/0, /*head_name=*/"probability"}); positive_expected.classifications.emplace_back(Classifications{ - /*categories=*/{{1, 0.9999413, "positive"}, {0, 0.000058, "negative"}}, + /*categories=*/{{1, 0.9999370, "positive"}, {0, 0.0000629, "negative"}}, /*head_index=*/0, /*head_name=*/"probability"}); #endif // _WIN32 @@ -251,8 +251,8 @@ TEST_F(TextClassifierTest, BertLongPositive) { categories.push_back( {/*index=*/0, /*score=*/0.023313, /*category_name=*/"negative"}); #else - categories.push_back({1, 0.983276, "positive"}); - categories.push_back({0, 0.016723, "negative"}); + categories.push_back({1, 0.981097, "positive"}); + categories.push_back({0, 0.018902, "negative"}); #endif // _WIN32 expected.classifications.emplace_back( diff --git a/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_detector_graph.cc b/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_detector_graph.cc index 4ab3a40e54..e129076aba 100644 --- a/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_detector_graph.cc +++ b/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_detector_graph.cc @@ -335,6 +335,7 @@ class SingleFaceLandmarksDetectorGraph : public core::ModelTaskGraph { auto& landmark_projection = graph.AddNode("LandmarkProjectionCalculator"); landmarks_letterbox_removed >> landmark_projection.In(kNormLandmarksTag); face_rect >> landmark_projection.In(kNormRectTag); + image_size >> landmark_projection.In("IMAGE_DIMENSIONS"); Stream projected_landmarks = AllowIf( landmark_projection[Output(kNormLandmarksTag)], presence, graph); diff --git a/mediapipe/tasks/cc/vision/holistic_landmarker/holistic_face_tracking_test.cc b/mediapipe/tasks/cc/vision/holistic_landmarker/holistic_face_tracking_test.cc 
index 314c330b3c..89a41266ac 100644 --- a/mediapipe/tasks/cc/vision/holistic_landmarker/holistic_face_tracking_test.cc +++ b/mediapipe/tasks/cc/vision/holistic_landmarker/holistic_face_tracking_test.cc @@ -82,9 +82,9 @@ constexpr char kImageInStream[] = "image_in"; constexpr char kPoseLandmarksInStream[] = "pose_landmarks_in"; constexpr char kFaceLandmarksOutStream[] = "face_landmarks_out"; constexpr char kRenderedImageOutStream[] = "rendered_image_out"; -constexpr char kFaceDetectorTFLiteName[] = "face_detector.tflite"; -constexpr char kFaceLandmarksDetectorTFLiteName[] = - "face_landmarks_detector.tflite"; +constexpr char kFaceDetectionModelFile[] = "face_detection_short_range.tflite"; +constexpr char kFaceLandmarksModelFile[] = + "facemesh2_lite_iris_faceflag_2023_02_14.tflite"; std::string GetFilePath(absl::string_view filename) { return file::JoinPath("./", kTestDataDirectory, filename); @@ -108,6 +108,16 @@ mediapipe::LandmarksToRenderDataCalculatorOptions GetFaceRendererOptions() { return render_options; } +mediapipe::RectToRenderDataCalculatorOptions GetRectRendererOptions() { + mediapipe::RectToRenderDataCalculatorOptions render_options; + render_options.set_filled(false); + render_options.mutable_color()->set_r(255); + render_options.mutable_color()->set_g(0); + render_options.mutable_color()->set_b(0); + render_options.set_thickness(2); + return render_options; +} + absl::StatusOr> CreateModelAssetBundleResources(const std::string& model_asset_filename) { auto external_model_bundle = std::make_unique(); @@ -127,38 +137,19 @@ absl::StatusOr> CreateTaskRunner() { Stream face_landmarks_from_pose = SplitToRanges(pose_landmarks, {{0, 11}}, graph)[0]; // Create face landmarker model bundle. - MP_ASSIGN_OR_RETURN( - auto model_bundle, - CreateModelAssetBundleResources(GetFilePath("face_landmarker_v2.task"))); face_detector::proto::FaceDetectorGraphOptions detector_options; face_landmarker::proto::FaceLandmarksDetectorGraphOptions landmarks_detector_options; // Set face detection model. - MP_ASSIGN_OR_RETURN(auto face_detector_model_file, - model_bundle->GetFile(kFaceDetectorTFLiteName)); - core::proto::FilePointerMeta face_detection_file_pointer; - face_detection_file_pointer.set_pointer( - reinterpret_cast(face_detector_model_file.data())); - face_detection_file_pointer.set_length(face_detector_model_file.size()); - detector_options.mutable_base_options() - ->mutable_model_asset() - ->mutable_file_pointer_meta() - ->Swap(&face_detection_file_pointer); detector_options.set_num_faces(1); + detector_options.mutable_base_options()->mutable_model_asset()->set_file_name( + GetFilePath(kFaceDetectionModelFile)); // Set face landmarks model. - MP_ASSIGN_OR_RETURN(auto face_landmarks_model_file, - model_bundle->GetFile(kFaceLandmarksDetectorTFLiteName)); - core::proto::FilePointerMeta face_landmarks_detector_file_pointer; - face_landmarks_detector_file_pointer.set_pointer( - reinterpret_cast(face_landmarks_model_file.data())); - face_landmarks_detector_file_pointer.set_length( - face_landmarks_model_file.size()); landmarks_detector_options.mutable_base_options() ->mutable_model_asset() - ->mutable_file_pointer_meta() - ->Swap(&face_landmarks_detector_file_pointer); + ->set_file_name(GetFilePath(kFaceLandmarksModelFile)); // Track holistic face. 
HolisticFaceTrackingRequest request; @@ -173,10 +164,11 @@ absl::StatusOr> CreateTaskRunner() { auto render_scale = utils::GetRenderScale( image_size, result.debug_output.roi_from_pose, 0.0001, graph); - auto face_landmarks_render_data = utils::RenderLandmarks( - face_landmarks, render_scale, GetFaceRendererOptions(), graph); std::vector> render_list = { - face_landmarks_render_data}; + utils::RenderLandmarks(face_landmarks, render_scale, + GetFaceRendererOptions(), graph), + utils::RenderRect(result.debug_output.roi_from_pose, + GetRectRendererOptions(), graph)}; auto rendered_image = utils::Render( @@ -209,6 +201,7 @@ TEST_F(HolisticFaceTrackingTest, SmokeTest) { holistic_result.pose_landmarks())}})); ASSERT_TRUE(output_packets.find(kFaceLandmarksOutStream) != output_packets.end()); + ASSERT_FALSE(output_packets.find(kFaceLandmarksOutStream)->second.IsEmpty()); auto face_landmarks = output_packets.find(kFaceLandmarksOutStream) ->second.Get(); EXPECT_THAT( diff --git a/mediapipe/tasks/cc/vision/image_segmenter/calculators/BUILD b/mediapipe/tasks/cc/vision/image_segmenter/calculators/BUILD index 93a49fdf83..a694f9e275 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/calculators/BUILD +++ b/mediapipe/tasks/cc/vision/image_segmenter/calculators/BUILD @@ -78,6 +78,7 @@ cc_library( "//mediapipe/gpu:shader_util", "//mediapipe/tasks/cc/vision/image_segmenter/proto:segmenter_options_cc_proto", "//mediapipe/tasks/cc/vision/utils:image_utils", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/strings:str_format", ] + select({ diff --git a/mediapipe/tasks/ios/genai/inference/BUILD b/mediapipe/tasks/ios/genai/inference/BUILD index fe591690b3..fad3e3d439 100644 --- a/mediapipe/tasks/ios/genai/inference/BUILD +++ b/mediapipe/tasks/ios/genai/inference/BUILD @@ -16,7 +16,17 @@ load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") licenses(["notice"]) -package(default_visibility = ["//mediapipe/tasks:internal"]) +package(default_visibility = [ + ":clients", + "//mediapipe/tasks:internal", +]) + +package_group( + name = "clients", + packages = [ + "//googlemac/iPhone/Home/OnDeviceML/...", + ], +) swift_library( name = "LlmInference", diff --git a/mediapipe/tasks/ios/genai/inference/sources/LlmInference+Session.swift b/mediapipe/tasks/ios/genai/inference/sources/LlmInference+Session.swift index 3fe740de2a..a01b80e20c 100644 --- a/mediapipe/tasks/ios/genai/inference/sources/LlmInference+Session.swift +++ b/mediapipe/tasks/ios/genai/inference/sources/LlmInference+Session.swift @@ -54,7 +54,9 @@ extension LlmInference { topp: options.topp, temperature: options.temperature, random_seed: options.randomSeed, - lora_path: nil) + lora_path: nil, + include_token_cost_calculator: true, + enable_vision_modality: false) /// If `loraPath` is != nil, modify session config with the corresponding C string and invoke /// the method to create session runner within the scope where the C String of the `loraPath` diff --git a/mediapipe/tasks/ios/genai/inference/sources/LlmInference.swift b/mediapipe/tasks/ios/genai/inference/sources/LlmInference.swift index 312bb23b2f..f5d7cd940c 100644 --- a/mediapipe/tasks/ios/genai/inference/sources/LlmInference.swift +++ b/mediapipe/tasks/ios/genai/inference/sources/LlmInference.swift @@ -65,6 +65,8 @@ import MediaPipeTasksGenAIC try options.supportedLoraRanks.withUnsafeMutableBufferPointer { supportedLoraRanks in let modelSetting = LlmModelSettings( model_path: modelPath, + vision_encoder_path: nil, + vision_adapter_path: 
nil, cache_dir: cacheDirectory, max_num_tokens: options.maxTokens, num_decode_steps_per_sync: numberOfDecodeStepsPerSync, diff --git a/mediapipe/tasks/ios/test/vision/holistic_landmarker/MPPHolisticLandmarkerTests.mm b/mediapipe/tasks/ios/test/vision/holistic_landmarker/MPPHolisticLandmarkerTests.mm index d36a7d7ed5..943e76eae9 100644 --- a/mediapipe/tasks/ios/test/vision/holistic_landmarker/MPPHolisticLandmarkerTests.mm +++ b/mediapipe/tasks/ios/test/vision/holistic_landmarker/MPPHolisticLandmarkerTests.mm @@ -533,17 +533,26 @@ - (void)assertHolisticLandmarkerResult:(MPPHolisticLandmarkerResult *)holisticLa withLandmarkTypeName:@"pose_landmarks" areApproximatelyEqualToExpectedNormalizedLandmarks:expectedHolisticLandmarkerResult .poseLandmarks]; + // Comparing world landmark counts of the actual result to pose landmark counts of expected result + // to ensure world landmarks are present since expected result does not contain world landmarks. + XCTAssertEqual(holisticLandmarkerResult.poseWorldLandmarks.count, + expectedHolisticLandmarkerResult.poseLandmarks.count); [self assertNormalizedLandmarks:holisticLandmarkerResult.leftHandLandmarks withLandmarkTypeName:@"left_hand_landmarks" areApproximatelyEqualToExpectedNormalizedLandmarks:expectedHolisticLandmarkerResult .leftHandLandmarks]; + XCTAssertEqual(holisticLandmarkerResult.leftHandWorldLandmarks.count, + expectedHolisticLandmarkerResult.leftHandLandmarks.count); [self assertNormalizedLandmarks:holisticLandmarkerResult.rightHandLandmarks withLandmarkTypeName:@"right_hand_landmarks" areApproximatelyEqualToExpectedNormalizedLandmarks:expectedHolisticLandmarkerResult .rightHandLandmarks]; + XCTAssertEqual(holisticLandmarkerResult.rightHandWorldLandmarks.count, + expectedHolisticLandmarkerResult.rightHandLandmarks.count); + [self assertFaceBlendshapes:holisticLandmarkerResult.faceBlendshapes areApproximatelyEqualToExpectedFaceBlendshapes:expectedHolisticLandmarkerResult .faceBlendshapes]; diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm index 96434eb472..5f6e1d61cc 100644 --- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm +++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm @@ -29,7 +29,7 @@ static constexpr int kMicrosecondsPerMillisecond = 1000; // Constants for the underlying MP Tasks Graph. See -// https://github.com/google/mediapipe/tree/master/mediapipe/tasks/cc/vision/face_detector/face_detector_graph.cc +// https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/tasks/cc/vision/face_detector/face_detector_graph.cc static NSString *const kDetectionsStreamName = @"detections_out"; static NSString *const kDetectionsTag = @"DETECTIONS"; static NSString *const kImageInStreamName = @"image_in"; diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm index 8a128475d8..5748c3f4aa 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm @@ -33,7 +33,7 @@ static constexpr int kMicrosecondsPerMillisecond = 1000; // Constants for the underlying MP Tasks Graph. 
See -// https://github.com/google/mediapipe/tree/master/mediapipe/tasks/cc/vision/face_landmarker/face_landmarker_graph.cc +// https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/tasks/cc/vision/face_landmarker/face_landmarker_graph.cc static NSString *const kLandmarksOutStreamName = @"landmarks_out"; static NSString *const kLandmarksOutTag = @"NORM_LANDMARKS"; static NSString *const kBlendshapesOutStreamName = @"blendshapes_out"; diff --git a/mediapipe/tasks/ios/vision/holistic_landmarker/utils/sources/MPPHolisticLandmarkerResult+Helpers.mm b/mediapipe/tasks/ios/vision/holistic_landmarker/utils/sources/MPPHolisticLandmarkerResult+Helpers.mm index 3cf088dd65..0f202fe96b 100644 --- a/mediapipe/tasks/ios/vision/holistic_landmarker/utils/sources/MPPHolisticLandmarkerResult+Helpers.mm +++ b/mediapipe/tasks/ios/vision/holistic_landmarker/utils/sources/MPPHolisticLandmarkerResult+Helpers.mm @@ -73,7 +73,8 @@ @implementation MPPHolisticLandmarkerResult (Helpers) faceBlendshapesProto:faceBlendshapesProto poseLandmarksProto:NormalizedLandmarkListFromPacket( poseLandmarksPacket) - poseWorldLandmarksProto:LandmarkListFromPacket(poseLandmarksPacket) + poseWorldLandmarksProto:LandmarkListFromPacket( + poseWorldLandmarksPacket) poseSegmentationMaskProto:poseSegmentationMaskProto leftHandLandmarksProto:NormalizedLandmarkListFromPacket( leftHandLandmarksPacket) diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.mm b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.mm index 8f26641d01..7958f3184f 100644 --- a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.mm +++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.mm @@ -28,7 +28,7 @@ static constexpr int kMicrosecondsPerMillisecond = 1000; // Constants for the underlying MP Tasks Graph. See -// https://github.com/google/mediapipe/tree/master/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc +// https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc static NSString *const kConfidenceMasksStreamName = @"confidence_masks"; static NSString *const kConfidenceMasksTag = @"CONFIDENCE_MASKS"; static NSString *const kCategoryMaskStreamName = @"category_mask"; diff --git a/mediapipe/tasks/ios/vision/interactive_segmenter/sources/MPPInteractiveSegmenter.mm b/mediapipe/tasks/ios/vision/interactive_segmenter/sources/MPPInteractiveSegmenter.mm index 7d894c01e7..eb5bd3651c 100644 --- a/mediapipe/tasks/ios/vision/interactive_segmenter/sources/MPPInteractiveSegmenter.mm +++ b/mediapipe/tasks/ios/vision/interactive_segmenter/sources/MPPInteractiveSegmenter.mm @@ -29,7 +29,7 @@ static constexpr int kMicrosecondsPerMillisecond = 1000; // Constants for the underlying MP Tasks Graph. 
See -// https://github.com/google/mediapipe/tree/master/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc +// https://github.com/google-ai-edge/mediapipe/tree/master/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc static NSString *const kConfidenceMasksStreamName = @"confidence_masks"; static NSString *const kConfidenceMasksTag = @"CONFIDENCE_MASKS"; static NSString *const kCategoryMaskStreamName = @"category_mask"; diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD index 8b7e18aff6..656f92c60e 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD @@ -85,6 +85,7 @@ android_library( deps = [ ":core_java", ":logging", + "//mediapipe/java/com/google/mediapipe/framework/image", "//mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/proto:llm_options_java_proto_lite", "//mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/proto:llm_response_context_java_proto_lite", "//third_party/java/protobuf:protobuf_lite", diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/LlmTaskRunner.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/LlmTaskRunner.java index c735969be1..0e83f21dfa 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/LlmTaskRunner.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/LlmTaskRunner.java @@ -15,6 +15,14 @@ package com.google.mediapipe.tasks.core; import android.content.Context; +import android.graphics.Bitmap; +import android.graphics.PixelFormat; +import android.media.Image; +import com.google.mediapipe.framework.image.BitmapExtractor; +import com.google.mediapipe.framework.image.ByteBufferExtractor; +import com.google.mediapipe.framework.image.MPImage; +import com.google.mediapipe.framework.image.MPImageProperties; +import com.google.mediapipe.framework.image.MediaImageExtractor; import com.google.mediapipe.tasks.core.OutputHandler.ProgressListener; import com.google.mediapipe.tasks.core.jni.proto.LlmOptionsProto.LlmModelSettings; import com.google.mediapipe.tasks.core.jni.proto.LlmOptionsProto.LlmSessionConfig; @@ -22,6 +30,7 @@ import com.google.mediapipe.tasks.core.logging.TasksStatsDummyLogger; import com.google.mediapipe.tasks.core.logging.TasksStatsLogger; import com.google.protobuf.InvalidProtocolBufferException; +import java.nio.ByteBuffer; import java.util.List; import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; @@ -38,6 +47,109 @@ public final class LlmTaskRunner implements AutoCloseable { private final TasksStatsLogger statsLogger; private final AtomicBoolean isProcessing; + /** + * Describes how pixel bits encode color. A pixel may be an alpha mask, a grayscale, RGB, or ARGB. + * + *

This matches the SkColorType enum in https://api.skia.org/SkColorType_8h.html. + */ + private enum SkColorType { + /** Uninitialized. */ + UNKNOWN(0), + /** Pixel with alpha in 8-bit byte. */ + ALPHA_8(1), + /** Pixel with 5 bits red, 6 bits green, 5 bits blue, in 16-bit word. */ + RGB_565(2), + /** Pixel with 4 bits for alpha, red, green, blue; in 16-bit word. */ + ARGB_4444(3), + /** Pixel with 8 bits for red, green, blue, alpha; in 32-bit word. */ + RGBA_8888(4), + /** Pixel with 8 bits each for red, green, blue; in 32-bit word. */ + RGB_888X(5), + /** Pixel with 8 bits for blue, green, red, alpha; in 32-bit word. */ + BGRA_8888(6), + /** 10 bits for red, green, blue; 2 bits for alpha; in 32-bit word. */ + RGBA_1010102(7), + /** 10 bits for blue, green, red; 2 bits for alpha; in 32-bit word. */ + BGRA_1010102(8), + /** Pixel with 10 bits each for red, green, blue; in 32-bit word. */ + RGB_101010X(9), + /** Pixel with 10 bits each for blue, green, red; in 32-bit word. */ + BGR101010X(10), + /** Pixel with 10 bits each for blue, green, red; in 32-bit word, extended range. */ + BGR_101010X_XR(11), + /** Pixel with 10 bits each for blue, green, red, alpha; in 64-bit word, extended range. */ + BGRA_10101010_XR(12), + /** + * Pixel with 10 used bits (most significant) followed by 6 unused bits for red, green, blue, + * alpha; in 64-bit word. + */ + RGBA_10X6(13), + /** Pixel with grayscale level in 8-bit byte. */ + GRAY_8(14), + /** Pixel with half floats in [0,1] for red, green, blue, alpha; in 64-bit word. */ + RGBA_F16NORM(15), + /** Pixel with half floats for red, green, blue, alpha; in 64-bit word. */ + RGBA_F16(16), + /** Pixel with half floats for red, green, blue; in 64-bit word. */ + RGB_F16F16F16X(17), + /** Pixel using C float for red, green, blue, alpha; in 128-bit word. */ + RGBA_F32(18), + /** Pixel with a uint8_t for red and green. */ + R8G8_UNORM(19), + /** Pixel with a half float for alpha. */ + A16_FLOAT(20), + /** Pixel with a half float for red and green. */ + R16G16_FLOAT(21), + /** Pixel with a little endian uint16_t for alpha. */ + A16_UNORM(22), + /** Pixel with a little endian uint16_t for red and green. */ + R16G16_UNORM(23), + /** Pixel with a little endian uint16_t for red, green, blue and alpha. */ + R16G16B16A16_UNORM(24), + /** Pixel with 8 bits for red, green, blue, alpha; in 32-bit word, gamma encoded. */ + SRGBA_8888(25), + /** Pixel with a uint8_t for red. */ + R8_UNORM(26); + + private final int value; + + SkColorType(int value) { + this.value = value; + } + + /** Returns the integer value associated with this color type. */ + int getValue() { + return value; + } + } + + /** + * Describes how to interpret the alpha component of a pixel. A pixel may be opaque, or alpha, + * describing multiple levels of transparency. + * + *

This matches the SkAlphaType enum in https://api.skia.org/SkAlphaType_8h.html. + */ + private enum SkAlphaType { + UNINITIALIZED(0), + /** Pixel is opaque */ + OPAQUE(1), + /** Pixel components are premultiplied by alpha */ + PREMULTIPLIED(2), + /** Pixel components are independent of alpha */ + UNPREMULTIPLIED(3); + + private final int value; + + SkAlphaType(int value) { + this.value = value; + } + + /** Returns the integer value associated with this alpha type. */ + int getValue() { + return value; + } + }; + /** The session to use for LLM inference calls. */ public static final class LlmSession { private final long sessionHandle; @@ -79,6 +191,21 @@ public void addQueryChunk(LlmSession session, String input) { nativeAddQueryChunk(session.sessionHandle, input); } + /** Adds a new image to the session context. */ + public void addImage(LlmSession session, MPImage input) { + validateState(); + long imageHandle = createImage(input); + try { + // TODO: Remove this dummy chunk. + // Since AddImage cannot distinguish if start_id is being added, + // use a dummy chunk to make sure the start_id is being added properly. + nativeAddQueryChunk(session.sessionHandle, ""); + nativeAddImage(session.sessionHandle, imageHandle); + } finally { + nativeDeleteSkBitmap(imageHandle); + } + } + /** Invokes the LLM with the given session and waits for the result. */ public List<String> predictSync(LlmSession session) { validateState(); @@ -160,6 +287,58 @@ public void close() { } } + private long createImage(MPImage image) { + MPImageProperties properties = image.getContainedImageProperties().get(0); + + SkAlphaType skAlphaType = SkAlphaType.OPAQUE; + ByteBuffer buffer; + SkColorType skColorType; + + int width = image.getWidth(); + int height = image.getHeight(); + + if (properties.getStorageType() == MPImage.STORAGE_TYPE_BYTEBUFFER) { + buffer = ByteBufferExtractor.extract(image); + + switch (properties.getImageFormat()) { + case MPImage.IMAGE_FORMAT_RGBA: + skColorType = SkColorType.RGBA_8888; + break; + case MPImage.IMAGE_FORMAT_RGB: + skColorType = SkColorType.RGB_888X; + break; + case MPImage.IMAGE_FORMAT_ALPHA: + skColorType = SkColorType.ALPHA_8; + break; + default: + throw new UnsupportedOperationException( + "Unsupported MediaPipe Image format: " + properties.getImageFormat()); + } + } else if (properties.getStorageType() == MPImage.STORAGE_TYPE_BITMAP) { + Bitmap bitmap = BitmapExtractor.extract(image); + if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) { + throw new UnsupportedOperationException("Bitmap must use ARGB_8888 config."); + } + skColorType = SkColorType.RGBA_8888; + + buffer = ByteBuffer.allocateDirect(bitmap.getByteCount()); + bitmap.copyPixelsToBuffer(buffer); + } else if (properties.getStorageType() == MPImage.STORAGE_TYPE_MEDIA_IMAGE) { + Image mediaImage = MediaImageExtractor.extract(image); + if (mediaImage.getFormat() != PixelFormat.RGBA_8888) { + throw new UnsupportedOperationException("Android media image must use RGBA_8888 config."); + } + buffer = mediaImage.getPlanes()[0].getBuffer(); + skColorType = SkColorType.RGBA_8888; + } else { + throw new UnsupportedOperationException( + "Unsupported Image container type: " + properties.getStorageType()); + } + + return nativeCreateSkBitmap( + buffer, width, height, skColorType.getValue(), skAlphaType.getValue()); + } + private void validateState() { if (isProcessing.get()) { throw new IllegalStateException("Previous invocation still processing. 
Wait for done=true."); @@ -187,4 +366,11 @@ private void validateState() { private static native void nativePredictAsync(long sessionPointer, long callbackContextHandle); private static native int nativeSizeInTokens(long sessionPointer, String input); + + private static native long nativeCreateSkBitmap( + ByteBuffer buffer, int width, int height, int colorType, int alphaType); + + private static native void nativeAddImage(long sessionPointer, long imagePointer); + + private static native void nativeDeleteSkBitmap(long imagePointer); } diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/BUILD index 6b09bcbe70..94aad6fe4a 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/BUILD +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/BUILD @@ -72,6 +72,7 @@ cc_library( "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", + "@skia//:core", ] + select({ "//conditions:default": [ "//mediapipe/tasks/cc/genai/inference/c:libllm_inference_engine_cpu", diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/llm.cc b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/llm.cc index 18d5ceb2c8..89ba4d3563 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/llm.cc +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/llm.cc @@ -16,12 +16,18 @@ #include +#include #include +#include #include #include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" +#include "include/core/SkAlphaType.h" // from @skia +#include "include/core/SkBitmap.h" // from @skia +#include "include/core/SkImage.h" // from @skia +#include "include/core/SkImageInfo.h" // from @skia #include "mediapipe/java/com/google/mediapipe/framework/jni/class_registry.h" #include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" #include "mediapipe/tasks/cc/genai/inference/c/llm_inference_engine.h" @@ -37,12 +43,22 @@ using mediapipe::android::JStringToStdString; using mediapipe::android::ThrowIfError; using mediapipe::java::GetJNIEnv; +const bool kDefaultIncludeTokenCostCalculator = true; + LlmModelSettings ParseModelSettings(void* bytes, int size) { LlmModelSettingsProto input; input.ParseFromArray(bytes, size); LlmModelSettings output; output.model_path = strdup(input.model_path().c_str()); + output.vision_encoder_path = + input.vision_model_settings().has_encoder_path() + ? strdup(input.vision_model_settings().encoder_path().c_str()) + : nullptr; + output.vision_adapter_path = + input.vision_model_settings().has_adapter_path() + ? 
strdup(input.vision_model_settings().adapter_path().c_str()) + : nullptr; output.cache_dir = strdup(input.cache_dir().c_str()); output.sequence_batch_size = input.sequence_batch_size(); output.num_decode_steps_per_sync = input.num_decode_steps_per_sync(); @@ -55,6 +71,8 @@ LlmModelSettings ParseModelSettings(void* bytes, int size) { for (int i = 0; i < input.supported_lora_ranks_size(); ++i) { output.supported_lora_ranks[i] = input.supported_lora_ranks(i); } + } else { + output.supported_lora_ranks = nullptr; } output.llm_activation_data_type = kLlmActivationDataTypeDefault; output.num_draft_tokens = 0; @@ -74,12 +92,20 @@ LlmSessionConfig ParseSessionConfig(void* bytes, int size) { if (input.has_lora_path()) { output.lora_path = strdup(input.lora_path().c_str()); } + output.include_token_cost_calculator = + input.graph_config().has_include_token_cost_calculator() + ? input.graph_config().include_token_cost_calculator() + : kDefaultIncludeTokenCostCalculator; + output.enable_vision_modality = input.graph_config().enable_vision_modality(); return output; } void FreeModelSettings(LlmModelSettings* model_settings) { delete model_settings->model_path; + delete model_settings->vision_adapter_path; + delete model_settings->vision_encoder_path; delete model_settings->cache_dir; + delete[] model_settings->supported_lora_ranks; model_settings->model_path = nullptr; model_settings->cache_dir = nullptr; } @@ -207,6 +233,20 @@ JNIEXPORT void JNICALL JNI_METHOD(nativeAddQueryChunk)(JNIEnv* env, jclass thiz, } } +JNIEXPORT void JNICALL JNI_METHOD(nativeAddImage)(JNIEnv* env, jclass thiz, + jlong session_handle, + jlong image_handle) { + char* error_msg = nullptr; + int error_code = LlmInferenceEngine_Session_AddImage( + reinterpret_cast(session_handle), + reinterpret_cast(image_handle), &error_msg); + if (error_code) { + ThrowIfError(env, absl::InternalError( + absl::StrCat("Failed to add image: ", error_msg))); + free(error_msg); + } +} + JNIEXPORT jbyteArray JNICALL JNI_METHOD(nativePredictSync)(JNIEnv* env, jclass thiz, jlong session_handle) { LlmResponseContext response_context = LlmInferenceEngine_Session_PredictSync( @@ -255,3 +295,34 @@ JNIEXPORT jint JNICALL JNI_METHOD(nativeSizeInTokens)(JNIEnv* env, jclass thiz, } return size; } + +JNIEXPORT jlong JNICALL JNI_METHOD(nativeCreateSkBitmap)( + JNIEnv* env, jclass thiz, jobject byte_buffer, jint width, jint height, + jint color_type, jint alpha_type) { + const int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer); + void* buffer_data = env->GetDirectBufferAddress(byte_buffer); + if (buffer_data == nullptr || buffer_size < 0) { + ThrowIfError(env, absl::InternalError("Cannot get direct access to the " + "input buffer. It should be created " + "using allocateDirect.")); + } + + SkColorType sk_color_type = static_cast<SkColorType>(color_type); + SkAlphaType sk_alpha_type = static_cast<SkAlphaType>(alpha_type); + SkImageInfo imageInfo = + SkImageInfo::Make(width, height, sk_color_type, sk_alpha_type); + + auto bitmap = std::make_unique<SkBitmap>(); + bool success = + bitmap->installPixels(imageInfo, buffer_data, imageInfo.minRowBytes()); + if (!success) { + ThrowIfError(env, absl::InternalError("Cannot initialize SkBitmap.")); + } + + return reinterpret_cast<jlong>(bitmap.release()); +} + +JNIEXPORT void JNICALL JNI_METHOD(nativeDeleteSkBitmap)(JNIEnv*, jclass, + jlong bitmap_handle) { + delete reinterpret_cast<SkBitmap*>(bitmap_handle); +}
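Note on the nativeCreateSkBitmap contract above: GetDirectBufferAddress only resolves direct buffers, so the Java caller has to supply a ByteBuffer created with allocateDirect (the BITMAP branch of LlmTaskRunner.createImage already does this). A minimal Java sketch of preparing a compatible MPImage, assuming the existing ByteBufferImageBuilder helper from the framework.image package:

    import com.google.mediapipe.framework.image.ByteBufferImageBuilder;
    import com.google.mediapipe.framework.image.MPImage;
    import java.nio.ByteBuffer;

    // RGBA is 4 bytes per pixel; the buffer must be direct so that the
    // JNI layer can obtain its address via GetDirectBufferAddress.
    int width = 224;
    int height = 224;
    ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * 4);
    // ... fill `pixels` with RGBA pixel data ...
    MPImage image =
        new ByteBufferImageBuilder(pixels, width, height, MPImage.IMAGE_FORMAT_RGBA).build();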
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/llm.h b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/llm.h index cadd82a236..6e67c0d240 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/llm.h +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/llm.h @@ -69,6 +69,14 @@ JNIEXPORT void JNICALL JNI_METHOD(nativeDeleteSession)(JNIEnv *, jclass, jlong); JNIEXPORT void JNICALL JNI_METHOD(nativeAddQueryChunk)(JNIEnv *, jclass, jlong, jstring); +/* + * Class: com_google_mediapipe_tasks_core_LlmTaskRunner + * Method: nativeAddImage + * Signature: (JJ)V + */ +JNIEXPORT void JNICALL JNI_METHOD(nativeAddImage)(JNIEnv *, jclass, jlong, + jlong); + /* * Class: com_google_mediapipe_tasks_core_LlmTaskRunner * Method: nativePredictSync @@ -109,6 +117,23 @@ JNIEXPORT void JNICALL JNI_METHOD(nativePredictAsync)(JNIEnv *, jclass, jlong, JNIEXPORT jint JNICALL JNI_METHOD(nativeSizeInTokens)(JNIEnv *, jclass, jlong, jstring); +/* + * Class: com_google_mediapipe_tasks_core_LlmTaskRunner + * Method: nativeCreateSkBitmap + * Signature: (Ljava/nio/ByteBuffer;IIII)J + */ +JNIEXPORT jlong JNICALL JNI_METHOD(nativeCreateSkBitmap)(JNIEnv *, jclass, + jobject, jint, jint, + jint, jint); + +/* + * Class: com_google_mediapipe_tasks_core_LlmTaskRunner + * Method: nativeDeleteSkBitmap + * Signature: (J)V + */ +JNIEXPORT void JNICALL JNI_METHOD(nativeDeleteSkBitmap)(JNIEnv *, jclass, + jlong); + #ifdef __cplusplus } // extern "C" #endif // __cplusplus diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/proto/llm_options.proto b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/proto/llm_options.proto index 22c647e6a2..e444230313 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/proto/llm_options.proto +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni/proto/llm_options.proto @@ -34,6 +34,20 @@ message LlmSessionConfig { // The absolute path to the LoRA model asset bundle stored locally on the // device. This is only compatible with GPU models. optional string lora_path = 4; + + // Parameters to customize the graph. + message GraphConfig { + // Whether to configure the graph to include the token cost calculator, + // which allows users to only compute the cost of a prompt. + optional bool include_token_cost_calculator = 1; + + // Whether to configure the graph to include the vision modality. Only one + // of enable_vision_modality or enable_audio_modality can be true currently. + optional bool enable_vision_modality = 2; + } + + // Parameters to customize the graph. + optional GraphConfig graph_config = 5; } // Configurable model parameters for creating an LLM inference engine. @@ -72,4 +86,15 @@ message LlmModelSettings { // means only greedy decoding is supported for any sessions created with this // engine. 
uint32 max_top_k = 8; + + // A container for vision model related settings. + message VisionModelSettings { + // Path to the vision encoder model file. + optional string encoder_path = 1; + + // Path to the vision adapter model file. + optional string adapter_path = 2; + } + + optional VisionModelSettings vision_model_settings = 9; } diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/GraphOptions.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/GraphOptions.java new file mode 100644 index 0000000000..c041917cae --- /dev/null +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/GraphOptions.java @@ -0,0 +1,42 @@ +package com.google.mediapipe.tasks.genai.llminference; + +import com.google.auto.value.AutoValue; + +/** Configuration for the inference graph. */ +@AutoValue +public abstract class GraphOptions { + + /** + * Returns whether to configure the graph to include the token cost calculator, which allows users + * to only compute the cost of a prompt. + */ + public abstract boolean includeTokenCostCalculator(); + + /** Returns whether to configure the graph to include the vision modality. */ + public abstract boolean enableVisionModality(); + + /** Returns a new {@link Builder} instance. */ + public static Builder builder() { + return new AutoValue_GraphOptions.Builder() + .setIncludeTokenCostCalculator(true) + .setEnableVisionModality(false); + } + + /** Builder for {@link GraphOptions}. */ + @AutoValue.Builder + public abstract static class Builder { + /** Sets whether to configure the graph to include the token cost calculator. */ + public abstract Builder setIncludeTokenCostCalculator(boolean includeTokenCostCalculator); + + /** Sets whether to configure the graph to include the vision modality. */ + public abstract Builder setEnableVisionModality(boolean enableVisionModality); + + /** AutoValue generated builder method. */ + abstract GraphOptions autoBuild(); + + /** Builds a new {@link GraphOptions} instance. */ + public GraphOptions build() { + return autoBuild(); + } + } +}
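For reference, a minimal sketch of opting into the vision graph with the class above; builder() seeds the defaults (token cost calculator on, vision modality off), so only the vision flag needs to be set:

    GraphOptions graphOptions =
        GraphOptions.builder()
            .setEnableVisionModality(true)
            .build();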
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/LlmInference.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/LlmInference.java index 9f09ec9128..194fe6bdba 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/LlmInference.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/LlmInference.java @@ -35,7 +35,7 @@ public final class LlmInference implements AutoCloseable { /** Creates an LlmInference Task. */ public static LlmInference createFromOptions(Context context, LlmInferenceOptions options) { // Configure LLM model settings. - LlmModelSettings modelSettings = + LlmModelSettings.Builder modelSettings = LlmModelSettings.newBuilder() .setModelPath(options.modelPath()) .setCacheDir(context.getCacheDir().getAbsolutePath()) @@ -43,10 +43,20 @@ public static LlmInference createFromOptions(Context context, LlmInferenceOption .setMaxTokens(options.maxTokens()) .setMaxTopK(options.maxTopK()) .setNumberOfSupportedLoraRanks(options.supportedLoraRanks().size()) - .addAllSupportedLoraRanks(options.supportedLoraRanks()) - .build(); + .addAllSupportedLoraRanks(options.supportedLoraRanks()); - return new LlmInference(context, STATS_TAG, modelSettings, options.resultListener()); + if (options.visionModelOptions().isPresent()) { + VisionModelOptions visionModelOptions = options.visionModelOptions().get(); + + LlmModelSettings.VisionModelSettings.Builder visionModelSettings = + LlmModelSettings.VisionModelSettings.newBuilder(); + visionModelOptions.getEncoderPath().ifPresent(visionModelSettings::setEncoderPath); + visionModelOptions.getAdapterPath().ifPresent(visionModelSettings::setAdapterPath); + + modelSettings.setVisionModelSettings(visionModelSettings.build()); + } + + return new LlmInference(context, STATS_TAG, modelSettings.build(), options.resultListener()); } /** Constructor to initialize an {@link LlmInference}. */ @@ -196,9 +206,12 @@ public abstract static class Builder { */ public abstract Builder setMaxTopK(int maxTopK); - /** The supported lora ranks for the base model. Used by GPU only. */ + /** Sets the supported lora ranks for the base model. Used by GPU only. */ public abstract Builder setSupportedLoraRanks(List<Integer> supportedLoraRanks); + /** Sets the model options to use for vision modality. */ + public abstract Builder setVisionModelOptions(VisionModelOptions visionModelOptions); + abstract LlmInferenceOptions autoBuild(); /** Validates and builds the {@link ImageGeneratorOptions} instance. */ @@ -232,6 +245,12 @@ public final LlmInferenceOptions build() { /** The error listener to use for the {@link LlmInference#generateAsync} API. */ public abstract Optional<ErrorListener> errorListener(); + /** The model options to use for vision modality. */ + public abstract Optional<VisionModelOptions> visionModelOptions(); + + /** Returns a new builder with the same values as this instance. */ + public abstract Builder toBuilder(); + /** Instantiates a new LlmInferenceOptions builder. */ public static Builder builder() { return new AutoValue_LlmInference_LlmInferenceOptions.Builder()
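A sketch of the engine-level setup this change enables, using hypothetical on-device model paths; VisionModelOptions is introduced later in this change, and setModelPath is assumed from the existing options builder:

    VisionModelOptions visionModelOptions =
        VisionModelOptions.builder()
            .setEncoderPath("/data/local/tmp/vision_encoder.tflite")  // hypothetical path
            .setAdapterPath("/data/local/tmp/vision_adapter.tflite")  // hypothetical path
            .build();
    LlmInference.LlmInferenceOptions options =
        LlmInference.LlmInferenceOptions.builder()
            .setModelPath("/data/local/tmp/model.task")  // hypothetical path
            .setVisionModelOptions(visionModelOptions)
            .build();
    LlmInference llmInference = LlmInference.createFromOptions(context, options);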
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/LlmInferenceSession.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/LlmInferenceSession.java index 2d070cd207..d862667293 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/LlmInferenceSession.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/LlmInferenceSession.java @@ -1,6 +1,7 @@ package com.google.mediapipe.tasks.genai.llminference; import com.google.auto.value.AutoValue; +import com.google.mediapipe.framework.image.MPImage; import com.google.mediapipe.tasks.core.LlmTaskRunner; import com.google.mediapipe.tasks.core.LlmTaskRunner.LlmSession; import com.google.mediapipe.tasks.core.jni.proto.LlmOptionsProto.LlmSessionConfig; @@ -37,6 +38,16 @@ public static LlmInferenceSession createFromOptions( sessionConfig.setLoraPath(""); } + if (options.graphOptions().isPresent()) { + GraphOptions graphOptions = options.graphOptions().get(); + LlmSessionConfig.GraphConfig graphConfig = + LlmSessionConfig.GraphConfig.newBuilder() + .setIncludeTokenCostCalculator(graphOptions.includeTokenCostCalculator()) + .setEnableVisionModality(graphOptions.enableVisionModality()) + .build(); + sessionConfig.setGraphConfig(graphConfig); + } + LlmTaskRunner taskRunner = llmInference.getTaskRunner(); LlmSession session = taskRunner.createSession(sessionConfig.build()); return new LlmInferenceSession(taskRunner, session); @@ -60,6 +71,16 @@ public void addQueryChunk(String inputText) { taskRunner.addQueryChunk(session, inputText); } + /** + * Adds an image to the session. + * + * @param image a MediaPipe {@link MPImage} object for processing. + * @throws MediaPipeException if there is an internal error. + */ + public void addImage(MPImage image) { + taskRunner.addImage(session, image); + } + /** * Generates a response based on the previously added query chunks synchronously. Use {@link * #addQueryChunk(String)} to add at least one query chunk before calling this function. @@ -170,6 +191,9 @@ public abstract static class Builder { */ public abstract Builder setLoraPath(String loraPath); + /** Sets the parameters to customize the graph. */ + public abstract Builder setGraphOptions(GraphOptions graphOptions); + abstract LlmInferenceSessionOptions autoBuild(); /** Validates and builds the {@link LlmInferenceSessionOptions} instance. */ @@ -196,6 +220,9 @@ public final LlmInferenceSessionOptions build() { */ public abstract Optional<String> loraPath(); + /** Returns the parameters to customize the graph. */ + public abstract Optional<GraphOptions> graphOptions(); + /** Instantiates a new LlmInferenceOptions builder. */ public static Builder builder() { return new AutoValue_LlmInferenceSession_LlmInferenceSessionOptions.Builder()
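Putting the session pieces together, a sketch of the intended call order: a text chunk first, then the image (matching the dummy-chunk workaround documented in LlmTaskRunner.addImage). generateResponse() and the createFromOptions(llmInference, ...) signature are assumed from the surrounding class:

    LlmInferenceSession.LlmInferenceSessionOptions sessionOptions =
        LlmInferenceSession.LlmInferenceSessionOptions.builder()
            .setGraphOptions(GraphOptions.builder().setEnableVisionModality(true).build())
            .build();
    LlmInferenceSession session =
        LlmInferenceSession.createFromOptions(llmInference, sessionOptions);
    session.addQueryChunk("Describe the following image:");
    session.addImage(image);  // the MPImage prepared earlier
    String response = session.generateResponse();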
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/VisionModelOptions.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/VisionModelOptions.java new file mode 100644 index 0000000000..1effe75ab8 --- /dev/null +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/genai/llminference/VisionModelOptions.java @@ -0,0 +1,36 @@ +package com.google.mediapipe.tasks.genai.llminference; + +import com.google.auto.value.AutoValue; +import java.util.Optional; + +/** Options for configuring vision modality. */ +@AutoValue +public abstract class VisionModelOptions { + /** Returns the path to the vision encoder model file. */ + public abstract Optional<String> getEncoderPath(); + + /** Returns the path to the vision adapter model file. */ + public abstract Optional<String> getAdapterPath(); + + /** Builder for {@link VisionModelOptions}. */ + @AutoValue.Builder + public abstract static class Builder { + /** Sets the path to the vision encoder model file. */ + public abstract Builder setEncoderPath(String encoderPath); + + /** Sets the path to the vision adapter model file. */ + public abstract Builder setAdapterPath(String adapterPath); + + abstract VisionModelOptions autoBuild(); + + /** Validates and builds the {@link VisionModelOptions} instance. */ + public final VisionModelOptions build() { + return autoBuild(); + } + } + + /** Instantiates a new VisionModelOptions builder. */ + public static Builder builder() { + return new AutoValue_VisionModelOptions.Builder(); + } +} diff --git a/mediapipe/tasks/python/genai/bundler/BUILD b/mediapipe/tasks/python/genai/bundler/BUILD index 0ed63b2a59..172b807e88 100644 --- a/mediapipe/tasks/python/genai/bundler/BUILD +++ b/mediapipe/tasks/python/genai/bundler/BUILD @@ -15,11 +15,6 @@ # Placeholder for internal Python strict library and test compatibility macro. # Placeholder for internal Python strict test compatibility macro. -package(default_visibility = [ - "//mediapipe:__subpackages__", - "//third_party/odml:__subpackages__", -]) - py_library( name = "llm_bundler", srcs = ["llm_bundler.py"], diff --git a/mediapipe/tasks/python/genai/bundler/llm_bundler.py b/mediapipe/tasks/python/genai/bundler/llm_bundler.py index ef13deb6f1..02d73313ba 100644 --- a/mediapipe/tasks/python/genai/bundler/llm_bundler.py +++ b/mediapipe/tasks/python/genai/bundler/llm_bundler.py @@ -93,19 +93,6 @@ def _validate_config(config: BundleConfig): "Please ensure you are passing a valid SentencePiece model." ) from e - encoded_start_token = sp.PieceToId(config.start_token) - if encoded_start_token == sp.unk_id(): - raise ValueError( - f"Failed to encode start token {config.start_token} with tokenizer." - ) - - for stop_token in config.stop_tokens: - encoded_stop_token = sp.PieceToId(stop_token) - if encoded_stop_token == sp.unk_id(): - raise ValueError( - f"Failed to encode stop token {stop_token} with tokenizer." - ) - def create_bundle(config: BundleConfig): """Creates a bundle from the given config.""" diff --git a/mediapipe/tasks/python/genai/bundler/llm_bundler_test.py b/mediapipe/tasks/python/genai/bundler/llm_bundler_test.py index ea43733f1b..91108346ab 100644 --- a/mediapipe/tasks/python/genai/bundler/llm_bundler_test.py +++ b/mediapipe/tasks/python/genai/bundler/llm_bundler_test.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Tests for llm_bundler.""" - import os import string import zipfile @@ -147,35 +145,6 @@ def test_invalid_stop_tokens_raises_value_error(self): with self.assertRaisesRegex(ValueError, "stop_tokens must be non-empty"): llm_bundler.create_bundle(config) - def test_invalid_start_stop_tokens_raises_value_error(self): - tempdir = self.create_tempdir() - sp_file_path = self._create_sp_model(tempdir.full_path) - tflite_file_path = self._create_tflite_model(tempdir.full_path) - output_file = os.path.join(tempdir, "test.task") - config = llm_bundler.BundleConfig( - tflite_model=tflite_file_path, - tokenizer_model=sp_file_path, - start_token="invalid_token", - stop_tokens=[self.EOS], - output_filename=output_file, - ) - with self.assertRaisesRegex( - ValueError, "Failed to encode start token invalid_token with tokenizer" - ): - llm_bundler.create_bundle(config) - - config = llm_bundler.BundleConfig( - tflite_model=tflite_file_path, - tokenizer_model=sp_file_path, - start_token=self.BOS, - stop_tokens=["invalid_token"], - output_filename=output_file, - ) - with self.assertRaisesRegex( - ValueError, "Failed to encode stop token invalid_token with tokenizer" - ): - llm_bundler.create_bundle(config) - def test_invalid_tokenizer_model_raises_value_error(self): tempdir = self.create_tempdir() sp_file_path = self._create_sp_model(tempdir.full_path, corrupt=True) diff --git a/mediapipe/tasks/python/genai/converter/llm_converter.py b/mediapipe/tasks/python/genai/converter/llm_converter.py index cff7948b97..81ec5bfa36 100644 --- a/mediapipe/tasks/python/genai/converter/llm_converter.py +++ b/mediapipe/tasks/python/genai/converter/llm_converter.py @@ -48,6 +48,9 @@ class ConversionConfig(object): lora_output_tflite_file: A string indicating the name of the generated tflite file for the LoRA weight. Only applicable when the lora_rank is not zero. + image_encoder_file: A string with the name of the image encoder tflite file. + image_adapter_file: A string with the name of the image adapter tflite file. + submodel_type: Name of submodel, e.g. GEMMA_2B. use_fake_weights: Whether to use fake weights. If set to True, the weights will be filled with zeros. 
""" @@ -71,6 +74,9 @@ def __init__( lora_ckpt: Optional[str] = None, lora_rank: Optional[int] = None, lora_output_tflite_file: Optional[str] = None, + image_encoder_file: Optional[str] = None, + image_adapter_file: Optional[str] = None, + submodel_type: Optional[str] = None, use_fake_weights: bool = False, ): self.input_ckpt = input_ckpt @@ -90,6 +96,9 @@ def __init__( self.combine_file_only = combine_file_only self.vocab_model_file = vocab_model_file self.obfuscate = obfuscate + self.image_encoder_file = image_encoder_file + self.image_adapter_file = image_adapter_file + self.submodel_type = submodel_type self.use_fake_weights = use_fake_weights if output_tflite_file: parent_dir = os.path.dirname(output_tflite_file) @@ -212,6 +221,9 @@ def combined_weight_bins_to_tflite( lora_rank: Optional[int] = None, lora_weight_path: Optional[str] = None, lora_output_tflite_file: Optional[str] = None, + image_encoder_file: Optional[str] = None, + image_adapter_file: Optional[str] = None, + submodel_type: Optional[str] = None, ): """Combines weight files to tflite file.""" if backend == 'cpu': @@ -235,6 +247,9 @@ def combined_weight_bins_to_tflite( 0 if lora_rank is None else lora_rank, '' if lora_weight_path is None else lora_weight_path, '' if lora_output_tflite_file is None else lora_output_tflite_file, + '' if image_encoder_file is None else image_encoder_file, + '' if image_adapter_file is None else image_adapter_file, + '' if submodel_type is None else submodel_type, ) else: raise ValueError('Unsupported backend: %s' % backend) @@ -353,4 +368,7 @@ def convert_checkpoint(config: ConversionConfig) -> None: lora_rank=config.lora_rank, lora_weight_path=config.output_dir, lora_output_tflite_file=config.lora_output_tflite_file, + image_encoder_file=config.image_encoder_file, + image_adapter_file=config.image_adapter_file, + submodel_type=config.submodel_type, ) diff --git a/mediapipe/tasks/python/test/text/text_embedder_test.py b/mediapipe/tasks/python/test/text/text_embedder_test.py index 3890bf2bdd..ebc69ef809 100644 --- a/mediapipe/tasks/python/test/text/text_embedder_test.py +++ b/mediapipe/tasks/python/test/text/text_embedder_test.py @@ -287,7 +287,7 @@ def test_embed_in_context(self, l2_normalize, quantize, model_name, @parameterized.parameters( # TODO: The similarity should likely be lower - (_BERT_MODEL_FILE, 0.99025), + (_BERT_MODEL_FILE, 0.98103), (_USE_MODEL_FILE, 0.780334), ) def test_embed_with_different_themes(self, model_file, expected_similarity): diff --git a/mediapipe/tasks/testdata/vision/face_geometry_expected_out.pbtxt b/mediapipe/tasks/testdata/vision/face_geometry_expected_out.pbtxt index d7e8652f41..df6eaaec35 100644 --- a/mediapipe/tasks/testdata/vision/face_geometry_expected_out.pbtxt +++ b/mediapipe/tasks/testdata/vision/face_geometry_expected_out.pbtxt @@ -3,2343 +3,2343 @@ mesh { vertex_type: VERTEX_PT primitive_type: TRIANGLE - vertex_buffer: -0.05458094 + vertex_buffer: -0.054581404 vertex_buffer: -3.9104233 - vertex_buffer: 6.2336426 + vertex_buffer: 6.233639 vertex_buffer: 0.499977 vertex_buffer: 0.652534 - vertex_buffer: -0.08718063 - vertex_buffer: -2.5412483 + vertex_buffer: -0.08718109 + vertex_buffer: -2.5412502 vertex_buffer: 7.949688 vertex_buffer: 0.500026 vertex_buffer: 0.547487 - vertex_buffer: -0.062314644 - vertex_buffer: -2.6207771 - vertex_buffer: 6.3720016 + vertex_buffer: -0.062315032 + vertex_buffer: -2.620779 + vertex_buffer: 6.371998 vertex_buffer: 0.499974 vertex_buffer: 0.602372 - vertex_buffer: -0.5262091 - vertex_buffer: -0.13145256 + 
vertex_buffer: -0.52620995 + vertex_buffer: -0.13145065 vertex_buffer: 6.791649 vertex_buffer: 0.482113 vertex_buffer: 0.471979 - vertex_buffer: -0.07805602 - vertex_buffer: -1.9441948 + vertex_buffer: -0.078056484 + vertex_buffer: -1.9441929 vertex_buffer: 8.094444 vertex_buffer: 0.500151 vertex_buffer: 0.527156 - vertex_buffer: -0.058286533 - vertex_buffer: -1.0193558 - vertex_buffer: 7.732723 + vertex_buffer: -0.05828686 + vertex_buffer: -1.0193539 + vertex_buffer: 7.7327194 vertex_buffer: 0.49991 vertex_buffer: 0.498253 - vertex_buffer: 0.015007198 - vertex_buffer: 1.6476421 + vertex_buffer: 0.015007228 + vertex_buffer: 1.6476498 vertex_buffer: 5.7056847 vertex_buffer: 0.499523 vertex_buffer: 0.401062 - vertex_buffer: -4.176094 - vertex_buffer: 2.6367798 - vertex_buffer: 2.9135933 + vertex_buffer: -4.1760993 + vertex_buffer: 2.6367893 + vertex_buffer: 2.9135857 vertex_buffer: 0.289712 vertex_buffer: 0.380764 - vertex_buffer: 0.03712082 - vertex_buffer: 2.985258 - vertex_buffer: 5.0371017 + vertex_buffer: 0.037120968 + vertex_buffer: 2.9852676 + vertex_buffer: 5.037098 vertex_buffer: 0.499955 vertex_buffer: 0.312398 - vertex_buffer: 0.06068772 - vertex_buffer: 3.787365 + vertex_buffer: 0.06068799 + vertex_buffer: 3.7873764 vertex_buffer: 5.021179 vertex_buffer: 0.499987 vertex_buffer: 0.269919 - vertex_buffer: 0.121769845 - vertex_buffer: 8.058422 + vertex_buffer: 0.12177047 + vertex_buffer: 8.058437 vertex_buffer: 3.8209877 vertex_buffer: 0.500023 vertex_buffer: 0.10705 - vertex_buffer: -0.04902646 - vertex_buffer: -4.1000786 + vertex_buffer: -0.049026906 + vertex_buffer: -4.1000805 vertex_buffer: 6.1800804 vertex_buffer: 0.500023 vertex_buffer: 0.666234 - vertex_buffer: -0.03831458 - vertex_buffer: -4.228405 + vertex_buffer: -0.038315058 + vertex_buffer: -4.228407 vertex_buffer: 6.0491524 vertex_buffer: 0.500016 vertex_buffer: 0.679224 - vertex_buffer: -0.05387768 - vertex_buffer: -4.2712173 - vertex_buffer: 5.8285065 + vertex_buffer: -0.053878143 + vertex_buffer: -4.2712193 + vertex_buffer: 5.8285027 vertex_buffer: 0.500023 vertex_buffer: 0.692348 - vertex_buffer: -0.053183556 + vertex_buffer: -0.053184003 vertex_buffer: -5.543394 - vertex_buffer: 5.0727234 + vertex_buffer: 5.0727158 vertex_buffer: 0.499977 vertex_buffer: 0.695278 - vertex_buffer: -0.059314266 - vertex_buffer: -5.8140745 - vertex_buffer: 5.2457848 + vertex_buffer: -0.059314743 + vertex_buffer: -5.8140755 + vertex_buffer: 5.245777 vertex_buffer: 0.499977 vertex_buffer: 0.705934 - vertex_buffer: -0.046299785 - vertex_buffer: -6.184594 - vertex_buffer: 5.4517365 + vertex_buffer: -0.04630032 + vertex_buffer: -6.184597 + vertex_buffer: 5.4517326 vertex_buffer: 0.499977 vertex_buffer: 0.719385 - vertex_buffer: -0.0507226 - vertex_buffer: -6.5362883 - vertex_buffer: 5.412071 + vertex_buffer: -0.050723135 + vertex_buffer: -6.536292 + vertex_buffer: 5.4120636 vertex_buffer: 0.499977 vertex_buffer: 0.737019 - vertex_buffer: -0.0614883 - vertex_buffer: -7.299967 - vertex_buffer: 5.0314445 + vertex_buffer: -0.061488867 + vertex_buffer: -7.2999697 + vertex_buffer: 5.031437 vertex_buffer: 0.499968 vertex_buffer: 0.781371 - vertex_buffer: -0.080724046 - vertex_buffer: -2.802185 + vertex_buffer: -0.08072452 + vertex_buffer: -2.8021832 vertex_buffer: 7.659096 vertex_buffer: 0.499816 vertex_buffer: 0.562981 - vertex_buffer: -0.7109383 + vertex_buffer: -0.7109394 vertex_buffer: -2.4929829 vertex_buffer: 6.7737694 vertex_buffer: 0.473773 vertex_buffer: 0.57391 - vertex_buffer: -7.1076756 - vertex_buffer: 6.3111076 + vertex_buffer: 
-7.1076837 + vertex_buffer: 6.311123 vertex_buffer: -0.36587524 vertex_buffer: 0.104907 vertex_buffer: 0.254141 - vertex_buffer: -2.470239 - vertex_buffer: 1.9644203 - vertex_buffer: 3.584652 + vertex_buffer: -2.4702423 + vertex_buffer: 1.964428 + vertex_buffer: 3.5846405 vertex_buffer: 0.36593 vertex_buffer: 0.409576 - vertex_buffer: -3.0842545 - vertex_buffer: 1.9237556 - vertex_buffer: 3.5713196 + vertex_buffer: -3.0842586 + vertex_buffer: 1.9237652 + vertex_buffer: 3.571312 vertex_buffer: 0.338758 vertex_buffer: 0.413025 - vertex_buffer: -3.7028773 - vertex_buffer: 1.9847145 - vertex_buffer: 3.4238892 + vertex_buffer: -3.702882 + vertex_buffer: 1.9847221 + vertex_buffer: 3.4238815 vertex_buffer: 0.31112 vertex_buffer: 0.40946 - vertex_buffer: -4.5404453 - vertex_buffer: 2.4034252 - vertex_buffer: 2.868042 + vertex_buffer: -4.540451 + vertex_buffer: 2.4034386 + vertex_buffer: 2.8680344 vertex_buffer: 0.274658 vertex_buffer: 0.389131 - vertex_buffer: -1.9506176 - vertex_buffer: 2.0792522 + vertex_buffer: -1.95062 + vertex_buffer: 2.0792599 vertex_buffer: 3.545063 vertex_buffer: 0.393362 vertex_buffer: 0.403706 - vertex_buffer: -3.3600674 - vertex_buffer: 3.3302898 - vertex_buffer: 3.677475 + vertex_buffer: -3.3600717 + vertex_buffer: 3.3302975 + vertex_buffer: 3.6774712 vertex_buffer: 0.345234 vertex_buffer: 0.344011 - vertex_buffer: -2.6583462 - vertex_buffer: 3.3343391 - vertex_buffer: 3.6318817 + vertex_buffer: -2.6583493 + vertex_buffer: 3.3343468 + vertex_buffer: 3.631878 vertex_buffer: 0.370094 vertex_buffer: 0.346076 - vertex_buffer: -4.0172 - vertex_buffer: 3.2417145 - vertex_buffer: 3.5189857 + vertex_buffer: -4.017204 + vertex_buffer: 3.241726 + vertex_buffer: 3.5189743 vertex_buffer: 0.319322 vertex_buffer: 0.347265 - vertex_buffer: -4.4652123 - vertex_buffer: 3.105711 - vertex_buffer: 3.2689667 + vertex_buffer: -4.465218 + vertex_buffer: 3.1057186 + vertex_buffer: 3.2689629 vertex_buffer: 0.297903 vertex_buffer: 0.353591 - vertex_buffer: -5.148801 - vertex_buffer: 2.13056 - vertex_buffer: 2.6144447 + vertex_buffer: -5.1488075 + vertex_buffer: 2.1305676 + vertex_buffer: 2.614441 vertex_buffer: 0.247792 vertex_buffer: 0.41081 - vertex_buffer: -2.5560007 - vertex_buffer: -8.121601 - vertex_buffer: 4.312229 + vertex_buffer: -2.5560045 + vertex_buffer: -8.121606 + vertex_buffer: 4.3122253 vertex_buffer: 0.396889 vertex_buffer: 0.842755 - vertex_buffer: -4.4293623 - vertex_buffer: 2.7792645 - vertex_buffer: 2.6971169 + vertex_buffer: -4.429368 + vertex_buffer: 2.779272 + vertex_buffer: 2.6971092 vertex_buffer: 0.280098 vertex_buffer: 0.3756 - vertex_buffer: -7.3019996 - vertex_buffer: 3.2599068 - vertex_buffer: -0.24933624 + vertex_buffer: -7.3020077 + vertex_buffer: 3.2599182 + vertex_buffer: -0.24934387 vertex_buffer: 0.10631 vertex_buffer: 0.399956 - vertex_buffer: -5.9285183 - vertex_buffer: 2.6113186 - vertex_buffer: 1.935009 + vertex_buffer: -5.9285254 + vertex_buffer: 2.6113281 + vertex_buffer: 1.9350014 vertex_buffer: 0.209925 vertex_buffer: 0.391353 - vertex_buffer: -3.3364365 - vertex_buffer: -0.7940159 - vertex_buffer: 4.6608505 + vertex_buffer: -3.336441 + vertex_buffer: -0.79401016 + vertex_buffer: 4.6608467 vertex_buffer: 0.355808 vertex_buffer: 0.534406 - vertex_buffer: -1.073098 - vertex_buffer: -3.7692108 + vertex_buffer: -1.0730996 + vertex_buffer: -3.769209 vertex_buffer: 6.153408 vertex_buffer: 0.471751 vertex_buffer: 0.650404 - vertex_buffer: -1.0261879 + vertex_buffer: -1.0261894 vertex_buffer: -4.1541195 - vertex_buffer: 5.910328 + vertex_buffer: 5.910324 
vertex_buffer: 0.474155 vertex_buffer: 0.680192 - vertex_buffer: -2.0449853 + vertex_buffer: -2.0449882 vertex_buffer: -3.767992 - vertex_buffer: 5.6874237 + vertex_buffer: 5.687416 vertex_buffer: 0.439785 vertex_buffer: 0.657229 - vertex_buffer: -2.7272358 - vertex_buffer: -3.7194214 - vertex_buffer: 4.9273148 + vertex_buffer: -2.7272398 + vertex_buffer: -3.7194176 + vertex_buffer: 4.927311 vertex_buffer: 0.414617 vertex_buffer: 0.666541 - vertex_buffer: -1.8418827 - vertex_buffer: -4.0302353 - vertex_buffer: 5.497223 + vertex_buffer: -1.8418853 + vertex_buffer: -4.0302334 + vertex_buffer: 5.497219 vertex_buffer: 0.450374 vertex_buffer: 0.680861 - vertex_buffer: -2.4798682 - vertex_buffer: -3.8946476 - vertex_buffer: 4.8089104 + vertex_buffer: -2.4798715 + vertex_buffer: -3.8946457 + vertex_buffer: 4.808899 vertex_buffer: 0.428771 vertex_buffer: 0.682691 - vertex_buffer: -3.4621422 + vertex_buffer: -3.4621468 vertex_buffer: -4.7935104 - vertex_buffer: 3.6339455 + vertex_buffer: 3.6339378 vertex_buffer: 0.374971 vertex_buffer: 0.727805 - vertex_buffer: -0.6082711 - vertex_buffer: -2.526701 + vertex_buffer: -0.6082723 + vertex_buffer: -2.526699 vertex_buffer: 7.89365 vertex_buffer: 0.486717 vertex_buffer: 0.547629 - vertex_buffer: -0.62934816 + vertex_buffer: -0.62934923 vertex_buffer: -1.9188957 vertex_buffer: 8.013584 vertex_buffer: 0.485301 vertex_buffer: 0.527395 - vertex_buffer: -5.3245864 - vertex_buffer: 3.4533978 - vertex_buffer: 3.129799 + vertex_buffer: -5.324592 + vertex_buffer: 3.4534073 + vertex_buffer: 3.129795 vertex_buffer: 0.257765 vertex_buffer: 0.31449 - vertex_buffer: -1.8589399 - vertex_buffer: 0.70103836 - vertex_buffer: 4.5687027 + vertex_buffer: -1.8589423 + vertex_buffer: 0.701046 + vertex_buffer: 4.568695 vertex_buffer: 0.401223 vertex_buffer: 0.455172 - vertex_buffer: -2.21105 - vertex_buffer: -1.6402969 + vertex_buffer: -2.211053 + vertex_buffer: -1.6402931 vertex_buffer: 6.246376 vertex_buffer: 0.429819 vertex_buffer: 0.548615 - vertex_buffer: -2.2373412 - vertex_buffer: -1.2092209 - vertex_buffer: 5.9900475 + vertex_buffer: -2.2373445 + vertex_buffer: -1.209219 + vertex_buffer: 5.9900436 vertex_buffer: 0.421352 vertex_buffer: 0.533741 - vertex_buffer: -5.2615614 - vertex_buffer: -0.38985252 - vertex_buffer: 3.8133812 + vertex_buffer: -5.2615685 + vertex_buffer: -0.3898449 + vertex_buffer: 3.8133736 vertex_buffer: 0.276896 vertex_buffer: 0.532057 - vertex_buffer: -0.57935333 - vertex_buffer: -1.0133495 + vertex_buffer: -0.5793542 + vertex_buffer: -1.0133476 vertex_buffer: 7.52845 vertex_buffer: 0.48337 vertex_buffer: 0.499587 - vertex_buffer: -3.619615 - vertex_buffer: 3.6594543 - vertex_buffer: 4.1930656 + vertex_buffer: -3.6196196 + vertex_buffer: 3.6594658 + vertex_buffer: 4.193058 vertex_buffer: 0.337212 vertex_buffer: 0.282883 - vertex_buffer: -4.5898757 - vertex_buffer: 3.6197815 - vertex_buffer: 3.7516823 + vertex_buffer: -4.589881 + vertex_buffer: 3.619793 + vertex_buffer: 3.7516785 vertex_buffer: 0.296392 vertex_buffer: 0.293243 - vertex_buffer: -6.3526697 - vertex_buffer: 7.150507 - vertex_buffer: 0.71819305 + vertex_buffer: -6.352678 + vertex_buffer: 7.150524 + vertex_buffer: 0.7181854 vertex_buffer: 0.169295 vertex_buffer: 0.193814 - vertex_buffer: -1.0129352 - vertex_buffer: 3.025011 + vertex_buffer: -1.0129362 + vertex_buffer: 3.0250225 vertex_buffer: 4.796524 vertex_buffer: 0.44758 vertex_buffer: 0.30261 - vertex_buffer: -2.0709233 - vertex_buffer: 3.2252083 - vertex_buffer: 3.4418488 + vertex_buffer: -2.0709255 + vertex_buffer: 3.2252178 + 
vertex_buffer: 3.4418411 vertex_buffer: 0.39239 vertex_buffer: 0.353888 - vertex_buffer: -4.02202 - vertex_buffer: -3.7557068 - vertex_buffer: 3.5507584 + vertex_buffer: -4.0220246 + vertex_buffer: -3.7557049 + vertex_buffer: 3.5507507 vertex_buffer: 0.35449 vertex_buffer: 0.696784 - vertex_buffer: -6.623052 - vertex_buffer: -3.412384 - vertex_buffer: -0.97278595 + vertex_buffer: -6.6230597 + vertex_buffer: -3.4123821 + vertex_buffer: -0.9727936 vertex_buffer: 0.067305 vertex_buffer: 0.730105 - vertex_buffer: -1.7937524 - vertex_buffer: -1.979456 - vertex_buffer: 6.017292 + vertex_buffer: -1.7937549 + vertex_buffer: -1.979454 + vertex_buffer: 6.017288 vertex_buffer: 0.442739 vertex_buffer: 0.572826 - vertex_buffer: -1.2062981 + vertex_buffer: -1.2062998 vertex_buffer: -2.2445698 vertex_buffer: 6.1706963 vertex_buffer: 0.457098 vertex_buffer: 0.584792 - vertex_buffer: -3.57981 - vertex_buffer: -3.7382832 - vertex_buffer: 3.4588318 + vertex_buffer: -3.5798144 + vertex_buffer: -3.7382813 + vertex_buffer: 3.458828 vertex_buffer: 0.381974 vertex_buffer: 0.694711 - vertex_buffer: -3.3970556 - vertex_buffer: -3.7821655 - vertex_buffer: 3.5801582 + vertex_buffer: -3.3970602 + vertex_buffer: -3.7821636 + vertex_buffer: 3.5801468 vertex_buffer: 0.392389 vertex_buffer: 0.694203 - vertex_buffer: -4.9694285 - vertex_buffer: 4.1729107 - vertex_buffer: 3.340042 + vertex_buffer: -4.9694343 + vertex_buffer: 4.172926 + vertex_buffer: 3.3400345 vertex_buffer: 0.277076 vertex_buffer: 0.271932 - vertex_buffer: -2.315681 + vertex_buffer: -2.3156843 vertex_buffer: -1.7466087 - vertex_buffer: 5.9153595 + vertex_buffer: 5.9153557 vertex_buffer: 0.422552 vertex_buffer: 0.563233 - vertex_buffer: -2.4251802 - vertex_buffer: 3.4840698 - vertex_buffer: 4.546856 + vertex_buffer: -2.4251833 + vertex_buffer: 3.4840775 + vertex_buffer: 4.5468445 vertex_buffer: 0.385919 vertex_buffer: 0.281364 - vertex_buffer: -2.5284352 - vertex_buffer: 4.002569 - vertex_buffer: 4.621155 + vertex_buffer: -2.5284379 + vertex_buffer: 4.0025826 + vertex_buffer: 4.621151 vertex_buffer: 0.383103 vertex_buffer: 0.25584 - vertex_buffer: -3.5193088 - vertex_buffer: 7.9853344 - vertex_buffer: 2.9795952 + vertex_buffer: -3.5193126 + vertex_buffer: 7.9853497 + vertex_buffer: 2.9795876 vertex_buffer: 0.331431 vertex_buffer: 0.119714 - vertex_buffer: -5.6371226 - vertex_buffer: 5.7293034 - vertex_buffer: 2.1390076 + vertex_buffer: -5.6371293 + vertex_buffer: 5.7293186 + vertex_buffer: 2.1390038 vertex_buffer: 0.229924 vertex_buffer: 0.232003 - vertex_buffer: -2.971123 - vertex_buffer: 6.184988 + vertex_buffer: -2.9711266 + vertex_buffer: 6.1849995 vertex_buffer: 3.8050308 vertex_buffer: 0.364501 vertex_buffer: 0.189114 - vertex_buffer: -5.761087 - vertex_buffer: 3.9773064 - vertex_buffer: 2.541832 + vertex_buffer: -5.761094 + vertex_buffer: 3.9773197 + vertex_buffer: 2.5418282 vertex_buffer: 0.229622 vertex_buffer: 0.299541 - vertex_buffer: -6.4531307 - vertex_buffer: 5.1509743 - vertex_buffer: 1.08535 + vertex_buffer: -6.453139 + vertex_buffer: 5.1509914 + vertex_buffer: 1.0853424 vertex_buffer: 0.173287 vertex_buffer: 0.278748 - vertex_buffer: -1.0653346 - vertex_buffer: -3.9987717 - vertex_buffer: 6.0787888 + vertex_buffer: -1.0653361 + vertex_buffer: -3.9987736 + vertex_buffer: 6.078785 vertex_buffer: 0.472879 vertex_buffer: 0.666198 - vertex_buffer: -1.9331014 + vertex_buffer: -1.9331044 vertex_buffer: -3.916172 - vertex_buffer: 5.592533 + vertex_buffer: 5.5925255 vertex_buffer: 0.446828 vertex_buffer: 0.668527 - vertex_buffer: -2.5978663 + 
vertex_buffer: -2.5978699 vertex_buffer: -3.812233 - vertex_buffer: 4.909237 + vertex_buffer: 4.9092293 vertex_buffer: 0.422762 vertex_buffer: 0.67389 - vertex_buffer: -1.6448693 - vertex_buffer: -2.058361 - vertex_buffer: 5.9337997 + vertex_buffer: -1.6448718 + vertex_buffer: -2.0583591 + vertex_buffer: 5.933792 vertex_buffer: 0.445308 vertex_buffer: 0.580066 - vertex_buffer: -3.4851413 - vertex_buffer: -3.7586155 - vertex_buffer: 3.5274696 + vertex_buffer: -3.4851453 + vertex_buffer: -3.7586174 + vertex_buffer: 3.5274658 vertex_buffer: 0.388103 vertex_buffer: 0.693961 - vertex_buffer: -3.066307 + vertex_buffer: -3.066311 vertex_buffer: -4.4653883 - vertex_buffer: 3.8333206 + vertex_buffer: 3.8333168 vertex_buffer: 0.403039 vertex_buffer: 0.70654 - vertex_buffer: -3.3134317 - vertex_buffer: -3.8092175 - vertex_buffer: 3.5993462 + vertex_buffer: -3.313436 + vertex_buffer: -3.8092194 + vertex_buffer: 3.5993423 vertex_buffer: 0.403629 vertex_buffer: 0.693953 - vertex_buffer: -1.3705826 - vertex_buffer: -2.1870499 + vertex_buffer: -1.3705846 + vertex_buffer: -2.187048 vertex_buffer: 6.929592 vertex_buffer: 0.460042 vertex_buffer: 0.557139 - vertex_buffer: -2.3757484 + vertex_buffer: -2.3757515 vertex_buffer: -3.9299736 - vertex_buffer: 4.7485847 + vertex_buffer: 4.748581 vertex_buffer: 0.431158 vertex_buffer: 0.692366 - vertex_buffer: -1.7447376 + vertex_buffer: -1.7447399 vertex_buffer: -4.0353317 - vertex_buffer: 5.2845726 + vertex_buffer: 5.284565 vertex_buffer: 0.452182 vertex_buffer: 0.692366 - vertex_buffer: -0.97020155 - vertex_buffer: -4.186224 - vertex_buffer: 5.687355 + vertex_buffer: -0.97020316 + vertex_buffer: -4.186222 + vertex_buffer: 5.687351 vertex_buffer: 0.475387 vertex_buffer: 0.692366 - vertex_buffer: -1.1198573 - vertex_buffer: -7.185463 - vertex_buffer: 4.9469795 + vertex_buffer: -1.119859 + vertex_buffer: -7.185467 + vertex_buffer: 4.946968 vertex_buffer: 0.465828 vertex_buffer: 0.77919 - vertex_buffer: -1.040087 - vertex_buffer: -6.4274054 - vertex_buffer: 5.3075333 + vertex_buffer: -1.0400888 + vertex_buffer: -6.427408 + vertex_buffer: 5.3075294 vertex_buffer: 0.472329 vertex_buffer: 0.736226 - vertex_buffer: -1.0275493 - vertex_buffer: -6.0652637 - vertex_buffer: 5.31258 + vertex_buffer: -1.027551 + vertex_buffer: -6.0652666 + vertex_buffer: 5.3125763 vertex_buffer: 0.473087 vertex_buffer: 0.717857 - vertex_buffer: -0.9899579 + vertex_buffer: -0.9899597 vertex_buffer: -5.712208 - vertex_buffer: 5.0865936 + vertex_buffer: 5.086586 vertex_buffer: 0.473122 vertex_buffer: 0.704626 - vertex_buffer: -0.922419 - vertex_buffer: -5.449852 - vertex_buffer: 4.9381943 + vertex_buffer: -0.9224205 + vertex_buffer: -5.449853 + vertex_buffer: 4.938183 vertex_buffer: 0.473033 vertex_buffer: 0.695278 - vertex_buffer: -2.3016758 + vertex_buffer: -2.3016791 vertex_buffer: -4.864131 - vertex_buffer: 4.200348 + vertex_buffer: 4.2003365 vertex_buffer: 0.427942 vertex_buffer: 0.695278 - vertex_buffer: -2.4079256 - vertex_buffer: -4.9618464 - vertex_buffer: 4.3218575 + vertex_buffer: -2.4079297 + vertex_buffer: -4.9618473 + vertex_buffer: 4.32185 vertex_buffer: 0.426479 vertex_buffer: 0.70354 - vertex_buffer: -2.539156 - vertex_buffer: -5.145029 - vertex_buffer: 4.4079857 + vertex_buffer: -2.5391598 + vertex_buffer: -5.145028 + vertex_buffer: 4.407978 vertex_buffer: 0.423162 vertex_buffer: 0.711846 - vertex_buffer: -2.626017 + vertex_buffer: -2.626021 vertex_buffer: -5.3240175 - vertex_buffer: 4.311016 + vertex_buffer: 4.3110046 vertex_buffer: 0.418309 vertex_buffer: 0.720063 - 
- vertex_buffer: -3.234158
- vertex_buffer: -3.0173168
- vertex_buffer: 4.740303
+ vertex_buffer: -3.2341623
+ vertex_buffer: -3.017315
+ vertex_buffer: 4.740299
  vertex_buffer: 0.390095
  vertex_buffer: 0.639573
- vertex_buffer: -7.579906
- vertex_buffer: 0.5933876
- vertex_buffer: -2.304924
+ vertex_buffer: -7.5799155
+ vertex_buffer: 0.59339714
+ vertex_buffer: -2.3049393
  vertex_buffer: 0.013954
  vertex_buffer: 0.560034
- vertex_buffer: -0.07802668
- vertex_buffer: -2.6862774
- vertex_buffer: 6.828209
+ vertex_buffer: -0.07802707
+ vertex_buffer: -2.6862755
+ vertex_buffer: 6.828205
  vertex_buffer: 0.499914
  vertex_buffer: 0.580147
[... remainder of the regenerated vertex_buffer hunk elided: the mesh stores five floats per vertex (x, y, z position followed by u, v texture coordinates); changed coordinates appear as paired -/+ lines that differ only in their trailing decimal places (deltas on the order of 1e-5), while unchanged values (the u/v texture coordinates in particular) remain context lines ...]
- vertex_buffer: 0.9179589
- vertex_buffer: -2.3308792
- vertex_buffer: 7.255516
+ vertex_buffer: 0.9179598
+ vertex_buffer: -2.3308773
+ vertex_buffer: 7.255512
vertex_buffer: 0.531529 vertex_buffer: 0.555057 - vertex_buffer: 1.8029492 - vertex_buffer: -1.9879112 - vertex_buffer: 5.845417 + vertex_buffer: 1.8029512 + vertex_buffer: -1.9879055 + vertex_buffer: 5.845413 vertex_buffer: 0.566036 vertex_buffer: 0.582329 - vertex_buffer: 0.44685143 + vertex_buffer: 0.44685158 vertex_buffer: -2.682705 - vertex_buffer: 7.525955 + vertex_buffer: 7.5259476 vertex_buffer: 0.516311 vertex_buffer: 0.563054 - vertex_buffer: 0.40712923 - vertex_buffer: -2.5848274 - vertex_buffer: 6.774906 + vertex_buffer: 0.40712938 + vertex_buffer: -2.5848255 + vertex_buffer: 6.7748985 vertex_buffer: 0.517472 vertex_buffer: 0.577877 - vertex_buffer: 1.5503142 - vertex_buffer: 2.5380096 + vertex_buffer: 1.5503162 + vertex_buffer: 2.5380173 vertex_buffer: 3.3313866 vertex_buffer: 0.573595 vertex_buffer: 0.389807 - vertex_buffer: 1.2257941 - vertex_buffer: 2.2664852 - vertex_buffer: 3.7727013 + vertex_buffer: 1.2257959 + vertex_buffer: 2.266489 + vertex_buffer: 3.7726974 vertex_buffer: 0.560698 vertex_buffer: 0.395332 - vertex_buffer: 1.0358994 - vertex_buffer: 1.9741764 - vertex_buffer: 4.3474655 + vertex_buffer: 1.0359008 + vertex_buffer: 1.974184 + vertex_buffer: 4.3474617 vertex_buffer: 0.549756 vertex_buffer: 0.399751 - vertex_buffer: 4.367508 - vertex_buffer: 2.8476372 - vertex_buffer: 2.9235992 + vertex_buffer: 4.3675127 + vertex_buffer: 2.8476448 + vertex_buffer: 2.9235954 vertex_buffer: 0.710288 vertex_buffer: 0.368253 - vertex_buffer: 4.9116855 - vertex_buffer: 3.011652 + vertex_buffer: 4.911691 + vertex_buffer: 3.0116596 vertex_buffer: 2.8815002 vertex_buffer: 0.72333 vertex_buffer: 0.363373 @@ -5041,20 +5041,20 @@ mesh { pose_transform_matrix { rows: 4 cols: 4 - packed_data: 0.99995184 - packed_data: 0.006250852 - packed_data: -0.0075720036 + packed_data: 0.9999519 + packed_data: 0.006250915 + packed_data: -0.007572061 packed_data: 0 - packed_data: -0.0060578818 - packed_data: 0.9996628 - packed_data: 0.025243768 + packed_data: -0.006057945 + packed_data: 0.9996629 + packed_data: 0.025243366 packed_data: 0 - packed_data: 0.0077272463 - packed_data: -0.025196675 - packed_data: 0.9996526 + packed_data: 0.0077273026 + packed_data: -0.025196271 + packed_data: 0.9996527 packed_data: 0 - packed_data: -0.35120884 - packed_data: 21.932339 - packed_data: -64.35148 + packed_data: -0.3512094 + packed_data: 21.932364 + packed_data: -64.351555 packed_data: 1 } diff --git a/mediapipe/tasks/web/audio/audio_classifier/BUILD b/mediapipe/tasks/web/audio/audio_classifier/BUILD index a94b4931d9..14c2d453cf 100644 --- a/mediapipe/tasks/web/audio/audio_classifier/BUILD +++ b/mediapipe/tasks/web/audio/audio_classifier/BUILD @@ -64,5 +64,5 @@ mediapipe_ts_library( jasmine_node_test( name = "audio_classifier_test", - deps = [":audio_classifier_test_lib"], + srcs = [":audio_classifier_test_lib"], ) diff --git a/mediapipe/tasks/web/audio/audio_embedder/BUILD b/mediapipe/tasks/web/audio/audio_embedder/BUILD index 61afacfdb8..12929029ff 100644 --- a/mediapipe/tasks/web/audio/audio_embedder/BUILD +++ b/mediapipe/tasks/web/audio/audio_embedder/BUILD @@ -61,5 +61,5 @@ mediapipe_ts_library( jasmine_node_test( name = "audio_embedder_test", - deps = [":audio_embedder_test_lib"], + srcs = [":audio_embedder_test_lib"], ) diff --git a/mediapipe/tasks/web/components/processors/BUILD b/mediapipe/tasks/web/components/processors/BUILD index d81fbc79a4..d986bf7262 100644 --- a/mediapipe/tasks/web/components/processors/BUILD +++ b/mediapipe/tasks/web/components/processors/BUILD @@ -17,7 +17,7 @@ 
mediapipe_ts_library( mediapipe_ts_library( name = "classifier_options_test_lib", testonly = True, - srcs = ["classifier_options.test.ts"], + srcs = ["classifier_options_test.ts"], deps = [ ":classifier_options", "//mediapipe/tasks/cc/components/processors/proto:classifier_options_jspb_proto", @@ -27,7 +27,7 @@ mediapipe_ts_library( jasmine_node_test( name = "classifier_options_test", - deps = [":classifier_options_test_lib"], + srcs = [":classifier_options_test_lib"], ) mediapipe_ts_library( @@ -43,7 +43,7 @@ mediapipe_ts_library( mediapipe_ts_library( name = "classifier_result_test_lib", testonly = True, - srcs = ["classifier_result.test.ts"], + srcs = ["classifier_result_test.ts"], deps = [ ":classifier_result", "//mediapipe/framework/formats:classification_jspb_proto", @@ -53,7 +53,7 @@ mediapipe_ts_library( jasmine_node_test( name = "classifier_result_test", - deps = [":classifier_result_test_lib"], + srcs = [":classifier_result_test_lib"], ) mediapipe_ts_library( @@ -68,7 +68,7 @@ mediapipe_ts_library( mediapipe_ts_library( name = "detection_result_test_lib", testonly = True, - srcs = ["detection_result.test.ts"], + srcs = ["detection_result_test.ts"], deps = [ ":detection_result", "//mediapipe/framework/formats:detection_jspb_proto", @@ -89,7 +89,7 @@ mediapipe_ts_library( mediapipe_ts_library( name = "embedder_result_test_lib", testonly = True, - srcs = ["embedder_result.test.ts"], + srcs = ["embedder_result_test.ts"], deps = [ ":embedder_result", "//mediapipe/tasks/cc/components/containers/proto:embeddings_jspb_proto", @@ -98,7 +98,7 @@ mediapipe_ts_library( jasmine_node_test( name = "embedder_result_test", - deps = [":embedder_result_test_lib"], + srcs = [":embedder_result_test_lib"], ) mediapipe_ts_library( @@ -113,7 +113,7 @@ mediapipe_ts_library( mediapipe_ts_library( name = "embedder_options_test_lib", testonly = True, - srcs = ["embedder_options.test.ts"], + srcs = ["embedder_options_test.ts"], deps = [ ":embedder_options", "//mediapipe/tasks/cc/components/processors/proto:embedder_options_jspb_proto", @@ -123,7 +123,7 @@ mediapipe_ts_library( jasmine_node_test( name = "embedder_options_test", - deps = [":embedder_options_test_lib"], + srcs = [":embedder_options_test_lib"], ) mediapipe_ts_library( @@ -141,11 +141,11 @@ mediapipe_ts_library( mediapipe_ts_library( name = "landmark_result_test_lib", testonly = True, - srcs = ["landmark_result.test.ts"], + srcs = ["landmark_result_test.ts"], deps = [":landmark_result"], ) jasmine_node_test( name = "landmark_result_test", - deps = [":landmark_result_test_lib"], + srcs = [":landmark_result_test_lib"], ) diff --git a/mediapipe/tasks/web/components/processors/classifier_options.test.ts b/mediapipe/tasks/web/components/processors/classifier_options_test.ts similarity index 100% rename from mediapipe/tasks/web/components/processors/classifier_options.test.ts rename to mediapipe/tasks/web/components/processors/classifier_options_test.ts diff --git a/mediapipe/tasks/web/components/processors/classifier_result.test.ts b/mediapipe/tasks/web/components/processors/classifier_result_test.ts similarity index 100% rename from mediapipe/tasks/web/components/processors/classifier_result.test.ts rename to mediapipe/tasks/web/components/processors/classifier_result_test.ts diff --git a/mediapipe/tasks/web/components/processors/detection_result.test.ts b/mediapipe/tasks/web/components/processors/detection_result_test.ts similarity index 100% rename from mediapipe/tasks/web/components/processors/detection_result.test.ts rename to 
mediapipe/tasks/web/components/processors/detection_result_test.ts diff --git a/mediapipe/tasks/web/components/processors/embedder_options.test.ts b/mediapipe/tasks/web/components/processors/embedder_options_test.ts similarity index 100% rename from mediapipe/tasks/web/components/processors/embedder_options.test.ts rename to mediapipe/tasks/web/components/processors/embedder_options_test.ts diff --git a/mediapipe/tasks/web/components/processors/embedder_result.test.ts b/mediapipe/tasks/web/components/processors/embedder_result_test.ts similarity index 100% rename from mediapipe/tasks/web/components/processors/embedder_result.test.ts rename to mediapipe/tasks/web/components/processors/embedder_result_test.ts diff --git a/mediapipe/tasks/web/components/processors/landmark_result.test.ts b/mediapipe/tasks/web/components/processors/landmark_result_test.ts similarity index 100% rename from mediapipe/tasks/web/components/processors/landmark_result.test.ts rename to mediapipe/tasks/web/components/processors/landmark_result_test.ts diff --git a/mediapipe/tasks/web/components/utils/BUILD b/mediapipe/tasks/web/components/utils/BUILD index 4844294cd8..3b84751e1d 100644 --- a/mediapipe/tasks/web/components/utils/BUILD +++ b/mediapipe/tasks/web/components/utils/BUILD @@ -14,7 +14,7 @@ mediapipe_ts_library( mediapipe_ts_library( name = "cosine_similarity_test_lib", testonly = True, - srcs = ["cosine_similarity.test.ts"], + srcs = ["cosine_similarity_test.ts"], deps = [ ":cosine_similarity", "//mediapipe/tasks/web/components/containers:embedding_result", @@ -23,5 +23,5 @@ mediapipe_ts_library( jasmine_node_test( name = "cosine_similarity_test", - deps = [":cosine_similarity_test_lib"], + srcs = [":cosine_similarity_test_lib"], ) diff --git a/mediapipe/tasks/web/components/utils/cosine_similarity.test.ts b/mediapipe/tasks/web/components/utils/cosine_similarity_test.ts similarity index 100% rename from mediapipe/tasks/web/components/utils/cosine_similarity.test.ts rename to mediapipe/tasks/web/components/utils/cosine_similarity_test.ts diff --git a/mediapipe/tasks/web/core/BUILD b/mediapipe/tasks/web/core/BUILD index 51c8b3c378..156946abd1 100644 --- a/mediapipe/tasks/web/core/BUILD +++ b/mediapipe/tasks/web/core/BUILD @@ -74,7 +74,7 @@ mediapipe_ts_library( jasmine_node_test( name = "task_runner_test", - deps = [":task_runner_test_lib"], + srcs = [":task_runner_test_lib"], ) mediapipe_ts_declaration( diff --git a/mediapipe/tasks/web/text/language_detector/BUILD b/mediapipe/tasks/web/text/language_detector/BUILD index da47a8d6c0..802873e988 100644 --- a/mediapipe/tasks/web/text/language_detector/BUILD +++ b/mediapipe/tasks/web/text/language_detector/BUILD @@ -62,5 +62,5 @@ mediapipe_ts_library( jasmine_node_test( name = "language_detector_test", - deps = [":language_detector_test_lib"], + srcs = [":language_detector_test_lib"], ) diff --git a/mediapipe/tasks/web/text/text_classifier/BUILD b/mediapipe/tasks/web/text/text_classifier/BUILD index 61af214060..74a4a6fecb 100644 --- a/mediapipe/tasks/web/text/text_classifier/BUILD +++ b/mediapipe/tasks/web/text/text_classifier/BUILD @@ -65,5 +65,5 @@ mediapipe_ts_library( jasmine_node_test( name = "text_classifier_test", - deps = [":text_classifier_test_lib"], + srcs = [":text_classifier_test_lib"], ) diff --git a/mediapipe/tasks/web/text/text_embedder/BUILD b/mediapipe/tasks/web/text/text_embedder/BUILD index eb3efaddae..8b120d51fb 100644 --- a/mediapipe/tasks/web/text/text_embedder/BUILD +++ b/mediapipe/tasks/web/text/text_embedder/BUILD @@ -63,5 +63,5 @@ 
mediapipe_ts_library( jasmine_node_test( name = "text_embedder_test", - deps = [":text_embedder_test_lib"], + srcs = [":text_embedder_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/core/BUILD b/mediapipe/tasks/web/vision/core/BUILD index 2c0b61e417..867444a0f4 100644 --- a/mediapipe/tasks/web/vision/core/BUILD +++ b/mediapipe/tasks/web/vision/core/BUILD @@ -61,7 +61,7 @@ mediapipe_ts_library( jasmine_node_test( name = "drawing_utils_test", - deps = [":drawing_utils_test_lib"], + srcs = [":drawing_utils_test_lib"], ) mediapipe_ts_library( @@ -87,7 +87,7 @@ mediapipe_ts_library( jasmine_node_test( name = "image_test", - deps = [":image_test_lib"], + srcs = [":image_test_lib"], ) mediapipe_ts_library( @@ -113,7 +113,7 @@ mediapipe_ts_library( jasmine_node_test( name = "mask_test", - deps = [":mask_test_lib"], + srcs = [":mask_test_lib"], ) mediapipe_ts_library( @@ -138,7 +138,7 @@ mediapipe_ts_library( mediapipe_ts_library( name = "vision_task_runner_test_lib", testonly = True, - srcs = ["vision_task_runner.test.ts"], + srcs = ["vision_task_runner_test.ts"], deps = [ ":image_processing_options", ":vision_task_options", @@ -152,5 +152,5 @@ mediapipe_ts_library( jasmine_node_test( name = "vision_task_runner_test", - deps = [":vision_task_runner_test_lib"], + srcs = [":vision_task_runner_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/core/vision_task_runner.test.ts b/mediapipe/tasks/web/vision/core/vision_task_runner_test.ts similarity index 100% rename from mediapipe/tasks/web/vision/core/vision_task_runner.test.ts rename to mediapipe/tasks/web/vision/core/vision_task_runner_test.ts diff --git a/mediapipe/tasks/web/vision/face_detector/BUILD b/mediapipe/tasks/web/vision/face_detector/BUILD index b5d6d04910..cb76a21ea4 100644 --- a/mediapipe/tasks/web/vision/face_detector/BUILD +++ b/mediapipe/tasks/web/vision/face_detector/BUILD @@ -66,6 +66,6 @@ mediapipe_ts_library( jasmine_node_test( name = "face_detector_test", + srcs = [":face_detector_test_lib"], tags = ["nomsan"], - deps = [":face_detector_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/face_landmarker/BUILD b/mediapipe/tasks/web/vision/face_landmarker/BUILD index 4a5e36a1bd..f9daeda67b 100644 --- a/mediapipe/tasks/web/vision/face_landmarker/BUILD +++ b/mediapipe/tasks/web/vision/face_landmarker/BUILD @@ -85,6 +85,6 @@ mediapipe_ts_library( jasmine_node_test( name = "face_landmarker_test", + srcs = [":face_landmarker_test_lib"], tags = ["nomsan"], - deps = [":face_landmarker_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/face_stylizer/BUILD b/mediapipe/tasks/web/vision/face_stylizer/BUILD index 253fdee387..da3bafbeeb 100644 --- a/mediapipe/tasks/web/vision/face_stylizer/BUILD +++ b/mediapipe/tasks/web/vision/face_stylizer/BUILD @@ -53,6 +53,6 @@ mediapipe_ts_library( jasmine_node_test( name = "face_stylizer_test", + srcs = [":face_stylizer_test_lib"], tags = ["nomsan"], - deps = [":face_stylizer_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/gesture_recognizer/BUILD b/mediapipe/tasks/web/vision/gesture_recognizer/BUILD index c74b2a01eb..a9df1493a0 100644 --- a/mediapipe/tasks/web/vision/gesture_recognizer/BUILD +++ b/mediapipe/tasks/web/vision/gesture_recognizer/BUILD @@ -75,6 +75,6 @@ mediapipe_ts_library( jasmine_node_test( name = "gesture_recognizer_test", + srcs = [":gesture_recognizer_test_lib"], tags = ["nomsan"], - deps = [":gesture_recognizer_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/hand_landmarker/BUILD b/mediapipe/tasks/web/vision/hand_landmarker/BUILD index 8d2111de65..3137a712ba 
100644 --- a/mediapipe/tasks/web/vision/hand_landmarker/BUILD +++ b/mediapipe/tasks/web/vision/hand_landmarker/BUILD @@ -71,8 +71,8 @@ mediapipe_ts_library( jasmine_node_test( name = "hand_landmarker_test", + srcs = [":hand_landmarker_test_lib"], tags = ["nomsan"], - deps = [":hand_landmarker_test_lib"], ) mediapipe_ts_library( diff --git a/mediapipe/tasks/web/vision/holistic_landmarker/BUILD b/mediapipe/tasks/web/vision/holistic_landmarker/BUILD index 4f613ffdbf..93d5b85421 100644 --- a/mediapipe/tasks/web/vision/holistic_landmarker/BUILD +++ b/mediapipe/tasks/web/vision/holistic_landmarker/BUILD @@ -80,8 +80,8 @@ mediapipe_ts_library( jasmine_node_test( name = "holistic_landmarker_test", + srcs = [":holistic_landmarker_test_lib"], tags = ["nomsan"], - deps = [":holistic_landmarker_test_lib"], ) mediapipe_ts_library( diff --git a/mediapipe/tasks/web/vision/image_classifier/BUILD b/mediapipe/tasks/web/vision/image_classifier/BUILD index a163bb3ab3..1591865a1d 100644 --- a/mediapipe/tasks/web/vision/image_classifier/BUILD +++ b/mediapipe/tasks/web/vision/image_classifier/BUILD @@ -66,6 +66,6 @@ mediapipe_ts_library( jasmine_node_test( name = "image_classifier_test", + srcs = [":image_classifier_test_lib"], tags = ["nomsan"], - deps = [":image_classifier_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/image_embedder/BUILD b/mediapipe/tasks/web/vision/image_embedder/BUILD index e45221e732..be0c8208bb 100644 --- a/mediapipe/tasks/web/vision/image_embedder/BUILD +++ b/mediapipe/tasks/web/vision/image_embedder/BUILD @@ -65,5 +65,5 @@ mediapipe_ts_library( jasmine_node_test( name = "image_embedder_test", - deps = [":image_embedder_test_lib"], + srcs = [":image_embedder_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/image_segmenter/BUILD b/mediapipe/tasks/web/vision/image_segmenter/BUILD index 14688f36b8..c8bfee7c33 100644 --- a/mediapipe/tasks/web/vision/image_segmenter/BUILD +++ b/mediapipe/tasks/web/vision/image_segmenter/BUILD @@ -56,6 +56,6 @@ mediapipe_ts_library( jasmine_node_test( name = "image_segmenter_test", + srcs = [":image_segmenter_test_lib"], tags = ["nomsan"], - deps = [":image_segmenter_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/interactive_segmenter/BUILD b/mediapipe/tasks/web/vision/interactive_segmenter/BUILD index a78f088b56..3fc3382678 100644 --- a/mediapipe/tasks/web/vision/interactive_segmenter/BUILD +++ b/mediapipe/tasks/web/vision/interactive_segmenter/BUILD @@ -61,6 +61,6 @@ mediapipe_ts_library( jasmine_node_test( name = "interactive_segmenter_test", + srcs = [":interactive_segmenter_test_lib"], tags = ["nomsan"], - deps = [":interactive_segmenter_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/object_detector/BUILD b/mediapipe/tasks/web/vision/object_detector/BUILD index 0e8387e27e..74fef73527 100644 --- a/mediapipe/tasks/web/vision/object_detector/BUILD +++ b/mediapipe/tasks/web/vision/object_detector/BUILD @@ -66,6 +66,6 @@ mediapipe_ts_library( jasmine_node_test( name = "object_detector_test", + srcs = [":object_detector_test_lib"], tags = ["nomsan"], - deps = [":object_detector_test_lib"], ) diff --git a/mediapipe/tasks/web/vision/pose_landmarker/BUILD b/mediapipe/tasks/web/vision/pose_landmarker/BUILD index a0ad1da8bc..5b9b7bf10b 100644 --- a/mediapipe/tasks/web/vision/pose_landmarker/BUILD +++ b/mediapipe/tasks/web/vision/pose_landmarker/BUILD @@ -64,8 +64,8 @@ mediapipe_ts_library( jasmine_node_test( name = "pose_landmarker_test", + srcs = [":pose_landmarker_test_lib"], tags = ["nomsan"], - deps = [":pose_landmarker_test_lib"], ) 
mediapipe_ts_library( diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index f783b9f5c0..d8cccf0b46 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -97,6 +97,13 @@ cc_library( }), ) +cc_library( + name = "fd_test_util", + testonly = True, + hdrs = ["fd_test_util.h"], + visibility = ["//mediapipe:__subpackages__"], +) + cc_library( name = "header_util", srcs = ["header_util.cc"], @@ -201,11 +208,12 @@ cc_library( hdrs = ["sync_wait.h"], visibility = ["//mediapipe:__subpackages__"], deps = [ + "//mediapipe/framework/formats:shared_fd", + "//mediapipe/framework/formats:unique_fd", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", - "@com_google_absl//absl/cleanup", - "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/time", ], @@ -217,6 +225,8 @@ cc_test( deps = [ ":sync_wait", "//mediapipe/framework:port", + "//mediapipe/framework/formats:shared_fd", + "//mediapipe/framework/formats:unique_fd", "//mediapipe/framework/port:benchmark", "//mediapipe/framework/port:gtest_main", "@com_google_absl//absl/log:absl_check", @@ -271,7 +281,6 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:singleton", "//mediapipe/framework/port:status", - "//mediapipe/framework/port:statusor", "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", diff --git a/mediapipe/util/audio_decoder.cc b/mediapipe/util/audio_decoder.cc index c0697dce42..dfae851d7c 100644 --- a/mediapipe/util/audio_decoder.cc +++ b/mediapipe/util/audio_decoder.cc @@ -38,9 +38,11 @@ extern "C" { #include "libavcodec/avcodec.h" #include "libavformat/avformat.h" +#include "libavformat/version.h" #include "libavutil/avutil.h" #include "libavutil/mem.h" #include "libavutil/samplefmt.h" +#include "libavutil/version.h" } ABSL_FLAG(int64_t, media_decoder_allowed_audio_gap_merge, 5, @@ -208,11 +210,9 @@ absl::Status LogStatus(const absl::Status& status, class AVPacketDeleter { public: - void operator()(void* x) const { - AVPacket* packet = static_cast<AVPacket*>(x); - if (packet) { - av_free_packet(packet); - delete packet; + void operator()(AVPacket* packet) const { + if (packet != nullptr) { + av_packet_free(&packet); } } }; @@ -241,7 +241,7 @@ absl::Status BasePacketProcessor::GetData(Packet* packet) { absl::Status BasePacketProcessor::Flush() { int64_t last_num_frames_processed; do { - std::unique_ptr<AVPacket, AVPacketDeleter> av_packet(new AVPacket()); + std::unique_ptr<AVPacket, AVPacketDeleter> av_packet(av_packet_alloc()); av_init_packet(av_packet.get()); av_packet->size = 0; av_packet->data = nullptr; @@ -592,7 +592,11 @@ int64_t AudioPacketProcessor::MaybeCorrectPtsForRollover(int64_t media_pts) { } // AudioDecoder -AudioDecoder::AudioDecoder() { av_register_all(); } +AudioDecoder::AudioDecoder() { +#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 79, 100) + av_register_all(); +#endif +} AudioDecoder::~AudioDecoder() { absl::Status status = Close(); @@ -771,7 +775,7 @@ absl::Status AudioDecoder::FillAudioHeader( } absl::Status AudioDecoder::ProcessPacket() { - std::unique_ptr<AVPacket, AVPacketDeleter> av_packet(new AVPacket()); + std::unique_ptr<AVPacket, AVPacketDeleter> av_packet(av_packet_alloc()); av_init_packet(av_packet.get()); av_packet->size = 0; av_packet->data = nullptr; diff --git a/mediapipe/util/fd_test_util.h b/mediapipe/util/fd_test_util.h new file mode 100644 index 0000000000..0d2241d91c --- /dev/null +++ b/mediapipe/util/fd_test_util.h @@ -0,0 +1,17 @@ +#ifndef
MEDIAPIPE_UTIL_FD_TEST_UTIL_H_ +#define MEDIAPIPE_UTIL_FD_TEST_UTIL_H_ + +#include <fcntl.h> +#include <unistd.h> + +namespace mediapipe { + +// Returns a valid system file descriptor. +inline int GetValidFd() { return dup(STDOUT_FILENO); } + +// Helper function to check if the file descriptor is valid (still open). +inline int IsFdValid(int fd) { return fcntl(fd, F_GETFD) != -1; } + +} // namespace mediapipe + +#endif // MEDIAPIPE_UTIL_FD_TEST_UTIL_H_ diff --git a/mediapipe/util/resource_util.h b/mediapipe/util/resource_util.h index 40f6839b84..c8a0e93914 100644 --- a/mediapipe/util/resource_util.h +++ b/mediapipe/util/resource_util.h @@ -28,9 +28,9 @@ namespace mediapipe { // - If the input path is an absolute path, it is returned as-is. // - If the input path is relative, it is searched in a platform-specific // location: -// - On Android, we look for an asset with the given relative path; if -// it exists, it is copied to the file system (using the AssetCache), -// and a path to that file is returned. +// - On Android with `shadow_copy`, we look for an asset with the given +// relative path; if it exists, it is copied to the file system (using +// the AssetCache), and a path to that file is returned. // - On iOS, we look for a resource with the given relative path in the // application bundle. // @@ -38,9 +38,9 @@ namespace mediapipe { // Note: This function should be used by code that needs a resource to be // accessible as a normal file, usually to call an existing API that only // accepts file paths. Code that can access data as a stream or as a buffer -// should read from an asset directly on Android; an API for this will be -// provided later. TODO. -absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path); +// should use the Resources API (see below). +absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path, + bool shadow_copy = true); // DEPRECATED: use `CalculatorContext::GetResources` and // `SubgraphContext::GetResources` which allow for fine grained per graph diff --git a/mediapipe/util/resource_util_android.cc b/mediapipe/util/resource_util_android.cc index 8678b97312..431b9dca3d 100644 --- a/mediapipe/util/resource_util_android.cc +++ b/mediapipe/util/resource_util_android.cc @@ -15,11 +15,13 @@ #include #include "absl/log/absl_log.h" +#include "absl/status/statusor.h" #include "absl/strings/match.h" +#include "absl/strings/str_cat.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/singleton.h" -#include "mediapipe/framework/port/statusor.h" +#include "mediapipe/framework/port/status_builder.h" #include "mediapipe/util/android/asset_manager_util.h" #include "mediapipe/util/android/file/base/helpers.h" @@ -27,7 +29,11 @@ namespace mediapipe { namespace { absl::StatusOr<std::string> PathToResourceAsFileInternal( - const std::string& path) { + const std::string& path, bool shadow_copy) { + if (!shadow_copy) { + return absl::UnavailableError(absl::StrCat( + "Not copying asset '", path, "' due to `shadow_copy == false`")); + } return Singleton<AssetManager>::get()->CachedFileFromAsset(path); } } // namespace @@ -65,7 +71,8 @@ absl::Status DefaultGetResourceContents(const std::string& path, } } // namespace internal -absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path) { +absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path, + bool shadow_copy) { // Return full path.
if (absl::StartsWith(path, "/")) { return path; @@ -73,7 +80,7 @@ absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path) { // Try to load a relative path or a base filename as is. { - auto status_or_path = PathToResourceAsFileInternal(path); + auto status_or_path = PathToResourceAsFileInternal(path, shadow_copy); if (status_or_path.ok()) { ABSL_LOG(INFO) << "Successfully loaded: " << path; return status_or_path; @@ -86,7 +93,7 @@ absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path) { RET_CHECK(last_slash_idx != std::string::npos) << path << " doesn't have a slash in it"; // Make sure it's a path. auto base_name = path.substr(last_slash_idx + 1); - auto status_or_path = PathToResourceAsFileInternal(base_name); + auto status_or_path = PathToResourceAsFileInternal(base_name, shadow_copy); if (status_or_path.ok()) { ABSL_LOG(INFO) << "Successfully loaded: " << base_name; return status_or_path; diff --git a/mediapipe/util/resource_util_apple.cc b/mediapipe/util/resource_util_apple.cc index b78be35824..d2ba375b4b 100644 --- a/mediapipe/util/resource_util_apple.cc +++ b/mediapipe/util/resource_util_apple.cc @@ -18,10 +18,10 @@ #include #include "absl/log/absl_log.h" +#include "absl/status/statusor.h" #include "absl/strings/match.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/statusor.h" #include "mediapipe/util/resource_util.h" namespace mediapipe { @@ -55,7 +55,8 @@ absl::Status DefaultGetResourceContents(const std::string& path, } } // namespace internal -absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path) { +absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path, + bool /*shadow_copy*/) { // Return full path. if (absl::StartsWith(path, "/")) { return path; } diff --git a/mediapipe/util/resource_util_default.cc b/mediapipe/util/resource_util_default.cc index 3ebbd1f34b..9197295481 100644 --- a/mediapipe/util/resource_util_default.cc +++ b/mediapipe/util/resource_util_default.cc @@ -15,9 +15,9 @@ #include #include "absl/flags/flag.h" +#include "absl/status/statusor.h" #include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/port/file_helpers.h" -#include "mediapipe/framework/port/statusor.h" ABSL_FLAG( std::string, resource_root_dir, "", @@ -38,7 +38,8 @@ absl::Status DefaultGetResourceContents(const std::string& path, } } // namespace internal -absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path) { +absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path, + bool /*shadow_copy*/) { if (absl::StartsWith(path, "/")) { return path; } diff --git a/mediapipe/util/resource_util_emscripten.cc b/mediapipe/util/resource_util_emscripten.cc index 1243ad115f..ca3d386dc4 100644 --- a/mediapipe/util/resource_util_emscripten.cc +++ b/mediapipe/util/resource_util_emscripten.cc @@ -16,14 +16,16 @@ #include #include "absl/log/absl_log.h" +#include "absl/status/statusor.h" #include "absl/strings/str_format.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/statusor.h" +#include "mediapipe/util/resource_util.h" namespace mediapipe { -absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path) { +absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path, + bool /*shadow_copy*/) { if (absl::StartsWith(path, "/")) { return path; } diff --git a/mediapipe/util/resource_util_windows.cc b/mediapipe/util/resource_util_windows.cc index 6d620e58c4..e28094eef3 100644 ---
a/mediapipe/util/resource_util_windows.cc +++ b/mediapipe/util/resource_util_windows.cc @@ -15,10 +15,10 @@ #include #include "absl/flags/flag.h" +#include "absl/status/statusor.h" #include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/singleton.h" -#include "mediapipe/framework/port/statusor.h" #include "tools/cpp/runfiles/runfiles.h" ABSL_FLAG( @@ -70,7 +70,8 @@ absl::Status DefaultGetResourceContents(const std::string& path, } // namespace internal -absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path) { +absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path, + bool /*shadow_copy*/) { std::string qualified_path = path; if (absl::StartsWith(qualified_path, "./")) { qualified_path = "mediapipe" + qualified_path.substr(1); diff --git a/mediapipe/util/sequence/BUILD b/mediapipe/util/sequence/BUILD index 2385830433..59b0888c78 100644 --- a/mediapipe/util/sequence/BUILD +++ b/mediapipe/util/sequence/BUILD @@ -25,6 +25,7 @@ cc_library( hdrs = ["media_sequence_util.h"], visibility = [ "//home/interaction:__subpackages__", + "//learning/eval/canon/util/conversion/tensorflow/mediasequence:__pkg__", "//mediapipe:__subpackages__", ], deps = [ diff --git a/mediapipe/util/sync_wait.cc b/mediapipe/util/sync_wait.cc index 14ed0a320e..72c87b7d59 100644 --- a/mediapipe/util/sync_wait.cc +++ b/mediapipe/util/sync_wait.cc @@ -7,8 +7,11 @@ #include #include "absl/status/status.h" +#include "absl/status/statusor.h" #include "absl/strings/str_format.h" #include "absl/time/time.h" +#include "mediapipe/framework/formats/shared_fd.h" +#include "mediapipe/framework/formats/unique_fd.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status_macros.h" @@ -46,4 +49,45 @@ absl::Status SyncWait(int fd, absl::Duration timeout) { absl::StrFormat("Failed to wait for fd: %d.", fd)); } +absl::Status SyncWait(const UniqueFd& fd, absl::Duration timeout) { + RET_CHECK(fd.IsValid()); + return SyncWait(fd.Get(), timeout); +} + +absl::Status SyncWait(const SharedFd& fd, absl::Duration timeout) { + RET_CHECK(fd); + return SyncWait(fd.Get(), timeout); +} + +absl::StatusOr<bool> IsSignaled(int fd) { + RET_CHECK_GE(fd, 0) << "Invalid file descriptor."; + + struct pollfd fds; + fds.fd = fd; + fds.events = POLLIN; + int ret; + do { + ret = poll(&fds, 1, /*timeout_millis=*/0); + if (ret == 1) { + RET_CHECK((fds.revents & POLLERR) == 0); + RET_CHECK((fds.revents & POLLNVAL) == 0); + return true; + } else if (ret == 0) { + return false; + } + } while (ret == -1 && (errno == EINTR || errno == EAGAIN)); + + return absl::ErrnoToStatus( + errno, absl::StrFormat("Failed to check if fd: %d is signaled.", fd)); +} + +absl::StatusOr<bool> IsSignaled(const UniqueFd& fd) { + return IsSignaled(fd.Get()); +} + +absl::StatusOr<bool> IsSignaled(const SharedFd& fd) { + RET_CHECK(fd); + return IsSignaled(fd.Get()); +} + } // namespace mediapipe diff --git a/mediapipe/util/sync_wait.h b/mediapipe/util/sync_wait.h index ed99f6ede2..62935a8939 100644 --- a/mediapipe/util/sync_wait.h +++ b/mediapipe/util/sync_wait.h @@ -2,7 +2,10 @@ #define MEDIAPIPE_UTIL_SYNC_WAIT_H_ #include "absl/status/status.h" +#include "absl/status/statusor.h" #include "absl/time/time.h" +#include "mediapipe/framework/formats/shared_fd.h" +#include "mediapipe/framework/formats/unique_fd.h" namespace mediapipe { @@ -11,6 +14,25 @@ namespace mediapipe { // signaled.
absl::Status SyncWait(int fd, absl::Duration timeout); +// `fd` - represents a sync +// `timeout` - wait timeout, pass `absl::InfiniteDuration()` to wait until +// signaled. +absl::Status SyncWait(const UniqueFd& fd, absl::Duration timeout); + +// `fd` - represents a sync +// `timeout` - wait timeout, pass `absl::InfiniteDuration()` to wait until +// signaled. +absl::Status SyncWait(const SharedFd& fd, absl::Duration timeout); + +// Checks if sync represented by `fd` is signaled. +absl::StatusOr<bool> IsSignaled(int fd); + +// Checks if sync represented by `fd` is signaled. +absl::StatusOr<bool> IsSignaled(const UniqueFd& fd); + +// Checks if sync represented by `fd` is signaled. +absl::StatusOr<bool> IsSignaled(const SharedFd& fd); + } // namespace mediapipe #endif // MEDIAPIPE_UTIL_SYNC_WAIT_H_ diff --git a/mediapipe/util/sync_wait_test.cc b/mediapipe/util/sync_wait_test.cc index 803e9d1701..dde1dae66b 100644 --- a/mediapipe/util/sync_wait_test.cc +++ b/mediapipe/util/sync_wait_test.cc @@ -2,9 +2,13 @@ #include +#include <utility> + #include "absl/log/absl_check.h" #include "absl/status/status.h" #include "absl/time/time.h" +#include "mediapipe/framework/formats/shared_fd.h" +#include "mediapipe/framework/formats/unique_fd.h" #include "mediapipe/framework/port.h" // IWYU pragma: keep (DRIHSTI_OSX) #include "mediapipe/framework/port/benchmark.h" #include "mediapipe/framework/port/gmock.h" @@ -21,16 +25,7 @@ namespace mediapipe { namespace { struct TestTimer { - TestTimer() = default; - ~TestTimer() { - if (fd != -1) { - ABSL_CHECK_EQ(close(fd), 0); - } - } - TestTimer(TestTimer&& timer) = default; - TestTimer& operator=(TestTimer&& timer) = default; - - int fd = -1; + UniqueFd fd; }; #ifdef MEDIAPIPE_OSX @@ -45,24 +40,22 @@ TestTimer CreateTestTimer(absl::Duration duration) { NOTE_CRITICAL, timeout, NULL); kevent(kq, &kev, 1, NULL, 0, NULL); - TestTimer timer; - timer.fd = kq; - return timer; + return TestTimer{UniqueFd(kq)}; } #else TestTimer CreateTestTimer(absl::Duration duration) { - TestTimer timer; - timer.fd = timerfd_create(CLOCK_MONOTONIC, /*flags*/ 0); - ABSL_CHECK_NE(timer.fd, -1); + const int fd = timerfd_create(CLOCK_MONOTONIC, /*flags*/ 0); + ABSL_CHECK_NE(fd, -1); + TestTimer timer = {UniqueFd(fd)}; struct itimerspec new_value; new_value.it_value = absl::ToTimespec(duration); new_value.it_interval.tv_sec = 0; new_value.it_interval.tv_nsec = 0; - ABSL_CHECK_NE( - timerfd_settime(timer.fd, /*flags=*/0, &new_value, /*oldtimer=*/nullptr), - -1); + ABSL_CHECK_NE(timerfd_settime(timer.fd.Get(), /*flags=*/0, &new_value, + /*oldtimer=*/nullptr), + -1); return timer; } @@ -70,42 +63,76 @@ TEST(SyncWait, WorksWithIndefiniteTimeout) { TestTimer timer = CreateTestTimer(absl::Milliseconds(2)); - MP_EXPECT_OK(mediapipe::SyncWait(timer.fd, absl::InfiniteDuration())); + MP_EXPECT_OK(SyncWait(timer.fd, absl::InfiniteDuration())); +} + +TEST(SyncWait, WorksWithSharedFd) { + TestTimer timer = CreateTestTimer(absl::Milliseconds(2)); + SharedFd fd(std::move(timer).fd); + MP_EXPECT_OK(SyncWait(fd, absl::InfiniteDuration())); } TEST(SyncWait, WorksWithDefiniteTimeout) { TestTimer timer = CreateTestTimer(absl::Milliseconds(5)); - MP_EXPECT_OK(mediapipe::SyncWait(timer.fd, absl::Milliseconds(10))); + MP_EXPECT_OK(SyncWait(timer.fd, absl::Milliseconds(10))); } TEST(SyncWait, WorksWithReadyFd) { TestTimer timer = CreateTestTimer(absl::Milliseconds(5)); // timer.fd is not available for read - MP_EXPECT_OK(mediapipe::SyncWait(timer.fd, absl::InfiniteDuration())); +
MP_EXPECT_OK(SyncWait(timer.fd, absl::InfiniteDuration())); // timer.fd is available for read - MP_EXPECT_OK(mediapipe::SyncWait(timer.fd, absl::InfiniteDuration())); - MP_EXPECT_OK(mediapipe::SyncWait(timer.fd, absl::Milliseconds(1))); + MP_EXPECT_OK(SyncWait(timer.fd, absl::InfiniteDuration())); + MP_EXPECT_OK(SyncWait(timer.fd, absl::Milliseconds(1))); } TEST(SyncWait, ReportsTimeout) { TestTimer timer = CreateTestTimer(absl::Milliseconds(100)); - EXPECT_THAT(mediapipe::SyncWait(timer.fd, absl::Milliseconds(5)), + EXPECT_THAT(SyncWait(timer.fd, absl::Milliseconds(5)), StatusIs(absl::StatusCode::kDeadlineExceeded)); } TEST(SyncWait, ReportsInvalidFd) { const int fd = -1; - EXPECT_THAT(mediapipe::SyncWait(fd, absl::InfiniteDuration()), + EXPECT_THAT(SyncWait(fd, absl::InfiniteDuration()), StatusIs(absl::StatusCode::kInternal)); } +TEST(SyncWait, IsSignaledWorks) { + TestTimer timer = CreateTestTimer(absl::Milliseconds(100)); + MP_ASSERT_OK_AND_ASSIGN(bool is_signaled, IsSignaled(timer.fd)); + EXPECT_FALSE(is_signaled); + + MP_ASSERT_OK(SyncWait(timer.fd, absl::InfiniteDuration())); + + MP_ASSERT_OK_AND_ASSIGN(is_signaled, IsSignaled(timer.fd)); + EXPECT_TRUE(is_signaled); +} + +TEST(SyncWait, IsSignaledWorksWithSharedFd) { + TestTimer timer = CreateTestTimer(absl::Milliseconds(100)); + SharedFd fd(std::move(timer).fd); + MP_ASSERT_OK_AND_ASSIGN(bool is_signaled, IsSignaled(fd)); + EXPECT_FALSE(is_signaled); + + MP_ASSERT_OK(SyncWait(fd, absl::InfiniteDuration())); + + MP_ASSERT_OK_AND_ASSIGN(is_signaled, IsSignaled(fd)); + EXPECT_TRUE(is_signaled); +} + +TEST(SyncWait, IsSignaledReportsInvalidFd) { + const int fd = -1; + EXPECT_THAT(IsSignaled(fd), StatusIs(absl::StatusCode::kInternal)); +} + void BM_SyncWaitZeroTimeout(benchmark::State& state) { // Non blocking waits will be used and timer canceled automatically after // benchmark completion. 
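// With a zero timeout the underlying poll returns immediately; the one-minute
// timer below will not fire during the benchmark loop, so every call is
// expected to return kDeadlineExceeded and the loop measures only the cost of
// the non-blocking check.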
TestTimer timer = CreateTestTimer(absl::Minutes(1)); for (auto s : state) { - ABSL_CHECK_EQ(mediapipe::SyncWait(timer.fd, absl::ZeroDuration()).code(), + ABSL_CHECK_EQ(SyncWait(timer.fd, absl::ZeroDuration()).code(), absl::StatusCode::kDeadlineExceeded); } } diff --git a/mediapipe/util/tflite/BUILD b/mediapipe/util/tflite/BUILD index 587e1407bb..9013c716af 100644 --- a/mediapipe/util/tflite/BUILD +++ b/mediapipe/util/tflite/BUILD @@ -154,15 +154,12 @@ cc_library_with_tflite( ], visibility = ["//visibility:public"], deps = [ - ":error_reporter", "//mediapipe/framework:resources", "//mediapipe/framework/api2:packet", - "//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/util:resource_util", "@com_google_absl//absl/base:core_headers", - "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings:string_view", diff --git a/mediapipe/util/tflite/tflite_model_loader.cc b/mediapipe/util/tflite/tflite_model_loader.cc index 90359624b2..9d144418d0 100644 --- a/mediapipe/util/tflite/tflite_model_loader.cc +++ b/mediapipe/util/tflite/tflite_model_loader.cc @@ -15,75 +15,40 @@ #include "mediapipe/util/tflite/tflite_model_loader.h" #include +#include <optional> #include -#include -#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/api2/packet.h" -#include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status_macros.h" #include "mediapipe/framework/resources.h" -#include "mediapipe/util/resource_util.h" -#include "mediapipe/util/tflite/error_reporter.h" -#include "tensorflow/lite/allocation.h" #include "tensorflow/lite/model_builder.h" namespace mediapipe { -using ::mediapipe::util::tflite::ErrorReporter; -using ::tflite::Allocation; using ::tflite::FlatBufferModel; -using ::tflite::MMAPAllocation; absl::StatusOr<api2::Packet<TfLiteModelPtr>> TfLiteModelLoader::LoadFromPath( const Resources& resources, const std::string& path, bool try_mmap) { std::string model_path = path; - bool file_exists = file::Exists(model_path).ok(); - if (!file_exists) { - // TODO: get rid of manual resolving with PathToResourceAsFile - // as soon as it's incorporated into GetResourceContents. - absl::StatusOr<std::string> resolved_model_path = - mediapipe::PathToResourceAsFile(model_path); - if (resolved_model_path.ok()) { - VLOG(2) << "Loading the model from " << model_path; - model_path = *std::move(resolved_model_path); - file_exists = true; - } - } - - // Try to memory map file if available. Falls back to loading from buffer on - // error. - if (file_exists && try_mmap && MMAPAllocation::IsSupported()) { - ErrorReporter error_reporter; - std::unique_ptr<Allocation> allocation = - std::make_unique<MMAPAllocation>(model_path.c_str(), &error_reporter); - - if (!error_reporter.HasError()) { - auto model = FlatBufferModel::BuildFromAllocation(std::move(allocation)); - if (model) { - return api2::MakePacket<TfLiteModelPtr>( - model.release(), [](FlatBufferModel* model) { delete model; }); - } - } - - ABSL_LOG(WARNING) << "Failed to memory map model from path '" << model_path - << "'; falling back to loading from buffer. Error: " - << error_reporter.message(); - } - // Load model resource.
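// Note: memory mapping is now requested through the Resources API instead of
// the manual MMAPAllocation path removed above; when `try_mmap` is set, the
// resource is fetched with `MMapMode::kMMapOrRead`, which is expected to fall
// back to a plain read when mapping is not available.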
- MP_ASSIGN_OR_RETURN(std::unique_ptr<Resource> model_resource, - resources.Get(model_path)); + MP_ASSIGN_OR_RETURN( + std::unique_ptr<Resource> model_resource, + resources.Get( + model_path, + Resources::Options{ + .mmap_mode = try_mmap ? std::make_optional(MMapMode::kMMapOrRead) + : std::nullopt})); absl::string_view model_view = model_resource->ToStringView(); auto model = FlatBufferModel::VerifyAndBuildFromBuffer(model_view.data(), model_view.size()); - RET_CHECK(model) << "Failed to load model from path " << model_path; + RET_CHECK(model) << "Failed to load model from path (resource ID) " + << model_path; return api2::MakePacket<TfLiteModelPtr>( model.release(), [model_resource = model_resource.release()]( FlatBufferModel* model) mutable { diff --git a/mediapipe/util/tflite/tflite_model_loader.h b/mediapipe/util/tflite/tflite_model_loader.h index f7fd119f3d..1389ad694b 100644 --- a/mediapipe/util/tflite/tflite_model_loader.h +++ b/mediapipe/util/tflite/tflite_model_loader.h @@ -19,7 +19,6 @@ #include #include -#include "absl/base/attributes.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "mediapipe/framework/api2/packet.h" diff --git a/mediapipe/web/graph_runner/BUILD b/mediapipe/web/graph_runner/BUILD index ad63311415..64ca2ee02a 100644 --- a/mediapipe/web/graph_runner/BUILD +++ b/mediapipe/web/graph_runner/BUILD @@ -113,7 +113,7 @@ mediapipe_ts_library( jasmine_node_test( name = "platform_utils_test", - deps = [ + srcs = [ ":platform_utils_test_lib", ], ) diff --git a/mediapipe/web/graph_runner/graph_runner_webgpu.ts b/mediapipe/web/graph_runner/graph_runner_webgpu.ts index 21556be9f8..596da271e2 100644 --- a/mediapipe/web/graph_runner/graph_runner_webgpu.ts +++ b/mediapipe/web/graph_runner/graph_runner_webgpu.ts @@ -31,8 +31,13 @@ export declare interface GPUDeviceWithAdapterInfo extends GPUDevice { * manner. */ export declare interface WasmAsyncCloseModule { - ccall: (name: string, type: string, inParams: unknown, outParams: unknown, - options: unknown) => Promise<void>; + ccall: ( + name: string, + type: string, + inParams: unknown, + outParams: unknown, + options: unknown, + ) => Promise<void>; } /** @@ -49,8 +54,9 @@ export function SupportWebGpu<TBase extends LibConstructor>(Base: TBase) { * @param adapter The adapter to request GPUDevice. */ static async requestWebGpuDevice( - deviceDescriptor?: GPUDeviceDescriptor, - adapter?: GPUAdapter): Promise<GPUDevice> { + deviceDescriptor?: GPUDeviceDescriptor, + adapter?: GPUAdapter, + ): Promise<GPUDevice> { if (!adapter) { adapter = await WebGpuSupportedGraphRunner.requestWebGpuAdapter(); } @@ -65,13 +71,14 @@ export function SupportWebGpu<TBase extends LibConstructor>(Base: TBase) { } const updatedDescriptor: GPUDeviceDescriptor = { ...deviceDescriptor, - requiredFeatures: supportedFeatures + requiredFeatures: supportedFeatures, }; try { device = await adapter.requestDevice(updatedDescriptor); } catch (e: unknown) { console.error( - 'Unable to initialize WebGPU with the requested features.'); + 'Unable to initialize WebGPU with the requested features.', + ); // Rethrow original error. throw e; } @@ -81,9 +88,12 @@ export function SupportWebGpu<TBase extends LibConstructor>(Base: TBase) { // Our inference engines can utilize the adapter info to optimize WebGPU // shader performance. Therefore, we attempt to attach that information to // our internal GPUDevice reference. - const adapterInfo = await adapter.requestAdapterInfo(); - (device as unknown as GPUDeviceWithAdapterInfo).adapterInfo = adapterInfo; - + // We only apply the workaround for browsers/environments where necessary, + // otherwise we'll encounter a runtime error, since this is read-only.
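+ // In newer WebGPU implementations the adapter info is exposed through the
+ // synchronous `GPUAdapter.info` attribute (which replaced the removed
+ // `requestAdapterInfo()` call above), and some environments already mirror
+ // it onto the device as a read-only property, hence the check below.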
+ const deviceWithInfo = (device as unknown as GPUDeviceWithAdapterInfo); + if (!deviceWithInfo.adapterInfo) { + deviceWithInfo.adapterInfo = adapter.info; + } return device; } @@ -92,11 +102,13 @@ export function SupportWebGpu<TBase extends LibConstructor>(Base: TBase) { * @param adapterDescriptor The adapterDescriptor to request GPUAdapter. */ static async requestWebGpuAdapter( - adapterDescriptor?: GPURequestAdapterOptions): Promise<GPUAdapter> { + adapterDescriptor?: GPURequestAdapterOptions, + ): Promise<GPUAdapter> { const adapter = await navigator.gpu.requestAdapter(adapterDescriptor); if (!adapter) { throw new Error( - 'Unable to request adapter from navigator.gpu; Ensure WebGPU is enabled.'); + 'Unable to request adapter from navigator.gpu; Ensure WebGPU is enabled.', + ); } return adapter; } @@ -111,14 +123,17 @@ export function SupportWebGpu<TBase extends LibConstructor>(Base: TBase) { * canvas will be created. */ initializeForWebGpu( - device: GPUDevice, canvas?: HTMLCanvasElement|OffscreenCanvas) { + device: GPUDevice, + canvas?: HTMLCanvasElement | OffscreenCanvas, + ) { if (!canvas) { canvas = new OffscreenCanvas(1, 1); } else if ( - typeof HTMLCanvasElement !== 'undefined' && - canvas instanceof HTMLCanvasElement) { + typeof HTMLCanvasElement !== 'undefined' && + canvas instanceof HTMLCanvasElement + ) { // TODO b/327324051 - Stop using a hard-coded `canvas_webgpu` selector. - canvas.id = 'canvas_webgpu'; // id used as default for WebGPU code + canvas.id = 'canvas_webgpu'; // id used as default for WebGPU code } const context = canvas.getContext('webgpu') as GPUCanvasContext; context.configure({ @@ -136,7 +151,12 @@ export function SupportWebGpu<TBase extends LibConstructor>(Base: TBase) { */ closeGraphAsync(): Promise<void> { return (this.wasmModule as unknown as WasmAsyncCloseModule).ccall( - "closeGraph", "void", [], [], {async: true}); + 'closeGraph', + 'void', + [], + [], + {async: true}, + ); } }; } diff --git a/package.json b/package.json index 99494df7a5..2b799c335a 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,7 @@ "@types/jasmine": "^4.3.1", "@types/node": "^18.11.11", "@types/offscreencanvas": "^2019.7.0", - "@webgpu/types": "^0.1.40", + "@webgpu/types": "^0.1.49", "google-protobuf": "^3.21.2", "jasmine": "^4.5.0", "jasmine-core": "^4.5.0", diff --git a/run_llm_inference.sh b/run_llm_inference.sh new file mode 100644 index 0000000000..df3b3393b4 --- /dev/null +++ b/run_llm_inference.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +# This is a simple script to run LLM inference on Android via the MediaPipe +# LLM inference engine. +# +# This script allows running transformer-based LLM models in *.task or *.bin +# format. We recommend using `gemma2-2b-it-cpu-int8.task` (from +# https://www.kaggle.com/models/google/gemma-2/tfLite/gemma2-2b-it-cpu-int8) or +# the smaller `gemma-1.1-2b-it-cpu-int4.bin` model (from +# https://www.kaggle.com/models/google/gemma/tfLite/gemma-1.1-2b-it-cpu-int4). + +MODEL_FILENAME="gemma2-2b-it-cpu-int8.task" +ADB_WORK_DIR="/data/local/tmp" +INPUT_PROMPT="What is the most famous building in Paris?" + +if [ ! -f "${MODEL_FILENAME}" ]; then + echo "Error: ${MODEL_FILENAME} not found." + echo "Please download it from https://www.kaggle.com/models/google/gemma-2/tfLite/gemma2-2b-it-cpu-int8" + exit 1 +fi + +adb push "${MODEL_FILENAME}" "${ADB_WORK_DIR}/${MODEL_FILENAME}" + +# Build the MediaPipe Docker base image. +docker build . --tag=mediapipe + +# Build the LLM inference engine binary and copy it to the connected Android device.
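+# The container gets a randomized name so that the build output can be copied
+# out of it with `docker cp` afterwards. Note that the containerized Bazel
+# build may take a long time on the first run.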
+CONTAINER_NAME=mediapipe_$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 10 | head -n 1) +docker run --name "$CONTAINER_NAME" mediapipe:latest sh -c " + chmod +x setup_android_sdk_and_ndk.sh && \ + ./setup_android_sdk_and_ndk.sh ~/Android/Sdk ~/Android/Ndk r26d \ + --accept-licenses && + bazel build --config android_arm64 --client_env=CC=clang-16 -c opt \ + --copt=-DABSL_FLAGS_STRIP_NAMES=0 \ + --host_crosstool_top=@bazel_tools//tools/cpp:toolchain \ + //mediapipe/tasks/cc/genai/inference/c:llm_inference_engine_cpu_main +" +docker cp "$CONTAINER_NAME":/mediapipe/bazel-bin/mediapipe/tasks/cc/genai/inference/c/llm_inference_engine_cpu_main llm_inference_engine_cpu_main +adb push llm_inference_engine_cpu_main "${ADB_WORK_DIR}"/llm_inference_engine_cpu_main + +# Run the inference. +adb shell "taskset f0 ${ADB_WORK_DIR}/llm_inference_engine_cpu_main \ + --model_path='${ADB_WORK_DIR}/${MODEL_FILENAME}' \ + --prompt='${INPUT_PROMPT}'" diff --git a/third_party/BUILD b/third_party/BUILD index 799b6cd231..6b00be7e9b 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -15,6 +15,10 @@ load("@bazel_skylib//:bzl_library.bzl", "bzl_library") load("@rules_foreign_cc//foreign_cc:cmake.bzl", "cmake") +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) # Apache License 2.0 @@ -28,6 +32,10 @@ exports_files([ "requirements_lock_3_12.txt", ]) +mediapipe_files(srcs = [ + "libc++_shared.so", +]) + cc_library( name = "glog", visibility = ["//visibility:public"], @@ -427,3 +435,16 @@ java_import( "@com_google_protobuf//java/core:libcore.jar", ], ) + +cc_binary( + name = "libc++_shared", + srcs = ["libc++_shared.so"], + linkshared = 1, + linkstatic = 1, +) + +cc_library( + name = "libc++_shared_lib", + srcs = [":libc++_shared.so"], + alwayslink = 1, +) diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl index cb07233293..7a04fd8c22 100644 --- a/third_party/external_files.bzl +++ b/third_party/external_files.bzl @@ -288,8 +288,8 @@ def external_files(): http_file( name = "com_google_mediapipe_efficientdet_lite0_fp16_no_nms_tflite", - sha256 = "bcda125c96d3767bca894c8cbe7bc458379c9974c9fd8bdc6204e7124a74082a", - urls = ["https://storage.googleapis.com/mediapipe-assets/efficientdet_lite0_fp16_no_nms.tflite?generation=1728573738871723"], + sha256 = "237a58389081333e5cf4154e42b593ce7dd357445536fcaf4ca5bc51c2c50f1c", + urls = ["https://storage.googleapis.com/mediapipe-assets/efficientdet_lite0_fp16_no_nms.tflite?generation=1730305296514873"], ) http_file( @@ -712,6 +712,12 @@ def external_files(): urls = ["https://storage.googleapis.com/mediapipe-assets/leopard.jpg?generation=1685997280368627"], ) + http_file( + name = "com_google_mediapipe_libc___shared_so", + sha256 = "816d497229b6678db485b5dc16ae7d2ac63dc015691b1828bc35c4aa2ed6eed4", + urls = ["https://storage.googleapis.com/mediapipe-assets/libc++_shared.so?generation=1730305298946708"], + ) + http_file( name = "com_google_mediapipe_libimagegenerator_gpu_so", sha256 = "e4407c7c0a2559b168a0f76cda6eb23ce2d167fa757a0d4887ccf57af70c0179", diff --git a/third_party/opencv_macos.BUILD b/third_party/opencv_macos.BUILD index d05c83ccc1..4139f188e7 100644 --- a/third_party/opencv_macos.BUILD +++ b/third_party/opencv_macos.BUILD @@ -7,8 +7,34 @@ licenses(["notice"]) # BSD license exports_files(["LICENSE"]) -# The path to OpenCV is a combination of the path set for "macos_opencv" -# in the WORKSPACE file and the prefix here. 
+# Example configurations:
+#
+# # OpenCV 3
+# To configure OpenCV 3, obtain the install path of OpenCV 3 from Homebrew.
+# The following shows the output of the command for version 3.4.16_10:
+#
+# $ brew ls opencv@3 | grep version.hpp
+# /opt/homebrew/Cellar/opencv@3/3.4.16_10/include/opencv2/core/version.hpp
+#
+# Then set the path in the "macos_opencv" rule in the WORKSPACE file to
+# "/opt/homebrew/Cellar" and the PREFIX below to "opencv@3/<version>" (e.g.
+# "opencv@3/3.4.16_10" for the example above).
+#
+# # OpenCV 4
+# To configure OpenCV 4, obtain the install path of OpenCV 4 from Homebrew.
+# The following shows the output of the command for version 4.10.0_12:
+#
+# $ brew ls opencv | grep version.hpp
+# /opt/homebrew/Cellar/opencv/4.10.0_12/include/opencv4/opencv2/core/version.hpp
+# /opt/homebrew/Cellar/opencv/4.10.0_12/include/opencv4/opencv2/dnn/version.hpp
+#
+# Then set the path in the "macos_opencv" rule in the WORKSPACE file to
+# "/opt/homebrew/Cellar" and the PREFIX below to "opencv/<version>" (e.g.
+# "opencv/4.10.0_12" for the example above). For OpenCV 4, you will also need
+# to adjust the include paths: the header search path should be
+# "include/opencv4/opencv2/**/*.h*" and the include prefix needs to be set to
+# "include/opencv4".
+
 PREFIX = "opt/opencv@3"
 
 cc_library(
diff --git a/third_party/prebuilts/BUILD b/third_party/prebuilts/BUILD
index 5597763bce..8b60a11811 100644
--- a/third_party/prebuilts/BUILD
+++ b/third_party/prebuilts/BUILD
@@ -38,6 +38,7 @@ cc_import(
 
 cc_library(
     name = "opencv_darwin",
+    linkstatic = 1,
     deps = [
         ":opencv_core_darwin",
         ":opencv_imgproc_darwin",
@@ -47,6 +48,7 @@ cc_library(
 
 cc_library(
     name = "opencv_darwin_arm64",
+    linkstatic = 1,
     deps = [
         ":opencv_core_darwin_arm64",
         ":opencv_imgproc_darwin_arm64",
diff --git a/third_party/wasm_files.bzl b/third_party/wasm_files.bzl
index 0426a17631..4dfd1cfa46 100644
--- a/third_party/wasm_files.bzl
+++ b/third_party/wasm_files.bzl
@@ -12,120 +12,120 @@ def wasm_files():
     http_file(
         name = "com_google_mediapipe_wasm_audio_wasm_internal_js",
-        sha256 = "dcf29ade023e427c0b5c75701b35c9c0f1bf3b222f1cf197bffff3c5bc821564",
-        urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1729631783325350"],
+        sha256 = "9f8d59a241abaa0d3b69dad5a094b843e9f0fdf6d2b7349d3d40541e9679725e",
+        urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1733776155050960"],
     )
 
     http_file(
         name = "com_google_mediapipe_wasm_audio_wasm_internal_wasm",
-        sha256 = "22ca6087b4e3f7d2c82a4cbc5ecfbe3da77790823154057fdebc626d335bc606",
-        urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1729631784951205"],
+        sha256 = "a57c300fa8fe6756396c1718ddbe4d134e1361e973087ce192bcdab3eea528d1",
+        urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1733776156973681"],
     )
 
     http_file(
         name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_js",
-        sha256 = "863b7c9c3424210b47df34504d811e5731c0da7fb1ce447b417b2f8a41904d24",
-        urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1729631786438352"],
+        sha256 = "b9cd5366d4b460d58f151b02ce0ec5784e13130ffb67396cbb532a52ad14c966",
+        urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1733776158667129"],
     )
 
     http_file(
         name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_wasm",
-        sha256 =
"ae88030aa97bf5a16f172802e1e98de3385dbeac5e2c903315d4a97ba1e1ec4b", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1729631788203650"], + sha256 = "cdd5c603a5225d85dbb30944fa1e66c46a76790ec246682c4f3d88c571b5a3a6", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1733776160452697"], ) http_file( name = "com_google_mediapipe_wasm_genai_experimental_wasm_internal_js", - sha256 = "2fe752a463559b6611fa340788c91b774adeaa8130a5df1c7a9ded1faf7346c6", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_experimental_wasm_internal.js?generation=1729631789833117"], + sha256 = "d717a50336544581e619ef6470bef6b6cbb80419ba7628cb1d7894bcb78b134f", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_experimental_wasm_internal.js?generation=1733776162207511"], ) http_file( name = "com_google_mediapipe_wasm_genai_experimental_wasm_internal_wasm", - sha256 = "5817c6ca0f9243d2a1a50a325461021fc22cf43f0547dfd08d0fe748a56b2386", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_experimental_wasm_internal.wasm?generation=1729631791598882"], + sha256 = "856430599596575fdde3418e87ab774bc5e5cb74fd2d258c167ef27eedf62f20", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_experimental_wasm_internal.wasm?generation=1733776163952500"], ) http_file( name = "com_google_mediapipe_wasm_genai_experimental_wasm_nosimd_internal_js", - sha256 = "a533f74126602e016d422c40891a3029fcab7739012b2c61c82294125e25b79f", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_experimental_wasm_nosimd_internal.js?generation=1729631793098394"], + sha256 = "6cd43b71667383e643069365b5c76d3fa9a4684b7c53b0342501b2298a4acf36", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_experimental_wasm_nosimd_internal.js?generation=1733776165676349"], ) http_file( name = "com_google_mediapipe_wasm_genai_experimental_wasm_nosimd_internal_wasm", - sha256 = "cc19e6c3659865a095354481817797d98f14a21e4c4bf4046feac5905d1b40c4", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_experimental_wasm_nosimd_internal.wasm?generation=1729631794847288"], + sha256 = "e22418a0e8f3b2781137d2f78bf03088f075512183149695d8d13109e2607e48", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_experimental_wasm_nosimd_internal.wasm?generation=1733776167401717"], ) http_file( name = "com_google_mediapipe_wasm_genai_wasm_internal_js", - sha256 = "27876af95fca95bfc31e480cd8a8c39b3c583885631184ac6b3902823bc7bf30", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_wasm_internal.js?generation=1729631796541863"], + sha256 = "762a2bdc0cf50598cfb136212e5f4ccd948dd1f8818328f18dc0d6954e34303c", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_wasm_internal.js?generation=1733776169085687"], ) http_file( name = "com_google_mediapipe_wasm_genai_wasm_internal_wasm", - sha256 = "7ea22cf096a95d74656374d2c93326f1b3e55ce05f4e9d8c5af15e27fe36917e", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_wasm_internal.wasm?generation=1729631798322940"], + sha256 = "dc42f8170316ab700cadfc39c0dc65872ea20d72f610b634a1e7850ae2a5449b", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_wasm_internal.wasm?generation=1733776170938658"], ) http_file( name = "com_google_mediapipe_wasm_genai_wasm_nosimd_internal_js", - sha256 = 
"bfecf8ad4e6c1f4468730d508ecdf7a659dd66878f75eb84f05442d73252d191", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_wasm_nosimd_internal.js?generation=1729631799937929"], + sha256 = "7945ddee65bf96a7fc4d0ee0e12314cbf4da4c948ed11f874c627bb0aa69e10e", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_wasm_nosimd_internal.js?generation=1733776172955953"], ) http_file( name = "com_google_mediapipe_wasm_genai_wasm_nosimd_internal_wasm", - sha256 = "0627903513f58ddd0df058be8877f52499842d37f9d1acdb7fc063e7ed0adc50", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_wasm_nosimd_internal.wasm?generation=1729631801590775"], + sha256 = "3bdecd9c4e978b9b912f40e8a174eadd3702eea55f1f7d4255c4e50cf021cbb3", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/genai_wasm_nosimd_internal.wasm?generation=1733776174810127"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_js", - sha256 = "5bc536c7d0de0ea284f90aebcbf3a2e1a4254a94f988cd6b0c80932af9377e1a", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1729631803074796"], + sha256 = "903ddd3412782ce598655b1c052156577da9918e6daf34b8958d730aca685d61", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1733776176490879"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_wasm", - sha256 = "89db81068af5254c863f2d2a758d4c80ba58a8ce3667d460d099e99ceefae375", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1729631804845629"], + sha256 = "fc5e0e540e48e94b00d95f5d29228c2c087cc7e61e77c9b28cdae350d89fdd6b", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1733776178221074"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_js", - sha256 = "a05a51517f2c31a30bad54ca95f8006d634fbed0f553f62518ffd9fa73e7ae89", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1729631806522140"], + sha256 = "643302ccf7ae8bc455b7a67fe0ec726018a49a509395dbc50c5ec03784e69165", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1733776179901587"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_wasm", - sha256 = "5c24f8ffe5b15735983c226a3a731fc9195b97ec23a9274ab82d58bf9d484bb4", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1729631808337308"], + sha256 = "e9cbe88bbb169a5afa1cea60d7f3bc08badc0bb192b3d20c51781827eec11210", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1733776181647902"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_js", - sha256 = "2b120e1c7272905719f7893e5f09e033ead468b46b510e6b490d93e3d94ec69c", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1729631809992099"], + sha256 = "4a97e2520ba506c680ecd6ba6acfb146888afa0e2746d57f205352bc6ebb82eb", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1733776183245106"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_wasm", - sha256 = "35d67ac01df034a04a38cb0533d6438595bcc65c485aed61dd47c86c6c3839cd", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1729631811756873"], + sha256 = 
"f00ec4731faa23b3e714d00e88d4d10e2df5c0a427d3a2b4ae6e3526fdd14ef7", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1733776185044729"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_js", - sha256 = "d206ba4e27c42a5863b4001c6d9366345f4507979cd6efd087a149ef7781ccd3", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1729631813404093"], + sha256 = "927def7b465c51b86e4b3060f93646aca4e27121f4b8fc0483786e407ea9cf1f", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1733776186780167"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_wasm", - sha256 = "17c2bff095305fcae98faa0817cbf72f13df9baf76f2067992a8624ef54d18cb", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1729631815062139"], + sha256 = "3821ea9b1f7fb8c549ef2a064ef5c85750bf375c545a49fd6eea0df44a95f1f4", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1733776188639956"], ) diff --git a/version.bzl b/version.bzl index fdca852210..146c5d9fc6 100644 --- a/version.bzl +++ b/version.bzl @@ -2,4 +2,4 @@ # The next version of MediaPipe (e.g. the version that is currently in development). # This version should be bumped after every release. -MEDIAPIPE_FULL_VERSION = "0.10.18" +MEDIAPIPE_FULL_VERSION = "0.10.21" diff --git a/yarn.lock b/yarn.lock index 12cde6067e..c0268f5310 100644 --- a/yarn.lock +++ b/yarn.lock @@ -249,10 +249,10 @@ resolved "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz" integrity sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q== -"@webgpu/types@^0.1.40": - version "0.1.40" - resolved "https://registry.yarnpkg.com/@webgpu/types/-/types-0.1.40.tgz#cf72d1df6f9f8adc5d39556041f20ff2e8a58885" - integrity sha512-/BBkHLS6/eQjyWhY2H7Dx5DHcVrS2ICj9owvSRdgtQT6KcafLZA86tPze0xAOsd4FbsYKCUBUQyNi87q7gV7kw== +"@webgpu/types@^0.1.49": + version "0.1.49" + resolved "https://registry.yarnpkg.com/@webgpu/types/-/types-0.1.49.tgz#eb9f6535e321214e5c6e9dcc6c7d17e0f7584707" + integrity sha512-NMmS8/DofhH/IFeW+876XrHVWel+J/vdcFCHLDqeJgkH9x0DeiwjVd8LcBdaxdG/T7Rf8VUAYsA8X1efMzLjRQ== "@xmldom/xmldom@^0.8.5": version "0.8.10"