feat: Expose unified VisionCameraProxy object, make `FrameProcessorPlugin`s object-oriented (#1660)

* feat: Replace `FrameProcessorRuntimeManager` with `VisionCameraProxy` (iOS)

* Make `FrameProcessorPlugin` a constructable HostObject

* fix: Fix `name` override

* Simplify `useFrameProcessor`

* fix: Fix lint errors

* Remove FrameProcessorPlugin::name

* JSIUtils -> JSINSObjectConversion
mrousavy authored Jul 21, 2023
1 parent 375e894 commit 44ed42d
Showing 41 changed files with 761 additions and 606 deletions.
24 changes: 18 additions & 6 deletions VisionCamera.podspec
@@ -10,14 +10,25 @@ while !Dir.exist?(File.join(nodeModules, "node_modules")) && tries < 10
end
nodeModules = File.join(nodeModules, "node_modules")

puts("[VisionCamera] node modules #{Dir.exist?(nodeModules) ? "found at #{nodeModules}" : "not found!"}")
forceDisableFrameProcessors = false
if defined?($VCDisableFrameProcessors)
Pod::UI.puts "[VisionCamera] $VCDisableFrameProcesors is set to #{$VCDisableFrameProcessors}!"
forceDisableFrameProcessors = $VCDisableFrameProcessors
end
forceDisableSkia = false
if defined?($VCDisableSkia)
Pod::UI.puts "[VisionCamera] $VCDisableSkia is set to #{$VCDisableSkia}!"
forceDisableSkia = $VCDisableSkia
end

Pod::UI.puts("[VisionCamera] node modules #{Dir.exist?(nodeModules) ? "found at #{nodeModules}" : "not found!"}")
workletsPath = File.join(nodeModules, "react-native-worklets")
hasWorklets = File.exist?(workletsPath)
puts "[VisionCamera] react-native-worklets #{hasWorklets ? "found" : "not found"}, Frame Processors #{hasWorklets ? "enabled" : "disabled"}!"
hasWorklets = File.exist?(workletsPath) && !forceDisableFrameProcessors
Pod::UI.puts("[VisionCamera] react-native-worklets #{hasWorklets ? "found" : "not found"}, Frame Processors #{hasWorklets ? "enabled" : "disabled"}!")

skiaPath = File.join(nodeModules, "@shopify", "react-native-skia")
hasSkia = hasWorklets && File.exist?(skiaPath)
puts "[VisionCamera] react-native-skia #{hasSkia ? "found" : "not found"}, Skia Frame Processors #{hasSkia ? "enabled" : "disabled"}!"
hasSkia = hasWorklets && File.exist?(skiaPath) && !forceDisableSkia
Pod::UI.puts("[VisionCamera] react-native-skia #{hasSkia ? "found" : "not found"}, Skia Frame Processors #{hasSkia ? "enabled" : "disabled"}!")

Pod::Spec.new do |s|
s.name = "VisionCamera"
@@ -54,8 +65,9 @@ Pod::Spec.new do |s|
hasWorklets ? "ios/Frame Processor/*.{m,mm,swift}" : "",
hasWorklets ? "ios/Frame Processor/Frame.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessor.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessorRuntimeManager.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessorPlugin.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessorPluginRegistry.h" : "",
hasWorklets ? "ios/Frame Processor/VisionCameraProxy.h" : "",
hasWorklets ? "cpp/**/*.{cpp}" : "",

# Skia Frame Processors
6 changes: 2 additions & 4 deletions cpp/JSITypedArray.cpp
@@ -73,10 +73,8 @@ class PropNameIDCache {

PropNameIDCache propNameIDCache;

InvalidateCacheOnDestroy::InvalidateCacheOnDestroy(jsi::Runtime &runtime) {
key = reinterpret_cast<uintptr_t>(&runtime);
}
InvalidateCacheOnDestroy::~InvalidateCacheOnDestroy() {
void invalidateArrayBufferCache(jsi::Runtime& runtime) {
auto key = reinterpret_cast<uintptr_t>(&runtime);
propNameIDCache.invalidate(key);
}

19 changes: 1 addition & 18 deletions cpp/JSITypedArray.h
@@ -74,24 +74,7 @@ struct typedArrayTypeMap<TypedArrayKind::Float64Array> {
typedef double type;
};

// Instance of this class will invalidate PropNameIDCache when destructor is called.
// Attach this object to global in specific jsi::Runtime to make sure lifecycle of
// the cache object is connected to the lifecycle of the js runtime
class InvalidateCacheOnDestroy : public jsi::HostObject {
public:
explicit InvalidateCacheOnDestroy(jsi::Runtime &runtime);
virtual ~InvalidateCacheOnDestroy();
virtual jsi::Value get(jsi::Runtime &, const jsi::PropNameID &name) {
return jsi::Value::null();
}
virtual void set(jsi::Runtime &, const jsi::PropNameID &name, const jsi::Value &value) {}
virtual std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime &rt) {
return {};
}

private:
uintptr_t key;
};
void invalidateArrayBufferCache(jsi::Runtime& runtime);

class TypedArrayBase : public jsi::Object {
public:
44 changes: 34 additions & 10 deletions docs/docs/guides/FRAME_PROCESSORS.mdx
@@ -120,6 +120,36 @@ const frameProcessor = useFrameProcessor((frame) => {
}, [onQRCodeDetected])
```

### Running asynchronously

Since Frame Processors run synchronously with the Camera Pipeline, anything taking longer than one Frame interval might block the Camera from streaming new Frames. To avoid this, you can use `runAsync` to run code asynchronously on a different Thread:

```ts
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running synchronously at 60 FPS!")
runAsync(() => {
'worklet'
console.log("I'm running asynchronously, possibly at a lower FPS rate!")
})
}, [])
```

### Running at a throttled FPS rate

Some Frame Processor Plugins don't need to run on every Frame; for example, a Frame Processor that detects the brightness of a Frame only needs to run twice per second:

```ts
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running synchronously at 60 FPS!")
runAtTargetFps(2, () => {
'worklet'
console.log("I'm running synchronously at 2 FPS!")
})
}, [])
```

### Using Frame Processor Plugins

Frame Processor Plugins are distributed through npm. To install the [**vision-camera-image-labeler**](https://github.com/mrousavy/vision-camera-image-labeler) plugin, run:

@@ -204,26 +234,20 @@ The Frame Processor API spawns a secondary JavaScript Runtime which consumes a s
Inside your `gradle.properties` file, add the `disableFrameProcessors` flag:
```
```groovy
disableFrameProcessors=true
```

Then, clean and rebuild your project.

#### iOS

Inside your `project.pbxproj`, find the `GCC_PREPROCESSOR_DEFINITIONS` group and add the flag:
Inside your `Podfile`, add the `VCDisableFrameProcessors` flag:

```txt {3}
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"VISION_CAMERA_DISABLE_FRAME_PROCESSORS=1",
"$(inherited)",
);
```

```ruby
$VCDisableFrameProcessors = true
```
Make sure to add this to your Debug-, as well as your Release-configuration.

</TabItem>
<TabItem value="expo">
50 changes: 17 additions & 33 deletions docs/docs/guides/FRAME_PROCESSORS_CREATE_OVERVIEW.mdx
@@ -12,14 +12,14 @@ import TabItem from '@theme/TabItem';

Frame Processor Plugins are **native functions** which can be directly called from a JS Frame Processor. (See ["Frame Processors"](frame-processors))

They **receive a frame from the Camera** as an input and can return any kind of output. For example, a `scanQRCodes` function returns an array of detected QR code strings in the frame:
They **receive a frame from the Camera** as an input and can return any kind of output. For example, a `detectFaces` function returns an array of detected faces in the frame:

```tsx {4-5}
function App() {
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
const qrCodes = scanQRCodes(frame)
console.log(`QR Codes in Frame: ${qrCodes}`)
const faces = detectFaces(frame)
console.log(`Faces in Frame: ${faces}`)
}, [])

return (
@@ -28,7 +28,7 @@ function App() {
}
```

To achieve **maximum performance**, the `scanQRCodes` function is written in a native language (e.g. Objective-C), but it will be directly called from the VisionCamera Frame Processor JavaScript-Runtime.
To achieve **maximum performance**, the `detectFaces` function is written in a native language (e.g. Objective-C), but it will be directly called from the VisionCamera Frame Processor JavaScript-Runtime.

### Types

@@ -43,15 +43,15 @@ Similar to a TurboModule, the Frame Processor Plugin Registry API automatically
| `{}` | `NSDictionary*` | `ReadableNativeMap` |
| `undefined` / `null` | `nil` | `null` |
| `(any, any) => void` | [`RCTResponseSenderBlock`][4] | `(Object, Object) -> void` |
| [`Frame`][1] | [`Frame*`][2] | [`ImageProxy`][3] |
| [`Frame`][1] | [`Frame*`][2] | [`Frame`][3] |
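
For illustration, here is a hypothetical `examplePlugin` call (not a plugin shipped by this repository) showing how arguments cross the JS ↔ native boundary according to the table above:

```ts
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  // Each argument is converted to its native counterpart following the table
  // above, and the plugin's return value is converted back to a JS value.
  const result = examplePlugin(frame, 'hello', 42, true, { nested: [1, 2, 3] })
  console.log(result)
}, [])
```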

### Return values

Return values will automatically be converted to JS values, assuming they are representable in the ["Types" table](#types). So the following Java Frame Processor Plugin:

```java
@Override
public Object callback(ImageProxy image, Object[] params) {
public Object callback(Frame frame, Object[] params) {
return "cat";
}
```
@@ -66,13 +66,13 @@ export function detectObject(frame: Frame): string {
}
```

You can also manipulate the buffer and return it (or a copy of it) by returning a [`Frame`][2]/[`ImageProxy`][3] instance:
You can also manipulate the buffer and return it (or a copy of it) by returning a [`Frame`][2]/[`Frame`][3] instance:

```java
@Override
public Object callback(ImageProxy image, Object[] params) {
ImageProxy resizedImage = new ImageProxy(/* ... */);
return resizedImage;
public Object callback(Frame frame, Object[] params) {
Frame resizedFrame = new Frame(/* ... */);
return resizedFrame;
}
```

@@ -97,16 +97,7 @@ Frame Processors can also accept parameters, following the same type convention
```ts
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
const codes = scanCodes(frame, ['qr', 'barcode'])
}, [])
```

Or with multiple ("variadic") parameters:

```ts
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
const codes = scanCodes(frame, true, 'hello-world', 42)
const codes = scanCodes(frame, { codes: ['qr', 'barcode'] })
}, [])
```

@@ -116,7 +107,7 @@ To let the user know that something went wrong you can use Exceptions:

```java
@Override
public Object callback(ImageProxy image, Object[] params) {
public Object callback(Frame frame, Object[] params) {
if (params[0] instanceof String) {
// ...
} else {
@@ -152,13 +143,13 @@ For example, a realtime video chat application might use WebRTC to send the fram
```java
@Override
public Object callback(ImageProxy image, Object[] params) {
public Object callback(Frame frame, Object[] params) {
String serverURL = (String)params[0];
ImageProxy imageCopy = new ImageProxy(/* ... */);
Frame frameCopy = new Frame(/* ... */);

uploaderQueue.runAsync(() -> {
WebRTC.uploadImage(imageCopy, serverURL);
imageCopy.close();
WebRTC.uploadImage(frameCopy, serverURL);
frameCopy.close();
});

return null;
@@ -195,14 +186,7 @@ This way you can handle queueing up the frames yourself and asynchronously call

### Benchmarking Frame Processor Plugins

Your Frame Processor Plugins have to be fast. VisionCamera automatically detects slow Frame Processors and outputs relevant information in the native console (Xcode: **Debug Area**, Android Studio: **Logcat**):

<div align="center">
<img src={useBaseUrl("img/slow-log.png")} width="80%" />
</div>
<div align="center">
<img src={useBaseUrl("img/slow-log-2.png")} width="80%" />
</div>
Your Frame Processor Plugins have to be fast. Use the FPS Graph (`enableFpsGraph`) to see how fast your Camera is running; if it is not running at the target FPS, your Frame Processor is too slow.
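
As a minimal sketch (reusing the `cameraProps` and `frameProcessor` values from the examples above), the FPS Graph is enabled through a Camera prop:

```tsx
return (
  <Camera
    {...cameraProps}
    frameProcessor={frameProcessor}
    // Renders a small realtime FPS graph over the preview so you can see
    // whether your Frame Processor keeps the Camera at its target FPS.
    enableFpsGraph={true}
  />
)
```
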
<br />
110 changes: 110 additions & 0 deletions docs/docs/guides/FRAME_PROCESSORS_SKIA.mdx
@@ -0,0 +1,110 @@
---
id: frame-processors-skia
title: Skia Frame Processors
sidebar_label: Skia Frame Processors
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import useBaseUrl from '@docusaurus/useBaseUrl';

<div>
<svg xmlns="http://www.w3.org/2000/svg" width="283" height="535" style={{ float: 'right' }}>
<image href={useBaseUrl("img/frame-processors.gif")} x="18" y="33" width="247" height="469" />
<image href={useBaseUrl("img/frame.png")} width="283" height="535" />
</svg>
</div>

### What are Skia Frame Processors?

Skia Frame Processors are [Frame Processors](frame-processors) that allow you to draw onto the Frame using [react-native-skia](https://github.com/Shopify/react-native-skia).

For example, you might want to draw a rectangle around a user's face **without writing any native code**, while still **achieving native performance**:

```jsx
function App() {
const frameProcessor = useSkiaFrameProcessor((frame) => {
'worklet'
const faces = detectFaces(frame)
faces.forEach((face) => {
frame.drawRect(face.rectangle, redPaint)
})
}, [])

return (
<Camera
{...cameraProps}
frameProcessor={frameProcessor}
/>
)
}
```

With Skia, you can also implement realtime filters, blurring, shaders, and much more. For example, this is how you invert the colors in a Frame:

```jsx
const INVERTED_COLORS_SHADER = `
uniform shader image;
half4 main(vec2 pos) {
vec4 color = image.eval(pos);
return vec4(1.0 - color.rgb, 1.0);
}
`;

function App() {
const imageFilter = Skia.ImageFilter.MakeRuntimeShader(/* INVERTED_COLORS_SHADER */)
const paint = Skia.Paint()
paint.setImageFilter(imageFilter)

const frameProcessor = useSkiaFrameProcessor((frame) => {
'worklet'
frame.render(paint)
}, [])

return (
<Camera
{...cameraProps}
frameProcessor={frameProcessor}
/>
)
}
```

### Rendered outputs

The results of a Skia Frame Processor are rendered to an offscreen context and will be displayed in the Camera Preview, recorded to a video file (`startRecording()`), and captured in a photo (`takePhoto()`). In other words, you draw into the Frame, not just on top of it.
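
As a sketch (assuming a `camera` ref and the standard `takePhoto()` / `startRecording()` methods), the drawn-on frames are what both capture paths receive:

```tsx
const onTakePhoto = async () => {
  // The photo contains the frame with the Skia drawing baked in.
  const photo = await camera.current?.takePhoto()
  console.log(photo?.path)
}

const onStartRecording = () => {
  // The recorded video also contains the drawn-on frames.
  camera.current?.startRecording({
    onRecordingFinished: (video) => console.log(video.path),
    onRecordingError: (error) => console.error(error),
  })
}
```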

### Performance

VisionCamera sets up an additional Skia rendering context which requires a few resources.

On iOS, Metal is used for GPU Acceleration. On Android, OpenGL is used for GPU Acceleration.
C++/JSI is used for highly efficient communication between JS and Skia.

### Disabling Skia Frame Processors

Skia Frame Processors ship with additional C++ files which might slightly increase the app's build time. If you're not using Skia Frame Processors at all, you can disable them:

#### Android

Inside your `gradle.properties` file, add the `disableSkia` flag:

```groovy
disableSkia=true
```

Then, clean and rebuild your project.

#### iOS

Inside your `Podfile`, add the `VCDisableSkia` flag:

```ruby
$VCDisableSkia = true
```


<br />

#### 🚀 Next section: [Zooming](/docs/guides/zooming) (or [creating a Frame Processor Plugin](/docs/guides/frame-processors-plugins-overview))