From a9ee69ad08e2deeade5bc058ea7fba5db027c24e Mon Sep 17 00:00:00 2001 From: nathan contino Date: Wed, 7 May 2025 16:39:38 -0400 Subject: [PATCH 1/7] Refactor AI & Data section around quickstart, processing, inference high-level concepts --- docs/data-ai/_index.md | 38 +- docs/data-ai/ai/_index.md | 4 +- docs/data-ai/ai/create-dataset.md | 341 ------------------ docs/data-ai/ai/{advanced => infer}/_index.md | 5 +- docs/data-ai/ai/{ => infer}/run-inference.md | 6 +- docs/data-ai/ai/react/_index.md | 9 + docs/data-ai/ai/{ => react}/act.md | 6 +- docs/data-ai/ai/{ => react}/alert.md | 12 +- docs/data-ai/ai/train/_index.md | 9 + docs/data-ai/ai/{ => train}/deploy.md | 6 +- docs/data-ai/ai/{ => train}/train-tflite.md | 14 +- docs/data-ai/ai/{ => train}/train.md | 10 +- .../upload-external-data.md | 12 +- docs/data-ai/capture-data/advanced/_index.md | 8 - .../capture-data/capture-other-sources.md | 12 - docs/data-ai/data/_index.md | 4 +- docs/data-ai/data/advanced/_index.md | 8 - docs/data-ai/data/cloud/_index.md | 10 + docs/data-ai/data/{ => cloud}/export.md | 4 +- docs/data-ai/data/{ => cloud}/query.md | 10 +- docs/data-ai/data/{ => cloud}/visualize.md | 12 +- docs/data-ai/data/edge/_index.md | 10 + .../edge}/capture-sync.md | 20 +- .../edge}/conditional-sync.md | 12 +- .../edge}/filter-before-sync.md | 6 +- docs/data-ai/data/react/_index.md | 9 + .../data/{advanced => react}/alert-data.md | 8 +- .../{capture-data => get-started}/_index.md | 5 +- docs/data-ai/get-started/annotate-images.md | 90 +++++ docs/data-ai/get-started/capture-images.md | 42 +++ .../get-started/create-training-dataset.md | 155 ++++++++ .../how-sync-works.md | 14 +- docs/data-ai/get-started/quickstart.md | 33 ++ .../advanced-data-capture-sync.md | 12 +- docs/data-ai/reference/mlmodel-design.md | 10 +- docs/data-ai/reference/temp.md | 10 + docs/dev/_index.md | 6 +- docs/dev/reference/apis/services/data.md | 2 +- docs/dev/reference/apis/services/ml.md | 2 +- docs/dev/reference/changelog.md | 36 +- docs/dev/reference/glossary/mql.md | 2 +- docs/dev/reference/glossary/sql.md | 2 +- docs/dev/reference/sdks/connectivity.md | 2 +- docs/dev/reference/sdks/python/python-venv.md | 2 +- docs/dev/tools/cli.md | 10 +- docs/manage/fleet/reuse-configuration.md | 6 +- docs/manage/manage/rbac.md | 2 +- docs/manage/troubleshoot/alert.md | 6 +- .../teleoperate/default-interface.md | 2 +- docs/operate/control/headless-app.md | 2 +- .../get-started/other-hardware/_index.md | 2 +- .../get-started/supported-hardware/_index.md | 4 +- docs/operate/mobility/use-input-to-act.md | 4 +- .../reference/advanced-modules/_index.md | 2 +- docs/operate/reference/architecture/_index.md | 2 +- .../reference/components/camera/_index.md | 4 +- .../reference/components/camera/calibrate.md | 2 +- .../components/camera/esp32-camera.md | 4 +- .../components/camera/fake-micro-server.md | 2 +- .../reference/components/camera/fake.md | 2 +- .../reference/components/camera/ffmpeg.md | 2 +- .../reference/components/camera/image-file.md | 2 +- .../reference/components/camera/transform.md | 2 +- .../reference/components/camera/webcam.md | 4 +- .../reference/components/encoder/_index.md | 2 +- .../components/movement-sensor/_index.md | 4 +- .../components/power-sensor/_index.md | 4 +- .../reference/components/sensor/_index.md | 4 +- .../reference/components/sensor/fake.md | 2 +- .../components/sensor/ultrasonic-micro-rdk.md | 2 +- .../services/slam/cloudslam/_index.md | 10 +- .../reference/services/vision/_index.md | 2 +- .../reference/services/vision/mlmodel.md | 10 +- 
docs/tutorials/configure/pet-photographer.md | 12 +- docs/tutorials/control/air-quality-fleet.md | 2 +- docs/tutorials/projects/claw-game.md | 2 +- docs/tutorials/projects/helmet.md | 8 +- .../projects/integrating-viam-with-openai.md | 4 +- .../tutorials/projects/send-security-photo.md | 4 +- .../tutorials/projects/verification-system.md | 6 +- .../services/visualize-data-grafana.md | 20 +- layouts/docs/tutorials.html | 2 +- 82 files changed, 603 insertions(+), 593 deletions(-) delete mode 100644 docs/data-ai/ai/create-dataset.md rename docs/data-ai/ai/{advanced => infer}/_index.md (53%) rename docs/data-ai/ai/{ => infer}/run-inference.md (95%) create mode 100644 docs/data-ai/ai/react/_index.md rename docs/data-ai/ai/{ => react}/act.md (98%) rename docs/data-ai/ai/{ => react}/alert.md (92%) create mode 100644 docs/data-ai/ai/train/_index.md rename docs/data-ai/ai/{ => train}/deploy.md (90%) rename docs/data-ai/ai/{ => train}/train-tflite.md (90%) rename docs/data-ai/ai/{ => train}/train.md (99%) rename docs/data-ai/ai/{advanced => train}/upload-external-data.md (97%) delete mode 100644 docs/data-ai/capture-data/advanced/_index.md delete mode 100644 docs/data-ai/capture-data/capture-other-sources.md delete mode 100644 docs/data-ai/data/advanced/_index.md create mode 100644 docs/data-ai/data/cloud/_index.md rename docs/data-ai/data/{ => cloud}/export.md (97%) rename docs/data-ai/data/{ => cloud}/query.md (97%) rename docs/data-ai/data/{ => cloud}/visualize.md (95%) create mode 100644 docs/data-ai/data/edge/_index.md rename docs/data-ai/{capture-data => data/edge}/capture-sync.md (83%) rename docs/data-ai/{capture-data => data/edge}/conditional-sync.md (96%) rename docs/data-ai/{capture-data => data/edge}/filter-before-sync.md (97%) create mode 100644 docs/data-ai/data/react/_index.md rename docs/data-ai/data/{advanced => react}/alert-data.md (97%) rename docs/data-ai/{capture-data => get-started}/_index.md (63%) create mode 100644 docs/data-ai/get-started/annotate-images.md create mode 100644 docs/data-ai/get-started/capture-images.md create mode 100644 docs/data-ai/get-started/create-training-dataset.md rename docs/data-ai/{capture-data/advanced => get-started}/how-sync-works.md (95%) create mode 100644 docs/data-ai/get-started/quickstart.md rename docs/data-ai/{capture-data/advanced => reference}/advanced-data-capture-sync.md (98%) create mode 100644 docs/data-ai/reference/temp.md diff --git a/docs/data-ai/_index.md b/docs/data-ai/_index.md index 9554e6f352..351b75b381 100644 --- a/docs/data-ai/_index.md +++ b/docs/data-ai/_index.md @@ -25,32 +25,36 @@ You can also monitor your machines through teleop, power your application logic,
-{{< how-to-expand "Capture data" "3" "BEGINNER-FRIENDLY" "" "data-platform-capture" >}} +{{< how-to-expand "Get started" "4" "BEGINNER-FRIENDLY" "" "data-platform-capture" >}} {{< cards >}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} -{{% card link="/data-ai/capture-data/filter-before-sync/" noimage="true" %}} -{{% card link="/data-ai/capture-data/conditional-sync/" noimage="true" %}} +{{% card link="/data-ai/get-started/quickstart/" noimage="true" %}} +{{% card link="/data-ai/get-started/capture-images/" noimage="true" %}} +{{% card link="/data-ai/get-started/create-training-dataset/" noimage="true" %}} +{{% card link="/data-ai/get-started/annotate-images/" noimage="true" %}} {{< /cards >}} {{< /how-to-expand >}} -{{< how-to-expand "Work with data" "4" "BEGINNER-FRIENDLY" "" "data-platform-work" >}} +{{< how-to-expand "Process data" "7" "INTERMEDIATE" "" "data-platform-work" >}} {{< cards >}} -{{% card link="/data-ai/data/query/" noimage="true" %}} -{{% card link="/data-ai/data/visualize/" noimage="true" %}} -{{% card link="/data-ai/data/advanced/alert-data/" noimage="true" %}} -{{% card link="/data-ai/data/export/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/filter-before-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/conditional-sync/" noimage="true" %}} +{{% card link="/data-ai/data/cloud/query/" noimage="true" %}} +{{% card link="/data-ai/data/cloud/visualize/" noimage="true" %}} +{{% card link="/data-ai/data/react/alert-data/" noimage="true" %}} +{{% card link="/data-ai/data/cloud/export/" noimage="true" %}} {{< /cards >}} {{< /how-to-expand >}} -{{< how-to-expand "Leverage AI" "8" "INTERMEDIATE" "" "data-platform-ai" >}} +{{< how-to-expand "Integrate AI" "8" "INTERMEDIATE" "" "data-platform-ai" >}} {{< cards >}} -{{% card link="/data-ai/ai/create-dataset/" noimage="true" %}} -{{% card link="/data-ai/ai/train-tflite/" noimage="true" %}} -{{% card link="/data-ai/ai/train/" noimage="true" %}} -{{% card link="/data-ai/ai/deploy/" noimage="true" %}} -{{% card link="/data-ai/ai/run-inference/" noimage="true" %}} -{{% card link="/data-ai/ai/alert/" noimage="true" %}} -{{% card link="/data-ai/ai/act/" noimage="true" %}} +{{% card link="/data-ai/ai/train/create-dataset/" noimage="true" %}} +{{% card link="/data-ai/ai/train/train-tflite/" noimage="true" %}} +{{% card link="/data-ai/ai/train/train/" noimage="true" %}} +{{% card link="/data-ai/ai/train/deploy/" noimage="true" %}} +{{% card link="/data-ai/ai/infer/run-inference/" noimage="true" %}} +{{% card link="/data-ai/ai/react/alert/" noimage="true" %}} +{{% card link="/data-ai/ai/react/act/" noimage="true" %}} {{< /cards >}} {{< /how-to-expand >}} diff --git a/docs/data-ai/ai/_index.md b/docs/data-ai/ai/_index.md index a7a53e72e5..e09e15c850 100644 --- a/docs/data-ai/ai/_index.md +++ b/docs/data-ai/ai/_index.md @@ -1,6 +1,6 @@ --- -linkTitle: "Leverage AI" -title: "Leverage AI" +linkTitle: "AI Inference" +title: "AI Inference" weight: 300 layout: "empty" type: "docs" diff --git a/docs/data-ai/ai/create-dataset.md b/docs/data-ai/ai/create-dataset.md deleted file mode 100644 index cdac56ad88..0000000000 --- a/docs/data-ai/ai/create-dataset.md +++ /dev/null @@ -1,341 +0,0 @@ ---- -linkTitle: "Create a dataset" -title: "Create a dataset" -weight: 10 -layout: "docs" -type: "docs" -description: "Create a dataset to train a machine learning model." 
-aliases: - - /fleet/dataset/ - - /manage/data/label/ - - /manage/data/dataset/ - - /data/dataset/ ---- - -To ensure a machine learning model you create performs well, you need to train it on a variety of images that cover the range of things your machine should be able to recognize. - -To train a model, you need a dataset that meets the following criteria: - -- the dataset contains at least 15 images -- at least 80% of the images have labels -- for each selected label, at least 10 bounding boxes exist - -This page explains how to create a dataset that meets these criteria for your training purposes. - -## Prerequisites - -{{% expand "a machine connected to the Viam app" %}} - -{{% snippet "setup.md" %}} - -{{% /expand %}} - -{{% expand "a camera, connected to your machine, to capture images" %}} - -Follow the guide to configure a [webcam](/operate/reference/components/camera/webcam/) or similar [camera component](/operate/reference/components/camera/). - -{{% /expand%}} - -## Create a dataset - -To create a dataset, use the Viam CLI or the Viam app: - -{{< tabs >}} -{{% tab name="Viam app" %}} - -1. Open the [**DATASETS** tab on the **DATA** page](https://app.viam.com/data/datasets) of the Viam app. - -1. Click the **+ Create dataset** button. - - {{< imgproc src="/services/data/create-dataset.png" alt="The **DATASET** tab of the **DATA** page, showing the **+ Create dataset** button." resize="800x" style="width:500px" class="imgzoom" >}} - -1. Enter a unique name for the dataset. - -1. Click the **Create dataset** button to create the dataset. - -{{% /tab %}} -{{% tab name="CLI" %}} - -1. First, install the Viam CLI and authenticate: - - {{< readfile "/static/include/how-to/install-cli.md" >}} - -1. [Log in to the CLI](/dev/tools/cli/#authenticate). - -1. Run the following command to create a dataset, replacing the `` and `` placeholders with your organization ID and a unique name for the dataset: - - ```sh {class="command-line" data-prompt="$"} - viam dataset create --org-id= --name= - ``` - -{{% /tab %}} -{{< /tabs >}} - -## Capture images - -{{< tabs >}} -{{% tab name="One image" %}} - -You can add images to a dataset directly from a camera or vision component feed in the **CONTROL** or **CONFIGURATION** tabs of the Viam app. - -To add an image directly to a dataset from a visual feed, complete the following steps: - -1. Open the **TEST** panel of any camera or vision service component to view a feed of images from the camera. -1. Click the button marked with the camera icon to save the currently displayed image to a dataset: - {{< imgproc src="/components/camera/add_image_to_dataset_button.png" alt="A button marked with the outline of a camera, emphasized in red" resize="800x" style="width:500px" class="imgzoom" >}} -1. Select an existing dataset. -1. Click **Add** to add the image to the selected dataset. -1. When you see a success notification that reads "Saved image to dataset", you have successfully added the image to the dataset. - -To view images added to your dataset, go to the **DATA** page's [**DATASETS** tab](https://app.viam.com/data/datasets) in the Viam app and select your dataset. - -{{% /tab %}} -{{% tab name="Many images" %}} - -To capture a large number of images for training an ML model, [Capture and sync image data](/data-ai/capture-data/capture-sync/) using the data management service with your camera. - -Viam stores the images saved by capture and sync on the [**DATA** page](https://app.viam.com/data/), but does not add the images to a dataset. 
-We recommend you tag the images first and then use the CLI to [add the tagged images to a dataset](/data-ai/ai/create-dataset/#add-tagged-images-to-a-dataset). - -{{< alert title="Tip" color="tip" >}} - -Once you have enough images, consider disabling data capture to [avoid incurring fees](https://www.viam.com/product/pricing) for capturing large amounts of training data. - -{{< /alert >}} -{{% /tab %}} -{{< /tabs >}} - -Once you've captured enough images for training, you must annotate them to train a model. - -## Annotate images - -Use the interface on the [**DATA** page](https://app.viam.com/data/view) to annotate your images. -Always follow best practices when you label your images: - -More data means better models - -: Incorporate as much data as you practically can to improve your model's overall performance. - -Include counterexamples - -: Include images with and without the object you’re looking to classify. -This helps the model distinguish the target object from the background and reduces the chances of false positives by teaching the model what the object is _not_. - -Avoid class imbalance - -: Don't train excessively on one specific type or class, make sure each category has a roughly equal number of images. -For instance, if you're training a dog detector, include images of various dog breeds to avoid bias towards one breed. -An imbalanced dataset can lead the model to favor one class over others, reducing its overall accuracy. - -Match training images to intended use case - -: Use images that reflect the quality and conditions of your production environment. -For example, if you plan to use a low-quality camera in production, train with low-quality images. -Similarly, if your model will run all day, capture images in daylight, nighttime, dusk, and dawn conditions. - -Vary angles and distances - -: Include image examples from every angle and distance that you expect the model to handle. - -Viam enables you to annotate images for the following machine learning methods: - -{{< tabs >}} -{{% tab name="Classification" %}} - -Classification determines a descriptive tag or set of tags for an image. -For example, classification could help you identify: - -- whether an image of a food display appears `full`, `empty`, or `average` -- the quality of manufacturing output: `good` or `bad` -- what combination of toppings exists on a pizza: `pepperoni`, `sausage` and `pepper`, or `pineapple` and `ham` and `mushroom` - -Viam supports single and multiple label classification. -To create a training set for classification, annotate tags to describe your images. - -To tag an image: - -1. Click on an image, then click the **+** next to the **Tags** option. -1. Add one or more tags to your image. - - {{}} - -Repeat these steps for all images in the dataset. - -{{% /tab %}} -{{% tab name="Object detection" %}} - -Object detection identifies and determines the location of certain objects in an image. -For example, object detection could help you identify: - -- how many `pizza` objects appear on a counter -- the number of `bicycle` and `pedestrian` objects on a greenway -- which `plant` objects are popular with `deer` in your garden - -To create a training set for object detection, annotate bounding boxes to teach your model to identify objects that you want to detect in future images. - -To label an object with a bounding box: - -1. Click on an image, then click the **Annotate** button in right side menu. -1. Choose an existing label or create a new label. -1. 
Holding the command key (on macOS), or the control key (on Linux and Windows), click and drag on the image to create the bounding box: - - {{}} - -{{< alert title="Tip" color="tip" >}} - -Once created, you can move, resize, or delete the bounding box. -{{< /alert >}} - -Repeat these steps for all images in the dataset. - -{{% /tab %}} -{{< /tabs >}} - -## Add tagged images to a dataset - -{{< tabs >}} -{{% tab name="Viam app" %}} - -1. Open the [**DATA** page](https://app.viam.com/data/view) of the Viam app. - -1. Navigate to the **ALL DATA** tab. - -1. Use the checkbox in the upper left of each image to select labeled images. - -1. Click the **Add to dataset** button, select a dataset, and click the **Add ... images** button to add the selected images to the dataset. - -{{% /tab %}} -{{% tab name="CLI" %}} - -Use the Viam CLI to filter images by label and add the filtered images to a dataset: - -1. First, [create a dataset](#create-a-dataset), if you haven't already. - -1. If you just created a dataset, use the dataset ID output by the creation command. - If your dataset already exists, run the following command to get a list of dataset names and corresponding IDs: - - ```sh {class="command-line" data-prompt="$"} - viam dataset list - ``` - -1. Run the following [command](/dev/tools/cli/#dataset) to add all images labeled with a subset of tags to the dataset, replacing the `` placeholder with the dataset ID output by the command in the previous step: - - ```sh {class="command-line" data-prompt="$"} - viam dataset data add filter --dataset-id= --tags=red_star,blue_square - ``` - -{{% /tab %}} -{{% tab name="Data Client API" %}} - -The following script adds all images captured from a certain machine to a new dataset. Complete the following steps to use the script: - -1. Copy and paste the following code into a file named add_images_from_machine_to_dataset.py on your machine. 
- - ```python {class="line-numbers linkable-line-numbers" data-line="9-13" } - import asyncio - from typing import List, Optional - - from viam.rpc.dial import DialOptions, Credentials - from viam.app.viam_client import ViamClient - from viam.utils import create_filter - - # Configuration constants – replace with your actual values - DATASET_NAME = "" # a unique, new name for the dataset you want to create - ORG_ID = "" # your organization ID, find in your organization settings - PART_ID = "" # id of machine that captured target images, find in machine config - API_KEY = "" # API key, find or create in your organization settings - API_KEY_ID = "" # API key ID, find or create in your organization settings - - # Adjust the maximum number of images to add to the dataset - MAX_MATCHES = 500 - - async def connect() -> ViamClient: - """Establish a connection to the Viam client using API credentials.""" - dial_options = DialOptions( - credentials=Credentials( - type="api-key", - payload=API_KEY, - ), - auth_entity=API_KEY_ID, - ) - return await ViamClient.create_from_dial_options(dial_options) - - - async def fetch_binary_data_ids(data_client, part_id: str) -> List[str]: - """Fetch binary data metadata and return a list of BinaryData objects.""" - data_filter = create_filter(part_id=part_id) - all_matches = [] - last: Optional[str] = None - - print("Getting data for part...") - - while len(all_matches) < MAX_MATCHES: - print("Fetching more data...") - data, _, last = await data_client.binary_data_by_filter( - data_filter, - limit=50, - last=last, - include_binary_data=False, - ) - if not data: - break - all_matches.extend(data) - - return all_matches - - - async def main() -> int: - """Main execution function.""" - viam_client = await connect() - data_client = viam_client.data_client - - matching_data = await fetch_binary_data_ids(data_client, PART_ID) - - print("Creating dataset...") - - try: - dataset_id = await data_client.create_dataset( - name=DATASET_NAME, - organization_id=ORG_ID, - ) - print(f"Created dataset: {dataset_id}") - except Exception as e: - print("Error creating dataset. It may already exist.") - print("See: https://app.viam.com/data/datasets") - print(f"Exception: {e}") - return 1 - - print("Adding data to dataset...") - - await data_client.add_binary_data_to_dataset_by_ids( - binary_ids=[obj.metadata.binary_data_id for obj in matching_data], - dataset_id=dataset_id - ) - - print("Added files to dataset.") - print(f"See dataset: https://app.viam.com/data/datasets?id={dataset_id}") - - viam_client.close() - return 0 - - - if __name__ == "__main__": - asyncio.run(main()) - ``` - -1. Fill in the placeholders with values for your own organization, API key, machine, and dataset. - -1. Install the [Viam Python SDK](https://python.viam.dev/) by running the following command: - - ```sh {class="command-line" data-prompt="$"} - pip install viam-sdk - ``` - -1. 
Finally, run the following command to add the images to the dataset: - - ```sh {class="command-line" data-prompt="$"} - python add_images_from_machine_to_dataset.py - ``` - -{{% /tab %}} -{{< /tabs >}} diff --git a/docs/data-ai/ai/advanced/_index.md b/docs/data-ai/ai/infer/_index.md similarity index 53% rename from docs/data-ai/ai/advanced/_index.md rename to docs/data-ai/ai/infer/_index.md index f004c032cf..9608b0e841 100644 --- a/docs/data-ai/ai/advanced/_index.md +++ b/docs/data-ai/ai/infer/_index.md @@ -1,8 +1,9 @@ --- -linkTitle: "Advanced" -title: "Advanced" +linkTitle: "Models" +title: "Models" weight: 200 layout: "empty" type: "docs" empty_node: true +open_on_desktop: true --- diff --git a/docs/data-ai/ai/run-inference.md b/docs/data-ai/ai/infer/run-inference.md similarity index 95% rename from docs/data-ai/ai/run-inference.md rename to docs/data-ai/ai/infer/run-inference.md index 661774802d..a177c77b14 100644 --- a/docs/data-ai/ai/run-inference.md +++ b/docs/data-ai/ai/infer/run-inference.md @@ -1,6 +1,6 @@ --- -linkTitle: "Run inference" -title: "Run inference on a model" +linkTitle: "Vision service" +title: "Inference with the vision service" weight: 50 layout: "docs" type: "docs" @@ -37,7 +37,7 @@ One vision service you can use to run inference on a camera stream if you have a ### Configure an mlmodel vision service Add the `vision / ML model` service to your machine. -Then, from the **Select model** dropdown, select the name of the ML model service you configured when [deploying](/data-ai/ai/deploy/) your model (for example, `mlmodel-1`). +Then, from the **Select model** dropdown, select the name of the ML model service you configured when [deploying](/data-ai/ai/train/deploy/) your model (for example, `mlmodel-1`). **Save** your changes. diff --git a/docs/data-ai/ai/react/_index.md b/docs/data-ai/ai/react/_index.md new file mode 100644 index 0000000000..9301fe6258 --- /dev/null +++ b/docs/data-ai/ai/react/_index.md @@ -0,0 +1,9 @@ +--- +linkTitle: "React" +title: "React" +weight: 300 +layout: "empty" +type: "docs" +empty_node: true +open_on_desktop: true +--- diff --git a/docs/data-ai/ai/act.md b/docs/data-ai/ai/react/act.md similarity index 98% rename from docs/data-ai/ai/act.md rename to docs/data-ai/ai/react/act.md index 53dea9a2d4..70c719e1f9 100644 --- a/docs/data-ai/ai/act.md +++ b/docs/data-ai/ai/react/act.md @@ -1,11 +1,11 @@ --- -linkTitle: "Act based on inferences" -title: "Act based on inferences" +linkTitle: "Autonomous decision-making" +title: "Autonomous decision-making" weight: 70 layout: "docs" type: "docs" description: "Use the vision service API to act based on inferences." -next: "/data-ai/ai/advanced/upload-external-data/" +next: "/data-ai/ai/train/upload-external-data/" --- You can use the [vision service API](/dev/reference/apis/services/vision/) to get information about your machine's inferences and program behavior based on that. diff --git a/docs/data-ai/ai/alert.md b/docs/data-ai/ai/react/alert.md similarity index 92% rename from docs/data-ai/ai/alert.md rename to docs/data-ai/ai/react/alert.md index 682c89eb2e..99726d6fc9 100644 --- a/docs/data-ai/ai/alert.md +++ b/docs/data-ai/ai/react/alert.md @@ -1,13 +1,13 @@ --- -linkTitle: "Alert on inferences" -title: "Alert on inferences" +linkTitle: "Detect anomalies" +title: "Detect anomalies" weight: 60 layout: "docs" type: "docs" description: "Use triggers to send email notifications when inferences are made." 
--- -At this point, you should have already set up and tested [computer vision functionality](/data-ai/ai/run-inference/). +At this point, you should have already set up and tested [computer vision functionality](/data-ai/ai/infer/run-inference/). On this page, you'll learn how to use triggers to send alerts in the form of email notifications or webhook requests when certain detections or classifications are made. You will build a system that can monitor camera feeds and detect situations that require review. @@ -27,7 +27,7 @@ Next, you'll configure a trigger to send email notifications or webhook requests {{< expand "A configured camera and vision service. Click to see instructions." >}} -Follow the instructions to [configure a camera](/operate/reference/components/camera/) and [run inference](/data-ai/ai/run-inference/). +Follow the instructions to [configure a camera](/operate/reference/components/camera/) and [run inference](/data-ai/ai/infer/run-inference/). {{< /expand >}} @@ -74,7 +74,7 @@ For example, if using the YOLOv8 model (named `yolo`) for hardhat detection, you ## Configure data capture and sync -Viam's built-in [data management service](/data-ai/capture-data/capture-sync/#configure-data-capture-and-sync-for-individual-resources) allows you to, among other things, capture images and sync them to the cloud. +Viam's built-in [data management service](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources) allows you to, among other things, capture images and sync them to the cloud. Configure data capture on the `filtered-camera` camera to capture images of detections or classifications: @@ -99,7 +99,7 @@ Configure data capture on the `filtered-camera` camera to capture images of dete ## Set up alerts -[Triggers](/data-ai/data/advanced/alert-data/) allow you to send webhook requests or email notifications when certain events happen. +[Triggers](/data-ai/data/react/alert-data/) allow you to send webhook requests or email notifications when certain events happen. You can use the **Data has been synced to the cloud** (`part_data_ingested`) trigger to send alerts whenever an image with an anomaly detection is synced to the cloud from your object filter camera. diff --git a/docs/data-ai/ai/train/_index.md b/docs/data-ai/ai/train/_index.md new file mode 100644 index 0000000000..2d6f06dd9a --- /dev/null +++ b/docs/data-ai/ai/train/_index.md @@ -0,0 +1,9 @@ +--- +linkTitle: "Train" +title: "Train" +weight: 100 +layout: "empty" +type: "docs" +empty_node: true +open_on_desktop: true +--- diff --git a/docs/data-ai/ai/deploy.md b/docs/data-ai/ai/train/deploy.md similarity index 90% rename from docs/data-ai/ai/deploy.md rename to docs/data-ai/ai/train/deploy.md index be88f3fa72..ebe26d25ee 100644 --- a/docs/data-ai/ai/deploy.md +++ b/docs/data-ai/ai/train/deploy.md @@ -75,15 +75,15 @@ Save your config to use your specified version of the ML model. The service works with models trained inside and outside the Viam app: -- You can [train TFlite](/data-ai/ai/train-tflite/) or [other model frameworks](/data-ai/ai/train/) on data from your machines. +- You can [train TFlite](/data-ai/ai/train/train-tflite/) or [other model frameworks](/data-ai/ai/train/train/) on data from your machines. - You can use [ML models](https://app.viam.com/registry?type=ML+Model) from the [Viam Registry](https://app.viam.com/registry). 
- You can upload externally trained models from a model file on the [**MODELS** tab](https://app.viam.com/models) in the **DATA** section of the Viam app. -- You can use a [model](/data-ai/ai/deploy/#deploy-your-ml-model-on-an-ml-model-service) trained outside the Viam platform whose files are on your machine. See the documentation of the model of ML model service you're using (pick one that supports your model framework) for instructions on this. +- You can use a [model](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) trained outside the Viam platform whose files are on your machine. See the documentation of the model of ML model service you're using (pick one that supports your model framework) for instructions on this. On its own the ML model service only runs the model. After deploying your model, you need to configure an additional service to use the deployed model. For example, you can configure an [`mlmodel` vision service](/operate/reference/services/vision/) to visualize the inferences your model makes. -Follow our docs to [run inference](/data-ai/ai/run-inference/) to add an `mlmodel` vision service and see inferences. +Follow our docs to [run inference](/data-ai/ai/infer/run-inference/) to add an `mlmodel` vision service and see inferences. For other use cases, consider [creating custom functionality with a module](/operate/get-started/other-hardware/). diff --git a/docs/data-ai/ai/train-tflite.md b/docs/data-ai/ai/train/train-tflite.md similarity index 90% rename from docs/data-ai/ai/train-tflite.md rename to docs/data-ai/ai/train/train-tflite.md index 2bc624e051..f29efb07e6 100644 --- a/docs/data-ai/ai/train-tflite.md +++ b/docs/data-ai/ai/train/train-tflite.md @@ -1,6 +1,6 @@ --- -linkTitle: "Train TFlite model" -title: "Train a TFlite model" +linkTitle: "TFlite model" +title: "Train TFlite model" weight: 20 type: "docs" tags: ["vision", "data", "services"] @@ -39,7 +39,7 @@ Follow this guide to use your image data to train an ML model, so that your mach {{% expand "a dataset with labels" %}} -Follow the guide to [create a dataset](/data-ai/ai/create-dataset/). +Follow the guide to [create a dataset](/data-ai/ai/train/create-dataset/). {{% /expand%}} @@ -131,7 +131,7 @@ If the results exceed the confidence threshold, the **Run model** section shows You can test both detection models and classifier models using the following resources together: - [a camera](/operate/reference/components/camera/) -- [a `tflite_cpu` ML model](/data-ai/ai/deploy/) with the model you just trained +- [a `tflite_cpu` ML model](/data-ai/ai/train/deploy/) with the model you just trained - [an `mlmodel` vision service](/operate/reference/services/vision/mlmodel/) using the `tflite_cpu` model ## Iterate on your ML model @@ -145,16 +145,16 @@ Using this approach, each subsequent model version becomes more accurate than th To capture images of edge cases and re-train your model using those images, complete the following steps: -1. Add edge case images to your training dataset. You can find edge cases in your existing data on the [**DATA** page](https://app.viam.com/data/) or [capture new images and add them to your training dataset](/data-ai/ai/create-dataset/#capture-images). +1. Add edge case images to your training dataset. You can find edge cases in your existing data on the [**DATA** page](https://app.viam.com/data/) or [capture new images and add them to your training dataset](/data-ai/ai/train/create-dataset/#capture-images). 1. 
Visit the **DATASET** tab of the **DATA** page and annotate the image. -1. Repeat the [steps above](/data-ai/ai/train-tflite/#train-a-machine-learning-model) to train and release a new version of your ML model. Your machines will automatically update to the new version of the model soon after release. +1. Repeat the [steps above](/data-ai/ai/train/train-tflite/#train-a-machine-learning-model) to train and release a new version of your ML model. Your machines will automatically update to the new version of the model soon after release. ## Next steps Now your machine can make inferences about its environment. -The next step is to [deploy](/data-ai/ai/deploy/) the ML model and then [act](/data-ai/ai/act/) or [alert](/data-ai/ai/alert/) based on these inferences. +The next step is to [deploy](/data-ai/ai/train/deploy/) the ML model and then [act](/data-ai/ai/react/act/) or [alert](/data-ai/ai/react/alert/) based on these inferences. See the following tutorials for examples of using machine learning models to make your machine do things based on its inferences about its environment: diff --git a/docs/data-ai/ai/train.md b/docs/data-ai/ai/train/train.md similarity index 99% rename from docs/data-ai/ai/train.md rename to docs/data-ai/ai/train/train.md index 9d7bddf16e..0072aa214f 100644 --- a/docs/data-ai/ai/train.md +++ b/docs/data-ai/ai/train/train.md @@ -1,6 +1,6 @@ --- -linkTitle: "Train other models" -title: "Train other models" +linkTitle: "Train your own model" +title: "Train your own model" tags: ["data management", "ml", "model training"] weight: 30 layout: "docs" @@ -27,7 +27,7 @@ If you wish to do this, skip to [Submit a training job](#submit-a-training-job). {{% expand "A dataset with data you can train an ML model on. Click to see instructions." %}} -For images, follow the instructions to [Create a dataset](/data-ai/ai/create-dataset/) to create a dataset and label data. +For images, follow the instructions to [Create a dataset](/data-ai/ai/train/create-dataset/) to create a dataset and label data. For other data, use the [Data Client API](/dev/reference/apis/data-client/) from within the training script to store data in the Viam Cloud. @@ -845,5 +845,5 @@ You can also view your training jobs' logs with the [`viam train logs`](/dev/too {{% /tablestep %}} {{< /table >}} -To use your new model with machines, you must [deploy it](/data-ai/ai/deploy/) with the appropriate ML model service. -Then you can use another service, such as the vision service, to [run inference](/data-ai/ai/run-inference/). +To use your new model with machines, you must [deploy it](/data-ai/ai/train/deploy/) with the appropriate ML model service. +Then you can use another service, such as the vision service, to [run inference](/data-ai/ai/infer/run-inference/). 
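To make that deploy-then-infer flow concrete, here is a minimal sketch that connects to a machine and reads detections from an `mlmodel` vision service backed by a deployed model. The resource names `vision-1` and `camera-1` and the credential values are illustrative placeholders, not fixed names; see the vision service API reference for the full interface:

```python {class="line-numbers linkable-line-numbers"}
import asyncio

from viam.robot.client import RobotClient
from viam.services.vision import VisionClient

# Placeholders -- replace with your machine's address and API key.
MACHINE_ADDRESS = "<MACHINE-ADDRESS>"
API_KEY = "<API-KEY>"
API_KEY_ID = "<API-KEY-ID>"


async def main():
    # Connect to the machine that runs the deployed ML model.
    opts = RobotClient.Options.with_api_key(
        api_key=API_KEY,
        api_key_id=API_KEY_ID,
    )
    machine = await RobotClient.at_address(MACHINE_ADDRESS, opts)

    # "vision-1" is an mlmodel vision service configured with your model;
    # "camera-1" is the camera it pulls frames from.
    detector = VisionClient.from_robot(machine, "vision-1")
    detections = await detector.get_detections_from_camera("camera-1")

    for detection in detections:
        print(f"{detection.class_name}: {detection.confidence:.2f}")

    await machine.close()


if __name__ == "__main__":
    asyncio.run(main())
```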
diff --git a/docs/data-ai/ai/advanced/upload-external-data.md b/docs/data-ai/ai/train/upload-external-data.md similarity index 97% rename from docs/data-ai/ai/advanced/upload-external-data.md rename to docs/data-ai/ai/train/upload-external-data.md index 6fd5b9def5..39ac8a8e3e 100644 --- a/docs/data-ai/ai/advanced/upload-external-data.md +++ b/docs/data-ai/ai/train/upload-external-data.md @@ -1,8 +1,8 @@ --- -linkTitle: "Upload external data" +linkTitle: "Upload training data" title: "Upload external data for training" images: ["/services/icons/data-folder.svg"] -weight: 20 +weight: 100 layout: "docs" type: "docs" languages: ["python"] @@ -13,7 +13,7 @@ aliases: - /how-tos/upload-data/ date: "2024-12-04" description: "Upload data to the Viam app from your local computer or mobile device using the data client API, Viam CLI, or Viam mobile app." -prev: "/data-ai/ai/act/" +prev: "/data-ai/ai/react/act/" --- When you configure the data management service, Viam automatically uploads data from the default directory `~/.viam/capture` and any directory you configured. @@ -38,7 +38,7 @@ However, if you already have a cache of data you'd like to use with Viam, you ca {{< expand "Enable data capture and sync on your machine." >}} -Add the [data management service](/data-ai/capture-data/capture-sync/#configure-data-capture-and-sync-for-individual-resources): +Add the [data management service](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources): On your machine's **CONFIGURE** tab, click the **+** icon next to your machine part in the left-hand menu and select **Service**. @@ -296,5 +296,5 @@ However, the uploaded images will not be associated with a component or method. ## Next steps -Now that you have a batch of data uploaded, you can [train an ML model](/data-ai/ai/train-tflite/) on it. -Or, if you want to collect and upload data _not_ in a batch, see [Create a dataset](/data-ai/ai/create-dataset/). +Now that you have a batch of data uploaded, you can [train an ML model](/data-ai/ai/train/train-tflite/) on it. +Or, if you want to collect and upload data _not_ in a batch, see [Create a dataset](/data-ai/ai/train/create-dataset/). 
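For a minimal programmatic starting point, the following sketch uploads a single local image with the data client and tags it. It assumes the Python data client's `file_upload_from_path` method and uses placeholder credentials and paths; verify the exact parameters against the data client API reference before relying on it:

```python {class="line-numbers linkable-line-numbers"}
import asyncio

from viam.app.viam_client import ViamClient
from viam.rpc.dial import Credentials, DialOptions

API_KEY = "<API-KEY>"        # find or create in your organization settings
API_KEY_ID = "<API-KEY-ID>"  # find or create in your organization settings
PART_ID = "<PART-ID>"        # machine part to associate the upload with


async def main():
    # Authenticate to the Viam cloud with an API key.
    dial_options = DialOptions(
        credentials=Credentials(type="api-key", payload=API_KEY),
        auth_entity=API_KEY_ID,
    )
    viam_client = await ViamClient.create_from_dial_options(dial_options)
    data_client = viam_client.data_client

    # Upload one local image, tagging it so it is easy to find later.
    file_id = await data_client.file_upload_from_path(
        filepath="/path/to/image.jpg",
        part_id=PART_ID,
        tags=["external-upload"],
    )
    print(f"Uploaded file: {file_id}")

    viam_client.close()


if __name__ == "__main__":
    asyncio.run(main())
```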
diff --git a/docs/data-ai/capture-data/advanced/_index.md b/docs/data-ai/capture-data/advanced/_index.md
deleted file mode 100644
index f004c032cf..0000000000
--- a/docs/data-ai/capture-data/advanced/_index.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-linkTitle: "Advanced"
-title: "Advanced"
-weight: 200
-layout: "empty"
-type: "docs"
-empty_node: true
----
diff --git a/docs/data-ai/capture-data/capture-other-sources.md b/docs/data-ai/capture-data/capture-other-sources.md
deleted file mode 100644
index 3f8ef32e65..0000000000
--- a/docs/data-ai/capture-data/capture-other-sources.md
+++ /dev/null
@@ -1,12 +0,0 @@
----
-linkTitle: "Capture other data sources"
-title: "Capture and sync other data sources"
-tags: ["data management", "data", "services"]
-weight: 12
-layout: "docs"
-type: "docs"
-platformarea: ["data"]
-description: "TODO"
-date: "2024-12-17"
-draft: true
----
diff --git a/docs/data-ai/data/_index.md b/docs/data-ai/data/_index.md
index 422b01922b..d3c754f140 100644
--- a/docs/data-ai/data/_index.md
+++ b/docs/data-ai/data/_index.md
@@ -1,6 +1,6 @@
 ---
-linkTitle: "Work with data"
-title: "Work with data"
+linkTitle: "Process Data"
+title: "Process Data"
 weight: 200
 layout: "empty"
 type: "docs"
diff --git a/docs/data-ai/data/advanced/_index.md b/docs/data-ai/data/advanced/_index.md
deleted file mode 100644
index f004c032cf..0000000000
--- a/docs/data-ai/data/advanced/_index.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-linkTitle: "Advanced"
-title: "Advanced"
-weight: 200
-layout: "empty"
-type: "docs"
-empty_node: true
----
diff --git a/docs/data-ai/data/cloud/_index.md b/docs/data-ai/data/cloud/_index.md
new file mode 100644
index 0000000000..4f645ff051
--- /dev/null
+++ b/docs/data-ai/data/cloud/_index.md
@@ -0,0 +1,10 @@
+---
+linkTitle: "Analyze"
+title: "Analyze"
+weight: 20
+layout: "empty"
+type: "docs"
+empty_node: true
+icon: true
+open_on_desktop: true
+---
diff --git a/docs/data-ai/data/export.md b/docs/data-ai/data/cloud/export.md
similarity index 97%
rename from docs/data-ai/data/export.md
rename to docs/data-ai/data/cloud/export.md
index 4d53b0be07..a2365d6a51 100644
--- a/docs/data-ai/data/export.md
+++ b/docs/data-ai/data/cloud/export.md
@@ -1,5 +1,5 @@
 ---
-linkTitle: "Export data"
+linkTitle: "Export"
 title: "Export data"
 weight: 40
 description: "Download data from the Viam app using the data client API or the Viam CLI."
@@ -15,7 +15,7 @@ aliases:
 viamresources: ["sensor", "data_manager"]
 platformarea: ["data", "cli"]
 date: "2024-12-03"
-next: "/data-ai/data/advanced/alert-data/"
+next: "/data-ai/data/react/alert-data/"
 ---

 You can download machine data from cloud storage to your computer with the Viam CLI.
diff --git a/docs/data-ai/data/query.md b/docs/data-ai/data/cloud/query.md
similarity index 97%
rename from docs/data-ai/data/query.md
rename to docs/data-ai/data/cloud/query.md
index e8ca14cb39..42badd74c0 100644
--- a/docs/data-ai/data/query.md
+++ b/docs/data-ai/data/cloud/query.md
@@ -1,5 +1,5 @@
 ---
-linkTitle: "Query data"
+linkTitle: "Query"
 title: "Query data"
 weight: 20
 layout: "docs"
 type: "docs"
@@ -17,7 +17,7 @@ date: "2024-12-03"
 description: "Query sensor data that you have synced to the Viam app using the Viam app with SQL or MQL."
 ---

-You can use the data management service to [capture sensor data](/data-ai/capture-data/capture-sync/) from any machine and sync that data to the cloud.
+You can use the data management service to [capture sensor data](/data-ai/data/edge/capture-sync/) from any machine and sync that data to the cloud.
Then, you can follow the steps on this page to query it using {{< glossary_tooltip term_id="sql" text="SQL" >}} or {{< glossary_tooltip term_id="mql" text="MQL" >}}. For example, you can configure data capture for several sensors on one machine, or for several sensors across multiple machines, to report the ambient operating temperature. You can then run queries against that data to search for outliers or edge cases, to analyze how the ambient temperature affects your machines' operation. @@ -32,7 +32,7 @@ You can then run queries against that data to search for outliers or edge cases, ### Prerequisites You must have captured sensor data. -See [capture sensor data](/data-ai/capture-data/capture-sync/) for more information. +See [capture sensor data](/data-ai/data/edge/capture-sync/) for more information. ### Query from the app @@ -218,7 +218,7 @@ Query results are displayed as a [JSON array](https://json-schema.org/understand {{% expand "Captured sensor data. Click to see instructions." %}} -Follow the guide to [capture sensor data](/data-ai/capture-data/capture-sync/). +Follow the guide to [capture sensor data](/data-ai/data/edge/capture-sync/). {{% /expand%}} @@ -353,4 +353,4 @@ db.readings.aggregate( For information on connecting to your Atlas instance from other MQL clients, see the MongoDB Atlas [Connect to your Cluster Tutorial](https://www.mongodb.com/docs/atlas/tutorial/connect-to-your-cluster/). -On top of querying sensor data with third-party tools, you can also [query it with the Python SDK](/data-ai/reference/data-client/) or [visualize it](/data-ai/data/visualize/). +On top of querying sensor data with third-party tools, you can also [query it with the Python SDK](/data-ai/reference/data-client/) or [visualize it](/data-ai/data/cloud/visualize/). diff --git a/docs/data-ai/data/visualize.md b/docs/data-ai/data/cloud/visualize.md similarity index 95% rename from docs/data-ai/data/visualize.md rename to docs/data-ai/data/cloud/visualize.md index d8e41de4d7..1f289ce7d4 100644 --- a/docs/data-ai/data/visualize.md +++ b/docs/data-ai/data/cloud/visualize.md @@ -1,5 +1,5 @@ --- -linkTitle: "Visualize data" +linkTitle: "Visualize" title: "Visualize data" weight: 20 layout: "docs" @@ -17,7 +17,7 @@ date: "2024-12-04" description: "Use teleop or grafana to visualize sensor data from the Viam app." --- -Once you have used the data management service to [capture data](/data-ai/capture-data/capture-sync/), you can visualize your data on a dashboard with the Viam app's **TELEOP** page or a variety of third-party tools, including Grafana, Tableau, Google's Looker Studio, and more. +Once you have used the data management service to [capture data](/data-ai/data/edge/capture-sync/), you can visualize your data on a dashboard with the Viam app's **TELEOP** page or a variety of third-party tools, including Grafana, Tableau, Google's Looker Studio, and more. ## Teleop @@ -80,7 +80,7 @@ Configure data query and use a third-party visualization tool like Grafana to vi {{% expand "Captured sensor data. Click to see instructions." %}} -Follow the docs to [capture data](/data-ai/capture-data/capture-sync/) from a sensor. +Follow the docs to [capture data](/data-ai/data/edge/capture-sync/) from a sensor. {{% /expand%}} @@ -176,7 +176,7 @@ sensorData.readings.aggregate([ ) ``` -See the [guide on querying data](/data-ai/data/query/) for more information. +See the [guide on querying data](/data-ai/data/cloud/query/) for more information. 
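If you want to sanity-check a query from code before wiring it into a dashboard, you can run it through the Python SDK's data client first. The following is a minimal sketch; it assumes the data client's `tabular_data_by_sql` method accepts an organization ID and a SQL string, so confirm the exact signature in the data client API reference:

```python {class="line-numbers linkable-line-numbers"}
import asyncio

from viam.app.viam_client import ViamClient
from viam.rpc.dial import Credentials, DialOptions

API_KEY = "<API-KEY>"
API_KEY_ID = "<API-KEY-ID>"
ORG_ID = "<ORG-ID>"


async def main():
    dial_options = DialOptions(
        credentials=Credentials(type="api-key", payload=API_KEY),
        auth_entity=API_KEY_ID,
    )
    viam_client = await ViamClient.create_from_dial_options(dial_options)
    data_client = viam_client.data_client

    # The same query you would run on the DATA page's query interface.
    rows = await data_client.tabular_data_by_sql(
        ORG_ID, "SELECT * FROM readings LIMIT 5"
    )
    for row in rows:
        print(row)

    viam_client.close()


if __name__ == "__main__":
    asyncio.run(main())
```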
@@ -255,7 +255,7 @@ Some third-party visualization tools support the ability to directly query your
 You might use this functionality to visualize only a single day's metrics, limit the visualization to a select machine or component, or to isolate an outlier in your reported data, for example.

 While every third-party tool is different, you would generally query your data using either {{< glossary_tooltip term_id="sql" text="SQL" >}} or {{< glossary_tooltip term_id="mql" text="MQL" >}}.
-See the [guide on querying data](/data-ai/data/query/) for more information.
+See the [guide on querying data](/data-ai/data/cloud/query/) for more information.
@@ -266,7 +266,7 @@ See the [guide on querying data](/data-ai/data/query/) for more information.
 For more detailed instructions on using Grafana, including a full step-by-step configuration walkthrough, see [visualizing data with Grafana](/tutorials/services/visualize-data-grafana/).

-On top of visualizing sensor data with third-party tools, you can also [query it with the Python SDK](/dev/reference/apis/data-client/) or [query it with the Viam app](/data-ai/data/query/).
+On top of visualizing sensor data with third-party tools, you can also [query it with the Python SDK](/dev/reference/apis/data-client/) or [query it with the Viam app](/data-ai/data/cloud/query/).

 To see full projects using visualization, check out these resources:
diff --git a/docs/data-ai/data/edge/_index.md b/docs/data-ai/data/edge/_index.md
new file mode 100644
index 0000000000..01ec93729b
--- /dev/null
+++ b/docs/data-ai/data/edge/_index.md
@@ -0,0 +1,10 @@
+---
+linkTitle: "Ingest"
+title: "Ingest"
+weight: 10
+layout: "empty"
+type: "docs"
+icon: true
+empty_node: true
+open_on_desktop: true
+---
diff --git a/docs/data-ai/capture-data/capture-sync.md b/docs/data-ai/data/edge/capture-sync.md
similarity index 83%
rename from docs/data-ai/capture-data/capture-sync.md
rename to docs/data-ai/data/edge/capture-sync.md
index 18a8bb4331..d1ea0b9c13 100644
--- a/docs/data-ai/capture-data/capture-sync.md
+++ b/docs/data-ai/data/edge/capture-sync.md
@@ -1,6 +1,6 @@
 ---
-linkTitle: "Capture and sync edge data"
-title: "Capture and sync edge data"
+linkTitle: "Sync"
+title: "Sync edge data"
 tags: ["data management", "data", "services"]
 weight: 10
 layout: "docs"
 type: "docs"
@@ -40,7 +40,7 @@ aliases:
   - /fleet/data-management/
 ---

-You can use the data management service to capture data from [supported components and services](/data-ai/capture-data/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync), then sync it to the cloud.
+You can use the data management service to capture data from [supported components and services](/data-ai/data/edge/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync), then sync it to the cloud.
 You can also sync data from arbitrary folders on your machine.

 ## How data capture and data sync works

 The data management service writes data from your configured Viam resources to local storage on-device and syncs data from the device to the cloud:

 - The data management service syncs data to the Viam cloud at a configured sync interval using encrypted gRPC calls and deletes it from the disk once synced.
 - You can capture and sync data independently; one can run without the other.

-For more information, see [How sync works](/data-ai/capture-data/advanced/how-sync-works/).
+For more information, see [How sync works](/data-ai/get-started/how-sync-works/).
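Because capture and sync run in the background, it can be useful to confirm programmatically that data is reaching the cloud. The following sketch lists metadata for recently synced files from one machine part, reusing the data client patterns shown elsewhere in these docs; the credential and part ID values are placeholders:

```python {class="line-numbers linkable-line-numbers"}
import asyncio

from viam.app.viam_client import ViamClient
from viam.rpc.dial import Credentials, DialOptions
from viam.utils import create_filter

API_KEY = "<API-KEY>"
API_KEY_ID = "<API-KEY-ID>"
PART_ID = "<PART-ID>"  # machine part whose synced data you want to check


async def main():
    dial_options = DialOptions(
        credentials=Credentials(type="api-key", payload=API_KEY),
        auth_entity=API_KEY_ID,
    )
    viam_client = await ViamClient.create_from_dial_options(dial_options)
    data_client = viam_client.data_client

    # Fetch metadata (not the binary payloads) for up to 10 synced files.
    data, _, _ = await data_client.binary_data_by_filter(
        create_filter(part_id=PART_ID),
        limit=10,
        include_binary_data=False,
    )
    for item in data:
        print(item.metadata)

    viam_client.close()


if __name__ == "__main__":
    asyncio.run(main())
```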
 ## Configure data capture and sync for individual resources
@@ -80,7 +80,7 @@ Some models do not support all options, for example webcams do not capture point
 {{< /expand >}}

-For instructions on configuring data capture and sync with JSON, go to [Advanced data capture and sync configurations](/data-ai/capture-data/advanced/advanced-data-capture-sync/) and follow the instructions for JSON examples.
+For instructions on configuring data capture and sync with JSON, go to [Advanced data capture and sync configurations](/data-ai/reference/advanced-data-capture-sync/) and follow the instructions for JSON examples.

 ## View captured data
@@ -120,13 +120,13 @@ For instructions on configuring data capture and sync with JSON, go to [Advanced
 For other ways to control data synchronization, see:

-- [Conditional sync](/data-ai/capture-data/conditional-sync/)
-- [Retention policies](/data-ai/capture-data/advanced/advanced-data-capture-sync/#cloud-data-retention)
-- [Sync optimization](/data-ai/capture-data/advanced/advanced-data-capture-sync/#sync-optimization)
+- [Conditional sync](/data-ai/data/edge/conditional-sync/)
+- [Retention policies](/data-ai/reference/advanced-data-capture-sync/#cloud-data-retention)
+- [Sync optimization](/data-ai/reference/advanced-data-capture-sync/#sync-optimization)

 {{< /alert >}}

 ## Next steps

-For more information on available configuration attributes and options like capturing directly to MongoDB or conditional sync, see [Advanced data capture and sync configurations](/data-ai/capture-data/advanced/advanced-data-capture-sync/).
-To leverage AI, you can now [create a dataset](/data-ai/ai/create-dataset/) with the data you've captured.
+For more information on available configuration attributes and options like capturing directly to MongoDB or conditional sync, see [Advanced data capture and sync configurations](/data-ai/reference/advanced-data-capture-sync/).
+To leverage AI, you can now [create a dataset](/data-ai/ai/train/create-dataset/) with the data you've captured.
diff --git a/docs/data-ai/capture-data/conditional-sync.md b/docs/data-ai/data/edge/conditional-sync.md
similarity index 96%
rename from docs/data-ai/capture-data/conditional-sync.md
rename to docs/data-ai/data/edge/conditional-sync.md
index 6654b109ff..24867720fc 100644
--- a/docs/data-ai/capture-data/conditional-sync.md
+++ b/docs/data-ai/data/edge/conditional-sync.md
@@ -1,5 +1,5 @@
 ---
-title: "Conditional cloud sync"
+title: "Conditional sync"
 linkTitle: "Conditional sync"
 description: "Trigger cloud sync to sync captured data when custom conditions are met."
 type: "docs"
@@ -15,17 +15,15 @@ aliases:
 languages: []
 viamresources: ["sensor", "data_manager"]
 platformarea: ["data", "registry"]
-next: /data-ai/capture-data/advanced/advanced-data-capture-sync/
+next: /data-ai/reference/advanced-data-capture-sync/
 date: "2024-12-04"
 ---

-### Conditional sync
-
 By default, `viam-server` checks for new data to sync at the configured interval (`sync_interval_mins`).
 You can additionally configure sync to only happen when certain conditions are met.
 For example:

-- Only sync when on WiFi
+- Only sync when connected to Wi-Fi
 - Sync when conditions are met or events are detected
 - Sync during certain time windows

 You can use {{< glossary_tooltip term_id="module" text="modules" >}} with logic that implements conditional syncing, for example [sync-at-time:timesyncsensor](https://app.viam.com/module/naomi/sync-at-time) and [sync-when-color-detected:count-color-detector](https://app.viam.com/module/chris00ns/sync-when-color-detector).
 You can also view [trigger-sync-examples module](https://github.com/viam-labs/trigger-sync-examples-v2) if you want to see more examples.

 ## Requirements

 {{< expand "Enable data capture and sync on your machine."
>}} -Add the [data management service](/data-ai/capture-data/capture-sync/#configure-data-capture-and-sync-for-individual-resources): +Add the [data management service](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources): On your machine's **CONFIGURE** tab, click the **+** icon next to your machine part in the left-hand menu and select **Service**. @@ -290,7 +288,7 @@ You have now configured sync to happen during a specific time slot. ## Test your sync configuration -To test your setup, [configure a webcam](/operate/reference/components/camera/webcam/) or another component and [enable data capture on the component](/data-ai/capture-data/capture-sync/#configure-data-capture-and-sync-for-individual-resources). +To test your setup, [configure a webcam](/operate/reference/components/camera/webcam/) or another component and [enable data capture on the component](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources). Make sure to physically connect any hardware parts to the computer controlling your machine. For a camera component, use the `ReadImage` method. The data manager will now capture data. diff --git a/docs/data-ai/capture-data/filter-before-sync.md b/docs/data-ai/data/edge/filter-before-sync.md similarity index 97% rename from docs/data-ai/capture-data/filter-before-sync.md rename to docs/data-ai/data/edge/filter-before-sync.md index 0f44042e47..31d68aa11e 100644 --- a/docs/data-ai/capture-data/filter-before-sync.md +++ b/docs/data-ai/data/edge/filter-before-sync.md @@ -1,7 +1,7 @@ --- -linkTitle: "Filter data" +linkTitle: "Filter" title: "Filter data before sync" -weight: 13 +weight: 25 layout: "docs" type: "docs" description: "Use filtering to collect and sync only certain images." @@ -96,7 +96,7 @@ You can test the vision service from the [**CONTROL** tab](/manage/troubleshoot/ **(Optional) Trigger sync with custom logic** By default, the captured data syncs at the regular interval you specified in the data capture config. -If you need to trigger sync in a different way, see [Conditional cloud sync](/data-ai/capture-data/conditional-sync/) for a documented example of syncing data only at certain times of day. +If you need to trigger sync in a different way, see [Conditional cloud sync](/data-ai/data/edge/conditional-sync/) for a documented example of syncing data only at certain times of day. {{% /tablestep %}} {{< /table >}} diff --git a/docs/data-ai/data/react/_index.md b/docs/data-ai/data/react/_index.md new file mode 100644 index 0000000000..9301fe6258 --- /dev/null +++ b/docs/data-ai/data/react/_index.md @@ -0,0 +1,9 @@ +--- +linkTitle: "React" +title: "React" +weight: 300 +layout: "empty" +type: "docs" +empty_node: true +open_on_desktop: true +--- diff --git a/docs/data-ai/data/advanced/alert-data.md b/docs/data-ai/data/react/alert-data.md similarity index 97% rename from docs/data-ai/data/advanced/alert-data.md rename to docs/data-ai/data/react/alert-data.md index 86c06d89db..88f1811e80 100644 --- a/docs/data-ai/data/advanced/alert-data.md +++ b/docs/data-ai/data/react/alert-data.md @@ -1,11 +1,11 @@ --- -linkTitle: "Alert on data" -title: "Alert on data" +linkTitle: "Respond to new data" +title: "Respond to new data" weight: 60 layout: "docs" type: "docs" description: "Use triggers to send email notifications or webhook requests when data from the machine is synced." 
-prev: "/data-ai/data/export/" +prev: "/data-ai/data/cloud/export/" --- You can use triggers to send email notifications or webhook requests when data from the machine is synced, even captured from a specific component with a specified condition. @@ -53,7 +53,7 @@ For example, a trigger configured to fire when data is captured from the motor ` For more information, see [Conditions](#conditions). {{% alert title="Note" color="note" %}} -You must [configure data capture](/data-ai/capture-data/capture-sync/#configure-data-capture-and-sync-for-individual-resources) for your component to use this trigger. +You must [configure data capture](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources) for your component to use this trigger. {{% /alert %}} {{% /tab %}} diff --git a/docs/data-ai/capture-data/_index.md b/docs/data-ai/get-started/_index.md similarity index 63% rename from docs/data-ai/capture-data/_index.md rename to docs/data-ai/get-started/_index.md index 4758d426bb..8046842842 100644 --- a/docs/data-ai/capture-data/_index.md +++ b/docs/data-ai/get-started/_index.md @@ -1,11 +1,10 @@ --- -linkTitle: "Capture data" -title: "Capture data" +linkTitle: "Get started" +title: "Get started" weight: 100 layout: "empty" type: "docs" empty_node: true open_on_desktop: true header_only: true -noedit: true --- diff --git a/docs/data-ai/get-started/annotate-images.md b/docs/data-ai/get-started/annotate-images.md new file mode 100644 index 0000000000..11496999b4 --- /dev/null +++ b/docs/data-ai/get-started/annotate-images.md @@ -0,0 +1,90 @@ +--- +linkTitle: "Annotate images" +title: "Annotate images" +weight: 40 +layout: "docs" +type: "docs" +description: "Annotate images with class tags and bounding box labels" +--- + +Use the interface on the [**DATA** page](https://app.viam.com/data/view) to annotate your images. +Always follow best practices when you label your images: + +More data means better models + +: Incorporate as much data as you practically can to improve your model's overall performance. + +Include counterexamples + +: Include images with and without the object you’re looking to classify. +This helps the model distinguish the target object from the background and reduces the chances of false positives by teaching the model what the object is _not_. + +Avoid class imbalance + +: Don't train excessively on one specific type or class, make sure each category has a roughly equal number of images. +For instance, if you're training a dog detector, include images of various dog breeds to avoid bias towards one breed. +An imbalanced dataset can lead the model to favor one class over others, reducing its overall accuracy. + +Match training images to intended use case + +: Use images that reflect the quality and conditions of your production environment. +For example, if you plan to use a low-quality camera in production, train with low-quality images. +Similarly, if your model will run all day, capture images in daylight, nighttime, dusk, and dawn conditions. + +Vary angles and distances + +: Include image examples from every angle and distance that you expect the model to handle. + +Viam enables you to annotate images for the following machine learning methods: + +{{< tabs >}} +{{% tab name="Classification" %}} + +Classification determines a descriptive tag or set of tags for an image. 
+For example, classification could help you identify:
+
+- whether an image of a food display appears `full`, `empty`, or `average`
+- the quality of manufacturing output: `good` or `bad`
+- what combination of toppings exists on a pizza: `pepperoni`, `sausage` and `pepper`, or `pineapple` and `ham` and `mushroom`
+
+Viam supports single and multiple label classification.
+To create a training set for classification, annotate tags to describe your images.
+
+To tag an image:
+
+1. Click on an image, then click the **+** next to the **Tags** option.
+1. Add one or more tags to your image.
+
+   {{}}
+
+Repeat these steps for all images in the dataset.
+
+{{% /tab %}}
+{{% tab name="Object detection" %}}
+
+Object detection identifies and determines the location of certain objects in an image.
+For example, object detection could help you identify:
+
+- how many `pizza` objects appear on a counter
+- the number of `bicycle` and `pedestrian` objects on a greenway
+- which `plant` objects are popular with `deer` in your garden
+
+To create a training set for object detection, annotate bounding boxes to teach your model to identify objects that you want to detect in future images.
+
+To label an object with a bounding box:
+
+1. Click on an image, then click the **Annotate** button in the right-side menu.
+1. Choose an existing label or create a new label.
+1. Holding the command key (on macOS) or the control key (on Linux and Windows), click and drag on the image to create the bounding box:
+
+   {{}}
+
+{{< alert title="Tip" color="tip" >}}
+
+Once created, you can move, resize, or delete the bounding box.
+{{< /alert >}}
+
+Repeat these steps for all images in the dataset.
+
+{{% /tab %}}
+{{< /tabs >}}
diff --git a/docs/data-ai/get-started/capture-images.md b/docs/data-ai/get-started/capture-images.md
new file mode 100644
index 0000000000..16ff30c845
--- /dev/null
+++ b/docs/data-ai/get-started/capture-images.md
@@ -0,0 +1,42 @@
+---
+linkTitle: "Capture images"
+title: "Capture images"
+weight: 20
+layout: "docs"
+type: "docs"
+description: "Capture images for a training dataset"
+---
+
+{{< tabs >}}
+{{% tab name="One image" %}}
+
+You can add images to a dataset directly from a camera or vision service feed in the **CONTROL** or **CONFIGURE** tabs of the Viam app.
+
+To add an image directly to a dataset from a visual feed, complete the following steps:
+
+1. Open the **TEST** panel of any camera component or vision service to view a feed of images from the camera.
+1. Click the button marked with the camera icon to save the currently displayed image to a dataset:
+   {{< imgproc src="/components/camera/add_image_to_dataset_button.png" alt="A button marked with the outline of a camera, emphasized in red" resize="800x" style="width:500px" class="imgzoom" >}}
+1. Select an existing dataset.
+1. Click **Add** to add the image to the selected dataset.
+1. When you see a success notification that reads "Saved image to dataset", you have successfully added the image to the dataset.
+
+To view images added to your dataset, go to the **DATA** page's [**DATASETS** tab](https://app.viam.com/data/datasets) in the Viam app and select your dataset.
+
+{{% /tab %}}
+{{% tab name="Many images" %}}
+
+To capture a large number of images for training an ML model, [capture and sync image data](/data-ai/data/edge/capture-sync/) using the data management service with your camera.
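+
+If you instead prefer to script individual uploads, for example for frames your camera has already saved to disk, you can push images to the cloud with the data client API's `binary_data_capture_upload` method.
+The following is a minimal sketch, not a full workflow: the file name `frame.jpg`, the camera name `my-camera`, and the empty credential constants are placeholder assumptions to replace with your own values.
+
+```python {class="line-numbers linkable-line-numbers"}
+import asyncio
+from datetime import datetime
+
+from viam.rpc.dial import DialOptions, Credentials
+from viam.app.viam_client import ViamClient
+
+# Placeholder credentials and IDs: replace with your own values
+API_KEY = ""
+API_KEY_ID = ""
+PART_ID = ""
+
+
+async def main():
+    # Connect to the Viam app using API key credentials
+    dial_options = DialOptions(
+        credentials=Credentials(type="api-key", payload=API_KEY),
+        auth_entity=API_KEY_ID,
+    )
+    viam_client = await ViamClient.create_from_dial_options(dial_options)
+    data_client = viam_client.data_client
+
+    # Read a locally saved frame ("frame.jpg" is an assumed file name)
+    with open("frame.jpg", "rb") as f:
+        image_bytes = f.read()
+
+    # Upload the frame as if it were captured by a camera named "my-camera"
+    now = datetime.now()
+    binary_data_id = await data_client.binary_data_capture_upload(
+        binary_data=image_bytes,
+        part_id=PART_ID,
+        component_type="rdk:component:camera",
+        component_name="my-camera",
+        method_name="ReadImage",
+        file_extension=".jpg",
+        data_request_times=[now, now],
+    )
+    print(f"Uploaded image: {binary_data_id}")
+
+    viam_client.close()
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
+```
+
+For bulk collection, the data management service described above remains the better fit; for more detail on scripted uploads, see [Upload external data](/data-ai/ai/train/upload-external-data/).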
+
+Viam stores the images saved by capture and sync on the [**DATA** page](https://app.viam.com/data/), but does not add the images to a dataset.
+We recommend you tag the images first and then use the CLI to [add the tagged images to a dataset](/data-ai/ai/train/create-dataset/#add-tagged-images-to-a-dataset).
+
+{{< alert title="Tip" color="tip" >}}
+
+Once you have enough images, consider disabling data capture to [avoid incurring fees](https://www.viam.com/product/pricing) for capturing large amounts of training data.
+
+{{< /alert >}}
+{{% /tab %}}
+{{< /tabs >}}
+
+Once you've captured enough images for training, you must annotate them to train a model.
diff --git a/docs/data-ai/get-started/create-training-dataset.md b/docs/data-ai/get-started/create-training-dataset.md
new file mode 100644
index 0000000000..d0b5147e99
--- /dev/null
+++ b/docs/data-ai/get-started/create-training-dataset.md
@@ -0,0 +1,155 @@
+---
+linkTitle: "Create a training dataset"
+title: "Create a training dataset"
+weight: 30
+layout: "docs"
+type: "docs"
+description: "Create a dataset to use for AI model training"
+---
+
+{{< tabs >}}
+{{% tab name="Viam app" %}}
+
+1. Open the [**DATA** page](https://app.viam.com/data/view) of the Viam app.
+
+1. Navigate to the **ALL DATA** tab.
+
+1. Use the checkbox in the upper left of each image to select labeled images.
+
+1. Click the **Add to dataset** button, select a dataset, and click the **Add ... images** button to add the selected images to the dataset.
+
+{{% /tab %}}
+{{% tab name="CLI" %}}
+
+Use the Viam CLI to filter images by label and add the filtered images to a dataset:
+
+1. First, [create a dataset](#create-a-dataset) if you haven't already.
+
+1. If you just created a dataset, use the dataset ID output by the creation command.
+   If your dataset already exists, run the following command to get a list of dataset names and corresponding IDs:
+
+   ```sh {class="command-line" data-prompt="$"}
+   viam dataset list
+   ```
+
+1. Run the following [command](/dev/tools/cli/#dataset) to add all images labeled with a subset of tags to the dataset, replacing the `<dataset-id>` placeholder with the dataset ID output by the command in the previous step:
+
+   ```sh {class="command-line" data-prompt="$"}
+   viam dataset data add filter --dataset-id=<dataset-id> --tags=red_star,blue_square
+   ```
+
+{{% /tab %}}
+{{% tab name="Data Client API" %}}
+
+The following script adds all images captured from a certain machine to a new dataset.
+Complete the following steps to use the script:
+
+1. Copy and paste the following code into a file named `add_images_from_machine_to_dataset.py` on your machine.
+
+   ```python {class="line-numbers linkable-line-numbers" data-line="10-14" }
+   import asyncio
+   from typing import List, Optional
+
+   from viam.rpc.dial import DialOptions, Credentials
+   from viam.app.viam_client import ViamClient
+   from viam.proto.app.data import BinaryData
+   from viam.utils import create_filter
+
+   # Configuration constants – replace with your actual values
+   DATASET_NAME = ""  # a unique, new name for the dataset you want to create
+   ORG_ID = ""  # your organization ID, find in your organization settings
+   PART_ID = ""  # id of machine that captured target images, find in machine config
+   API_KEY = ""  # API key, find or create in your organization settings
+   API_KEY_ID = ""  # API key ID, find or create in your organization settings
+
+   # Adjust the maximum number of images to add to the dataset
+   MAX_MATCHES = 500
+
+   async def connect() -> ViamClient:
+       """Establish a connection to the Viam client using API credentials."""
+       dial_options = DialOptions(
+           credentials=Credentials(
+               type="api-key",
+               payload=API_KEY,
+           ),
+           auth_entity=API_KEY_ID,
+       )
+       return await ViamClient.create_from_dial_options(dial_options)
+
+
+   async def fetch_binary_data_ids(data_client, part_id: str) -> List[BinaryData]:
+       """Fetch binary data metadata and return a list of BinaryData objects."""
+       data_filter = create_filter(part_id=part_id)
+       all_matches = []
+       last: Optional[str] = None
+
+       print("Getting data for part...")
+
+       while len(all_matches) < MAX_MATCHES:
+           print("Fetching more data...")
+           data, _, last = await data_client.binary_data_by_filter(
+               data_filter,
+               limit=50,
+               last=last,
+               include_binary_data=False,
+           )
+           if not data:
+               break
+           all_matches.extend(data)
+
+       return all_matches
+
+
+   async def main() -> int:
+       """Main execution function."""
+       viam_client = await connect()
+       data_client = viam_client.data_client
+
+       matching_data = await fetch_binary_data_ids(data_client, PART_ID)
+
+       print("Creating dataset...")
+
+       try:
+           dataset_id = await data_client.create_dataset(
+               name=DATASET_NAME,
+               organization_id=ORG_ID,
+           )
+           print(f"Created dataset: {dataset_id}")
+       except Exception as e:
+           print("Error creating dataset. It may already exist.")
+           print("See: https://app.viam.com/data/datasets")
+           print(f"Exception: {e}")
+           return 1
+
+       print("Adding data to dataset...")
+
+       await data_client.add_binary_data_to_dataset_by_ids(
+           binary_ids=[obj.metadata.binary_data_id for obj in matching_data],
+           dataset_id=dataset_id
+       )
+
+       print("Added files to dataset.")
+       print(f"See dataset: https://app.viam.com/data/datasets?id={dataset_id}")
+
+       viam_client.close()
+       return 0
+
+
+   if __name__ == "__main__":
+       asyncio.run(main())
+   ```
+
+1. Fill in the placeholders with values for your own organization, API key, machine, and dataset.
+
+1. Install the [Viam Python SDK](https://python.viam.dev/) by running the following command:
+
+   ```sh {class="command-line" data-prompt="$"}
+   pip install viam-sdk
+   ```
+
+1. Finally, run the following command to add the images to the dataset:
+
+   ```sh {class="command-line" data-prompt="$"}
+   python add_images_from_machine_to_dataset.py
+   ```
+
+{{% /tab %}}
+{{< /tabs >}}
diff --git a/docs/data-ai/capture-data/advanced/how-sync-works.md b/docs/data-ai/get-started/how-sync-works.md
similarity index 95%
rename from docs/data-ai/capture-data/advanced/how-sync-works.md
rename to docs/data-ai/get-started/how-sync-works.md
index a6f943f2af..f8082411a2 100644
--- a/docs/data-ai/capture-data/advanced/how-sync-works.md
+++ b/docs/data-ai/get-started/how-sync-works.md
@@ -1,14 +1,14 @@
 ---
-linkTitle: "How sync works"
-title: "How sync works"
+linkTitle: "Sync data"
+title: "Sync data"
 tags: ["data management", "data", "services"]
-weight: 12
+weight: 100
 layout: "docs"
 type: "docs"
 platformarea: ["data"]
 description: "Data capture and sync works differently for viam-server and viam-micro-server."
 date: "2024-12-18"
-prev: "/data-ai/capture-data/advanced/advanced-data-capture-sync/"
+prev: "/data-ai/reference/advanced-data-capture-sync/"
 ---
 
 Data capture and cloud sync works differently for `viam-server` and `viam-micro-server`.
@@ -72,7 +72,7 @@ When the connection is restored and sync resumes, the service continues sync whe
 If the interruption happens mid-file, sync resumes from the beginning of that file.
 
 To avoid syncing files that are still being written to, the data management service only syncs arbitrary files that haven't been modified in the previous 10 seconds.
-This default can be changed with the [`file_last_modified_millis` config attribute](/data-ai/capture-data/capture-sync/).
+This default can be changed with the [`file_last_modified_millis` config attribute](/data-ai/data/edge/capture-sync/).
 
 ## Automatic data deletion
 
@@ -112,7 +112,7 @@ When a machine loses its internet connection, it cannot resume cloud sync until
 To ensure that the machine can store all data captured while it has no connection, you need to provide enough local data storage.
 
 If your robot is offline and can't sync and your machine's disk fills up beyond a certain threshold, the data management service will delete captured data to free up additional space and maintain a working machine.
 For more information, see [Automatic data deletion details](/data-ai/get-started/how-sync-works/).
 
 Data capture supports capturing tabular data directly to MongoDB in addition to capturing to disk.
-For more information, see [Capture directly to MongoDB](/data-ai/capture-data/advanced/advanced-data-capture-sync/#capture-directly-to-your-own-mongodb-cluster).
+For more information, see [Capture directly to MongoDB](/data-ai/reference/advanced-data-capture-sync/#capture-directly-to-your-own-mongodb-cluster).
diff --git a/docs/data-ai/get-started/quickstart.md b/docs/data-ai/get-started/quickstart.md
new file mode 100644
index 0000000000..e8d71c4942
--- /dev/null
+++ b/docs/data-ai/get-started/quickstart.md
@@ -0,0 +1,33 @@
+---
+linkTitle: "Quickstart"
+title: "Quickstart"
+weight: 10
+layout: "docs"
+type: "docs"
+description: "Select and ready a machine for use with the Viam platform"
+---
+
+To ensure a machine learning model you create performs well, you need to train it on a variety of images that cover the range of things your machine should be able to recognize.
+
+To train a model, you need a dataset that meets the following criteria:
+
+- the dataset contains at least 15 images
+- at least 80% of the images have labels
+- for each selected label, at least 10 bounding boxes exist
+
+This quickstart explains how to create a dataset that meets these criteria for your training purposes.
+
+## Prerequisites
+
+To complete this quickstart, you need:
+
+- a machine, such as a single-board computer or laptop, that can run `viam-server`
+- a [Viam app](https://app.viam.com) account
+
+## Connect machine to the Viam app
+
+{{% snippet "setup.md" %}}
+
+## Add a camera to your machine
+
+Follow the guide to configure a [webcam](/operate/reference/components/camera/webcam/) or similar [camera component](/operate/reference/components/camera/).
diff --git a/docs/data-ai/capture-data/advanced/advanced-data-capture-sync.md b/docs/data-ai/reference/advanced-data-capture-sync.md
similarity index 98%
rename from docs/data-ai/capture-data/advanced/advanced-data-capture-sync.md
rename to docs/data-ai/reference/advanced-data-capture-sync.md
index 99be13c5ef..cd63b8a1c7 100644
--- a/docs/data-ai/capture-data/advanced/advanced-data-capture-sync.md
+++ b/docs/data-ai/reference/advanced-data-capture-sync.md
@@ -1,13 +1,13 @@
 ---
-linkTitle: "Advanced data capture and sync configurations"
-title: "Advanced data capture and sync configurations"
+linkTitle: "Sync configuration"
+title: "Sync configuration"
 tags: ["data management", "data", "services"]
 weight: 10
 layout: "docs"
 type: "docs"
 platformarea: ["data"]
 description: "Advanced data capture and data sync configurations."
-prev: /data-ai/capture-data/conditional-sync/
+prev: /data-ai/data/edge/conditional-sync/
 date: "2025-02-10"
 ---
 
@@ -522,7 +522,7 @@ The following attributes are available for data capture configuration:
 | Name | Type | Required? | Description |
 | ------------------ | ------ | --------- | ----------- |
 | `capture_frequency_hz` | float | **Required** | Frequency in hertz at which to capture data. For example, to capture a reading every 2 seconds, enter `0.5`. |
-| `method` | string | **Required** | Depends on the type of component or service. See [Supported components and services](/data-ai/capture-data/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync). |
+| `method` | string | **Required** | Depends on the type of component or service. See [Supported components and services](/data-ai/data/edge/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync). |
 | `retention_policy` | object | Optional | Option to configure how long data collected by this component or service should remain stored in the Viam Cloud. You must set this in JSON mode. See the JSON example for a camera component.
**Options:** `"days": <int>`, `"binary_limit_gb": <int>`, `"tabular_limit_gb": <int>`.
Days are in UTC time. Setting a retention policy of 1 day means that data stored now will be deleted the following day **in UTC time**. You can set either or both of the size limit options and size is in gigabytes. The `retention_policy` does not affect logs. For information about logs, see [Logging](/operate/reference/viam-server/#logging). | | `recent_data_store` | object | Optional | Configure a rolling time frame of recent data to store in a [hot data store](#capture-to-the-hot-data-store) for faster access. Example: `{ "stored_hours": 24 }` | | `additional_params` | depends | depends | Varies based on the method. For example, `ReadImage` requires a MIME type. | @@ -705,9 +705,9 @@ Failing to write to MongoDB doesn't affect capturing and syncing data to cloud s Configure how long your synced data remains stored in the cloud: - **Retain data up to a certain size (for example, 100GB) or for a specific length of time (for example, 14 days):** Set `retention_policies` at the resource level. - See the `retention_policy` field in [data capture configuration attributes](/data-ai/capture-data/advanced/advanced-data-capture-sync/#click-to-view-data-capture-attributes). + See the `retention_policy` field in [data capture configuration attributes](/data-ai/reference/advanced-data-capture-sync/#click-to-view-data-capture-attributes). - **Delete data captured by a machine when you delete the machine:** Control whether your cloud data is deleted when a machine or machine part is removed. - See the `delete_data_on_part_deletion` field in the [data management service configuration attributes](/data-ai/capture-data/advanced/advanced-data-capture-sync/#click-to-view-data-management-attributes). + See the `delete_data_on_part_deletion` field in the [data management service configuration attributes](/data-ai/reference/advanced-data-capture-sync/#click-to-view-data-management-attributes). ### Sync optimization diff --git a/docs/data-ai/reference/mlmodel-design.md b/docs/data-ai/reference/mlmodel-design.md index 29b375f44a..d7b2f723b0 100644 --- a/docs/data-ai/reference/mlmodel-design.md +++ b/docs/data-ai/reference/mlmodel-design.md @@ -1,7 +1,7 @@ --- -title: "Design your ML Models for Vision" -linkTitle: "ML model service design" -weight: 60 +title: "Vision service tensor formats" +linkTitle: "Vision service tensor formats" +weight: 20 type: "docs" tags: ["data management", "ml", "model training", "vision"] description: "Design your ML Model service to work with Viam's vision services." @@ -15,11 +15,11 @@ aliases: - /operate/reference/advanced-modules/mlmodel-design/ --- -The [Machine Learning (ML) model service](/data-ai/ai/deploy/) allows you to deploy machine learning models to your smart machine. +The [Machine Learning (ML) model service](/data-ai/ai/train/deploy/) allows you to deploy machine learning models to your smart machine. Vision services, like [an `"mlmodel"` detector](/dev/reference/apis/services/vision/#detections) or [classifier](/dev/reference/apis/services/vision/#classifications), enable your machines to identify and classify objects in images with the deployed models' predictions. The two services work closely together, with the vision service relying on the deployed ML model to make inferences. -If you are [designing your own ML Model service](/data-ai/ai/deploy/), you must try to make your ML models' shapes match the input and output tensors the `mlmodel` vision service expects to work with if you want the two services to coordinate in classification or detection. 
+If you are [designing your own ML Model service](/data-ai/ai/train/deploy/), design your ML model's input and output tensor shapes to match those that the `mlmodel` vision service expects, so that the two services can coordinate on classification or detection.
 
 To be able to use a deployed ML model, the `mlmodel` vision service checks for descriptions of these characteristics in the [metadata](/dev/reference/apis/services/ml/#metadata) of the model, as defined in [the Python SDK](https://python.viam.dev/autoapi/viam/gen/service/mlmodel/v1/mlmodel_pb2/index.html#viam.gen.service.mlmodel.v1.mlmodel_pb2.Metadata).
 For an example of this, see [Example Metadata](#example-metadata).
diff --git a/docs/data-ai/reference/temp.md b/docs/data-ai/reference/temp.md
new file mode 100644
index 0000000000..db9f6cfa53
--- /dev/null
+++ b/docs/data-ai/reference/temp.md
@@ -0,0 +1,10 @@
+---
+linkTitle: "Trigger configuration"
+title: "Trigger configuration"
+tags: ["data management", "data", "services"]
+weight: 10
+layout: "docs"
+type: "docs"
+platformarea: ["data"]
+description: "Stand-in page for separate branch's reference page on triggers"
+---
diff --git a/docs/dev/_index.md b/docs/dev/_index.md
index 61781740e0..f6a9b62539 100644
--- a/docs/dev/_index.md
+++ b/docs/dev/_index.md
@@ -303,7 +303,7 @@ std::cout << "co2-monitor get_readings return value " << co2monitor_get_readings
 
 Once you have configured a physical sensor or anything else that provides measurements, you can get sensor readings using the sensor API.
 
-[Collect sensor data →](/data-ai/capture-data/capture-sync/)
+[Collect sensor data →](/data-ai/data/edge/capture-sync/)
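+
+For reference, a minimal Python sketch of the same sensor-readings call, assuming a machine with a sensor configured under the name `my-sensor` and placeholder connection credentials:
+
+```python {class="line-numbers linkable-line-numbers"}
+import asyncio
+
+from viam.robot.client import RobotClient
+from viam.components.sensor import Sensor
+
+
+async def connect() -> RobotClient:
+    # Placeholder credentials: replace with your machine's API key and address
+    opts = RobotClient.Options.with_api_key(
+        api_key="<API-KEY>",
+        api_key_id="<API-KEY-ID>",
+    )
+    return await RobotClient.at_address("<MACHINE-ADDRESS>", opts)
+
+
+async def main():
+    machine = await connect()
+    # "my-sensor" is an assumed resource name; use the name from your config
+    sensor = Sensor.from_robot(machine, "my-sensor")
+    readings = await sensor.get_readings()
+    print(readings)
+    await machine.close()
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
+```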
@@ -546,7 +546,7 @@ tabular_data, count, last = await data_client.tabular_data_by_filter( You can query synced sensor data, images, and any other binary or timeseries data from all your machines using the data client API. -[Learn about Data Management →](/data-ai/capture-data/capture-sync/) +[Learn about Data Management →](/data-ai/data/edge/capture-sync/) @@ -753,7 +753,7 @@ job_metadata = await ml_training_client.get_training_job( Build machine learning models based on your machines' data any time using the ML training client API -[Train and deploy ML models →](/data-ai/ai/train-tflite/) +[Train and deploy ML models →](/data-ai/ai/train/train-tflite/) diff --git a/docs/dev/reference/apis/services/data.md b/docs/dev/reference/apis/services/data.md index ebf902707a..47a114b1b4 100644 --- a/docs/dev/reference/apis/services/data.md +++ b/docs/dev/reference/apis/services/data.md @@ -14,7 +14,7 @@ aliases: The data management service API allows you to sync data stored on the machine it is deployed on to the cloud. -The [data management service](/data-ai/capture-data/capture-sync/) supports the following methods: +The [data management service](/data-ai/data/edge/capture-sync/) supports the following methods: {{< readfile "/static/include/services/apis/generated/data_manager-table.md" >}} diff --git a/docs/dev/reference/apis/services/ml.md b/docs/dev/reference/apis/services/ml.md index 903caf5f08..f9a894761a 100644 --- a/docs/dev/reference/apis/services/ml.md +++ b/docs/dev/reference/apis/services/ml.md @@ -15,7 +15,7 @@ aliases: The ML model service API allows you to make inferences based on a provided ML model. -The [ML Model service](/data-ai/ai/deploy/) supports the following methods: +The [ML Model service](/data-ai/ai/train/deploy/) supports the following methods: {{< readfile "/static/include/services/apis/generated/mlmodel-table.md" >}} diff --git a/docs/dev/reference/changelog.md b/docs/dev/reference/changelog.md index 478a8374ad..13f590053c 100644 --- a/docs/dev/reference/changelog.md +++ b/docs/dev/reference/changelog.md @@ -120,7 +120,7 @@ You can store and retrieve arbitrary metadata about your organization, location, {{% changelog color="added" title="Hot Data Store" date="2025-03-11" %}} -The [hot data store](/data-ai/capture-data/advanced/advanced-data-capture-sync/#capture-to-the-hot-data-store) allows you to access recent data faster. +The [hot data store](/data-ai/reference/advanced-data-capture-sync/#capture-to-the-hot-data-store) allows you to access recent data faster. {{% /changelog %}} @@ -262,7 +262,7 @@ The [arm interface](/dev/reference/apis/components/arm/) now includes a [MoveThr {{% changelog date="2024-10-16" color="added" title="Set data retention policies" %}} You can now set how long data collected by a component should remain stored in the Viam Cloud in the component's data capture configuration. -For more information, see [Data management service](/data-ai/capture-data/capture-sync/). +For more information, see [Data management service](/data-ai/data/edge/capture-sync/). {{% /changelog %}} @@ -296,14 +296,14 @@ For more information, see [Configure provisioning with viam-agent](/manage/fleet {{% changelog date="2024-08-16" color="added" title="Data capture for vision" %}} Data capture is now possible for the vision service. -For more information, see [Supported components and services](/data-ai/capture-data/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync). 
+For more information, see [Supported components and services](/data-ai/data/edge/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync).
 
 {{% /changelog %}}
 
 {{% changelog date="2024-08-01" color="added" title="Create custom training scripts" %}}
 
 You can now upload custom training scripts to the Viam Registry and use them to train machine learning models.
-For more information, see [Create custom training scripts](/data-ai/ai/train/).
+For more information, see [Create custom training scripts](/data-ai/ai/train/train/).
 
 {{% /changelog %}}
 
@@ -428,7 +428,7 @@ In addition to other improvements, your component, service, and other resource c
 {{% changelog date="2024-03-01" color="added" title="Additional ML models" %}}
 
 Viam has added support for the TensorFlow, PyTorch, and ONNX ML model frameworks, expanding upon the existing support for TensorFlow Lite models.
-You can now upload your own ML model(/data-ai/ai/deploy/#deploy-your-ml-model-on-an-ml-model-service) using any of these frameworks for use with the Vision service.
+You can now upload your own [ML model](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) using any of these frameworks for use with the Vision service.
 
 {{% /changelog %}}
 
@@ -467,7 +467,7 @@ You can now use the [generic service](/operate/reference/components/generic/) to
 
 {{% changelog date="2024-02-12" color="added" title="ML models in the registry" %}}
 
-You can now upload [machine learning (ML) models](/data-ai/ai/deploy/#deploy-your-ml-model-on-an-ml-model-service) to the Viam Registry, in addition to modules.
+You can now upload [machine learning (ML) models](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) to the Viam Registry, in addition to modules.
 You may upload models you have trained yourself using the Viam app, or models you have trained outside of the App.
 When uploading, you have the option to make your model available to the general public for reuse.
 
@@ -481,8 +481,8 @@ Viam has added a [sensor-controlled base](/operate/reference/components/base/sen
 
 {{% changelog date="2024-01-31" color="added" title="Visualize captured data" %}}
 
-You can now [visualize your data](/data-ai/data/visualize/) using many popular third-party visualization tools, including Grafana, Tableau, Google’s Looker Studio, and more.
-You can visualize any data, such as sensor readings, that you have [synced](/data-ai/capture-data/capture-sync/) to the Viam app from your machine.
+You can now [visualize your data](/data-ai/data/cloud/visualize/) using many popular third-party visualization tools, including Grafana, Tableau, Google’s Looker Studio, and more.
+You can visualize any data, such as sensor readings, that you have [synced](/data-ai/data/edge/capture-sync/) to the Viam app from your machine.
 
 See [Visualize data with Grafana](/tutorials/services/visualize-data-grafana/) for a full walkthrough focused on Grafana specifically.
 
@@ -490,7 +490,7 @@ See [Visualize data with Grafana](/tutorials/services/visualize-data-grafana/) f
 
 {{% changelog date="2024-01-31" color="added" title="Use triggers to trigger actions" %}}
 
-You can now configure [triggers](/data-ai/data/advanced/alert-data/) (previously called webhooks) to execute actions when certain types of data are sent from your machine to the cloud.
+You can now configure [triggers](/data-ai/data/react/alert-data/) (previously called webhooks) to execute actions when certain types of data are sent from your machine to the cloud.
{{% /changelog %}}
 
@@ -527,13 +527,13 @@ Location secrets, the previous method of authentication, is deprecated and will
 
 Once you have added the data management service and synced data, such as sensor readings, to the Viam app, you can now run queries against both captured data as well as its metadata using either SQL or MQL.
 
-For more information, see [Query Data with SQL or MQL](/data-ai/data/query/).
+For more information, see [Query Data with SQL or MQL](/data-ai/data/cloud/query/).
 
 {{% /changelog %}}
 
 {{% changelog date="2023-11-30" color="changed" title="Model training from datasets" %}}
 
-To make it easier to iterate while training machine learning models from image data, you now train models from [datasets](/data-ai/ai/create-dataset/).
+To make it easier to iterate while training machine learning models from image data, you now train models from [datasets](/data-ai/ai/train/create-dataset/).
 
 {{% /changelog %}}
 
@@ -604,7 +604,7 @@ You now have the capability to use a [power sensor component](/operate/reference
 {{% /changelog %}}
 
 {{% changelog date="2023-09-30" color="added" title="Filter component’s data before the cloud" %}}
-Viam has written a module that allows you to filter data based on specific criteria before syncing it to [Viam's cloud](/data-ai/capture-data/capture-sync/).
+Viam has written a module that allows you to filter data based on specific criteria before syncing it to [Viam's cloud](/data-ai/data/edge/capture-sync/).
 It equips machines to:
 
 - Remove data that is not of interest
@@ -709,7 +709,7 @@ To better control gantries with Viam, you can now:
 
 {{% changelog date="2023-06-30" color="improved" title="Optimized Viam-trained object detection models" %}}
 
-This update for TFlite object detection models [trained with the machine learning service](/data-ai/ai/train-tflite/) brings significant improvements, including:
+This update for TFlite object detection models [trained with the machine learning service](/data-ai/ai/train/train-tflite/) brings significant improvements, including:
 
 - 76% faster model inference for camera streams
 - 64% quicker model training for object detection
@@ -725,7 +725,7 @@ The beta release of the [TypeScript SDK](https://github.com/viamrobotics/viam-ty
 
 {{% changelog date="2023-05-31" color="added" title="Train object detection ML models" %}}
 
-You now have the capability to directly [train a TFlite object detection models](/data-ai/ai/train-tflite/) in addition to image classification models from within the Viam app.
+You now have the capability to directly [train TFlite object detection models](/data-ai/ai/train/train-tflite/) in addition to image classification models from within the Viam app.
 
 This update allows you to:
 
@@ -1142,15 +1142,15 @@ You will no longer be able to add or remove models using the SDKs.
 
 #### Add machine learning vision models to a vision service
 
 The way to add machine learning vision models is changing.
-You will need to first register the machine learning model file with the [ML model service](/data-ai/ai/deploy/) and then add that registered model to a vision service.
+You will need to first register the machine learning model file with the [ML model service](/data-ai/ai/train/deploy/) and then add that registered model to a vision service.
{{% /changelog %}}
 
 {{% changelog date="2023-03-31" color="added" title="Machine learning for image classification models" %}}
 
-You can now [train](/data-ai/ai/train-tflite/) and [deploy](/data-ai/ai/deploy/) image classification models with the [data management service](/data-ai/capture-data/capture-sync/) and use your machine's image data directly within Viam.
-Additionally, you can upload and use existing [machine learning models](/data-ai/ai/deploy/#deploy-your-ml-model-on-an-ml-model-service) with your machines.
-For more information on using data synced to the cloud to train machine learning models, read [train a TFlite](/data-ai/ai/train-tflite/) or [another model](/data-ai/ai/train/).
+You can now [train](/data-ai/ai/train/train-tflite/) and [deploy](/data-ai/ai/train/deploy/) image classification models with the [data management service](/data-ai/data/edge/capture-sync/) and use your machine's image data directly within Viam.
+Additionally, you can upload and use existing [machine learning models](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) with your machines.
+For more information on using data synced to the cloud to train machine learning models, read [train a TFlite model](/data-ai/ai/train/train-tflite/) or [train another model](/data-ai/ai/train/train/).
 
 {{% /changelog %}}
 
diff --git a/docs/dev/reference/glossary/mql.md b/docs/dev/reference/glossary/mql.md
index 99c20e8f5d..d8780dc684 100644
--- a/docs/dev/reference/glossary/mql.md
+++ b/docs/dev/reference/glossary/mql.md
@@ -7,4 +7,4 @@ short_description: MQL is the MongoDB query language, similar to SQL but specifi
 
 MQL is the [MongoDB query language](https://www.mongodb.com/docs/manual/tutorial/query-documents/), similar to {{< glossary_tooltip term_id="sql" text="SQL" >}} but specific to the MongoDB document model.
 
-You can use MQL to query data that you have synced to the Viam app using the [data management service](/data-ai/capture-data/capture-sync/).
+You can use MQL to query data that you have synced to the Viam app using the [data management service](/data-ai/data/edge/capture-sync/).
diff --git a/docs/dev/reference/glossary/sql.md b/docs/dev/reference/glossary/sql.md
index 3615114646..0eb9148767 100644
--- a/docs/dev/reference/glossary/sql.md
+++ b/docs/dev/reference/glossary/sql.md
@@ -7,4 +7,4 @@ short_description: SQL (structured query language) is the widely-used, industry-
 
 [SQL (structured query language)](https://en.wikipedia.org/wiki/SQL) is the widely-used, industry-standard query language popular with [relational databases](https://en.wikipedia.org/wiki/Relational_database).
 
-You can use SQL to query data that you have synced to the Viam app using the [data management service](/data-ai/capture-data/capture-sync/).
+You can use SQL to query data that you have synced to the Viam app using the [data management service](/data-ai/data/edge/capture-sync/).
diff --git a/docs/dev/reference/sdks/connectivity.md b/docs/dev/reference/sdks/connectivity.md
index 5ad9708343..cf026052e3 100644
--- a/docs/dev/reference/sdks/connectivity.md
+++ b/docs/dev/reference/sdks/connectivity.md
@@ -104,7 +104,7 @@ When a machine loses its connection to the internet but is still connected to a
 
 - Client sessions connected through the same LAN or WAN will function normally.
 - Client sessions connected through the internet will timeout and end.
   If the client is on the same LAN or WAN but the route it chose to connect is through the internet, the client will automatically disconnect and then reconnect over LAN.
-- Cloud sync for the [data management service](/data-ai/capture-data/capture-sync/) will pause until the internet connection is re-established since the machine will be unable to connect to the [Viam app](https://app.viam.com). +- Cloud sync for the [data management service](/data-ai/data/edge/capture-sync/) will pause until the internet connection is re-established since the machine will be unable to connect to the [Viam app](https://app.viam.com). When a machine loses its connection to LAN or WAN, all client sessions will timeout and end by default. diff --git a/docs/dev/reference/sdks/python/python-venv.md b/docs/dev/reference/sdks/python/python-venv.md index 34518e1a80..7837350871 100644 --- a/docs/dev/reference/sdks/python/python-venv.md +++ b/docs/dev/reference/sdks/python/python-venv.md @@ -68,7 +68,7 @@ pip3 install viam-sdk This installs the Viam Python SDK and all required general dependencies. -If you intend to use the [ML (machine learning) model service](/data-ai/ai/deploy/), install the Python SDK using the `mlmodel` extra: +If you intend to use the [ML (machine learning) model service](/data-ai/ai/train/deploy/), install the Python SDK using the `mlmodel` extra: ```sh {class="command-line" data-prompt="$"} pip3 install 'viam-sdk[mlmodel]' diff --git a/docs/dev/tools/cli.md b/docs/dev/tools/cli.md index fe4a50ed3a..0b1439df76 100644 --- a/docs/dev/tools/cli.md +++ b/docs/dev/tools/cli.md @@ -399,7 +399,7 @@ The **Binary Data ID** is shown under the **DETAILS** subtab that appears on the You cannot use filter arguments such as `--start` or `--end` with the `ids` argument. -See [Create a dataset](/data-ai/ai/create-dataset/) for more information. +See [Create a dataset](/data-ai/ai/train/create-dataset/) for more information. ##### Using the `filter` argument @@ -430,7 +430,7 @@ Removing the `viam data export` string, you can use the same filter parameters ( You cannot use the `--binary-data-ids` argument when using `filter`. -See [Create a dataset](/data-ai/ai/create-dataset/) for more information. +See [Create a dataset](/data-ai/ai/train/create-dataset/) for more information. ### `data` @@ -499,8 +499,8 @@ done | `export tabular` | Export tabular or sensor data to a specified location in the .ndjson output format. You can copy this from the UI with a filter. See [Copy `export` command](#copy-export-command). | - | | `export binary` | Export binary or image data to a specified location. Binary data will be downloaded in the original output it was specified as. You can copy this from the UI with a filter. See [Copy `export` command](#copy-export-command). | - | | `tag` | Add or remove tags from data matching the IDs or filter. | `ids`, `filter` | -| `database configure` | Create a new database user for the Viam organization's MongoDB Atlas Data Federation instance, or change the password of an existing user. See [Configure data query](/data-ai/data/query/#configure-data-query). | - | -| `database hostname` | Get the MongoDB Atlas Data Federation instance hostname and connection URI. See [Configure data query](/data-ai/data/query/#configure-data-query). | - | +| `database configure` | Create a new database user for the Viam organization's MongoDB Atlas Data Federation instance, or change the password of an existing user. See [Configure data query](/data-ai/data/cloud/query/#configure-data-query). | - | +| `database hostname` | Get the MongoDB Atlas Data Federation instance hostname and connection URI. See [Configure data query](/data-ai/data/cloud/query/#configure-data-query). 
| - | | `delete binary` | Delete binary data from the Viam Cloud. | - | | `delete tabular` | Delete tabular data from the Viam Cloud. | - | | `--help` | Return help | - | @@ -1309,7 +1309,7 @@ You can set a default profile by using the `VIAM_CLI_PROFILE_NAME` environment v ### `training-script` -Manage training scripts for [custom ML training](/data-ai/ai/train/). +Manage training scripts for [custom ML training](/data-ai/ai/train/train/). ```sh {class="command-line" data-prompt="$"} viam training-script upload --framework= --org-id= --path= --script-name= --type= diff --git a/docs/manage/fleet/reuse-configuration.md b/docs/manage/fleet/reuse-configuration.md index 288c03c431..5c86d48dac 100644 --- a/docs/manage/fleet/reuse-configuration.md +++ b/docs/manage/fleet/reuse-configuration.md @@ -39,7 +39,7 @@ You must be an [organization owner](/manage/manage/rbac/) to create fragments fo {{% tablestep number=2 %}} **Add and configure all the resources** you want to use on your machines. -Fragments support all available resources except [triggers](/data-ai/data/advanced/alert-data/). +Fragments support all available resources except [triggers](/data-ai/data/react/alert-data/). You can even add other fragments inside a fragment. {{< alert title="Tip" color="tip" >}} @@ -296,7 +296,7 @@ This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operato {{< /expand >}} {{< expand "Modify data sync settings" >}} -This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operator/update/set/#mongodb-update-up.-set) to change the sync interval for a [data management service](/data-ai/capture-data/capture-sync/) named `data-management` in the fragment: +This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operator/update/set/#mongodb-update-up.-set) to change the sync interval for a [data management service](/data-ai/data/edge/capture-sync/) named `data-management` in the fragment: ```json {class="line-numbers linkable-line-numbers"} "fragment_mods": [ @@ -341,7 +341,7 @@ The `version` field supports the following values: {{< /expand >}} {{< expand "Set a package version" >}} -This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operator/update/set/#mongodb-update-up.-set) to configure [version update settings for a package](/data-ai/ai/deploy/#deploy-a-specific-version-of-an-ml-model) named `package_name` from the fragment: +This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operator/update/set/#mongodb-update-up.-set) to configure [version update settings for a package](/data-ai/ai/train/deploy/#deploy-a-specific-version-of-an-ml-model) named `package_name` from the fragment: ```json {class="line-numbers linkable-line-numbers"} "fragment_mods": [ diff --git a/docs/manage/manage/rbac.md b/docs/manage/manage/rbac.md index 0a53836a03..ceb0ec4b53 100644 --- a/docs/manage/manage/rbac.md +++ b/docs/manage/manage/rbac.md @@ -102,7 +102,7 @@ Permissions for managing {{< glossary_tooltip term_id="fragment" text="fragments ## Data and machine learning -Permissions for [data management](/data-ai/capture-data/capture-sync/) and [machine learning](/data-ai/ai/deploy/) are as follows: +Permissions for [data management](/data-ai/data/edge/capture-sync/) and [machine learning](/data-ai/ai/train/deploy/) are as follows: | Permissions | Org owner | Org operator | Location owner | Location operator | Machine owner | Machine operator | diff --git a/docs/manage/troubleshoot/alert.md b/docs/manage/troubleshoot/alert.md index 
f79884134b..c4312e295b 100644
--- a/docs/manage/troubleshoot/alert.md
+++ b/docs/manage/troubleshoot/alert.md
@@ -34,7 +34,7 @@ You can receive alerts for the following events involving machine performance te
 For example, you can configure a trigger to send you a notification when your machine's CPU usage reaches a certain threshold.
 
 {{< alert title="Tip" color="tip" >}}
-You can also configure alerts on any other machine data, for more information on that, see [Alert on data](/data-ai/data/advanced/alert-data/).
+You can also configure alerts on any other machine data; for more information, see [Alert on data](/data-ai/data/react/alert-data/).
 {{< /alert >}}
 
 ## Data meets condition
@@ -98,7 +98,7 @@ You can also see readings on the **CONTROL** tab.
 
 ### Configure data management
 
-To capture or alert on the data from your configured sensor, you must add the [data management service](/data-ai/capture-data/capture-sync/) and configure it to capture and sync the sensor data:
+To capture or alert on the data from your configured sensor, you must add the [data management service](/data-ai/data/edge/capture-sync/) and configure it to capture and sync the sensor data:
 
 {{< table >}}
 {{% tablestep number=1 %}}
@@ -378,7 +378,7 @@ Click the **Save** button in the top right corner of the page to save your confi
 
 ## Data synced
 
-You must [configure data capture](/data-ai/capture-data/capture-sync/) for your machine to use this trigger.
+You must [configure data capture](/data-ai/data/edge/capture-sync/) for your machine to use this trigger.
 
 {{< tabs >}}
 {{% tab name="Builder mode" %}}
diff --git a/docs/manage/troubleshoot/teleoperate/default-interface.md b/docs/manage/troubleshoot/teleoperate/default-interface.md
index 03966caee1..24c83ba742 100644
--- a/docs/manage/troubleshoot/teleoperate/default-interface.md
+++ b/docs/manage/troubleshoot/teleoperate/default-interface.md
@@ -45,7 +45,7 @@ Additionally, the app allows you to:
 
 - see if your machines are online
 - [view a machine's logs](/manage/troubleshoot/troubleshoot/#check-logs)
diff --git a/docs/operate/control/headless-app.md b/docs/operate/control/headless-app.md index 20280a2788..45894e52b0 100644 --- a/docs/operate/control/headless-app.md +++ b/docs/operate/control/headless-app.md @@ -63,7 +63,7 @@ Windows is not supported. If you are using Windows, use the [Windows Subsystem for Linux (WSL)](https://learn.microsoft.com/en-us/windows/wsl/install) and install the Python SDK using the preceding instructions for Linux. For other unsupported systems, see [Installing from source](https://python.viam.dev/#installing-from-source). -If you intend to use the [ML (machine learning) model service](/data-ai/ai/deploy/), use the following command instead, which installs additional required dependencies along with the Python SDK: +If you intend to use the [ML (machine learning) model service](/data-ai/ai/train/deploy/), use the following command instead, which installs additional required dependencies along with the Python SDK: ```sh {class="command-line" data-prompt="$"} pip install 'viam-sdk[mlmodel]' diff --git a/docs/operate/get-started/other-hardware/_index.md b/docs/operate/get-started/other-hardware/_index.md index 85323ed947..2de4dbda24 100644 --- a/docs/operate/get-started/other-hardware/_index.md +++ b/docs/operate/get-started/other-hardware/_index.md @@ -58,7 +58,7 @@ For C++ module examples, see the [C++ examples directory on GitHub](https://gith {{< expand "How and where do modules run?" >}} Modules run on your machine, alongside `viam-server` as separate processes, communicating with `viam-server` over UNIX sockets. -[`viam-server` manages](/operate/reference/viam-server/) the dependencies, start-up, reconfiguration, [data management](/data-ai/capture-data/capture-sync/), and shutdown behavior of your modular resource. +[`viam-server` manages](/operate/reference/viam-server/) the dependencies, start-up, reconfiguration, [data management](/data-ai/data/edge/capture-sync/), and shutdown behavior of your modular resource. The lifecycle of a module and the resources it provides is as follows: diff --git a/docs/operate/get-started/supported-hardware/_index.md b/docs/operate/get-started/supported-hardware/_index.md index 77822b97c1..310b0cc821 100644 --- a/docs/operate/get-started/supported-hardware/_index.md +++ b/docs/operate/get-started/supported-hardware/_index.md @@ -219,8 +219,8 @@ Modules in the list above are officially supported and maintained by Viam only i If you have other hardware you need to integrate with a custom module, continue to [Integrate other hardware](/operate/get-started/other-hardware/). 
If you have configured all your hardware, you can do a variety of things with your machine: -- [Capture data from your machines](/data-ai/capture-data/capture-sync/) -- [Create a dataset](/data-ai/ai/create-dataset/) and [train an AI model](/data-ai/ai/train-tflite/) +- [Capture data from your machines](/data-ai/data/edge/capture-sync/) +- [Create a dataset](/data-ai/ai/train/create-dataset/) and [train an AI model](/data-ai/ai/train/train-tflite/) - [Write an app](/operate/control/web-app/) to interact with your machines using any of the Viam SDKs - [Deploy control logic to run directly on your machines](/manage/software/control-logic/) - [Share the configuration across multiple machines](/manage/fleet/reuse-configuration/) diff --git a/docs/operate/mobility/use-input-to-act.md b/docs/operate/mobility/use-input-to-act.md index fefa540e6f..45832f0566 100644 --- a/docs/operate/mobility/use-input-to-act.md +++ b/docs/operate/mobility/use-input-to-act.md @@ -51,9 +51,9 @@ readings = await my_sensor.get_readings() Other common inputs include the methods of a [board](/dev/reference/apis/components/board/) (`GetGPIO`, `GetPWM`, `PWMFrequency`, `GetDigitalInterruptValue`, and `ReadAnalogReader`), or a [power sensor](/dev/reference/apis/components/power-sensor/) (`GetVoltage`, `GetCurrent`, `GetPower`, and `GetReadings`). You can also use camera input, for example to detect objects and pick them up with an arm. -See [Act based on inferences](/data-ai/ai/act/) for relevant examples. +See [Act based on inferences](/data-ai/ai/react/act/) for relevant examples. -If you want to send alerts based on computer vision or captured data, see [Alert on inferences](/data-ai/ai/alert/) or [Alert on data](/data-ai/data/advanced/alert-data/). +If you want to send alerts based on computer vision or captured data, see [Alert on inferences](/data-ai/ai/react/alert/) or [Alert on data](/data-ai/data/react/alert-data/). {{% /tablestep %}} {{% tablestep number=3 %}} diff --git a/docs/operate/reference/advanced-modules/_index.md b/docs/operate/reference/advanced-modules/_index.md index 237f4ce3b1..342692eb87 100644 --- a/docs/operate/reference/advanced-modules/_index.md +++ b/docs/operate/reference/advanced-modules/_index.md @@ -52,6 +52,6 @@ If you need to package and deploy a module using Docker, for example if your mod ## Design a custom ML model -When working with the [ML model service](/dev/reference/apis/services/ml/), you can deploy an [existing model](/data-ai/ai/deploy/) or [train your own model](/data-ai/ai/train/). +When working with the [ML model service](/dev/reference/apis/services/ml/), you can deploy an [existing model](/data-ai/ai/train/deploy/) or [train your own model](/data-ai/ai/train/train/). However, if you are writing your own {{< glossary_tooltip term_id="module" text="module" >}} that uses the ML model service together with the [vision service](/dev/reference/apis/services/vision/), you can also [design your own ML model](/data-ai/reference/mlmodel-design/) to better match your specific use case. diff --git a/docs/operate/reference/architecture/_index.md b/docs/operate/reference/architecture/_index.md index 8eff944670..2352ea7773 100644 --- a/docs/operate/reference/architecture/_index.md +++ b/docs/operate/reference/architecture/_index.md @@ -132,7 +132,7 @@ Data is captured and synced to the Viam Cloud as follows: If a device has intermittent internet connectivity, data is stored locally until the machine can reconnect to the cloud. 
-For more information, see [Data management service](/data-ai/capture-data/capture-sync/). +For more information, see [Data management service](/data-ai/data/edge/capture-sync/). ## Basic machine example diff --git a/docs/operate/reference/components/camera/_index.md b/docs/operate/reference/components/camera/_index.md index 27412314d5..9aed1dd74a 100644 --- a/docs/operate/reference/components/camera/_index.md +++ b/docs/operate/reference/components/camera/_index.md @@ -90,13 +90,13 @@ Provide at least the width and height values to start. For general configuration, development, and usage info, see: {{< cards >}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{% card link="/operate/control/web-app/" noimage="true" %}} {{< /cards >}} You can also use the camera component with the following services: -- [Data management service](/data-ai/capture-data/capture-sync/): To capture and sync the camera's data +- [Data management service](/data-ai/data/edge/capture-sync/): To capture and sync the camera's data - [Vision service](/operate/reference/services/vision/): To use computer vision to interpret the camera stream - [SLAM service](/operate/reference/services/slam/): for mapping diff --git a/docs/operate/reference/components/camera/calibrate.md b/docs/operate/reference/components/camera/calibrate.md index 0eed6982bf..39753e4890 100644 --- a/docs/operate/reference/components/camera/calibrate.md +++ b/docs/operate/reference/components/camera/calibrate.md @@ -114,5 +114,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/esp32-camera.md b/docs/operate/reference/components/camera/esp32-camera.md index 6e4ba36c25..b7d21d8c72 100644 --- a/docs/operate/reference/components/camera/esp32-camera.md +++ b/docs/operate/reference/components/camera/esp32-camera.md @@ -40,7 +40,7 @@ Finish building and flashing custom firmware, then return to this guide. {{< alert title="Data management not supported" color="caution" >}} -The `esp32-camera` camera model does not currently support the [data management service](/data-ai/capture-data/capture-sync/). +The `esp32-camera` camera model does not currently support the [data management service](/data-ai/data/edge/capture-sync/). 
{{< /alert >}} @@ -201,5 +201,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/fake-micro-server.md b/docs/operate/reference/components/camera/fake-micro-server.md index de4f755d81..deeb0d196a 100644 --- a/docs/operate/reference/components/camera/fake-micro-server.md +++ b/docs/operate/reference/components/camera/fake-micro-server.md @@ -66,5 +66,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/fake.md b/docs/operate/reference/components/camera/fake.md index 61ab223362..70a20da105 100644 --- a/docs/operate/reference/components/camera/fake.md +++ b/docs/operate/reference/components/camera/fake.md @@ -77,5 +77,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/ffmpeg.md b/docs/operate/reference/components/camera/ffmpeg.md index 74dfdf0bf4..6412774fd9 100644 --- a/docs/operate/reference/components/camera/ffmpeg.md +++ b/docs/operate/reference/components/camera/ffmpeg.md @@ -103,5 +103,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/image-file.md b/docs/operate/reference/components/camera/image-file.md index 7bb88b2522..9685535d74 100644 --- a/docs/operate/reference/components/camera/image-file.md +++ b/docs/operate/reference/components/camera/image-file.md @@ -92,5 +92,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/transform.md b/docs/operate/reference/components/camera/transform.md index 86cc2e0a53..2d01d27ae3 100644 --- a/docs/operate/reference/components/camera/transform.md +++ b/docs/operate/reference/components/camera/transform.md @@ -317,5 +317,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/webcam.md b/docs/operate/reference/components/camera/webcam.md index f350419522..5510621764 
100644 --- a/docs/operate/reference/components/camera/webcam.md +++ b/docs/operate/reference/components/camera/webcam.md @@ -266,7 +266,7 @@ If you are capturing camera data, it can happen that the camera captures and syn If you are using a CSI camera v1.3 or v2.0, or v3.0, use the [`viam:camera:csi` module](https://github.com/viamrobotics/csi-camera/) instead. For Raspberry Pi AI cameras like the IMX500 AI camera, use a module such as [this `viam-pi-ai-camera` vision service](https://github.com/HipsterBrown/viam-pi-ai-camera). -For more information about the vision service, see [run inference](https://docs.viam.com/data-ai/ai/run-inference/). +For more information about the vision service, see [run inference](https://docs.viam.com/data-ai/ai/infer/run-inference/). {{% /expand%}} {{% expand "High CPU usage" %}} @@ -281,5 +281,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/encoder/_index.md b/docs/operate/reference/components/encoder/_index.md index b358f0a0e5..c18fa4fec6 100644 --- a/docs/operate/reference/components/encoder/_index.md +++ b/docs/operate/reference/components/encoder/_index.md @@ -94,6 +94,6 @@ For general configuration, development, and usage info, see: You can also use the encoder component with the following services: -- [Data management service](/data-ai/capture-data/capture-sync/): To capture and sync the encoder's data +- [Data management service](/data-ai/data/edge/capture-sync/): To capture and sync the encoder's data - [Motion service](/operate/reference/services/motion/): To move machines or components of machines - [Navigation service](/operate/reference/services/navigation/): To navigate with GPS diff --git a/docs/operate/reference/components/movement-sensor/_index.md b/docs/operate/reference/components/movement-sensor/_index.md index 8c2d7d79be..8799e968f4 100644 --- a/docs/operate/reference/components/movement-sensor/_index.md +++ b/docs/operate/reference/components/movement-sensor/_index.md @@ -84,12 +84,12 @@ For general configuration and development info, see: {{< cards >}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{% card link="/operate/control/web-app/" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} To capture data from the movement sensor or use it for motion, see the following services: -- [data management service](/data-ai/capture-data/capture-sync/): to capture and sync the movement sensor's data +- [data management service](/data-ai/data/edge/capture-sync/): to capture and sync the movement sensor's data - [motion service](/operate/reference/services/motion/): to move machines or components of machines - [navigation service](/operate/reference/services/navigation/): to navigate with GPS - [SLAM service](/operate/reference/services/slam/): for mapping diff --git a/docs/operate/reference/components/power-sensor/_index.md b/docs/operate/reference/components/power-sensor/_index.md index 843420bb7a..8c6ca77e80 100644 --- a/docs/operate/reference/components/power-sensor/_index.md +++ b/docs/operate/reference/components/power-sensor/_index.md @@ -72,7 +72,7 @@ For general configuration 
and development info, see: {{< cards >}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{% card link="/operate/control/web-app/" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} -To capture data from the power sensor and sync it in the cloud, see the [data management service](/data-ai/capture-data/capture-sync/). +To capture data from the power sensor and sync it in the cloud, see the [data management service](/data-ai/data/edge/capture-sync/). diff --git a/docs/operate/reference/components/sensor/_index.md b/docs/operate/reference/components/sensor/_index.md index 7d9a50d877..09ae4811ee 100644 --- a/docs/operate/reference/components/sensor/_index.md +++ b/docs/operate/reference/components/sensor/_index.md @@ -90,7 +90,7 @@ For general configuration and development info, see: {{< cards >}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{% card link="/operate/control/web-app/" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{< /cards >}} -To capture data from the sensor, see the [data management service](/data-ai/capture-data/capture-sync/). +To capture data from the sensor, see the [data management service](/data-ai/data/edge/capture-sync/). diff --git a/docs/operate/reference/components/sensor/fake.md b/docs/operate/reference/components/sensor/fake.md index 7a25f6016d..f06bf9568f 100644 --- a/docs/operate/reference/components/sensor/fake.md +++ b/docs/operate/reference/components/sensor/fake.md @@ -62,6 +62,6 @@ Check out the [sensor API](/dev/reference/apis/components/sensor/) or check out {{< cards >}} {{% card link="/dev/reference/apis/components/sensor/" customTitle="Sensor API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/sensor/ultrasonic-micro-rdk.md b/docs/operate/reference/components/sensor/ultrasonic-micro-rdk.md index 4e060d9e33..f57d08d2c7 100644 --- a/docs/operate/reference/components/sensor/ultrasonic-micro-rdk.md +++ b/docs/operate/reference/components/sensor/ultrasonic-micro-rdk.md @@ -120,6 +120,6 @@ Check out the [sensor API](/dev/reference/apis/components/sensor/) or check out {{< cards >}} {{% card link="/dev/reference/apis/components/sensor/" customTitle="Sensor API" noimage="true" %}} -{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/services/slam/cloudslam/_index.md b/docs/operate/reference/services/slam/cloudslam/_index.md index 98940ed8c5..3480835704 100644 --- a/docs/operate/reference/services/slam/cloudslam/_index.md +++ b/docs/operate/reference/services/slam/cloudslam/_index.md @@ -19,7 +19,7 @@ SLAM Algorithms can have varying levels of resource requirements in order to run In order to better support running SLAM on resource limited machines, Viam provides a service to run SLAM algorithms for machines in the cloud as well as management of the maps generated in their location. 
CloudSLAM can be used with both a live machine or with previously captured data in your location. -In [live mode](#mapping-with-a-live-machine-online-mode) using the [data management service](/data-ai/capture-data/capture-sync/) and the [cloudslam-wrapper](https://github.com/viam-modules/cloudslam-wrapper) module, Viam takes your LiDAR camera and movement sensor data from your local machine and sends it to the cloudslam server. +In [live mode](#mapping-with-a-live-machine-online-mode) using the [data management service](/data-ai/data/edge/capture-sync/) and the [cloudslam-wrapper](https://github.com/viam-modules/cloudslam-wrapper) module, Viam takes your LiDAR camera and movement sensor data from your local machine and sends it to the cloudslam server. The CloudSLAM server will then process that data and produce a map that can then be used on any machine in your location. When using an [offline machine](#using-previously-captured-data-offline-mode), you can select data from specific sensors over a period of time to build a map with. @@ -76,8 +76,8 @@ To use CloudSLAM on a live machine, you must meet the following requirements: To use CloudSLAM you must enable data capture and configure your `cloudslam-wrapper` SLAM service: {{< alert title="Tip: Managing Data Capture" color="tip" >}} -Note that when the [data management service](/data-ai/capture-data/capture-sync/) is enabled, it continuously monitors and syncs your machine’s sensor data while the machine is running. -To avoid incurring charges while not in use, [turn off data capture for your sensors](/data-ai/capture-data/capture-sync/#stop-data-capture-or-data-sync) once you have finished your SLAM session. +Note that when the [data management service](/data-ai/data/edge/capture-sync/) is enabled, it continuously monitors and syncs your machine’s sensor data while the machine is running. +To avoid incurring charges while not in use, [turn off data capture for your sensors](/data-ai/data/edge/capture-sync/#stop-data-capture-or-data-sync) once you have finished your SLAM session. {{< /alert >}} {{< tabs name="Create new map">}} @@ -92,7 +92,7 @@ To avoid incurring charges while not in use, [turn off data capture for your sen On the panel that appears, you can manage the capturing and syncing functions. You can also specify the **directory**, the sync **interval**, and any **tags** to apply to captured data. - See the [data management service](/data-ai/capture-data/capture-sync/) for more information. + See the [data management service](/data-ai/data/edge/capture-sync/) for more information. 2. Enable data capture for your camera, and for your movement sensor if you would like to use IMU data, odometry data, or both: @@ -183,7 +183,7 @@ You _do not_ need to configure data capture on the individual IMU and odometer. 
This example JSON configuration: - adds the `viam:rplidar`, `viam:cartographer`, and `viam:cloudslam-wrapper` modules -- configures the `viam:slam:cartographer`, `viam:cloudslam-wrapper:cloudslam`, and the [data management](/data-ai/capture-data/capture-sync/) services +- configures the `viam:slam:cartographer`, `viam:cloudslam-wrapper:cloudslam`, and the [data management](/data-ai/data/edge/capture-sync/) services - adds a `viam:lidar:rplidar` camera with data capture configured ```json {class="line-numbers linkable-line-numbers"} diff --git a/docs/operate/reference/services/vision/_index.md b/docs/operate/reference/services/vision/_index.md index 0d47151f02..14112ebf42 100644 --- a/docs/operate/reference/services/vision/_index.md +++ b/docs/operate/reference/services/vision/_index.md @@ -4,5 +4,5 @@ linkTitle: "Vision Service" weight: 20 type: "docs" layout: "empty" -canonical: "/data-ai/ai/run-inference/" +canonical: "/data-ai/ai/infer/run-inference/" --- diff --git a/docs/operate/reference/services/vision/mlmodel.md b/docs/operate/reference/services/vision/mlmodel.md index 281532f5e3..e07dc66e0d 100644 --- a/docs/operate/reference/services/vision/mlmodel.md +++ b/docs/operate/reference/services/vision/mlmodel.md @@ -29,14 +29,14 @@ Before configuring your `mlmodel` detector or classifier, you need to:

1. Train or upload an ML model

-You can add an [existing model](/data-ai/ai/deploy/#deploy-your-ml-model-on-an-ml-model-service) or [train a TFlite](/data-ai/ai/train-tflite/) or [another model](/data-ai/ai/train/) for object detection and classification using your data in the [Viam Cloud](/data-ai/capture-data/capture-sync/). +You can add an [existing model](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) or [train a TFlite](/data-ai/ai/train/train-tflite/) or [another model](/data-ai/ai/train/train/) for object detection and classification using your data in the [Viam Cloud](/data-ai/data/edge/capture-sync/). {{% /manualcard %}} {{% manualcard %}}

2. Deploy your ML model

-To use ML models with your machine, use a suitable [ML model service](/data-ai/ai/deploy/) to deploy and run the model. +To use ML models with your machine, use a suitable [ML model service](/data-ai/ai/train/deploy/) to deploy and run the model. {{% /manualcard %}} {{< /cards >}} @@ -125,7 +125,7 @@ The following attributes are available for an `mlmodel` detector or classifier: | Parameter | Type | Required? | Description | | --------- | ---- | --------- | ----------- | -| `mlmodel_name` | string | **Required** | The name of the [ML model service](/data-ai/ai/deploy/) you want to use the model from. | +| `mlmodel_name` | string | **Required** | The name of the [ML model service](/data-ai/ai/train/deploy/) you want to use the model from. | | `remap_output_names` | object | Optional | The names of your output tensors, mapped to the service requirements. See [Tensor names](#tensor-names) for more information. | | `remap_input_names` | object | Optional | The name of your input tensor, mapped to the service requirements. See [Tensor names](#tensor-names) for more information. | | `input_image_bgr` | bool | Optional | Set this to `true` if the ML model service expects the input image to have BGR pixels, rather than RGB pixels.
Default: `false` | @@ -146,7 +146,7 @@ Both the `mlmodel` detector and classifier require that the input and output ten - The _input tensor_ must be named `image` - The _output tensor_ must be named `probability` -If you [trained a TFlite ML model using the Viam app](/data-ai/ai/train-tflite/), your `mlmodel` tensors are already named in this fashion, and you can proceed to [test your detector or classifier](#test-your-detector-or-classifier). +If you [trained a TFlite ML model using the Viam app](/data-ai/ai/train/train-tflite/), your `mlmodel` tensors are already named in this fashion, and you can proceed to [test your detector or classifier](#test-your-detector-or-classifier). However, if you uploaded your own ML model, or are using one from the [Viam Registry](https://app.viam.com/registry), you may need to remap your tensor names to meet this requirement, and should follow the instructions to [remap tensor names](#remap-tensor-names). #### Remap tensor names @@ -219,7 +219,7 @@ The feature is only available for classifiers that were uploaded after September {{}} -If you have images stored in the [Viam Cloud](/data-ai/capture-data/capture-sync/), you can run your classifier against your images in the [Viam app](https://app.viam.com/). +If you have images stored in the [Viam Cloud](/data-ai/data/edge/capture-sync/), you can run your classifier against your images in the [Viam app](https://app.viam.com/). 1. Navigate to the [Data tab](https://app.viam.com/data/view) and click on the **Images** subtab. 2. Click on an image to open the side menu, and select the **Actions** tab under the **Data** tab. diff --git a/docs/tutorials/configure/pet-photographer.md b/docs/tutorials/configure/pet-photographer.md index ab3a6c5859..971e81ab9a 100644 --- a/docs/tutorials/configure/pet-photographer.md +++ b/docs/tutorials/configure/pet-photographer.md @@ -24,7 +24,7 @@ After following this tutorial, you will understand how to control sync parameter Note: Consider this tutorial alongside filtered camera tutorial. --> -If your machine [captures](/data-ai/capture-data/capture-sync/) a lot of data, you might want to filter captured data to selectively store only the data you are interested in. +If your machine [captures](/data-ai/data/edge/capture-sync/) a lot of data, you might want to filter captured data to selectively store only the data you are interested in. For example, you might want to use your smart machine's camera to capture images based on specific criteria, such as the presence of a certain color, and omit captured images that don't meet that criteria. In this tutorial, you will use a custom {{< glossary_tooltip term_id="module" text="module" >}} to function as a color filter, and use it with a [camera](/operate/reference/components/camera/) to only capture images where your pet is in the frame in the following way: @@ -161,7 +161,7 @@ For more information, refer to [Write your new resource model definition](/opera The filter function in your custom filter module must contain two critical elements: -1. A utility function that will check if the caller of the filter function is the [data management service](/data-ai/capture-data/capture-sync/). +1. A utility function that will check if the caller of the filter function is the [data management service](/data-ai/data/edge/capture-sync/). 1. A safeguard that ensures if the data management service is not the caller, an error and the unfiltered data is returned. 
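As an aside for reviewers: those two elements might look like the following minimal Python sketch inside the module's camera class. This is a hedged illustration, not code from this patch — it assumes the Python SDK's `from_dm_from_extra` helper and `NoCaptureToStoreError`, and `self.actual_cam`, `self.vision`, and the `"colour"` label are hypothetical placeholders (the surrounding class definition is omitted):

```python
from viam.errors import NoCaptureToStoreError
from viam.utils import from_dm_from_extra


async def get_image(self, mime_type="", *, extra=None, timeout=None, **kwargs):
    img = await self.actual_cam.get_image(mime_type)

    # Element 1: check whether the data management service is the caller.
    if not from_dm_from_extra(extra):
        # Element 2: any other caller gets the unfiltered image back.
        return img

    # Only let the data manager store images containing the target color.
    detections = await self.vision.get_detections(img)
    if not any(d.class_name == "colour" and d.confidence > 0.8 for d in detections):
        # Signals the data manager that there is nothing to store this tick.
        raise NoCaptureToStoreError()
    return img
```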
{{< alert title="Important" color="note" >}} @@ -827,12 +827,12 @@ Whether you've downloaded the `colorfilter` module, or written your own color fi Next, add the following services to your smart machine to support the color filter module: -- The [data management service](/data-ai/capture-data/capture-sync/) enables your smart machine to capture data and sync it to the cloud. +- The [data management service](/data-ai/data/edge/capture-sync/) enables your smart machine to capture data and sync it to the cloud. - The [vision service](/dev/reference/apis/services/vision/#detections) enables your smart machine to perform color detection on objects in a camera stream. ### Add the data management service -To enable data capture on your machine, add and configure the [data management service](/data-ai/capture-data/capture-sync/) to capture and store data on your machine's computer: +To enable data capture on your machine, add and configure the [data management service](/data-ai/data/edge/capture-sync/) to capture and store data on your machine's computer: {{< tabs >}} {{% tab name="Config Builder" %}} @@ -849,7 +849,7 @@ To enable data capture on your machine, add and configure the [data management s ![An instance of the data management service named "dm". The cloud sync and capturing options are toggled on and the directory is empty. The interval is set to 0.1](/tutorials/pet-photographer/data-management-services.png) - For more detailed information, see [Add the data management service](/data-ai/capture-data/capture-sync/). + For more detailed information, see [Add the data management service](/data-ai/data/edge/capture-sync/). {{% /tab %}} {{% tab name="JSON Template" %}} Add the data management service to the services array in your rover’s raw JSON configuration: @@ -1005,5 +1005,5 @@ Try these other tutorials for more on working with the data management and visio {{% card link="/tutorials/projects/pet-treat-dispenser/" %}} {{% card link="/tutorials/projects/guardian/" %}} {{% card link="/tutorials/projects/send-security-photo/" %}} -{{% card link="/data-ai/ai/deploy/" %}} +{{% card link="/data-ai/ai/train/deploy/" %}} {{< /cards >}} diff --git a/docs/tutorials/control/air-quality-fleet.md b/docs/tutorials/control/air-quality-fleet.md index a0d054f089..3be4751702 100644 --- a/docs/tutorials/control/air-quality-fleet.md +++ b/docs/tutorials/control/air-quality-fleet.md @@ -235,7 +235,7 @@ Once you understand how to configure machines and use fragments, you can use [Pr #### Configure data capture and sync You have configured the sensor so the board can communicate with it, but sensor data is not yet being saved anywhere. -Viam's [data management service](/data-ai/capture-data/capture-sync/) lets you capture data locally from each sensor and then sync it to the cloud where you can access historical sensor data and see trends over time. +Viam's [data management service](/data-ai/data/edge/capture-sync/) lets you capture data locally from each sensor and then sync it to the cloud where you can access historical sensor data and see trends over time. Once you configure the rest of your sensing machines, you'll be able to remotely access data from all sensors in all locations, and when you're ready, you can give customers [access](/manage/manage/access/) to the data from the sensors in their locations. 
Configure data capture and sync as follows: diff --git a/docs/tutorials/projects/claw-game.md b/docs/tutorials/projects/claw-game.md index 316ae859ea..31a65841a6 100644 --- a/docs/tutorials/projects/claw-game.md +++ b/docs/tutorials/projects/claw-game.md @@ -932,7 +932,7 @@ In this tutorial, you learned how to: For some next steps, you could: - Use the advanced interface included in the project repository to leverage the [motion service](/operate/reference/services/motion/) for larger, more complex arm movement within the enclosure. -- Add a camera and use the [vision service](/operate/reference/services/vision/) to add color detection, or use an [ML model](/data-ai/ai/deploy/) to determine grab success rate and create a score counter. +- Add a camera and use the [vision service](/operate/reference/services/vision/) to add color detection, or use an [ML model](/data-ai/ai/train/deploy/) to determine grab success rate and create a score counter. - Design a hard mode where the prizes are shuffled around with the arm every few attempts. - Add a camera and extend the interface to allow folks from anywhere in the world to play the claw game and win. diff --git a/docs/tutorials/projects/helmet.md b/docs/tutorials/projects/helmet.md index 7a62a46384..a7ea8626ed 100644 --- a/docs/tutorials/projects/helmet.md +++ b/docs/tutorials/projects/helmet.md @@ -197,7 +197,7 @@ Now that the detector is configured, it's time to test it! ## Configure data capture and sync -Viam's built-in [data management service](/data-ai/capture-data/capture-sync/) allows you to, among other things, capture images and sync them to the cloud. +Viam's built-in [data management service](/data-ai/data/edge/capture-sync/) allows you to, among other things, capture images and sync them to the cloud. For this project, you will capture images of people without hard hats so that you can see who wasn't wearing one, and so that you can trigger notifications when these images are captured and synced. Configure data capture on the `objectfilter` camera to capture images of people without hard hats: @@ -251,7 +251,7 @@ Now that you have verified that the detector and data sync are working, modify y ## Set up email notifications -[Triggers](/data-ai/ai/alert/) allow you to send webhook requests or email notifications when certain events happen. +[Triggers](/data-ai/ai/react/alert/) allow you to send webhook requests or email notifications when certain events happen. For example, you can set up a trigger to perform an action whenever an image of someone without a hard hat is uploaded to the cloud. @@ -460,10 +460,10 @@ Here are some ways you could expand on this project: - Change your cloud function to send a different kind of notification, or trigger some other action. For an example demonstrating how to configure text notifications, see the [Detect a Person and Send a Photo tutorial](/tutorials/projects/send-security-photo/). -- Use a different existing model or [train your own](/data-ai/ai/train-tflite/), to detect and send notifications about something else such as [forklifts](https://huggingface.co/keremberke/yolov8m-forklift-detection) appearing in your camera stream. +- Use a different existing model or [train your own](/data-ai/ai/train/train-tflite/), to detect and send notifications about something else such as [forklifts](https://huggingface.co/keremberke/yolov8m-forklift-detection) appearing in your camera stream. 
{{< cards >}} {{% card link="/tutorials/projects/send-security-photo/" %}} -{{% card link="/data-ai/ai/train-tflite/" %}} +{{% card link="/data-ai/ai/train/train-tflite/" %}} {{% card link="/tutorials/services/navigate-with-rover-base/" %}} {{< /cards >}} diff --git a/docs/tutorials/projects/integrating-viam-with-openai.md b/docs/tutorials/projects/integrating-viam-with-openai.md index cc3a26b379..7148288ec9 100644 --- a/docs/tutorials/projects/integrating-viam-with-openai.md +++ b/docs/tutorials/projects/integrating-viam-with-openai.md @@ -242,7 +242,7 @@ We found that if set up this way, the following positions accurately show the co ### 2. Configure the ML Model and vision services to use the detector -The [ML model service](/data-ai/ai/deploy/) allows you to deploy a machine learning model to your robot. +The [ML model service](/data-ai/ai/train/deploy/) allows you to deploy a machine learning model to your robot. This tutorial uses a pre-trained machine learning (ML) model from the Viam Registry named [`EfficientDet-COCO`](https://app.viam.com/ml-model/viam-labs/EfficientDet-COCO). This model can detect a variety of objects, which you can find in the provided [labels.txt](https://github.com/viam-labs/devrel-demos/raw/main/Light%20up%20bot/labels.txt) file. @@ -422,6 +422,6 @@ Some ideas: - Make the voice recognition software listen in the background, so the robot can move and interact with the world while listening and responding. - Integrate another ML model that is used to follow a human (when told to do so). - Add Lidar and integrate Viam's {{< glossary_tooltip term_id="slam" text="SLAM service" >}} to map the world around it. -- Use Viam's [Data Management](/data-ai/capture-data/capture-sync/) to collect environmental data and use this data to train new ML models that allow the robot to improve its functionality. +- Use Viam's [Data Management](/data-ai/data/edge/capture-sync/) to collect environmental data and use this data to train new ML models that allow the robot to improve its functionality. We'd love to see where you decide to take this. If you build your own companion robot, let us and others know on the [Community Discord](https://discord.gg/viam). diff --git a/docs/tutorials/projects/send-security-photo.md b/docs/tutorials/projects/send-security-photo.md index 5a3974091a..eb6f9d6e77 100644 --- a/docs/tutorials/projects/send-security-photo.md +++ b/docs/tutorials/projects/send-security-photo.md @@ -24,7 +24,7 @@ Maybe someone is eating your chocolates when you are away. You're not sure who, but you suspect Steve. This robot will help you catch the culprit. -When someone comes to your desk, the robot will use the [vision service](/operate/reference/services/vision/) and the [ML model service](/data-ai/ai/deploy/) to detect a person, take their photo, and text you an alert with a photo of the person. +When someone comes to your desk, the robot will use the [vision service](/operate/reference/services/vision/) and the [ML model service](/data-ai/ai/train/deploy/) to detect a person, take their photo, and text you an alert with a photo of the person. ![Text message reading "Alert There is someone at your desk beware" with a photo of a person (Steve) detected by the camera as he approaches the desk.](/tutorials/send-security-photo/text-message.png) @@ -88,7 +88,7 @@ This tutorial uses a pre-trained Machine Learning model from the Viam Registry c The model can detect a variety of things, including `Persons`. 
You can see a full list of what the model can detect in [labels.txt](https://github.com/viam-labs/devrel-demos/raw/main/Light%20up%20bot/labels.txt) file. -If you want to train your own model instead, follow the instructions to [train a TFlite](/data-ai/ai/train-tflite/) or [another model](/data-ai/ai/train/). +If you want to train your own model instead, follow the instructions to [train a TFlite](/data-ai/ai/train/train-tflite/) or [another model](/data-ai/ai/train/train/). 1. **Configure the ML model service** diff --git a/docs/tutorials/projects/verification-system.md b/docs/tutorials/projects/verification-system.md index 32920cc1ac..4609f3fd6f 100644 --- a/docs/tutorials/projects/verification-system.md +++ b/docs/tutorials/projects/verification-system.md @@ -90,7 +90,7 @@ In order for your machine's camera to detect the presence of a person in its fie ### Use an existing ML model -The [ML model service](/data-ai/ai/deploy/) allows you to deploy a machine learning model to your robot. +The [ML model service](/data-ai/ai/train/deploy/) allows you to deploy a machine learning model to your robot. For your machine to be able to detect people, you will use a Machine Learning model from the Viam Registry called [`EfficientDet-COCO`](https://app.viam.com/ml-model/viam-labs/EfficientDet-COCO). The model can detect a variety of things which you can see in [labels.txt](https://github.com/viam-labs/devrel-demos/raw/main/Light%20up%20bot/labels.txt) file including `person`s. @@ -172,9 +172,9 @@ Then, train a new model using that model: {{}} 1. When you have created bounding boxes for all `person` objects in the image, click the right arrow key to navigate to the next image. Repeat the process for each image in your dataset, drawing bounding boxes for every person in every image. -1. [Train a TFlite model on your dataset](/data-ai/ai/train-tflite/). +1. [Train a TFlite model on your dataset](/data-ai/ai/train/train-tflite/). Give it the name `"persondetect"`, and select **Object Detection** as the **Model Type**. -1. [Deploy the model](/data-ai/ai/deploy/) to your machine so it can be used by other services, such as the vision service. +1. [Deploy the model](/data-ai/ai/train/deploy/) to your machine so it can be used by other services, such as the vision service. Finally, configure an `mlmodel` detector to use your new `"persondetect"` ML model: diff --git a/docs/tutorials/services/visualize-data-grafana.md b/docs/tutorials/services/visualize-data-grafana.md index 254efdaf30..462e73e32e 100644 --- a/docs/tutorials/services/visualize-data-grafana.md +++ b/docs/tutorials/services/visualize-data-grafana.md @@ -19,7 +19,7 @@ no_list: true -Once you have [configured data query](/data-ai/data/query/#query-data-using-third-party-tools) for your organization's data store, you can visualize your data from a variety of third-party tools, including Grafana. +Once you have [configured data query](/data-ai/data/cloud/query/#query-data-using-third-party-tools) for your organization's data store, you can visualize your data from a variety of third-party tools, including Grafana. You can choose to visualize data from a component on one machine, from multiple components together, or from many components across a fleet of machines, all from a single pane of glass. @@ -31,7 +31,7 @@ Follow the steps in this tutorial to learn how to collect data from your machine {{% alert title="Info" color="info" %}} This tutorial focuses on using Grafana to visualize your captured data. 
-For general guidance appropriate for any third-party visualization tool, see [Visualize data](/data-ai/data/visualize/). +For general guidance appropriate for any third-party visualization tool, see [Visualize data](/data-ai/data/cloud/visualize/). {{% /alert %}} {{}} @@ -73,7 +73,7 @@ First, add the data management service to your machine to be able capture and sy {{< imgproc src="/tutorials/data-management/data-management-conf.png" alt="The data management service configuration pane with default settings shown for both capturing and syncing" resize="900x" >}} -For more information, see [data management service configuration](/data-ai/capture-data/capture-sync/). +For more information, see [data management service configuration](/data-ai/data/edge/capture-sync/). ### Configure data capture for a component @@ -98,17 +98,17 @@ To enable data capture for a sensor component: After a short while, your sensor will begin capturing live readings, and syncing those readings to the Viam app. You can check that data is being captured and synced by clicking on the menu icon on the sensor configuration pane. and selecting **View captured data**. -For more information see [data management service configuration](/data-ai/capture-data/capture-sync/). +For more information see [data management service configuration](/data-ai/data/edge/capture-sync/). ### Configure data query Next, enable the ability to query your synced data. When you sync captured data to Viam, that data is stored in the Viam organization’s [MongoDB Atlas Data Federation](https://www.mongodb.com/docs/atlas/data-federation/overview/) instance. -Configuring data query allows you to directly [query your data](/data-ai/data/query/#query-data-in-the-viam-app) using the Viam app or a compatible client (such as `mongosh`), but also allows Grafana to access that data and visualize it. +Configuring data query allows you to directly [query your data](/data-ai/data/cloud/query/#query-data-in-the-viam-app) using the Viam app or a compatible client (such as `mongosh`), but also allows Grafana to access that data and visualize it. To enable data query: -1. Follow the steps to [configure data query](/data-ai/data/query/#query-data-in-the-viam-app). +1. Follow the steps to [configure data query](/data-ai/data/cloud/query/#query-data-in-the-viam-app). 1. Note the username and hostname returned from these steps, in addition to the password you chose for that user. You will use this information in the next section. @@ -141,7 +141,7 @@ With your machine capturing data and syncing it to Viam, and direct query of tha ``` The connection string is specific to your organization ID and configured user. - You must have followed the steps under [configure data query](/data-ai/data/query/#configure-data-query) previously in order for this URL to be valid. + You must have followed the steps under [configure data query](/data-ai/data/cloud/query/#configure-data-query) previously in order for this URL to be valid. - **Credentials: User**: Enter the following username, substituting your organization ID as determined earlier, for ``: @@ -155,7 +155,7 @@ With your machine capturing data and syncing it to Viam, and direct query of tha db-user-abcdef12-abcd-abcd-abcd-abcdef123456 ``` - - **Credentials: Password**: Enter the password you provided when you [configured data query](/data-ai/data/query/#configure-data-query) previously. 
+ - **Credentials: Password**: Enter the password you provided when you [configured data query](/data-ai/data/cloud/query/#configure-data-query) previously. {{}} @@ -222,8 +222,8 @@ See Grafana's [Global variables documentation](https://grafana.com/docs/grafana/ In this tutorial, you learned: -- how to use the [data management service](/data-ai/capture-data/capture-sync/) to capture data from your machine and sync it to the Viam app -- how to [enable data query access](/data-ai/data/query/#configure-data-query) to your synced data +- how to use the [data management service](/data-ai/data/edge/capture-sync/) to capture data from your machine and sync it to the Viam app +- how to [enable data query access](/data-ai/data/cloud/query/#configure-data-query) to your synced data - how to connect Grafana to your data - how to build a dashboard visualizing that data - how to use query language to dynamically update the visualization based on UI selections diff --git a/layouts/docs/tutorials.html b/layouts/docs/tutorials.html index e659b3c1df..82f46d4108 100644 --- a/layouts/docs/tutorials.html +++ b/layouts/docs/tutorials.html @@ -92,7 +92,7 @@

Javascript

{{ partial "tutorialcard-no-js.html" (dict "link" "/tutorials/services/constrain-motion/") }} {{ partial "tutorialcard-no-js.html" (dict "link" "/tutorials/services/color-detection-scuttle/") }} {{ partial "tutorialcard-no-js.html" (dict "link" "/tutorials/services/webcam-line-follower-robot/") }} - {{ partial "tutorialcard-no-js.html" (dict "link" "/data-ai/ai/train-tflite/") }} + {{ partial "tutorialcard-no-js.html" (dict "link" "/data-ai/ai/train/train-tflite/") }} From 3a2431c9967026e07efe88108bf7453ab9c7f323 Mon Sep 17 00:00:00 2001 From: nathan contino Date: Thu, 8 May 2025 10:00:33 -0400 Subject: [PATCH 2/7] Improve sidebar title narrative --- .github/workflows/update_sdk_methods.py | 1 - docs/data-ai/_index.md | 1 - docs/data-ai/ai/train/train-tflite.md | 4 +- docs/data-ai/ai/train/train.md | 2 +- docs/data-ai/ai/train/upload-external-data.md | 2 +- docs/data-ai/data/edge/capture-sync.md | 2 +- docs/data-ai/data/edge/store.md | 188 ++++++++++++++++++ docs/data-ai/get-started/capture-images.md | 2 +- .../get-started/create-training-dataset.md | 4 +- .../reference/advanced-data-capture-sync.md | 178 ----------------- 10 files changed, 196 insertions(+), 188 deletions(-) create mode 100644 docs/data-ai/data/edge/store.md diff --git a/.github/workflows/update_sdk_methods.py b/.github/workflows/update_sdk_methods.py index a05d0e03e9..686eb69afb 100755 --- a/.github/workflows/update_sdk_methods.py +++ b/.github/workflows/update_sdk_methods.py @@ -342,7 +342,6 @@ "frame": "/services/frame-system/", "Viam app": "https://app.viam.com/", "organization settings page": "/manage/reference/organize/", - "image tags": "/data-ai/ai/create-dataset/#label-your-images", "API key": "/fleet/cli/#authenticate", "board model": "/dev/reference/apis/components/board/" } diff --git a/docs/data-ai/_index.md b/docs/data-ai/_index.md index 351b75b381..64afa87354 100644 --- a/docs/data-ai/_index.md +++ b/docs/data-ai/_index.md @@ -48,7 +48,6 @@ You can also monitor your machines through teleop, power your application logic, {{< how-to-expand "Integrate AI" "8" "INTERMEDIATE" "" "data-platform-ai" >}} {{< cards >}} -{{% card link="/data-ai/ai/train/create-dataset/" noimage="true" %}} {{% card link="/data-ai/ai/train/train-tflite/" noimage="true" %}} {{% card link="/data-ai/ai/train/train/" noimage="true" %}} {{% card link="/data-ai/ai/train/deploy/" noimage="true" %}} diff --git a/docs/data-ai/ai/train/train-tflite.md b/docs/data-ai/ai/train/train-tflite.md index f29efb07e6..61383a57db 100644 --- a/docs/data-ai/ai/train/train-tflite.md +++ b/docs/data-ai/ai/train/train-tflite.md @@ -39,7 +39,7 @@ Follow this guide to use your image data to train an ML model, so that your mach {{% expand "a dataset with labels" %}} -Follow the guide to [create a dataset](/data-ai/ai/train/create-dataset/). +Follow the [Quickstart](/data-ai/get-started/quickstart/). {{% /expand%}} @@ -145,7 +145,7 @@ Using this approach, each subsequent model version becomes more accurate than th To capture images of edge cases and re-train your model using those images, complete the following steps: -1. Add edge case images to your training dataset. You can find edge cases in your existing data on the [**DATA** page](https://app.viam.com/data/) or [capture new images and add them to your training dataset](/data-ai/ai/train/create-dataset/#capture-images). +1. Add edge case images to your training dataset. 
You can find edge cases in your existing data on the [**DATA** page](https://app.viam.com/data/) or [capture new images and add them to your training dataset](/data-ai/get-started/capture-images/). 1. Visit the **DATASET** tab of the **DATA** page and annotate the image. diff --git a/docs/data-ai/ai/train/train.md b/docs/data-ai/ai/train/train.md index 0072aa214f..3efbc1427d 100644 --- a/docs/data-ai/ai/train/train.md +++ b/docs/data-ai/ai/train/train.md @@ -27,7 +27,7 @@ If you wish to do this, skip to [Submit a training job](#submit-a-training-job). {{% expand "A dataset with data you can train an ML model on. Click to see instructions." %}} -For images, follow the instructions to [Create a dataset](/data-ai/ai/train/create-dataset/) to create a dataset and label data. +For images, follow the [Quickstart](/data-ai/get-started/quickstart/) to create a dataset and label data. For other data, use the [Data Client API](/dev/reference/apis/data-client/) from within the training script to store data in the Viam Cloud. diff --git a/docs/data-ai/ai/train/upload-external-data.md b/docs/data-ai/ai/train/upload-external-data.md index 39ac8a8e3e..fd332ad77c 100644 --- a/docs/data-ai/ai/train/upload-external-data.md +++ b/docs/data-ai/ai/train/upload-external-data.md @@ -297,4 +297,4 @@ However, the uploaded images will not be associated with a component or method. ## Next steps Now that you have a batch of data uploaded, you can [train an ML model](/data-ai/ai/train/train-tflite/) on it. -Or, if you want to collect and upload data _not_ in a batch, see [Create a dataset](/data-ai/ai/train/create-dataset/). +Or, if you want to collect and upload data _not_ in a batch, see [Create a training dataset](/data-ai/get-started/create-training-dataset/). diff --git a/docs/data-ai/data/edge/capture-sync.md b/docs/data-ai/data/edge/capture-sync.md index d1ea0b9c13..c2ed1ca4d2 100644 --- a/docs/data-ai/data/edge/capture-sync.md +++ b/docs/data-ai/data/edge/capture-sync.md @@ -129,4 +129,4 @@ For other ways to control data synchronization, see: ## Next steps For more information on available configuration attributes and options like capturing directly to MongoDB or conditional sync, see [Advanced data capture and sync configurations](/data-ai/reference/advanced-data-capture-sync/). -To leverage AI, you can now [create a dataset](/data-ai/ai/train/create-dataset/) with the data you've captured. +You can now use your data to [create a training dataset](/data-ai/get-started/create-training-dataset/). diff --git a/docs/data-ai/data/edge/store.md b/docs/data-ai/data/edge/store.md new file mode 100644 index 0000000000..c9100db227 --- /dev/null +++ b/docs/data-ai/data/edge/store.md @@ -0,0 +1,188 @@ +--- +linkTitle: "Store" +title: "Store data" +weight: 40 +layout: "docs" +type: "docs" +languages: [] +date: "2024-12-03" +description: "Different ways you can store data in the Viam cloud." +--- + +## Cache most recent data + +If you want faster access to your most recent sensor readings, you can configure hot data storage. +The hot data store keeps a rolling window of hot data for faster queries. +All historical data remains in your default storage. + +To configure the hot data store: + +1. Use the `recent_data_store` attribute on each capture method in your data manager service.
+ +{{% expand "Click to view a sample configuration" %}} + +The following sample configuration captures data from a sensor at 0.5 Hz. +`viam-server` stores the last 24 hours of data in a shared recent-data database, while continuing to write all data to blob storage: + +```json {class="line-numbers linkable-line-numbers" data-line="17-19"} +{ + "components": [ + { + "name": "sensor-1", + "api": "rdk:component:sensor", + "model": "rdk:builtin:fake", + "attributes": {}, + "service_configs": [ + { + "type": "data_manager", + "attributes": { + "capture_methods": [ + { + "method": "Readings", + "capture_frequency_hz": 0.5, + "additional_params": {}, + "recent_data_store": { + "stored_hours": 24 + } + } + ] + } + } + ] + } + ] +} +``` + +{{% /expand%}} + +## Store in your own MongoDB cluster + +You can configure direct capture of tabular data to a MongoDB instance alongside disk storage on your edge device. +This can be useful for powering real-time dashboards before data is synced from the edge to the cloud. +The MongoDB instance can be a locally running instance or a cluster in the cloud. + +Configure using the `mongo_capture_config` attributes in your data manager service. +You can configure data sync to a MongoDB instance separately from data sync to the Viam Cloud. + +{{< expand "Click to view sample configuration with MongoDB data store." >}} + +This sample configuration captures fake sensor readings both to the configured MongoDB URI as well as to the `~/.viam/capture` directory on disk. +It does not sync the data to the Viam Cloud. + +```json +{ + "components": [ + { + "name": "sensor-1", + "api": "rdk:component:sensor", + "model": "rdk:builtin:fake", + "attributes": {}, + "service_configs": [ + { + "type": "data_manager", + "attributes": { + "capture_methods": [ + { + "method": "Readings", + "capture_frequency_hz": 0.5, + "additional_params": {} + } + ] + } + } + ] + } + ], + "services": [ + { + "name": "data_manager-1", + "api": "rdk:service:data_manager", + "attributes": { + "mongo_capture_config": { + "uri": "mongodb://127.0.0.1:27017/?directConnection=true&serverSelectionTimeoutMS=2000" + } + } + } + ] +} +``` + +{{< /expand >}} + +{{< expand "Click to view sample configuration with MongoDB data store and sync to the Viam Cloud." >}} + +This sample configuration captures fake sensor readings both to the configured MongoDB URI as well as to the `~/.viam/capture` directory on disk. +It syncs data to the Viam Cloud every 0.1 minutes. 
+ +```json +{ + "components": [ + { + "name": "sensor-1", + "api": "rdk:component:sensor", + "model": "rdk:builtin:fake", + "attributes": {}, + "service_configs": [ + { + "type": "data_manager", + "attributes": { + "capture_methods": [ + { + "method": "Readings", + "capture_frequency_hz": 0.5, + "additional_params": {} + } + ] + } + } + ] + } + ], + "services": [ + { + "name": "data_manager-1", + "api": "rdk:service:data_manager", + "attributes": { + "mongo_capture_config": { + "uri": "mongodb://127.0.0.1:27017/?directConnection=true&serverSelectionTimeoutMS=2000" + }, + "additional_sync_paths": [], + "sync_interval_mins": 0.1, + "capture_dir": "", + "capture_disabled": false, + "sync_disabled": false, + "tags": [] + } + } + ] +} +``` + +{{< /expand >}} + +When `mongo_capture_config.uri` is configured, data capture will attempt to connect to the configured MongoDB server and write captured tabular data to the configured `mongo_capture_config.database` and `mongo_capture_config.collection` (or their defaults if unconfigured) after enqueuing that data to be written to disk. + +If writes to MongoDB fail for any reason, data capture will log an error for each failed write and continue capturing. + +Failing to write to MongoDB doesn't affect capturing and syncing data to cloud storage other than adding capture latency. + +{{< alert title="Caution" color="caution" >}} + +- Capturing directly to MongoDB may write data to MongoDB that later fails to be written to disk (and therefore never gets synced to cloud storage). +- Capturing directly to MongoDB does not retry failed writes to MongoDB. As a consequence, it is NOT guaranteed all data captured will be written to MongoDB. + This can happen in cases such as MongoDB being inaccessible to `viam-server` or writes timing out. +- Capturing directly to MongoDB may reduce the maximum frequency that data capture can capture data due to the added latency of writing to MongoDB. + If your use case needs to support very high capture rates, this feature may not be appropriate. + +{{< /alert >}} + +## Configure retention + +Configure how long your synced data remains stored in the cloud: + +- **Retain data up to a certain size (for example, 100GB) or for a specific length of time (for example, 14 days):** Set `retention_policies` at the resource level. + See the `retention_policy` field in [data capture configuration attributes](/data-ai/reference/advanced-data-capture-sync/#click-to-view-data-capture-attributes). +- **Delete data captured by a machine when you delete the machine:** Control whether your cloud data is deleted when a machine or machine part is removed. + See the `delete_data_on_part_deletion` field in the [data management service configuration attributes](/data-ai/reference/advanced-data-capture-sync/#click-to-view-data-management-attributes). diff --git a/docs/data-ai/get-started/capture-images.md b/docs/data-ai/get-started/capture-images.md index 16ff30c845..18bbea7a58 100644 --- a/docs/data-ai/get-started/capture-images.md +++ b/docs/data-ai/get-started/capture-images.md @@ -29,7 +29,7 @@ To view images added to your dataset, go to the **DATA** page's [**DATASETS** ta To capture a large number of images for training an ML model, [Capture and sync image data](/data-ai/data/edge/capture-sync/) using the data management service with your camera. Viam stores the images saved by capture and sync on the [**DATA** page](https://app.viam.com/data/), but does not add the images to a dataset. 
-We recommend you tag the images first and then use the CLI to [add the tagged images to a dataset](/data-ai/ai/train/create-dataset/#add-tagged-images-to-a-dataset). +We recommend you tag the images first and then use the CLI to [add the tagged images to a dataset](/data-ai/get-started/create-training-dataset/#add-tagged-images-to-a-dataset). {{< alert title="Tip" color="tip" >}} diff --git a/docs/data-ai/get-started/create-training-dataset.md b/docs/data-ai/get-started/create-training-dataset.md index d0b5147e99..0b8593bc46 100644 --- a/docs/data-ai/get-started/create-training-dataset.md +++ b/docs/data-ai/get-started/create-training-dataset.md @@ -4,7 +4,7 @@ title: "Create a training dataset" weight: 30 layout: "docs" type: "docs" -description: "Create a dataset to use for AI model training" +description: "Create a dataset from your captured data to use for AI model training" --- {{< tabs >}} @@ -23,7 +23,7 @@ description: "Create a dataset to use for AI model training" Use the Viam CLI to filter images by label and add the filtered images to a dataset: -1. First, [create a dataset](#create-a-dataset), if you haven't already. +1. First, create a dataset, if you haven't already. 1. If you just created a dataset, use the dataset ID output by the creation command. If your dataset already exists, run the following command to get a list of dataset names and corresponding IDs: diff --git a/docs/data-ai/reference/advanced-data-capture-sync.md b/docs/data-ai/reference/advanced-data-capture-sync.md index cd63b8a1c7..ecbda6c853 100644 --- a/docs/data-ai/reference/advanced-data-capture-sync.md +++ b/docs/data-ai/reference/advanced-data-capture-sync.md @@ -531,184 +531,6 @@ The following attributes are available for data capture configuration: You can edit the JSON directly by switching to **JSON** mode in the UI. -### Capture to the hot data store - -If you want faster access to your most recent sensor readings, you can configure hot data storage. -The hot data store keeps a rolling window of hot data for faster queries. -All historical data remains in your default storage. - -To configure the hot data store: - -1. Use the `recent_data_store` attribute on each capture method in your data manager service. -2. Configure your queries' data source to the hot data store by passing the `use_recent_data` boolean argument to [tabularDataByMQL](/dev/reference/apis/data-client/#tabulardatabymql). - -{{% expand "Click to view a sample configuration" %}} - -The following sample configuration captures data from a sensor at 0.5 Hz. -`viam-server` stores the last 24 hours of data in a shared recent-data database, while continuing to write all data to blob storage: - -```json {class="line-numbers linkable-line-numbers" data-line="17-19"} -{ - "components": [ - { - "name": "sensor-1", - "api": "rdk:component:sensor", - "model": "rdk:builtin:fake", - "attributes": {}, - "service_configs": [ - { - "type": "data_manager", - "attributes": { - "capture_methods": [ - { - "method": "Readings", - "capture_frequency_hz": 0.5, - "additional_params": {}, - "recent_data_store": { - "stored_hours": 24 - } - } - ] - } - } - ] - } - ] -} -``` - -{{% /expand%}} - -### Capture directly to your own MongoDB cluster - -You can configure direct capture of tabular data to a MongoDB instance alongside disk storage on your edge device. -This can be useful for powering real-time dashboards before data is synced from the edge to the cloud. -The MongoDB instance can be a locally running instance or a cluster in the cloud.
- -Configure using the `mongo_capture_config` attributes in your data manager service. -You can configure data sync to a MongoDB instance separately from data sync to the Viam Cloud. - -{{< expand "Click to view sample configuration with MongoDB data store." >}} - -This sample configuration captures fake sensor readings both to the configured MongoDB URI as well as to the `~/.viam/capture` directory on disk. -It does not sync the data to the Viam Cloud. - -```json -{ - "components": [ - { - "name": "sensor-1", - "api": "rdk:component:sensor", - "model": "rdk:builtin:fake", - "attributes": {}, - "service_configs": [ - { - "type": "data_manager", - "attributes": { - "capture_methods": [ - { - "method": "Readings", - "capture_frequency_hz": 0.5, - "additional_params": {} - } - ] - } - } - ] - } - ], - "services": [ - { - "name": "data_manager-1", - "api": "rdk:service:data_manager", - "attributes": { - "mongo_capture_config": { - "uri": "mongodb://127.0.0.1:27017/?directConnection=true&serverSelectionTimeoutMS=2000" - } - } - } - ] -} -``` - -{{< /expand >}} - -{{< expand "Click to view sample configuration with MongoDB data store and sync to the Viam Cloud." >}} - -This sample configuration captures fake sensor readings both to the configured MongoDB URI as well as to the `~/.viam/capture` directory on disk. -It syncs data to the Viam Cloud every 0.1 minutes. - -```json -{ - "components": [ - { - "name": "sensor-1", - "api": "rdk:component:sensor", - "model": "rdk:builtin:fake", - "attributes": {}, - "service_configs": [ - { - "type": "data_manager", - "attributes": { - "capture_methods": [ - { - "method": "Readings", - "capture_frequency_hz": 0.5, - "additional_params": {} - } - ] - } - } - ] - } - ], - "services": [ - { - "name": "data_manager-1", - "api": "rdk:service:data_manager", - "attributes": { - "mongo_capture_config": { - "uri": "mongodb://127.0.0.1:27017/?directConnection=true&serverSelectionTimeoutMS=2000" - }, - "additional_sync_paths": [], - "sync_interval_mins": 0.1, - "capture_dir": "", - "capture_disabled": false, - "sync_disabled": false, - "tags": [] - } - } - ] -} -``` - -{{< /expand >}} - -When `mongo_capture_config.uri` is configured, data capture will attempt to connect to the configured MongoDB server and write captured tabular data to the configured `mongo_capture_config.database` and `mongo_capture_config.collection` (or their defaults if unconfigured) after enqueuing that data to be written to disk. - -If writes to MongoDB fail for any reason, data capture will log an error for each failed write and continue capturing. - -Failing to write to MongoDB doesn't affect capturing and syncing data to cloud storage other than adding capture latency. - -{{< alert title="Caution" color="caution" >}} - -- Capturing directly to MongoDB may write data to MongoDB that later fails to be written to disk (and therefore never gets synced to cloud storage). -- Capturing directly to MongoDB does not retry failed writes to MongoDB. As a consequence, it is NOT guaranteed all data captured will be written to MongoDB. - This can happen in cases such as MongoDB being inaccessible to `viam-server` or writes timing out. -- Capturing directly to MongoDB may reduce the maximum frequency that data capture can capture data due to the added latency of writing to MongoDB. - If your use case needs to support very high capture rates, this feature may not be appropriate. 
- -{{< /alert >}} - -### Cloud data retention - -Configure how long your synced data remains stored in the cloud: - -- **Retain data up to a certain size (for example, 100GB) or for a specific length of time (for example, 14 days):** Set `retention_policies` at the resource level. - See the `retention_policy` field in [data capture configuration attributes](/data-ai/reference/advanced-data-capture-sync/#click-to-view-data-capture-attributes). -- **Delete data captured by a machine when you delete the machine:** Control whether your cloud data is deleted when a machine or machine part is removed. - See the `delete_data_on_part_deletion` field in the [data management service configuration attributes](/data-ai/reference/advanced-data-capture-sync/#click-to-view-data-management-attributes). - ### Sync optimization **Configurable sync threads:** You can control how many concurrent sync operations occur by adjusting the `maximum_num_sync_threads` setting. From 937d62e726f451eac2cc7fce8257e99b191a8547 Mon Sep 17 00:00:00 2001 From: nathan contino Date: Thu, 8 May 2025 10:16:15 -0400 Subject: [PATCH 3/7] Use infer verb phrase --- docs/data-ai/ai/infer/_index.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/data-ai/ai/infer/_index.md b/docs/data-ai/ai/infer/_index.md index 9608b0e841..502765bc64 100644 --- a/docs/data-ai/ai/infer/_index.md +++ b/docs/data-ai/ai/infer/_index.md @@ -1,6 +1,6 @@ --- -linkTitle: "Models" -title: "Models" +linkTitle: "Infer" +title: "Infer" weight: 200 layout: "empty" type: "docs" From 8e09bc435d8f96b1c643a22f67d53717caa9f933 Mon Sep 17 00:00:00 2001 From: nathan contino Date: Thu, 8 May 2025 11:59:12 -0400 Subject: [PATCH 4/7] Naming tweak --- docs/data-ai/ai/_index.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/data-ai/ai/_index.md b/docs/data-ai/ai/_index.md index e09e15c850..0918a7eb0e 100644 --- a/docs/data-ai/ai/_index.md +++ b/docs/data-ai/ai/_index.md @@ -1,6 +1,6 @@ --- -linkTitle: "AI Inference" -title: "AI Inference" +linkTitle: "Apply AI models" +title: "Apply AI models" weight: 300 layout: "empty" type: "docs" From 8c9499e158a2dd94be4f26165a91f9e635ccb783 Mon Sep 17 00:00:00 2001 From: nathan contino Date: Tue, 13 May 2025 11:12:36 -0400 Subject: [PATCH 5/7] Refactor based on ian feedback --- docs/data-ai/_index.md | 26 +++++++------- docs/data-ai/ai/react/_index.md | 9 ----- docs/data-ai/{data => }/cloud/_index.md | 3 +- docs/data-ai/{data => }/cloud/export.md | 2 +- docs/data-ai/{data => }/cloud/query.md | 8 ++--- docs/data-ai/{data => }/cloud/visualize.md | 10 +++--- docs/data-ai/data/react/_index.md | 9 ----- docs/data-ai/{data => }/edge/_index.md | 5 +-- docs/data-ai/{data => }/edge/capture-sync.md | 0 .../{data => }/edge/conditional-sync.md | 4 +-- .../{data => }/edge/filter-before-sync.md | 2 +- docs/data-ai/{data => }/edge/store.md | 0 docs/data-ai/get-started/_index.md | 2 +- docs/data-ai/get-started/capture-images.md | 2 +- docs/data-ai/get-started/how-sync-works.md | 2 +- docs/data-ai/{ai => }/infer/_index.md | 3 +- docs/data-ai/{ai => }/infer/run-inference.md | 2 +- docs/data-ai/{ai => react}/_index.md | 4 +-- docs/data-ai/{ai => }/react/act.md | 2 +- docs/data-ai/{data => }/react/alert-data.md | 4 +-- docs/data-ai/{ai => }/react/alert.md | 8 ++--- .../{ai/train => reference/APIs}/_index.md | 4 +-- .../reference/{ => APIs}/data-client.md | 0 .../{ => APIs}/data-management-client.md | 0 .../reference/{ => APIs}/ml-model-client.md | 0 .../{ => APIs}/ml-training-client.md | 0 
.../reference/{ => APIs}/vision-client.md | 0 .../reference/advanced-data-capture-sync.md | 4 +-- docs/data-ai/reference/architecture.md | 2 +- docs/data-ai/reference/mlmodel-design.md | 4 +-- docs/data-ai/{data => train}/_index.md | 6 ++-- docs/data-ai/{ai => }/train/deploy.md | 6 ++-- docs/data-ai/{ai => }/train/train-tflite.md | 6 ++-- docs/data-ai/{ai => }/train/train.md | 4 +-- .../{ai => }/train/upload-external-data.md | 6 ++-- docs/dev/_index.md | 6 ++-- docs/dev/reference/apis/services/data.md | 2 +- docs/dev/reference/apis/services/ml.md | 2 +- docs/dev/reference/changelog.md | 34 +++++++++---------- docs/dev/reference/glossary/mql.md | 2 +- docs/dev/reference/glossary/sql.md | 2 +- docs/dev/reference/sdks/connectivity.md | 2 +- docs/dev/reference/sdks/python/python-venv.md | 2 +- docs/dev/tools/cli.md | 10 +++--- docs/manage/fleet/reuse-configuration.md | 6 ++-- docs/manage/manage/rbac.md | 2 +- docs/manage/troubleshoot/alert.md | 6 ++-- .../teleoperate/default-interface.md | 2 +- docs/operate/control/headless-app.md | 2 +- .../get-started/other-hardware/_index.md | 2 +- .../get-started/supported-hardware/_index.md | 4 +-- docs/operate/mobility/use-input-to-act.md | 4 +-- .../reference/advanced-modules/_index.md | 2 +- docs/operate/reference/architecture/_index.md | 2 +- .../reference/components/camera/_index.md | 4 +-- .../reference/components/camera/calibrate.md | 2 +- .../components/camera/esp32-camera.md | 4 +-- .../components/camera/fake-micro-server.md | 2 +- .../reference/components/camera/fake.md | 2 +- .../reference/components/camera/ffmpeg.md | 2 +- .../reference/components/camera/image-file.md | 2 +- .../reference/components/camera/transform.md | 2 +- .../reference/components/camera/webcam.md | 4 +-- .../reference/components/encoder/_index.md | 2 +- .../components/movement-sensor/_index.md | 4 +-- .../components/power-sensor/_index.md | 4 +-- .../reference/components/sensor/_index.md | 4 +-- .../reference/components/sensor/fake.md | 2 +- .../components/sensor/ultrasonic-micro-rdk.md | 2 +- .../services/slam/cloudslam/_index.md | 10 +++--- .../reference/services/vision/_index.md | 2 +- .../reference/services/vision/mlmodel.md | 10 +++--- docs/tutorials/configure/pet-photographer.md | 12 +++---- docs/tutorials/control/air-quality-fleet.md | 2 +- docs/tutorials/projects/claw-game.md | 2 +- docs/tutorials/projects/helmet.md | 8 ++--- .../projects/integrating-viam-with-openai.md | 4 +-- .../tutorials/projects/send-security-photo.md | 4 +-- .../tutorials/projects/verification-system.md | 6 ++-- .../services/visualize-data-grafana.md | 20 +++++------ layouts/docs/tutorials.html | 2 +- .../include/app/apis/generated/mltraining.md | 34 +++++++++---------- .../mltraining.SubmitCustomTrainingJob.md | 2 +- 83 files changed, 194 insertions(+), 209 deletions(-) delete mode 100644 docs/data-ai/ai/react/_index.md rename docs/data-ai/{data => }/cloud/_index.md (81%) rename docs/data-ai/{data => }/cloud/export.md (98%) rename docs/data-ai/{data => }/cloud/query.md (97%) rename docs/data-ai/{data => }/cloud/visualize.md (95%) delete mode 100644 docs/data-ai/data/react/_index.md rename docs/data-ai/{data => }/edge/_index.md (65%) rename docs/data-ai/{data => }/edge/capture-sync.md (100%) rename docs/data-ai/{data => }/edge/conditional-sync.md (97%) rename docs/data-ai/{data => }/edge/filter-before-sync.md (97%) rename docs/data-ai/{data => }/edge/store.md (100%) rename docs/data-ai/{ai => }/infer/_index.md (71%) rename docs/data-ai/{ai => }/infer/run-inference.md (97%) rename 
docs/data-ai/{ai => react}/_index.md (66%) rename docs/data-ai/{ai => }/react/act.md (99%) rename docs/data-ai/{data => }/react/alert-data.md (98%) rename docs/data-ai/{ai => }/react/alert.md (93%) rename docs/data-ai/{ai/train => reference/APIs}/_index.md (72%) rename docs/data-ai/reference/{ => APIs}/data-client.md (100%) rename docs/data-ai/reference/{ => APIs}/data-management-client.md (100%) rename docs/data-ai/reference/{ => APIs}/ml-model-client.md (100%) rename docs/data-ai/reference/{ => APIs}/ml-training-client.md (100%) rename docs/data-ai/reference/{ => APIs}/vision-client.md (100%) rename docs/data-ai/{data => train}/_index.md (61%) rename docs/data-ai/{ai => }/train/deploy.md (90%) rename docs/data-ai/{ai => }/train/train-tflite.md (93%) rename docs/data-ai/{ai => }/train/train.md (99%) rename docs/data-ai/{ai => }/train/upload-external-data.md (98%) diff --git a/docs/data-ai/_index.md b/docs/data-ai/_index.md index 64afa87354..7b443e3c74 100644 --- a/docs/data-ai/_index.md +++ b/docs/data-ai/_index.md @@ -36,24 +36,24 @@ You can also monitor your machines through teleop, power your application logic, {{< how-to-expand "Process data" "7" "INTERMEDIATE" "" "data-platform-work" >}} {{< cards >}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} -{{% card link="/data-ai/data/edge/filter-before-sync/" noimage="true" %}} -{{% card link="/data-ai/data/edge/conditional-sync/" noimage="true" %}} -{{% card link="/data-ai/data/cloud/query/" noimage="true" %}} -{{% card link="/data-ai/data/cloud/visualize/" noimage="true" %}} -{{% card link="/data-ai/data/react/alert-data/" noimage="true" %}} -{{% card link="/data-ai/data/cloud/export/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/filter-before-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/conditional-sync/" noimage="true" %}} +{{% card link="/data-ai/cloud/query/" noimage="true" %}} +{{% card link="/data-ai/cloud/visualize/" noimage="true" %}} +{{% card link="/data-ai/react/alert-data/" noimage="true" %}} +{{% card link="/data-ai/cloud/export/" noimage="true" %}} {{< /cards >}} {{< /how-to-expand >}} {{< how-to-expand "Integrate AI" "8" "INTERMEDIATE" "" "data-platform-ai" >}} {{< cards >}} -{{% card link="/data-ai/ai/train/train-tflite/" noimage="true" %}} -{{% card link="/data-ai/ai/train/train/" noimage="true" %}} -{{% card link="/data-ai/ai/train/deploy/" noimage="true" %}} -{{% card link="/data-ai/ai/infer/run-inference/" noimage="true" %}} -{{% card link="/data-ai/ai/react/alert/" noimage="true" %}} -{{% card link="/data-ai/ai/react/act/" noimage="true" %}} +{{% card link="/data-ai/train/train-tflite/" noimage="true" %}} +{{% card link="/data-ai/train/train/" noimage="true" %}} +{{% card link="/data-ai/train/deploy/" noimage="true" %}} +{{% card link="/data-ai/infer/run-inference/" noimage="true" %}} +{{% card link="/data-ai/react/alert/" noimage="true" %}} +{{% card link="/data-ai/react/act/" noimage="true" %}} {{< /cards >}} {{< /how-to-expand >}} diff --git a/docs/data-ai/ai/react/_index.md b/docs/data-ai/ai/react/_index.md deleted file mode 100644 index 9301fe6258..0000000000 --- a/docs/data-ai/ai/react/_index.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -linkTitle: "React" -title: "React" -weight: 300 -layout: "empty" -type: "docs" -empty_node: true -open_on_desktop: true ---- diff --git a/docs/data-ai/data/cloud/_index.md b/docs/data-ai/cloud/_index.md similarity index 81% rename from docs/data-ai/data/cloud/_index.md rename to 
docs/data-ai/cloud/_index.md
index 4f645ff051..6d03863929 100644
--- a/docs/data-ai/data/cloud/_index.md
+++ b/docs/data-ai/cloud/_index.md
@@ -1,10 +1,11 @@
---
linkTitle: "Analyze"
title: "Analyze"
-weight: 20
+weight: 30
layout: "empty"
type: "docs"
empty_node: true
icon: true
open_on_desktop: true
+header_only: true
---
diff --git a/docs/data-ai/data/cloud/export.md b/docs/data-ai/cloud/export.md
similarity index 98%
rename from docs/data-ai/data/cloud/export.md
rename to docs/data-ai/cloud/export.md
index a2365d6a51..f116808163 100644
--- a/docs/data-ai/data/cloud/export.md
+++ b/docs/data-ai/cloud/export.md
@@ -15,7 +15,7 @@ aliases:
viamresources: ["sensor", "data_manager"]
platformarea: ["data", "cli"]
date: "2024-12-03"
-next: "/data-ai/data/react/alert-data/"
+next: "/data-ai/react/alert-data/"
---

You can download machine data from cloud storage to your computer with the Viam CLI.
diff --git a/docs/data-ai/data/cloud/query.md b/docs/data-ai/cloud/query.md
similarity index 97%
rename from docs/data-ai/data/cloud/query.md
rename to docs/data-ai/cloud/query.md
index 42badd74c0..39999bbe37 100644
--- a/docs/data-ai/data/cloud/query.md
+++ b/docs/data-ai/cloud/query.md
@@ -17,7 +17,7 @@ date: "2024-12-03"
description: "Query sensor data that you have synced to the Viam app using the Viam app with SQL or MQL."
---

-You can use the data management service to [capture sensor data](/data-ai/data/edge/sync/) from any machine and sync that data to the cloud.
+You can use the data management service to [capture sensor data](/data-ai/edge/capture-sync/) from any machine and sync that data to the cloud.
Then, you can follow the steps on this page to query it using {{< glossary_tooltip term_id="sql" text="SQL" >}} or {{< glossary_tooltip term_id="mql" text="MQL" >}}.
For example, you can configure data capture for several sensors on one machine, or for several sensors across multiple machines, to report the ambient operating temperature.
You can then run queries against that data to search for outliers or edge cases, to analyze how the ambient temperature affects your machines' operation.
@@ -32,7 +32,7 @@ You can then run queries against that data to search for outliers or edge cases,
### Prerequisites

You must have captured sensor data.
-See [capture sensor data](/data-ai/data/edge/capture-sync/) for more information.
+See [capture sensor data](/data-ai/edge/capture-sync/) for more information.

### Query from the app

@@ -218,7 +218,7 @@ Query results are displayed as a [JSON array](https://json-schema.org/understand

{{% expand "Captured sensor data. Click to see instructions." %}}

-Follow the guide to [capture sensor data](/data-ai/data/edge/capture-sync/).
+Follow the guide to [capture sensor data](/data-ai/edge/capture-sync/).

{{% /expand%}}

@@ -353,4 +353,4 @@ db.readings.aggregate(

For information on connecting to your Atlas instance from other MQL clients, see the MongoDB Atlas [Connect to your Cluster Tutorial](https://www.mongodb.com/docs/atlas/tutorial/connect-to-your-cluster/).

-On top of querying sensor data with third-party tools, you can also [query it with the Python SDK](/data-ai/reference/data-client/) or [visualize it](/data-ai/data/cloud/visualize/).
+On top of querying sensor data with third-party tools, you can also [query it with the Python SDK](/data-ai/reference/data-client/) or [visualize it](/data-ai/cloud/visualize/).
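To make the outlier-hunting workflow described in the hunk above concrete, here is a minimal MQL pipeline sketch in the spirit of the `db.readings.aggregate` call shown there. It assumes readings documents with a `component_name` field; the `temperature-sensor` name and the `data.readings.temperature_celsius` path are hypothetical, so substitute the names your captured data actually uses.

```json
[
  { "$match": { "component_name": "temperature-sensor" } },
  {
    "$group": {
      "_id": "$part_id",
      "avg_temp": { "$avg": "$data.readings.temperature_celsius" },
      "max_temp": { "$max": "$data.readings.temperature_celsius" }
    }
  },
  { "$sort": { "max_temp": -1 } },
  { "$limit": 5 }
]
```

Passed to `db.readings.aggregate`, a pipeline like this surfaces the machine parts reporting the highest ambient temperatures, which is one way to find the outliers and edge cases mentioned above.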
diff --git a/docs/data-ai/data/cloud/visualize.md b/docs/data-ai/cloud/visualize.md similarity index 95% rename from docs/data-ai/data/cloud/visualize.md rename to docs/data-ai/cloud/visualize.md index 1f289ce7d4..8bc62fae9c 100644 --- a/docs/data-ai/data/cloud/visualize.md +++ b/docs/data-ai/cloud/visualize.md @@ -17,7 +17,7 @@ date: "2024-12-04" description: "Use teleop or grafana to visualize sensor data from the Viam app." --- -Once you have used the data management service to [capture data](/data-ai/data/edge/capture-sync/), you can visualize your data on a dashboard with the Viam app's **TELEOP** page or a variety of third-party tools, including Grafana, Tableau, Google's Looker Studio, and more. +Once you have used the data management service to [capture data](/data-ai/edge/capture-sync/), you can visualize your data on a dashboard with the Viam app's **TELEOP** page or a variety of third-party tools, including Grafana, Tableau, Google's Looker Studio, and more. ## Teleop @@ -80,7 +80,7 @@ Configure data query and use a third-party visualization tool like Grafana to vi {{% expand "Captured sensor data. Click to see instructions." %}} -Follow the docs to [capture data](/data-ai/data/edge/capture-sync/) from a sensor. +Follow the docs to [capture data](/data-ai/edge/capture-sync/) from a sensor. {{% /expand%}} @@ -176,7 +176,7 @@ sensorData.readings.aggregate([ ) ``` -See the [guide on querying data](/data-ai/data/cloud/query/) for more information. +See the [guide on querying data](/data-ai/cloud/query/) for more information. @@ -255,7 +255,7 @@ Some third-party visualization tools support the ability to directly query your You might use this functionality to visualize only a single day's metrics, limit the visualization to a select machine or component, or to isolate an outlier in your reported data, for example. While every third-party tool is different, you would generally query your data using either {{< glossary_tooltip term_id="sql" text="SQL" >}} or {{< glossary_tooltip term_id="mql" text="MQL" >}}. -See the [guide on querying data](/data-ai/data/cloud/query/) for more information. +See the [guide on querying data](/data-ai/cloud/query/) for more information. @@ -266,7 +266,7 @@ See the [guide on querying data](/data-ai/data/cloud/query/) for more informatio For more detailed instructions on using Grafana, including a full step-by-step configuration walkthrough, see [visualizing data with Grafana](/tutorials/services/visualize-data-grafana/). -On top of visualizing sensor data with third-party tools, you can also [query it with the Python SDK](/dev/reference/apis/data-client/) or [query it with the Viam app](/data-ai/data/cloud/query/). +On top of visualizing sensor data with third-party tools, you can also [query it with the Python SDK](/dev/reference/apis/data-client/) or [query it with the Viam app](/data-ai/cloud/query/). 
To see full projects using visualization, check out these resources: diff --git a/docs/data-ai/data/react/_index.md b/docs/data-ai/data/react/_index.md deleted file mode 100644 index 9301fe6258..0000000000 --- a/docs/data-ai/data/react/_index.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -linkTitle: "React" -title: "React" -weight: 300 -layout: "empty" -type: "docs" -empty_node: true -open_on_desktop: true ---- diff --git a/docs/data-ai/data/edge/_index.md b/docs/data-ai/edge/_index.md similarity index 65% rename from docs/data-ai/data/edge/_index.md rename to docs/data-ai/edge/_index.md index 01ec93729b..e91c727301 100644 --- a/docs/data-ai/data/edge/_index.md +++ b/docs/data-ai/edge/_index.md @@ -1,10 +1,11 @@ --- -linkTitle: "Ingest" +linkTitle: "Ingest data" title: "Ingest" -weight: 10 +weight: 20 layout: "empty" type: "docs" icon: true empty_node: true open_on_desktop: true +header_only: true --- diff --git a/docs/data-ai/data/edge/capture-sync.md b/docs/data-ai/edge/capture-sync.md similarity index 100% rename from docs/data-ai/data/edge/capture-sync.md rename to docs/data-ai/edge/capture-sync.md diff --git a/docs/data-ai/data/edge/conditional-sync.md b/docs/data-ai/edge/conditional-sync.md similarity index 97% rename from docs/data-ai/data/edge/conditional-sync.md rename to docs/data-ai/edge/conditional-sync.md index 24867720fc..917015cb5b 100644 --- a/docs/data-ai/data/edge/conditional-sync.md +++ b/docs/data-ai/edge/conditional-sync.md @@ -42,7 +42,7 @@ You can also view [trigger-sync-examples module](https://github.com/viam-labs/tr {{< expand "Enable data capture and sync on your machine." >}} -Add the [data management service](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources): +Add the [data management service](/data-ai/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources): On your machine's **CONFIGURE** tab, click the **+** icon next to your machine part in the left-hand menu and select **Service**. @@ -288,7 +288,7 @@ You have now configured sync to happen during a specific time slot. ## Test your sync configuration -To test your setup, [configure a webcam](/operate/reference/components/camera/webcam/) or another component and [enable data capture on the component](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources). +To test your setup, [configure a webcam](/operate/reference/components/camera/webcam/) or another component and [enable data capture on the component](/data-ai/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources). Make sure to physically connect any hardware parts to the computer controlling your machine. For a camera component, use the `ReadImage` method. The data manager will now capture data. diff --git a/docs/data-ai/data/edge/filter-before-sync.md b/docs/data-ai/edge/filter-before-sync.md similarity index 97% rename from docs/data-ai/data/edge/filter-before-sync.md rename to docs/data-ai/edge/filter-before-sync.md index 31d68aa11e..8f3e7d3261 100644 --- a/docs/data-ai/data/edge/filter-before-sync.md +++ b/docs/data-ai/edge/filter-before-sync.md @@ -96,7 +96,7 @@ You can test the vision service from the [**CONTROL** tab](/manage/troubleshoot/ **(Optional) Trigger sync with custom logic** By default, the captured data syncs at the regular interval you specified in the data capture config. 
-If you need to trigger sync in a different way, see [Conditional cloud sync](/data-ai/data/edge/conditional-sync/) for a documented example of syncing data only at certain times of day. +If you need to trigger sync in a different way, see [Conditional cloud sync](/data-ai/edge/conditional-sync/) for a documented example of syncing data only at certain times of day. {{% /tablestep %}} {{< /table >}} diff --git a/docs/data-ai/data/edge/store.md b/docs/data-ai/edge/store.md similarity index 100% rename from docs/data-ai/data/edge/store.md rename to docs/data-ai/edge/store.md diff --git a/docs/data-ai/get-started/_index.md b/docs/data-ai/get-started/_index.md index 8046842842..d9b65eeef4 100644 --- a/docs/data-ai/get-started/_index.md +++ b/docs/data-ai/get-started/_index.md @@ -1,7 +1,7 @@ --- linkTitle: "Get started" title: "Get started" -weight: 100 +weight: 10 layout: "empty" type: "docs" empty_node: true diff --git a/docs/data-ai/get-started/capture-images.md b/docs/data-ai/get-started/capture-images.md index 18bbea7a58..c4643fa87a 100644 --- a/docs/data-ai/get-started/capture-images.md +++ b/docs/data-ai/get-started/capture-images.md @@ -26,7 +26,7 @@ To view images added to your dataset, go to the **DATA** page's [**DATASETS** ta {{% /tab %}} {{% tab name="Many images" %}} -To capture a large number of images for training an ML model, [Capture and sync image data](/data-ai/data/edge/capture-sync/) using the data management service with your camera. +To capture a large number of images for training an ML model, [Capture and sync image data](/data-ai/edge/capture-sync/) using the data management service with your camera. Viam stores the images saved by capture and sync on the [**DATA** page](https://app.viam.com/data/), but does not add the images to a dataset. We recommend you tag the images first and then use the CLI to [add the tagged images to a dataset](/data-ai/g//#add-tagged-images-to-a-dataset). diff --git a/docs/data-ai/get-started/how-sync-works.md b/docs/data-ai/get-started/how-sync-works.md index f8082411a2..40a04dc97a 100644 --- a/docs/data-ai/get-started/how-sync-works.md +++ b/docs/data-ai/get-started/how-sync-works.md @@ -72,7 +72,7 @@ When the connection is restored and sync resumes, the service continues sync whe If the interruption happens mid-file, sync resumes from the beginning of that file. To avoid syncing files that are still being written to, the data management service only syncs arbitrary files that haven't been modified in the previous 10 seconds. -This default can be changed with the [`file_last_modified_millis` config attribute](/data-ai/data/edge/capture-sync/). +This default can be changed with the [`file_last_modified_millis` config attribute](/data-ai/edge/capture-sync/). 
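Because several hunks above reference sync attributes such as `file_last_modified_millis` and `maximum_num_sync_threads`, a minimal data management service sketch may be useful for orientation. The values shown are illustrative placeholders, not recommended defaults.

```json
{
  "name": "data_manager-1",
  "type": "data_manager",
  "attributes": {
    "capture_dir": "",
    "sync_interval_mins": 1,
    "additional_sync_paths": ["/home/user/images"],
    "file_last_modified_millis": 10000,
    "maximum_num_sync_threads": 100
  }
}
```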
## Automatic data deletion diff --git a/docs/data-ai/ai/infer/_index.md b/docs/data-ai/infer/_index.md similarity index 71% rename from docs/data-ai/ai/infer/_index.md rename to docs/data-ai/infer/_index.md index 502765bc64..0eed2b5e1c 100644 --- a/docs/data-ai/ai/infer/_index.md +++ b/docs/data-ai/infer/_index.md @@ -1,9 +1,10 @@ --- -linkTitle: "Infer" +linkTitle: "Inference" title: "Infer" weight: 200 layout: "empty" type: "docs" empty_node: true open_on_desktop: true +header_only: true --- diff --git a/docs/data-ai/ai/infer/run-inference.md b/docs/data-ai/infer/run-inference.md similarity index 97% rename from docs/data-ai/ai/infer/run-inference.md rename to docs/data-ai/infer/run-inference.md index a177c77b14..e5042ed420 100644 --- a/docs/data-ai/ai/infer/run-inference.md +++ b/docs/data-ai/infer/run-inference.md @@ -37,7 +37,7 @@ One vision service you can use to run inference on a camera stream if you have a ### Configure an mlmodel vision service Add the `vision / ML model` service to your machine. -Then, from the **Select model** dropdown, select the name of the ML model service you configured when [deploying](/data-ai/ai/train/deploy/) your model (for example, `mlmodel-1`). +Then, from the **Select model** dropdown, select the name of the ML model service you configured when [deploying](/data-ai/train/deploy/) your model (for example, `mlmodel-1`). **Save** your changes. diff --git a/docs/data-ai/ai/_index.md b/docs/data-ai/react/_index.md similarity index 66% rename from docs/data-ai/ai/_index.md rename to docs/data-ai/react/_index.md index 0918a7eb0e..9a0fdbd784 100644 --- a/docs/data-ai/ai/_index.md +++ b/docs/data-ai/react/_index.md @@ -1,6 +1,6 @@ --- -linkTitle: "Apply AI models" -title: "Apply AI models" +linkTitle: "Act" +title: "Act" weight: 300 layout: "empty" type: "docs" diff --git a/docs/data-ai/ai/react/act.md b/docs/data-ai/react/act.md similarity index 99% rename from docs/data-ai/ai/react/act.md rename to docs/data-ai/react/act.md index 70c719e1f9..edef08ad41 100644 --- a/docs/data-ai/ai/react/act.md +++ b/docs/data-ai/react/act.md @@ -5,7 +5,7 @@ weight: 70 layout: "docs" type: "docs" description: "Use the vision service API to act based on inferences." -next: "/data-ai/ai/train/upload-external-data/" +next: "/data-ai/train/upload-external-data/" --- You can use the [vision service API](/dev/reference/apis/services/vision/) to get information about your machine's inferences and program behavior based on that. diff --git a/docs/data-ai/data/react/alert-data.md b/docs/data-ai/react/alert-data.md similarity index 98% rename from docs/data-ai/data/react/alert-data.md rename to docs/data-ai/react/alert-data.md index 88f1811e80..50c12b7642 100644 --- a/docs/data-ai/data/react/alert-data.md +++ b/docs/data-ai/react/alert-data.md @@ -5,7 +5,7 @@ weight: 60 layout: "docs" type: "docs" description: "Use triggers to send email notifications or webhook requests when data from the machine is synced." -prev: "/data-ai/data/cloud/export/" +prev: "/data-ai/cloud/export/" --- You can use triggers to send email notifications or webhook requests when data from the machine is synced, even captured from a specific component with a specified condition. @@ -53,7 +53,7 @@ For example, a trigger configured to fire when data is captured from the motor ` For more information, see [Conditions](#conditions). 
{{% alert title="Note" color="note" %}} -You must [configure data capture](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources) for your component to use this trigger. +You must [configure data capture](/data-ai/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources) for your component to use this trigger. {{% /alert %}} {{% /tab %}} diff --git a/docs/data-ai/ai/react/alert.md b/docs/data-ai/react/alert.md similarity index 93% rename from docs/data-ai/ai/react/alert.md rename to docs/data-ai/react/alert.md index 99726d6fc9..262131ef5c 100644 --- a/docs/data-ai/ai/react/alert.md +++ b/docs/data-ai/react/alert.md @@ -7,7 +7,7 @@ type: "docs" description: "Use triggers to send email notifications when inferences are made." --- -At this point, you should have already set up and tested [computer vision functionality](/data-ai/ai/infer/run-inference/). +At this point, you should have already set up and tested [computer vision functionality](/data-ai/infer/run-inference/). On this page, you'll learn how to use triggers to send alerts in the form of email notifications or webhook requests when certain detections or classifications are made. You will build a system that can monitor camera feeds and detect situations that require review. @@ -27,7 +27,7 @@ Next, you'll configure a trigger to send email notifications or webhook requests {{< expand "A configured camera and vision service. Click to see instructions." >}} -Follow the instructions to [configure a camera](/operate/reference/components/camera/) and [run inference](/data-ai/ai/infer/run-inference/). +Follow the instructions to [configure a camera](/operate/reference/components/camera/) and [run inference](/data-ai/infer/run-inference/). {{< /expand >}} @@ -74,7 +74,7 @@ For example, if using the YOLOv8 model (named `yolo`) for hardhat detection, you ## Configure data capture and sync -Viam's built-in [data management service](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources) allows you to, among other things, capture images and sync them to the cloud. +Viam's built-in [data management service](/data-ai/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources) allows you to, among other things, capture images and sync them to the cloud. Configure data capture on the `filtered-camera` camera to capture images of detections or classifications: @@ -99,7 +99,7 @@ Configure data capture on the `filtered-camera` camera to capture images of dete ## Set up alerts -[Triggers](/data-ai/data/react/alert-data/) allow you to send webhook requests or email notifications when certain events happen. +[Triggers](/data-ai/react/alert-data/) allow you to send webhook requests or email notifications when certain events happen. You can use the **Data has been synced to the cloud** (`part_data_ingested`) trigger to send alerts whenever an image with an anomaly detection is synced to the cloud from your object filter camera. 
diff --git a/docs/data-ai/ai/train/_index.md b/docs/data-ai/reference/APIs/_index.md similarity index 72% rename from docs/data-ai/ai/train/_index.md rename to docs/data-ai/reference/APIs/_index.md index 2d6f06dd9a..24ab0655e2 100644 --- a/docs/data-ai/ai/train/_index.md +++ b/docs/data-ai/reference/APIs/_index.md @@ -1,6 +1,6 @@ --- -linkTitle: "Train" -title: "Train" +linkTitle: "APIs" +title: "APIs" weight: 100 layout: "empty" type: "docs" diff --git a/docs/data-ai/reference/data-client.md b/docs/data-ai/reference/APIs/data-client.md similarity index 100% rename from docs/data-ai/reference/data-client.md rename to docs/data-ai/reference/APIs/data-client.md diff --git a/docs/data-ai/reference/data-management-client.md b/docs/data-ai/reference/APIs/data-management-client.md similarity index 100% rename from docs/data-ai/reference/data-management-client.md rename to docs/data-ai/reference/APIs/data-management-client.md diff --git a/docs/data-ai/reference/ml-model-client.md b/docs/data-ai/reference/APIs/ml-model-client.md similarity index 100% rename from docs/data-ai/reference/ml-model-client.md rename to docs/data-ai/reference/APIs/ml-model-client.md diff --git a/docs/data-ai/reference/ml-training-client.md b/docs/data-ai/reference/APIs/ml-training-client.md similarity index 100% rename from docs/data-ai/reference/ml-training-client.md rename to docs/data-ai/reference/APIs/ml-training-client.md diff --git a/docs/data-ai/reference/vision-client.md b/docs/data-ai/reference/APIs/vision-client.md similarity index 100% rename from docs/data-ai/reference/vision-client.md rename to docs/data-ai/reference/APIs/vision-client.md diff --git a/docs/data-ai/reference/advanced-data-capture-sync.md b/docs/data-ai/reference/advanced-data-capture-sync.md index ecbda6c853..e221e1ca1f 100644 --- a/docs/data-ai/reference/advanced-data-capture-sync.md +++ b/docs/data-ai/reference/advanced-data-capture-sync.md @@ -7,7 +7,7 @@ layout: "docs" type: "docs" platformarea: ["data"] description: "Advanced data capture and data sync configurations." -prev: /data-ai/data/edge/conditional-sync/ +prev: /data-ai/edge/conditional-sync/ date: "2025-02-10" --- @@ -522,7 +522,7 @@ The following attributes are available for data capture configuration: | Name | Type | Required? | Description | | ------------------ | ------ | --------- | ----------- | | `capture_frequency_hz` | float | **Required** | Frequency in hertz at which to capture data. For example, to capture a reading every 2 seconds, enter `0.5`. | -| `method` | string | **Required** | Depends on the type of component or service. See [Supported components and services](/data-ai/data/edge/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync). | +| `method` | string | **Required** | Depends on the type of component or service. See [Supported components and services](/data-ai/edge/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync). | | `retention_policy` | object | Optional | Option to configure how long data collected by this component or service should remain stored in the Viam Cloud. You must set this in JSON mode. See the JSON example for a camera component.
**Options:** `"days": <int>`, `"binary_limit_gb": <int>`, `"tabular_limit_gb": <int>`.
Days are in UTC time. Setting a retention policy of 1 day means that data stored now will be deleted the following day **in UTC time**. You can set either or both of the size limit options and size is in gigabytes. The `retention_policy` does not affect logs. For information about logs, see [Logging](/operate/reference/viam-server/#logging). | | `recent_data_store` | object | Optional | Configure a rolling time frame of recent data to store in a [hot data store](#capture-to-the-hot-data-store) for faster access. Example: `{ "stored_hours": 24 }` | | `additional_params` | depends | depends | Varies based on the method. For example, `ReadImage` requires a MIME type. | diff --git a/docs/data-ai/reference/architecture.md b/docs/data-ai/reference/architecture.md index 9a1d7aeed4..900e517e23 100644 --- a/docs/data-ai/reference/architecture.md +++ b/docs/data-ai/reference/architecture.md @@ -1,7 +1,7 @@ --- linkTitle: "Machine-cloud architecture" title: "Viam Architecture" -weight: 1000 +weight: 80 layout: "docs" type: "docs" layout: "empty" diff --git a/docs/data-ai/reference/mlmodel-design.md b/docs/data-ai/reference/mlmodel-design.md index d7b2f723b0..f2be95f713 100644 --- a/docs/data-ai/reference/mlmodel-design.md +++ b/docs/data-ai/reference/mlmodel-design.md @@ -15,11 +15,11 @@ aliases: - /operate/reference/advanced-modules/mlmodel-design/ --- -The [Machine Learning (ML) model service](/data-ai/ai/train/deploy/) allows you to deploy machine learning models to your smart machine. +The [Machine Learning (ML) model service](/data-ai/train/deploy/) allows you to deploy machine learning models to your smart machine. Vision services, like [an `"mlmodel"` detector](/dev/reference/apis/services/vision/#detections) or [classifier](/dev/reference/apis/services/vision/#classifications), enable your machines to identify and classify objects in images with the deployed models' predictions. The two services work closely together, with the vision service relying on the deployed ML model to make inferences. -If you are [designing your own ML Model service](/data-ai/ai/train/deploy/), you must try to make your ML models' shapes match the input and output tensors the `mlmodel` vision service expects to work with if you want the two services to coordinate in classification or detection. +If you are [designing your own ML Model service](/data-ai/train/deploy/), you must try to make your ML models' shapes match the input and output tensors the `mlmodel` vision service expects to work with if you want the two services to coordinate in classification or detection. To be able to use a deployed ML model, the `mlmodel` vision service checks for descriptions of these characteristics in the [metadata](/dev/reference/apis/services/ml/#metadata) of the model, as defined in [the Python SDK](https://python.viam.dev/autoapi/viam/gen/service/mlmodel/v1/mlmodel_pb2/index.html#viam.gen.service.mlmodel.v1.mlmodel_pb2.Metadata). For an example of this, see [Example Metadata](#example-metadata). 
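To illustrate the metadata check that the mlmodel-design hunk above describes, here is a rough JSON rendering of the tensor information the `mlmodel` vision service looks for in a detector's metadata. The `location`, `category`, and `score` tensor names follow the detector convention that page references; the shapes and data types shown are illustrative assumptions.

```json
{
  "name": "my-detector",
  "input_info": [
    { "name": "image", "data_type": "uint8", "shape": [1, 320, 320, 3] }
  ],
  "output_info": [
    { "name": "location", "data_type": "float32", "shape": [1, 25, 4] },
    { "name": "category", "data_type": "float32", "shape": [1, 25] },
    { "name": "score", "data_type": "float32", "shape": [1, 25] }
  ]
}
```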
diff --git a/docs/data-ai/data/_index.md b/docs/data-ai/train/_index.md
similarity index 61%
rename from docs/data-ai/data/_index.md
rename to docs/data-ai/train/_index.md
index d3c754f140..1c8732fd26 100644
--- a/docs/data-ai/data/_index.md
+++ b/docs/data-ai/train/_index.md
@@ -1,7 +1,7 @@
---
-linkTitle: "Process Data"
-title: "Process Data"
-weight: 200
+linkTitle: "Train AI"
+title: "Train AI"
+weight: 100
layout: "empty"
type: "docs"
empty_node: true
diff --git a/docs/data-ai/ai/train/deploy.md b/docs/data-ai/train/deploy.md
similarity index 90%
rename from docs/data-ai/ai/train/deploy.md
rename to docs/data-ai/train/deploy.md
index ebe26d25ee..2557c15ee0 100644
--- a/docs/data-ai/ai/train/deploy.md
+++ b/docs/data-ai/train/deploy.md
@@ -75,15 +75,15 @@ Save your config to use your specified version of the ML model.
The service works with models trained inside and outside the Viam app:

-- You can [train TFlite](/data-ai/ai/train/train-tflite/) or [other model frameworks](/data-ai/ai/train/train/) on data from your machines.
+- You can [train TFlite](/data-ai/train/train-tflite/) or [other model frameworks](/data-ai/train/train/) on data from your machines.
- You can use [ML models](https://app.viam.com/registry?type=ML+Model) from the [Viam Registry](https://app.viam.com/registry).
- You can upload externally trained models from a model file on the [**MODELS** tab](https://app.viam.com/models) in the **DATA** section of the Viam app.
-- You can use a [model](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) trained outside the Viam platform whose files are on your machine. See the documentation of the model of ML model service you're using (pick one that supports your model framework) for instructions on this.
+- You can use a [model](/data-ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) trained outside the Viam platform whose files are on your machine. See the documentation for the ML model service model you're using (pick one that supports your model framework) for instructions.

On its own the ML model service only runs the model.
After deploying your model, you need to configure an additional service to use the deployed model.
For example, you can configure an [`mlmodel` vision service](/operate/reference/services/vision/) to visualize the inferences your model makes.
-Follow our docs to [run inference](/data-ai/ai/infer/run-inference/) to add an `mlmodel` vision service and see inferences.
+Follow our docs to [run inference](/data-ai/infer/run-inference/) to add an `mlmodel` vision service and see inferences.

For other use cases, consider [creating custom functionality with a module](/operate/get-started/other-hardware/).
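As a companion to the deployment options listed in the deploy.md hunk above, here is a minimal sketch pairing an ML model service with an `mlmodel` vision service. The `tflite_cpu` model name, the `${packages...}` paths, and the attribute set are assumptions based on a common TFLite setup; adapt them to the ML model service you actually use.

```json
[
  {
    "name": "mlmodel-1",
    "type": "mlmodel",
    "model": "tflite_cpu",
    "attributes": {
      "model_path": "${packages.ml_model.my-model}/my-model.tflite",
      "label_path": "${packages.ml_model.my-model}/labels.txt",
      "num_threads": 1
    }
  },
  {
    "name": "vision-1",
    "type": "vision",
    "model": "mlmodel",
    "attributes": { "mlmodel_name": "mlmodel-1" }
  }
]
```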
diff --git a/docs/data-ai/ai/train/train-tflite.md b/docs/data-ai/train/train-tflite.md similarity index 93% rename from docs/data-ai/ai/train/train-tflite.md rename to docs/data-ai/train/train-tflite.md index 61383a57db..5e1b351a03 100644 --- a/docs/data-ai/ai/train/train-tflite.md +++ b/docs/data-ai/train/train-tflite.md @@ -131,7 +131,7 @@ If the results exceed the confidence threshold, the **Run model** section shows You can test both detection models and classifier models using the following resources together: - [a camera](/operate/reference/components/camera/) -- [a `tflite_cpu` ML model](/data-ai/ai/train/deploy/) with the model you just trained +- [a `tflite_cpu` ML model](/data-ai/train/deploy/) with the model you just trained - [an `mlmodel` vision service](/operate/reference/services/vision/mlmodel/) using the `tflite_cpu` model ## Iterate on your ML model @@ -149,12 +149,12 @@ To capture images of edge cases and re-train your model using those images, comp 1. Visit the **DATASET** tab of the **DATA** page and annotate the image. -1. Repeat the [steps above](/data-ai/ai/train/train-tflite/#train-a-machine-learning-model) to train and release a new version of your ML model. Your machines will automatically update to the new version of the model soon after release. +1. Repeat the [steps above](/data-ai/train/train-tflite/#train-a-machine-learning-model) to train and release a new version of your ML model. Your machines will automatically update to the new version of the model soon after release. ## Next steps Now your machine can make inferences about its environment. -The next step is to [deploy](/data-ai/ai/train/deploy/) the ML model and then [act](/data-ai/ai/react/act/) or [alert](/data-ai/ai/react/alert/) based on these inferences. +The next step is to [deploy](/data-ai/train/deploy/) the ML model and then [act](/data-ai/react/act/) or [alert](/data-ai/react/alert/) based on these inferences. See the following tutorials for examples of using machine learning models to make your machine do things based on its inferences about its environment: diff --git a/docs/data-ai/ai/train/train.md b/docs/data-ai/train/train.md similarity index 99% rename from docs/data-ai/ai/train/train.md rename to docs/data-ai/train/train.md index 3efbc1427d..b04736701b 100644 --- a/docs/data-ai/ai/train/train.md +++ b/docs/data-ai/train/train.md @@ -845,5 +845,5 @@ You can also view your training jobs' logs with the [`viam train logs`](/dev/too {{% /tablestep %}} {{< /table >}} -To use your new model with machines, you must [deploy it](/data-ai/ai/train/deploy/) with the appropriate ML model service. -Then you can use another service, such as the vision service, to [run inference](/data-ai/ai/infer/run-inference/). +To use your new model with machines, you must [deploy it](/data-ai/train/deploy/) with the appropriate ML model service. +Then you can use another service, such as the vision service, to [run inference](/data-ai/infer/run-inference/). 
diff --git a/docs/data-ai/ai/train/upload-external-data.md b/docs/data-ai/train/upload-external-data.md similarity index 98% rename from docs/data-ai/ai/train/upload-external-data.md rename to docs/data-ai/train/upload-external-data.md index fd332ad77c..3dc8a959e9 100644 --- a/docs/data-ai/ai/train/upload-external-data.md +++ b/docs/data-ai/train/upload-external-data.md @@ -13,7 +13,7 @@ aliases: - /how-tos/upload-data/ date: "2024-12-04" description: "Upload data to the Viam app from your local computer or mobile device using the data client API, Viam CLI, or Viam mobile app." -prev: "/data-ai/ai/react/act/" +prev: "/data-ai/react/act/" --- When you configure the data management service, Viam automatically uploads data from the default directory `~/.viam/capture` and any directory you configured. @@ -38,7 +38,7 @@ However, if you already have a cache of data you'd like to use with Viam, you ca {{< expand "Enable data capture and sync on your machine." >}} -Add the [data management service](/data-ai/data/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources): +Add the [data management service](/data-ai/edge/capture-sync/#configure-data-capture-and-sync-for-individual-resources): On your machine's **CONFIGURE** tab, click the **+** icon next to your machine part in the left-hand menu and select **Service**. @@ -296,5 +296,5 @@ However, the uploaded images will not be associated with a component or method. ## Next steps -Now that you have a batch of data uploaded, you can [train an ML model](/data-ai/ai/train/train-tflite/) on it. +Now that you have a batch of data uploaded, you can [train an ML model](/data-ai/train/train-tflite/) on it. Or, if you want to collect and upload data _not_ in a batch, see [Create a training dataset](/data-ai/get-started/create-training-dataset/). diff --git a/docs/dev/_index.md b/docs/dev/_index.md index f6a9b62539..e0cd0aa8ca 100644 --- a/docs/dev/_index.md +++ b/docs/dev/_index.md @@ -303,7 +303,7 @@ std::cout << "co2-monitor get_readings return value " << co2monitor_get_readings Once you have configured a physical sensor or anything else that provides measurements, you can get sensor readings using the sensor API. -[Collect sensor data →](/data-ai/data/edge/capture-sync/) +[Collect sensor data →](/data-ai/edge/capture-sync/) @@ -546,7 +546,7 @@ tabular_data, count, last = await data_client.tabular_data_by_filter( You can query synced sensor data, images, and any other binary or timeseries data from all your machines using the data client API. -[Learn about Data Management →](/data-ai/data/edge/capture-sync/) +[Learn about Data Management →](/data-ai/edge/capture-sync/) @@ -753,7 +753,7 @@ job_metadata = await ml_training_client.get_training_job( Build machine learning models based on your machines' data any time using the ML training client API -[Train and deploy ML models →](/data-ai/ai/train/train-tflite/) +[Train and deploy ML models →](/data-ai/train/train-tflite/) diff --git a/docs/dev/reference/apis/services/data.md b/docs/dev/reference/apis/services/data.md index 47a114b1b4..6b40efaadf 100644 --- a/docs/dev/reference/apis/services/data.md +++ b/docs/dev/reference/apis/services/data.md @@ -14,7 +14,7 @@ aliases: The data management service API allows you to sync data stored on the machine it is deployed on to the cloud. 
-The [data management service](/data-ai/data/edge/capture-sync/) supports the following methods:
+The [data management service](/data-ai/edge/capture-sync/) supports the following methods:

{{< readfile "/static/include/services/apis/generated/data_manager-table.md" >}}

diff --git a/docs/dev/reference/apis/services/ml.md b/docs/dev/reference/apis/services/ml.md
index f9a894761a..437bb42275 100644
--- a/docs/dev/reference/apis/services/ml.md
+++ b/docs/dev/reference/apis/services/ml.md
@@ -15,7 +15,7 @@ aliases:

The ML model service API allows you to make inferences based on a provided ML model.

-The [ML Model service](/data-ai/ai/train/deploy/) supports the following methods:
+The [ML Model service](/data-ai/train/deploy/) supports the following methods:

{{< readfile "/static/include/services/apis/generated/mlmodel-table.md" >}}

diff --git a/docs/dev/reference/changelog.md b/docs/dev/reference/changelog.md
index 13f590053c..ccbc7329f1 100644
--- a/docs/dev/reference/changelog.md
+++ b/docs/dev/reference/changelog.md
@@ -262,7 +262,7 @@ The [arm interface](/dev/reference/apis/components/arm/) now includes a [MoveThr
{{% changelog date="2024-10-16" color="added" title="Set data retention policies" %}}

You can now set how long data collected by a component should remain stored in the Viam Cloud in the component's data capture configuration.
-For more information, see [Data management service](/data-ai/data/edge/capture-sync/).
+For more information, see [Data management service](/data-ai/edge/capture-sync/).

{{% /changelog %}}

@@ -296,14 +296,14 @@ For more information, see [Configure provisioning with viam-agent](/manage/fleet
{{% changelog date="2024-08-16" color="added" title="Data capture for vision" %}}

Data capture is now possible for the vision service.
-For more information, see [Supported components and services](/data-ai/data/edge/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync).
+For more information, see [Supported components and services](/data-ai/edge/capture-sync/#click-to-see-resources-that-support-data-capture-and-cloud-sync).

{{% /changelog %}}

{{% changelog date="2024-08-01" color="added" title="Create custom training scripts" %}}

You can now upload custom training scripts to the Viam Registry and use them to train machine learning models.
-For more information, see [Create custom training scripts](/data-ai/ai/train/train/).
+For more information, see [Create custom training scripts](/data-ai/train/train/).

{{% /changelog %}}

@@ -428,7 +428,7 @@ In addition to other improvements, your component, service, and other resource c
{{% changelog date="2024-03-01" color="added" title="Additional ML models" %}}

Viam has added support for the TensorFlow, PyTorch, and ONNX ML model frameworks, expanding upon the existing support for TensorFlow Lite models.
-You can now upload your own ML model(/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) using any of these frameworks for use with the Vision service.
+You can now upload your own [ML model](/data-ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) using any of these frameworks for use with the Vision service.

{{% /changelog %}}

@@ -467,7 +467,7 @@ You can now use the [generic service](/operate/reference/components/generic/) to

{{% changelog date="2024-02-12" color="added" title="ML models in the registry" %}}

-You can now upload [machine learning (ML) models](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) to the Viam Registry, in addition to modules.
+You can now upload [machine learning (ML) models](/data-ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) to the Viam Registry, in addition to modules. You may upload models you have trained yourself using the Viam app, or models you have trained outside of the App. When uploading, you have the option to make your model available to the general public for reuse. @@ -481,8 +481,8 @@ Viam has added a [sensor-controlled base](/operate/reference/components/base/sen {{% changelog date="2024-01-31" color="added" title="Visualize captured data" %}} -You can now [visualize your data](/data-ai/data/cloud/visualize/) using many popular third-party visualization tools, including Grafana, Tableau, Google’s Looker Studio, and more. -You can visualize any data, such as sensor readings, that you have [synced](/data-ai/data/edge/capture-sync/) to the Viam app from your machine. +You can now [visualize your data](/data-ai/cloud/visualize/) using many popular third-party visualization tools, including Grafana, Tableau, Google’s Looker Studio, and more. +You can visualize any data, such as sensor readings, that you have [synced](/data-ai/edge/capture-sync/) to the Viam app from your machine. See [Visualize data with Grafana](/tutorials/services/visualize-data-grafana/) for a full walkthrough focused on Grafana specifically. @@ -490,7 +490,7 @@ See [Visualize data with Grafana](/tutorials/services/visualize-data-grafana/) f {{% changelog date="2024-01-31" color="added" title="Use triggers to trigger actions" %}} -You can now configure [triggers](/data-ai/data/react/alert-data/) (previously called webhooks) to execute actions when certain types of data are sent from your machine to the cloud. +You can now configure [triggers](/data-ai/react/alert-data/) (previously called webhooks) to execute actions when certain types of data are sent from your machine to the cloud. {{% /changelog %}} @@ -527,13 +527,13 @@ Location secrets, the previous method of authentication, is deprecated and will Once you have added the data management service and synced data, such as sensor readings, to the Viam app, you can now run queries against both captured data as well as its metadata using either SQL or MQL. -For more information, see [Query Data with SQL or MQL](/data-ai/data/cloud/query/). +For more information, see [Query Data with SQL or MQL](/data-ai/cloud/query/). {{% /changelog %}} {{% changelog date="2023-11-30" color="changed" title="Model training from datasets" %}} -To make it easier to iterate while training machine learning models from image data, you now train models from [datasets](/data-ai/ai/train/create-dataset/). +To make it easier to iterate while training machine learning models from image data, you now train models from [datasets](/data-ai/train/create-dataset/). {{% /changelog %}} @@ -604,7 +604,7 @@ You now have the capability to use a [power sensor component](/operate/reference {{% /changelog %}} {{% changelog date="2023-09-30" color="added" title="Filter component’s data before the cloud" %}} -Viam has written a module that allows you to filter data based on specific criteria before syncing it to [Viam's cloud](/data-ai/data/edge/capture-sync/). +Viam has written a module that allows you to filter data based on specific criteria before syncing it to [Viam's cloud](/data-ai/edge/capture-sync/). 
It equips machines to:

- Remove data that is not of interest

@@ -709,7 +709,7 @@ To better control gantries with Viam, you can now:

{{% changelog date="2023-06-30" color="improved" title="Optimized Viam-trained object detection models" %}}

-This update for TFlite object detection models [trained with the machine learning service](/data-ai/ai/train/train-tflite/) brings significant improvements, including:
+This update for TFlite object detection models [trained with the machine learning service](/data-ai/train/train-tflite/) brings significant improvements, including:

- 76% faster model inference for camera streams
- 64% quicker model training for object detection

@@ -725,7 +725,7 @@ The beta release of the [TypeScript SDK](https://github.com/viamrobotics/viam-ty

{{% changelog date="2023-05-31" color="added" title="Train object detection ML models" %}}

-You now have the capability to directly [train a TFlite object detection models](/data-ai/ai/train/train-tflite/) in addition to image classification models from within the Viam app.
+You now have the capability to directly [train TFlite object detection models](/data-ai/train/train-tflite/) in addition to image classification models from within the Viam app.

This update allows you to:

@@ -1142,15 +1142,15 @@ You will no longer be able to add or remove models using the SDKs.

#### Add machine learning vision models to a vision service

The way to add machine learning vision models is changing.
-You will need to first register the machine learning model file with the [ML model service](/data-ai/ai/train/deploy/) and then add that registered model to a vision service.
+You will need to first register the machine learning model file with the [ML model service](/data-ai/train/deploy/) and then add that registered model to a vision service.

{{% /changelog %}}

{{% changelog date="2023-03-31" color="added" title="Machine learning for image classification models" %}}

-You can now [train](/data-ai/ai/train/train-tflite/) and [deploy](/data-ai/ai/train/deploy/) image classification models with the [data management service](/data-ai/data/edge/capture-sync/) and use your machine's image data directly within Viam.
-Additionally, you can upload and use existing [machine learning models](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) with your machines.
-For more information on using data synced to the cloud to train machine learning models, read [train a TFlite](/data-ai/ai/train/train-tflite/) or [another model](/data-ai/ai/train/train/).
+You can now [train](/data-ai/train/train-tflite/) and [deploy](/data-ai/train/deploy/) image classification models with the [data management service](/data-ai/edge/capture-sync/) and use your machine's image data directly within Viam.
+Additionally, you can upload and use existing [machine learning models](/data-ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) with your machines.
+For more information on using data synced to the cloud to train machine learning models, read [train a TFlite model](/data-ai/train/train-tflite/) or [another model](/data-ai/train/train/).
{{% /changelog %}} diff --git a/docs/dev/reference/glossary/mql.md b/docs/dev/reference/glossary/mql.md index d8780dc684..8579e21044 100644 --- a/docs/dev/reference/glossary/mql.md +++ b/docs/dev/reference/glossary/mql.md @@ -7,4 +7,4 @@ short_description: MQL is the MongoDB query language, similar to SQL but specifi MQL is the [MongoDB query language](https://www.mongodb.com/docs/manual/tutorial/query-documents/), similar to {{< glossary_tooltip term_id="sql" text="SQL" >}} but specific to the MongoDB document model. -You can use MQL to query data that you have synced to the Viam app using the [data management service](/data-ai/data/edge/capture-sync/). +You can use MQL to query data that you have synced to the Viam app using the [data management service](/data-ai/edge/capture-sync/). diff --git a/docs/dev/reference/glossary/sql.md b/docs/dev/reference/glossary/sql.md index 0eb9148767..627a1518a9 100644 --- a/docs/dev/reference/glossary/sql.md +++ b/docs/dev/reference/glossary/sql.md @@ -7,4 +7,4 @@ short_description: SQL (structured query language) is the widely-used, industry- [SQL (structured query language)](https://en.wikipedia.org/wiki/SQL) is the widely-used, industry-standard query language popular with [relational databases](https://en.wikipedia.org/wiki/Relational_database). -You can use SQL to query data that you have synced to the Viam app using the [data management service](/data-ai/data/edge/capture-sync/). +You can use SQL to query data that you have synced to the Viam app using the [data management service](/data-ai/edge/capture-sync/). diff --git a/docs/dev/reference/sdks/connectivity.md b/docs/dev/reference/sdks/connectivity.md index cf026052e3..2b961f8077 100644 --- a/docs/dev/reference/sdks/connectivity.md +++ b/docs/dev/reference/sdks/connectivity.md @@ -104,7 +104,7 @@ When a machine loses its connection to the internet but is still connected to a - Client sessions connected through the same LAN or WAN will function normally. - Client sessions connected through the internet will timeout and end. If the client is on the same LAN or WAN but the route it chose to connect is through the internet, the client will automatically disconnect and then reconnect over LAN. -- Cloud sync for the [data management service](/data-ai/data/edge/capture-sync/) will pause until the internet connection is re-established since the machine will be unable to connect to the [Viam app](https://app.viam.com). +- Cloud sync for the [data management service](/data-ai/edge/capture-sync/) will pause until the internet connection is re-established since the machine will be unable to connect to the [Viam app](https://app.viam.com). When a machine loses its connection to LAN or WAN, all client sessions will timeout and end by default. diff --git a/docs/dev/reference/sdks/python/python-venv.md b/docs/dev/reference/sdks/python/python-venv.md index 7837350871..73977696a5 100644 --- a/docs/dev/reference/sdks/python/python-venv.md +++ b/docs/dev/reference/sdks/python/python-venv.md @@ -68,7 +68,7 @@ pip3 install viam-sdk This installs the Viam Python SDK and all required general dependencies. 
-If you intend to use the [ML (machine learning) model service](/data-ai/ai/train/deploy/), install the Python SDK using the `mlmodel` extra: +If you intend to use the [ML (machine learning) model service](/data-ai/train/deploy/), install the Python SDK using the `mlmodel` extra: ```sh {class="command-line" data-prompt="$"} pip3 install 'viam-sdk[mlmodel]' diff --git a/docs/dev/tools/cli.md b/docs/dev/tools/cli.md index 0b1439df76..601728b24c 100644 --- a/docs/dev/tools/cli.md +++ b/docs/dev/tools/cli.md @@ -399,7 +399,7 @@ The **Binary Data ID** is shown under the **DETAILS** subtab that appears on the You cannot use filter arguments such as `--start` or `--end` with the `ids` argument. -See [Create a dataset](/data-ai/ai/train/create-dataset/) for more information. +See [Create a dataset](/data-ai/train/create-dataset/) for more information. ##### Using the `filter` argument @@ -430,7 +430,7 @@ Removing the `viam data export` string, you can use the same filter parameters ( You cannot use the `--binary-data-ids` argument when using `filter`. -See [Create a dataset](/data-ai/ai/train/create-dataset/) for more information. +See [Create a dataset](/data-ai/train/create-dataset/) for more information. ### `data` @@ -499,8 +499,8 @@ done | `export tabular` | Export tabular or sensor data to a specified location in the .ndjson output format. You can copy this from the UI with a filter. See [Copy `export` command](#copy-export-command). | - | | `export binary` | Export binary or image data to a specified location. Binary data will be downloaded in the original output it was specified as. You can copy this from the UI with a filter. See [Copy `export` command](#copy-export-command). | - | | `tag` | Add or remove tags from data matching the IDs or filter. | `ids`, `filter` | -| `database configure` | Create a new database user for the Viam organization's MongoDB Atlas Data Federation instance, or change the password of an existing user. See [Configure data query](/data-ai/data/cloud/query/#configure-data-query). | - | -| `database hostname` | Get the MongoDB Atlas Data Federation instance hostname and connection URI. See [Configure data query](/data-ai/data/cloud/query/#configure-data-query). | - | +| `database configure` | Create a new database user for the Viam organization's MongoDB Atlas Data Federation instance, or change the password of an existing user. See [Configure data query](/data-ai/cloud/query/#configure-data-query). | - | +| `database hostname` | Get the MongoDB Atlas Data Federation instance hostname and connection URI. See [Configure data query](/data-ai/cloud/query/#configure-data-query). | - | | `delete binary` | Delete binary data from the Viam Cloud. | - | | `delete tabular` | Delete tabular data from the Viam Cloud. | - | | `--help` | Return help | - | @@ -1309,7 +1309,7 @@ You can set a default profile by using the `VIAM_CLI_PROFILE_NAME` environment v ### `training-script` -Manage training scripts for [custom ML training](/data-ai/ai/train/train/). +Manage training scripts for [custom ML training](/data-ai/train/train/). 
```sh {class="command-line" data-prompt="$"}
viam training-script upload --framework=<framework> --org-id=<org-id> --path=<path-to-script> --script-name=<script-name> --type=<type>
diff --git a/docs/manage/fleet/reuse-configuration.md b/docs/manage/fleet/reuse-configuration.md
index 5c86d48dac..815ac25674 100644
--- a/docs/manage/fleet/reuse-configuration.md
+++ b/docs/manage/fleet/reuse-configuration.md
@@ -39,7 +39,7 @@ You must be an [organization owner](/manage/manage/rbac/) to create fragments fo
{{% tablestep number=2 %}}
**Add and configure all the resources** you want to use on your machines.

-Fragments support all available resources except [triggers](/data-ai/data/react/alert-data/).
+Fragments support all available resources except [triggers](/data-ai/react/alert-data/).
You can even add other fragments inside a fragment.

{{< alert title="Tip" color="tip" >}}
@@ -296,7 +296,7 @@ This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operato
{{< /expand >}}
{{< expand "Modify data sync settings" >}}
-This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operator/update/set/#mongodb-update-up.-set) to change the sync interval for a [data management service](/data-ai/data/edge/capture-sync/) named `data-management` in the fragment:
+This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operator/update/set/#mongodb-update-up.-set) to change the sync interval for a [data management service](/data-ai/edge/capture-sync/) named `data-management` in the fragment:

```json {class="line-numbers linkable-line-numbers"}
"fragment_mods": [
@@ -341,7 +341,7 @@ The `version` field supports the following values:
{{< /expand >}}
{{< expand "Set a package version" >}}
-This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operator/update/set/#mongodb-update-up.-set) to configure [version update settings for a package](/data-ai/ai/train/deploy/#deploy-a-specific-version-of-an-ml-model) named `package_name` from the fragment:
+This example uses [`$set`](https://www.mongodb.com/docs/manual/reference/operator/update/set/#mongodb-update-up.-set) to configure [version update settings for a package](/data-ai/train/deploy/#deploy-a-specific-version-of-an-ml-model) named `package_name` from the fragment:

```json {class="line-numbers linkable-line-numbers"}
"fragment_mods": [
diff --git a/docs/manage/manage/rbac.md b/docs/manage/manage/rbac.md
index ceb0ec4b53..77a87bc6cc 100644
--- a/docs/manage/manage/rbac.md
+++ b/docs/manage/manage/rbac.md
@@ -102,7 +102,7 @@ Permissions for managing {{< glossary_tooltip term_id="fragment" text="fragments
## Data and machine learning

-Permissions for [data management](/data-ai/data/edge/capture-sync/) and [machine learning](/data-ai/ai/train/deploy/) are as follows:
+Permissions for [data management](/data-ai/edge/capture-sync/) and [machine learning](/data-ai/train/deploy/) are as follows:

| Permissions | Org owner | Org operator | Location owner | Location operator | Machine owner | Machine operator |
diff --git a/docs/manage/troubleshoot/alert.md b/docs/manage/troubleshoot/alert.md
index c4312e295b..cf5fdb82b6 100644
--- a/docs/manage/troubleshoot/alert.md
+++ b/docs/manage/troubleshoot/alert.md
@@ -34,7 +34,7 @@ You can receive alerts for the following events involving machine performance te
For example, you can configure a trigger to send you a notification when your machine's CPU usage reaches a certain threshold.
{{< alert title="Tip" color="tip" >}} -You can also configure alerts on any other machine data, for more information on that, see [Alert on data](/data-ai/data/react/alert-data/). +You can also configure alerts on any other machine data, for more information on that, see [Alert on data](/data-ai/react/alert-data/). {{< /alert >}} ## Data meets condition @@ -98,7 +98,7 @@ You can also see readings on the **CONTROL** tab. ### Configure data management -To capture or alert on the data from your configured sensor, you must add the [data management service](/data-ai/data/edge/capture-sync/) and configure it to capture and sync the sensor data: +To capture or alert on the data from your configured sensor, you must add the [data management service](/data-ai/edge/capture-sync/) and configure it to capture and sync the sensor data: {{< table >}} {{% tablestep number=1 %}} @@ -378,7 +378,7 @@ Click the **Save** button in the top right corner of the page to save your confi ## Data synced -You must [configure data capture](/data-ai/data/edge/capture-sync/) for your machine to use this trigger. +You must [configure data capture](/data-ai/edge/capture-sync/) for your machine to use this trigger. {{< tabs >}} {{% tab name="Builder mode" %}} diff --git a/docs/manage/troubleshoot/teleoperate/default-interface.md b/docs/manage/troubleshoot/teleoperate/default-interface.md index 24c83ba742..dab6f61b4c 100644 --- a/docs/manage/troubleshoot/teleoperate/default-interface.md +++ b/docs/manage/troubleshoot/teleoperate/default-interface.md @@ -45,7 +45,7 @@ Additionally, the app allows you to: - see if your machines are online - [view a machine's logs](/manage/troubleshoot/troubleshoot/#check-logs) -- [upload images from your phone to the cloud](/data-ai/ai/train/upload-external-data/#upload-images-with-the-viam-mobile-app) +- [upload images from your phone to the cloud](/data-ai/train/upload-external-data/#upload-images-with-the-viam-mobile-app) - [invite people to collaborate with you and modify access](/manage/troubleshoot/teleoperate/default-interface/#viam-mobile-app)
diff --git a/docs/operate/control/headless-app.md b/docs/operate/control/headless-app.md index 45894e52b0..91ce3d76ef 100644 --- a/docs/operate/control/headless-app.md +++ b/docs/operate/control/headless-app.md @@ -63,7 +63,7 @@ Windows is not supported. If you are using Windows, use the [Windows Subsystem for Linux (WSL)](https://learn.microsoft.com/en-us/windows/wsl/install) and install the Python SDK using the preceding instructions for Linux. For other unsupported systems, see [Installing from source](https://python.viam.dev/#installing-from-source). -If you intend to use the [ML (machine learning) model service](/data-ai/ai/train/deploy/), use the following command instead, which installs additional required dependencies along with the Python SDK: +If you intend to use the [ML (machine learning) model service](/data-ai/train/deploy/), use the following command instead, which installs additional required dependencies along with the Python SDK: ```sh {class="command-line" data-prompt="$"} pip install 'viam-sdk[mlmodel]' diff --git a/docs/operate/get-started/other-hardware/_index.md b/docs/operate/get-started/other-hardware/_index.md index 2de4dbda24..72b68bec1a 100644 --- a/docs/operate/get-started/other-hardware/_index.md +++ b/docs/operate/get-started/other-hardware/_index.md @@ -58,7 +58,7 @@ For C++ module examples, see the [C++ examples directory on GitHub](https://gith {{< expand "How and where do modules run?" >}} Modules run on your machine, alongside `viam-server` as separate processes, communicating with `viam-server` over UNIX sockets. -[`viam-server` manages](/operate/reference/viam-server/) the dependencies, start-up, reconfiguration, [data management](/data-ai/data/edge/capture-sync/), and shutdown behavior of your modular resource. +[`viam-server` manages](/operate/reference/viam-server/) the dependencies, start-up, reconfiguration, [data management](/data-ai/edge/capture-sync/), and shutdown behavior of your modular resource. The lifecycle of a module and the resources it provides is as follows: diff --git a/docs/operate/get-started/supported-hardware/_index.md b/docs/operate/get-started/supported-hardware/_index.md index 310b0cc821..554042414c 100644 --- a/docs/operate/get-started/supported-hardware/_index.md +++ b/docs/operate/get-started/supported-hardware/_index.md @@ -219,8 +219,8 @@ Modules in the list above are officially supported and maintained by Viam only i If you have other hardware you need to integrate with a custom module, continue to [Integrate other hardware](/operate/get-started/other-hardware/). 
If you have configured all your hardware, you can do a variety of things with your machine: -- [Capture data from your machines](/data-ai/data/edge/capture-sync/) -- [Create a dataset](/data-ai/ai/train/create-dataset/) and [train an AI model](/data-ai/ai/train/train-tflite/) +- [Capture data from your machines](/data-ai/edge/capture-sync/) +- [Create a dataset](/data-ai/train/create-dataset/) and [train an AI model](/data-ai/train/train-tflite/) - [Write an app](/operate/control/web-app/) to interact with your machines using any of the Viam SDKs - [Deploy control logic to run directly on your machines](/manage/software/control-logic/) - [Share the configuration across multiple machines](/manage/fleet/reuse-configuration/) diff --git a/docs/operate/mobility/use-input-to-act.md b/docs/operate/mobility/use-input-to-act.md index 45832f0566..02e61e7334 100644 --- a/docs/operate/mobility/use-input-to-act.md +++ b/docs/operate/mobility/use-input-to-act.md @@ -51,9 +51,9 @@ readings = await my_sensor.get_readings() Other common inputs include the methods of a [board](/dev/reference/apis/components/board/) (`GetGPIO`, `GetPWM`, `PWMFrequency`, `GetDigitalInterruptValue`, and `ReadAnalogReader`), or a [power sensor](/dev/reference/apis/components/power-sensor/) (`GetVoltage`, `GetCurrent`, `GetPower`, and `GetReadings`). You can also use camera input, for example to detect objects and pick them up with an arm. -See [Act based on inferences](/data-ai/ai/react/act/) for relevant examples. +See [Act based on inferences](/data-ai/react/act/) for relevant examples. -If you want to send alerts based on computer vision or captured data, see [Alert on inferences](/data-ai/ai/react/alert/) or [Alert on data](/data-ai/data/react/alert-data/). +If you want to send alerts based on computer vision or captured data, see [Alert on inferences](/data-ai/react/alert/) or [Alert on data](/data-ai/react/alert-data/). {{% /tablestep %}} {{% tablestep number=3 %}} diff --git a/docs/operate/reference/advanced-modules/_index.md b/docs/operate/reference/advanced-modules/_index.md index 342692eb87..bd26be4d2c 100644 --- a/docs/operate/reference/advanced-modules/_index.md +++ b/docs/operate/reference/advanced-modules/_index.md @@ -52,6 +52,6 @@ If you need to package and deploy a module using Docker, for example if your mod ## Design a custom ML model -When working with the [ML model service](/dev/reference/apis/services/ml/), you can deploy an [existing model](/data-ai/ai/train/deploy/) or [train your own model](/data-ai/ai/train/train/). +When working with the [ML model service](/dev/reference/apis/services/ml/), you can deploy an [existing model](/data-ai/train/deploy/) or [train your own model](/data-ai/train/train/). However, if you are writing your own {{< glossary_tooltip term_id="module" text="module" >}} that uses the ML model service together with the [vision service](/dev/reference/apis/services/vision/), you can also [design your own ML model](/data-ai/reference/mlmodel-design/) to better match your specific use case. diff --git a/docs/operate/reference/architecture/_index.md b/docs/operate/reference/architecture/_index.md index 2352ea7773..b4baceeaff 100644 --- a/docs/operate/reference/architecture/_index.md +++ b/docs/operate/reference/architecture/_index.md @@ -132,7 +132,7 @@ Data is captured and synced to the Viam Cloud as follows: If a device has intermittent internet connectivity, data is stored locally until the machine can reconnect to the cloud. 
-For more information, see [Data management service](/data-ai/data/edge/capture-sync/). +For more information, see [Data management service](/data-ai/edge/capture-sync/). ## Basic machine example diff --git a/docs/operate/reference/components/camera/_index.md b/docs/operate/reference/components/camera/_index.md index 9aed1dd74a..a8a4c7837c 100644 --- a/docs/operate/reference/components/camera/_index.md +++ b/docs/operate/reference/components/camera/_index.md @@ -90,13 +90,13 @@ Provide at least the width and height values to start. For general configuration, development, and usage info, see: {{< cards >}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{% card link="/operate/control/web-app/" noimage="true" %}} {{< /cards >}} You can also use the camera component with the following services: -- [Data management service](/data-ai/data/edge/capture-sync/): To capture and sync the camera's data +- [Data management service](/data-ai/edge/capture-sync/): To capture and sync the camera's data - [Vision service](/operate/reference/services/vision/): To use computer vision to interpret the camera stream - [SLAM service](/operate/reference/services/slam/): for mapping diff --git a/docs/operate/reference/components/camera/calibrate.md b/docs/operate/reference/components/camera/calibrate.md index 39753e4890..5a84afc681 100644 --- a/docs/operate/reference/components/camera/calibrate.md +++ b/docs/operate/reference/components/camera/calibrate.md @@ -114,5 +114,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/esp32-camera.md b/docs/operate/reference/components/camera/esp32-camera.md index b7d21d8c72..1284628614 100644 --- a/docs/operate/reference/components/camera/esp32-camera.md +++ b/docs/operate/reference/components/camera/esp32-camera.md @@ -40,7 +40,7 @@ Finish building and flashing custom firmware, then return to this guide. {{< alert title="Data management not supported" color="caution" >}} -The `esp32-camera` camera model does not currently support the [data management service](/data-ai/data/edge/capture-sync/). +The `esp32-camera` camera model does not currently support the [data management service](/data-ai/edge/capture-sync/). 
{{< /alert >}} @@ -201,5 +201,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/fake-micro-server.md b/docs/operate/reference/components/camera/fake-micro-server.md index deeb0d196a..9e63643a52 100644 --- a/docs/operate/reference/components/camera/fake-micro-server.md +++ b/docs/operate/reference/components/camera/fake-micro-server.md @@ -66,5 +66,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/fake.md b/docs/operate/reference/components/camera/fake.md index 70a20da105..8da4b34869 100644 --- a/docs/operate/reference/components/camera/fake.md +++ b/docs/operate/reference/components/camera/fake.md @@ -77,5 +77,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/ffmpeg.md b/docs/operate/reference/components/camera/ffmpeg.md index 6412774fd9..3e77f9c2fc 100644 --- a/docs/operate/reference/components/camera/ffmpeg.md +++ b/docs/operate/reference/components/camera/ffmpeg.md @@ -103,5 +103,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/image-file.md b/docs/operate/reference/components/camera/image-file.md index 9685535d74..c46450b2ba 100644 --- a/docs/operate/reference/components/camera/image-file.md +++ b/docs/operate/reference/components/camera/image-file.md @@ -92,5 +92,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/transform.md b/docs/operate/reference/components/camera/transform.md index 2d01d27ae3..8f8a839e2e 100644 --- a/docs/operate/reference/components/camera/transform.md +++ b/docs/operate/reference/components/camera/transform.md @@ -317,5 +317,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/camera/webcam.md b/docs/operate/reference/components/camera/webcam.md index 5510621764..408fec1994 100644 --- 
a/docs/operate/reference/components/camera/webcam.md +++ b/docs/operate/reference/components/camera/webcam.md @@ -266,7 +266,7 @@ If you are capturing camera data, it can happen that the camera captures and syn If you are using a CSI camera v1.3 or v2.0, or v3.0, use the [`viam:camera:csi` module](https://github.com/viamrobotics/csi-camera/) instead. For Raspberry Pi AI cameras like the IMX500 AI camera, use a module such as [this `viam-pi-ai-camera` vision service](https://github.com/HipsterBrown/viam-pi-ai-camera). -For more information about the vision service, see [run inference](https://docs.viam.com/data-ai/ai/infer/run-inference/). +For more information about the vision service, see [run inference](https://docs.viam.com/data-ai/infer/run-inference/). {{% /expand%}} {{% expand "High CPU usage" %}} @@ -281,5 +281,5 @@ For more configuration and usage info, see: {{< cards >}} {{% card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/encoder/_index.md b/docs/operate/reference/components/encoder/_index.md index c18fa4fec6..fcfabb8411 100644 --- a/docs/operate/reference/components/encoder/_index.md +++ b/docs/operate/reference/components/encoder/_index.md @@ -94,6 +94,6 @@ For general configuration, development, and usage info, see: You can also use the encoder component with the following services: -- [Data management service](/data-ai/data/edge/capture-sync/): To capture and sync the encoder's data +- [Data management service](/data-ai/edge/capture-sync/): To capture and sync the encoder's data - [Motion service](/operate/reference/services/motion/): To move machines or components of machines - [Navigation service](/operate/reference/services/navigation/): To navigate with GPS diff --git a/docs/operate/reference/components/movement-sensor/_index.md b/docs/operate/reference/components/movement-sensor/_index.md index 8799e968f4..6231aeac77 100644 --- a/docs/operate/reference/components/movement-sensor/_index.md +++ b/docs/operate/reference/components/movement-sensor/_index.md @@ -84,12 +84,12 @@ For general configuration and development info, see: {{< cards >}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{% card link="/operate/control/web-app/" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} To capture data from the movement sensor or use it for motion, see the following services: -- [data management service](/data-ai/data/edge/capture-sync/): to capture and sync the movement sensor's data +- [data management service](/data-ai/edge/capture-sync/): to capture and sync the movement sensor's data - [motion service](/operate/reference/services/motion/): to move machines or components of machines - [navigation service](/operate/reference/services/navigation/): to navigate with GPS - [SLAM service](/operate/reference/services/slam/): for mapping diff --git a/docs/operate/reference/components/power-sensor/_index.md b/docs/operate/reference/components/power-sensor/_index.md index 8c6ca77e80..22f3a2b90b 100644 --- a/docs/operate/reference/components/power-sensor/_index.md +++ b/docs/operate/reference/components/power-sensor/_index.md @@ -72,7 +72,7 @@ For general configuration and development info, see: {{< cards >}} 
{{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{% card link="/operate/control/web-app/" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} -To capture data from the power sensor and sync it in the cloud, see the [data management service](/data-ai/data/edge/capture-sync/). +To capture data from the power sensor and sync it in the cloud, see the [data management service](/data-ai/edge/capture-sync/). diff --git a/docs/operate/reference/components/sensor/_index.md b/docs/operate/reference/components/sensor/_index.md index 09ae4811ee..4c068e4026 100644 --- a/docs/operate/reference/components/sensor/_index.md +++ b/docs/operate/reference/components/sensor/_index.md @@ -90,7 +90,7 @@ For general configuration and development info, see: {{< cards >}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{% card link="/operate/control/web-app/" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{< /cards >}} -To capture data from the sensor, see the [data management service](/data-ai/data/edge/capture-sync/). +To capture data from the sensor, see the [data management service](/data-ai/edge/capture-sync/). diff --git a/docs/operate/reference/components/sensor/fake.md b/docs/operate/reference/components/sensor/fake.md index f06bf9568f..16eac85a31 100644 --- a/docs/operate/reference/components/sensor/fake.md +++ b/docs/operate/reference/components/sensor/fake.md @@ -62,6 +62,6 @@ Check out the [sensor API](/dev/reference/apis/components/sensor/) or check out {{< cards >}} {{% card link="/dev/reference/apis/components/sensor/" customTitle="Sensor API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/components/sensor/ultrasonic-micro-rdk.md b/docs/operate/reference/components/sensor/ultrasonic-micro-rdk.md index f57d08d2c7..4f5a778b4e 100644 --- a/docs/operate/reference/components/sensor/ultrasonic-micro-rdk.md +++ b/docs/operate/reference/components/sensor/ultrasonic-micro-rdk.md @@ -120,6 +120,6 @@ Check out the [sensor API](/dev/reference/apis/components/sensor/) or check out {{< cards >}} {{% card link="/dev/reference/apis/components/sensor/" customTitle="Sensor API" noimage="true" %}} -{{% card link="/data-ai/data/edge/capture-sync/" noimage="true" %}} +{{% card link="/data-ai/edge/capture-sync/" noimage="true" %}} {{% card link="/operate/get-started/supported-hardware/" noimage="true" %}} {{< /cards >}} diff --git a/docs/operate/reference/services/slam/cloudslam/_index.md b/docs/operate/reference/services/slam/cloudslam/_index.md index 3480835704..2830d51d13 100644 --- a/docs/operate/reference/services/slam/cloudslam/_index.md +++ b/docs/operate/reference/services/slam/cloudslam/_index.md @@ -19,7 +19,7 @@ SLAM Algorithms can have varying levels of resource requirements in order to run In order to better support running SLAM on resource limited machines, Viam provides a service to run SLAM algorithms for machines in the cloud as well as management of the maps generated in their location. CloudSLAM can be used with both a live machine or with previously captured data in your location. 
-In [live mode](#mapping-with-a-live-machine-online-mode) using the [data management service](/data-ai/data/edge/capture-sync/) and the [cloudslam-wrapper](https://github.com/viam-modules/cloudslam-wrapper) module, Viam takes your LiDAR camera and movement sensor data from your local machine and sends it to the cloudslam server.
+In [live mode](#mapping-with-a-live-machine-online-mode), Viam uses the [data management service](/data-ai/edge/capture-sync/) and the [cloudslam-wrapper](https://github.com/viam-modules/cloudslam-wrapper) module to take LiDAR camera and movement sensor data from your local machine and send it to the CloudSLAM server.
 The CloudSLAM server will then process that data and produce a map that can then be used on any machine in your location.

When using an [offline machine](#using-previously-captured-data-offline-mode), you can select data from specific sensors over a period of time to build a map with.
@@ -76,8 +76,8 @@ To use CloudSLAM on a live machine, you must meet the following requirements:

To use CloudSLAM you must enable data capture and configure your `cloudslam-wrapper` SLAM service:

{{< alert title="Tip: Managing Data Capture" color="tip" >}}
-Note that when the [data management service](/data-ai/data/edge/capture-sync/) is enabled, it continuously monitors and syncs your machine’s sensor data while the machine is running.
-To avoid incurring charges while not in use, [turn off data capture for your sensors](/data-ai/data/edge/capture-sync/#stop-data-capture-or-data-sync) once you have finished your SLAM session.
+Note that when the [data management service](/data-ai/edge/capture-sync/) is enabled, it continuously monitors and syncs your machine’s sensor data while the machine is running.
+To avoid incurring charges while not in use, [turn off data capture for your sensors](/data-ai/edge/capture-sync/#stop-data-capture-or-data-sync) once you have finished your SLAM session.
{{< /alert >}}

{{< tabs name="Create new map">}}
@@ -92,7 +92,7 @@ To avoid incurring charges while not in use, [turn off data capture for your sen

   On the panel that appears, you can manage the capturing and syncing functions.
   You can also specify the **directory**, the sync **interval**, and any **tags** to apply to captured data.

-   See the [data management service](/data-ai/data/edge/capture-sync/) for more information.
+   See the [data management service](/data-ai/edge/capture-sync/) for more information.

2. Enable data capture for your camera, and for your movement sensor if you would like to use IMU data, odometry data, or both:

@@ -183,7 +183,7 @@ You _do not_ need to configure data capture on the individual IMU and odometer.
This example JSON configuration: - adds the `viam:rplidar`, `viam:cartographer`, and `viam:cloudslam-wrapper` modules -- configures the `viam:slam:cartographer`, `viam:cloudslam-wrapper:cloudslam`, and the [data management](/data-ai/data/edge/capture-sync/) services +- configures the `viam:slam:cartographer`, `viam:cloudslam-wrapper:cloudslam`, and the [data management](/data-ai/edge/capture-sync/) services - adds a `viam:lidar:rplidar` camera with data capture configured ```json {class="line-numbers linkable-line-numbers"} diff --git a/docs/operate/reference/services/vision/_index.md b/docs/operate/reference/services/vision/_index.md index 14112ebf42..e776bba51b 100644 --- a/docs/operate/reference/services/vision/_index.md +++ b/docs/operate/reference/services/vision/_index.md @@ -4,5 +4,5 @@ linkTitle: "Vision Service" weight: 20 type: "docs" layout: "empty" -canonical: "/data-ai/ai/infer/run-inference/" +canonical: "/data-ai/infer/run-inference/" --- diff --git a/docs/operate/reference/services/vision/mlmodel.md b/docs/operate/reference/services/vision/mlmodel.md index e07dc66e0d..2770a3ced2 100644 --- a/docs/operate/reference/services/vision/mlmodel.md +++ b/docs/operate/reference/services/vision/mlmodel.md @@ -29,14 +29,14 @@ Before configuring your `mlmodel` detector or classifier, you need to:

1. Train or upload an ML model

-You can add an [existing model](/data-ai/ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service) or [train a TFlite](/data-ai/ai/train/train-tflite/) or [another model](/data-ai/ai/train/train/) for object detection and classification using your data in the [Viam Cloud](/data-ai/data/edge/capture-sync/).
+You can add an [existing model](/data-ai/train/deploy/#deploy-your-ml-model-on-an-ml-model-service), or train a [TFlite model](/data-ai/train/train-tflite/) or [another model](/data-ai/train/train/) for object detection and classification using your data in the [Viam Cloud](/data-ai/edge/capture-sync/).

{{% /manualcard %}}
{{% manualcard %}}

2. Deploy your ML model

-To use ML models with your machine, use a suitable [ML model service](/data-ai/ai/train/deploy/) to deploy and run the model. +To use ML models with your machine, use a suitable [ML model service](/data-ai/train/deploy/) to deploy and run the model. {{% /manualcard %}} {{< /cards >}} @@ -125,7 +125,7 @@ The following attributes are available for an `mlmodel` detector or classifier: | Parameter | Type | Required? | Description | | --------- | ---- | --------- | ----------- | -| `mlmodel_name` | string | **Required** | The name of the [ML model service](/data-ai/ai/train/deploy/) you want to use the model from. | +| `mlmodel_name` | string | **Required** | The name of the [ML model service](/data-ai/train/deploy/) you want to use the model from. | | `remap_output_names` | object | Optional | The names of your output tensors, mapped to the service requirements. See [Tensor names](#tensor-names) for more information. | | `remap_input_names` | object | Optional | The name of your input tensor, mapped to the service requirements. See [Tensor names](#tensor-names) for more information. | | `input_image_bgr` | bool | Optional | Set this to `true` if the ML model service expects the input image to have BGR pixels, rather than RGB pixels.
Default: `false` | @@ -146,7 +146,7 @@ Both the `mlmodel` detector and classifier require that the input and output ten - The _input tensor_ must be named `image` - The _output tensor_ must be named `probability` -If you [trained a TFlite ML model using the Viam app](/data-ai/ai/train/train-tflite/), your `mlmodel` tensors are already named in this fashion, and you can proceed to [test your detector or classifier](#test-your-detector-or-classifier). +If you [trained a TFlite ML model using the Viam app](/data-ai/train/train-tflite/), your `mlmodel` tensors are already named in this fashion, and you can proceed to [test your detector or classifier](#test-your-detector-or-classifier). However, if you uploaded your own ML model, or are using one from the [Viam Registry](https://app.viam.com/registry), you may need to remap your tensor names to meet this requirement, and should follow the instructions to [remap tensor names](#remap-tensor-names). #### Remap tensor names @@ -219,7 +219,7 @@ The feature is only available for classifiers that were uploaded after September {{}} -If you have images stored in the [Viam Cloud](/data-ai/data/edge/capture-sync/), you can run your classifier against your images in the [Viam app](https://app.viam.com/). +If you have images stored in the [Viam Cloud](/data-ai/edge/capture-sync/), you can run your classifier against your images in the [Viam app](https://app.viam.com/). 1. Navigate to the [Data tab](https://app.viam.com/data/view) and click on the **Images** subtab. 2. Click on an image to open the side menu, and select the **Actions** tab under the **Data** tab. diff --git a/docs/tutorials/configure/pet-photographer.md b/docs/tutorials/configure/pet-photographer.md index 971e81ab9a..de38a48040 100644 --- a/docs/tutorials/configure/pet-photographer.md +++ b/docs/tutorials/configure/pet-photographer.md @@ -24,7 +24,7 @@ After following this tutorial, you will understand how to control sync parameter Note: Consider this tutorial alongside filtered camera tutorial. --> -If your machine [captures](/data-ai/data/edge/capture-sync/) a lot of data, you might want to filter captured data to selectively store only the data you are interested in. +If your machine [captures](/data-ai/edge/capture-sync/) a lot of data, you might want to filter captured data to selectively store only the data you are interested in. For example, you might want to use your smart machine's camera to capture images based on specific criteria, such as the presence of a certain color, and omit captured images that don't meet that criteria. In this tutorial, you will use a custom {{< glossary_tooltip term_id="module" text="module" >}} to function as a color filter, and use it with a [camera](/operate/reference/components/camera/) to only capture images where your pet is in the frame in the following way: @@ -161,7 +161,7 @@ For more information, refer to [Write your new resource model definition](/opera The filter function in your custom filter module must contain two critical elements: -1. A utility function that will check if the caller of the filter function is the [data management service](/data-ai/data/edge/capture-sync/). +1. A utility function that will check if the caller of the filter function is the [data management service](/data-ai/edge/capture-sync/). 1. A safeguard that ensures if the data management service is not the caller, an error and the unfiltered data is returned. 
{{< alert title="Important" color="note" >}} @@ -827,12 +827,12 @@ Whether you've downloaded the `colorfilter` module, or written your own color fi Next, add the following services to your smart machine to support the color filter module: -- The [data management service](/data-ai/data/edge/capture-sync/) enables your smart machine to capture data and sync it to the cloud. +- The [data management service](/data-ai/edge/capture-sync/) enables your smart machine to capture data and sync it to the cloud. - The [vision service](/dev/reference/apis/services/vision/#detections) enables your smart machine to perform color detection on objects in a camera stream. ### Add the data management service -To enable data capture on your machine, add and configure the [data management service](/data-ai/data/edge/capture-sync/) to capture and store data on your machine's computer: +To enable data capture on your machine, add and configure the [data management service](/data-ai/edge/capture-sync/) to capture and store data on your machine's computer: {{< tabs >}} {{% tab name="Config Builder" %}} @@ -849,7 +849,7 @@ To enable data capture on your machine, add and configure the [data management s ![An instance of the data management service named "dm". The cloud sync and capturing options are toggled on and the directory is empty. The interval is set to 0.1](/tutorials/pet-photographer/data-management-services.png) - For more detailed information, see [Add the data management service](/data-ai/data/edge/capture-sync/). + For more detailed information, see [Add the data management service](/data-ai/edge/capture-sync/). {{% /tab %}} {{% tab name="JSON Template" %}} Add the data management service to the services array in your rover’s raw JSON configuration: @@ -1005,5 +1005,5 @@ Try these other tutorials for more on working with the data management and visio {{% card link="/tutorials/projects/pet-treat-dispenser/" %}} {{% card link="/tutorials/projects/guardian/" %}} {{% card link="/tutorials/projects/send-security-photo/" %}} -{{% card link="/data-ai/ai/train/deploy/" %}} +{{% card link="/data-ai/train/deploy/" %}} {{< /cards >}} diff --git a/docs/tutorials/control/air-quality-fleet.md b/docs/tutorials/control/air-quality-fleet.md index 3be4751702..a344769657 100644 --- a/docs/tutorials/control/air-quality-fleet.md +++ b/docs/tutorials/control/air-quality-fleet.md @@ -235,7 +235,7 @@ Once you understand how to configure machines and use fragments, you can use [Pr #### Configure data capture and sync You have configured the sensor so the board can communicate with it, but sensor data is not yet being saved anywhere. -Viam's [data management service](/data-ai/data/edge/capture-sync/) lets you capture data locally from each sensor and then sync it to the cloud where you can access historical sensor data and see trends over time. +Viam's [data management service](/data-ai/edge/capture-sync/) lets you capture data locally from each sensor and then sync it to the cloud where you can access historical sensor data and see trends over time. Once you configure the rest of your sensing machines, you'll be able to remotely access data from all sensors in all locations, and when you're ready, you can give customers [access](/manage/manage/access/) to the data from the sensors in their locations. 
Configure data capture and sync as follows: diff --git a/docs/tutorials/projects/claw-game.md b/docs/tutorials/projects/claw-game.md index 31a65841a6..0987a3d3a8 100644 --- a/docs/tutorials/projects/claw-game.md +++ b/docs/tutorials/projects/claw-game.md @@ -932,7 +932,7 @@ In this tutorial, you learned how to: For some next steps, you could: - Use the advanced interface included in the project repository to leverage the [motion service](/operate/reference/services/motion/) for larger, more complex arm movement within the enclosure. -- Add a camera and use the [vision service](/operate/reference/services/vision/) to add color detection, or use an [ML model](/data-ai/ai/train/deploy/) to determine grab success rate and create a score counter. +- Add a camera and use the [vision service](/operate/reference/services/vision/) to add color detection, or use an [ML model](/data-ai/train/deploy/) to determine grab success rate and create a score counter. - Design a hard mode where the prizes are shuffled around with the arm every few attempts. - Add a camera and extend the interface to allow folks from anywhere in the world to play the claw game and win. diff --git a/docs/tutorials/projects/helmet.md b/docs/tutorials/projects/helmet.md index a7ea8626ed..e22d879759 100644 --- a/docs/tutorials/projects/helmet.md +++ b/docs/tutorials/projects/helmet.md @@ -197,7 +197,7 @@ Now that the detector is configured, it's time to test it! ## Configure data capture and sync -Viam's built-in [data management service](/data-ai/data/edge/capture-sync/) allows you to, among other things, capture images and sync them to the cloud. +Viam's built-in [data management service](/data-ai/edge/capture-sync/) allows you to, among other things, capture images and sync them to the cloud. For this project, you will capture images of people without hard hats so that you can see who wasn't wearing one, and so that you can trigger notifications when these images are captured and synced. Configure data capture on the `objectfilter` camera to capture images of people without hard hats: @@ -251,7 +251,7 @@ Now that you have verified that the detector and data sync are working, modify y ## Set up email notifications -[Triggers](/data-ai/ai/react/alert/) allow you to send webhook requests or email notifications when certain events happen. +[Triggers](/data-ai/react/alert/) allow you to send webhook requests or email notifications when certain events happen. For example, you can set up a trigger to perform an action whenever an image of someone without a hard hat is uploaded to the cloud. @@ -460,10 +460,10 @@ Here are some ways you could expand on this project: - Change your cloud function to send a different kind of notification, or trigger some other action. For an example demonstrating how to configure text notifications, see the [Detect a Person and Send a Photo tutorial](/tutorials/projects/send-security-photo/). -- Use a different existing model or [train your own](/data-ai/ai/train/train-tflite/), to detect and send notifications about something else such as [forklifts](https://huggingface.co/keremberke/yolov8m-forklift-detection) appearing in your camera stream. +- Use a different existing model or [train your own](/data-ai/train/train-tflite/), to detect and send notifications about something else such as [forklifts](https://huggingface.co/keremberke/yolov8m-forklift-detection) appearing in your camera stream. 
{{< cards >}} {{% card link="/tutorials/projects/send-security-photo/" %}} -{{% card link="/data-ai/ai/train/train-tflite/" %}} +{{% card link="/data-ai/train/train-tflite/" %}} {{% card link="/tutorials/services/navigate-with-rover-base/" %}} {{< /cards >}} diff --git a/docs/tutorials/projects/integrating-viam-with-openai.md b/docs/tutorials/projects/integrating-viam-with-openai.md index 7148288ec9..a692961d8b 100644 --- a/docs/tutorials/projects/integrating-viam-with-openai.md +++ b/docs/tutorials/projects/integrating-viam-with-openai.md @@ -242,7 +242,7 @@ We found that if set up this way, the following positions accurately show the co ### 2. Configure the ML Model and vision services to use the detector -The [ML model service](/data-ai/ai/train/deploy/) allows you to deploy a machine learning model to your robot. +The [ML model service](/data-ai/train/deploy/) allows you to deploy a machine learning model to your robot. This tutorial uses a pre-trained machine learning (ML) model from the Viam Registry named [`EfficientDet-COCO`](https://app.viam.com/ml-model/viam-labs/EfficientDet-COCO). This model can detect a variety of objects, which you can find in the provided [labels.txt](https://github.com/viam-labs/devrel-demos/raw/main/Light%20up%20bot/labels.txt) file. @@ -422,6 +422,6 @@ Some ideas: - Make the voice recognition software listen in the background, so the robot can move and interact with the world while listening and responding. - Integrate another ML model that is used to follow a human (when told to do so). - Add Lidar and integrate Viam's {{< glossary_tooltip term_id="slam" text="SLAM service" >}} to map the world around it. -- Use Viam's [Data Management](/data-ai/data/edge/capture-sync/) to collect environmental data and use this data to train new ML models that allow the robot to improve its functionality. +- Use Viam's [Data Management](/data-ai/edge/capture-sync/) to collect environmental data and use this data to train new ML models that allow the robot to improve its functionality. We'd love to see where you decide to take this. If you build your own companion robot, let us and others know on the [Community Discord](https://discord.gg/viam). diff --git a/docs/tutorials/projects/send-security-photo.md b/docs/tutorials/projects/send-security-photo.md index eb6f9d6e77..810adf8457 100644 --- a/docs/tutorials/projects/send-security-photo.md +++ b/docs/tutorials/projects/send-security-photo.md @@ -24,7 +24,7 @@ Maybe someone is eating your chocolates when you are away. You're not sure who, but you suspect Steve. This robot will help you catch the culprit. -When someone comes to your desk, the robot will use the [vision service](/operate/reference/services/vision/) and the [ML model service](/data-ai/ai/train/deploy/) to detect a person, take their photo, and text you an alert with a photo of the person. +When someone comes to your desk, the robot will use the [vision service](/operate/reference/services/vision/) and the [ML model service](/data-ai/train/deploy/) to detect a person, take their photo, and text you an alert with a photo of the person. ![Text message reading "Alert There is someone at your desk beware" with a photo of a person (Steve) detected by the camera as he approaches the desk.](/tutorials/send-security-photo/text-message.png) @@ -88,7 +88,7 @@ This tutorial uses a pre-trained Machine Learning model from the Viam Registry c The model can detect a variety of things, including `Persons`. 
You can see a full list of what the model can detect in [labels.txt](https://github.com/viam-labs/devrel-demos/raw/main/Light%20up%20bot/labels.txt) file.

-If you want to train your own model instead, follow the instructions to [train a TFlite](/data-ai/ai/train/train-tflite/) or [another model](/data-ai/ai/train/train/).
+If you want to train your own model instead, follow the instructions to train a [TFlite model](/data-ai/train/train-tflite/) or [another model](/data-ai/train/train/).

1. **Configure the ML model service**

diff --git a/docs/tutorials/projects/verification-system.md b/docs/tutorials/projects/verification-system.md
index 4609f3fd6f..a9d79f054b 100644
--- a/docs/tutorials/projects/verification-system.md
+++ b/docs/tutorials/projects/verification-system.md
@@ -90,7 +90,7 @@ In order for your machine's camera to detect the presence of a person in its fie

### Use an existing ML model

-The [ML model service](/data-ai/ai/train/deploy/) allows you to deploy a machine learning model to your robot.
+The [ML model service](/data-ai/train/deploy/) allows you to deploy a machine learning model to your robot.
For your machine to be able to detect people, you will use a Machine Learning model from the Viam Registry called [`EfficientDet-COCO`](https://app.viam.com/ml-model/viam-labs/EfficientDet-COCO).
The model can detect a variety of things which you can see in [labels.txt](https://github.com/viam-labs/devrel-demos/raw/main/Light%20up%20bot/labels.txt) file including `person`s.

@@ -172,9 +172,9 @@ Then, train a new model using that model:

{{}}

1. When you have created bounding boxes for all `person` objects in the image, click the right arrow key to navigate to the next image.
   Repeat the process for each image in your dataset, drawing bounding boxes for every person in every image.
-1. [Train a TFlite model on your dataset](/data-ai/ai/train/train-tflite/).
+1. [Train a TFlite model on your dataset](/data-ai/train/train-tflite/).
   Give it the name `"persondetect"`, and select **Object Detection** as the **Model Type**.
-1. [Deploy the model](/data-ai/ai/train/deploy/) to your machine so it can be used by other services, such as the vision service.
+1. [Deploy the model](/data-ai/train/deploy/) to your machine so it can be used by other services, such as the vision service.

Finally, configure an `mlmodel` detector to use your new `"persondetect"` ML model:

diff --git a/docs/tutorials/services/visualize-data-grafana.md b/docs/tutorials/services/visualize-data-grafana.md
index 462e73e32e..a9aed1d0f7 100644
--- a/docs/tutorials/services/visualize-data-grafana.md
+++ b/docs/tutorials/services/visualize-data-grafana.md
@@ -19,7 +19,7 @@ no_list: true

-Once you have [configured data query](/data-ai/data/cloud/query/#query-data-using-third-party-tools) for your organization's data store, you can visualize your data from a variety of third-party tools, including Grafana.
+Once you have [configured data query](/data-ai/cloud/query/#query-data-using-third-party-tools) for your organization's data store, you can visualize your data from a variety of third-party tools, including Grafana.

You can choose to visualize data from a component on one machine, from multiple components together, or from many components across a fleet of machines, all from a single pane of glass.

@@ -31,7 +31,7 @@ Follow the steps in this tutorial to learn how to collect data from your machine

{{% alert title="Info" color="info" %}}
This tutorial focuses on using Grafana to visualize your captured data.
-For general guidance appropriate for any third-party visualization tool, see [Visualize data](/data-ai/data/cloud/visualize/). +For general guidance appropriate for any third-party visualization tool, see [Visualize data](/data-ai/cloud/visualize/). {{% /alert %}} {{}} @@ -73,7 +73,7 @@ First, add the data management service to your machine to be able capture and sy {{< imgproc src="/tutorials/data-management/data-management-conf.png" alt="The data management service configuration pane with default settings shown for both capturing and syncing" resize="900x" >}} -For more information, see [data management service configuration](/data-ai/data/edge/capture-sync/). +For more information, see [data management service configuration](/data-ai/edge/capture-sync/). ### Configure data capture for a component @@ -98,17 +98,17 @@ To enable data capture for a sensor component: After a short while, your sensor will begin capturing live readings, and syncing those readings to the Viam app. You can check that data is being captured and synced by clicking on the menu icon on the sensor configuration pane. and selecting **View captured data**. -For more information see [data management service configuration](/data-ai/data/edge/capture-sync/). +For more information see [data management service configuration](/data-ai/edge/capture-sync/). ### Configure data query Next, enable the ability to query your synced data. When you sync captured data to Viam, that data is stored in the Viam organization’s [MongoDB Atlas Data Federation](https://www.mongodb.com/docs/atlas/data-federation/overview/) instance. -Configuring data query allows you to directly [query your data](/data-ai/data/cloud/query/#query-data-in-the-viam-app) using the Viam app or a compatible client (such as `mongosh`), but also allows Grafana to access that data and visualize it. +Configuring data query allows you to directly [query your data](/data-ai/cloud/query/#query-data-in-the-viam-app) using the Viam app or a compatible client (such as `mongosh`), but also allows Grafana to access that data and visualize it. To enable data query: -1. Follow the steps to [configure data query](/data-ai/data/cloud/query/#query-data-in-the-viam-app). +1. Follow the steps to [configure data query](/data-ai/cloud/query/#query-data-in-the-viam-app). 1. Note the username and hostname returned from these steps, in addition to the password you chose for that user. You will use this information in the next section. @@ -141,7 +141,7 @@ With your machine capturing data and syncing it to Viam, and direct query of tha ``` The connection string is specific to your organization ID and configured user. - You must have followed the steps under [configure data query](/data-ai/data/cloud/query/#configure-data-query) previously in order for this URL to be valid. + You must have followed the steps under [configure data query](/data-ai/cloud/query/#configure-data-query) previously in order for this URL to be valid. - **Credentials: User**: Enter the following username, substituting your organization ID as determined earlier, for ``: @@ -155,7 +155,7 @@ With your machine capturing data and syncing it to Viam, and direct query of tha db-user-abcdef12-abcd-abcd-abcd-abcdef123456 ``` - - **Credentials: Password**: Enter the password you provided when you [configured data query](/data-ai/data/cloud/query/#configure-data-query) previously. 
+ - **Credentials: Password**: Enter the password you provided when you [configured data query](/data-ai/cloud/query/#configure-data-query) previously. {{}} @@ -222,8 +222,8 @@ See Grafana's [Global variables documentation](https://grafana.com/docs/grafana/ In this tutorial, you learned: -- how to use the [data management service](/data-ai/data/edge/capture-sync/) to capture data from your machine and sync it to the Viam app -- how to [enable data query access](/data-ai/data/cloud/query/#configure-data-query) to your synced data +- how to use the [data management service](/data-ai/edge/capture-sync/) to capture data from your machine and sync it to the Viam app +- how to [enable data query access](/data-ai/cloud/query/#configure-data-query) to your synced data - how to connect Grafana to your data - how to build a dashboard visualizing that data - how to use query language to dynamically update the visualization based on UI selections diff --git a/layouts/docs/tutorials.html b/layouts/docs/tutorials.html index 82f46d4108..efe18326de 100644 --- a/layouts/docs/tutorials.html +++ b/layouts/docs/tutorials.html @@ -92,7 +92,7 @@

Javascript

{{ partial "tutorialcard-no-js.html" (dict "link" "/tutorials/services/constrain-motion/") }} {{ partial "tutorialcard-no-js.html" (dict "link" "/tutorials/services/color-detection-scuttle/") }} {{ partial "tutorialcard-no-js.html" (dict "link" "/tutorials/services/webcam-line-follower-robot/") }} - {{ partial "tutorialcard-no-js.html" (dict "link" "/data-ai/ai/train/train-tflite/") }} + {{ partial "tutorialcard-no-js.html" (dict "link" "/data-ai/train/train-tflite/") }} diff --git a/static/include/app/apis/generated/mltraining.md b/static/include/app/apis/generated/mltraining.md index 7aabb1eefa..be23944d90 100644 --- a/static/include/app/apis/generated/mltraining.md +++ b/static/include/app/apis/generated/mltraining.md @@ -55,12 +55,12 @@ For more information, see the [Python SDK Docs](https://python.viam.dev/autoapi/ ```ts {class="line-numbers linkable-line-numbers"} await mlTrainingClient.submitTrainingJob( - '', - '', - '', - '1.0.0', + "", + "", + "", + "1.0.0", ModelType.SINGLE_LABEL_CLASSIFICATION, - ['tag1', 'tag2'] + ["tag1", "tag2"], ); ``` @@ -72,7 +72,7 @@ For more information, see the [TypeScript SDK Docs](https://ts.viam.dev/classes/ ### SubmitCustomTrainingJob Submit a training job from a custom training script. -Follow the guide to [Train a Model with a Custom Python Training Script](/data-ai/ai/train/). +Follow the guide to [Train a Model with a Custom Python Training Script](/data-ai/train/). {{< tabs >}} {{% tab name="Python" %}} @@ -125,12 +125,12 @@ For more information, see the [Python SDK Docs](https://python.viam.dev/autoapi/ ```ts {class="line-numbers linkable-line-numbers"} await mlTrainingClient.submitCustomTrainingJob( - '', - '', - 'viam:classification-tflite', - '1.0.0', - '', - '1.0.0' + "", + "", + "viam:classification-tflite", + "1.0.0", + "", + "1.0.0", ); ``` @@ -177,7 +177,7 @@ For more information, see the [Python SDK Docs](https://python.viam.dev/autoapi/ **Example:** ```ts {class="line-numbers linkable-line-numbers"} -const job = await mlTrainingClient.getTrainingJob(''); +const job = await mlTrainingClient.getTrainingJob(""); ``` For more information, see the [TypeScript SDK Docs](https://ts.viam.dev/classes/MlTrainingClient.html#gettrainingjob). @@ -228,8 +228,8 @@ For more information, see the [Python SDK Docs](https://python.viam.dev/autoapi/ ```ts {class="line-numbers linkable-line-numbers"} const jobs = await mlTrainingClient.listTrainingJobs( - '', - TrainingStatus.RUNNING + "", + TrainingStatus.RUNNING, ); ``` @@ -280,7 +280,7 @@ For more information, see the [Python SDK Docs](https://python.viam.dev/autoapi/ **Example:** ```ts {class="line-numbers linkable-line-numbers"} -await mlTrainingClient.cancelTrainingJob(''); +await mlTrainingClient.cancelTrainingJob(""); ``` For more information, see the [TypeScript SDK Docs](https://ts.viam.dev/classes/MlTrainingClient.html#canceltrainingjob). @@ -326,7 +326,7 @@ For more information, see the [Python SDK Docs](https://python.viam.dev/autoapi/ **Example:** ```ts {class="line-numbers linkable-line-numbers"} -await mlTrainingClient.deleteCompletedTrainingJob(''); +await mlTrainingClient.deleteCompletedTrainingJob(""); ``` For more information, see the [TypeScript SDK Docs](https://ts.viam.dev/classes/MlTrainingClient.html#deletecompletedtrainingjob). 
diff --git a/static/include/app/apis/overrides/protos/mltraining.SubmitCustomTrainingJob.md b/static/include/app/apis/overrides/protos/mltraining.SubmitCustomTrainingJob.md index fd12ae964b..23a05de363 100644 --- a/static/include/app/apis/overrides/protos/mltraining.SubmitCustomTrainingJob.md +++ b/static/include/app/apis/overrides/protos/mltraining.SubmitCustomTrainingJob.md @@ -1,2 +1,2 @@ Submit a training job from a custom training script. -Follow the guide to [Train a Model with a Custom Python Training Script](/data-ai/ai/train/). +Follow the guide to [Train a Model with a Custom Python Training Script](/data-ai/train/). From dbf6b6d5a65dd1c21ad4070b6b61bb20646b2eda Mon Sep 17 00:00:00 2001 From: nathan contino Date: Wed, 14 May 2025 09:34:48 -0400 Subject: [PATCH 6/7] Implement Ian feedback --- docs/data-ai/_index.md | 6 +- docs/data-ai/cloud/export.md | 2 +- docs/data-ai/edge/_index.md | 110 +++++++++++++++- docs/data-ai/get-started/annotate-images.md | 4 +- docs/data-ai/get-started/capture-images.md | 4 +- .../get-started/create-training-dataset.md | 6 +- docs/data-ai/get-started/how-sync-works.md | 118 ------------------ docs/data-ai/get-started/quickstart.md | 6 + docs/data-ai/{react => infer}/act.md | 0 docs/data-ai/{react => infer}/alert-data.md | 0 docs/data-ai/{react => infer}/alert.md | 2 +- docs/data-ai/infer/run-inference.md | 4 +- docs/data-ai/infer/track.md | 9 ++ docs/data-ai/react/_index.md | 10 -- docs/data-ai/train/_index.md | 6 +- docs/data-ai/train/train-tflite.md | 2 +- docs/data-ai/train/upload-external-data.md | 2 +- docs/dev/reference/changelog.md | 2 +- docs/manage/fleet/reuse-configuration.md | 2 +- docs/manage/troubleshoot/alert.md | 2 +- docs/operate/mobility/use-input-to-act.md | 4 +- docs/tutorials/projects/helmet.md | 2 +- 22 files changed, 148 insertions(+), 155 deletions(-) delete mode 100644 docs/data-ai/get-started/how-sync-works.md rename docs/data-ai/{react => infer}/act.md (100%) rename docs/data-ai/{react => infer}/alert-data.md (100%) rename docs/data-ai/{react => infer}/alert.md (99%) create mode 100644 docs/data-ai/infer/track.md delete mode 100644 docs/data-ai/react/_index.md diff --git a/docs/data-ai/_index.md b/docs/data-ai/_index.md index 7b443e3c74..7823ce82f7 100644 --- a/docs/data-ai/_index.md +++ b/docs/data-ai/_index.md @@ -41,7 +41,7 @@ You can also monitor your machines through teleop, power your application logic, {{% card link="/data-ai/edge/conditional-sync/" noimage="true" %}} {{% card link="/data-ai/cloud/query/" noimage="true" %}} {{% card link="/data-ai/cloud/visualize/" noimage="true" %}} -{{% card link="/data-ai/react/alert-data/" noimage="true" %}} +{{% card link="/data-ai/infer/alert-data/" noimage="true" %}} {{% card link="/data-ai/cloud/export/" noimage="true" %}} {{< /cards >}} {{< /how-to-expand >}} @@ -52,8 +52,8 @@ You can also monitor your machines through teleop, power your application logic, {{% card link="/data-ai/train/train/" noimage="true" %}} {{% card link="/data-ai/train/deploy/" noimage="true" %}} {{% card link="/data-ai/infer/run-inference/" noimage="true" %}} -{{% card link="/data-ai/react/alert/" noimage="true" %}} -{{% card link="/data-ai/react/act/" noimage="true" %}} +{{% card link="/data-ai/infer/alert/" noimage="true" %}} +{{% card link="/data-ai/infer/act/" noimage="true" %}} {{< /cards >}} {{< /how-to-expand >}} diff --git a/docs/data-ai/cloud/export.md b/docs/data-ai/cloud/export.md index f116808163..e244830bc6 100644 --- a/docs/data-ai/cloud/export.md +++ b/docs/data-ai/cloud/export.md @@ 
-15,7 +15,7 @@ aliases:
viamresources: ["sensor", "data_manager"]
platformarea: ["data", "cli"]
date: "2024-12-03"
-next: "/data-ai/react/alert-data/"
+next: "/data-ai/infer/alert-data/"
---

You can download machine data from cloud storage to your computer with the Viam CLI.
diff --git a/docs/data-ai/edge/_index.md b/docs/data-ai/edge/_index.md
index e91c727301..26e438b84b 100644
--- a/docs/data-ai/edge/_index.md
+++ b/docs/data-ai/edge/_index.md
@@ -1,6 +1,6 @@
---
-linkTitle: "Ingest data"
-title: "Ingest"
+linkTitle: "Capture"
+title: "Capture"
weight: 20
layout: "empty"
type: "docs"
@@ -9,3 +9,109 @@ empty_node: true
open_on_desktop: true
header_only: true
---
+
+Data capture and cloud sync work differently for `viam-server` and `viam-micro-server`.
+
+{{< tabs >}}
+{{% tab name="viam-server" %}}
+
+The data is captured locally on the machine's storage and, by default, stored in the `~/.viam/capture` directory.
+For Linux root or sudo users, the `~/.viam/capture` directory resolves to `/root/.viam/capture`.
+
+{{% expand "Can't find the directory data is stored in? Click here." %}}
+
+The relative path for the data capture directory depends on where `viam-server` is run from, as well as the operating system of the machine.
+
+To find the `$HOME` value, check your machine's logs on startup, which list it among the environment variables:
+
+```sh
+2025-01-15T14:27:26.073Z INFO rdk server/entrypoint.go:77 Starting viam-server with following environment variables {"HOME":"/home/johnsmith"}
+```
+
+{{% /expand%}}
+
+If a machine restarts for any reason, data capture automatically resumes and any data already stored but not yet synced is synced.
+
+The service can capture data from multiple resources at the same or different frequencies.
+The service does not impose a lower or upper limit on the frequency of data collection.
+However, in practice, your hardware may impose limits on the frequency of data collection.
+Avoid configuring data capture to higher rates than your hardware can handle, as this could lead to performance degradation.
+
+Data capture is frequently used with cloud sync.
+You can start and stop capture and sync independently.
+You can also enable cloud sync without data capture; sync then uploads the data in the capture directory, as well as any additional sync paths configured in the `viam-server` config.
+If you place data such as images or files in the `~/.viam/capture` directory, or in another directory set up for sync with the data manager (for example, through the `"additional_sync_paths"` config attribute), the data manager syncs that data to the cloud.
+
+{{% /tab %}}
+{{% tab name="viam-micro-server" %}}
+
+The data is captured in the ESP32's flash memory until it is uploaded to the Viam Cloud.
+
+If the machine restarts before all data is synced, all unsynced data captured since the last sync point is lost.
+
+The service can capture data from multiple resources at the same or different frequencies.
+The service does not impose a lower or upper limit on the frequency of data collection.
+However, in practice, high-frequency data collection (> 100Hz) requires special considerations on the ESP32.
+
+{{% /tab %}}
+{{< /tabs >}}
+
+## Security
+
+The data management service uses {{< glossary_tooltip term_id="grpc" text="gRPC" >}} calls to send and receive data, so your data is encrypted while in flight.
+When data is stored in the cloud, it is encrypted at rest by the cloud storage provider.
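+
+For reference, the capture directory and additional sync paths described above are set on the data management service itself. A minimal configuration touching those attributes might look like the following sketch (`capture_dir` and `additional_sync_paths` are the attributes named on this page; the example path and `sync_interval_mins` value are illustrative assumptions):
+
+```json
+{
+  "name": "data-management",
+  "type": "data_manager",
+  "attributes": {
+    "capture_dir": "",
+    "additional_sync_paths": ["/home/user/images"],
+    "sync_interval_mins": 0.1
+  }
+}
+```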
+
+## Data integrity
+
+Viam's data management service is designed to safeguard against data loss, data duplication, and otherwise compromised data.
+
+If the internet becomes unavailable or the machine needs to restart during the sync process, the sync is interrupted.
+If the sync process is interrupted, the service retries uploading the data at exponentially increasing intervals until the interval between tries reaches one hour, after which the service retries the sync every hour.
+When the connection is restored and sync resumes, the service continues syncing where it left off without duplicating data.
+If the interruption happens mid-file, sync resumes from the beginning of that file.
+
+To avoid syncing files that are still being written to, the data management service only syncs arbitrary files that haven't been modified in the previous 10 seconds.
+You can change this default with the [`file_last_modified_millis` config attribute](/data-ai/edge/capture-sync/).
+
+## Automatic data deletion
+
+If cloud sync is enabled, the data management service deletes captured data from the disk once it has successfully synced to the cloud.
+
+{{< alert title="Warning" color="warning" >}}
+
+If your machine is offline and can't sync, and its disk fills up beyond a certain threshold, the data management service deletes captured data to free up space and maintain a working machine.
+
+{{< /alert >}}
+
+The data management service also automatically deletes local data if your machine's local storage fills up.
+Local data is automatically deleted when _all_ of the following conditions are met:
+
+- Data capture is enabled on the data management service
+- Local disk usage percentage is greater than or equal to 90%
+- The Viam capture directory accounts for at least 50% of the current local disk usage
+
+If local disk usage is greater than or equal to 90%, but the Viam capture directory does not account for at least 50% of that usage, the service emits a warning log message instead and takes no action.
+
+Automatic file deletion only applies to files in the specified Viam capture directory, which is set to `~/.viam/capture` by default.
+Data outside of this directory is not touched by automatic data deletion.
+
+If your machine captures a large amount of data, or frequently goes offline for long periods of time while capturing data, consider moving the Viam capture directory to a larger, dedicated storage device on your machine if available.
+You can change the capture directory using the `capture_dir` attribute.
+
+You can also control how local data is deleted if your machine's local storage becomes full, using the `delete_every_nth_when_disk_full` attribute.
+
+## Storage
+
+Data that is successfully synced to the cloud is automatically deleted from local storage.
+
+When a machine loses its internet connection, it cannot resume cloud sync until it can reach the Viam Cloud again.
+
+To ensure that the machine can store all data captured while it has no connection, you need to provide enough local data storage.
+
+If your machine is offline and can't sync, and its disk fills up beyond a certain threshold, the data management service deletes captured data to free up space and maintain a working machine.
+For more information, see [Automatic data deletion](#automatic-data-deletion) above.
+
+Data capture supports capturing tabular data directly to MongoDB in addition to capturing to disk.
+For more information, see [Capture directly to MongoDB](/data-ai/reference/advanced-data-capture-sync/#capture-directly-to-your-own-mongodb-cluster).
diff --git a/docs/data-ai/get-started/annotate-images.md b/docs/data-ai/get-started/annotate-images.md
index 11496999b4..3579784d8b 100644
--- a/docs/data-ai/get-started/annotate-images.md
+++ b/docs/data-ai/get-started/annotate-images.md
@@ -1,6 +1,6 @@
 ---
-linkTitle: "Annotate images"
-title: "Annotate images"
+linkTitle: "Annotate"
+title: "Annotate"
 weight: 40
 layout: "docs"
 type: "docs"
diff --git a/docs/data-ai/get-started/capture-images.md b/docs/data-ai/get-started/capture-images.md
index c4643fa87a..63b850fa62 100644
--- a/docs/data-ai/get-started/capture-images.md
+++ b/docs/data-ai/get-started/capture-images.md
@@ -1,6 +1,6 @@
 ---
-linkTitle: "Capture images"
-title: "Capture images"
+linkTitle: "Capture"
+title: "Capture"
 weight: 20
 layout: "docs"
 type: "docs"
diff --git a/docs/data-ai/get-started/create-training-dataset.md b/docs/data-ai/get-started/create-training-dataset.md
index 0b8593bc46..10c5447103 100644
--- a/docs/data-ai/get-started/create-training-dataset.md
+++ b/docs/data-ai/get-started/create-training-dataset.md
@@ -1,7 +1,7 @@
 ---
-linkTitle: "Create a training dataset"
-title: "Create a training dataset"
-weight: 30
+linkTitle: "Train"
+title: "Train"
+weight: 70
 layout: "docs"
 type: "docs"
 description: "Create a dataset from your captured data to use for AI model training"
diff --git a/docs/data-ai/get-started/how-sync-works.md b/docs/data-ai/get-started/how-sync-works.md
deleted file mode 100644
index 40a04dc97a..0000000000
--- a/docs/data-ai/get-started/how-sync-works.md
+++ /dev/null
@@ -1,118 +0,0 @@
----
-linkTitle: "Sync data"
-title: "Sync data"
-tags: ["data management", "data", "services"]
-weight: 100
-layout: "docs"
-type: "docs"
-platformarea: ["data"]
-description: "Data capture and sync works differently for viam-server and viam-micro-server."
-date: "2024-12-18"
-prev: "/data-ai/reference/advanced-data-capture-sync/"
----
-
-Data capture and cloud sync works differently for `viam-server` and `viam-micro-server`.
-
-{{< tabs >}}
-{{% tab name="viam-server" %}}
-
-The data is captured locally on the machine's storage and, by default, stored in the `~/.viam/capture` directory.
-For Linux root or sudo users, the `~/.viam/capture` directory resolves to `/root/.viam/capture`.
-
-{{% expand "Can't find the directory data is stored in? Click here." %}}
-
-The relative path for the data capture directory depends on where `viam-server` is run from, as well as the operating system of the machine.
-
-To find the `$HOME` value, check your machine's logs on startup which will log it in the environment variables:
-
-```sh
-2025-01-15T14:27:26.073Z INFO rdk server/entrypoint.go:77 Starting viam-server with following environment variables {"HOME":"/home/johnsmith"}
-```
-
-{{% /expand%}}
-
-If a machine restarts for any reason, data capture automatically resumes and any data already stored but not yet synced is synced.
-
-The service can capture data from multiple resources at the same or different frequencies.
-The service does not impose a lower or upper limit on the frequency of data collection.
-However, in practice, your hardware may impose limits on the frequency of data collection.
-Avoid configuring data capture to higher rates than your hardware can handle, as this could lead to performance degradation.
-
-Data capture is frequently used with cloud sync.
-You can start and stop capture and sync independently.
-You can also enable cloud sync without data capture and it will sync data in the capture directory, as well as the additional sync paths configured in the `viam-server` config.
-If you place data like images or files in the `~/.viam/capture` directory or another directory set up for sync with the data manager, for example with the `"additional_sync_paths"` config attribute, it will sync this data to the cloud.
-
-{{% /tab %}}
-{{% tab name="viam-micro-server" %}}
-
-The data is captured in the ESP32's flash memory until it is uploaded to the Viam Cloud.
-
-If the machine restarts before all data is synced, all unsynced data captured since the last sync point is lost.
-
-The service can capture data from multiple resources at the same or different frequencies.
-The service does not impose a lower or upper limit on the frequency of data collection.
-However, in practice, high frequency data collection (> 100Hz) requires special considerations on the ESP32.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-## Security
-
-The data management service uses {{< glossary_tooltip term_id="grpc" text="gRPC" >}} calls to send and receive data, so your data is encrypted while in flight.
-When data is stored in the cloud, it is encrypted at rest by the cloud storage provider.
-
-## Data integrity
-
-Viam's data management service is designed to safeguard against data loss, data duplication and otherwise compromised data.
-
-If the internet becomes unavailable or the machine needs to restart during the sync process, the sync is interrupted.
-If the sync process is interrupted, the service will retry uploading the data at exponentially increasing intervals until the interval in between tries is at one hour, at which point the service retries the sync every hour.
-When the connection is restored and sync resumes, the service continues sync where it left off without duplicating data.
-If the interruption happens mid-file, sync resumes from the beginning of that file.
-
-To avoid syncing files that are still being written to, the data management service only syncs arbitrary files that haven't been modified in the previous 10 seconds.
-This default can be changed with the [`file_last_modified_millis` config attribute](/data-ai/edge/capture-sync/).
-
-## Automatic data deletion
-
-If cloud sync is enabled, the data management service deletes captured data from the disk once it has successfully synced to the cloud.
-
-{{< alert title="Warning" color="warning" >}}
-
-If your robot is offline and can't sync and your machine's disk fills up beyond a certain threshold, the data management service will delete captured data to free up additional space and maintain a working machine.
-
-{{< /alert >}}
-
-The data management service will also automatically delete local data in the event your machine's local storage fills up.
-Local data is automatically deleted when _all_ of the following conditions are met:
-
-- Data capture is enabled on the data management service
-- Local disk usage percentage is greater than or equal to 90%
-- The Viam capture directory is at least 50% of the current local disk usage
-
-If local disk usage is greater than or equal to 90%, but the Viam capture directory is not at least 50% of that usage, a warning log message will be emitted instead and no action will be taken.
-
-Automatic file deletion only applies to files in the specified Viam capture directory, which is set to `~/.viam/capture` by default.
-Data outside of this directory is not touched by automatic data deletion.
-
-If your machine captures a large amount of data, or frequently goes offline for long periods of time while capturing data, consider moving the Viam capture directory to a larger, dedicated storage device on your machine if available.
-You can change the capture directory using the `capture_dir` attribute.
-
-You can also control how local data is deleted if your machine's local storage becomes full, using the `delete_every_nth_when_disk_full` attribute.
-
-## Storage
-
-Data that is successfully synced to the cloud is automatically deleted from local storage.
-
-When a machine loses its internet connection, it cannot resume cloud sync until it can reach the Viam Cloud again.
-
-{{}}
-
-To ensure that the machine can store all data captured while it has no connection, you need to provide enough local data storage.
-
-If your robot is offline and can't sync and your machine's disk fills up beyond a certain threshold, the data management service will delete captured data to free up additional space and maintain a working machine.
-For more information, see [Automatic data deletion details](/data-ai/data/advanced/how-sync-works/)
-
-Data capture supports capturing tabular data directly to MongoDB in addition to capturing to disk.
-For more information, see [Capture directly to MongoDB](/data-ai/reference/advanced-data-capture-sync/#capture-directly-to-your-own-mongodb-cluster).
diff --git a/docs/data-ai/get-started/quickstart.md b/docs/data-ai/get-started/quickstart.md
index e8d71c4942..a7164153e1 100644
--- a/docs/data-ai/get-started/quickstart.md
+++ b/docs/data-ai/get-started/quickstart.md
@@ -31,3 +31,9 @@ TODO flesh out
 ## Add a camera to your machine

 Follow the guide to configure a [webcam](/operate/reference/components/camera/webcam/) or similar [camera component](/operate/reference/components/camera/).
+
+TODO CARDS
+
+1. capture
+1. annotate
+1. train
diff --git a/docs/data-ai/react/act.md b/docs/data-ai/infer/act.md
similarity index 100%
rename from docs/data-ai/react/act.md
rename to docs/data-ai/infer/act.md
diff --git a/docs/data-ai/react/alert-data.md b/docs/data-ai/infer/alert-data.md
similarity index 100%
rename from docs/data-ai/react/alert-data.md
rename to docs/data-ai/infer/alert-data.md
diff --git a/docs/data-ai/react/alert.md b/docs/data-ai/infer/alert.md
similarity index 99%
rename from docs/data-ai/react/alert.md
rename to docs/data-ai/infer/alert.md
index 262131ef5c..310632c36f 100644
--- a/docs/data-ai/react/alert.md
+++ b/docs/data-ai/infer/alert.md
@@ -99,7 +99,7 @@ Configure data capture on the `filtered-camera` camera to capture images of dete

 ## Set up alerts

-[Triggers](/data-ai/react/alert-data/) allow you to send webhook requests or email notifications when certain events happen.
+[Triggers](/data-ai/infer/alert-data/) allow you to send webhook requests or email notifications when certain events happen.

 You can use the **Data has been synced to the cloud** (`part_data_ingested`) trigger to send alerts whenever an image with an anomaly detection is synced to the cloud from your object filter camera.
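+
+As a rough sketch, such a trigger with an email notification might be configured in the machine's JSON config as follows. The attribute names beyond `part_data_ingested` are assumptions drawn from memory of the triggers schema, and the email address is a placeholder; verify both against the triggers documentation before use:
+
+```json
+{
+  "triggers": [
+    {
+      "name": "trigger-1",
+      "event": {
+        "type": "part_data_ingested",
+        "data_types": ["binary"]
+      },
+      "notifications": [
+        {
+          "type": "email",
+          "value": "you@example.com",
+          "seconds_between_notifications": 60
+        }
+      ]
+    }
+  ]
+}
+```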
diff --git a/docs/data-ai/infer/run-inference.md b/docs/data-ai/infer/run-inference.md
index e5042ed420..732406b9de 100644
--- a/docs/data-ai/infer/run-inference.md
+++ b/docs/data-ai/infer/run-inference.md
@@ -1,6 +1,6 @@
 ---
-linkTitle: "Vision service"
-title: "Inference with the vision service"
+linkTitle: "Detect objects in images"
+title: "Detect objects in images"
 weight: 50
 layout: "docs"
 type: "docs"
diff --git a/docs/data-ai/infer/track.md b/docs/data-ai/infer/track.md
new file mode 100644
index 0000000000..b41a4eb635
--- /dev/null
+++ b/docs/data-ai/infer/track.md
@@ -0,0 +1,9 @@
+---
+linkTitle: "Track objects on video"
+title: "Track objects on video"
+weight: 60
+layout: "docs"
+type: "docs"
+modulescript: true
+description: "Run inference on a model with a vision service or an SDK."
+---
diff --git a/docs/data-ai/react/_index.md b/docs/data-ai/react/_index.md
deleted file mode 100644
index 9a0fdbd784..0000000000
--- a/docs/data-ai/react/_index.md
+++ /dev/null
@@ -1,10 +0,0 @@
----
-linkTitle: "Act"
-title: "Act"
-weight: 300
-layout: "empty"
-type: "docs"
-empty_node: true
-open_on_desktop: true
-header_only: true
----
diff --git a/docs/data-ai/train/_index.md b/docs/data-ai/train/_index.md
index 1c8732fd26..677075db11 100644
--- a/docs/data-ai/train/_index.md
+++ b/docs/data-ai/train/_index.md
@@ -1,7 +1,7 @@
 ---
-linkTitle: "Train AI"
-title: "Train AI"
-weight: 100
+linkTitle: "Train"
+title: "Train"
+weight: 50
 layout: "empty"
 type: "docs"
 empty_node: true
diff --git a/docs/data-ai/train/train-tflite.md b/docs/data-ai/train/train-tflite.md
index 5e1b351a03..ce87f5fd2b 100644
--- a/docs/data-ai/train/train-tflite.md
+++ b/docs/data-ai/train/train-tflite.md
@@ -154,7 +154,7 @@ To capture images of edge cases and re-train your model using those images, comp

 ## Next steps

 Now your machine can make inferences about its environment.
-The next step is to [deploy](/data-ai/train/deploy/) the ML model and then [act](/data-ai/react/act/) or [alert](/data-ai/react/alert/) based on these inferences.
+The next step is to [deploy](/data-ai/train/deploy/) the ML model and then [act](/data-ai/infer/act/) or [alert](/data-ai/infer/alert/) based on these inferences.

 See the following tutorials for examples of using machine learning models to make your machine do things based on its inferences about its environment:
diff --git a/docs/data-ai/train/upload-external-data.md b/docs/data-ai/train/upload-external-data.md
index 3dc8a959e9..045cff010c 100644
--- a/docs/data-ai/train/upload-external-data.md
+++ b/docs/data-ai/train/upload-external-data.md
@@ -13,7 +13,7 @@ aliases:
 - /how-tos/upload-data/
 date: "2024-12-04"
 description: "Upload data to the Viam app from your local computer or mobile device using the data client API, Viam CLI, or Viam mobile app."
-prev: "/data-ai/react/act/"
+prev: "/data-ai/infer/act/"
 ---

 When you configure the data management service, Viam automatically uploads data from the default directory `~/.viam/capture` and any directory you configured.
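+
+To upload a file that lives outside those directories, you can call the data client API from the Python SDK. The following is a minimal sketch, assuming placeholder credentials, part ID, and file path that you would replace with your own:
+
+```python
+import asyncio
+
+from viam.rpc.dial import DialOptions
+from viam.app.viam_client import ViamClient
+
+
+async def main():
+    # Authenticate with an API key (placeholders; use your own key and ID).
+    dial_options = DialOptions.with_api_key(
+        api_key="<API-KEY>",
+        api_key_id="<API-KEY-ID>",
+    )
+    viam_client = await ViamClient.create_from_dial_options(dial_options)
+    data_client = viam_client.data_client
+
+    # Upload a single file from local disk to the Viam Cloud.
+    file_id = await data_client.file_upload_from_path(
+        part_id="<PART-ID>",
+        tags=["example"],
+        filepath="/path/to/image.jpg",
+    )
+    print(f"Uploaded file: {file_id}")
+
+    viam_client.close()
+
+
+asyncio.run(main())
+```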
diff --git a/docs/dev/reference/changelog.md b/docs/dev/reference/changelog.md
index ccbc7329f1..a1385f4e53 100644
--- a/docs/dev/reference/changelog.md
+++ b/docs/dev/reference/changelog.md
@@ -490,7 +490,7 @@ See [Visualize data with Grafana](/tutorials/services/visualize-data-grafana/) f

 {{% changelog date="2024-01-31" color="added" title="Use triggers to trigger actions" %}}

-You can now configure [triggers](/data-ai/react/alert-data/) (previously called webhooks) to execute actions when certain types of data are sent from your machine to the cloud.
+You can now configure [triggers](/data-ai/infer/alert-data/) (previously called webhooks) to execute actions when certain types of data are sent from your machine to the cloud.

 {{% /changelog %}}
diff --git a/docs/manage/fleet/reuse-configuration.md b/docs/manage/fleet/reuse-configuration.md
index 815ac25674..9a67f76c27 100644
--- a/docs/manage/fleet/reuse-configuration.md
+++ b/docs/manage/fleet/reuse-configuration.md
@@ -39,7 +39,7 @@ You must be an [organization owner](/manage/manage/rbac/) to create fragments fo
 {{% tablestep number=2 %}}
 **Add and configure all the resources** you want to use on your machines.

-Fragments support all available resources except [triggers](/data-ai/react/alert-data/).
+Fragments support all available resources except [triggers](/data-ai/infer/alert-data/).
 You can even add other fragments inside a fragment.

 {{< alert title="Tip" color="tip" >}}
diff --git a/docs/manage/troubleshoot/alert.md b/docs/manage/troubleshoot/alert.md
index cf5fdb82b6..5a074f087d 100644
--- a/docs/manage/troubleshoot/alert.md
+++ b/docs/manage/troubleshoot/alert.md
@@ -34,7 +34,7 @@ You can receive alerts for the following events involving machine performance te
 For example, you can configure a trigger to send you a notification when your machine's CPU usage reaches a certain threshold.

 {{< alert title="Tip" color="tip" >}}
-You can also configure alerts on any other machine data, for more information on that, see [Alert on data](/data-ai/react/alert-data/).
+You can also configure alerts on any other machine data. For more information, see [Alert on data](/data-ai/infer/alert-data/).
 {{< /alert >}}

 ## Data meets condition
diff --git a/docs/operate/mobility/use-input-to-act.md b/docs/operate/mobility/use-input-to-act.md
index 02e61e7334..64d313667c 100644
--- a/docs/operate/mobility/use-input-to-act.md
+++ b/docs/operate/mobility/use-input-to-act.md
@@ -51,9 +51,9 @@ readings = await my_sensor.get_readings()

 Other common inputs include the methods of a [board](/dev/reference/apis/components/board/) (`GetGPIO`, `GetPWM`, `PWMFrequency`, `GetDigitalInterruptValue`, and `ReadAnalogReader`), or a [power sensor](/dev/reference/apis/components/power-sensor/) (`GetVoltage`, `GetCurrent`, `GetPower`, and `GetReadings`).
 You can also use camera input, for example to detect objects and pick them up with an arm.
-See [Act based on inferences](/data-ai/react/act/) for relevant examples.
+See [Act based on inferences](/data-ai/infer/act/) for relevant examples.

-If you want to send alerts based on computer vision or captured data, see [Alert on inferences](/data-ai/react/alert/) or [Alert on data](/data-ai/react/alert-data/).
+If you want to send alerts based on computer vision or captured data, see [Alert on inferences](/data-ai/infer/alert/) or [Alert on data](/data-ai/infer/alert-data/).

 {{% /tablestep %}}
 {{% tablestep number=3 %}}
diff --git a/docs/tutorials/projects/helmet.md b/docs/tutorials/projects/helmet.md
index e22d879759..04a23690de 100644
--- a/docs/tutorials/projects/helmet.md
+++ b/docs/tutorials/projects/helmet.md
@@ -251,7 +251,7 @@ Now that you have verified that the detector and data sync are working, modify y

 ## Set up email notifications

-[Triggers](/data-ai/react/alert/) allow you to send webhook requests or email notifications when certain events happen.
+[Triggers](/data-ai/infer/alert/) allow you to send webhook requests or email notifications when certain events happen.
 For example, you can set up a trigger to perform an action whenever an image of someone without a hard hat is uploaded to the cloud.

From 0eb23c4f0f19c49924531177f0582abb186abab3 Mon Sep 17 00:00:00 2001
From: nathan contino
Date: Wed, 14 May 2025 09:40:01 -0400
Subject: [PATCH 7/7] Add aliases as temporary workaround

---
 docs/data-ai/get-started/quickstart.md | 34 ++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)

diff --git a/docs/data-ai/get-started/quickstart.md b/docs/data-ai/get-started/quickstart.md
index a7164153e1..37e4f83fd8 100644
--- a/docs/data-ai/get-started/quickstart.md
+++ b/docs/data-ai/get-started/quickstart.md
@@ -5,6 +5,40 @@ weight: 10
 layout: "docs"
 type: "docs"
 description: "Select and ready a machine for use with the Viam platform"
+aliases:
+ - /data/dataset/
+ - /data-ai/ai/act/
+ - /data-ai/ai/advanced/
+ - /data-ai/ai/advanced/upload-external-data/
+ - /data-ai/ai/alert/
+ - /data-ai/ai/create-dataset/
+ - /data-ai/ai/deploy/
+ - /data-ai/ai/
+ - /data-ai/ai/run-inference/
+ - /data-ai/ai/train/
+ - /data-ai/ai/train-tflite/
+ - /data-ai/capture-data/advanced/advanced-data-capture-sync/
+ - /data-ai/capture-data/advanced/how-sync-works/
+ - /data-ai/capture-data/advanced/
+ - /data-ai/capture-data/capture-sync/
+ - /data-ai/capture-data/conditional-sync/
+ - /data-ai/capture-data/filter-before-sync/
+ - /data-ai/capture-data/fleet/data-management/
+ - /data-ai/capture-data/
+ - /data-ai/data/advanced/alert-data/
+ - /data-ai/data/advanced/
+ - /data-ai/data/export/
+ - /data-ai/data/
+ - /data-ai/data/query/
+ - /data-ai/data/visualize/
+ - /data-ai/reference/data-client/
+ - /data-ai/reference/data-management-client/
+ - /data-ai/reference/ml-model-client/
+ - /data-ai/reference/ml-training-client/
+ - /data-ai/reference/vision-client/
+ - /fleet/dataset/
+ - /manage/data/dataset/
+ - /manage/data/label/
 ---

 To ensure a machine learning model you create performs well, you need to train it on a variety of images that cover the range of things your machine should be able to recognize.