diff --git a/src/pages/[platform]/build-a-backend/add-aws-services/logging/set-up-logging/index.mdx b/src/pages/[platform]/build-a-backend/add-aws-services/logging/set-up-logging/index.mdx
index 07956609a7b..dcf1b3239f5 100644
--- a/src/pages/[platform]/build-a-backend/add-aws-services/logging/set-up-logging/index.mdx
+++ b/src/pages/[platform]/build-a-backend/add-aws-services/logging/set-up-logging/index.mdx
@@ -113,11 +113,11 @@ The `<log-group-name>` and `<region>` configured in the CDK construct will be us
 - `<amplify-authenticated-role-name>` and `<amplify-unauthenticated-role-name>` are Amplify roles created as part of Amplify Auth configuration via Amplify CLI.
 
 ```ts
+import * as path from "node:path"
 import * as cdk from "aws-cdk-lib"
-import { Construct } from "constructs"
 import * as logs from "aws-cdk-lib/aws-logs"
-import * as path from "path"
 import * as iam from "aws-cdk-lib/aws-iam"
+import { Construct } from "constructs"
 
 export class RemoteLoggingConstraintsConstruct extends Construct {
   constructor(scope: Construct, id: string, props: RemoteLoggingConstraintProps) {
diff --git a/src/pages/[platform]/build-a-backend/add-aws-services/predictions/set-up-predictions/index.mdx b/src/pages/[platform]/build-a-backend/add-aws-services/predictions/set-up-predictions/index.mdx
index 53cc41e5cea..9b7642739dd 100644
--- a/src/pages/[platform]/build-a-backend/add-aws-services/predictions/set-up-predictions/index.mdx
+++ b/src/pages/[platform]/build-a-backend/add-aws-services/predictions/set-up-predictions/index.mdx
@@ -25,8 +25,6 @@ export function getStaticProps() {
   };
 }
 
-### Set up the backend
-
 To enable Predictions we need to set up the appropriate IAM policy for Roles in your Cognito Identity Pool in order to use an appropriate feature. Additionally, we need to use the ```addOutput``` method to patch the custom Predictions resource to the expected output configuration.
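The hunks that follow only show the import reshuffle and the `region` field cleanup; the IAM policy that actually grants the Cognito Identity Pool roles access to the Predictions services sits in an unchanged part of the file. For orientation, that attachment typically looks something like the following sketch (a sketch only, not part of this diff; it assumes the Amplify-generated unauthenticated role is the grant target, and the action list should be trimmed to the features you enable):

```ts
// Sketch only -- not part of this diff. Grants the Cognito unauthenticated
// role the service actions used by the Predictions categories configured below.
import { Policy, PolicyStatement } from "aws-cdk-lib/aws-iam";

backend.auth.resources.unauthenticatedUserIamRole.attachInlinePolicy(
  new Policy(backend.auth.stack, "PredictionsUnauthPolicy", {
    statements: [
      new PolicyStatement({
        actions: [
          "translate:TranslateText",
          "polly:SynthesizeSpeech",
          "transcribe:StartStreamTranscriptionWebSocket",
          "rekognition:DetectText",
          "rekognition:DetectLabels",
          "comprehend:DetectSentiment",
        ],
        resources: ["*"],
      }),
    ],
  })
);
```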
@@ -38,11 +36,9 @@ To learn more, check the docs of [Amazon Translate](https://docs.aws.amazon.com/
 
 ```ts title="amplify/backend.ts"
-
+import { PolicyStatement } from "aws-cdk-lib/aws-iam";
 import { defineBackend } from "@aws-amplify/backend";
 import { auth } from "./auth/resource";
-import { Stack } from "aws-cdk-lib";
-import { PolicyStatement } from "aws-cdk-lib/aws-iam";
 
 const backend = defineBackend({
   auth,
@@ -88,24 +84,21 @@ backend.addOutput({
           targetLanguage: "es",
         },
         proxy: false,
-        region: Stack.of(backend.auth.resources.unauthenticatedUserIamRole)
-          .region,
+        region: backend.auth.stack.region,
       },
       speechGenerator: {
         defaults: {
           voiceId: "Ivy",
         },
         proxy: false,
-        region: Stack.of(backend.auth.resources.unauthenticatedUserIamRole)
-          .region,
+        region: backend.auth.stack.region,
       },
       transcription: {
         defaults: {
           language: "en-US",
         },
         proxy: false,
-        region: Stack.of(backend.auth.resources.unauthenticatedUserIamRole)
-          .region,
+        region: backend.auth.stack.region,
       },
     },
     identify: {
@@ -116,24 +109,21 @@ backend.addOutput({
         },
         celebrityDetectionEnabled: true,
         proxy: false,
-        region: Stack.of(backend.auth.resources.unauthenticatedUserIamRole)
-          .region,
+        region: backend.auth.stack.region,
       },
       identifyLabels: {
         defaults: {
           type: "ALL",
         },
         proxy: false,
-        region: Stack.of(backend.auth.resources.unauthenticatedUserIamRole)
-          .region,
+        region: backend.auth.stack.region,
       },
       identifyText: {
         defaults: {
           format: "ALL",
         },
         proxy: false,
-        region: Stack.of(backend.auth.resources.unauthenticatedUserIamRole)
-          .region,
+        region: backend.auth.stack.region,
       },
     },
     interpret: {
@@ -142,17 +132,14 @@ backend.addOutput({
           type: "ALL",
         },
         proxy: false,
-        region: Stack.of(backend.auth.resources.unauthenticatedUserIamRole)
-          .region,
+        region: backend.auth.stack.region,
       },
     },
   },
 });
-
-
-
 ```
+
 ## Install Amplify Libraries
 
 To install the Amplify library to use predictions features, run the following commands in your project's root folder:
diff --git a/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-amazon-rekognition/index.mdx b/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-amazon-rekognition/index.mdx
index 18737c13d50..61378979b58 100644
--- a/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-amazon-rekognition/index.mdx
+++ b/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-amazon-rekognition/index.mdx
@@ -75,12 +75,10 @@ export const storage = defineStorage({
 
 To use the Amazon Rekognition service, you need to add Amazon Rekognition as an HTTP Data Source and configure the proper IAM policy for Lambda to effectively utilize the desired feature and grant permission to access the storage. In this case, you can add the `rekognition:DetectText` and `rekognition:DetectLabels` actions to the policy. Update the `amplify/backend.ts` file as shown below.
 ```ts title= "amplify/backend.ts"
-
+import { PolicyStatement } from 'aws-cdk-lib/aws-iam';
 import { defineBackend } from '@aws-amplify/backend';
 import { auth } from './auth/resource';
 import { data } from './data/resource';
-import { Stack } from 'aws-cdk-lib';
-import { PolicyStatement } from 'aws-cdk-lib/aws-iam';
 import { storage } from './storage/resource';
 
 const backend = defineBackend({
@@ -89,8 +87,6 @@ const backend = defineBackend({
   storage
 });
 
-const dataStack = Stack.of(backend.data)
-
 // Set environment variables for the S3 Bucket name
 backend.data.resources.cfnResources.cfnGraphqlApi.environmentVariables = {
   S3_BUCKET_NAME: backend.storage.resources.bucket.bucketName,
@@ -98,10 +94,10 @@ backend.data.resources.cfnResources.cfnGraphqlApi.environmentVariables = {
 
 const rekognitionDataSource = backend.data.addHttpDataSource(
   "RekognitionDataSource",
-  `https://rekognition.${dataStack.region}.amazonaws.com`,
+  `https://rekognition.${backend.data.stack.region}.amazonaws.com`,
   {
     authorizationConfig: {
-      signingRegion: dataStack.region,
+      signingRegion: backend.data.stack.region,
       signingServiceName: "rekognition",
     },
   }
diff --git a/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-amazon-translate/index.mdx b/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-amazon-translate/index.mdx
index dc720f3b8d2..5163e1739da 100644
--- a/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-amazon-translate/index.mdx
+++ b/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-amazon-translate/index.mdx
@@ -60,24 +60,21 @@ npm add @aws-sdk/client-translate
 
 ```ts title="amplify/backend.ts"
 import { defineBackend } from '@aws-amplify/backend';
-import { auth } from './auth/resource';
-import { data } from "./data/resource";
-import { Stack } from 'aws-cdk-lib';
 import { PolicyStatement } from 'aws-cdk-lib/aws-iam';
+import { auth } from './auth/resource';
+import { data } from './data/resource';
 
 const backend = defineBackend({
   auth,
   data
 });
 
-const dataStack = Stack.of(backend.data)
-
 const translateDataSource = backend.data.addHttpDataSource(
   "TranslateDataSource",
-  `https://translate.${dataStack.region}.amazonaws.com`,
+  `https://translate.${backend.data.stack.region}.amazonaws.com`,
   {
     authorizationConfig: {
-      signingRegion: dataStack.region,
+      signingRegion: backend.data.stack.region,
       signingServiceName: "translate",
     },
   }
diff --git a/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-bedrock/index.mdx b/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-bedrock/index.mdx
index 13ed512be0e..b141491b60b 100644
--- a/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-bedrock/index.mdx
+++ b/src/pages/[platform]/build-a-backend/data/custom-business-logic/connect-bedrock/index.mdx
@@ -85,11 +85,10 @@ backend.generateHaikuFunction.resources.lambda.addToRolePolicy(
 
 In your `amplify/backend.ts` file, replace the content with the following code to add an HTTP data source for Amazon Bedrock to your API and grant it permissions to invoke a generative AI model:
 
 ```ts title="amplify/backend.ts"
+import { Effect, PolicyStatement } from "aws-cdk-lib/aws-iam";
 import { defineBackend } from "@aws-amplify/backend";
 import { auth } from "./auth/resource";
 import { data } from "./data/resource";
-import { Effect, PolicyStatement } from "aws-cdk-lib/aws-iam";
-import { Stack } from "aws-cdk-lib";
 
 export const backend = defineBackend({
   auth,
@@ -103,7 +102,7 @@ const bedrockDataSource = backend.data.addHttpDataSource(
   "https://bedrock-runtime.us-east-1.amazonaws.com",
   {
     authorizationConfig: {
-      signingRegion: Stack.of(backend.data).region,
+      signingRegion: backend.data.stack.region,
       signingServiceName: "bedrock",
     },
   }
@@ -114,7 +113,7 @@ bedrockDataSource.grantPrincipal.addToPrincipalPolicy(
     effect: Effect.ALLOW,
     actions: ["bedrock:InvokeModel"],
     resources: [
-      `arn:aws:bedrock:${Stack.of(backend.data).region}::foundation-model/${MODEL_ID}`,
+      `arn:aws:bedrock:${backend.data.stack.region}::foundation-model/${MODEL_ID}`,
     ],
   })
 );
@@ -352,32 +351,31 @@ const { data, errors } = await client.queries.generateHaiku({
 
 Here's an example of a simple UI that prompts a generative AI model to create a haiku based on user input:
 
 ```tsx title="App.tsx"
-import { FormEvent, useState } from "react";
-
-import { generateClient } from "aws-amplify/api";
-import { Schema } from "@/amplify/data/resource";
-
-import { Amplify } from "aws-amplify";
-import outputs from "@/amplify_outputs.json";
+import type { Schema } from '@/amplify/data/resource';
+import type { FormEvent } from 'react';
+import { useState } from 'react';
+import { Amplify } from 'aws-amplify';
+import { generateClient } from 'aws-amplify/api';
+import outputs from '@/amplify_outputs.json';
 
 Amplify.configure(outputs);
 
 const client = generateClient<Schema>();
 
 export default function App() {
-  const [prompt, setPrompt] = useState("");
+  const [prompt, setPrompt] = useState('');
   const [answer, setAnswer] = useState<string | null>(null);
 
-  const sendPrompt = async (e: FormEvent) => {
-    e.preventDefault();
+  const sendPrompt = async (event: FormEvent) => {
+    event.preventDefault();
 
     const { data, errors } = await client.queries.generateHaiku({
-      prompt,
+      prompt
     });
 
     if (!errors) {
       setAnswer(data);
-      setPrompt("");
+      setPrompt('');
     } else {
       console.log(errors);
     }
@@ -387,17 +385,15 @@ export default function App() {
   return (
     <main>
       <h1>Haiku Generator</h1>
       <form onSubmit={sendPrompt}>
         <input
           placeholder="Describe your haiku"
           value={prompt}
-          onChange={(e) => setPrompt(e.target.value)}
+          onChange={(event) => setPrompt(event.target.value)}
         />
         <button type="submit">Submit</button>
       </form>
       <div>{answer}</div>
     </main>
   );
@@ -407,7 +403,7 @@ export default function App() {
 }
 ```
 
-![GIF of a webpage titled "Haiku Generator" and input field. "Frank Herbert's Dune" is entered and submitted. Shortly after, a haiku is rendered to the page.](/images/haiku-generator.gif)
+![A webpage titled "Haiku Generator" with an input field. "Frank Herbert's Dune" is entered and submitted. Shortly after, a haiku is rendered to the page.](/images/haiku-generator.gif)
 
 ## Conclusion
diff --git a/src/pages/[platform]/build-a-backend/data/custom-business-logic/search-and-aggregate-queries/index.mdx b/src/pages/[platform]/build-a-backend/data/custom-business-logic/search-and-aggregate-queries/index.mdx
index 8d4817a561f..6d0f35de055 100644
--- a/src/pages/[platform]/build-a-backend/data/custom-business-logic/search-and-aggregate-queries/index.mdx
+++ b/src/pages/[platform]/build-a-backend/data/custom-business-logic/search-and-aggregate-queries/index.mdx
@@ -36,12 +36,12 @@ DynamoDB users gain access to advanced OpenSearch features like full-text search
 Amazon OpenSearch Ingestion, combined with S3 exports and DynamoDB streams, facilitates seamless data input from DynamoDB tables and automatic ingestion into OpenSearch. Additionally, the pipeline can back up data to S3 for potential future re-ingestion as needed.
 
 ## Step 1: Setup the project
+
 Begin by setting up your project by following the instructions in the [Quickstart guide](/[platform]/start/quickstart/).
 
 For the purpose of this guide, we'll sync a Todo table from DynamoDB to OpenSearch. Firstly, add the Todo model to your schema:
 
 ```ts title="amplify/data/resource.ts"
-
 import { type ClientSchema, a, defineData } from "@aws-amplify/backend";
 
 const schema = a.schema({
@@ -79,96 +79,91 @@ Enable DynamoDB streams to capture item changes that will be ingested into OpenS
 
 ```ts title="amplify/backend.ts"
-
-import { defineBackend } from "@aws-amplify/backend";
-import { auth } from "./auth/resource";
-import { data } from "./data/resource";
 // highlight-start
-import * as dynamodb from "aws-cdk-lib/aws-dynamodb";
+import * as dynamodb from 'aws-cdk-lib/aws-dynamodb';
 // highlight-end
+import { defineBackend } from '@aws-amplify/backend';
+import { auth } from './auth/resource';
+import { data } from './data/resource';
 
 const backend = defineBackend({
   auth,
-  data,
+  data
 });
 
 // highlight-start
 const todoTable =
-  backend.data.resources.cfnResources.amplifyDynamoDbTables["Todo"];
+  backend.data.resources.cfnResources.amplifyDynamoDbTables['Todo'];
 
 // Update table settings
 todoTable.pointInTimeRecoveryEnabled = true;
 
 todoTable.streamSpecification = {
-  streamViewType: dynamodb.StreamViewType.NEW_IMAGE,
+  streamViewType: dynamodb.StreamViewType.NEW_IMAGE
 };
 
 // Get the DynamoDB table ARN
-const tableArn = backend.data.resources.tables["Todo"].tableArn;
+const tableArn = backend.data.resources.tables['Todo'].tableArn;
 // Get the DynamoDB table name
-const tableName = backend.data.resources.tables["Todo"].tableName;
-// highlight-end
-
+const tableName = backend.data.resources.tables['Todo'].tableName;
+// highlight-end
 ```
 
 ## Step 2: Setting Up the OpenSearch Instance
 
 Create an OpenSearch instance with encryption.
-``` ts title="amplify/backend.ts"
-
-import { defineBackend } from "@aws-amplify/backend";
-import { auth } from "./auth/resource";
-import { data } from "./data/resource";
+```ts title="amplify/backend.ts"
 // highlight-start
-import * as opensearch from "aws-cdk-lib/aws-opensearchservice";
-import { Stack } from "aws-cdk-lib";
+import * as opensearch from 'aws-cdk-lib/aws-opensearchservice';
 // highlight-end
+import { defineBackend } from '@aws-amplify/backend';
+import { auth } from './auth/resource';
+import { data } from './data/resource';
+
+const backend = defineBackend({
+  auth,
+  data
+});
 
 const todoTable =
-  backend.data.resources.cfnResources.amplifyDynamoDbTables["Todo"];
+  backend.data.resources.cfnResources.amplifyDynamoDbTables['Todo'];
 
 // Update table settings
 todoTable.pointInTimeRecoveryEnabled = true;
 todoTable.streamSpecification = {
-  streamViewType: dynamodb.StreamViewType.NEW_IMAGE,
+  streamViewType: dynamodb.StreamViewType.NEW_IMAGE
 };
 
 // Get the DynamoDB table ARN
-const tableArn = backend.data.resources.tables["Todo"].tableArn;
+const tableArn = backend.data.resources.tables['Todo'].tableArn;
 // Get the DynamoDB table name
-const tableName = backend.data.resources.tables["Todo"].tableName;
-
-// highlight-start
-// Get the data stack
-const dataStack = Stack.of(backend.data);
+const tableName = backend.data.resources.tables['Todo'].tableName;
 
 // Create the OpenSearch domain
 const openSearchDomain = new opensearch.Domain(
-  dataStack,
-  "OpenSearchDomain",
+  backend.data.stack,
+  'OpenSearchDomain',
   {
     version: opensearch.EngineVersion.OPENSEARCH_2_11,
     nodeToNodeEncryption: true,
     encryptionAtRest: {
-      enabled: true,
-    },
+      enabled: true
+    }
   }
 );
 // highlight-end
-
 ```
 
 ## Step 3: Setting Up Zero ETL from DynamoDB to OpenSearch
 
 ### Step 3a: Setup Storage and IAM Role
+
 Establish Storage to back up raw events consumed by the OpenSearch pipeline. Generate a file named `amplify/storage/resource.ts` and insert the provided content to set up a storage resource. Tailor your storage configurations to regulate access to different paths within your storage bucket.
 
-
 ```ts title="amplify/storage/resource.ts"
-
 import { defineStorage } from "@aws-amplify/backend"
 
 export const storage = defineStorage({
@@ -179,22 +174,20 @@ export const storage = defineStorage({
     ]
   })
 })
-
 ```
+
 Get the `s3BucketArn` and `s3BucketName` values from storage resource as shown below. Additionally, configure an IAM role for the pipeline and assign the roles as indicated below. For further information on the required IAM roles, please refer to the [Setting up roles and users](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/pipeline-security-overview.html#pipeline-security-create) documentation.
-``` ts title="amplify/backend.ts"
+```ts title="amplify/backend.ts"
+import * as dynamodb from "aws-cdk-lib/aws-dynamodb";
+import * as opensearch from "aws-cdk-lib/aws-opensearchservice";
+// highlight-next-line
+import * as iam from "aws-cdk-lib/aws-iam";
 import { defineBackend } from "@aws-amplify/backend";
 import { auth } from "./auth/resource";
 import { data } from "./data/resource";
-import * as dynamodb from "aws-cdk-lib/aws-dynamodb";
-import * as opensearch from "aws-cdk-lib/aws-opensearchservice";
-import { Stack } from "aws-cdk-lib";
-
-//highlight-start
+// highlight-next-line
 import { storage } from "./storage/resource";
-import * as iam from "aws-cdk-lib/aws-iam";
-//highlight-end
 
 // Define backend resources
 const backend = defineBackend({
@@ -220,12 +213,9 @@ const tableArn = backend.data.resources.tables["Todo"].tableArn;
 
 // Get the DynamoDB table name
 const tableName = backend.data.resources.tables["Todo"].tableName;
 
-// Get the data stack
-const dataStack = Stack.of(backend.data);
-
 // Create the OpenSearch domain
 const openSearchDomain = new opensearch.Domain(
-  dataStack,
+  backend.data.stack,
   "OpenSearchDomain",
   {
     version: opensearch.EngineVersion.OPENSEARCH_2_11,
@@ -241,12 +231,9 @@ const s3BucketArn = backend.storage.resources.bucket.bucketArn;
 
 // Get the S3Bucket Name
 const s3BucketName = backend.storage.resources.bucket.bucketName;
 
-//Get the region
-const region = dataStack.region;
-
 // Create an IAM role for OpenSearch integration
 const openSearchIntegrationPipelineRole = new iam.Role(
-  dataStack,
+  backend.data.stack,
   "OpenSearchIntegrationPipelineRole",
   {
     assumedBy: new iam.ServicePrincipal("osis-pipelines.amazonaws.com"),
@@ -302,8 +289,9 @@ const openSearchIntegrationPipelineRole = new iam.Role(
     ],
   }
 );
-//highlight-end
+// highlight-end
 ```
+
 For the S3 bucket, follow standard security practices: block public access, encrypt data at rest, and enable versioning.
 
 The IAM role should allow the OpenSearch Ingestion Service (OSIS) pipelines to assume it. Grant specific OpenSearch Service permissions and also provide DynamoDB and S3 access. You may customize permissions to follow the principle of least privilege.
@@ -316,15 +304,14 @@ When using OpenSearch, you can define the index template or mapping in advance b
 
 Customize the `template_content` JSON-representation to define the data structure for the ingestion pipeline.
-``` ts title="amplify/backend.ts"
+```ts title="amplify/backend.ts"
+import * as dynamodb from "aws-cdk-lib/aws-dynamodb";
+import * as opensearch from "aws-cdk-lib/aws-opensearchservice";
+import * as iam from "aws-cdk-lib/aws-iam";
 import { defineBackend } from "@aws-amplify/backend";
 import { auth } from "./auth/resource";
 import { data } from "./data/resource";
-import * as dynamodb from "aws-cdk-lib/aws-dynamodb";
-import * as opensearch from "aws-cdk-lib/aws-opensearchservice";
-import { Stack } from "aws-cdk-lib";
 import { storage } from "./storage/resource";
-import * as iam from "aws-cdk-lib/aws-iam";
 
 // Define backend resources
 const backend = defineBackend({
@@ -348,12 +335,9 @@ const tableArn = backend.data.resources.tables["Todo"].tableArn;
 
 // Get the DynamoDB table name
 const tableName = backend.data.resources.tables["Todo"].tableName;
 
-// Get the data stack
-const dataStack = Stack.of(backend.data);
-
 // Create the OpenSearch domain
 const openSearchDomain = new opensearch.Domain(
-  dataStack,
+  backend.data.stack,
   "OpenSearchDomain",
   {
     version: opensearch.EngineVersion.OPENSEARCH_2_11,
@@ -369,12 +353,9 @@ const s3BucketArn = backend.storage.resources.bucket.bucketArn;
 
 // Get the S3Bucket Name
 const s3BucketName = backend.storage.resources.bucket.bucketName;
 
-//Get the region
-const region = dataStack.region;
-
 // Create an IAM role for OpenSearch integration
 const openSearchIntegrationPipelineRole = new iam.Role(
-  dataStack,
+  backend.data.stack,
   "OpenSearchIntegrationPipelineRole",
   {
     assumedBy: new iam.ServicePrincipal("osis-pipelines.amazonaws.com"),
@@ -432,7 +413,6 @@ const openSearchIntegrationPipelineRole = new iam.Role(
 );
 
 // highlight-start
-
 // Define OpenSearch index mappings
 const indexName = "todo";
 
@@ -456,21 +436,19 @@ const indexMapping = {
     },
   },
 };
 // highlight-end
-
 ```
 
 The configuration is a data-prepper feature of OpenSearch. For specific documentation on DynamoDB configuration, refer to [OpenSearch data-prepper documentation](https://opensearch.org/docs/latest/data-prepper/pipelines/configuration/sources/dynamo-db/).
-``` ts title="amplify/backend.ts"
+```ts title="amplify/backend.ts"
+import * as dynamodb from "aws-cdk-lib/aws-dynamodb";
+import * as opensearch from "aws-cdk-lib/aws-opensearchservice";
+import * as iam from "aws-cdk-lib/aws-iam";
 import { defineBackend } from "@aws-amplify/backend";
 import { auth } from "./auth/resource";
 import { data } from "./data/resource";
-import * as dynamodb from "aws-cdk-lib/aws-dynamodb";
-import * as opensearch from "aws-cdk-lib/aws-opensearchservice";
-import { Stack } from "aws-cdk-lib";
 import { storage } from "./storage/resource";
-import * as iam from "aws-cdk-lib/aws-iam";
 
 // Define backend resources
 const backend = defineBackend({
@@ -494,12 +472,9 @@ const tableArn = backend.data.resources.tables["Todo"].tableArn;
 
 // Get the DynamoDB table name
 const tableName = backend.data.resources.tables["Todo"].tableName;
 
-// Get the data stack
-const dataStack = Stack.of(backend.data);
-
 // Create the OpenSearch domain
 const openSearchDomain = new opensearch.Domain(
-  dataStack,
+  backend.data.stack,
   "OpenSearchDomain",
   {
     version: opensearch.EngineVersion.OPENSEARCH_2_11,
@@ -515,12 +490,9 @@ const s3BucketArn = backend.storage.resources.bucket.bucketArn;
 
 // Get the S3Bucket Name
 const s3BucketName = backend.storage.resources.bucket.bucketName;
 
-//Get the region
-const region = dataStack.region;
-
 // Create an IAM role for OpenSearch integration
 const openSearchIntegrationPipelineRole = new iam.Role(
-  dataStack,
+  backend.data.stack,
   "OpenSearchIntegrationPipelineRole",
   {
     assumedBy: new iam.ServicePrincipal("osis-pipelines.amazonaws.com"),
@@ -618,11 +590,11 @@ dynamodb-pipeline:
       start_position: "LATEST"
       export:
         s3_bucket: "${s3BucketName}"
-        s3_region: "${region}"
+        s3_region: "${backend.storage.stack.region}"
         s3_prefix: "${tableName}/"
       aws:
         sts_role_arn: "${openSearchIntegrationPipelineRole.roleArn}"
-        region: "${region}"
+        region: "${backend.data.stack.region}"
   sink:
     - opensearch:
         hosts:
@@ -638,7 +610,7 @@ dynamodb-pipeline:
           bulk_size: 4
         aws:
           sts_role_arn: "${openSearchIntegrationPipelineRole.roleArn}"
-          region: "${region}"
+          region: "${backend.data.stack.region}"
 `;
 // highlight-end
 ```
@@ -654,25 +626,27 @@ The sink configuration is an array. To create a different index on the same tabl
 
 To index multiple tables, you'll need to configure multiple pipelines in the configuration. For further guidance, please consult the [pipeline section](https://opensearch.org/docs/latest/data-prepper/pipelines/pipelines/) of the OpenSearch documentation.
 
-NOTE: An OpenSearch Ingestion pipeline supports only one DynamoDB table as its source. For more details on current limitations, Please refer to [Amazon OpenSearch Limitation](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/configure-client-ddb.html#ddb-pipeline-limitations) section.
+
+**Note**: An OpenSearch Ingestion pipeline supports only one DynamoDB table as its source. For more details on current limitations, please refer to the [Amazon OpenSearch limitations](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/configure-client-ddb.html#ddb-pipeline-limitations) section.
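As a concrete illustration of the multi-index point above, a second entry in the pipeline's `sink` array can write the same stream to an additional index. The following is a sketch only, not part of this diff: the host and role expressions mirror the placeholders used in the template above, and `todo-archive` is a hypothetical second index name:

```ts
// Sketch: one pipeline definition with two OpenSearch sinks over the
// same DynamoDB source. Reuses the domain, role, and region variables
// from the surrounding backend.ts; "todo-archive" is hypothetical.
const sinkFragment = `
  sink:
    - opensearch:
        hosts:
          - "https://${openSearchDomain.domainEndpoint}"
        index: "${indexName}"
        aws:
          sts_role_arn: "${openSearchIntegrationPipelineRole.roleArn}"
          region: "${backend.data.stack.region}"
    - opensearch:
        hosts:
          - "https://${openSearchDomain.domainEndpoint}"
        index: "todo-archive"
        aws:
          sts_role_arn: "${openSearchIntegrationPipelineRole.roleArn}"
          region: "${backend.data.stack.region}"
`;
```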
+
 
-Now, create the OSIS pipeline resource:
+Now, create the OSIS pipeline resource:
 
-``` ts title="amplify/backend.ts"
-import { defineBackend } from "@aws-amplify/backend";
-import { auth } from "./auth/resource";
-import { data } from "./data/resource";
+```ts title="amplify/backend.ts"
 import * as dynamodb from "aws-cdk-lib/aws-dynamodb";
 import * as opensearch from "aws-cdk-lib/aws-opensearchservice";
-import { Stack } from "aws-cdk-lib";
-import { storage } from "./storage/resource";
 import * as iam from "aws-cdk-lib/aws-iam";
- //highlight-start
+// highlight-start
 import * as osis from "aws-cdk-lib/aws-osis";
 import * as logs from "aws-cdk-lib/aws-logs";
 import { RemovalPolicy } from "aws-cdk-lib";
- //highlight-end
+// highlight-end
+import { defineBackend } from "@aws-amplify/backend";
+import { auth } from "./auth/resource";
+import { data } from "./data/resource";
+import { storage } from "./storage/resource";
+
 // Define backend resources
 const backend = defineBackend({
   auth,
@@ -695,12 +669,9 @@ const tableArn = backend.data.resources.tables["Todo"].tableArn;
 
 // Get the DynamoDB table name
 const tableName = backend.data.resources.tables["Todo"].tableName;
 
-// Get the data stack
-const dataStack = Stack.of(backend.data);
-
 // Create the OpenSearch domain
 const openSearchDomain = new opensearch.Domain(
-  dataStack,
+  backend.data.stack,
   "OpenSearchDomain",
   {
     version: opensearch.EngineVersion.OPENSEARCH_2_11,
@@ -716,12 +687,9 @@ const s3BucketArn = backend.storage.resources.bucket.bucketArn;
 
 // Get the S3Bucket Name
 const s3BucketName = backend.storage.resources.bucket.bucketName;
 
-//Get the region
-const region = dataStack.region;
-
 // Create an IAM role for OpenSearch integration
 const openSearchIntegrationPipelineRole = new iam.Role(
-  dataStack,
+  backend.data.stack,
   "OpenSearchIntegrationPipelineRole",
   {
     assumedBy: new iam.ServicePrincipal("osis-pipelines.amazonaws.com"),
@@ -817,11 +785,11 @@ dynamodb-pipeline:
       start_position: "LATEST"
      export:
        s3_bucket: "${s3BucketName}"
-        s3_region: "${region}"
+        s3_region: "${backend.storage.stack.region}"
        s3_prefix: "${tableName}/"
      aws:
        sts_role_arn: "${openSearchIntegrationPipelineRole.roleArn}"
-        region: "${region}"
+        region: "${backend.data.stack.region}"
   sink:
     - opensearch:
         hosts:
@@ -837,19 +805,19 @@ dynamodb-pipeline:
           bulk_size: 4
        aws:
          sts_role_arn: "${openSearchIntegrationPipelineRole.roleArn}"
-          region: "${region}"
+          region: "${backend.data.stack.region}"
 `;
 
 // highlight-start
 // Create a CloudWatch log group
-const logGroup = new logs.LogGroup(dataStack, "LogGroup", {
+const logGroup = new logs.LogGroup(backend.data.stack, "LogGroup", {
   logGroupName: "/aws/vendedlogs/OpenSearchService/pipelines/1",
   removalPolicy: RemovalPolicy.DESTROY,
 });
 
 // Create an OpenSearch Integration Service pipeline
 const cfnPipeline = new osis.CfnPipeline(
-  dataStack,
+  backend.data.stack,
   "OpenSearchIntegrationPipeline",
   {
     maxUnits: 4,
@@ -864,9 +832,7 @@ const cfnPipeline = new osis.CfnPipeline(
     },
   }
 );
- //highlight-end
-
 ```
 
 After deploying the resources, you can test the data ingestion process by adding an item to the `Todo` table. However, before doing that, let's verify that the pipeline has been set up correctly.
@@ -885,7 +851,7 @@ You can also check this in the DynamoDB console by going to the Integrations sec
 
 First, Add the OpenSearch data source to the data backend. Add the following code to the end of the `amplify/backend.ts` file.
-``` ts title="amplify/backend.ts"
+```ts title="amplify/backend.ts"
 // Add OpenSearch data source
 const osDataSource = backend.data.addOpenSearchDataSource(
   "osDataSource",
   openSearchDomain
 );
@@ -898,7 +864,6 @@ const osDataSource = backend.data.addOpenSearchDataSource(
 
 Let's create the search resolver. Create a new file named `amplify/data/searchTodoResolver.js` and paste the following code. For additional details please refer to [Amazon OpenSearch Service Resolvers](https://docs.aws.amazon.com/appsync/latest/devguide/tutorial-elasticsearch-resolvers-js.html)
 
 ```ts title="amplify/data/searchTodoResolver.js"
-
 import { util } from "@aws-appsync/utils";
 
 /**
  * Searches for documents by using an input term
  * @param {import('@aws-appsync/utils').Context} ctx the context
  * @returns {*} the request
  */
-
 export function request(ctx) {
   return {
     operation: "GET",
     path: `/${indexName}/_search`,
@@ -919,23 +883,19 @@ export function request(ctx) {
  * @param {import('@aws-appsync/utils').Context} ctx the context
  * @returns {*} the result
  */
-
 export function response(ctx) {
   if (ctx.error) {
     util.error(ctx.error.message, ctx.error.type);
   }
   return ctx.result.hits.hits.map((hit) => hit._source);
 }
-
-
 ```
 
 ### Step 4c: Add the AppSync Resolver for the Search Query
 
 Update the schema and add a searchTodo query.
 
-``` ts title="amplify/data/resource.ts"
-
+```ts title="amplify/data/resource.ts"
 const schema = a.schema({
   Todo: a
     .model({