diff --git a/docs/reference/config.md b/docs/reference/config.md index 45f63e89b..da6f7387a 100644 --- a/docs/reference/config.md +++ b/docs/reference/config.md @@ -1390,6 +1390,12 @@ nav_order: 2 |batchSize|Default read ahead to enable for subscriptions that do not explicitly configure readahead|`int`|`50` |batchTimeout|Default batch timeout|`int`|`50ms` +## subscription.events + +|Key|Description|Type|Default Value| +|---|-----------|----|-------------| +|maxScanLength|The maximum number of events a search for historical events matching a subscription will index from the database|`int`|`1000` + ## subscription.retry |Key|Description|Type|Default Value| diff --git a/docs/swagger/swagger.yaml b/docs/swagger/swagger.yaml index ff978ec5b..8c025adb2 100644 --- a/docs/swagger/swagger.yaml +++ b/docs/swagger/swagger.yaml @@ -29350,6 +29350,206 @@ paths: description: "" tags: - Non-Default Namespace + /namespaces/{ns}/subscriptions/{subid}/events: + get: + description: Gets a collection of events filtered by the subscription for further + filtering + operationId: getSubscriptionEventsFilteredNamespace + parameters: + - description: The subscription ID + in: path + name: subid + required: true + schema: + type: string + - description: The namespace which scopes this request + in: path + name: ns + required: true + schema: + example: default + type: string + - description: The sequence ID in the raw event stream to start indexing through + events from. Leave blank to start indexing from the most recent events + in: query + name: startsequence + schema: + type: string + - description: The sequence ID in the raw event stream to stop indexing through + events at. Leave blank to start indexing from the most recent events + in: query + name: endsequence + schema: + type: string + - description: Server-side request timeout (milliseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 2m0s + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: correlator + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: created + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: id + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: reference + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: sequence + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: topic + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: tx + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: type + schema: + type: string + - description: Sort field. For multi-field sort use comma separated values (or + multiple query values) with '-' prefix for descending + in: query + name: sort + schema: + type: string + - description: Ascending sort order (overrides all fields in a multi-field sort) + in: query + name: ascending + schema: + type: string + - description: Descending sort order (overrides all fields in a multi-field + sort) + in: query + name: descending + schema: + type: string + - description: 'The number of records to skip (max: 1,000). 
Unsuitable for bulk + operations' + in: query + name: skip + schema: + type: string + - description: 'The maximum number of records to return (max: 1,000)' + in: query + name: limit + schema: + example: "25" + type: string + - description: Return a total count as well as items (adds extra database processing) + in: query + name: count + schema: + type: string + responses: + "200": + content: + application/json: + schema: + items: + properties: + correlator: + description: For message events, this is the 'header.cid' field + from the referenced message. For certain other event types, + a secondary object is referenced such as a token pool + format: uuid + type: string + created: + description: The time the event was emitted. Not guaranteed + to be unique, or to increase between events in the same order + as the final sequence events are delivered to your application. + As such, the 'sequence' field should be used instead of the + 'created' field for querying events in the exact order they + are delivered to applications + format: date-time + type: string + id: + description: The UUID assigned to this event by your local FireFly + node + format: uuid + type: string + namespace: + description: The namespace of the event. Your application must + subscribe to events within a namespace + type: string + reference: + description: The UUID of an resource that is the subject of + this event. The event type determines what type of resource + is referenced, and whether this field might be unset + format: uuid + type: string + sequence: + description: A sequence indicating the order in which events + are delivered to your application. Assure to be unique per + event in your local FireFly database (unlike the created timestamp) + format: int64 + type: integer + topic: + description: A stream of information this event relates to. + For message confirmation events, a separate event is emitted + for each topic in the message. For blockchain events, the + listener specifies the topic. Rules exist for how the topic + is set for other event types + type: string + tx: + description: The UUID of a transaction that is event is part + of. Not all events are part of a transaction + format: uuid + type: string + type: + description: All interesting activity in FireFly is emitted + as a FireFly event, of a given type. 
The 'type' combined with + the 'reference' can be used to determine how to process the + event within your application + enum: + - transaction_submitted + - message_confirmed + - message_rejected + - datatype_confirmed + - identity_confirmed + - identity_updated + - token_pool_confirmed + - token_pool_op_failed + - token_transfer_confirmed + - token_transfer_op_failed + - token_approval_confirmed + - token_approval_op_failed + - contract_interface_confirmed + - contract_api_confirmed + - blockchain_event_received + - blockchain_invoke_op_succeeded + - blockchain_invoke_op_failed + - blockchain_contract_deploy_op_succeeded + - blockchain_contract_deploy_op_failed + type: string + type: object + type: array + description: Success + default: + description: "" + tags: + - Non-Default Namespace /namespaces/{ns}/tokens/accounts: get: description: Gets a list of token accounts @@ -38265,6 +38465,199 @@ paths: description: "" tags: - Default Namespace + /subscriptions/{subid}/events: + get: + description: Gets a collection of events filtered by the subscription for further + filtering + operationId: getSubscriptionEventsFiltered + parameters: + - description: The subscription ID + in: path + name: subid + required: true + schema: + type: string + - description: The sequence ID in the raw event stream to start indexing through + events from. Leave blank to start indexing from the most recent events + in: query + name: startsequence + schema: + type: string + - description: The sequence ID in the raw event stream to stop indexing through + events at. Leave blank to start indexing from the most recent events + in: query + name: endsequence + schema: + type: string + - description: Server-side request timeout (milliseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 2m0s + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: correlator + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: created + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: id + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: reference + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: sequence + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: topic + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: tx + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: type + schema: + type: string + - description: Sort field. For multi-field sort use comma separated values (or + multiple query values) with '-' prefix for descending + in: query + name: sort + schema: + type: string + - description: Ascending sort order (overrides all fields in a multi-field sort) + in: query + name: ascending + schema: + type: string + - description: Descending sort order (overrides all fields in a multi-field + sort) + in: query + name: descending + schema: + type: string + - description: 'The number of records to skip (max: 1,000). 
Unsuitable for bulk + operations' + in: query + name: skip + schema: + type: string + - description: 'The maximum number of records to return (max: 1,000)' + in: query + name: limit + schema: + example: "25" + type: string + - description: Return a total count as well as items (adds extra database processing) + in: query + name: count + schema: + type: string + responses: + "200": + content: + application/json: + schema: + items: + properties: + correlator: + description: For message events, this is the 'header.cid' field + from the referenced message. For certain other event types, + a secondary object is referenced such as a token pool + format: uuid + type: string + created: + description: The time the event was emitted. Not guaranteed + to be unique, or to increase between events in the same order + as the final sequence events are delivered to your application. + As such, the 'sequence' field should be used instead of the + 'created' field for querying events in the exact order they + are delivered to applications + format: date-time + type: string + id: + description: The UUID assigned to this event by your local FireFly + node + format: uuid + type: string + namespace: + description: The namespace of the event. Your application must + subscribe to events within a namespace + type: string + reference: + description: The UUID of an resource that is the subject of + this event. The event type determines what type of resource + is referenced, and whether this field might be unset + format: uuid + type: string + sequence: + description: A sequence indicating the order in which events + are delivered to your application. Assure to be unique per + event in your local FireFly database (unlike the created timestamp) + format: int64 + type: integer + topic: + description: A stream of information this event relates to. + For message confirmation events, a separate event is emitted + for each topic in the message. For blockchain events, the + listener specifies the topic. Rules exist for how the topic + is set for other event types + type: string + tx: + description: The UUID of a transaction that is event is part + of. Not all events are part of a transaction + format: uuid + type: string + type: + description: All interesting activity in FireFly is emitted + as a FireFly event, of a given type. 
The 'type' combined with + the 'reference' can be used to determine how to process the + event within your application + enum: + - transaction_submitted + - message_confirmed + - message_rejected + - datatype_confirmed + - identity_confirmed + - identity_updated + - token_pool_confirmed + - token_pool_op_failed + - token_transfer_confirmed + - token_transfer_op_failed + - token_approval_confirmed + - token_approval_op_failed + - contract_interface_confirmed + - contract_api_confirmed + - blockchain_event_received + - blockchain_invoke_op_succeeded + - blockchain_invoke_op_failed + - blockchain_contract_deploy_op_succeeded + - blockchain_contract_deploy_op_failed + type: string + type: object + type: array + description: Success + default: + description: "" + tags: + - Default Namespace /tokens/accounts: get: description: Gets a list of token accounts diff --git a/go.work.sum b/go.work.sum index b24756223..491d220db 100644 --- a/go.work.sum +++ b/go.work.sum @@ -148,13 +148,36 @@ cloud.google.com/go/vpcaccess v1.5.0/go.mod h1:drmg4HLk9NkZpGfCmZ3Tz0Bwnm2+DKqVi cloud.google.com/go/webrisk v1.7.0/go.mod h1:mVMHgEYH0r337nmt1JyLthzMr6YxwN1aAIEc2fTcq7A= cloud.google.com/go/websecurityscanner v1.4.0/go.mod h1:ebit/Fp0a+FWu5j4JOmJEV8S8CzdTkAS77oDsiSqYWQ= cloud.google.com/go/workflows v1.9.0/go.mod h1:ZGkj1aFIOd9c8Gerkjjq7OW7I5+l6cSvT3ujaO/WwSA= +github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k= +github.com/Azure/azure-storage-blob-go v0.14.0/go.mod h1:SMqIBi+SuiQH32bvyjngEewEeXoPfKMgWlBDaYf6fck= +github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest/autorest/adal v0.9.16/go.mod h1:tGMin8I49Yij6AQ+rvV+Xa/zwxYQB5hmsd6DkfAx2+A= +github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= +github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= +github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apache/arrow/go/arrow v0.0.0-20211013220434-5962184e7a30/go.mod h1:Q7yQnSMnLvcXlZ8RV+jwz/6y1rQTqbX6C82SndT52Zs= +github.com/aws/aws-sdk-go v1.34.0/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/aws/aws-sdk-go-v2 v1.9.2/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4= +github.com/aws/aws-sdk-go-v2/credentials v1.4.3/go.mod h1:FNNC6nQZQUuyhq5aE5c7ata8o9e4ECGmS4lAXC7o1mQ= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.5.4/go.mod h1:Ex7XQmbFmgFHrjUX6TN3mApKW5Hglyga+F7wZHTtYhA= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.3.0/go.mod h1:v8ygadNyATSm6elwJ/4gzJwcFhri9RqS8skgHKiwXPU= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.2/go.mod h1:72HRZDLMtmVQiLG2tLfQcaWLCssELvGl+Zf2WVxMmR8= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.7.2/go.mod h1:np7TMuJNT83O0oDOSF8i4dF3dvGqA6hPYYo6YYkzgRA= +github.com/aws/aws-sdk-go-v2/service/s3 v1.16.1/go.mod 
h1:CQe/KvWV1AqRc65KqeJjrLzr5X2ijnFTTVzJW0VBRCI= +github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E= +github.com/btcsuite/btcd/btcec/v2 v2.1.3/go.mod h1:ctjw4H1kknNJmRN4iP1R7bTQ+v3GJkZBd6mui8ZsAZE= +github.com/cenkalti/backoff/v4 v4.1.2/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h4xcZ5GoxqC5SDxFQ8gwyZPKQoEzownBlhI80= github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= @@ -164,7 +187,13 @@ github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20220520190051-1e77728a1eaa/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cockroachdb/cockroach-go/v2 v2.1.1/go.mod h1:7NtUnP6eK+l6k483WSYNrq3Kb23bWV10IRV1TyeSpwM= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cznic/mathutil v0.0.0-20180504122225-ca4c9f2c1369/go.mod h1:e6NPNENfs9mPDVNRekM7lKScauxd5kXTr1Mfyig6TDM= +github.com/decred/dcrd/crypto/blake256 v1.0.1/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo= +github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= @@ -172,16 +201,31 @@ github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go. 
github.com/envoyproxy/go-control-plane v0.10.3/go.mod h1:fJJn/j26vwOu972OllsvAgJJM//w9BV6Fxbg2LuVd34= github.com/envoyproxy/protoc-gen-validate v0.6.7/go.mod h1:dyJXwwfPK2VSqiB9Klm1J6romD608Ba7Hij42vrOBCo= github.com/envoyproxy/protoc-gen-validate v0.6.13/go.mod h1:qEySVqXrEugbHKvmhI8ZqtQi75/RHSSRNpffvB4I6Bw= +github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/fsouza/fake-gcs-server v1.17.0/go.mod h1:D1rTE4YCyHFNa99oyJJ5HyclvN/0uQR+pM/VdlL83bw= +github.com/gabriel-vasile/mimetype v1.4.0/go.mod h1:fA8fi6KUiG7MgQQ+mEWotXoEOvmxRtOJlERCzSmRvr8= +github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gobuffalo/here v0.6.0/go.mod h1:wAG085dHOYqUpf+Ap+WOdrPTp5IYcDAs/x7PLa8Y5fM= +github.com/gocql/gocql v0.0.0-20210515062232-b7ef815b4556/go.mod h1:DL0ekTmBSTdlNF25Orwt/JMzqIq3EJ4MVa/J/uK64OY= +github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/flatbuffers v2.0.0+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-github/v39 v39.2.0/go.mod h1:C1s8C5aCC9L+JXIYpJM5GYytdX52vC1bLvHEF1IhBrE= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= @@ -200,35 +244,86 @@ github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK github.com/googleapis/gax-go/v2 v2.5.1/go.mod h1:h6B0KMMFNtI2ddbGJn3T3ZbwkeT6yqEF02fYlzkUCyo= github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= +github.com/gorilla/handlers v1.4.2/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod 
h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= +github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v1.2.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v1.14.0/go.mod h1:9mBNlny0UvkgJdCDvdVHYSjI+8tD2rnKK69Wz8ti++E= +github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3/v2 v2.3.2/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgx/v4 v4.18.1/go.mod h1:FydWkUyadDmdNH/mHnGob881GawxeEm7TcMCzkb+qQE= +github.com/jackc/pgx/v5 v5.3.1/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/k0kubun/pp v2.3.0+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg= +github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/ktrysmt/go-bitbucket v0.6.4/go.mod h1:9u0v3hsd2rqCHRIpbir1oP7F58uo5dq19sBYvuMoyQ4= github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= -github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI= -github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/markbates/pkger v0.15.1/go.mod h1:0JoVlrol20BSywW79rN3kdFFsE5xYM+rSCQDXbLhiuI= +github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E= +github.com/microsoft/go-mssqldb v1.0.0/go.mod h1:+4wZTUnz/SV6nffv+RRRB/ss8jPng5Sho2SmM1l2ts4= 
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mutecomm/go-sqlcipher/v4 v4.4.0/go.mod h1:PyN04SaWalavxRGH9E8ZftG6Ju7rsPrGmQRjrEaVpiY= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nakagami/firebirdsql v0.0.0-20190310045651-3c02a58cfed8/go.mod h1:86wM1zFnC6/uDBfZGNwB65O+pR2OFi5q/YQaEUid1qA= +github.com/neo4j/neo4j-go-driver v1.8.1-0.20200803113522-b626aa943eba/go.mod h1:ncO5VaFWh0Nrt+4KT4mOZboaczBZcLuHrG+/sUeP8gI= +github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/snowflakedb/gosnowflake v1.6.3/go.mod h1:6hLajn6yxuJ4xUHZegMekpq9rnQbGJ7TMwXjgTmA6lg= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/xanzy/go-gitlab v0.15.0/go.mod h1:8zdQa/ri1dfn8eS3Ir1SyfvOKlw7WBJ8DVThkpGiXrs= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= +github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE= +go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8= +go.etcd.io/etcd/client/pkg/v3 v3.5.5/go.mod h1:ggrwbk069qxpKPq8/FKkQ3Xq9y39kbFR4LnKszpRXeQ= +go.etcd.io/etcd/client/v2 v2.305.5/go.mod h1:zQjKllfqfBVyVStbt4FaosoX2iYd8fV/GRy/PbowgP4= +go.etcd.io/etcd/client/v3 v3.5.5/go.mod h1:aApjR4WGlSumpnJ2kloS75h6aHUmAyaPLjHMxpc7E7c= +go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io 
v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= +go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/exp v0.0.0-20230315142452-642cacee5cc0/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= @@ -424,4 +519,28 @@ google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsA google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/bson.v2 v2.0.0-20171018101713-d8c8987b8862/go.mod h1:VN8wuk/3Ksp8lVZ82HHf/MI1FHOBDt5bPK9VZ8DvymM= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= +modernc.org/b v1.0.0/go.mod h1:uZWcZfRj1BpYzfN9JTerzlNUnnPsV9O2ZA8JsRcubNg= +modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= +modernc.org/ccgo/v3 v3.16.6/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= +modernc.org/db v1.0.0/go.mod h1:kYD/cO29L/29RM0hXYl4i3+Q5VojL31kTUVpVJDw0s8= +modernc.org/file v1.0.0/go.mod h1:uqEokAEn1u6e+J45e54dsEA/pw4o7zLrA2GwyntZzjw= +modernc.org/fileutil v1.0.0/go.mod h1:JHsWpkrk/CnVV1H/eGlFf85BEpfkrp56ro8nojIq9Q8= +modernc.org/golex v1.0.0/go.mod h1:b/QX9oBD/LhixY6NDh+IdGv17hgB+51fET1i2kPSmvk= +modernc.org/internal v1.0.0/go.mod h1:VUD/+JAkhCpvkUitlEOnhpVxCgsBI90oTzSCRcqQVSM= +modernc.org/libc v1.16.7/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU= +modernc.org/lldb v1.0.0/go.mod h1:jcRvJGWfCGodDZz8BPwiKMJxGJngQ/5DrRapkQnLob8= +modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/memory v1.1.1/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= +modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= +modernc.org/ql v1.0.0/go.mod h1:xGVyrLIatPcO2C1JvI/Co8c0sr6y91HKFNy4pt9JXEY= +modernc.org/sortutil v1.1.0/go.mod h1:ZyL98OQHJgH9IEfN71VsamvJgrtRX9Dj2gX+vH86L1k= +modernc.org/sqlite v1.18.0/go.mod h1:B9fRWZacNxJBHoCJZQr1R54zhVn3fjfl0aszflrTSxY= +modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw= +modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= +modernc.org/zappy v1.0.0/go.mod h1:hHe+oGahLVII/aTTyWK/b53VDHMAGCBYYeZ9sn83HC4= diff --git 
a/internal/apiserver/route_get_subscription_events_filtered.go b/internal/apiserver/route_get_subscription_events_filtered.go new file mode 100644 index 000000000..d09a4d584 --- /dev/null +++ b/internal/apiserver/route_get_subscription_events_filtered.go @@ -0,0 +1,74 @@ +// Copyright © 2024 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "fmt" + "net/http" + "strconv" + + "github.com/hyperledger/firefly-common/pkg/ffapi" + "github.com/hyperledger/firefly-common/pkg/i18n" + "github.com/hyperledger/firefly/internal/coremsgs" + "github.com/hyperledger/firefly/pkg/core" + "github.com/hyperledger/firefly/pkg/database" +) + +var getSubscriptionEventsFiltered = &ffapi.Route{ + Name: "getSubscriptionEventsFiltered", + Path: "subscriptions/{subid}/events", + Method: http.MethodGet, + PathParams: []*ffapi.PathParam{ + {Name: "subid", Description: coremsgs.APIParamsSubscriptionID}, + }, + QueryParams: []*ffapi.QueryParam{ + {Name: "startsequence", IsBool: false, Description: coremsgs.APISubscriptionStartSequenceID}, + {Name: "endsequence", IsBool: false, Description: coremsgs.APISubscriptionEndSequenceID}, + }, + FilterFactory: database.EventQueryFactory, + Description: coremsgs.APIEndpointsGetSubscriptionEventsFiltered, + JSONInputValue: nil, + JSONOutputValue: func() interface{} { return []*core.Event{} }, + JSONOutputCodes: []int{http.StatusOK}, + Extensions: &coreExtensions{ + CoreJSONHandler: func(r *ffapi.APIRequest, cr *coreRequest) (output interface{}, err error) { + subscription, _ := cr.or.GetSubscriptionByID(cr.ctx, r.PP["subid"]) + var startSeq int + var endSeq int + + if r.QP["startsequence"] != "" { + startSeq, err = strconv.Atoi(r.QP["startsequence"]) + if err != nil { + return nil, i18n.NewError(cr.ctx, coremsgs.MsgSequenceIDDidNotParseToInt, fmt.Sprintf("startsequence: %s", r.QP["startsequence"])) + } + } else { + startSeq = -1 + } + + if r.QP["endsequence"] != "" { + endSeq, err = strconv.Atoi(r.QP["endsequence"]) + if err != nil { + return nil, i18n.NewError(cr.ctx, coremsgs.MsgSequenceIDDidNotParseToInt, fmt.Sprintf("endsequence: %s", r.QP["endsequence"])) + } + } else { + endSeq = -1 + } + + return r.FilterResult(cr.or.GetSubscriptionEventsHistorical(cr.ctx, subscription, r.Filter, startSeq, endSeq)) + }, + }, +} diff --git a/internal/apiserver/route_get_subscription_events_filtered_test.go b/internal/apiserver/route_get_subscription_events_filtered_test.go new file mode 100644 index 000000000..fd480294d --- /dev/null +++ b/internal/apiserver/route_get_subscription_events_filtered_test.go @@ -0,0 +1,85 @@ +// Copyright © 2024 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
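
For reference, a minimal client-side sketch of exercising the new route defined above. The path and the startsequence/endsequence query parameters are taken directly from the route definition; the base URL (a local node on http://127.0.0.1:5000), the "default" namespace, and the subscription ID are illustrative assumptions, not values from this change set.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical subscription ID; in practice this comes from GET /subscriptions.
	subID := "abcd12345"

	// Path and query parameters come from the route above; host/port and the
	// "default" namespace are assumptions about a typical local FireFly node.
	url := fmt.Sprintf(
		"http://127.0.0.1:5000/api/v1/namespaces/default/subscriptions/%s/events?startsequence=100&endsequence=200",
		subID,
	)

	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body)) // expect 200 and a JSON array of events
}
```

Leaving both query parameters off corresponds to the -1/-1 defaults in the handler above, i.e. indexing back from the most recent events.
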
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/pkg/core" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestGetSubscriptionEventsFiltered(t *testing.T) { + o, r := newTestAPIServer() + o.On("Authorize", mock.Anything, mock.Anything).Return(nil) + req := httptest.NewRequest("GET", "/api/v1/namespaces/mynamespace/subscriptions/abcd12345/events?startsequence=100&endsequence=200", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + o.On("GetSubscriptionByID", mock.Anything, "abcd12345"). + Return(&core.Subscription{}, nil) + o.On("GetSubscriptionEventsHistorical", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything). + Return([]*core.EnrichedEvent{}, nil, nil) + + r.ServeHTTP(res, req) + assert.Equal(t, 200, res.Result().StatusCode) +} + +func TestGetSubscriptionEventsFilteredStartSequenceIDDoesNotParse(t *testing.T) { + o, r := newTestAPIServer() + o.On("Authorize", mock.Anything, mock.Anything).Return(nil) + req := httptest.NewRequest("GET", "/api/v1/namespaces/mynamespace/subscriptions/abcd12345/events?startsequence=helloworld", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + o.On("GetSubscriptionByID", mock.Anything, "abcd12345"). + Return(&core.Subscription{}, nil) + + r.ServeHTTP(res, req) + assert.Equal(t, 400, res.Result().StatusCode) + assert.Contains(t, res.Body.String(), "helloworld") +} + +func TestGetSubscriptionEventsFilteredEndSequenceIDDoesNotParse(t *testing.T) { + o, r := newTestAPIServer() + o.On("Authorize", mock.Anything, mock.Anything).Return(nil) + req := httptest.NewRequest("GET", "/api/v1/namespaces/mynamespace/subscriptions/abcd12345/events?endsequence=helloworld", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + o.On("GetSubscriptionByID", mock.Anything, "abcd12345"). + Return(&core.Subscription{}, nil) + + r.ServeHTTP(res, req) + assert.Equal(t, 400, res.Result().StatusCode) + assert.Contains(t, res.Body.String(), "helloworld") +} + +func TestGetSubscriptionEventsFilteredNoSequenceIDsProvided(t *testing.T) { + o, r := newTestAPIServer() + o.On("Authorize", mock.Anything, mock.Anything).Return(nil) + req := httptest.NewRequest("GET", "/api/v1/namespaces/mynamespace/subscriptions/abcd12345/events", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + o.On("GetSubscriptionByID", mock.Anything, "abcd12345"). + Return(&core.Subscription{}, nil) + o.On("GetSubscriptionEventsHistorical", mock.Anything, mock.Anything, mock.Anything, -1, -1). + Return([]*core.EnrichedEvent{}, nil, nil) + + r.ServeHTTP(res, req) + assert.Equal(t, 200, res.Result().StatusCode) +} \ No newline at end of file diff --git a/internal/apiserver/routes.go b/internal/apiserver/routes.go index 58e6e6166..28c960589 100644 --- a/internal/apiserver/routes.go +++ b/internal/apiserver/routes.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. 
+// Copyright © 2024 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -110,6 +110,7 @@ var routes = append( getStatusBatchManager, getSubscriptionByID, getSubscriptions, + getSubscriptionEventsFiltered, getTokenAccountPools, getTokenAccounts, getTokenApprovals, diff --git a/internal/apiserver/server.go b/internal/apiserver/server.go index 46a57ba22..fb61b64b8 100644 --- a/internal/apiserver/server.go +++ b/internal/apiserver/server.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -23,8 +23,6 @@ import ( "strings" "time" - "github.com/prometheus/client_golang/prometheus/promhttp" - "github.com/gorilla/mux" "github.com/hyperledger/firefly-common/pkg/config" "github.com/hyperledger/firefly-common/pkg/ffapi" @@ -40,6 +38,7 @@ import ( "github.com/hyperledger/firefly/internal/metrics" "github.com/hyperledger/firefly/internal/namespace" "github.com/hyperledger/firefly/internal/orchestrator" + "github.com/prometheus/client_golang/prometheus/promhttp" ) var ( diff --git a/internal/coreconfig/coreconfig.go b/internal/coreconfig/coreconfig.go index 6ae2b8a78..401f69c1a 100644 --- a/internal/coreconfig/coreconfig.go +++ b/internal/coreconfig/coreconfig.go @@ -339,6 +339,8 @@ var ( SubscriptionsRetryMaxDelay = ffc("subscription.retry.maxDelay") // SubscriptionsRetryFactor the backoff factor to use for retry of database operations SubscriptionsRetryFactor = ffc("subscription.retry.factor") + // SubscriptionMaxHistoricalEventScanLength the maximum amount of historical events we scan for in the DB when indexing through old events against a subscription + SubscriptionMaxHistoricalEventScanLength = ffc("subscription.events.maxScanLength") // TransactionWriterCount TransactionWriterCount = ffc("transaction.writer.count") // TransactionWriterBatchTimeout @@ -459,6 +461,7 @@ func setDefaults() { viper.SetDefault(string(SubscriptionsRetryInitialDelay), "250ms") viper.SetDefault(string(SubscriptionsRetryMaxDelay), "30s") viper.SetDefault(string(SubscriptionsRetryFactor), 2.0) + viper.SetDefault(string(SubscriptionMaxHistoricalEventScanLength), 1000) viper.SetDefault(string(TransactionWriterBatchMaxTransactions), 100) viper.SetDefault(string(TransactionWriterBatchTimeout), "10ms") viper.SetDefault(string(TransactionWriterCount), 5) diff --git a/internal/coremsgs/en_api_translations.go b/internal/coremsgs/en_api_translations.go index 677d768f6..39ed59e83 100644 --- a/internal/coremsgs/en_api_translations.go +++ b/internal/coremsgs/en_api_translations.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. 
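
A minimal sketch of how the new `subscription.events.maxScanLength` default behaves, mirroring the `viper.SetDefault` call added to `setDefaults()` above; the standalone viper usage and the override value shown in the comment are illustrative assumptions, not FireFly code.

```go
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// Mirrors the default added in setDefaults() above.
	viper.SetDefault("subscription.events.maxScanLength", 1000)

	// An operator could override this in the core config file, e.g.:
	//   subscription:
	//     events:
	//       maxScanLength: 5000
	fmt.Println(viper.GetInt("subscription.events.maxScanLength")) // 1000 unless overridden
}
```
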
// // SPDX-License-Identifier: Apache-2.0 // @@ -133,6 +133,7 @@ var ( APIEndpointsGetWebSockets = ffm("api.endpoints.getStatusWebSockets", "Gets a list of the current WebSocket connections to this node") APIEndpointsGetStatus = ffm("api.endpoints.getStatus", "Gets the status of this namespace") APIEndpointsGetSubscriptionByID = ffm("api.endpoints.getSubscriptionByID", "Gets a subscription by its ID") + APIEndpointsGetSubscriptionEventsFiltered = ffm("api.endpoints.getSubscriptionEventsFiltered", "Gets a collection of events filtered by the subscription for further filtering") APIEndpointsGetSubscriptions = ffm("api.endpoints.getSubscriptions", "Gets a list of subscriptions") APIEndpointsGetTokenAccountPools = ffm("api.endpoints.getTokenAccountPools", "Gets a list of token pools that contain a given token account key") APIEndpointsGetTokenAccounts = ffm("api.endpoints.getTokenAccounts", "Gets a list of token accounts") @@ -208,4 +209,7 @@ var ( APISmartContractDetails = ffm("api.smartContractDetails", "Additional smart contract details") APISmartContractDetailsKey = ffm("api.smartContractDetailsKey", "Key") APISmartContractDetailsValue = ffm("api.smartContractDetailsValue", "Value") + + APISubscriptionStartSequenceID = ffm("api.startsequenceid", "The sequence ID in the raw event stream to start indexing through events from. Leave blank to start indexing from the most recent events") + APISubscriptionEndSequenceID = ffm("api.endsequenceid", "The sequence ID in the raw event stream to stop indexing through events at. Leave blank to start indexing from the most recent events") ) diff --git a/internal/coremsgs/en_config_descriptions.go b/internal/coremsgs/en_config_descriptions.go index b766b72b6..1015b7571 100644 --- a/internal/coremsgs/en_config_descriptions.go +++ b/internal/coremsgs/en_config_descriptions.go @@ -383,9 +383,10 @@ var ( ConfigPluginSharedstorageIpfsGatewayURL = ffc("config.plugins.sharedstorage[].ipfs.gateway.url", "The URL for the IPFS Gateway", urlStringType) ConfigPluginSharedstorageIpfsGatewayProxyURL = ffc("config.plugins.sharedstorage[].ipfs.gateway.proxy.url", "Optional HTTP proxy server to use when connecting to the IPFS Gateway", urlStringType) - ConfigSubscriptionMax = ffc("config.subscription.max", "The maximum number of pre-defined subscriptions that can exist (note for high fan-out consider connecting a dedicated pub/sub broker to the dispatcher)", i18n.IntType) - ConfigSubscriptionDefaultsBatchSize = ffc("config.subscription.defaults.batchSize", "Default read ahead to enable for subscriptions that do not explicitly configure readahead", i18n.IntType) - ConfigSubscriptionDefaultsBatchTimeout = ffc("config.subscription.defaults.batchTimeout", "Default batch timeout", i18n.IntType) + ConfigSubscriptionMax = ffc("config.subscription.max", "The maximum number of pre-defined subscriptions that can exist (note for high fan-out consider connecting a dedicated pub/sub broker to the dispatcher)", i18n.IntType) + ConfigSubscriptionDefaultsBatchSize = ffc("config.subscription.defaults.batchSize", "Default read ahead to enable for subscriptions that do not explicitly configure readahead", i18n.IntType) + ConfigSubscriptionDefaultsBatchTimeout = ffc("config.subscription.defaults.batchTimeout", "Default batch timeout", i18n.IntType) + ConfigSubscriptionMaxHistoricalEventScanLength = ffc("config.subscription.events.maxScanLength", "The maximum number of events a search for historical events matching a subscription will index from the database", i18n.IntType) ConfigTokensName = 
ffc("config.tokens[].name", "A name to identify this token plugin", i18n.StringType) ConfigTokensPlugin = ffc("config.tokens[].plugin", "The type of the token plugin to use", i18n.StringType) diff --git a/internal/coremsgs/en_error_messages.go b/internal/coremsgs/en_error_messages.go index a2a82675a..7ad5cec26 100644 --- a/internal/coremsgs/en_error_messages.go +++ b/internal/coremsgs/en_error_messages.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -27,278 +27,280 @@ var ffe = func(key, translation string, statusHint ...int) i18n.ErrorMessageKey //revive:disable var ( - MsgConfigFailed = ffe("FF10101", "Failed to read config") - MsgJSONDecodeFailed = ffe("FF10103", "Failed to decode input JSON") - MsgTLSConfigFailed = ffe("FF10105", "Failed to initialize TLS configuration") - MsgWebsocketClientError = ffe("FF10108", "Error received from WebSocket client: %s") - Msg404NotFound = ffe("FF10109", "Not found", 404) - MsgUnknownBlockchainPlugin = ffe("FF10110", "Unknown blockchain plugin: %s") - MsgEthConnectorRESTErr = ffe("FF10111", "Error from ethereum connector: %s") - MsgDBInitFailed = ffe("FF10112", "Database initialization failed") - MsgDBQueryBuildFailed = ffe("FF10113", "Database query builder failed") - MsgDBBeginFailed = ffe("FF10114", "Database begin transaction failed") - MsgDBQueryFailed = ffe("FF10115", "Database query failed") - MsgDBInsertFailed = ffe("FF10116", "Database insert failed") - MsgDBUpdateFailed = ffe("FF10117", "Database update failed") - MsgDBDeleteFailed = ffe("FF10118", "Database delete failed") - MsgDBCommitFailed = ffe("FF10119", "Database commit failed") - MsgDBMissingJoin = ffe("FF10120", "Database missing expected join entry in table '%s' for id '%s'") - MsgDBReadErr = ffe("FF10121", "Database resultset read error from table '%s'") - MsgUnknownDatabasePlugin = ffe("FF10122", "Unknown database plugin '%s'") - MsgNullDataReferenceID = ffe("FF10123", "Data id is null in message data reference %d") - MsgDupDataReferenceID = ffe("FF10124", "Duplicate data ID in message '%s'", 409) - MsgScanFailed = ffe("FF10125", "Failed to restore type '%T' into '%T'") - MsgUnregisteredBatchType = ffe("FF10126", "Unregistered batch type '%s'") - MsgBatchDispatchTimeout = ffe("FF10127", "Timed out dispatching work to batch") - MsgInitializationNilDepError = ffe("FF10128", "Initialization failed in %s due to unmet dependency") - MsgNilResponseNon204 = ffe("FF10129", "No output from API call") - MsgDataNotFound = ffe("FF10133", "Data not found for message %s", 400) - MsgUnknownSharedStoragePlugin = ffe("FF10134", "Unknown Shared Storage plugin '%s'") - MsgIPFSHashDecodeFailed = ffe("FF10135", "Failed to decode IPFS hash into 32byte value '%s'") - MsgIPFSRESTErr = ffe("FF10136", "Error from IPFS: %s") - MsgSerializationFailed = ffe("FF10137", "Serialization failed") - MsgMissingPluginConfig = ffe("FF10138", "Missing configuration '%s' for %s") - MsgMissingDataHashIndex = ffe("FF10139", "Missing data hash for index '%d' in message", 400) - MsgInvalidEthAddress = ffe("FF10141", "Supplied ethereum address is invalid", 400) - MsgInvalidTezosAddress = ffe("FF10142", "Supplied tezos address is invalid", 400) - Msg404NoResult = ffe("FF10143", "No result found", 404) - MsgUnsupportedSQLOpInFilter = ffe("FF10150", "No SQL mapping implemented for filter operator '%s'", 400) - MsgFilterSortDesc = ffe("FF10154", "Sort field. 
For multi-field sort use comma separated values (or multiple query values) with '-' prefix for descending") - MsgContextCanceled = ffe("FF00154", "Context cancelled") - MsgDBMigrationFailed = ffe("FF10163", "Database migration failed") - MsgHashMismatch = ffe("FF10164", "Hash mismatch") - MsgDefaultNamespaceNotFound = ffe("FF10166", "namespaces.default '%s' must be included in the namespaces.predefined configuration") - MsgEventTypesParseFail = ffe("FF10168", "Unable to parse list of event types", 400) - MsgUnknownEventType = ffe("FF10169", "Unknown event type '%s'", 400) - MsgIDMismatch = ffe("FF10170", "ID mismatch") - MsgRegexpCompileFailed = ffe("FF10171", "Unable to compile '%s' regexp '%s'") - MsgUnknownEventTransportPlugin = ffe("FF10172", "Unknown event transport plugin: %s") - MsgWSConnectionNotActive = ffe("FF10173", "Websocket connection '%s' no longer active") - MsgWSSubAlreadyInFlight = ffe("FF10174", "Websocket subscription '%s' already has a message in flight") - MsgWSMsgSubNotMatched = ffe("FF10175", "Acknowledgment does not match an inflight event + subscription") - MsgWSClientSentInvalidData = ffe("FF10176", "Invalid data") - MsgWSClientUnknownAction = ffe("FF10177", "Unknown action '%s'") - MsgWSInvalidStartAction = ffe("FF10178", "A start action must set namespace and either a name or ephemeral=true") - MsgWSAutoAckChanged = ffe("FF10179", "The autoack option must be set consistently on all start requests") - MsgWSAutoAckEnabled = ffe("FF10180", "The autoack option is enabled on this connection") - MsgConnSubscriptionNotStarted = ffe("FF10181", "Subscription %v is not started on connection") - MsgDispatcherClosing = ffe("FF10182", "Event dispatcher closing") - MsgMaxFilterSkip = ffe("FF10183", "You have reached the maximum pagination limit for this query (%d)", 400) - MsgMaxFilterLimit = ffe("FF10184", "Your query exceeds the maximum filter limit (%d)", 400) - MsgAPIServerStaticFail = ffe("FF10185", "An error occurred loading static content", 500) - MsgEventListenerClosing = ffe("FF10186", "Event listener closing") - MsgNamespaceDoesNotExist = ffe("FF10187", "Namespace does not exist", 404) - MsgInvalidSubscription = ffe("FF10189", "Invalid subscription", 400) - MsgMismatchedTransport = ffe("FF10190", "Connection ID '%s' appears not to be unique between transport '%s' and '%s'", 400) - MsgInvalidFirstEvent = ffe("FF10191", "Invalid firstEvent definition - must be 'newest','oldest' or a sequence number", 400) - MsgNumberMustBeGreaterEqual = ffe("FF10192", "Number must be greater than or equal to %d", 400) - MsgAlreadyExists = ffe("FF10193", "A %s with name '%s:%s' already exists", 409) - MsgJSONValidatorBadRef = ffe("FF10194", "Cannot use JSON validator for data with type '%s' and validator reference '%v'", 400) - MsgDatatypeNotFound = ffe("FF10195", "Datatype '%v' not found", 400) - MsgSchemaLoadFailed = ffe("FF10196", "Datatype '%s' schema invalid", 400) - MsgDataCannotBeValidated = ffe("FF10197", "Data cannot be validated", 400) - MsgJSONDataInvalidPerSchema = ffe("FF10198", "Data does not conform to the JSON schema of datatype '%s': %s", 400) - MsgDataValueIsNull = ffe("FF10199", "Data value is null", 400) - MsgDataInvalidHash = ffe("FF10201", "Invalid data: hashes do not match Hash=%s Expected=%s", 400) - MsgDataReferenceUnresolvable = ffe("FF10204", "Data reference %d cannot be resolved", 400) - MsgDataMissing = ffe("FF10205", "Data entry %d has neither 'id' to refer to existing data, or 'value' to include in-line JSON data", 400) - MsgAuthorInvalid = 
ffe("FF10206", "Invalid author specified", 400) - MsgMessageNotFound = ffe("FF10207", "Message '%s' not found", 404) - MsgBatchNotFound = ffe("FF10209", "Batch '%s' not found for message", 404) - MsgMessageTXNotSet = ffe("FF10210", "Message '%s' does not have an assigned transaction", 404) - MsgOwnerMissing = ffe("FF10211", "Owner missing", 400) - MsgUnknownIdentityPlugin = ffe("FF10212", "Unknown Identity plugin '%s'") - MsgUnknownDataExchangePlugin = ffe("FF10213", "Unknown Data Exchange plugin '%s'") - MsgParentIdentityNotFound = ffe("FF10214", "Identity '%s' not found in identity chain for %s '%s'") - MsgInvalidSigningIdentity = ffe("FF10215", "Invalid signing identity") - MsgNodeAndOrgIDMustBeSet = ffe("FF10216", "node.name, org.name and org.key must be configured first", 409) - MsgBlobStreamingFailed = ffe("FF10217", "Blob streaming terminated with error", 500) - MsgNodeNotFound = ffe("FF10224", "Node with name or identity '%s' not found", 400) - MsgLocalNodeNotSet = ffe("FF10225", "Unable to resolve the local node. Please ensure node.name is configured", 500) - MsgGroupNotFound = ffe("FF10226", "Group '%s' not found", 404) - MsgDXRESTErr = ffe("FF10229", "Error from data exchange: %s") - MsgInvalidHex = ffe("FF10231", "Invalid hex supplied", 400) - MsgInvalidWrongLenB32 = ffe("FF00107", "Byte length must be 32 (64 hex characters)", 400) - MsgNodeNotFoundInOrg = ffe("FF10233", "Unable to find any nodes owned by org '%s', or parent orgs", 400) - MsgDXBadResponse = ffe("FF10237", "Unexpected '%s' in data exchange response: %s") - MsgDXBadHash = ffe("FF10238", "Unexpected hash returned from data exchange upload. Hash=%s Expected=%s") - MsgBlobNotFound = ffe("FF10239", "No blob has been uploaded or confirmed received, with hash=%s", 404) - MsgDownloadBlobFailed = ffe("FF10240", "Error download blob with reference '%s' from local data exchange") - MsgDataDoesNotHaveBlob = ffe("FF10241", "Data does not have a blob attachment", 404) - MsgWebhookURLEmpty = ffe("FF10242", "Webhook subscription option 'url' cannot be empty", 400) - MsgWebhookInvalidStringMap = ffe("FF10243", "Webhook subscription option '%s' must be map of string values. %s=%T", 400) - MsgWebsocketsNoData = ffe("FF10244", "Websockets subscriptions do not support streaming the full data payload, just the references (withData must be false)", 400) - MsgWebhooksWithData = ffe("FF10245", "Webhook subscriptions require the full data payload (withData must be true)", 400) - MsgWebhooksReplyBadJSON = ffe("FF10257", "Failed to process reply from webhook as JSON") - MsgRequestTimeout = ffe("FF10260", "The request with id '%s' timed out after %.2fms", 408) - MsgRequestReplyTagRequired = ffe("FF10261", "For request messages 'header.tag' must be set on the request message to route it to a suitable responder", 400) - MsgRequestCannotHaveCID = ffe("FF10262", "For request messages 'header.cid' must be unset", 400) - MsgSystemTransportInternal = ffe("FF10266", "You cannot create subscriptions on the system events transport") - MsgFilterCountNotSupported = ffe("FF10267", "This query does not support generating a count of all results") - MsgRejected = ffe("FF10269", "Message with ID '%s' was rejected. 
Please check the FireFly logs for more information") - MsgRequestMustBePrivate = ffe("FF10271", "For request messages you must specify a group of private recipients", 400) - MsgUnknownTokensPlugin = ffe("FF10272", "Unknown tokens plugin '%s'", 400) - MsgMissingTokensPluginConfig = ffe("FF10273", "Invalid tokens configuration - name and plugin are required", 400) - MsgTokensRESTErr = ffe("FF10274", "Error from tokens service: %s") - MsgTokenPoolDuplicate = ffe("FF10275", "Duplicate token pool: %s", 409) - MsgTokenPoolRejected = ffe("FF10276", "Token pool with ID '%s' was rejected. Please check the FireFly logs for more information") - MsgIdentityNotFoundByString = ffe("FF10277", "Identity could not be resolved via lookup string '%s'") - MsgAuthorOrgSigningKeyMismatch = ffe("FF10279", "Author organization '%s' is not associated with signing key '%s'") - MsgCannotTransferToSelf = ffe("FF10280", "From and to addresses must be different", 400) - MsgLocalOrgNotSet = ffe("FF10281", "Unable to resolve the local root org. Please ensure org.name is configured", 500) - MsgTezosconnectRESTErr = ffe("FF10283", "Error from tezos connector: %s") - MsgFabconnectRESTErr = ffe("FF10284", "Error from fabconnect: %s") - MsgInvalidIdentity = ffe("FF10285", "Supplied Fabric signer identity is invalid", 400) - MsgFailedToDecodeCertificate = ffe("FF10286", "Failed to decode certificate: %s", 500) - MsgInvalidMessageType = ffe("FF10287", "Invalid message type - allowed types are %s", 400) - MsgWSClosed = ffe("FF10290", "Websocket closed") - MsgFieldNotSpecified = ffe("FF10292", "Field '%s' must be specified", 400) - MsgTokenPoolNotActive = ffe("FF10293", "Token pool is not yet activated") - MsgHistogramCollectionParam = ffe("FF10297", "Collection to fetch") - MsgInvalidNumberOfIntervals = ffe("FF10298", "Number of time intervals must be between %d and %d", 400) - MsgInvalidChartNumberParam = ffe("FF10299", "Invalid %s. 
Must be a number.", 400) - MsgHistogramInvalidTimes = ffe("FF10300", "Start time must be before end time", 400) - MsgUnsupportedCollection = ffe("FF10301", "%s collection is not supported", 400) - MsgContractInterfaceExists = ffe("FF10302", "A contract interface already exists in the namespace: '%s' with name: '%s' and version: '%s'", 409) - MsgContractInterfaceNotFound = ffe("FF10303", "Contract interface %s not found", 404) - MsgContractMissingInputArgument = ffe("FF10304", "Missing required input argument '%s'", 400) - MsgContractWrongInputType = ffe("FF10305", "Input '%v' is of type '%v' not expected type of '%v'", 400) - MsgContractMissingInputField = ffe("FF10306", "Expected object of type '%v' to contain field named '%v' but it was missing", 400) - MsgContractMapInputType = ffe("FF10307", "Unable to map input type '%v' to known FireFly type - was expecting '%v'", 400) - MsgContractByteDecode = ffe("FF10308", "Unable to decode field '%v' as bytes", 400) - MsgContractInternalType = ffe("FF10309", "Input '%v' of type '%v' is not compatible blockchain internalType of '%v'", 400) - MsgContractLocationInvalid = ffe("FF10310", "Failed to validate contract location: %v", 400) - MsgContractParamInvalid = ffe("FF10311", "Failed to validate contract param: %v", 400) - MsgContractListenerNameExists = ffe("FF10312", "A contract listener already exists in the namespace: '%s' with name: '%s'", 409) - MsgContractMethodNotSet = ffe("FF10313", "Either an interface reference and method path, or in-line method definition, must be supplied on invoke contract request", 400) - MsgContractMethodResolveError = ffe("FF10315", "Unable to resolve contract method: %s", 400) - MsgContractLocationExists = ffe("FF10316", "The contract location cannot be changed after it is created", 400) - MsgListenerNoEvent = ffe("FF10317", "Either an interface reference and event path, or in-line event definition must be supplied when creating a contract listener", 400) - MsgListenerEventNotFound = ffe("FF10318", "No event was found in namespace '%s' with id '%s'", 400) - MsgEventNameMustBeSet = ffe("FF10319", "Event name must be set", 400) - MsgMethodNameMustBeSet = ffe("FF10320", "Method name must be set", 400) - MsgContractEventResolveError = ffe("FF10321", "Unable to resolve contract event", 400) - MsgQueryOpUnsupportedMod = ffe("FF10322", "Operation '%s' on '%s' does not support modifiers", 400) - MsgDXBadSize = ffe("FF10323", "Unexpected size returned from data exchange upload. 
Size=%d Expected=%d") - MsgTooLargeBroadcast = ffe("FF10327", "Message size %.2fkb is too large for the max broadcast batch size of %.2fkb", 400) - MsgTooLargePrivate = ffe("FF10328", "Message size %.2fkb is too large for the max private message size of %.2fkb", 400) - MsgManifestMismatch = ffe("FF10329", "Manifest mismatch overriding '%s' status as failure: '%s'", 400) - MsgFFIValidationFail = ffe("FF10331", "Field '%s' does not validate against the provided schema", 400) - MsgFFISchemaParseFail = ffe("FF10332", "Failed to parse schema for param '%s'", 400) - MsgFFISchemaCompileFail = ffe("FF10333", "Failed compile schema for param '%s'", 400) - MsgPluginInitializationFailed = ffe("FF10334", "Plugin initialization error", 500) - MsgUnknownTransactionType = ffe("FF10336", "Unknown transaction type '%s'", 400) - MsgGoTemplateCompileFailed = ffe("FF10337", "Go template compilation for '%s' failed: %s", 500) - MsgGoTemplateExecuteFailed = ffe("FF10338", "Go template execution for '%s' failed: %s", 500) - MsgAddressResolveFailed = ffe("FF10339", "Failed to resolve signing key string '%s': %s", 500) - MsgAddressResolveBadStatus = ffe("FF10340", "Failed to resolve signing key string '%s' [%d]: %s", 500) - MsgAddressResolveBadResData = ffe("FF10341", "Failed to resolve signing key string '%s' - invalid address returned '%s': %s", 500) - MsgDXNotInitialized = ffe("FF10342", "Data exchange is initializing") - MsgDBLockFailed = ffe("FF10345", "Database lock failed") - MsgFFIGenerationFailed = ffe("FF10346", "Error generating smart contract interface: %s", 400) - MsgFFIGenerationUnsupported = ffe("FF10347", "Smart contract interface generation is not supported by this blockchain plugin", 400) - MsgBlobHashMismatch = ffe("FF10348", "Blob hash mismatch sent=%s received=%s", 400) - MsgDIDResolverUnknown = ffe("FF10349", "DID resolver unknown for DID: %s", 400) - MsgIdentityNotOrg = ffe("FF10350", "Identity '%s' with DID '%s' is not an organization", 400) - MsgIdentityNotNode = ffe("FF10351", "Identity '%s' with DID '%s' is not a node", 400) - MsgBlockchainKeyNotSet = ffe("FF10352", "No blockchain key specified", 400) - MsgNoVerifierForIdentity = ffe("FF10353", "No %s verifier registered for identity %s", 400) - MsgNodeMissingBlockchainKey = ffe("FF10354", "No default signing key or organization signing key configured for this namespace", 400) - MsgAuthorRegistrationMismatch = ffe("FF10355", "Verifier '%s' cannot be used for signing with author '%s'. 
Verifier registered to '%s'", 400) - MsgAuthorMissingForKey = ffe("FF10356", "Key '%s' has not been registered by any identity, and a separate 'author' was not supplied", 404) - MsgAuthorIncorrectForRootReg = ffe("FF10357", "Author namespace '%s' and DID '%s' combination invalid for root organization registration", 400) - MsgKeyIdentityMissing = ffe("FF10358", "Identity owner of key '%s' not found", 500) - MsgIdentityChainLoop = ffe("FF10364", "Loop detected on identity %s in chain for %s (%s)", 400) - MsgInvalidIdentityParentType = ffe("FF10365", "Parent %s (%s) of type %s is invalid for child %s (%s) of type", 400) - MsgParentIdentityMissingClaim = ffe("FF10366", "Parent %s (%s) is invalid (missing claim)", 400) - MsgDXInfoMissingID = ffe("FF10367", "Data exchange endpoint info missing 'id' field", 500) - MsgEventNotFound = ffe("FF10370", "Event with name '%s' not found", 400) - MsgOperationNotSupported = ffe("FF10371", "Operation not supported: %s", 400) - MsgFailedToRetrieve = ffe("FF10372", "Failed to retrieve %s %s", 500) - MsgBlobMissingPublic = ffe("FF10373", "Blob for data %s missing public payload reference while flushing batch", 500) - MsgDBMultiRowConfigError = ffe("FF10374", "Database invalid configuration - using multi-row insert on DB plugin that does not support query syntax for input") - MsgDBNoSequence = ffe("FF10375", "Failed to retrieve sequence for insert row %d (could mean duplicate insert)", 500) - MsgDownloadSharedFailed = ffe("FF10376", "Error downloading data with reference '%s' from shared storage") - MsgDownloadBatchMaxBytes = ffe("FF10377", "Error downloading batch with reference '%s' from shared storage - maximum size limit reached") - MsgOperationDataIncorrect = ffe("FF10378", "Operation data type incorrect: %T", 400) - MsgDataMissingBlobHash = ffe("FF10379", "Blob for data %s cannot be transferred as it is missing a hash", 500) - MsgUnexpectedDXMessageType = ffe("FF10380", "Unexpected websocket event type from DX plugin: %s", 500) - MsgContractListenerExists = ffe("FF10383", "A contract listener already exists for this combination of topic + location + event", 409) - MsgInvalidOutputOption = ffe("FF10385", "invalid output option '%s'") - MsgInvalidPluginConfiguration = ffe("FF10386", "Invalid %s plugin configuration - name and type are required") - MsgReferenceMarkdownMissing = ffe("FF10387", "Reference markdown file missing: '%s'") - MsgFFSystemReservedName = ffe("FF10388", "Invalid namespace configuration - %s is a reserved name") - MsgInvalidNamespaceMode = ffe("FF10389", "Invalid %s namespace configuration - unknown mode") - MsgNamespaceUnknownPlugin = ffe("FF10390", "Invalid %s namespace configuration - unknown plugin %s") - MsgNamespaceWrongPluginsMultiparty = ffe("FF10391", "Invalid %s namespace configuration - multiparty mode requires database, blockchain, shared storage, and data exchange plugins") - MsgNamespaceNoDatabase = ffe("FF10392", "Invalid %s namespace configuration - a database plugin is required") - MsgNamespaceMultiplePluginType = ffe("FF10394", "Invalid %s namespace configuration - multiple %s plugins provided") - MsgDuplicatePluginName = ffe("FF10395", "Invalid plugin configuration - plugin with name %s already exists", 409) - MsgInvalidFireFlyContractIndex = ffe("FF10396", "No configuration found for FireFly contract at %s") - MsgUnrecognizedNetworkAction = ffe("FF10397", "Unrecognized network action: %s", 400) - MsgOverrideExistingFieldCustomOption = ffe("FF10398", "Cannot override existing field with custom option named '%s'", 400) 
- MsgTerminateNotSupported = ffe("FF10399", "The 'terminate' operation to mark a switchover of smart contracts is not supported on namespace %s", 400) - MsgDefRejectedBadPayload = ffe("FF10400", "Rejected %s message '%s' - invalid payload") - MsgDefRejectedAuthorBlank = ffe("FF10401", "Rejected %s message '%s' - author is blank") - MsgDefRejectedSignatureMismatch = ffe("FF10402", "Rejected %s message '%s' - signature mismatch") - MsgDefRejectedValidateFail = ffe("FF10403", "Rejected %s '%s' - validate failed") - MsgDefRejectedIDMismatch = ffe("FF10404", "Rejected %s '%s' - ID mismatch with existing record") - MsgDefRejectedLocationMismatch = ffe("FF10405", "Rejected %s '%s' - location mismatch with existing record") - MsgDefRejectedSchemaFail = ffe("FF10406", "Rejected %s '%s' - schema check: %s") - MsgDefRejectedConflict = ffe("FF10407", "Rejected %s '%s' - conflicts with existing: %s", 409) - MsgDefRejectedIdentityNotFound = ffe("FF10408", "Rejected %s '%s' - identity not found: %s") - MsgDefRejectedWrongAuthor = ffe("FF10409", "Rejected %s '%s' - wrong author: %s") - MsgDefRejectedHashMismatch = ffe("FF10410", "Rejected %s '%s' - hash mismatch: %s != %s") - MsgInvalidNamespaceUUID = ffe("FF10411", "Expected 'namespace:' prefix on ID '%s'", 400) - MsgBadNetworkVersion = ffe("FF10412", "Bad network version: %s") - MsgDefinitionRejected = ffe("FF10413", "Definition rejected") - MsgActionNotSupported = ffe("FF10414", "This action is not supported in this namespace", 400) - MsgMessagesNotSupported = ffe("FF10415", "Messages are not supported in this namespace", 400) - MsgInvalidSubscriptionForNetwork = ffe("FF10416", "Subscription name '%s' is invalid according to multiparty network rules in effect (network version=%d)") - MsgBlockchainNotConfigured = ffe("FF10417", "No blockchain plugin configured") - MsgInvalidBatchPinEvent = ffe("FF10418", "BatchPin event is not valid - %s (%s): %s") - MsgDuplicatePluginBroadcastName = ffe("FF10419", "Invalid %s plugin broadcast name: %s - broadcast names must be unique", 409) - MsgInvalidConnectorName = ffe("FF10420", "Could not find name %s for %s connector") - MsgCannotInitLegacyNS = ffe("FF10421", "could not initialize legacy '%s' namespace - found conflicting V1 multi-party config in %s") - MsgInvalidGroupMember = ffe("FF10422", "invalid group member - node '%s' is not owned by '%s' or any of its ancestors") - MsgContractListenerStatusInvalid = ffe("FF10423", "Failed to validate contract listener status: %v", 400) - MsgCacheMissSizeLimitKeyInternal = ffe("FF10424", "could not initialize cache - size limit config key is not provided") - MsgCacheMissTTLKeyInternal = ffe("FF10425", "could not initialize cache - ttl config key is not provided") - MsgCacheConfigKeyMismatchInternal = ffe("FF10426", "could not initialize cache - '%s' and '%s' do not have identical prefix, mismatching prefixes are: '%s','%s'") - MsgCacheUnexpectedSizeKeyNameInternal = ffe("FF10427", "could not initialize cache - '%s' is not an expected size configuration key suffix. 
Expected values are: 'size', 'limit'") - MsgUnknownVerifierType = ffe("FF10428", "Unknown verifier type", 400) - MsgNotSupportedByBlockchainPlugin = ffe("FF10429", "Not supported by blockchain plugin", 400) - MsgIdempotencyKeyDuplicateMessage = ffe("FF10430", "Idempotency key '%s' already used for message '%s'", 409) - MsgIdempotencyKeyDuplicateTransaction = ffe("FF10431", "Idempotency key '%s' already used for transaction '%s'", 409) - MsgNonIdempotencyKeyConflictTxInsert = ffe("FF10432", "Conflict on insert of transaction '%s'. No existing transaction matching idempotency key '%s' found", 409) - MsgErrorNameMustBeSet = ffe("FF10433", "The name of the error must be set", 400) - MsgContractErrorsResolveError = ffe("FF10434", "Unable to resolve contract errors: %s", 400) - MsgUnknownInterfaceFormat = ffe("FF10435", "Unknown interface format: %s", 400) - MsgUnknownNamespace = ffe("FF10436", "Unknown namespace '%s'", 404) - MsgMissingNamespace = ffe("FF10437", "Missing namespace in request", 400) - MsgDeprecatedResetWithAutoReload = ffe("FF10438", "The deprecated reset API cannot be used when dynamic config reload is enabled", 409) - MsgConfigArrayVsRawConfigMismatch = ffe("FF10439", "Error processing configuration - mismatch between raw and processed array lengths") - MsgDefaultChannelNotConfigured = ffe("FF10440", "No default channel configured for this namespace", 400) - MsgNamespaceInitializing = ffe("FF10441", "Namespace '%s' is initializing", 412) - MsgPinsNotAssigned = ffe("FF10442", "Message cannot be sent because pins have not been assigned") - MsgMethodDoesNotSupportPinning = ffe("FF10443", "This method does not support passing a payload for pinning") - MsgOperationNotFoundInTransaction = ffe("FF10444", "No operation of type %s was found in transaction '%s'") - MsgCannotSetParameterWithMessage = ffe("FF10445", "Cannot provide a value for '%s' when pinning a message", 400) - MsgNamespaceNotStarted = ffe("FF10446", "Namespace '%s' is not started", 412) - MsgNameExists = ffe("FF10447", "Name already exists", 409) - MsgNetworkNameExists = ffe("FF10448", "Network name already exists", 409) - MsgCannotDeletePublished = ffe("FF10449", "Cannot delete an item that has been published", 409) - MsgAlreadyPublished = ffe("FF10450", "Item has already been published", 409) - MsgContractInterfaceNotPublished = ffe("FF10451", "Contract interface '%s' has not been published", 409) - MsgInvalidMessageSigner = ffe("FF10452", "Invalid message '%s'. Key '%s' does not match the signer of the pin: %s") - MsgInvalidMessageIdentity = ffe("FF10453", "Invalid message '%s'. 
Author '%s' does not match identity registered to %s: %s (%s)") - MsgDuplicateTLSConfig = ffe("FF10454", "Found duplicate TLS Config '%s'", 400) - MsgNotFoundTLSConfig = ffe("FF10455", "Provided TLS Config name '%s' not found for namespace '%s'", 400) - MsgSQLInsertManyOutsideTransaction = ffe("FF10456", "Attempt to perform insert many outside of a transaction", 500) - MsgUnexpectedInterfaceType = ffe("FF10457", "Unexpected interface type: %T", 500) - MsgBlockchainConnectorRESTErrConflict = ffe("FF10458", "Conflict from blockchain connector: %s", 409) - MsgTokensRESTErrConflict = ffe("FF10459", "Conflict from tokens service: %s", 409) - MsgBatchWithDataNotSupported = ffe("FF10460", "Provided subscription '%s' enables batching and withData which is not supported", 400) - MsgBatchDeliveryNotSupported = ffe("FF10461", "Batch delivery not supported by transport '%s'", 400) - MsgWSWrongNamespace = ffe("FF10462", "Websocket request received on a namespace scoped connection but the provided namespace does not match") + MsgConfigFailed = ffe("FF10101", "Failed to read config") + MsgJSONDecodeFailed = ffe("FF10103", "Failed to decode input JSON") + MsgTLSConfigFailed = ffe("FF10105", "Failed to initialize TLS configuration") + MsgWebsocketClientError = ffe("FF10108", "Error received from WebSocket client: %s") + Msg404NotFound = ffe("FF10109", "Not found", 404) + MsgUnknownBlockchainPlugin = ffe("FF10110", "Unknown blockchain plugin: %s") + MsgEthConnectorRESTErr = ffe("FF10111", "Error from ethereum connector: %s") + MsgDBInitFailed = ffe("FF10112", "Database initialization failed") + MsgDBQueryBuildFailed = ffe("FF10113", "Database query builder failed") + MsgDBBeginFailed = ffe("FF10114", "Database begin transaction failed") + MsgDBQueryFailed = ffe("FF10115", "Database query failed") + MsgDBInsertFailed = ffe("FF10116", "Database insert failed") + MsgDBUpdateFailed = ffe("FF10117", "Database update failed") + MsgDBDeleteFailed = ffe("FF10118", "Database delete failed") + MsgDBCommitFailed = ffe("FF10119", "Database commit failed") + MsgDBMissingJoin = ffe("FF10120", "Database missing expected join entry in table '%s' for id '%s'") + MsgDBReadErr = ffe("FF10121", "Database resultset read error from table '%s'") + MsgUnknownDatabasePlugin = ffe("FF10122", "Unknown database plugin '%s'") + MsgNullDataReferenceID = ffe("FF10123", "Data id is null in message data reference %d") + MsgDupDataReferenceID = ffe("FF10124", "Duplicate data ID in message '%s'", 409) + MsgScanFailed = ffe("FF10125", "Failed to restore type '%T' into '%T'") + MsgUnregisteredBatchType = ffe("FF10126", "Unregistered batch type '%s'") + MsgBatchDispatchTimeout = ffe("FF10127", "Timed out dispatching work to batch") + MsgInitializationNilDepError = ffe("FF10128", "Initialization failed in %s due to unmet dependency") + MsgNilResponseNon204 = ffe("FF10129", "No output from API call") + MsgDataNotFound = ffe("FF10133", "Data not found for message %s", 400) + MsgUnknownSharedStoragePlugin = ffe("FF10134", "Unknown Shared Storage plugin '%s'") + MsgIPFSHashDecodeFailed = ffe("FF10135", "Failed to decode IPFS hash into 32byte value '%s'") + MsgIPFSRESTErr = ffe("FF10136", "Error from IPFS: %s") + MsgSerializationFailed = ffe("FF10137", "Serialization failed") + MsgMissingPluginConfig = ffe("FF10138", "Missing configuration '%s' for %s") + MsgMissingDataHashIndex = ffe("FF10139", "Missing data hash for index '%d' in message", 400) + MsgInvalidEthAddress = ffe("FF10141", "Supplied ethereum address is invalid", 400) + 
MsgInvalidTezosAddress = ffe("FF10142", "Supplied tezos address is invalid", 400) + Msg404NoResult = ffe("FF10143", "No result found", 404) + MsgUnsupportedSQLOpInFilter = ffe("FF10150", "No SQL mapping implemented for filter operator '%s'", 400) + MsgFilterSortDesc = ffe("FF10154", "Sort field. For multi-field sort use comma separated values (or multiple query values) with '-' prefix for descending") + MsgContextCanceled = ffe("FF00154", "Context cancelled") + MsgDBMigrationFailed = ffe("FF10163", "Database migration failed") + MsgHashMismatch = ffe("FF10164", "Hash mismatch") + MsgDefaultNamespaceNotFound = ffe("FF10166", "namespaces.default '%s' must be included in the namespaces.predefined configuration") + MsgEventTypesParseFail = ffe("FF10168", "Unable to parse list of event types", 400) + MsgUnknownEventType = ffe("FF10169", "Unknown event type '%s'", 400) + MsgIDMismatch = ffe("FF10170", "ID mismatch") + MsgRegexpCompileFailed = ffe("FF10171", "Unable to compile '%s' regexp '%s'") + MsgUnknownEventTransportPlugin = ffe("FF10172", "Unknown event transport plugin: %s") + MsgWSConnectionNotActive = ffe("FF10173", "Websocket connection '%s' no longer active") + MsgWSSubAlreadyInFlight = ffe("FF10174", "Websocket subscription '%s' already has a message in flight") + MsgWSMsgSubNotMatched = ffe("FF10175", "Acknowledgment does not match an inflight event + subscription") + MsgWSClientSentInvalidData = ffe("FF10176", "Invalid data") + MsgWSClientUnknownAction = ffe("FF10177", "Unknown action '%s'") + MsgWSInvalidStartAction = ffe("FF10178", "A start action must set namespace and either a name or ephemeral=true") + MsgWSAutoAckChanged = ffe("FF10179", "The autoack option must be set consistently on all start requests") + MsgWSAutoAckEnabled = ffe("FF10180", "The autoack option is enabled on this connection") + MsgConnSubscriptionNotStarted = ffe("FF10181", "Subscription %v is not started on connection") + MsgDispatcherClosing = ffe("FF10182", "Event dispatcher closing") + MsgMaxFilterSkip = ffe("FF10183", "You have reached the maximum pagination limit for this query (%d)", 400) + MsgMaxFilterLimit = ffe("FF10184", "Your query exceeds the maximum filter limit (%d)", 400) + MsgAPIServerStaticFail = ffe("FF10185", "An error occurred loading static content", 500) + MsgEventListenerClosing = ffe("FF10186", "Event listener closing") + MsgNamespaceDoesNotExist = ffe("FF10187", "Namespace does not exist", 404) + MsgInvalidSubscription = ffe("FF10189", "Invalid subscription", 400) + MsgMismatchedTransport = ffe("FF10190", "Connection ID '%s' appears not to be unique between transport '%s' and '%s'", 400) + MsgInvalidFirstEvent = ffe("FF10191", "Invalid firstEvent definition - must be 'newest','oldest' or a sequence number", 400) + MsgNumberMustBeGreaterEqual = ffe("FF10192", "Number must be greater than or equal to %d", 400) + MsgAlreadyExists = ffe("FF10193", "A %s with name '%s:%s' already exists", 409) + MsgJSONValidatorBadRef = ffe("FF10194", "Cannot use JSON validator for data with type '%s' and validator reference '%v'", 400) + MsgDatatypeNotFound = ffe("FF10195", "Datatype '%v' not found", 400) + MsgSchemaLoadFailed = ffe("FF10196", "Datatype '%s' schema invalid", 400) + MsgDataCannotBeValidated = ffe("FF10197", "Data cannot be validated", 400) + MsgJSONDataInvalidPerSchema = ffe("FF10198", "Data does not conform to the JSON schema of datatype '%s': %s", 400) + MsgDataValueIsNull = ffe("FF10199", "Data value is null", 400) + MsgDataInvalidHash = ffe("FF10201", "Invalid data: hashes do not 
match Hash=%s Expected=%s", 400) + MsgDataReferenceUnresolvable = ffe("FF10204", "Data reference %d cannot be resolved", 400) + MsgDataMissing = ffe("FF10205", "Data entry %d has neither 'id' to refer to existing data, or 'value' to include in-line JSON data", 400) + MsgAuthorInvalid = ffe("FF10206", "Invalid author specified", 400) + MsgMessageNotFound = ffe("FF10207", "Message '%s' not found", 404) + MsgBatchNotFound = ffe("FF10209", "Batch '%s' not found for message", 404) + MsgMessageTXNotSet = ffe("FF10210", "Message '%s' does not have an assigned transaction", 404) + MsgOwnerMissing = ffe("FF10211", "Owner missing", 400) + MsgUnknownIdentityPlugin = ffe("FF10212", "Unknown Identity plugin '%s'") + MsgUnknownDataExchangePlugin = ffe("FF10213", "Unknown Data Exchange plugin '%s'") + MsgParentIdentityNotFound = ffe("FF10214", "Identity '%s' not found in identity chain for %s '%s'") + MsgInvalidSigningIdentity = ffe("FF10215", "Invalid signing identity") + MsgNodeAndOrgIDMustBeSet = ffe("FF10216", "node.name, org.name and org.key must be configured first", 409) + MsgBlobStreamingFailed = ffe("FF10217", "Blob streaming terminated with error", 500) + MsgNodeNotFound = ffe("FF10224", "Node with name or identity '%s' not found", 400) + MsgLocalNodeNotSet = ffe("FF10225", "Unable to resolve the local node. Please ensure node.name is configured", 500) + MsgGroupNotFound = ffe("FF10226", "Group '%s' not found", 404) + MsgDXRESTErr = ffe("FF10229", "Error from data exchange: %s") + MsgInvalidHex = ffe("FF10231", "Invalid hex supplied", 400) + MsgInvalidWrongLenB32 = ffe("FF00107", "Byte length must be 32 (64 hex characters)", 400) + MsgNodeNotFoundInOrg = ffe("FF10233", "Unable to find any nodes owned by org '%s', or parent orgs", 400) + MsgDXBadResponse = ffe("FF10237", "Unexpected '%s' in data exchange response: %s") + MsgDXBadHash = ffe("FF10238", "Unexpected hash returned from data exchange upload. Hash=%s Expected=%s") + MsgBlobNotFound = ffe("FF10239", "No blob has been uploaded or confirmed received, with hash=%s", 404) + MsgDownloadBlobFailed = ffe("FF10240", "Error download blob with reference '%s' from local data exchange") + MsgDataDoesNotHaveBlob = ffe("FF10241", "Data does not have a blob attachment", 404) + MsgWebhookURLEmpty = ffe("FF10242", "Webhook subscription option 'url' cannot be empty", 400) + MsgWebhookInvalidStringMap = ffe("FF10243", "Webhook subscription option '%s' must be map of string values. %s=%T", 400) + MsgWebsocketsNoData = ffe("FF10244", "Websockets subscriptions do not support streaming the full data payload, just the references (withData must be false)", 400) + MsgWebhooksWithData = ffe("FF10245", "Webhook subscriptions require the full data payload (withData must be true)", 400) + MsgWebhooksReplyBadJSON = ffe("FF10257", "Failed to process reply from webhook as JSON") + MsgRequestTimeout = ffe("FF10260", "The request with id '%s' timed out after %.2fms", 408) + MsgRequestReplyTagRequired = ffe("FF10261", "For request messages 'header.tag' must be set on the request message to route it to a suitable responder", 400) + MsgRequestCannotHaveCID = ffe("FF10262", "For request messages 'header.cid' must be unset", 400) + MsgSystemTransportInternal = ffe("FF10266", "You cannot create subscriptions on the system events transport") + MsgFilterCountNotSupported = ffe("FF10267", "This query does not support generating a count of all results") + MsgRejected = ffe("FF10269", "Message with ID '%s' was rejected. 
Please check the FireFly logs for more information") + MsgRequestMustBePrivate = ffe("FF10271", "For request messages you must specify a group of private recipients", 400) + MsgUnknownTokensPlugin = ffe("FF10272", "Unknown tokens plugin '%s'", 400) + MsgMissingTokensPluginConfig = ffe("FF10273", "Invalid tokens configuration - name and plugin are required", 400) + MsgTokensRESTErr = ffe("FF10274", "Error from tokens service: %s") + MsgTokenPoolDuplicate = ffe("FF10275", "Duplicate token pool: %s", 409) + MsgTokenPoolRejected = ffe("FF10276", "Token pool with ID '%s' was rejected. Please check the FireFly logs for more information") + MsgIdentityNotFoundByString = ffe("FF10277", "Identity could not be resolved via lookup string '%s'") + MsgAuthorOrgSigningKeyMismatch = ffe("FF10279", "Author organization '%s' is not associated with signing key '%s'") + MsgCannotTransferToSelf = ffe("FF10280", "From and to addresses must be different", 400) + MsgLocalOrgNotSet = ffe("FF10281", "Unable to resolve the local root org. Please ensure org.name is configured", 500) + MsgTezosconnectRESTErr = ffe("FF10283", "Error from tezos connector: %s") + MsgFabconnectRESTErr = ffe("FF10284", "Error from fabconnect: %s") + MsgInvalidIdentity = ffe("FF10285", "Supplied Fabric signer identity is invalid", 400) + MsgFailedToDecodeCertificate = ffe("FF10286", "Failed to decode certificate: %s", 500) + MsgInvalidMessageType = ffe("FF10287", "Invalid message type - allowed types are %s", 400) + MsgWSClosed = ffe("FF10290", "Websocket closed") + MsgFieldNotSpecified = ffe("FF10292", "Field '%s' must be specified", 400) + MsgTokenPoolNotActive = ffe("FF10293", "Token pool is not yet activated") + MsgHistogramCollectionParam = ffe("FF10297", "Collection to fetch") + MsgInvalidNumberOfIntervals = ffe("FF10298", "Number of time intervals must be between %d and %d", 400) + MsgInvalidChartNumberParam = ffe("FF10299", "Invalid %s. 
Must be a number.", 400) + MsgHistogramInvalidTimes = ffe("FF10300", "Start time must be before end time", 400) + MsgUnsupportedCollection = ffe("FF10301", "%s collection is not supported", 400) + MsgContractInterfaceExists = ffe("FF10302", "A contract interface already exists in the namespace: '%s' with name: '%s' and version: '%s'", 409) + MsgContractInterfaceNotFound = ffe("FF10303", "Contract interface %s not found", 404) + MsgContractMissingInputArgument = ffe("FF10304", "Missing required input argument '%s'", 400) + MsgContractWrongInputType = ffe("FF10305", "Input '%v' is of type '%v' not expected type of '%v'", 400) + MsgContractMissingInputField = ffe("FF10306", "Expected object of type '%v' to contain field named '%v' but it was missing", 400) + MsgContractMapInputType = ffe("FF10307", "Unable to map input type '%v' to known FireFly type - was expecting '%v'", 400) + MsgContractByteDecode = ffe("FF10308", "Unable to decode field '%v' as bytes", 400) + MsgContractInternalType = ffe("FF10309", "Input '%v' of type '%v' is not compatible blockchain internalType of '%v'", 400) + MsgContractLocationInvalid = ffe("FF10310", "Failed to validate contract location: %v", 400) + MsgContractParamInvalid = ffe("FF10311", "Failed to validate contract param: %v", 400) + MsgContractListenerNameExists = ffe("FF10312", "A contract listener already exists in the namespace: '%s' with name: '%s'", 409) + MsgContractMethodNotSet = ffe("FF10313", "Either an interface reference and method path, or in-line method definition, must be supplied on invoke contract request", 400) + MsgContractMethodResolveError = ffe("FF10315", "Unable to resolve contract method: %s", 400) + MsgContractLocationExists = ffe("FF10316", "The contract location cannot be changed after it is created", 400) + MsgListenerNoEvent = ffe("FF10317", "Either an interface reference and event path, or in-line event definition must be supplied when creating a contract listener", 400) + MsgListenerEventNotFound = ffe("FF10318", "No event was found in namespace '%s' with id '%s'", 400) + MsgEventNameMustBeSet = ffe("FF10319", "Event name must be set", 400) + MsgMethodNameMustBeSet = ffe("FF10320", "Method name must be set", 400) + MsgContractEventResolveError = ffe("FF10321", "Unable to resolve contract event", 400) + MsgQueryOpUnsupportedMod = ffe("FF10322", "Operation '%s' on '%s' does not support modifiers", 400) + MsgDXBadSize = ffe("FF10323", "Unexpected size returned from data exchange upload. 
Size=%d Expected=%d") + MsgTooLargeBroadcast = ffe("FF10327", "Message size %.2fkb is too large for the max broadcast batch size of %.2fkb", 400) + MsgTooLargePrivate = ffe("FF10328", "Message size %.2fkb is too large for the max private message size of %.2fkb", 400) + MsgManifestMismatch = ffe("FF10329", "Manifest mismatch overriding '%s' status as failure: '%s'", 400) + MsgFFIValidationFail = ffe("FF10331", "Field '%s' does not validate against the provided schema", 400) + MsgFFISchemaParseFail = ffe("FF10332", "Failed to parse schema for param '%s'", 400) + MsgFFISchemaCompileFail = ffe("FF10333", "Failed compile schema for param '%s'", 400) + MsgPluginInitializationFailed = ffe("FF10334", "Plugin initialization error", 500) + MsgUnknownTransactionType = ffe("FF10336", "Unknown transaction type '%s'", 400) + MsgGoTemplateCompileFailed = ffe("FF10337", "Go template compilation for '%s' failed: %s", 500) + MsgGoTemplateExecuteFailed = ffe("FF10338", "Go template execution for '%s' failed: %s", 500) + MsgAddressResolveFailed = ffe("FF10339", "Failed to resolve signing key string '%s': %s", 500) + MsgAddressResolveBadStatus = ffe("FF10340", "Failed to resolve signing key string '%s' [%d]: %s", 500) + MsgAddressResolveBadResData = ffe("FF10341", "Failed to resolve signing key string '%s' - invalid address returned '%s': %s", 500) + MsgDXNotInitialized = ffe("FF10342", "Data exchange is initializing") + MsgDBLockFailed = ffe("FF10345", "Database lock failed") + MsgFFIGenerationFailed = ffe("FF10346", "Error generating smart contract interface: %s", 400) + MsgFFIGenerationUnsupported = ffe("FF10347", "Smart contract interface generation is not supported by this blockchain plugin", 400) + MsgBlobHashMismatch = ffe("FF10348", "Blob hash mismatch sent=%s received=%s", 400) + MsgDIDResolverUnknown = ffe("FF10349", "DID resolver unknown for DID: %s", 400) + MsgIdentityNotOrg = ffe("FF10350", "Identity '%s' with DID '%s' is not an organization", 400) + MsgIdentityNotNode = ffe("FF10351", "Identity '%s' with DID '%s' is not a node", 400) + MsgBlockchainKeyNotSet = ffe("FF10352", "No blockchain key specified", 400) + MsgNoVerifierForIdentity = ffe("FF10353", "No %s verifier registered for identity %s", 400) + MsgNodeMissingBlockchainKey = ffe("FF10354", "No default signing key or organization signing key configured for this namespace", 400) + MsgAuthorRegistrationMismatch = ffe("FF10355", "Verifier '%s' cannot be used for signing with author '%s'. 
Verifier registered to '%s'", 400) + MsgAuthorMissingForKey = ffe("FF10356", "Key '%s' has not been registered by any identity, and a separate 'author' was not supplied", 404) + MsgAuthorIncorrectForRootReg = ffe("FF10357", "Author namespace '%s' and DID '%s' combination invalid for root organization registration", 400) + MsgKeyIdentityMissing = ffe("FF10358", "Identity owner of key '%s' not found", 500) + MsgIdentityChainLoop = ffe("FF10364", "Loop detected on identity %s in chain for %s (%s)", 400) + MsgInvalidIdentityParentType = ffe("FF10365", "Parent %s (%s) of type %s is invalid for child %s (%s) of type", 400) + MsgParentIdentityMissingClaim = ffe("FF10366", "Parent %s (%s) is invalid (missing claim)", 400) + MsgDXInfoMissingID = ffe("FF10367", "Data exchange endpoint info missing 'id' field", 500) + MsgEventNotFound = ffe("FF10370", "Event with name '%s' not found", 400) + MsgOperationNotSupported = ffe("FF10371", "Operation not supported: %s", 400) + MsgFailedToRetrieve = ffe("FF10372", "Failed to retrieve %s %s", 500) + MsgBlobMissingPublic = ffe("FF10373", "Blob for data %s missing public payload reference while flushing batch", 500) + MsgDBMultiRowConfigError = ffe("FF10374", "Database invalid configuration - using multi-row insert on DB plugin that does not support query syntax for input") + MsgDBNoSequence = ffe("FF10375", "Failed to retrieve sequence for insert row %d (could mean duplicate insert)", 500) + MsgDownloadSharedFailed = ffe("FF10376", "Error downloading data with reference '%s' from shared storage") + MsgDownloadBatchMaxBytes = ffe("FF10377", "Error downloading batch with reference '%s' from shared storage - maximum size limit reached") + MsgOperationDataIncorrect = ffe("FF10378", "Operation data type incorrect: %T", 400) + MsgDataMissingBlobHash = ffe("FF10379", "Blob for data %s cannot be transferred as it is missing a hash", 500) + MsgUnexpectedDXMessageType = ffe("FF10380", "Unexpected websocket event type from DX plugin: %s", 500) + MsgContractListenerExists = ffe("FF10383", "A contract listener already exists for this combination of topic + location + event", 409) + MsgInvalidOutputOption = ffe("FF10385", "invalid output option '%s'") + MsgInvalidPluginConfiguration = ffe("FF10386", "Invalid %s plugin configuration - name and type are required") + MsgReferenceMarkdownMissing = ffe("FF10387", "Reference markdown file missing: '%s'") + MsgFFSystemReservedName = ffe("FF10388", "Invalid namespace configuration - %s is a reserved name") + MsgInvalidNamespaceMode = ffe("FF10389", "Invalid %s namespace configuration - unknown mode") + MsgNamespaceUnknownPlugin = ffe("FF10390", "Invalid %s namespace configuration - unknown plugin %s") + MsgNamespaceWrongPluginsMultiparty = ffe("FF10391", "Invalid %s namespace configuration - multiparty mode requires database, blockchain, shared storage, and data exchange plugins") + MsgNamespaceNoDatabase = ffe("FF10392", "Invalid %s namespace configuration - a database plugin is required") + MsgNamespaceMultiplePluginType = ffe("FF10394", "Invalid %s namespace configuration - multiple %s plugins provided") + MsgDuplicatePluginName = ffe("FF10395", "Invalid plugin configuration - plugin with name %s already exists", 409) + MsgInvalidFireFlyContractIndex = ffe("FF10396", "No configuration found for FireFly contract at %s") + MsgUnrecognizedNetworkAction = ffe("FF10397", "Unrecognized network action: %s", 400) + MsgOverrideExistingFieldCustomOption = ffe("FF10398", "Cannot override existing field with custom option named '%s'", 400) 
+ MsgTerminateNotSupported = ffe("FF10399", "The 'terminate' operation to mark a switchover of smart contracts is not supported on namespace %s", 400) + MsgDefRejectedBadPayload = ffe("FF10400", "Rejected %s message '%s' - invalid payload") + MsgDefRejectedAuthorBlank = ffe("FF10401", "Rejected %s message '%s' - author is blank") + MsgDefRejectedSignatureMismatch = ffe("FF10402", "Rejected %s message '%s' - signature mismatch") + MsgDefRejectedValidateFail = ffe("FF10403", "Rejected %s '%s' - validate failed") + MsgDefRejectedIDMismatch = ffe("FF10404", "Rejected %s '%s' - ID mismatch with existing record") + MsgDefRejectedLocationMismatch = ffe("FF10405", "Rejected %s '%s' - location mismatch with existing record") + MsgDefRejectedSchemaFail = ffe("FF10406", "Rejected %s '%s' - schema check: %s") + MsgDefRejectedConflict = ffe("FF10407", "Rejected %s '%s' - conflicts with existing: %s", 409) + MsgDefRejectedIdentityNotFound = ffe("FF10408", "Rejected %s '%s' - identity not found: %s") + MsgDefRejectedWrongAuthor = ffe("FF10409", "Rejected %s '%s' - wrong author: %s") + MsgDefRejectedHashMismatch = ffe("FF10410", "Rejected %s '%s' - hash mismatch: %s != %s") + MsgInvalidNamespaceUUID = ffe("FF10411", "Expected 'namespace:' prefix on ID '%s'", 400) + MsgBadNetworkVersion = ffe("FF10412", "Bad network version: %s") + MsgDefinitionRejected = ffe("FF10413", "Definition rejected") + MsgActionNotSupported = ffe("FF10414", "This action is not supported in this namespace", 400) + MsgMessagesNotSupported = ffe("FF10415", "Messages are not supported in this namespace", 400) + MsgInvalidSubscriptionForNetwork = ffe("FF10416", "Subscription name '%s' is invalid according to multiparty network rules in effect (network version=%d)") + MsgBlockchainNotConfigured = ffe("FF10417", "No blockchain plugin configured") + MsgInvalidBatchPinEvent = ffe("FF10418", "BatchPin event is not valid - %s (%s): %s") + MsgDuplicatePluginBroadcastName = ffe("FF10419", "Invalid %s plugin broadcast name: %s - broadcast names must be unique", 409) + MsgInvalidConnectorName = ffe("FF10420", "Could not find name %s for %s connector") + MsgCannotInitLegacyNS = ffe("FF10421", "could not initialize legacy '%s' namespace - found conflicting V1 multi-party config in %s") + MsgInvalidGroupMember = ffe("FF10422", "invalid group member - node '%s' is not owned by '%s' or any of its ancestors") + MsgContractListenerStatusInvalid = ffe("FF10423", "Failed to validate contract listener status: %v", 400) + MsgCacheMissSizeLimitKeyInternal = ffe("FF10424", "could not initialize cache - size limit config key is not provided") + MsgCacheMissTTLKeyInternal = ffe("FF10425", "could not initialize cache - ttl config key is not provided") + MsgCacheConfigKeyMismatchInternal = ffe("FF10426", "could not initialize cache - '%s' and '%s' do not have identical prefix, mismatching prefixes are: '%s','%s'") + MsgCacheUnexpectedSizeKeyNameInternal = ffe("FF10427", "could not initialize cache - '%s' is not an expected size configuration key suffix. 
Expected values are: 'size', 'limit'") + MsgUnknownVerifierType = ffe("FF10428", "Unknown verifier type", 400) + MsgNotSupportedByBlockchainPlugin = ffe("FF10429", "Not supported by blockchain plugin", 400) + MsgIdempotencyKeyDuplicateMessage = ffe("FF10430", "Idempotency key '%s' already used for message '%s'", 409) + MsgIdempotencyKeyDuplicateTransaction = ffe("FF10431", "Idempotency key '%s' already used for transaction '%s'", 409) + MsgNonIdempotencyKeyConflictTxInsert = ffe("FF10432", "Conflict on insert of transaction '%s'. No existing transaction matching idempotency key '%s' found", 409) + MsgErrorNameMustBeSet = ffe("FF10433", "The name of the error must be set", 400) + MsgContractErrorsResolveError = ffe("FF10434", "Unable to resolve contract errors: %s", 400) + MsgUnknownInterfaceFormat = ffe("FF10435", "Unknown interface format: %s", 400) + MsgUnknownNamespace = ffe("FF10436", "Unknown namespace '%s'", 404) + MsgMissingNamespace = ffe("FF10437", "Missing namespace in request", 400) + MsgDeprecatedResetWithAutoReload = ffe("FF10438", "The deprecated reset API cannot be used when dynamic config reload is enabled", 409) + MsgConfigArrayVsRawConfigMismatch = ffe("FF10439", "Error processing configuration - mismatch between raw and processed array lengths") + MsgDefaultChannelNotConfigured = ffe("FF10440", "No default channel configured for this namespace", 400) + MsgNamespaceInitializing = ffe("FF10441", "Namespace '%s' is initializing", 412) + MsgPinsNotAssigned = ffe("FF10442", "Message cannot be sent because pins have not been assigned") + MsgMethodDoesNotSupportPinning = ffe("FF10443", "This method does not support passing a payload for pinning") + MsgOperationNotFoundInTransaction = ffe("FF10444", "No operation of type %s was found in transaction '%s'") + MsgCannotSetParameterWithMessage = ffe("FF10445", "Cannot provide a value for '%s' when pinning a message", 400) + MsgNamespaceNotStarted = ffe("FF10446", "Namespace '%s' is not started", 412) + MsgNameExists = ffe("FF10447", "Name already exists", 409) + MsgNetworkNameExists = ffe("FF10448", "Network name already exists", 409) + MsgCannotDeletePublished = ffe("FF10449", "Cannot delete an item that has been published", 409) + MsgAlreadyPublished = ffe("FF10450", "Item has already been published", 409) + MsgContractInterfaceNotPublished = ffe("FF10451", "Contract interface '%s' has not been published", 409) + MsgInvalidMessageSigner = ffe("FF10452", "Invalid message '%s'. Key '%s' does not match the signer of the pin: %s") + MsgInvalidMessageIdentity = ffe("FF10453", "Invalid message '%s'. 
Author '%s' does not match identity registered to %s: %s (%s)") + MsgDuplicateTLSConfig = ffe("FF10454", "Found duplicate TLS Config '%s'", 400) + MsgNotFoundTLSConfig = ffe("FF10455", "Provided TLS Config name '%s' not found for namespace '%s'", 400) + MsgSQLInsertManyOutsideTransaction = ffe("FF10456", "Attempt to perform insert many outside of a transaction", 500) + MsgUnexpectedInterfaceType = ffe("FF10457", "Unexpected interface type: %T", 500) + MsgBlockchainConnectorRESTErrConflict = ffe("FF10458", "Conflict from blockchain connector: %s", 409) + MsgTokensRESTErrConflict = ffe("FF10459", "Conflict from tokens service: %s", 409) + MsgBatchWithDataNotSupported = ffe("FF10460", "Provided subscription '%s' enables batching and withData which is not supported", 400) + MsgBatchDeliveryNotSupported = ffe("FF10461", "Batch delivery not supported by transport '%s'", 400) + MsgWSWrongNamespace = ffe("FF10462", "Websocket request received on a namespace scoped connection but the provided namespace does not match") + MsgMaxSubscriptionEventScanLimitBreached = ffe("FF10463", "Event scan limit breached with start sequence ID %d and end sequence ID %d. Please restrict your query to a narrower range", 400) + MsgSequenceIDDidNotParseToInt = ffe("FF10464", "Could not parse provided %s to an integer sequence ID", 400) ) diff --git a/internal/database/sqlcommon/event_sql.go b/internal/database/sqlcommon/event_sql.go index 1586bdeca..f0c63f4fd 100644 --- a/internal/database/sqlcommon/event_sql.go +++ b/internal/database/sqlcommon/event_sql.go @@ -196,12 +196,9 @@ func (s *SQLCommon) GetEventByID(ctx context.Context, namespace string, id *ffty return event, nil } -func (s *SQLCommon) GetEvents(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Event, res *ffapi.FilterResult, err error) { - - cols := append([]string{}, eventColumns...) - cols = append(cols, s.SequenceColumn()) +func (s *SQLCommon) getEventsGeneric(ctx context.Context, namespace string, sql sq.SelectBuilder, filter ffapi.Filter) (message []*core.Event, res *ffapi.FilterResult, err error) { query, fop, fi, err := s.FilterSelect( - ctx, "", sq.Select(cols...).From(eventsTable), + ctx, "", sql, filter, eventFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err @@ -223,5 +220,29 @@ func (s *SQLCommon) GetEvents(ctx context.Context, namespace string, filter ffap } return events, s.QueryRes(ctx, eventsTable, tx, fop, nil, fi), err +} + +func (s *SQLCommon) GetEvents(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Event, res *ffapi.FilterResult, err error) { + + cols := append([]string{}, eventColumns...) + cols = append(cols, s.SequenceColumn()) + + query := sq.Select(cols...).From(eventsTable) + + return s.getEventsGeneric(ctx, namespace, query, filter) +} + +func (s *SQLCommon) GetEventsInSequenceRange(ctx context.Context, namespace string, filter ffapi.Filter, startSequence int, endSequence int) (message []*core.Event, res *ffapi.FilterResult, err error) { + cols := append([]string{}, eventColumns...) 
+ cols = append(cols, s.SequenceColumn()) + + filter.Limit(0) + + query := sq.Select(cols...).From(eventsTable).Where(sq.GtOrEq{ + "seq": startSequence, + }).Where(sq.Lt{ + "seq": endSequence, + }) + return s.getEventsGeneric(ctx, namespace, query, filter) } diff --git a/internal/database/sqlcommon/event_sql_test.go b/internal/database/sqlcommon/event_sql_test.go index 134460deb..17a95f0ad 100644 --- a/internal/database/sqlcommon/event_sql_test.go +++ b/internal/database/sqlcommon/event_sql_test.go @@ -89,6 +89,56 @@ func TestEventE2EWithDB(t *testing.T) { s.callbacks.AssertExpectations(t) } +func TestGetEventsInSequenceRangeE2EWithDB(t *testing.T) { + + s, cleanup := newSQLiteTestProvider(t) + defer cleanup() + ctx := context.Background() + + s.callbacks.On("OrderedUUIDCollectionNSEvent", database.CollectionEvents, core.ChangeEventTypeCreated, "ns1", mock.Anything, mock.Anything).Return() + + numberOfEvents := 1000 + var eventID *fftypes.UUID + for i := 0; i < numberOfEvents; i++ { + eventID = fftypes.NewUUID() + event := &core.Event{ + ID: eventID, + Namespace: "ns1", + Type: core.EventTypeMessageConfirmed, + Reference: fftypes.NewUUID(), + Correlator: fftypes.NewUUID(), + Topic: fmt.Sprintf("topic%d", i % 2), + Created: fftypes.Now(), + } + err := s.InsertEvent(ctx, event) + assert.NoError(t, err) + } + + fb := database.EventQueryFactory.NewFilter(ctx) + fb.Limit(1000) + + // Check we can get back some events given a start and end + events, _, err := s.GetEventsInSequenceRange(ctx, "ns1", fb.And(), 1, 1001) + assert.Nil(t, err) + assert.Equal(t, 1000, len(events)) + + // Do some basic filtering to show that works + fb2 := database.EventQueryFactory.NewFilter(ctx) + fb2.Limit(1000) + + events, _, err = s.GetEventsInSequenceRange(ctx, "ns1", fb2.And(fb2.Eq("topic", "topic1")), 1, 1001) + assert.Nil(t, err) + assert.Equal(t, 500, len(events)) + + // And get a single record from a range (EventID right now contains the last event ID created) + fb3 := database.EventQueryFactory.NewFilter(ctx) + fb3.Limit(1000) + + events, _, err = s.GetEventsInSequenceRange(ctx, "ns1", fb3.And(fb3.Eq("id", eventID.String())), 1, 1001) + assert.Nil(t, err) + assert.Equal(t, 1, len(events)) +} + func TestInsertEventFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) @@ -253,3 +303,29 @@ func TestGettEventsReadMessageFail(t *testing.T) { assert.Regexp(t, "FF10121", err) assert.NoError(t, mock.ExpectationsWereMet()) } + +func TestGetEventsInSequenceRangeQueryFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + f := database.EventQueryFactory.NewFilter(context.Background()).Eq("id", "") + _, _, err := s.GetEventsInSequenceRange(context.Background(), "ns1", f, 0, 100) + assert.Regexp(t, "FF00176", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetEventsInSequenceRangeBuildQueryFail(t *testing.T) { + s, _ := newMockProvider().init() + f := database.EventQueryFactory.NewFilter(context.Background()).Eq("id", map[bool]bool{true: false}) + _, _, err := s.GetEventsInSequenceRange(context.Background(), "ns1", f, 0, 100) + assert.Regexp(t, "FF00143.*id", err) +} + +func TestGetEventsInSequenceRangeShouldCallGetEventsWhenNoSequencedProvidedAndThrowAnError(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"id", }).AddRow("only one")) + f := 
database.EventQueryFactory.NewFilter(context.Background()).And() + _, _, err := s.GetEventsInSequenceRange(context.Background(), "ns1", f, -1, -1) + assert.NotNil(t, err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + diff --git a/internal/events/event_dispatcher.go b/internal/events/event_dispatcher.go index 8afd99e39..f97a0174a 100644 --- a/internal/events/event_dispatcher.go +++ b/internal/events/event_dispatcher.go @@ -209,77 +209,9 @@ func (ed *eventDispatcher) enrichEvents(events []core.LocallySequenced) ([]*core func (ed *eventDispatcher) filterEvents(candidates []*core.EventDelivery) []*core.EventDelivery { matchingEvents := make([]*core.EventDelivery, 0, len(candidates)) for _, event := range candidates { - filter := ed.subscription - if filter.eventMatcher != nil && !filter.eventMatcher.MatchString(string(event.Type)) { - continue + if ed.subscription.MatchesEvent(&event.EnrichedEvent) { + matchingEvents = append(matchingEvents, event) } - - msg := event.Message - tx := event.Transaction - be := event.BlockchainEvent - tag := "" - topic := event.Topic - group := "" - author := "" - txType := "" - beName := "" - beListener := "" - - if msg != nil { - tag = msg.Header.Tag - author = msg.Header.Author - if msg.Header.Group != nil { - group = msg.Header.Group.String() - } - } - - if tx != nil { - txType = tx.Type.String() - } - - if be != nil { - beName = be.Name - beListener = be.Listener.String() - } - - if filter.topicFilter != nil { - topicsMatch := false - if filter.topicFilter.MatchString(topic) { - topicsMatch = true - } - if !topicsMatch { - continue - } - } - - if filter.messageFilter != nil { - if filter.messageFilter.tagFilter != nil && !filter.messageFilter.tagFilter.MatchString(tag) { - continue - } - if filter.messageFilter.authorFilter != nil && !filter.messageFilter.authorFilter.MatchString(author) { - continue - } - if filter.messageFilter.groupFilter != nil && !filter.messageFilter.groupFilter.MatchString(group) { - continue - } - } - - if filter.transactionFilter != nil { - if filter.transactionFilter.typeFilter != nil && !filter.transactionFilter.typeFilter.MatchString(txType) { - continue - } - } - - if filter.blockchainFilter != nil { - if filter.blockchainFilter.nameFilter != nil && !filter.blockchainFilter.nameFilter.MatchString(beName) { - continue - } - if filter.blockchainFilter.listenerFilter != nil && !filter.blockchainFilter.listenerFilter.MatchString(beListener) { - continue - } - } - - matchingEvents = append(matchingEvents, event) } return matchingEvents } diff --git a/internal/events/event_enrich.go b/internal/events/event_enrich.go index 2036b202d..9b986ae00 100644 --- a/internal/events/event_enrich.go +++ b/internal/events/event_enrich.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. 
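The event_dispatcher.go change above collapses the long per-field filter chain into a single predicate call, so `filterEvents` becomes a plain filter loop over `MatchesEvent` (the full predicate is added to subscription_manager.go later in this diff). A minimal standalone sketch of that shape, using illustrative stand-in types rather than the real FireFly structs:

```go
package main

import "fmt"

// Stand-ins for core.EnrichedEvent and the internal subscription type; the
// real predicate also checks message, transaction and blockchain event fields.
type enrichedEvent struct {
	Type  string
	Topic string
}

type subscription struct {
	eventType string // empty means match any type
	topic     string // empty means match any topic
}

// MatchesEvent returns false as soon as any configured filter fails,
// mirroring the early-return style of the real implementation.
func (s *subscription) MatchesEvent(ev *enrichedEvent) bool {
	if s.eventType != "" && s.eventType != ev.Type {
		return false
	}
	if s.topic != "" && s.topic != ev.Topic {
		return false
	}
	return true
}

// filterEvents is now just a loop over the predicate.
func filterEvents(sub *subscription, candidates []*enrichedEvent) []*enrichedEvent {
	matching := make([]*enrichedEvent, 0, len(candidates))
	for _, ev := range candidates {
		if sub.MatchesEvent(ev) {
			matching = append(matching, ev)
		}
	}
	return matching
}

func main() {
	sub := &subscription{topic: "topic1"}
	candidates := []*enrichedEvent{
		{Type: "message_confirmed", Topic: "topic1"},
		{Type: "message_confirmed", Topic: "topic2"},
	}
	fmt.Println(len(filterEvents(sub, candidates))) // prints 1
}
```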
// // SPDX-License-Identifier: Apache-2.0 // @@ -44,6 +44,18 @@ func newEventEnricher(ns string, di database.Plugin, dm data.Manager, om operati } } +func (em *eventEnricher) enrichEvents(ctx context.Context, events []*core.Event) ([]*core.EnrichedEvent, error) { + enriched := make([]*core.EnrichedEvent, len(events)) + for i, event := range events { + enrichedEvent, err := em.enrichEvent(ctx, event) + if err != nil { + return nil, err + } + enriched[i] = enrichedEvent + } + return enriched, nil +} + func (em *eventEnricher) enrichEvent(ctx context.Context, event *core.Event) (*core.EnrichedEvent, error) { e := &core.EnrichedEvent{ Event: *event, diff --git a/internal/events/event_enrich_test.go b/internal/events/event_enrich_test.go index 4c708cf7a..d98e7773f 100644 --- a/internal/events/event_enrich_test.go +++ b/internal/events/event_enrich_test.go @@ -70,6 +70,33 @@ func TestEnrichMessageConfirmed(t *testing.T) { assert.Equal(t, ref1, enriched.Message.Header.ID) } +func TestEnrichEventsMessageConfirmed(t *testing.T) { + em := newTestEventManager(t) + defer em.cleanup(t) + ctx := context.Background() + + // Setup the IDs + ref1 := fftypes.NewUUID() + ev1 := fftypes.NewUUID() + + // Setup enrichment + em.mdm.On("GetMessageWithDataCached", mock.Anything, ref1).Return(&core.Message{ + Header: core.MessageHeader{ID: ref1}, + }, nil, true, nil) + + event := []*core.Event{ + { + ID: ev1, + Type: core.EventTypeMessageConfirmed, + Reference: ref1, + }, + } + + enriched, err := em.EnrichEvents(ctx, event) + assert.NoError(t, err) + assert.Equal(t, ref1, enriched[0].Message.Header.ID) +} + func TestEnrichMessageFail(t *testing.T) { em := newTestEventEnricher() ctx := context.Background() @@ -613,3 +640,63 @@ func TestEnrichOperationFail(t *testing.T) { _, err := em.enrichEvent(ctx, event) assert.EqualError(t, err, "pop") } + +func TestEnrichEventsFails(t *testing.T) { + em := newTestEventEnricher() + ctx := context.Background() + + ev1 := fftypes.NewUUID() + ev2 := fftypes.NewUUID() + ref1 := fftypes.NewUUID() + + // Setup enrichment + mom := em.operations.(*operationmocks.Manager) + mom.On("GetOperationByIDCached", mock.Anything, mock.Anything).Return(&core.Operation{ + ID: ref1, + }, nil).Once() + mom.On("GetOperationByIDCached", mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) + + events := []*core.Event{ + { + ID: ev1, + Type: core.EventTypeApprovalOpFailed, + }, + { + ID: ev2, + Type: core.EventTypeApprovalOpFailed, + }, + } + + _, err := em.enrichEvents(ctx, events) + assert.EqualError(t, err, "pop") +} + +func TestEnrichEventsOK(t *testing.T) { + em := newTestEventEnricher() + ctx := context.Background() + + ev1 := fftypes.NewUUID() + ev2 := fftypes.NewUUID() + ref1 := fftypes.NewUUID() + + // Setup enrichment + mom := em.operations.(*operationmocks.Manager) + mom.On("GetOperationByIDCached", mock.Anything, mock.Anything).Return(&core.Operation{ + ID: ref1, + }, nil) + + events := []*core.Event{ + { + ID: ev1, + Type: core.EventTypeApprovalOpFailed, + }, + { + ID: ev2, + Type: core.EventTypeApprovalOpFailed, + }, + } + + result, err := em.enrichEvents(ctx, events) + assert.Nil(t, err) + assert.Equal(t, 2, len(result)) +} diff --git a/internal/events/event_manager.go b/internal/events/event_manager.go index 0bf7a02e3..9a57bda10 100644 --- a/internal/events/event_manager.go +++ b/internal/events/event_manager.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. 
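A compact way to read the new `enrichEvents` helper above: it is a map-with-error over the existing single-event `enrichEvent`, returning the first error and no partial slice (which is what TestEnrichEventsFails asserts). An illustrative standalone sketch of that contract, with hypothetical names rather than the FireFly API:

```go
package main

import (
	"errors"
	"fmt"
)

// enrichOne stands in for the per-event enrichment step.
func enrichOne(id int) (string, error) {
	if id < 0 {
		return "", errors.New("pop")
	}
	return fmt.Sprintf("enriched-%d", id), nil
}

// enrichAll mirrors the enrichEvents contract: all results, or the first error.
func enrichAll(ids []int) ([]string, error) {
	out := make([]string, len(ids))
	for i, id := range ids {
		enriched, err := enrichOne(id)
		if err != nil {
			return nil, err // fail fast, no partial results
		}
		out[i] = enriched
	}
	return out, nil
}

func main() {
	fmt.Println(enrichAll([]int{1, 2, 3})) // [enriched-1 enriched-2 enriched-3] <nil>
	fmt.Println(enrichAll([]int{1, -1}))   // [] pop
}
```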
// // SPDX-License-Identifier: Apache-2.0 // @@ -61,6 +61,8 @@ type EventManager interface { DeleteDurableSubscription(ctx context.Context, subDef *core.Subscription) (err error) CreateUpdateDurableSubscription(ctx context.Context, subDef *core.Subscription, mustNew bool) (err error) EnrichEvent(ctx context.Context, event *core.Event) (*core.EnrichedEvent, error) + EnrichEvents(ctx context.Context, events []*core.Event) ([]*core.EnrichedEvent, error) + FilterHistoricalEventsOnSubscription(ctx context.Context, events []*core.EnrichedEvent, sub *core.Subscription) ([]*core.EnrichedEvent, error) QueueBatchRewind(batchID *fftypes.UUID) ResolveTransportAndCapabilities(ctx context.Context, transportName string) (string, *events.Capabilities, error) Start() error @@ -300,6 +302,28 @@ func (em *eventManager) EnrichEvent(ctx context.Context, event *core.Event) (*co return em.enricher.enrichEvent(ctx, event) } +func (em *eventManager) EnrichEvents(ctx context.Context, events []*core.Event) ([]*core.EnrichedEvent, error) { + return em.enricher.enrichEvents(ctx, events) +} + func (em *eventManager) QueueBatchRewind(batchID *fftypes.UUID) { em.aggregator.queueBatchRewind(batchID) } + +func (em *eventManager) FilterHistoricalEventsOnSubscription(ctx context.Context, events []*core.EnrichedEvent, sub *core.Subscription) ([]*core.EnrichedEvent, error) { + // Transport must be provided for validation, but we're not using it for event delivery so fake the transport + sub.Transport = "websockets" + subscriptionDef, err := em.subManager.parseSubscriptionDef(ctx, sub) + if err != nil { + return nil, err + } + + matchingEvents := []*core.EnrichedEvent{} + for _, event := range events { + if subscriptionDef.MatchesEvent(event) { + matchingEvents = append(matchingEvents, event) + } + } + + return matchingEvents, nil +} diff --git a/internal/events/event_manager_test.go b/internal/events/event_manager_test.go index df18f7276..f634108c4 100644 --- a/internal/events/event_manager_test.go +++ b/internal/events/event_manager_test.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. 
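Together with the enricher, the two new `EventManager` methods give callers a two-step pipeline over raw stored events: enrich, then filter against a subscription definition (note the implementation temporarily sets `sub.Transport` to `"websockets"` only so the definition parses; no delivery happens). A hedged sketch of that call pattern from elsewhere in the codebase, with import paths assumed from the repository layout and the surrounding wiring omitted:

```go
// Sketch only: assumes access to the internal events package, so this would
// live inside the FireFly module; em is an already-constructed EventManager.
package example

import (
	"context"

	"github.com/hyperledger/firefly/internal/events"
	"github.com/hyperledger/firefly/pkg/core"
)

func matchHistorical(ctx context.Context, em events.EventManager, raw []*core.Event, sub *core.Subscription) ([]*core.EnrichedEvent, error) {
	// Enrich first; the helper fails fast on the first enrichment error.
	enriched, err := em.EnrichEvents(ctx, raw)
	if err != nil {
		return nil, err
	}
	// Then keep only the events the subscription's filters accept.
	return em.FilterHistoricalEventsOnSubscription(ctx, enriched, sub)
}
```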
// // SPDX-License-Identifier: Apache-2.0 // @@ -650,3 +650,197 @@ func TestResolveTransportAndCapabilitiesDefault(t *testing.T) { em.mev.AssertExpectations(t) } + +func TestEventFilterOnSubscriptionMatchesEventType(t *testing.T) { + em := newTestEventManager(t) + defer em.cleanup(t) + + events := []*core.EnrichedEvent{ + { + Event: core.Event{ + Type: core.EventTypeIdentityConfirmed, + }, + }, + } + + subscription := &core.Subscription{ + Filter: core.SubscriptionFilter{ + Events: core.EventTypeIdentityConfirmed.String(), + }, + } + + filteredEvents, _ := em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, filteredEvents) + assert.Equal(t, 1, len(filteredEvents)) + + events[0].Event.Type = "" + subscription.Filter.Events = "" + events[0].Event.Topic = "someTopic" + subscription.Filter.Topic = "someTopic" + + filteredEvents, _ = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, filteredEvents) + assert.Equal(t, 1, len(filteredEvents)) + + listenerUuid := fftypes.NewUUID() + + events[0].Event.Topic = "" + subscription.Filter.Topic = "" + events[0].BlockchainEvent = &core.BlockchainEvent{ + Listener: listenerUuid, + } + subscription.Filter.BlockchainEvent.Listener = listenerUuid.String() + + filteredEvents, _ = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, filteredEvents) + assert.Equal(t, 1, len(filteredEvents)) + + events[0].BlockchainEvent.Listener = nil + subscription.Filter.BlockchainEvent.Listener = "" + events[0].BlockchainEvent.Name = "someName" + subscription.Filter.BlockchainEvent.Name = "someName" + + filteredEvents, _ = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, filteredEvents) + assert.Equal(t, 1, len(filteredEvents)) + + events[0].BlockchainEvent.Name = "" + subscription.Filter.BlockchainEvent.Name = "" + events[0].Transaction = &core.Transaction{ + Type: core.TransactionTypeContractInvoke, + } + subscription.Filter.Transaction.Type = core.TransactionTypeContractInvoke.String() + + filteredEvents, _ = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, filteredEvents) + assert.Equal(t, 1, len(filteredEvents)) + + events[0].Transaction.Type = "" + subscription.Filter.Transaction.Type = "" + events[0].Message = &core.Message{ + Header: core.MessageHeader{ + Tag: "someTag", + }, + } + subscription.Filter.Message.Tag = "someTag" + + filteredEvents, _ = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, filteredEvents) + assert.Equal(t, 1, len(filteredEvents)) + + group := &fftypes.Bytes32{} + + events[0].Message.Header.Tag = "" + subscription.Filter.Message.Tag = "" + events[0].Message.Header.Group = group + subscription.Filter.Message.Group = group.String() + + filteredEvents, _ = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, filteredEvents) + assert.Equal(t, 1, len(filteredEvents)) + + events[0].Message.Header.Group = nil + subscription.Filter.Message.Group = "" + events[0].Message.Header.SignerRef = core.SignerRef{ + Author: "someAuthor", + } + subscription.Filter.Message.Author = "someAuthor" + + filteredEvents, _ = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, filteredEvents) + assert.Equal(t, 1, len(filteredEvents)) +} + +func 
TestEventFilterOnSubscriptionFailsWithBadRegex(t *testing.T) { + em := newTestEventManager(t) + defer em.cleanup(t) + + regexThatFailsToCompile := "^(a(b)c$" + + events := []*core.EnrichedEvent{ + { + Event: core.Event{ + Type: core.EventTypeIdentityConfirmed, + }, + }, + } + + subscription := &core.Subscription{ + Filter: core.SubscriptionFilter{ + Events: regexThatFailsToCompile, + }, + } + + _, err := em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, err) + + events[0].Event.Type = "" + subscription.Filter.Events = "" + events[0].Event.Topic = "someTopic" + subscription.Filter.Topic = regexThatFailsToCompile + + _, err = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, err) + + listenerUuid := fftypes.NewUUID() + + events[0].Event.Topic = "" + subscription.Filter.Topic = "" + events[0].BlockchainEvent = &core.BlockchainEvent{ + Listener: listenerUuid, + } + subscription.Filter.BlockchainEvent.Listener = regexThatFailsToCompile + + _, err = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, err) + + events[0].BlockchainEvent.Listener = nil + subscription.Filter.BlockchainEvent.Listener = "" + events[0].BlockchainEvent.Name = "someName" + subscription.Filter.BlockchainEvent.Name = regexThatFailsToCompile + + _, err = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, err) + + events[0].BlockchainEvent.Name = "" + subscription.Filter.BlockchainEvent.Name = "" + events[0].Transaction = &core.Transaction{ + Type: core.TransactionTypeContractInvoke, + } + subscription.Filter.Transaction.Type = regexThatFailsToCompile + + _, err = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, err) + + events[0].Transaction.Type = "" + subscription.Filter.Transaction.Type = "" + events[0].Message = &core.Message{ + Header: core.MessageHeader{ + Tag: "someTag", + }, + } + subscription.Filter.Message.Tag = regexThatFailsToCompile + + _, err = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, err) + + group := &fftypes.Bytes32{} + + events[0].Message.Header.Tag = "" + subscription.Filter.Message.Tag = "" + events[0].Message.Header.Group = group + subscription.Filter.Message.Group = regexThatFailsToCompile + + _, err = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, err) + + events[0].Message.Header.Group = nil + subscription.Filter.Message.Group = "" + events[0].Message.Header.SignerRef = core.SignerRef{ + Author: "someAuthor", + } + subscription.Filter.Message.Author = regexThatFailsToCompile + + _, err = em.FilterHistoricalEventsOnSubscription(context.Background(), events, subscription) + assert.NotNil(t, err) +} diff --git a/internal/events/subscription_manager.go b/internal/events/subscription_manager.go index 928e3789e..a31b937e5 100644 --- a/internal/events/subscription_manager.go +++ b/internal/events/subscription_manager.go @@ -573,3 +573,75 @@ func (sm *subscriptionManager) deliveryResponse(ei events.Plugin, connID string, sm.mux.Unlock() dispatcher.deliveryResponse(inflight) } + +func (sub *subscription) MatchesEvent(event *core.EnrichedEvent) bool { + if sub.eventMatcher != nil && !sub.eventMatcher.MatchString(string(event.Type)) { + return false + } + + msg := event.Message + tx := event.Transaction + be := event.BlockchainEvent + 
tag := "" + topic := event.Topic + group := "" + author := "" + txType := "" + beName := "" + beListener := "" + + if msg != nil { + tag = msg.Header.Tag + author = msg.Header.Author + if msg.Header.Group != nil { + group = msg.Header.Group.String() + } + } + + if tx != nil { + txType = tx.Type.String() + } + + if be != nil { + beName = be.Name + beListener = be.Listener.String() + } + + if sub.topicFilter != nil { + topicsMatch := false + if sub.topicFilter.MatchString(topic) { + topicsMatch = true + } + if !topicsMatch { + return false + } + } + + if sub.messageFilter != nil { + if sub.messageFilter.tagFilter != nil && !sub.messageFilter.tagFilter.MatchString(tag) { + return false + } + if sub.messageFilter.authorFilter != nil && !sub.messageFilter.authorFilter.MatchString(author) { + return false + } + if sub.messageFilter.groupFilter != nil && !sub.messageFilter.groupFilter.MatchString(group) { + return false + } + } + + if sub.transactionFilter != nil { + if sub.transactionFilter.typeFilter != nil && !sub.transactionFilter.typeFilter.MatchString(txType) { + return false + } + } + + if sub.blockchainFilter != nil { + if sub.blockchainFilter.nameFilter != nil && !sub.blockchainFilter.nameFilter.MatchString(beName) { + return false + } + if sub.blockchainFilter.listenerFilter != nil && !sub.blockchainFilter.listenerFilter.MatchString(beListener) { + return false + } + } + return true +} diff --git a/internal/namespace/config.go b/internal/namespace/config.go index 16ff8ade6..7345b1a19 100644 --- a/internal/namespace/config.go +++ b/internal/namespace/config.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // diff --git a/internal/namespace/manager.go b/internal/namespace/manager.go index 36409a574..018b04e5d 100644 --- a/internal/namespace/manager.go +++ b/internal/namespace/manager.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -798,6 +798,7 @@ func (nm *namespaceManager) loadTLSConfig(ctx context.Context, tlsConfigs map[st return nil } +// nolint: gocyclo func (nm *namespaceManager) loadNamespace(ctx context.Context, name string, index int, conf config.Section, rawNSConfig fftypes.JSONObject, availablePlugins map[string]*plugin) (ns *namespace, err error) { if err := fftypes.ValidateFFNameField(ctx, name, fmt.Sprintf("namespaces.predefined[%d].name", index)); err != nil { return nil, err @@ -890,9 +891,10 @@ func (nm *namespaceManager) loadNamespace(ctx context.Context, name string, inde } config := orchestrator.Config{ - DefaultKey: conf.GetString(coreconfig.NamespaceDefaultKey), - TokenBroadcastNames: nm.tokenBroadcastNames, - KeyNormalization: keyNormalization, + DefaultKey: conf.GetString(coreconfig.NamespaceDefaultKey), + TokenBroadcastNames: nm.tokenBroadcastNames, + KeyNormalization: keyNormalization, + MaxHistoricalEventScanLimit: config.GetInt(coreconfig.SubscriptionMaxHistoricalEventScanLength), } if multipartyEnabled.(bool) { contractsConf := multipartyConf.SubArray(coreconfig.NamespaceMultipartyContract) diff --git a/internal/orchestrator/data_query.go b/internal/orchestrator/data_query.go index 36ac69d6a..f24555ec8 100644 --- a/internal/orchestrator/data_query.go +++ b/internal/orchestrator/data_query.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -322,13 +322,24 @@ func (or *orchestrator) GetEventsWithReferences(ctx context.Context, filter ffap return nil, nil, err } - enriched := make([]*core.EnrichedEvent, len(events)) - for i, event := range events { - enrichedEvent, err := or.events.EnrichEvent(or.ctx, event) - if err != nil { - return nil, nil, err - } - enriched[i] = enrichedEvent + enriched, err := or.events.EnrichEvents(ctx, events) + if err != nil { + return nil, nil, err } + + return enriched, fr, err +} + +func (or *orchestrator) GetEventsWithReferencesInSequenceRange(ctx context.Context, filter ffapi.AndFilter, startSequence int, endSequence int) ([]*core.EnrichedEvent, *ffapi.FilterResult, error) { + events, fr, err := or.database().GetEventsInSequenceRange(ctx, or.namespace.Name, filter, startSequence, endSequence) + if err != nil { + return nil, nil, err + } + + enriched, err := or.events.EnrichEvents(ctx, events) + if err != nil { + return nil, nil, err + } + return enriched, fr, err } diff --git a/internal/orchestrator/data_query_test.go b/internal/orchestrator/data_query_test.go index 1de2489ef..384484ac6 100644 --- a/internal/orchestrator/data_query_test.go +++ b/internal/orchestrator/data_query_test.go @@ -691,25 +691,25 @@ func TestGetEventsWithReferences(t *testing.T) { Type: core.EventTypeMessageConfirmed, } - or.mem.On("EnrichEvent", mock.Anything, blockchainEvent).Return(&core.EnrichedEvent{ - Event: *blockchainEvent, - BlockchainEvent: &core.BlockchainEvent{ - ID: ref1, + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return([]*core.EnrichedEvent{ + { + Event: *blockchainEvent, + BlockchainEvent: &core.BlockchainEvent{ + ID: ref1, + }, }, - }, nil) - - or.mem.On("EnrichEvent", mock.Anything, txEvent).Return(&core.EnrichedEvent{ - Event: *txEvent, - Transaction: &core.Transaction{ - ID: ref2, + { + Event: *txEvent, + Transaction: &core.Transaction{ + ID: ref2, + }, }, - }, nil) - - or.mem.On("EnrichEvent", mock.Anything, msgEvent).Return(&core.EnrichedEvent{ - Event: *msgEvent, - Message: &core.Message{ - Header: core.MessageHeader{ - ID: ref3, + { + Event: *msgEvent, + Message: &core.Message{ + Header: core.MessageHeader{ + ID: ref3, + }, }, }, }, nil) @@ -731,7 +731,7 @@ func TestGetEventsWithReferencesEnrichFail(t *testing.T) { u := fftypes.NewUUID() or.mdi.On("GetEvents", mock.Anything, "ns", mock.Anything).Return([]*core.Event{{ID: fftypes.NewUUID()}}, nil, nil) - or.mem.On("EnrichEvent", mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) fb := database.EventQueryFactory.NewFilter(context.Background()) f := fb.And(fb.Eq("id", u)) _, _, err := or.GetEventsWithReferences(context.Background(), f) @@ -848,3 +848,15 @@ func TestGetNextPins(t *testing.T) { _, _, err := or.GetNextPins(context.Background(), f) assert.NoError(t, err) } + +func TestGetEventsInSequenceWithReferencesWhenEnrichEventsFails(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + u := fftypes.NewUUID() + or.mdi.On("GetEventsInSequenceRange", mock.Anything, "ns", mock.Anything, mock.Anything, mock.Anything).Return([]*core.Event{}, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(nil, fmt.Errorf("Oops...")) + fb := database.EventQueryFactory.NewFilter(context.Background()) + f := fb.And(fb.Eq("id", u)) + _, _, err := or.GetEventsWithReferencesInSequenceRange(context.Background(), f, 0, 100) + assert.EqualError(t, err, "Oops...") +} diff --git 
a/internal/orchestrator/orchestrator.go b/internal/orchestrator/orchestrator.go index 186782d63..3b599b915 100644 --- a/internal/orchestrator/orchestrator.go +++ b/internal/orchestrator/orchestrator.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -81,6 +81,7 @@ type Orchestrator interface { GetSubscriptions(ctx context.Context, filter ffapi.AndFilter) ([]*core.Subscription, *ffapi.FilterResult, error) GetSubscriptionByID(ctx context.Context, id string) (*core.Subscription, error) GetSubscriptionByIDWithStatus(ctx context.Context, id string) (*core.SubscriptionWithStatus, error) + GetSubscriptionEventsHistorical(ctx context.Context, subscription *core.Subscription, filter ffapi.AndFilter, startSequence int, endSequence int) ([]*core.EnrichedEvent, *ffapi.FilterResult, error) CreateSubscription(ctx context.Context, subDef *core.Subscription) (*core.Subscription, error) CreateUpdateSubscription(ctx context.Context, subDef *core.Subscription) (*core.Subscription, error) DeleteSubscription(ctx context.Context, id string) error @@ -181,10 +182,11 @@ type Plugins struct { } type Config struct { - DefaultKey string - KeyNormalization string - Multiparty multiparty.Config - TokenBroadcastNames map[string]string + DefaultKey string + KeyNormalization string + Multiparty multiparty.Config + TokenBroadcastNames map[string]string + MaxHistoricalEventScanLimit int } type orchestrator struct { diff --git a/internal/orchestrator/orchestrator_test.go b/internal/orchestrator/orchestrator_test.go index a3a3d31ca..695a6e374 100644 --- a/internal/orchestrator/orchestrator_test.go +++ b/internal/orchestrator/orchestrator_test.go @@ -171,6 +171,7 @@ func newTestOrchestrator() *testOrchestrator { tor.orchestrator.defhandler = tor.mdh tor.orchestrator.defsender = tor.mds tor.orchestrator.config.Multiparty.Enabled = true + tor.orchestrator.config.MaxHistoricalEventScanLimit = 1000 tor.orchestrator.plugins = &Plugins{ Blockchain: BlockchainPlugin{ Plugin: tor.mbi, diff --git a/internal/orchestrator/subscriptions.go b/internal/orchestrator/subscriptions.go index fcfd93412..d7ebe9743 100644 --- a/internal/orchestrator/subscriptions.go +++ b/internal/orchestrator/subscriptions.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -18,11 +18,14 @@ package orchestrator import ( "context" + "math" "time" + "github.com/hyperledger/firefly-common/pkg/config" "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" + "github.com/hyperledger/firefly/internal/coreconfig" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/internal/events/system" "github.com/hyperledger/firefly/pkg/core" @@ -134,3 +137,58 @@ func (or *orchestrator) GetSubscriptionByIDWithStatus(ctx context.Context, id st return subWithStatus, nil } + +func (or *orchestrator) GetSubscriptionEventsHistorical(ctx context.Context, subscription *core.Subscription, filter ffapi.AndFilter, startSequence int, endSequence int) ([]*core.EnrichedEvent, *ffapi.FilterResult, error) { + if startSequence != -1 && endSequence != -1 && endSequence-startSequence > config.GetInt(coreconfig.SubscriptionMaxHistoricalEventScanLength) { + return nil, nil, i18n.NewError(ctx, coremsgs.MsgMaxSubscriptionEventScanLimitBreached, startSequence, endSequence) + } + + requestedFiltering, err := filter.Finalize() + if err != nil { + return nil, nil, err + } + + var unfilteredEvents []*core.EnrichedEvent + if startSequence == -1 && endSequence == -1 { + unfilteredEvents, _, err = or.GetEventsWithReferences(ctx, filter) + if err != nil { + return nil, nil, err + } + } else { + if startSequence == -1 { + recordLimit := math.Min(float64(requestedFiltering.Limit), float64(config.GetInt(coreconfig.SubscriptionMaxHistoricalEventScanLength))) + if endSequence-int(recordLimit) > 0 { + startSequence = endSequence - int(recordLimit) + } else { + startSequence = 0 + } + } + + if endSequence == -1 { + // No end sequence was provided, so scan forward 1000 events from the start sequence; overshooting the head of the event stream is harmless because the database simply returns nothing beyond it + endSequence = startSequence + 1000 + } + + unfilteredEvents, _, err = or.GetEventsWithReferencesInSequenceRange(ctx, filter, startSequence, endSequence) + if err != nil { + return nil, nil, err + } + } + + filteredEvents, err := or.events.FilterHistoricalEventsOnSubscription(ctx, unfilteredEvents, subscription) + if err != nil { + return nil, nil, err + } + + var filteredEventsMatchingSubscription []*core.EnrichedEvent + if len(filteredEvents) > int(requestedFiltering.Limit) { + filteredEventsMatchingSubscription = filteredEvents[len(filteredEvents)-int(requestedFiltering.Limit):] + } else { + filteredEventsMatchingSubscription = filteredEvents + } + + filterResultLength := int64(len(filteredEventsMatchingSubscription)) + return filteredEventsMatchingSubscription, &ffapi.FilterResult{ + TotalCount: &filterResultLength, + }, nil +} diff --git a/internal/orchestrator/subscriptions_test.go b/internal/orchestrator/subscriptions_test.go index 58326469f..bee445506 100644 --- a/internal/orchestrator/subscriptions_test.go +++ b/internal/orchestrator/subscriptions_test.go @@ -358,3 +358,258 @@ func TestGetSGetSubscriptionsByIDWithStatusUnknownSub(t *testing.T) { assert.NoError(t, err) assert.Nil(t, subWithStatus) } + +func generateFakeEvents(eventCount int) ([]*core.Event, []*core.EnrichedEvent) { + baseEvents := []*core.Event{} + enrichedEvents := []*core.EnrichedEvent{} + baseEvent := &core.Event{ + Type: core.EventTypeIdentityConfirmed, + Topic: "Topic1", + } + enrichedEvent := &core.EnrichedEvent{ + Event: *baseEvent, + BlockchainEvent: &core.BlockchainEvent{ + Namespace: "ns1", + }, + } + + for i := 0; i < eventCount; i++ { + baseEvents = 
append(baseEvents, baseEvent) + enrichedEvents = append(enrichedEvents, enrichedEvent) + } + + return baseEvents, enrichedEvents +} + +func TestGetHistoricalEventsForSubscription(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + baseEvents, enrichedEvents := generateFakeEvents(20) + + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(enrichedEvents, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return(enrichedEvents, nil) + + u := fftypes.NewUUID() + // Subscription will match all of the fake events + sub := &core.Subscription{ + SubscriptionRef: core.SubscriptionRef{ + ID: u, + Name: "sub1", + Namespace: "ns1", + }, + } + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(20) + retEvents, _, err := or.GetSubscriptionEventsHistorical(context.Background(), sub, filter, 0, 100) + assert.Equal(t, err, nil) + assert.Equal(t, len(retEvents), 20) +} + +func TestGetHistoricalEventsForSubscriptionNotEnoughEventsToSatisfyLimit(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + // Generate fewer events than the total event limit + baseEvents, enrichedEvents := generateFakeEvents(20) + + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(enrichedEvents, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return(enrichedEvents, nil) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(50) + + retEvents, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, 0, 100) + assert.Equal(t, err, nil) + assert.Equal(t, 20, len(retEvents)) +} + +func TestGetHistoricalEventsForSubscriptionMoreEventsThanRequired(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + // Generate more events than the overall limit + baseEvents, enrichedEvents := generateFakeEvents(50) + + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(enrichedEvents, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return(enrichedEvents, nil) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(25) // Cap the number of returned events at 25 + retEvents, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, 0, 100) + assert.Equal(t, err, nil) + assert.Equal(t, 25, len(retEvents)) +} + +func TestGetHistoricalEventsForSubscriptionGetEventsFails(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, nil, fmt.Errorf("Something went wrong!")) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(20) + + _, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, 0, 100) + 
assert.NotNil(t, err) +} + +func TestGetHistoricalEventsForSubscriptionBadQueryFilter(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And(fb.Eq("tag", map[bool]bool{true: false})) + _, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, 0, 100) + assert.NotNil(t, err) +} + +func TestGetHistoricalEventsForSubscriptionGettingHistoricalEventsThrows(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + baseEvents, _ := generateFakeEvents(20) + + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return([]*core.EnrichedEvent{}, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return(nil, fmt.Errorf("KERRR-BOOM!")) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(20) + + _, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, 0, 100) + assert.NotNil(t, err) +} + +func TestGetHistoricalEventsForSubscriptionGettingHistoricalEventsGoesPastScanLimit(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + + _, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, 0, 2000) // Default limit is 1000 + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Event scan limit breached") +} + +func TestGetHistoricalEventsForSubscriptionEndSequenceNotProvided(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(1000) + + // Generate more events than the overall limit + baseEvents, enrichedEvents := generateFakeEvents(1500) + + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, 0, 1000).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(enrichedEvents, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return(enrichedEvents, nil) + + retEvents, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, 0, -1) + assert.Equal(t, err, nil) + assert.Equal(t, 1000, len(retEvents)) +} + +func TestGetHistoricalEventsForSubscriptionEndSequencePastRecordCount(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(1000) + + // Generate more events than the overall limit + baseEvents, _ := generateFakeEvents(1500) + + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, 1000, 2000).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return([]*core.EnrichedEvent{}, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return([]*core.EnrichedEvent{}, nil) + + retEvents, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, 1000, -1) + assert.Equal(t, err, nil) + assert.Equal(t, 0, len(retEvents)) +} + +func 
TestGetHistoricalEventsForSubscriptionStartSequenceNotProvidedAndBelowTotalLimit(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(1000) + + // Generate fewer events than the requested limit + baseEvents, enrichedEvents := generateFakeEvents(200) + + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, 0, 200).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(enrichedEvents, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return(enrichedEvents, nil) + + retEvents, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, -1, 200) + assert.Equal(t, err, nil) + assert.Equal(t, 200, len(retEvents)) +} + +func TestGetHistoricalEventsForSubscriptionStartSequenceNotProvided(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(1000) + + // Generate exactly as many events as the requested limit + baseEvents, enrichedEvents := generateFakeEvents(1000) + + or.mdi.On("GetEventsInSequenceRange", mock.Anything, mock.Anything, mock.Anything, 100, 1100).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(enrichedEvents, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return(enrichedEvents, nil) + + retEvents, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, -1, 1100) + assert.Equal(t, err, nil) + assert.Equal(t, 1000, len(retEvents)) +} + +func TestGetHistoricalEventsForSubscriptionNoStartOrEndSequence(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(1000) + + baseEvents, enrichedEvents := generateFakeEvents(1000) + + or.mdi.On("GetEvents", mock.Anything, mock.Anything, mock.Anything).Return(baseEvents, nil, nil) + or.mem.On("EnrichEvents", mock.Anything, mock.Anything).Return(enrichedEvents, nil) + or.mem.On("FilterHistoricalEventsOnSubscription", mock.Anything, mock.Anything, mock.Anything).Return(enrichedEvents, nil) + + retEvents, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, -1, -1) + assert.Equal(t, err, nil) + assert.Equal(t, 1000, len(retEvents)) +} + +func TestGetHistoricalEventsForSubscriptionNoStartOrEndSequenceFails(t *testing.T) { + or := newTestOrchestrator() + defer or.cleanup(t) + + fb := database.SubscriptionQueryFactory.NewFilter(context.Background()) + filter := fb.And() + filter.Limit(1000) + + or.mdi.On("GetEvents", mock.Anything, mock.Anything, mock.Anything).Return(nil, nil, fmt.Errorf("boom!")) + + _, _, err := or.GetSubscriptionEventsHistorical(context.Background(), &core.Subscription{}, filter, -1, -1) + assert.NotNil(t, err) +} diff --git a/mocks/databasemocks/plugin.go b/mocks/databasemocks/plugin.go index 9050baa0b..ad1c54eb7 100644 --- a/mocks/databasemocks/plugin.go +++ b/mocks/databasemocks/plugin.go @@ -1120,6 +1120,45 @@ func (_m *Plugin) GetEvents(ctx context.Context, namespace string, filter ffapi.
return r0, r1, r2 } +// GetEventsInSequenceRange provides a mock function with given fields: ctx, namespace, filter, startSequence, endSequence +func (_m *Plugin) GetEventsInSequenceRange(ctx context.Context, namespace string, filter ffapi.Filter, startSequence int, endSequence int) ([]*core.Event, *ffapi.FilterResult, error) { + ret := _m.Called(ctx, namespace, filter, startSequence, endSequence) + + if len(ret) == 0 { + panic("no return value specified for GetEventsInSequenceRange") + } + + var r0 []*core.Event + var r1 *ffapi.FilterResult + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter, int, int) ([]*core.Event, *ffapi.FilterResult, error)); ok { + return rf(ctx, namespace, filter, startSequence, endSequence) + } + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter, int, int) []*core.Event); ok { + r0 = rf(ctx, namespace, filter, startSequence, endSequence) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*core.Event) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter, int, int) *ffapi.FilterResult); ok { + r1 = rf(ctx, namespace, filter, startSequence, endSequence) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*ffapi.FilterResult) + } + } + + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter, int, int) error); ok { + r2 = rf(ctx, namespace, filter, startSequence, endSequence) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + // GetFFI provides a mock function with given fields: ctx, namespace, name, version func (_m *Plugin) GetFFI(ctx context.Context, namespace string, name string, version string) (*fftypes.FFI, error) { ret := _m.Called(ctx, namespace, name, version) diff --git a/mocks/eventmocks/event_manager.go b/mocks/eventmocks/event_manager.go index 0251218d9..7514bf1bb 100644 --- a/mocks/eventmocks/event_manager.go +++ b/mocks/eventmocks/event_manager.go @@ -169,6 +169,66 @@ func (_m *EventManager) EnrichEvent(ctx context.Context, event *core.Event) (*co return r0, r1 } +// EnrichEvents provides a mock function with given fields: ctx, _a1 +func (_m *EventManager) EnrichEvents(ctx context.Context, _a1 []*core.Event) ([]*core.EnrichedEvent, error) { + ret := _m.Called(ctx, _a1) + + if len(ret) == 0 { + panic("no return value specified for EnrichEvents") + } + + var r0 []*core.EnrichedEvent + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []*core.Event) ([]*core.EnrichedEvent, error)); ok { + return rf(ctx, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, []*core.Event) []*core.EnrichedEvent); ok { + r0 = rf(ctx, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*core.EnrichedEvent) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, []*core.Event) error); ok { + r1 = rf(ctx, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FilterHistoricalEventsOnSubscription provides a mock function with given fields: ctx, _a1, sub +func (_m *EventManager) FilterHistoricalEventsOnSubscription(ctx context.Context, _a1 []*core.EnrichedEvent, sub *core.Subscription) ([]*core.EnrichedEvent, error) { + ret := _m.Called(ctx, _a1, sub) + + if len(ret) == 0 { + panic("no return value specified for FilterHistoricalEventsOnSubscription") + } + + var r0 []*core.EnrichedEvent + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []*core.EnrichedEvent, *core.Subscription) ([]*core.EnrichedEvent, error)); ok { + return rf(ctx, _a1, sub) + } + if rf, ok := ret.Get(0).(func(context.Context, []*core.EnrichedEvent, 
*core.Subscription) []*core.EnrichedEvent); ok { + r0 = rf(ctx, _a1, sub) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*core.EnrichedEvent) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, []*core.EnrichedEvent, *core.Subscription) error); ok { + r1 = rf(ctx, _a1, sub) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetPlugins provides a mock function with given fields: func (_m *EventManager) GetPlugins() []*core.NamespaceStatusPlugin { ret := _m.Called() diff --git a/mocks/orchestratormocks/orchestrator.go b/mocks/orchestratormocks/orchestrator.go index 0bb4fb57d..1ece33919 100644 --- a/mocks/orchestratormocks/orchestrator.go +++ b/mocks/orchestratormocks/orchestrator.go @@ -1347,6 +1347,45 @@ func (_m *Orchestrator) GetSubscriptionByIDWithStatus(ctx context.Context, id st return r0, r1 } +// GetSubscriptionEventsHistorical provides a mock function with given fields: ctx, subscription, filter, startSequence, endSequence +func (_m *Orchestrator) GetSubscriptionEventsHistorical(ctx context.Context, subscription *core.Subscription, filter ffapi.AndFilter, startSequence int, endSequence int) ([]*core.EnrichedEvent, *ffapi.FilterResult, error) { + ret := _m.Called(ctx, subscription, filter, startSequence, endSequence) + + if len(ret) == 0 { + panic("no return value specified for GetSubscriptionEventsHistorical") + } + + var r0 []*core.EnrichedEvent + var r1 *ffapi.FilterResult + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, *core.Subscription, ffapi.AndFilter, int, int) ([]*core.EnrichedEvent, *ffapi.FilterResult, error)); ok { + return rf(ctx, subscription, filter, startSequence, endSequence) + } + if rf, ok := ret.Get(0).(func(context.Context, *core.Subscription, ffapi.AndFilter, int, int) []*core.EnrichedEvent); ok { + r0 = rf(ctx, subscription, filter, startSequence, endSequence) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*core.EnrichedEvent) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *core.Subscription, ffapi.AndFilter, int, int) *ffapi.FilterResult); ok { + r1 = rf(ctx, subscription, filter, startSequence, endSequence) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*ffapi.FilterResult) + } + } + + if rf, ok := ret.Get(2).(func(context.Context, *core.Subscription, ffapi.AndFilter, int, int) error); ok { + r2 = rf(ctx, subscription, filter, startSequence, endSequence) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + // GetSubscriptions provides a mock function with given fields: ctx, filter func (_m *Orchestrator) GetSubscriptions(ctx context.Context, filter ffapi.AndFilter) ([]*core.Subscription, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) diff --git a/pkg/database/plugin.go b/pkg/database/plugin.go index cd6fcac7f..367d2c963 100644 --- a/pkg/database/plugin.go +++ b/pkg/database/plugin.go @@ -1,4 +1,4 @@ -// Copyright © 2023 Kaleido, Inc. +// Copyright © 2024 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -268,6 +268,9 @@ type iEventCollection interface { // GetEvents - Get events GetEvents(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Event, res *ffapi.FilterResult, err error) + + // GetEventsInSequenceRange - Get a range of events between 2 sequence values + GetEventsInSequenceRange(ctx context.Context, namespace string, filter ffapi.Filter, startSequence int, endSequence int) (message []*core.Event, res *ffapi.FilterResult, err error) } type iIdentitiesCollection interface {
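For reference, here is a minimal, hypothetical sketch (not part of the patch) of how code inside the FireFly module might drive the new GetSubscriptionEventsHistorical API, using the same filter-building pattern as the tests in this change. The function name exampleHistoricalScan, the sequence range 0..100, and the limit of 25 are illustrative assumptions.

package example

import (
	"context"
	"fmt"

	"github.com/hyperledger/firefly/internal/orchestrator"
	"github.com/hyperledger/firefly/pkg/database"
)

// exampleHistoricalScan replays a slice of the historical event stream through a subscription's filters.
// It assumes it is compiled inside the FireFly module, since internal/orchestrator is not importable externally.
func exampleHistoricalScan(ctx context.Context, or orchestrator.Orchestrator, subID string) error {
	// Resolve the subscription whose filters should be applied to the historical events
	sub, err := or.GetSubscriptionByID(ctx, subID)
	if err != nil || sub == nil {
		return err
	}

	// Build an event filter; the limit caps how many matching events are returned
	fb := database.EventQueryFactory.NewFilter(ctx)
	filter := fb.And()
	filter.Limit(25)

	// Index sequences 0..100 of the raw event stream, then keep only the events matching the subscription
	events, _, err := or.GetSubscriptionEventsHistorical(ctx, sub, filter, 0, 100)
	if err != nil {
		return err
	}
	for _, ev := range events {
		fmt.Printf("matched event %s type=%s sequence=%d\n", ev.ID, ev.Type, ev.Sequence)
	}
	return nil
}

Passing -1 for either sequence bound lets the orchestrator derive the scan window itself, capped by coreconfig.SubscriptionMaxHistoricalEventScanLength, as exercised by the tests above.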