From f135aba6e65762ac7944b85d854af5ac16fc86e2 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Wed, 27 Nov 2024 10:00:37 +0100 Subject: [PATCH 01/21] Revert "Revert "Change request/error handling"" This reverts commit 8914dab6ffc3e9d85783bb8d0c4d3072f1045911. --- CHANGELOG.md | 114 +++++++++++ README.md | 52 +++-- src/connection.ts | 226 ++++++++++++---------- src/database.ts | 28 +-- src/error.ts | 323 +++++++++++++++++++++++++------- src/job.ts | 8 +- src/lib/request.ts | 96 +++++----- src/route.ts | 39 ++-- src/test/27-query-management.ts | 4 +- 9 files changed, 627 insertions(+), 263 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 70f2ccf83..24531feb8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,59 @@ This driver uses semantic versioning: ## [9.2.0] - 2024-11-27 +### Changed + +- Errors encountered before a request completes are now wrapped in a + `NetworkError` or a subclass thereof + + This should help making it easier to diagnose network issues and distinguish + the relevant error conditions. + + The originating error can still be accessed using the `cause` property of the + `NetworkError` error. + +- `HttpError` now extends the `NetworkError` class + + This allows treating all non-`ArangoError` errors as one category of errors, + even when there is no server response available. + +- `db.waitForPropagation` now throws a `PropagationTimeoutError` error when + invoked with a `timeout` option and the timeout duration is exceeded + + The method would previously throw the most recent error encountered while + waiting for replication. The originating error can still be accessed using + the `cause` property of the `PropagationTimeoutError` error. + +- `db.waitForPropagation` now respects the `timeout` option more strictly + + Previously the method would only time out if the timeout duration was + exceeded after the most recent request failed. Now the timeout is + recalculated and passed on to each request, preventing it from exceeding + the specified duration. + + If the propagation timed out due to an underlying request exceeding the + timeout duration, the `cause` property of the `PropagationTimeoutError` + error will be a `ResponseTimeoutError` error. + +- `config.beforeRequest` and `config.afterResponse` callbacks can now return + promises + + If the callback returns a promise, it will be awaited before the request + and response cycle proceeds. If either callback throws an error or returns + a promise that is rejected, that error will be thrown instead. + +- `config.afterResponse` callback signature changed + + The callback signature previously used the internal `ArangojsResponse` type. + The new signature uses the `Response` type of the Fetch API with an + additional `request` property to more accurately represent the actual value + it receives as the `parsedBody` property will never be present. + +- `response` property on `ArangoError` is now optional + + This property should always be present but this allows using the error in + situations where a response might not be available. + ### Added - Added `database.availability` method @@ -26,6 +79,67 @@ This driver uses semantic versioning: - Added `database.supportInfo` method +- Added `onError` option to `Config` (DE-955) + + This option can be used to specify a callback function that will be invoked + whenever a request results in an error. Unlike `afterResponse`, this callback + will be invoked even if the request completed but returned an error status. 
+ In this case the error will be the `HttpError` or `ArangoError` representing + the error response. + + If the `onError` callback throws an error or returns a promise that is + rejected, that error will be thrown instead. + +- Added `NetworkError` class + + This is the common base class for all errors (including `HttpError`) that + occur while making a request. The originating error can be accessed using the + `cause` property. The request object can be accessed using the `request` + property. + + Note that `ArangoError` and the new `PropagationTimeoutError` error type + do not extend `NetworkError` but may wrap an underlying error, which can + be accessed using the `cause` property. + +- Added `ResponseTimeoutError` class + + This error extends `NetworkError` and is thrown when a request deliberately + times out using the `timeout` option. + +- Added `RequestAbortedError` class + + This error extends `NetworkError` and is thrown when a request is aborted + by using the `db.close` method. + +- Added `FetchFailedError` class + + This error extends `NetworkError` and is thrown when a request fails because + the underlying `fetch` call fails (usually with a `TypeError`). + + In Node.js the root cause of this error (e.g. a network failure) can often be + found in the `cause` property of the originating error, i.e. the `cause` + property of the `cause` property of this error. + + In browsers the root cause is usually not exposed directly but can often + be diagnosed by examining the developer console or network tab. + +- Added `PropagationTimeoutError` class + + This error does not extend `NetworkError` but wraps the most recent error + encountered while waiting for replication, which can be accessed using the + `cause` property. This error is only thrown when `db.waitForPropagation` + is invoked with a `timeout` option and the timeout duration is exceeded. + +- Added `ProcessedResponse` type + + This type replaces the previously internal `ArangojsResponse` type and + extends the native `Response` type with additional properties. + +- Added optional `ArangoError#request` property + + This property is always present if the error has a `response` property. In + normal use this should always be the case. + - Added `keepNull` option to `CollectionInsertOptions` type (DE-946) This option was previously missing from the type. diff --git a/README.md b/README.md index 8f81115a1..5c48a1724 100644 --- a/README.md +++ b/README.md @@ -139,21 +139,42 @@ and [the `db` object](https://www.arangodb.com/docs/stable/appendix-references-d ## Error responses -If arangojs encounters an API error, it will throw an `ArangoError` with -an `errorNum` property indicating the ArangoDB error code and the `code` -property indicating the HTTP status code from the response body. +If the server returns an ArangoDB error response, arangojs will throw an +`ArangoError` with an `errorNum` property indicating the ArangoDB error code +and expose the response body as the `response` property of the error object. -For any other non-ArangoDB error responses (4xx/5xx status code), it will throw -an `HttpError` error with the status code indicated by the `code` property. +For all other errors during the request/response cycle, arangojs will throw a +`NetworkError` or a more specific subclass thereof and expose the originating +request object as the `request` property of the error object. -If the server response did not indicate an error but the response body could -not be parsed, a regular `SyntaxError` may be thrown instead. 
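To illustrate the distinction described above, here is a minimal sketch of the intended usage — assuming the error classes are imported from the `arangojs/error` module as modified later in this patch, and `"my-collection"` stands in for any collection name:

```js
import { Database } from "arangojs";
import { ArangoError, HttpError, NetworkError } from "arangojs/error";

const db = new Database();

try {
  await db.collection("my-collection").get();
} catch (err) {
  if (err instanceof ArangoError) {
    // ArangoDB error response, e.g. collection not found
    console.log(err.errorNum, err.message);
  } else if (err instanceof HttpError) {
    // Non-ArangoDB error response (4xx/5xx status code)
    console.log(err.code, err.response);
  } else if (err instanceof NetworkError) {
    // Request never completed; inspect the originating error
    console.log(err.cause);
  }
}
```

Note that `HttpError` is checked before `NetworkError` here because this patch makes `HttpError` a subclass of `NetworkError`.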
+If the server responded with a non-2xx status code, this `NetworkError` will +be an `HttpError` with a `code` property indicating the HTTP status code of the +response and a `response` property containing the response object itself. -In all of these cases the server response object will be exposed as the -`response` property on the error object. +If the error is caused by an exception, the originating exception will be +available as the `cause` property of the error object thrown by arangojs. For +network errors, this will often be a `TypeError`. -If the request failed at a network level or the connection was closed without -receiving a response, the underlying system error will be thrown instead. +### Node.js network errors + +In Node.js, network errors caused by a `TypeError` will often have a `cause` +property containing a more detailed exception. + +Specifically, these are often either system errors (represented by regular +`Error` objects with additional properties) or errors from the `undici` module +Node.js uses internally for its native `fetch` implementation. + +Node.js system error objects provide a `code` property containing the specific +string error code, a `syscall` property identifying the underlying system call +that triggered the error (e.g. `connect`), as well as other helpful properties. + +For more details on Node.js system errors, see the Node.js documentation of the +[`SystemError` interface](https://nodejs.org/api/errors.html#class-systemerror) +as well as the section on +[Node.js error codes](https://nodejs.org/api/errors.html#nodejs-error-codes). + +For more details on the errors thrown by `undici`, see the +[undici errors documentation](https://undici.nodejs.org/#/docs/api/Errors.md). ## Common issues @@ -170,6 +191,15 @@ Additionally please ensure that your version of Node.js (or browser) and ArangoDB are supported by the version of arangojs you are trying to use. See the [compatibility section](#compatibility) for additional information. +You can install an older version of arangojs using `npm` or `yarn`: + +```sh +# for version 8.x.x +yarn add arangojs@8 +# - or - +npm install --save arangojs@8 +``` + ### No code intelligence when using require instead of import If you are using `require` to import the `arangojs` module in JavaScript, the diff --git a/src/connection.ts b/src/connection.ts index 2565226e3..645830acd 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -13,18 +13,17 @@ import { Database } from "./database.js"; import { ArangoError, HttpError, + NetworkError, + PropagationTimeoutError, isArangoError, isArangoErrorResponse, - isSystemError, + isNetworkError, } from "./error.js"; import { ERROR_ARANGO_CONFLICT, - ERROR_ARANGO_MAINTENANCE_MODE, } from "./lib/codes.js"; import { normalizeUrl } from "./lib/normalizeUrl.js"; import { - ArangojsError, - ArangojsResponse, createRequest, RequestConfig, RequestFunction, @@ -162,8 +161,6 @@ export type RequestOptions = { /** * Time in milliseconds after which arangojs will abort the request if the * socket has not already timed out. - * - * See also `agentOptions.timeout` in {@link Config}. */ timeout?: number; /** @@ -180,17 +177,37 @@ export type RequestOptions = { search?: URLSearchParams | Record; }; +/** + * Processed response object. + */ +export interface ProcessedResponse extends globalThis.Response { + /** + * @internal + * + * Identifier of the ArangoDB host that served this request. + */ + arangojsHostUrl?: string; + /** + * Fetch request object. 
+ */ + request: globalThis.Request; + /** + * Parsed response body. + */ + parsedBody?: T; +}; + /** * @internal */ -type Task = { +type Task = { hostUrl?: string; stack?: () => string; allowDirtyRead: boolean; retryOnConflict: number; - resolve: (result: any) => void; - reject: (error: Error) => void; - transform?: (res: ArangojsResponse) => any; + resolve: (result: T) => void; + reject: (error: unknown) => void; + transform?: (res: ProcessedResponse) => T; retries: number; options: { method: string; @@ -346,7 +363,7 @@ export type Config = { * * @param req - Request object or XHR instance used for this request. */ - beforeRequest?: (req: globalThis.Request) => void; + beforeRequest?: (req: globalThis.Request) => void | Promise; /** * Callback that will be invoked when the server response has been received * and processed or when the request has been failed without a response. @@ -357,7 +374,13 @@ export type Config = { * @param err - Error encountered when handling this request or `null`. * @param res - Response object for this request, if no error occurred. */ - afterResponse?: (err: ArangojsError | null, res?: ArangojsResponse) => void; + afterResponse?: (err: NetworkError | null, res?: globalThis.Response & { request: globalThis.Request }) => void | Promise; + /** + * Callback that will be invoked when a request + * + * @param err - Error encountered when handling this request. + */ + onError?: (err: Error) => void | Promise; /** * If set to a positive number, requests will automatically be retried at * most this many times if they result in a write-write conflict. @@ -425,6 +448,7 @@ export class Connection { protected _activeHostUrl: string; protected _activeDirtyHostUrl: string; protected _transactionId: string | null = null; + protected _onError?: (err: Error) => void | Promise; protected _precaptureStackTraces: boolean; protected _queueTimes = new LinkedList<[number, number]>(); protected _responseQueueTimeSamples: number; @@ -466,6 +490,7 @@ export class Connection { this._precaptureStackTraces = Boolean(config.precaptureStackTraces); this._responseQueueTimeSamples = config.responseQueueTimeSamples ?? 10; this._retryOnConflict = config.retryOnConflict ?? 
0; + this._onError = config.onError; if (this._responseQueueTimeSamples < 0) { this._responseQueueTimeSamples = Infinity; } @@ -520,31 +545,29 @@ export class Connection { } protected async _runQueue() { - if (!this._queue.length || this._activeTasks >= this._taskPoolSize) return; - const task = this._queue.shift()!; + if (this._activeTasks >= this._taskPoolSize) return; + const task = this._queue.shift(); + if (!task) return; let hostUrl = this._activeHostUrl; - if (task.hostUrl !== undefined) { - hostUrl = task.hostUrl; - } else if (task.allowDirtyRead) { - hostUrl = this._activeDirtyHostUrl; - this._activeDirtyHostUrl = - this._hostUrls[ - (this._hostUrls.indexOf(this._activeDirtyHostUrl) + 1) % - this._hostUrls.length - ]; - task.options.headers.set("x-arango-allow-dirty-read", "true"); - } else if (this._loadBalancingStrategy === "ROUND_ROBIN") { - this._activeHostUrl = - this._hostUrls[ - (this._hostUrls.indexOf(this._activeHostUrl) + 1) % - this._hostUrls.length - ]; - } - this._activeTasks += 1; try { - const res = await this._hosts[this._hostUrls.indexOf(hostUrl)]( + this._activeTasks += 1; + if (task.hostUrl !== undefined) { + hostUrl = task.hostUrl; + } else if (task.allowDirtyRead) { + hostUrl = this._activeDirtyHostUrl; + const i = this._hostUrls.indexOf(this._activeDirtyHostUrl) + 1; + this._activeDirtyHostUrl = this._hostUrls[i % this._hostUrls.length]; + } else if (this._loadBalancingStrategy === "ROUND_ROBIN") { + const i = this._hostUrls.indexOf(this._activeHostUrl) + 1; + this._activeHostUrl = this._hostUrls[i % this._hostUrls.length]; + } + const res: globalThis.Response & { + request: globalThis.Request; + arangojsHostUrl: string; + parsedBody?: any; + } = Object.assign(await this._hosts[this._hostUrls.indexOf(hostUrl)]( task.options - ); + ), { arangojsHostUrl: hostUrl }); const leaderEndpoint = res.headers.get(LEADER_ENDPOINT_HEADER); if (res.status === 503 && leaderEndpoint) { const [cleanUrl] = this.addToHostList(leaderEndpoint); @@ -553,62 +576,54 @@ export class Connection { this._activeHostUrl = cleanUrl; } this._queue.push(task); - } else { - res.arangojsHostUrl = hostUrl; - const contentType = res.headers.get("content-type"); - const queueTime = res.headers.get("x-arango-queue-time-seconds"); - if (queueTime) { - this._queueTimes.push([Date.now(), Number(queueTime)]); - while (this._responseQueueTimeSamples < this._queueTimes.length) { - this._queueTimes.shift(); - } + return; + } + const queueTime = res.headers.get("x-arango-queue-time-seconds"); + if (queueTime) { + this._queueTimes.push([Date.now(), Number(queueTime)]); + while (this._responseQueueTimeSamples < this._queueTimes.length) { + this._queueTimes.shift(); } - if (res.status >= 400) { + } + const contentType = res.headers.get("content-type"); + if (res.status >= 400) { + if (contentType?.match(MIME_JSON)) { + const errorResponse = res.clone(); + let errorBody: any; try { - if (contentType?.match(MIME_JSON)) { - const errorResponse = res.clone(); - let errorBody: any; - try { - errorBody = await errorResponse.json(); - } catch { - // noop - } - if (isArangoErrorResponse(errorBody)) { - res.parsedBody = errorBody; - throw new ArangoError(res); - } - } - throw new HttpError(res); - } catch (err: any) { - if (task.stack) { - err.stack += task.stack(); - } - throw err; + errorBody = await errorResponse.json(); + } catch { + // noop } - } - if (res.body) { - if (task.options.expectBinary) { - res.parsedBody = await res.blob(); - } else if (contentType?.match(MIME_JSON)) { - res.parsedBody = await 
res.json(); - } else { - res.parsedBody = await res.text(); + if (isArangoErrorResponse(errorBody)) { + res.parsedBody = errorBody; + throw ArangoError.from(res); } } - task.resolve(task.transform ? task.transform(res) : res); + throw new HttpError(res); + } + if (res.body) { + if (task.options.expectBinary) { + res.parsedBody = await res.blob(); + } else if (contentType?.match(MIME_JSON)) { + res.parsedBody = await res.json(); + } else { + res.parsedBody = await res.text(); + } } - } catch (err: any) { + let result: any = res; + if (task.transform) result = task.transform(res); + task.resolve(result); + } catch (e: unknown) { + const err = e as Error; if ( !task.allowDirtyRead && this._hosts.length > 1 && this._activeHostUrl === hostUrl && this._loadBalancingStrategy !== "ROUND_ROBIN" ) { - this._activeHostUrl = - this._hostUrls[ - (this._hostUrls.indexOf(this._activeHostUrl) + 1) % - this._hostUrls.length - ]; + const i = this._hostUrls.indexOf(this._activeHostUrl) + 1; + this._activeHostUrl = this._hostUrls[i % this._hostUrls.length]; } if ( isArangoError(err) && @@ -617,28 +632,37 @@ export class Connection { ) { task.retryOnConflict -= 1; this._queue.push(task); - } else if ( - ((isSystemError(err) && - err.syscall === "connect" && - err.code === "ECONNREFUSED") || - (isArangoError(err) && - err.errorNum === ERROR_ARANGO_MAINTENANCE_MODE)) && + return; + } + if ( + (isNetworkError(err) || isArangoError(err)) && + err.isSafeToRetry && task.hostUrl === undefined && this._maxRetries !== false && task.retries < (this._maxRetries || this._hosts.length - 1) ) { task.retries += 1; this._queue.push(task); - } else { - if (task.stack) { - err.stack += task.stack(); + return; + } + if (task.stack) { + err.stack += task.stack(); + } + if (this._onError) { + try { + const p = this._onError(err); + if (p instanceof Promise) await p; + } catch (e) { + (e as Error).cause = err; + task.reject(e); + return; } - task.reject(err); } + task.reject(err); } finally { this._activeTasks -= 1; + setTimeout(() => this._runQueue(), 0); } - this._runQueue(); } setBearerAuth(auth: BearerAuthCredentials) { @@ -832,6 +856,7 @@ export class Connection { const numHosts = this._hosts.length; const propagated = [] as string[]; const started = Date.now(); + const endOfTime = started + timeout; let index = 0; while (true) { if (propagated.length === numHosts) { @@ -842,10 +867,17 @@ export class Connection { } const hostUrl = this._hostUrls[index]; try { - await this.request({ ...request, hostUrl }); - } catch (e: any) { - if (started + timeout < Date.now()) { - throw e; + await this.request({ + ...request, + hostUrl, + timeout: endOfTime - Date.now(), + }); + } catch (e) { + if (endOfTime < Date.now()) { + throw new PropagationTimeoutError( + undefined, + { cause: e as Error } + ); } await new Promise((resolve) => setTimeout(resolve, 1000)); continue; @@ -861,7 +893,7 @@ export class Connection { * * Performs a request using the arangojs connection pool. */ - request( + request( { hostUrl, method = "GET", @@ -876,7 +908,7 @@ export class Connection { path, search: params, }: RequestOptions, - transform?: (res: ArangojsResponse) => T + transform?: (res: globalThis.Response & { request: globalThis.Request; parsedBody?: any }) => T ): Promise { return new Promise((resolve, reject) => { const headers = mergeHeaders(this._headers, requestHeaders ?? 
{}); @@ -901,6 +933,10 @@ export class Connection { headers.set("x-arango-trx-id", this._transactionId); } + if (allowDirtyRead) { + headers.set("x-arango-allow-dirty-read", "true"); + } + const task: Task = { retries: 0, hostUrl, diff --git a/src/database.ts b/src/database.ts index 13836284e..d53f48623 100644 --- a/src/database.ts +++ b/src/database.ts @@ -29,6 +29,7 @@ import { } from "./collection.js"; import { ArangoApiResponse, + ProcessedResponse, Config, Connection, RequestOptions, @@ -44,7 +45,6 @@ import { } from "./graph.js"; import { Job } from "./job.js"; import { DATABASE_NOT_FOUND } from "./lib/codes.js"; -import { ArangojsResponse } from "./lib/request.js"; import { Route } from "./route.js"; import { Transaction } from "./transaction.js"; import { CreateViewOptions, View, ViewDescription } from "./view.js"; @@ -2064,10 +2064,10 @@ type TrappedError = { /** * @internal */ -type TrappedRequest = { +type TrappedRequest = { error?: false; jobId: string; - onResolve: (res: ArangojsResponse) => void; + onResolve: (res: ProcessedResponse) => void; onReject: (error: any) => void; }; @@ -2082,7 +2082,9 @@ export class Database { protected _collections = new Map(); protected _graphs = new Map(); protected _views = new Map(); - protected _trapRequest?: (trapped: TrappedError | TrappedRequest) => void; + protected _trapRequest?: ( + trapped: TrappedError | TrappedRequest + ) => void; /** * Creates a new `Database` instance with its own connection pool. @@ -2193,14 +2195,14 @@ export class Database { * If `absolutePath` is set to `true`, the database path will not be * automatically prepended to the `basePath`. * - * @param ReturnType - Return type to use. Defaults to the response object type. + * @param T - Return type to use. Defaults to the response object type. * @param options - Options for this request. * @param transform - An optional function to transform the low-level * response object to a more useful return value. */ - async request( + async request( options: RequestOptions & { absolutePath?: boolean }, - transform?: (res: ArangojsResponse) => ReturnType + transform?: (res: ProcessedResponse) => ReturnType ): Promise; /** * @internal @@ -2214,17 +2216,17 @@ export class Database { * @param transform - If set to `false`, the raw response object will be * returned. 
*/ - async request( + async request( options: RequestOptions & { absolutePath?: boolean }, transform: false - ): Promise; - async request( + ): Promise>; + async request( { absolutePath = false, basePath, ...opts }: RequestOptions & { absolutePath?: boolean }, - transform: false | ((res: ArangojsResponse) => ReturnType) = (res) => res.parsedBody + transform: false | ((res: ProcessedResponse) => ReturnType) = (res) => res.parsedBody as ReturnType ): Promise { if (!absolutePath) { basePath = `/_db/${encodeURIComponent(this._name)}${basePath || ""}`; @@ -2236,7 +2238,7 @@ export class Database { const options = { ...opts }; options.headers = new Headers(options.headers); options.headers.set("x-arango-async", "store"); - let jobRes: ArangojsResponse; + let jobRes: ProcessedResponse; try { jobRes = await this._connection.request({ basePath, ...options }); } catch (e) { @@ -6410,7 +6412,7 @@ export class Database { * ``` */ async createJob(callback: () => Promise): Promise> { - const trap = new Promise((resolveTrap) => { + const trap = new Promise>((resolveTrap) => { this._trapRequest = (trapped) => resolveTrap(trapped); }); const eventualResult = callback(); diff --git a/src/error.ts b/src/error.ts index 02e5f5397..84bcd72b7 100644 --- a/src/error.ts +++ b/src/error.ts @@ -9,7 +9,8 @@ * @packageDocumentation */ -import { ArangojsResponse } from "./lib/request.js"; +import { ProcessedResponse } from "./connection.js"; +import { ERROR_ARANGO_MAINTENANCE_MODE } from "./lib/codes.js"; const messages: { [key: number]: string } = { 0: "Network Error", @@ -58,15 +59,6 @@ const messages: { [key: number]: string } = { 599: "Network Connect Timeout Error", }; -const nativeErrorKeys = [ - "fileName", - "lineNumber", - "columnNumber", - "stack", - "description", - "number", -] as (keyof Error)[]; - /** * Indicates whether the given value represents an {@link ArangoError}. * @@ -77,129 +69,326 @@ export function isArangoError(error: any): error is ArangoError { } /** - * Indicates whether the given value represents an ArangoDB error response. + * Indicates whether the given value represents a {@link NetworkError}. * + * @param error - A value that might be a `NetworkError`. + */ +export function isNetworkError(error: any): error is NetworkError { + return error instanceof NetworkError; +} + +/** * @internal +* + * Indicates whether the given value represents an ArangoDB error response. */ -export function isArangoErrorResponse(body: any): boolean { +export function isArangoErrorResponse(body: any): body is ArangoErrorResponse { return ( body && - body.hasOwnProperty("error") && - body.hasOwnProperty("code") && - body.hasOwnProperty("errorMessage") && - body.hasOwnProperty("errorNum") + body.error === true && + typeof body.code === 'number' && + typeof body.errorMessage === 'string' && + typeof body.errorNum === 'number' ); } /** + * @internal + * * Indicates whether the given value represents a Node.js `SystemError`. */ -export function isSystemError(err: any): err is SystemError { +function isSystemError(err: any): err is SystemError { return ( + err && Object.getPrototypeOf(err) === Error.prototype && - err.hasOwnProperty("code") && - err.hasOwnProperty("errno") && - err.hasOwnProperty("syscall") + typeof err.code === 'string' && + typeof err.errno !== 'undefined' && + typeof err.syscall === 'string' + ); +} + +/** + * @internal + * + * Indicates whether the given value represents a Node.js `UndiciError`. 
+ */ +function isUndiciError(err: any): err is UndiciError { + return ( + err && + err instanceof Error && + typeof (err as UndiciError).code === 'string' && + (err as UndiciError).code.startsWith('UND_') ); } +/** + * @internal + * + * Determines whether the given failed fetch error cause is safe to retry. + */ +function isSafeToRetryFailedFetch(cause: Error): boolean | null { + if (isSystemError(cause) && cause.syscall === 'connect' && cause.code === 'ECONNREFUSED') { + return true; + } + if (isUndiciError(cause) && cause.code === 'UND_ERR_CONNECT_TIMEOUT') { + return true; + } + return null; +} + +/** + * @internal +* + * Interface representing an ArangoDB error response. + */ +export interface ArangoErrorResponse { + error: true; + code: number; + errorMessage: string; + errorNum: number; +} + +/** + * Interface representing a Node.js `UndiciError`. + * + * @internal + */ +interface UndiciError extends Error { + code: `UND_${string}`; +} + /** * Interface representing a Node.js `SystemError`. + * + * @internal */ -export interface SystemError extends Error { +interface SystemError extends Error { code: string; errno: number | string; syscall: string; } /** - * Represents an error returned by ArangoDB. + * Represents an error from a deliberate timeout encountered while waiting + * for propagation. */ -export class ArangoError extends Error { - name = "ArangoError"; +export class PropagationTimeoutError extends Error { + name = "PropagationTimeoutError"; + + constructor(message: string | undefined, options: { cause: Error }) { + super(message ?? 'Timed out while waiting for propagation', options); + } +} + +/** + * Represents a network error or an error encountered while performing a network request. + */ +export class NetworkError extends Error { + name = "NetworkError"; + /** - * ArangoDB error code. - * - * See [ArangoDB error documentation](https://www.arangodb.com/docs/stable/appendix-error-codes.html). + * Indicates whether the request that caused this error can be safely retried. */ - errorNum: number; + isSafeToRetry: boolean | null; + /** - * HTTP status code included in the server error response object. + * Fetch request object. */ - code: number; + request: globalThis.Request; + + constructor(message: string, options: { request: globalThis.Request, cause?: Error, isSafeToRetry?: boolean | null }) { + const { request, isSafeToRetry = null, ...opts } = options; + super(message, opts); + this.request = request; + this.isSafeToRetry = isSafeToRetry; + } + + toJSON() { + return { + error: true, + errorMessage: this.message, + code: 0, + }; + } +} + +/** + * Represents an error from a deliberate timeout encountered while waiting + * for a server response. + */ +export class ResponseTimeoutError extends NetworkError { + name = "ResponseTimeoutError"; + + constructor(message: string | undefined, options: { request: globalThis.Request, cause?: Error, isSafeToRetry?: boolean | null }) { + super(message ?? 'Timed out while waiting for server response', options); + } +} + +/** + * Represents an error from a request that was aborted. + */ +export class RequestAbortedError extends NetworkError { + name = "RequestAbortedError"; + + constructor(message: string | undefined, options: { request: globalThis.Request, cause?: Error, isSafeToRetry?: boolean | null }) { + super(message ?? 'Request aborted', options); + } +} + +/** + * Represents an error from a failed fetch request. + * + * The root cause is often extremely difficult to determine. 
+ */ +export class FetchFailedError extends NetworkError { + name = "FetchFailedError"; + + constructor(message: string | undefined, options: { request: globalThis.Request, cause: TypeError, isSafeToRetry?: boolean | null }) { + let isSafeToRetry = options.isSafeToRetry; + if (options.cause.cause instanceof Error) { + if (isSafeToRetry === undefined) { + isSafeToRetry = isSafeToRetryFailedFetch(options.cause.cause) || undefined; + } + if (message === undefined) { + message = `Fetch failed: ${options.cause.cause.message}`; + } + } + super(message ?? 'Fetch failed', { ...options, isSafeToRetry }); + } +} + +/** + * Represents a plain HTTP error response. + */ +export class HttpError extends NetworkError { + name = "HttpError"; + /** - * Server response object. + * HTTP status code of the server response. */ - response: any; + code: number; /** - * @internal + * Server response object. */ - constructor(response: ArangojsResponse) { - super(); - this.response = response; - this.message = response.parsedBody.errorMessage; - this.errorNum = response.parsedBody.errorNum; - this.code = response.parsedBody.code; - const err = new Error(this.message); - err.name = this.name; - for (const key of nativeErrorKeys) { - if (err[key]) this[key] = err[key] as string; - } - } + response: ProcessedResponse; /** * @internal - * - * Indicates that this object represents an ArangoDB error. */ - get isArangoError(): true { - return true; + constructor(response: ProcessedResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + const message = messages[response.status] ?? messages[500]; + super(message, { ...options, request: response.request }); + this.response = response; + this.code = response.status; } toJSON() { return { error: true, errorMessage: this.message, - errorNum: this.errorNum, code: this.code, }; } } /** - * Represents a plain HTTP error response. + * Represents an error returned by ArangoDB. */ -export class HttpError extends Error { - name = "HttpError"; +export class ArangoError extends Error { + name = "ArangoError"; + /** - * Server response object. + * Indicates whether the request that caused this error can be safely retried. + * + * @internal */ - response: any; + isSafeToRetry: boolean | null = null; + /** - * HTTP status code of the server response. + * ArangoDB error code. + * + * See [ArangoDB error documentation](https://www.arangodb.com/docs/stable/appendix-error-codes.html). + */ + errorNum: number; + + /** + * Error message accompanying the error code. + */ + get errorMessage(): string { + return this.message; + } + + /** + * HTTP status code included in the server error response object. */ code: number; /** * @internal + * + * Creates a new `ArangoError` from a response object. */ - constructor(response: ArangojsResponse) { - super(); - this.response = response; - this.code = response.status || 500; - this.message = messages[this.code] || messages[500]; - const err = new Error(this.message); - err.name = this.name; - for (const key of nativeErrorKeys) { - if (err[key]) this[key] = err[key] as string; + static from(response: ProcessedResponse): ArangoError { + return new ArangoError(response.parsedBody!, { + cause: new HttpError(response) + }); + } + + /** + * Creates a new `ArangoError` from an ArangoDB error response. 
+ */ + constructor(data: ArangoErrorResponse, options: { cause?: Error, isSafeToRetry?: boolean | null }) { + const { isSafeToRetry, ...opts } = options; + super(data.errorMessage, opts); + this.errorNum = data.errorNum; + this.code = data.code; + if (isSafeToRetry !== undefined) { + this.isSafeToRetry = isSafeToRetry; + } else if (this.errorNum === ERROR_ARANGO_MAINTENANCE_MODE) { + this.isSafeToRetry = true; + } else if (this.cause instanceof NetworkError) { + this.isSafeToRetry = this.cause.isSafeToRetry; } } - toJSON() { + /** + * Server response object. + */ + get response(): ProcessedResponse | undefined { + const cause = this.cause; + if (cause instanceof HttpError) { + return cause.response; + } + return undefined; + } + + /** + * Fetch request object. + */ + get request(): globalThis.Request | undefined { + const cause = this.cause; + if (cause instanceof NetworkError) { + return cause.request; + } + return undefined; + } + + /** + * @internal + * + * Indicates that this object represents an ArangoDB error. + */ + get isArangoError(): true { + return true; + } + + toJSON(): ArangoErrorResponse { return { error: true, + errorMessage: this.errorMessage, + errorNum: this.errorNum, code: this.code, }; } -} +} \ No newline at end of file diff --git a/src/job.ts b/src/job.ts index 86e53ab67..92c2ada79 100644 --- a/src/job.ts +++ b/src/job.ts @@ -1,5 +1,5 @@ +import { ProcessedResponse } from "./connection.js"; import { Database } from "./database.js"; -import { ArangojsResponse } from "./lib/request.js"; /** * Represents an async job in a {@link database.Database}. @@ -7,7 +7,7 @@ import { ArangojsResponse } from "./lib/request.js"; export class Job { protected _id: string; protected _db: Database; - protected _transformResponse?: (res: ArangojsResponse) => Promise; + protected _transformResponse?: (res: ProcessedResponse) => Promise; protected _transformError?: (error: any) => Promise; protected _loaded: boolean = false; protected _result: T | undefined; @@ -18,7 +18,7 @@ export class Job { constructor( db: Database, id: string, - transformResponse?: (res: ArangojsResponse) => Promise, + transformResponse?: (res: ProcessedResponse) => Promise, transformError?: (error: any) => Promise ) { this._db = db; @@ -73,7 +73,7 @@ export class Job { */ async load(): Promise { if (!this.isLoaded) { - let res: ArangojsResponse; + let res: ProcessedResponse; try { res = await this._db.request( { diff --git a/src/lib/request.ts b/src/lib/request.ts index 0b421ac80..50daed6d6 100644 --- a/src/lib/request.ts +++ b/src/lib/request.ts @@ -5,36 +5,14 @@ * @internal */ -import { SystemError } from "../error.js"; +import { FetchFailedError, NetworkError, RequestAbortedError, ResponseTimeoutError } from "../error.js"; -/** - * @internal - */ -function systemErrorToJSON(this: SystemError) { - return { - error: true, - errno: this.errno, - code: this.code, - syscall: this.syscall, - }; -} - -/** - * @internal - */ -export interface ArangojsResponse extends globalThis.Response { - request: globalThis.Request; - parsedBody?: any; - arangojsHostUrl?: string; +function timer(timeout: number, cb: () => void) { + const t = setTimeout(cb, timeout); + return () => clearTimeout(t); } -/** - * @internal - */ -export interface ArangojsError extends Error { - request: globalThis.Request; - toJSON: () => Record; -} +export const REASON_TIMEOUT = 'timeout'; /** * @internal @@ -55,15 +33,15 @@ export type RequestOptions = { export type RequestConfig = { credentials: "omit" | "include" | "same-origin"; keepalive: boolean; - 
beforeRequest?: (req: globalThis.Request) => void; - afterResponse?: (err: ArangojsError | null, res?: ArangojsResponse) => void; + beforeRequest?: (req: globalThis.Request) => void | Promise; + afterResponse?: (err: NetworkError | null, res?: globalThis.Response & { request: globalThis.Request }) => void | Promise; }; /** * @internal */ export type RequestFunction = { - (options: RequestOptions): Promise; + (options: RequestOptions): Promise; close?: () => void; }; @@ -85,7 +63,7 @@ export function createRequest( baseUrl: URL, config: RequestConfig ): RequestFunction { - let abort: AbortController | undefined; + let abort: () => void | undefined; return Object.assign( async function request({ method, @@ -128,38 +106,54 @@ export function createRequest( keepalive: config.keepalive, }); if (config.beforeRequest) { - config.beforeRequest(request); + const p = config.beforeRequest(request); + if (p instanceof Promise) await p; } - abort = new AbortController(); - let t: ReturnType | undefined; + const abortController = new AbortController(); + const signal = abortController.signal; + abort = () => abortController.abort(); + let clearTimer: (() => void) | undefined; if (timeout) { - t = setTimeout(() => { - abort?.abort(); - }, timeout); + clearTimer = timer(timeout, () => { + clearTimer = undefined; + abortController.abort(REASON_TIMEOUT); + }); } + let response: globalThis.Response & { request: globalThis.Request }; try { - const res = await fetch(request, { signal: abort.signal }); - if (t) clearTimeout(t); - const response = res as ArangojsResponse; - response.request = request; - if (config.afterResponse) { - config.afterResponse(null, response); + response = Object.assign(await fetch(request, { signal }), { request }); + } catch (e: unknown) { + const cause = e instanceof Error ? e : new Error(String(e)); + let error: NetworkError; + if (signal.aborted) { + const reason = typeof signal.reason == 'string' ? 
signal.reason : undefined; + if (reason === REASON_TIMEOUT) { + error = new ResponseTimeoutError(undefined, { request }); + } else { + error = new RequestAbortedError(reason, { request, cause }); + } + } else if (cause instanceof TypeError) { + error = new FetchFailedError(undefined, { request, cause }); + } else { + error = new NetworkError(cause.message, { request, cause }); } - return response; - } catch (err) { - if (t) clearTimeout(t); - const error = err as ArangojsError; - error.request = request; - error.toJSON = systemErrorToJSON; if (config.afterResponse) { - config.afterResponse(error); + const p = config.afterResponse(error); + if (p instanceof Promise) await p; } throw error; + } finally { + clearTimer?.(); + } + if (config.afterResponse) { + const p = config.afterResponse(null, response); + if (p instanceof Promise) await p; } + return response; }, { close() { - abort?.abort(); + abort?.(); }, } ); diff --git a/src/route.ts b/src/route.ts index fef1954a7..fe52bfce7 100644 --- a/src/route.ts +++ b/src/route.ts @@ -7,9 +7,8 @@ * * @packageDocumentation */ -import { RequestOptions } from "./connection.js"; +import { ProcessedResponse, RequestOptions } from "./connection.js"; import { Database } from "./database.js"; -import { ArangojsResponse } from "./lib/request.js"; import { mergeHeaders } from "./lib/mergeHeaders.js"; /** @@ -130,7 +129,7 @@ export class Route { path: string, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a DELETE request against the given path relative to this route * and returns the server response. @@ -149,8 +148,8 @@ export class Route { delete( search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - delete(...args: any[]): Promise { + ): Promise; + delete(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; return this.request({ method: "DELETE", path, search, headers }); @@ -175,7 +174,7 @@ export class Route { path: string, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a GET request against the given path relative to this route * and returns the server response. @@ -194,8 +193,8 @@ export class Route { get( search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - get(...args: any[]): Promise { + ): Promise; + get(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; return this.request({ method: "GET", path, search, headers }); @@ -220,7 +219,7 @@ export class Route { path: string, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a HEAD request against the given path relative to this route * and returns the server response. @@ -239,8 +238,8 @@ export class Route { head( search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - head(...args: any[]): Promise { + ): Promise; + head(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; return this.request({ method: "HEAD", path, search, headers }); @@ -267,7 +266,7 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a PATCH request against the given path relative to this route * and returns the server response. 
@@ -290,8 +289,8 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - patch(...args: any[]): Promise { + ): Promise; + patch(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; return this.request({ method: "PATCH", path, body, search, headers }); @@ -321,7 +320,7 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a POST request against the given path relative to this route * and returns the server response. @@ -347,8 +346,8 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - post(...args: any[]): Promise { + ): Promise; + post(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; return this.request({ method: "POST", path, body, search, headers }); @@ -375,7 +374,7 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a PUT request against the given path relative to this route * and returns the server response. @@ -398,8 +397,8 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - put(...args: any[]): Promise { + ): Promise; + put(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; return this.request({ method: "PUT", path, body, search, headers }); diff --git a/src/test/27-query-management.ts b/src/test/27-query-management.ts index b1e6bda27..8cf1b3d91 100644 --- a/src/test/27-query-management.ts +++ b/src/test/27-query-management.ts @@ -2,7 +2,7 @@ import { expect } from "chai"; import { aql } from "../aql.js"; import { ArrayCursor } from "../cursor.js"; import { Database } from "../database.js"; -import { ArangoError } from "../error.js"; +import { ArangoError, ResponseTimeoutError } from "../error.js"; import { config } from "./_config.js"; // NOTE These tests will not reliably work with load balancing. @@ -67,7 +67,7 @@ describe("Query Management API", function () { } catch (err: any) { expect(err).is.instanceof(Error); expect(err).is.not.instanceof(ArangoError); - expect(err.name).to.equal("AbortError"); + expect(err).is.instanceof(ResponseTimeoutError); return; } expect.fail(); From 940b896afec51f9e6e86febdf1c68173600449b5 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 26 Nov 2024 12:40:43 +0100 Subject: [PATCH 02/21] Update MIGRATING.md --- CHANGELOG.md | 5 +++++ MIGRATING.md | 18 ++++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 24531feb8..f493bdeca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,11 @@ This driver uses semantic versioning: ## [9.2.0] - 2024-11-27 +This is a major release and breaks backwards compatibility. + +See [the migration guide](./MIGRATING.md#v9-to-v10) for detailed instructions +for upgrading your code to arangojs v10. + ### Changed - Errors encountered before a request completes are now wrapped in a diff --git a/MIGRATING.md b/MIGRATING.md index c27238a8e..63dfcdad3 100644 --- a/MIGRATING.md +++ b/MIGRATING.md @@ -1,5 +1,23 @@ # Migrating +## v9 to v10 + +Version 10 changes the error handling to make it easier to diagnose network +issues and distinguish between different error conditions. 
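In practice this means network-level failures now surface as typed errors rather than raw `fetch` exceptions or `AbortError`s. As a rough sketch — the error class names are those introduced in this release (exported from the arangojs error module), and `db` is assumed to be an existing `Database` instance:

```js
try {
  await db.collection("my-collection").get();
} catch (err) {
  if (err instanceof ResponseTimeoutError) {
    // the request timed out because of the `timeout` option
  } else if (err instanceof FetchFailedError) {
    // the request failed at the network level, see `err.cause`
  } else if (err instanceof HttpError) {
    // the server responded with a non-ArangoDB error, see `err.code`
  } else {
    throw err;
  }
}
```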
+ +If you previously inspected errors other than `ArangoError` and `HttpError` +directly, you should now expect to see `NetworkError` or a subclass thereof +instead. The originating error can be found using the `cause` property of the +`NetworkError` error: + +```js +try { + await db.collection("my-collection").get(); +} catch (err) { + if (err instanceof NetworkError) console.log(err.cause); +} +``` + ## v8 to v9 Version 9 reverts the automatic NFC normalization introduced in v7.7.0. This From a4a336f0232ed53a272699e4a4f268c4256c9f1d Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Wed, 27 Nov 2024 17:33:13 +0100 Subject: [PATCH 03/21] Big rename/restructure --- CHANGELOG.md | 222 +- MIGRATING.md | 32 + src/administration.ts | 350 +++ src/{analyzer.ts => analyzers.ts} | 816 +++---- src/aql.ts | 26 +- src/cluster.ts | 186 ++ src/{collection.ts => collections.ts} | 1541 +++++-------- src/connection.ts | 65 +- src/{cursor.ts => cursors.ts} | 170 +- src/{database.ts => databases.ts} | 2751 ++++------------------- src/documents.ts | 483 +++- src/{error.ts => errors.ts} | 14 +- src/foxx-manifest.ts | 18 +- src/{graph.ts => graphs.ts} | 426 ++-- src/hot-backups.ts | 73 + src/index.ts | 22 +- src/indexes.ts | 857 ++++--- src/{job.ts => jobs.ts} | 39 +- src/lib/request.ts | 2 +- src/logs.ts | 104 + src/queries.ts | 513 +++++ src/{route.ts => routes.ts} | 51 +- src/services.ts | 538 +++++ src/test/01-manipulating-databases.ts | 6 +- src/test/02-accessing-collections.ts | 4 +- src/test/03-accessing-graphs.ts | 4 +- src/test/04-transactions.ts | 14 +- src/test/05-aql-helpers.ts | 2 +- src/test/06-managing-functions.ts | 14 +- src/test/07-routes.ts | 6 +- src/test/08-cursors.ts | 16 +- src/test/09-collection-metadata.ts | 4 +- src/test/10-manipulating-collections.ts | 4 +- src/test/11-managing-indexes.ts | 4 +- src/test/13-bulk-imports.ts | 4 +- src/test/14-document-collections.ts | 4 +- src/test/15-edge-collections.ts | 4 +- src/test/16-graphs.ts | 8 +- src/test/17-graph-vertices.ts | 8 +- src/test/18-graph-edges.ts | 4 +- src/test/19-graph-vertex-collections.ts | 4 +- src/test/20-graph-edge-collections.ts | 4 +- src/test/22-foxx-api.ts | 14 +- src/test/23-aql-queries-stream.ts | 11 +- src/test/24-accessing-views.ts | 4 +- src/test/25-view-metadata.ts | 4 +- src/test/26-manipulating-views.ts | 4 +- src/test/27-query-management.ts | 10 +- src/test/28-accessing-analyzers.ts | 4 +- src/test/29-manipulating-analyzers.ts | 6 +- src/test/29-queue-time.ts | 4 +- src/test/30-concurrent-transactions.ts | 8 +- src/test/31-conflicts.ts | 4 +- src/{transaction.ts => transactions.ts} | 184 +- src/users.ts | 106 + src/{view.ts => views.ts} | 509 +++-- 56 files changed, 5283 insertions(+), 5006 deletions(-) create mode 100644 src/administration.ts rename src/{analyzer.ts => analyzers.ts} (60%) create mode 100644 src/cluster.ts rename src/{collection.ts => collections.ts} (71%) rename src/{cursor.ts => cursors.ts} (90%) rename src/{database.ts => databases.ts} (65%) rename src/{error.ts => errors.ts} (94%) rename src/{graph.ts => graphs.ts} (79%) create mode 100644 src/hot-backups.ts rename src/{job.ts => jobs.ts} (75%) create mode 100644 src/logs.ts create mode 100644 src/queries.ts rename src/{route.ts => routes.ts} (90%) create mode 100644 src/services.ts rename src/{transaction.ts => transactions.ts} (70%) create mode 100644 src/users.ts rename src/{view.ts => views.ts} (80%) diff --git a/CHANGELOG.md b/CHANGELOG.md index f493bdeca..91cb1a0f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,15 +14,211 @@ This 
driver uses semantic versioning: - A change in the major version (e.g. 1.Y.Z -> 2.0.0) indicates _breaking_ changes that require changes in your code to upgrade. -## [9.2.0] - 2024-11-27 +## [Unreleased] This is a major release and breaks backwards compatibility. See [the migration guide](./MIGRATING.md#v9-to-v10) for detailed instructions for upgrading your code to arangojs v10. +### Removed + +- Removed unused `CreateUserOptions` type + + The actual type used by the `db.createUser` method is still `UserOptions`. + +- Removed unused `IndexDetails` type + + This type was intended to be returned by `collection.indexes` when the + `withStats` option is set to `true` but the `figures` property is already + included in the current return type. + ### Changed +- Changed `QueueTimeMetrics` type to an interface + +- Changed `CursorExtras` and `CursorStats` interfaces to types + +- Changed `GraphVertexCollection` and `GraphEdgeCollection` generic types to + take separate `EntryResultType` and `EntryInputType` type parameters + +- Changed `db.collection`, `db.createCollection` and `db.createEdgeCollection` + methods to take separate `EntryResultType` and `EntryInputType` type + parameters + + These type parameters are used to narrow the the returned collection type. + +- Renamed `db.listServiceScripts` method to `db.getServiceScripts` +- Renamed `db.listHotBackups` method to `db.getHotBackups` +- Renamed `db.getLogMessages` method to `db.listLogMessages` +- Renamed `db.listFunctions` method to `db.listUserFunctions` +- Renamed `db.createFunction` method to `db.createUserFunction` +- Renamed `db.dropFunction` method to `db.dropUserFunction` +- Changed `db.removeUser` method to return `void` + +#### Module renaming + +- Renamed most modules to plural form for consistency + + The following modules were renamed: + + - `arangojs/analyzer` -> `arangojs/analyzers` + - `arangojs/collection` -> `arangojs/collections` + - `arangojs/cursor` -> `arangojs/cursors` + - `arangojs/database` -> `arangojs/databases` + - `arangojs/error` -> `arangojs/errors` + - `arangojs/graph` -> `arangojs/graphs` + - `arangojs/job` -> `arangojs/jobs` + - `arangojs/route` -> `arangojs/routes` + - `arangojs/transaction` -> `arangojs/transactions` + - `arangojs/view` -> `arangojs/views` + +#### Moved types + +- Moved document related types from `arangojs/collection` module to + `arangojs/documents` module + + The following types were moved: `DocumentOperationFailure`, + `DocumentOperationMetadata`, `DocumentExistsOptions`, + `CollectionReadOptions`, `CollectionBatchReadOptions`, + `CollectionInsertOptions`, `CollectionReplaceOptions`, + `CollectionUpdateOptions`, `CollectionRemoveOptions`, + `CollectionImportOptions`, `CollectionEdgesOptions`, + `CollectionImportResult` and `CollectionEdgesResult` + +- Moved index related types from `arangojs/collection` module to + `arangojs/indexes` module + + The following types were moved: `IndexListOptions`. + +- Moved transaction related types from `arangojs/database` module to + `arangojs/transactions` module + + The following types were moved: `TransactionCollections`, + `TransactionOptions` and `TransactionDetails`. + +- Moved cluster related types from `arangojs/database` module to new + `arangojs/clusters` module + + The following types were moved: `ClusterImbalanceInfo`, + `ClusterRebalanceState`, `ClusterRebalanceOptions`, `ClusterRebalanceMove` + and `ClusterRebalanceResult`. 
+ +- Moved hot backup related types from `arangojs/database` module to new + `arangojs/hot-backups` module + + The following types were moved: `HotBackupOptions`, `HotBackupResult` and + `HotBackupList`. + +- Moved query related types from `arangojs/database` module to new + `arangojs/queries` module + + The following types were moved: `QueryOptions`, `ExplainOptions`, + `ExplainPlan`, `ExplainStats`, `SingleExplainResult`, `MultiExplainResult`, + `AstNode`, `ParseResult`, `QueryOptimizerRule`, `QueryTracking`, + `QueryTrackingOptions`, `QueryInfo` and `AqlUserFunction`. + +- Moved service related types from `arangojs/database` module to new + `arangojs/services` module + + The following types were moved: `InstallServiceOptions`, + `ReplaceServiceOptions`, `UpgradeServiceOptions`, `UninstallServiceOptions`, + `ServiceSummary`, `ServiceInfo`, `ServiceConfiguration`, + `SingleServiceDependency`, `MultiServiceDependency`, `ServiceTestStats`, + `ServiceTestStreamTest`, `ServiceTestStreamReport`, `ServiceTestSuiteTest`, + `ServiceTestSuite`, `ServiceTestSuiteReport`, `ServiceTestXunitTest`, + `ServiceTestXunitReport`, `ServiceTestTapReport`, `ServiceTestDefaultTest`, + `ServiceTestDefaultReport` and `SwaggerJson`. + +- Moved user related types from `arangojs/database` module to new + `arangojs/users` module + + The following types were moved: `AccessLevel`, `ArangoUser`, `UserOptions`, + `UserAccessLevelOptions` and `CreateDatabaseUser`. + +- Moved server administration related types from `arangojs/database` module to + new `arangojs/administration` module + + The following types were moved: `QueueTimeMetrics` and `VersionInfo`. + +#### Renamed types + +- Renamed `Index` types to `IndexDescription` for consistency + + The specific index types were also renamed accordingly: + + - `Index` -> `IndexDescription` + - `GeoIndex` -> `GeoIndexDescription` + - `PersistentIndex` -> `PersistentIndexDescription` + - `PrimaryIndex` -> `PrimaryIndexDescription` + - `TtlIndex` -> `TtlIndexDescription` + - `MdiIndex` -> `MdiIndexDescription` + - `InvertedIndex` -> `InvertedIndexDescription` + - `InternalArangosearchIndex` -> `ArangosearchIndexDescription` + - `InternalIndex` -> `InternalIndexDescription` + - `HiddenIndex` -> `HiddenIndexDescription` + + Note that the "Internal" prefix was dropped from `ArangosearchIndexDescription` + to more accurately reflect the index type name. The index type still refers + to an internal index, however. 
+ +- Renamed various types for consistency: + + - `AqlUserFunction` -> `UserFunctionDescription` + - `CollectionMetadata` -> `CollectionDescription` + - `DatabaseInfo` -> `DatabaseDescription` + - `GraphInfo` -> `GraphDescription` + - `ServiceInfo` -> `ServiceDescription` + - `QueryInfo` -> `QueryDescription` + - `QueryTracking` -> `QueryTrackingInfo` + - `TransactionDetails` -> `TransactionInfo` + - `TransactionCollections` -> `TransactionCollectionOptions` + - `CreateDatabaseUser` -> `CreateDatabaseUserOptions` + + - Index operations: + - `IndexListOptions` -> `ListIndexesOptions` + + - Collection document operations: + - `DocumentExistsOptions` -> `DocumentExistsOptions` + - `CollectionReadOptions` -> `ReadDocumentOptions` + - `CollectionBatchReadOptions` -> `BulkReadDocumentsOptions` + - `CollectionInsertOptions` -> `InsertDocumentOptions` + - `CollectionReplaceOptions` -> `ReplaceDocumentOptions` + - `CollectionUpdateOptions` -> `UpdateDocumentOptions` + - `CollectionRemoveOptions` -> `RemoveDocumentOptions` + - `CollectionImportOptions` -> `ImportDocumentsOptions` + - `CollectionEdgesOptions` -> `DocumentEdgesOptions` + - `CollectionImportResult` -> `ImportDocumentsResult` + - `CollectionEdgesResult` -> `DocumentEdgesResult` + + - Graph collection document operation: + - `GraphCollectionReadOptions` -> `ReadGraphDocumentOptions` + - `GraphCollectionInsertOptions` -> `CreateGraphDocumentOptions` + - `GraphCollectionReplaceOptions` -> `ReplaceGraphDocumentOptions` + - `GraphCollectionRemoveOptions` -> `RemoveGraphDocumentOptions` + - `ViewPatchPropertiesOptions` -> `UpdateViewPropertiesOptions` + + - View operations: + - `ArangoSearchViewPatchPropertiesOptions` -> `UpdateArangoSearchViewPropertiesOptions` + - `SearchAliasViewPatchPropertiesOptions` -> `UpdateSearchAliasViewPropertiesOptions` + - `SearchAliasViewPatchIndexOptions` -> `UpdateSearchAliasViewIndexOptions` + - `ArangoSearchViewStoredValueOptions` -> `CreateArangoSearchViewStoredValueOptions` + +- Renamed `ArrayCursor` and `BatchedArrayCursor` classes to `Cursor` and + `BatchCursor` respectively + + The previous name was misleading because it conflicted with how the ArangoDB + distinguishes between array cursors and streaming cursors in the interactive + shell. This distinction does not apply to the driver. + +- Renamed various types to reduce ambiguity: + + - `ObjectWithId` (in `indexes` module) -> `ObjectWithIndexId` + - `ObjectWithId` (in `documents` module) -> `ObjectWithDocumentId` + - `ObjectWithKey` (in `documents` module) -> `ObjectWithDocumentKey` + +#### Error handling + - Errors encountered before a request completes are now wrapped in a `NetworkError` or a subclass thereof @@ -76,13 +272,10 @@ for upgrading your code to arangojs v10. ### Added -- Added `database.availability` method - -- Added `database.engine` method (DE-931) - -- Added `database.status` method ([#811](https://github.com/arangodb/arangojs/issues/811)) +- Added `BatchCursor#itemsView` property and `BatchCursorItemsView` interface -- Added `database.supportInfo` method + This property provides a low-level interface for consuming the items of the + cursor and is used by the regular item-wise `Cursor` class internally. - Added `onError` option to `Config` (DE-955) @@ -145,6 +338,18 @@ for upgrading your code to arangojs v10. This property is always present if the error has a `response` property. In normal use this should always be the case. 
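As a rough sketch of how the new `onError` option and error helpers described above might be wired up — the URL is a placeholder and the logging calls are illustrative only:

```js
import { Database } from "arangojs";
import { isArangoError, isNetworkError } from "arangojs/errors";

const db = new Database({
  url: "http://127.0.0.1:8529", // placeholder
  onError(err) {
    // Also invoked when a request completes but returns an error status,
    // in which case `err` is the `ArangoError` or `HttpError` for that response.
    if (isArangoError(err)) console.warn("ArangoDB error", err.errorNum);
    else if (isNetworkError(err)) console.warn("Network error", err.cause);
  },
});
```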
+## [9.2.0] - 2024-11-27 + +### Added + +- Added `database.availability` method + +- Added `database.engine` method (DE-931) + +- Added `database.status` method ([#811](https://github.com/arangodb/arangojs/issues/811)) + +- Added `database.supportInfo` method + - Added `keepNull` option to `CollectionInsertOptions` type (DE-946) This option was previously missing from the type. @@ -185,6 +390,8 @@ for upgrading your code to arangojs v10. This property is only available when fetching indexes with the `withHidden` option set to `true`. +### Added + - Added `HiddenIndex` type (DE-849) This type is used to represent an index returned by `collection.indexes` when @@ -2081,6 +2288,7 @@ For a detailed list of changes between pre-release versions of v7 see the Graph methods now only return the relevant part of the response body. +[unreleased]: https://github.com/arangodb/arangojs/compare/v9.2.0...HEAD [9.2.0]: https://github.com/arangodb/arangojs/compare/v9.1.0...v9.2.0 [9.1.0]: https://github.com/arangodb/arangojs/compare/v9.0.0...v9.1.0 [9.0.0]: https://github.com/arangodb/arangojs/compare/v8.8.1...v9.0.0 diff --git a/MIGRATING.md b/MIGRATING.md index 63dfcdad3..f9df2657b 100644 --- a/MIGRATING.md +++ b/MIGRATING.md @@ -18,6 +18,38 @@ try { } ``` +### Module name changes + +Module names referring to resource types such as analyzers, collections, +databases, or views have been changed to use the plural form: + +```diff +-import { Database } from "arangojs/database"; ++import { Database } from "arangojs/databases"; +``` + +Note that the `aql` module and `foxx-manifest` modules have not been renamed +as these are utility modules. + +### Type imports + +Types that were previously exported by the `database` module but are not +related to managing databases have been moved to separate modules: + +```diff +-import type { +- ParseResult, +- TransactionOptions, +- VersionInfo +-} from "arangojs/database"; ++import type { VersionInfo } from "arangojs/administration"; ++import type { TransactionOptions } from "arangojs/transactions"; ++import type { ParseResult } from "arangojs/queries"; +``` + +Additionally, some types were renamed. For a full list of changes, see the +[changelog](./CHANGELOG.md). + ## v8 to v9 Version 9 reverts the automatic NFC normalization introduced in v7.7.0. This diff --git a/src/administration.ts b/src/administration.ts new file mode 100644 index 000000000..4a764c0ee --- /dev/null +++ b/src/administration.ts @@ -0,0 +1,350 @@ +/** + * ```ts + * import type { VersionInfo } from "arangojs/administration"; + * ``` + * + * The "administration" module provides types for database administration. + * + * @packageDocumentation + */ + +/** + * Result of retrieving database version information. + */ +export type VersionInfo = { + /** + * Value identifying the server type, i.e. `"arango"`. + */ + server: string; + /** + * ArangoDB license type or "edition". + */ + license: "community" | "enterprise"; + /** + * ArangoDB server version. + */ + version: string; + /** + * Additional information about the ArangoDB server. + */ + details?: { [key: string]: string }; +}; + +/** + * Information about the storage engine. + */ +export type EngineInfo = { + /** + * Endianness of the storage engine. + */ + endianness?: "little" | "big"; + /** + * Name of the storage engine. + */ + name: string; + /** + * Features supported by the storage engine. + */ + supports?: { + /** + * Index types supported by the storage engine. + */ + indexes?: string[]; + /** + * Aliases supported by the storage engine. 
+ */ + aliases?: { + /** + * Index type aliases supported by the storage engine. + */ + indexes?: Record; + } + }; +}; + +/** + * Information about the server status. + */ +export type ServerStatusInformation = { + /** + * (Cluster Coordinators and DB-Servers only.) The address of the server. + */ + address?: string; + /** + * (Cluster Coordinators and DB-Servers only.) Information about the Agency. + */ + agency?: { + /** + * Information about the communication with the Agency. + */ + agencyComm: { + /** + * A list of possible Agency endpoints. + */ + endpoints: string[]; + }; + }; + /** + * (Cluster Agents only.) Information about the Agents. + */ + agent?: { + /** + * The endpoint of the queried Agent. + */ + endpoint: string; + /** + * Server ID of the queried Agent. + */ + id: string; + /** + * Server ID of the leading Agent. + */ + leaderId: string; + /** + * Whether the queried Agent is the leader. + */ + leading: boolean; + /** + * The current term number. + */ + term: number; + }; + /** + * (Cluster Coordinators only.) Information about the Coordinators. + */ + coordinator?: { + /** + * The server ID of the Coordinator that is the Foxx master. + */ + foxxmaster: string[]; + /** + * Whether the queried Coordinator is the Foxx master. + */ + isFoxxmaster: boolean[]; + }; + /** + * Whether the Foxx API is enabled. + */ + foxxApi: boolean; + /** + * A host identifier defined by the HOST or NODE_NAME environment variable, + * or a fallback value using a machine identifier or the cluster/Agency address. + */ + host: string; + /** + * A hostname defined by the HOSTNAME environment variable. + */ + hostname?: string; + /** + * ArangoDB Edition. + */ + license: "community" | "enterprise"; + /** + * Server operation mode. + * + * @deprecated use `operationMode` instead + */ + mode: "server" | "console"; + /** + * Server operation mode. + */ + operationMode: "server" | "console"; + /** + * The process ID of arangod. + */ + pid: number; + /** + * Server type. + */ + server: "arango"; + /** + * Information about the server status. + */ + serverInfo: { + /** + * Whether the maintenance mode is enabled. + */ + maintenance: boolean; + /** + * (Cluster only.) The persisted ID. + */ + persistedId?: string; + /** + * Startup and recovery information. + */ + progress: { + /** + * Internal name of the feature that is currently being prepared, started, stopped or unprepared. + */ + feature: string; + /** + * Name of the lifecycle phase the instance is currently in. + */ + phase: string; + /** + * Current recovery sequence number value. + */ + recoveryTick: number; + }; + /** + * Whether writes are disabled. + */ + readOnly: boolean; + /** + * (Cluster only.) The reboot ID. Changes on every restart. + */ + rebootId?: number; + /** + * Either "SINGLE", "COORDINATOR", "PRIMARY" (DB-Server), or "AGENT" + */ + role: "SINGLE" | "COORDINATOR" | "PRIMARY" | "AGENT"; + /** + * (Cluster Coordinators and DB-Servers only.) The server ID. + */ + serverId?: string; + /** + * (Cluster Coordinators and DB-Servers only.) Either "STARTUP", "SERVING", + * or "SHUTDOWN". + */ + state?: "STARTUP" | "SERVING" | "SHUTDOWN"; + /** + * The server version string. + */ + version: string; + /** + * Whether writes are enabled. + * + * @deprecated Use `readOnly` instead. + */ + writeOpsEnabled: boolean; + }; +}; + +/** + * Server availability. + * + * - `"default"`: The server is operational. + * + * - `"readonly"`: The server is in read-only mode. + * + * - `false`: The server is not available. 
+ */ +export type ServerAvailability = "default" | "readonly" | false; + +/** + * Single server deployment information for support purposes. + */ +export type SingleServerSupportInfo = { + /** + * ISO 8601 datetime string of when the information was requested. + */ + date: string; + /** + * Information about the deployment. + */ + deployment: { + /** + * Deployment mode: + * + * - `"single"`: A single server deployment. + * + * - `"cluster"`: A cluster deployment. + */ + type: "single"; + }; +}; + +/** + * Cluster deployment information for support purposes. + */ +export type ClusterSupportInfo = { + /** + * ISO 8601 datetime string of when the information was requested. + */ + date: string; + /** + * Information about the deployment. + */ + deployment: { + /** + * Deployment mode: + * + * - `"single"`: A single server deployment. + * + * - `"cluster"`: A cluster deployment. + */ + type: "cluster"; + /** + * Information about the servers in the cluster. + */ + servers: Record>; + /** + * Number of agents in the cluster. + */ + agents: number; + /** + * Number of coordinators in the cluster. + */ + coordinators: number; + /** + * Number of DB-Servers in the cluster. + */ + dbServers: number; + /** + * Information about the shards in the cluster. + */ + shards: { + /** + * Number of collections in the cluster. + */ + collections: number; + /** + * Number of shards in the cluster. + */ + shards: number; + /** + * Number of leaders in the cluster. + */ + leaders: number; + /** + * Number of real leaders in the cluster. + */ + realLeaders: number; + /** + * Number of followers in the cluster. + */ + followers: number; + /** + * Number of servers in the cluster. + */ + servers: number; + } + }; + /** + * (Cluster only.) Information about the ArangoDB instance as well as the + * host machine. + */ + host: Record; +}; + +/** + * An object providing methods for accessing queue time metrics of the most + * recently received server responses if the server supports this feature. + */ +export interface QueueTimeMetrics { + /** + * Returns the queue time of the most recently received response in seconds. + */ + getLatest(): number | undefined; + /** + * Returns a list of the most recently received queue time values as tuples + * of the timestamp of the response being processed in milliseconds and the + * queue time in seconds. + */ + getValues(): [number, number][]; + /** + * Returns the average queue time of the most recently received responses + * in seconds. + */ + getAvg(): number; +}; diff --git a/src/analyzer.ts b/src/analyzers.ts similarity index 60% rename from src/analyzer.ts rename to src/analyzers.ts index b005a4cbb..727b42456 100644 --- a/src/analyzer.ts +++ b/src/analyzers.ts @@ -1,32 +1,56 @@ /** * ```ts - * import type { Analyzer } from "arangojs/analyzer.js"; + * import type { Analyzer } from "arangojs/analyzers"; * ``` * - * The "analyzer" module provides analyzer related types and interfaces + * The "analyzers" module provides Analyzer related types and interfaces * for TypeScript. * * @packageDocumentation */ -import { ArangoApiResponse } from "./connection.js"; -import { Database } from "./database.js"; -import { isArangoError } from "./error.js"; +import * as databases from "./databases.js"; +import * as connection from "./connection.js"; +import * as errors from "./errors.js"; import { ANALYZER_NOT_FOUND } from "./lib/codes.js"; +//#region Shared types /** - * Indicates whether the given value represents an {@link Analyzer}. 
- * - * @param analyzer - A value that might be an Analyzer. + * Name of a feature enabled for an Analyzer. */ -export function isArangoAnalyzer(analyzer: any): analyzer is Analyzer { - return Boolean(analyzer && analyzer.isArangoAnalyzer); -} +export type AnalyzerFeature = "frequency" | "norm" | "position" | "offset"; /** - * Name of a feature enabled for an Analyzer. + * Text case conversion type. */ -export type AnalyzerFeature = "frequency" | "norm" | "position" | "offset"; +export type CaseConversion = "lower" | "upper" | "none"; + +/** + * Token type for a Segmentation Analyzer. + */ +export type SegmentationTokenType = "all" | "alpha" | "graphic"; + +/** + * Token data type for an AQL Analyzer. + */ +export type AqlReturnTokenType = "string" | "number" | "bool"; + +/** + * GeoJSON type. + */ +export type GeoType = "shape" | "centroid" | "point"; + +/** + * Storage format of a Geo S2 Analyzer. + */ +export type GeoS2Format = "latLngDouble" | "latLngInt" | "s2Point"; + +/** + * Type of an Analyzer. + */ +export type AnalyzerType = AnalyzerDescription["type"]; +//#endregion +//#region CreateAnalyzerOptions /** * Analyzer type and its type-specific properties. */ @@ -51,106 +75,96 @@ export type CreateAnalyzerOptions = | CreateGeoPointAnalyzerOptions | CreateGeoS2AnalyzerOptions; +type CreateAnalyzerOptionsType< + Type extends AnalyzerType, + Properties = void +> = Properties extends void + ? { + /** + * Type of the Analyzer. + */ + type: Type; + /** + * Features to enable for this Analyzer. + */ + features?: AnalyzerFeature[]; + /** + * This Analyzer does not take additional properties. + */ + properties?: Record; + } + : { + /** + * Type of the Analyzer. + */ + type: Type; + /** + * Features to enable for this Analyzer. + */ + features?: AnalyzerFeature[]; + /** + * Additional properties for the Analyzer. + */ + properties: Properties; + }; + /** * Options for creating an Identity Analyzer. */ -export type CreateIdentityAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "identity"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - * - * The `identity` Analyzer does not take additional properties. - */ - properties?: Record; -}; +export type CreateIdentityAnalyzerOptions = CreateAnalyzerOptionsType< + "identity" +>; /** * Options for creating a Delimiter Analyzer. */ -export type CreateDelimiterAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "delimiter"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - * - * The value will be used as delimiter to split text into tokens as specified - * in RFC 4180, without starting new records on newlines. - */ - properties: string | { delimiter: string }; -}; +export type CreateDelimiterAnalyzerOptions = CreateAnalyzerOptionsType< + "delimiter", + | string + | { + /** + * This value will be used as delimiter to split text into tokens as + * specified in RFC 4180, without starting new records on newlines. + */ + delimiter: string; + } +>; /** * Options for creating a Multi-Delimiter Analyzer. */ -export type CreateMultiDelimiterAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "multi_delimiter"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. 
- * - * The value will be used as delimiter to split text into tokens as specified - * in RFC 4180, without starting new records on newlines. - */ - properties: { delimiters: string[] }; -}; +export type CreateMultiDelimiterAnalyzerOptions = CreateAnalyzerOptionsType< + "multi_delimiter", + { + /** + * This value will be used as delimiter to split text into tokens as + * specified in RFC 4180, without starting new records on newlines. + */ + delimiters: string[]; + } +>; /** * Options for creating a Stem Analyzer. */ -export type CreateStemAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "stem"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - * - * The value defines the text locale. - * - * Format: `language[_COUNTRY][.encoding][@variant]` - */ - properties: { locale: string }; -}; +export type CreateStemAnalyzerOptions = CreateAnalyzerOptionsType< + "stem", + { + /** + * Text locale. + * + * Format: `language[_COUNTRY][.encoding][@variant]` + */ + locale: string; + } +>; /** * Options for creating a Norm Analyzer. */ -export type CreateNormAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "norm"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateNormAnalyzerOptions = CreateAnalyzerOptionsType< + "norm", + { /** * Text locale. * @@ -162,32 +176,22 @@ export type CreateNormAnalyzerOptions = { * * Default: `"lower"` */ - case?: "lower" | "none" | "upper"; + case?: CaseConversion; /** * Preserve accents in returned words. * * Default: `false` */ accent?: boolean; - }; -}; + } +>; /** * Options for creating an Ngram Analyzer. */ -export type CreateNgramAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "ngram"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateNgramAnalyzerOptions = CreateAnalyzerOptionsType< + "ngram", + { /** * Maximum n-gram length. */ @@ -200,25 +204,15 @@ export type CreateNgramAnalyzerOptions = { * Output the original value as well. */ preserveOriginal: boolean; - }; -}; + } +>; /** * Options for creating a Text Analyzer. */ -export type CreateTextAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "text"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateTextAnalyzerOptions = CreateAnalyzerOptionsType< + "text", + { /** * Text locale. * @@ -230,7 +224,7 @@ export type CreateTextAnalyzerOptions = { * * Default: `"lower"` */ - case?: "lower" | "none" | "upper"; + case?: CaseConversion; /** * Words to omit from result. * @@ -260,57 +254,41 @@ export type CreateTextAnalyzerOptions = { /** * If present, then edge n-grams are generated for each token (word). */ - edgeNgram?: { min?: number; max?: number; preserveOriginal?: boolean }; - }; -}; + edgeNgram?: { + min?: number; + max?: number; + preserveOriginal?: boolean; + }; + } +>; /** * Options for creating a Segmentation Analyzer */ -export type CreateSegmentationAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "segmentation"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. 
- */ - properties: { +export type CreateSegmentationAnalyzerOptions = CreateAnalyzerOptionsType< + "segmentation", + { /** * Which tokens should be returned. * * Default: `"alpha"` */ - break?: "all" | "alpha" | "graphic"; + break?: SegmentationTokenType; /** * What case all returned tokens should be converted to if applicable. * * Default: `"none"` */ - case?: "lower" | "upper" | "none"; - }; -}; + case?: CaseConversion; + } +>; /** * Options for creating an AQL Analyzer */ -export type CreateAqlAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "aql"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateAqlAnalyzerOptions = CreateAnalyzerOptionsType< + "aql", + { /** * AQL query to be executed. */ @@ -345,49 +323,29 @@ export type CreateAqlAnalyzerOptions = { * * Default: `"string"` */ - returnType?: "string" | "number" | "bool"; - }; -}; + returnType?: AqlReturnTokenType; + } +>; /** * Options for creating a Pipeline Analyzer */ -export type CreatePipelineAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "pipeline"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreatePipelineAnalyzerOptions = CreateAnalyzerOptionsType< + "pipeline", + { /** * Definitions for Analyzers to chain in this Pipeline Analyzer. */ pipeline: Omit[]; - }; -}; + } +>; /** * Options for creating a Stopwords Analyzer */ -export type CreateStopwordsAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "stopwords"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateStopwordsAnalyzerOptions = CreateAnalyzerOptionsType< + "stopwords", + { /** * Array of strings that describe the tokens to be discarded. */ @@ -398,50 +356,30 @@ export type CreateStopwordsAnalyzerOptions = { * Default: `false` */ hex?: boolean; - }; -}; + } +>; /** * Options for creating a Collation Analyzer */ -export type CreateCollationAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "collation"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateCollationAnalyzerOptions = CreateAnalyzerOptionsType< + "collation", + { /** * Text locale. * * Format: `language[_COUNTRY][.encoding][@variant]` */ locale: string; - }; -}; + } +>; /** * (Enterprise Edition only.) Options for creating a MinHash Analyzer */ -export type CreateMinHashAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "minhash"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateMinHashAnalyzerOptions = CreateAnalyzerOptionsType< + "minhash", + { /** * An Analyzer definition-like object with `type` and `properties` attributes. */ @@ -450,25 +388,15 @@ export type CreateMinHashAnalyzerOptions = { * Size of the MinHash signature. */ numHashes: number; - }; -}; + } +>; /** * (Enterprise Edition only.) Options for creating a Classification Analyzer */ -export type CreateClassificationAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "classification"; - /** - * Features to enable for this Analyzer. 
- */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateClassificationAnalyzerOptions = CreateAnalyzerOptionsType< + "classification", + { /** * On-disk path to the trained fastText supervised model. */ @@ -485,25 +413,15 @@ export type CreateClassificationAnalyzerOptions = { * Default: `0.99` */ threshold?: number; - }; -}; + } +>; /** * (Enterprise Edition only.) Options for creating a NearestNeighbors Analyzer. */ -export type CreateNearestNeighborsAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "nearest_neighbors"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateNearestNeighborsAnalyzerOptions = CreateAnalyzerOptionsType< + "nearest_neighbors", + { /** * On-disk path to the trained fastText supervised model. */ @@ -514,25 +432,15 @@ export type CreateNearestNeighborsAnalyzerOptions = { * Default: `1` */ top_k?: number; - }; -}; + } +>; /** * Options for creating a Wildcard Analyzer. */ -export type CreateWildcardAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "wildcard"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateWildcardAnalyzerOptions = CreateAnalyzerOptionsType< + "wildcard", + { /** * N-gram length. Must be a positive integer greater than or equal to 2. */ @@ -541,25 +449,15 @@ export type CreateWildcardAnalyzerOptions = { * An Analyzer definition-like object with `type` and `properties` attributes. */ analyzer?: Omit; - }; -}; + } +>; /** * Options for creating a GeoJSON Analyzer */ -export type CreateGeoJsonAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "geojson"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateGeoJsonAnalyzerOptions = CreateAnalyzerOptionsType< + "geojson", + { /** * If set to `"centroid"`, only the centroid of the input geometry will be * computed and indexed. @@ -569,32 +467,26 @@ export type CreateGeoJsonAnalyzerOptions = { * * Default: `"shape"` */ - type?: "shape" | "centroid" | "point"; + type?: GeoType; /** * Options for fine-tuning geo queries. * * Default: `{ maxCells: 20, minLevel: 4, maxLevel: 23 }` */ - options?: { maxCells?: number; minLevel?: number; maxLevel?: number }; - }; -}; + options?: { + maxCells?: number; + minLevel?: number; + maxLevel?: number; + }; + } +>; /** * Options for creating a GeoPoint Analyzer */ -export type CreateGeoPointAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "geopoint"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateGeoPointAnalyzerOptions = CreateAnalyzerOptionsType< + "geopoint", + { /** * Attribute paths of the latitude value relative to the field for which the * Analyzer is defined in the View. @@ -610,26 +502,20 @@ export type CreateGeoPointAnalyzerOptions = { * * Default: `{ maxCells: 20, minLevel: 4, maxLevel: 23 }` */ - options?: { minCells?: number; minLevel?: number; maxLevel?: number }; - }; -}; + options?: { + minCells?: number; + minLevel?: number; + maxLevel?: number; + }; + } +>; /** * (Enterprise Edition only.) 
Options for creating a Geo S2 Analyzer */ -export type CreateGeoS2AnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "geo_s2"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateGeoS2AnalyzerOptions = CreateAnalyzerOptionsType< + "geo_s2", + { /** * If set to `"centroid"`, only the centroid of the input geometry will be * computed and indexed. @@ -639,13 +525,17 @@ export type CreateGeoS2AnalyzerOptions = { * * Default: `"shape"` */ - type?: "shape" | "centroid" | "point"; + type?: GeoType; /** * Options for fine-tuning geo queries. * * Default: `{ maxCells: 20, minLevel: 4, maxLevel: 23 }` */ - options?: { maxCells?: number; minLevel?: number; maxLevel?: number }; + options?: { + maxCells?: number; + minLevel?: number; + maxLevel?: number; + }; /** * If set to `"latLngDouble"`, each latitude and longitude value is stored * as an 8-byte floating-point value (16 bytes per coordinate pair). @@ -658,24 +548,12 @@ export type CreateGeoS2AnalyzerOptions = { * * Default: `"latLngDouble"` */ - format?: "latLngDouble" | "latLngInt" | "s2Point"; - }; -}; - -/** - * Shared attributes of all Analyzer descriptions. - */ -export type GenericAnalyzerDescription = { - /** - * A unique name for this Analyzer. - */ - name: string; - /** - * Features enabled for this Analyzer. - */ - features: AnalyzerFeature[]; -}; + format?: GeoS2Format; + } +>; +//#endregion +//#region AnalyzerDescription /** * An object describing an Analyzer. */ @@ -701,225 +579,274 @@ export type AnalyzerDescription = | GeoS2AnalyzerDescription; /** - * An object describing an Identity Analyzer. + * Shared attributes of all Analyzer descriptions. */ -export type IdentityAnalyzerDescription = GenericAnalyzerDescription & { - type: "identity"; - properties: Record; +type AnalyzerDescriptionType< + Type extends string, + Properties = Record +> = { + /** + * A unique name for this Analyzer. + */ + name: string; + /** + * Type of the Analyzer. + */ + type: Type; + /** + * Features to enable for this Analyzer. + */ + features?: AnalyzerFeature[]; + /** + * Additional properties for the Analyzer. + */ + properties: Properties; }; +/** + * An object describing an Identity Analyzer. + */ +export type IdentityAnalyzerDescription = AnalyzerDescriptionType<"identity">; + /** * An object describing a Delimiter Analyzer. */ -export type DelimiterAnalyzerDescription = GenericAnalyzerDescription & { - type: "delimiter"; - properties: { delimiter: string }; -}; +export type DelimiterAnalyzerDescription = AnalyzerDescriptionType< + "delimiter", + { delimiter: string } +>; /** * An object describing a Multi Delimiter Analyzer. */ -export type MultiDelimiterAnalyzerDescription = GenericAnalyzerDescription & { - type: "multi_delimiter"; - properties: { delimiters: string[] }; -}; +export type MultiDelimiterAnalyzerDescription = AnalyzerDescriptionType< + "multi_delimiter", + { delimiters: string[] } +>; /** * An object describing a Stem Analyzer. */ -export type StemAnalyzerDescription = GenericAnalyzerDescription & { - type: "stem"; - properties: { locale: string }; -}; +export type StemAnalyzerDescription = AnalyzerDescriptionType< + "stem", + { locale: string } +>; /** * An object describing a Norm Analyzer. 
*/ -export type NormAnalyzerDescription = GenericAnalyzerDescription & { - type: "norm"; - properties: { +export type NormAnalyzerDescription = AnalyzerDescriptionType< + "norm", + { locale: string; - case: "lower" | "none" | "upper"; + case: CaseConversion; accent: boolean; - }; -}; + } +>; /** * An object describing an Ngram Analyzer. */ -export type NgramAnalyzerDescription = GenericAnalyzerDescription & { - type: "ngram"; - properties: { - max: number; +export type NgramAnalyzerDescription = AnalyzerDescriptionType< + "ngram", + { min: number; + max: number; preserveOriginal: boolean; - }; -}; + } +>; /** * An object describing a Text Analyzer. */ -export type TextAnalyzerDescription = GenericAnalyzerDescription & { - type: "text"; - properties: { +export type TextAnalyzerDescription = AnalyzerDescriptionType< + "text", + { locale: string; - case: "lower" | "none" | "upper"; + case: CaseConversion; stopwords: string[]; stopwordsPath: string; accent: boolean; stemming: boolean; - edgeNgram: { min: number; max: number; preserveOriginal: boolean }; - }; -}; + edgeNgram: { + min: number; + max: number; + preserveOriginal: boolean; + }; + } +>; /** * An object describing a Segmentation Analyzer */ -export type SegmentationAnalyzerDescription = GenericAnalyzerDescription & { - type: "segmentation"; - properties: { - break: "all" | "alpha" | "graphic"; - case: "lower" | "upper" | "none"; - }; -}; +export type SegmentationAnalyzerDescription = AnalyzerDescriptionType< + "segmentation", + { + break: SegmentationTokenType; + case: CaseConversion; + } +>; /** * An object describing an AQL Analyzer */ -export type AqlAnalyzerDescription = GenericAnalyzerDescription & { - type: "aql"; - properties: { +export type AqlAnalyzerDescription = AnalyzerDescriptionType< + "aql", + { queryString: string; collapsePositions: boolean; keepNull: boolean; batchSize: number; memoryLimit: number; - returnType: "string" | "number" | "bool"; - }; -}; + returnType: AqlReturnTokenType; + } +>; /** * An object describing a Pipeline Analyzer */ -export type PipelineAnalyzerDescription = GenericAnalyzerDescription & { - type: "pipeline"; - properties: { +export type PipelineAnalyzerDescription = AnalyzerDescriptionType< + "pipeline", + { pipeline: Omit[]; - }; -}; + } +>; /** * An object describing a Stopwords Analyzer */ -export type StopwordsAnalyzerDescription = GenericAnalyzerDescription & { - type: "stopwords"; - properties: { +export type StopwordsAnalyzerDescription = AnalyzerDescriptionType< + "stopwords", + { stopwords: string[]; hex: boolean; - }; -}; + } +>; /** * An object describing a Collation Analyzer */ -export type CollationAnalyzerDescription = GenericAnalyzerDescription & { - type: "collation"; - properties: { +export type CollationAnalyzerDescription = AnalyzerDescriptionType< + "collation", + { locale: string; - }; -}; + } +>; /** * (Enterprise Edition only.) An object describing a MinHash Analyzer */ -export type MinHashAnalyzerDescription = GenericAnalyzerDescription & { - type: "minhash"; - properties: { +export type MinHashAnalyzerDescription = AnalyzerDescriptionType< + "minhash", + { analyzer: Omit; numHashes: number; - }; -}; + } +>; /** * (Enterprise Edition only.) 
An object describing a Classification Analyzer */ -export type ClassificationAnalyzerDescription = GenericAnalyzerDescription & { - type: "classification"; - properties: { +export type ClassificationAnalyzerDescription = AnalyzerDescriptionType< + "classification", + { model_location: string; top_k: number; threshold: number; - }; -}; + } +>; /** * (Enterprise Edition only.) An object describing a NearestNeighbors Analyzer */ -export type NearestNeighborsAnalyzerDescription = GenericAnalyzerDescription & { - type: "nearest_neighbors"; - properties: { +export type NearestNeighborsAnalyzerDescription = AnalyzerDescriptionType< + "nearest_neighbors", + { model_location: string; top_k: number; - }; -}; + } +>; /** * An object describing a Wildcard Analyzer */ -export type WildcardAnalyzerDescription = GenericAnalyzerDescription & { - type: "wildcard"; - properties: { +export type WildcardAnalyzerDescription = AnalyzerDescriptionType< + "wildcard", + { ngramSize: number; analyzer?: Omit; - }; -}; + } +>; /** * An object describing a GeoJSON Analyzer */ -export type GeoJsonAnalyzerDescription = GenericAnalyzerDescription & { - type: "geojson"; - properties: { - type: "shape" | "centroid" | "point"; - description: { maxCells: number; minLevel: number; maxLevel: number }; - }; -}; +export type GeoJsonAnalyzerDescription = AnalyzerDescriptionType< + "geojson", + { + type: GeoType; + description: { + maxCells: number; + minLevel: number; + maxLevel: number; + }; + } +>; /** * An object describing a GeoPoint Analyzer */ -export type GeoPointAnalyzerDescription = GenericAnalyzerDescription & { - type: "geopoint"; - properties: { +export type GeoPointAnalyzerDescription = AnalyzerDescriptionType< + "geopoint", + { latitude: string[]; longitude: string[]; - description: { minCells: number; minLevel: number; maxLevel: number }; - }; -}; + description: { + minCells: number; + minLevel: number; + maxLevel: number; + }; + } +>; /** * (Enterprise Edition only.) An object describing a GeoS2 Analyzer */ -export type GeoS2AnalyzerDescription = GenericAnalyzerDescription & { - type: "geo_s2"; - properties: { - type: "shape" | "centroid" | "point"; - description: { maxCells: number; minLevel: number; maxLevel: number }; - format: "latLngDouble" | "latLngInt" | "s2Point"; - }; -}; +export type GeoS2AnalyzerDescription = AnalyzerDescriptionType< + "geo_s2", + { + type: GeoType; + description: { + maxCells: number; + minLevel: number; + maxLevel: number; + }; + format: GeoS2Format; + } +>; +//#endregion + +//#region Analyzer class +/** + * Indicates whether the given value represents an {@link Analyzer}. + * + * @param analyzer - A value that might be an Analyzer. + */ +export function isArangoAnalyzer(analyzer: any): analyzer is Analyzer { + return Boolean(analyzer && analyzer.isArangoAnalyzer); +} /** - * Represents an Analyzer in a {@link database.Database}. + * Represents an Analyzer in a {@link databases.Database}. */ export class Analyzer { protected _name: string; - protected _db: Database; + protected _db: databases.Database; /** * @internal */ - constructor(db: Database, name: string) { + constructor(db: databases.Database, name: string) { this._db = db; this._name = name; } @@ -943,7 +870,7 @@ export class Analyzer { /** * Name of this Analyzer. * - * See also {@link database.Database}. + * See also {@link databases.Database}. 
*/ get name() { return this._name; @@ -965,7 +892,7 @@ export class Analyzer { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === ANALYZER_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === ANALYZER_NOT_FOUND) { return false; } throw err; @@ -983,7 +910,7 @@ export class Analyzer { * // definition contains the Analyzer definition * ``` */ - get(): Promise> { + get(): Promise> { return this._db.request({ path: `/_api/analyzer/${encodeURIComponent(this._name)}`, }); @@ -992,7 +919,7 @@ export class Analyzer { /** * Creates a new Analyzer with the given `options` and the instance's name. * - * See also {@link database.Database#createAnalyzer}. + * See also {@link databases.Database#createAnalyzer}. * * @param options - Options for creating the Analyzer. * @@ -1064,7 +991,7 @@ export class Analyzer { * // the Analyzer "some-analyzer" no longer exists * ``` */ - drop(force: boolean = false): Promise> { + drop(force: boolean = false): Promise> { return this._db.request({ method: "DELETE", path: `/_api/analyzer/${encodeURIComponent(this._name)}`, @@ -1072,3 +999,4 @@ export class Analyzer { }); } } +//#endregion \ No newline at end of file diff --git a/src/aql.ts b/src/aql.ts index 36286a2b0..a45ae84cb 100644 --- a/src/aql.ts +++ b/src/aql.ts @@ -1,6 +1,6 @@ /** * ```js - * import { aql } from "arangojs/aql.js"; + * import { aql } from "arangojs/aql"; * ``` * * The "aql" module provides the {@link aql} template string handler and @@ -10,10 +10,10 @@ * * @packageDocumentation */ -import { isArangoAnalyzer } from "./analyzer.js"; -import { ArangoCollection, isArangoCollection } from "./collection.js"; -import { Graph, isArangoGraph } from "./graph.js"; -import { isArangoView, View } from "./view.js"; +import * as analyzers from "./analyzers.js"; +import * as collections from "./collections.js"; +import * as graphs from "./graphs.js"; +import * as views from "./views.js"; declare const type: unique symbol; @@ -72,9 +72,9 @@ export interface AqlLiteral { * helper function. */ export type AqlValue = - | ArangoCollection - | View - | Graph + | collections.ArangoCollection + | views.View + | graphs.Graph | GeneratedAqlQuery | AqlLiteral | string @@ -124,7 +124,7 @@ export function isAqlLiteral(literal: any): literal is AqlLiteral { * Tagged template strings will return an {@link AqlQuery} object with * `query` and `bindVars` attributes reflecting any interpolated values. * - * Any {@link collection.ArangoCollection} instance used in a query string will + * Any {@link collections.ArangoCollection} instance used in a query string will * be recognized as a collection reference and generate an AQL collection bind * parameter instead of a regular AQL value bind parameter. * @@ -246,10 +246,10 @@ export function aql( const isKnown = index !== -1; let name = `value${isKnown ? index : bindValues.length}`; if ( - isArangoCollection(rawValue) || - isArangoGraph(rawValue) || - isArangoView(rawValue) || - isArangoAnalyzer(rawValue) + collections.isArangoCollection(rawValue) || + graphs.isArangoGraph(rawValue) || + views.isArangoView(rawValue) || + analyzers.isArangoAnalyzer(rawValue) ) { name = `@${name}`; value = rawValue.name; diff --git a/src/cluster.ts b/src/cluster.ts new file mode 100644 index 000000000..4aa0169df --- /dev/null +++ b/src/cluster.ts @@ -0,0 +1,186 @@ +/** + * ```ts + * import type { ClusterImbalanceInfo } from "arangojs/cluster"; + * ``` + * + * The "cluster" module provides types for cluster management. 
+ * + * @packageDocumentation + */ + +//#region Cluster operation options +/** + * Options for rebalancing the cluster. + */ +export type ClusterRebalanceOptions = { + /** + * Maximum number of moves to be computed. + * + * Default: `1000` + */ + maximumNumberOfMoves?: number; + /** + * Allow leader changes without moving data. + * + * Default: `true` + */ + leaderChanges?: boolean; + /** + * Allow moving leaders. + * + * Default: `false` + */ + moveLeaders?: boolean; + /** + * Allow moving followers. + * + * Default: `false` + */ + moveFollowers?: boolean; + /** + * Ignore system collections in the rebalance plan. + * + * Default: `false` + */ + excludeSystemCollections?: boolean; + /** + * Default: `256**6` + */ + piFactor?: number; + /** + * A list of database names to exclude from the analysis. + * + * Default: `[]` + */ + databasesExcluded?: string[]; +}; +//#endregion + +//#region Cluster operation results +/** + * The result of a cluster rebalance. + */ +export type ClusterRebalanceResult = { + /** + * Imbalance before the suggested move shard operations are applied. + */ + imbalanceBefore: ClusterImbalanceInfo; + /** + * Expected imbalance after the suggested move shard operations are applied. + */ + imbalanceAfter: ClusterImbalanceInfo; + /** + * Suggested move shard operations. + */ + moves: ClusterRebalanceMove[]; +}; + +/** + * Information about the current state of the cluster imbalance. + */ +export type ClusterRebalanceState = ClusterImbalanceInfo & { + /** + * The number of pending move shard operations. + */ + pendingMoveShards: number; + /** + * The number of planned move shard operations. + */ + todoMoveShards: number; +}; + +/** + * Information about a cluster imbalance. + */ +export type ClusterImbalanceInfo = { + /** + * Information about the leader imbalance. + */ + leader: { + /** + * The weight of leader shards per DB-Server. A leader has a weight of 1 by default but it is higher if collections can only be moved together because of `distributeShardsLike`. + */ + weightUsed: number[]; + /** + * The ideal weight of leader shards per DB-Server. + */ + targetWeight: number[]; + /** + * The number of leader shards per DB-Server. + */ + numberShards: number[]; + /** + * The measure of the leader shard distribution. The higher the number, the worse the distribution. + */ + leaderDupl: number[]; + /** + * The sum of all weights. + */ + totalWeight: number; + /** + * The measure of the total imbalance. A high value indicates a high imbalance. + */ + imbalance: number; + /** + * The sum of shards, counting leader shards only. + */ + totalShards: number; + }; + /** + * Information about the shard imbalance. + */ + shards: { + /** + * The size of shards per DB-Server. + */ + sizeUsed: number[]; + /** + * The ideal size of shards per DB-Server. + */ + targetSize: number[]; + /** + * The number of leader and follower shards per DB-Server. + */ + numberShards: number[]; + /** + * The sum of the sizes. + */ + totalUsed: number; + /** + * The sum of shards, counting leader and follower shards. + */ + totalShards: number; + /** + * The sum of system collection shards, counting leader shards only. + */ + totalShardsFromSystemCollections: number; + /** + * The measure of the total imbalance. A high value indicates a high imbalance. + */ + imbalance: number; + }; +}; + +export type ClusterRebalanceMove = { + /** + * The server name from which to move. + */ + from: string; + /** + * The ID of the destination server. + */ + to: string; + /** + * Shard ID of the shard to be moved. 
+ */ + shard: string; + /** + * Collection ID of the collection the shard belongs to. + */ + collection: number; + /** + * True if this is a leader move shard operation. + */ + isLeader: boolean; +}; +//#endregion diff --git a/src/collection.ts b/src/collections.ts similarity index 71% rename from src/collection.ts rename to src/collections.ts index a9566f90b..942c064ee 100644 --- a/src/collection.ts +++ b/src/collections.ts @@ -3,48 +3,23 @@ * import type { * DocumentCollection, * EdgeCollection, - * } from "arangojs/collection.js"; + * } from "arangojs/collections"; * ``` * - * The "collection" module provides collection related types and interfaces + * The "collections" module provides collection related types and interfaces * for TypeScript. * * @packageDocumentation */ -import { AqlLiteral, AqlQuery, isAqlLiteral, isAqlQuery } from "./aql.js"; -import { ArangoApiResponse } from "./connection.js"; -import { Database } from "./database.js"; -import { - Document, - DocumentData, - DocumentMetadata, - DocumentSelector, - Edge, - EdgeData, - ObjectWithKey, - Patch, - _documentHandle, -} from "./documents.js"; -import { HttpError, isArangoError } from "./error.js"; -import { - EnsureGeoIndexOptions, - EnsureInvertedIndexOptions, - EnsurePersistentIndexOptions, - EnsureTtlIndexOptions, - EnsureMdiIndexOptions, - GeoIndex, - Index, - IndexSelector, - InvertedIndex, - PersistentIndex, - TtlIndex, - MdiIndex, - _indexHandle, - EnsureIndexOptions, - HiddenIndex, -} from "./indexes.js"; +import * as aql from "./aql.js"; +import * as connection from "./connection.js"; +import * as databases from "./databases.js"; +import * as documents from "./documents.js"; +import * as errors from "./errors.js"; +import * as indexes from "./indexes.js"; import { COLLECTION_NOT_FOUND, DOCUMENT_NOT_FOUND } from "./lib/codes.js"; +//#region ArangoCollection interface /** * Indicates whether the given value represents an {@link ArangoCollection}. * @@ -74,7 +49,7 @@ export function collectionToString( * A marker interface identifying objects that can be used in AQL template * strings to create references to ArangoDB collections. * - * See {@link aql!aql}. + * See {@link aql.aql}. */ export interface ArangoCollection { /** @@ -88,7 +63,9 @@ export interface ArangoCollection { */ readonly name: string; } +//#endregion +//#region Shared types /** * Integer values indicating the collection type. */ @@ -140,109 +117,91 @@ export type ValidationLevel = "none" | "new" | "moderate" | "strict"; * Write operation that can result in a computed value being computed. */ export type WriteOperation = "insert" | "update" | "replace"; +//#endregion +//#region Collection operation options /** - * Represents a bulk operation failure for an individual document. - */ -export type DocumentOperationFailure = { - /** - * Indicates that the operation failed. - */ - error: true; - /** - * Human-readable description of the failure. - */ - errorMessage: string; - /** - * Numeric representation of the failure. - */ - errorNum: number; -}; - -/** - * Metadata returned by a document operation. - */ -export type DocumentOperationMetadata = DocumentMetadata & { - /** - * Revision of the document that was updated or replaced by this operation. - */ - _oldRev?: string; -}; - -/** - * Properties defining a computed value. + * Options for creating a collection. + * + * See {@link databases.Database#createCollection}, {@link databases.Database#createEdgeCollection} + * and {@link DocumentCollection#create} or {@link EdgeCollection#create}. 
*/ -export type ComputedValueProperties = { +export type CreateCollectionOptions = CollectionPropertiesOptions & { /** - * Name of the target attribute of the computed value. - */ - name: string; - /** - * AQL `RETURN` expression that computes the value. + * @internal + * + * Whether the collection should be created as a system collection. + * + * Default: `false` */ - expression: string; + isSystem?: boolean; /** - * If set to `false`, the computed value will not be applied if the - * expression evaluates to `null`. + * An object defining the collection's key generation. */ - overwrite: boolean; + keyOptions?: CollectionKeyOptions; /** - * Which operations should result in the value being computed. + * (Cluster only.) Unless set to `false`, the server will wait for all + * replicas to create the collection before returning. + * + * Default: `true` */ - computeOn: WriteOperation[]; + waitForSyncReplication?: boolean; /** - * If set to `false`, the field will be unset if the expression evaluates to - * `null`. Otherwise the field will be set to the value `null`. Has no effect - * if `overwrite` is set to `false`. + * (Cluster only.) Unless set to `false`, the server will check whether + * enough replicas are available at creation time and bail out otherwise. + * + * Default: `true` */ - keepNull: boolean; + enforceReplicationFactor?: boolean; /** - * Whether the write operation should fail if the expression produces a - * warning. + * (Cluster only.) Number of shards to distribute the collection across. + * + * Default: `1` */ - failOnWarning: boolean; -}; - -/** - * General information about a collection. - */ -export type CollectionMetadata = { + numberOfShards?: number; /** - * Collection name. + * (Cluster only.) Document attributes to use to determine the target shard + * for each document. + * + * Default: `["_key"]` */ - name: string; + shardKeys?: string[]; /** - * A globally unique identifier for this collection. + * (Cluster only.) Sharding strategy to use. */ - globallyUniqueId: string; + shardingStrategy?: ShardingStrategy; /** - * An integer indicating the collection loading status. + * (Enterprise Edition cluster only.) If set to a collection name, sharding + * of the new collection will follow the rules for that collection. As long + * as the new collection exists, the indicated collection can not be dropped. */ - status: CollectionStatus; + distributeShardsLike?: string; /** - * An integer indicating the collection type. + * (Enterprise Edition cluster only.) Attribute containing the shard key + * value of the referred-to smart join collection. */ - type: CollectionType; + smartJoinAttribute?: string; /** - * @internal - * - * Whether the collection is a system collection. + * (Enterprise Edition cluster only.) Attribute used for sharding. */ - isSystem: boolean; + smartGraphAttribute?: string; }; /** * An object defining the collection's key generation. */ -export type CollectionKeyProperties = { +export type CollectionKeyOptions = { /** * Type of key generator to use. */ - type: KeyGenerator; + type?: KeyGenerator; /** - * Whether documents can be created with a user-specified `_key` attribute. + * Unless set to `false`, documents can be created with a user-specified + * `_key` attribute. + * + * Default: `true` */ - allowUserKeys: boolean; + allowUserKeys?: boolean; /** * (Autoincrement only.) How many steps to increment the key each time. */ @@ -251,116 +210,68 @@ export type CollectionKeyProperties = { * (Autoincrement only.) Initial offset for the key. 
*/ offset?: number; - /** - * Most recent key that has been generated. - */ - lastValue: number; }; /** - * Properties for validating documents in a collection. - */ -export type SchemaProperties = { - /** - * Type of document validation. - */ - type: "json"; - /** - * JSON Schema description of the validation schema for documents. - */ - rule: any; - /** - * When validation should be applied. - */ - level: ValidationLevel; - /** - * Message to be used if validation fails. - */ - message: string; -}; - -/** - * An object defining the properties of a collection. + * Options for setting a collection's properties. + * + * See {@link DocumentCollection#properties} and {@link EdgeCollection#properties}. */ -export type CollectionProperties = { - /** - * A human-readable representation of the collection loading status. - */ - statusString: string; +export type CollectionPropertiesOptions = { /** - * Whether data should be synchronized to disk before returning from + * If set to `true`, data will be synchronized to disk before returning from * a document create, update, replace or removal operation. + * + * Default: `false` */ - waitForSync: boolean; - /** - * An object defining the collection's key generation. - */ - keyOptions: CollectionKeyProperties; - /** - * Properties for validating documents in the collection. - */ - schema: SchemaProperties | null; - /** - * (Cluster only.) Write concern for this collection. - */ - writeConcern: number; - /** - * (Cluster only.) Number of shards of this collection. - */ - numberOfShards?: number; - /** - * (Cluster only.) Keys of this collection that will be used for - * sharding. - */ - shardKeys?: string[]; + waitForSync?: boolean; /** - * (Cluster only.) Replication factor of the collection. + * (Cluster only.) How many copies of each document should be kept in the + * cluster. + * + * Default: `1` */ replicationFactor?: number | "satellite"; /** - * (Cluster only.) Sharding strategy of the collection. - */ - shardingStrategy?: ShardingStrategy; - /** - * (Enterprise Edition cluster only.) If set to a collection name, sharding - * of the new collection will follow the rules for that collection. As long - * as the new collection exists, the indicated collection can not be dropped. - */ - distributeShardsLike?: string; - /** - * (Enterprise Edition cluster only.) Attribute containing the shard key - * value of the referred-to smart join collection. + * (Cluster only.) Write concern for this collection. */ - smartJoinAttribute?: string; + writeConcern?: number; /** - * (Enterprise Edition cluster only.) Attribute used for sharding. + * Options for validating documents in this collection. */ - smartGraphAttribute?: string; + schema?: SchemaOptions; /** - * Computed values applied to documents in this collection. + * Computed values to apply to documents in this collection. */ - computedValues: ComputedValueProperties[]; + computedValues?: ComputedValueOptions[]; /** * Whether the in-memory hash cache is enabled for this collection. + * + * Default: `false` */ - cacheEnabled: boolean; + cacheEnabled?: boolean; +}; + +/** + * Options for validating collection documents. + */ +export type SchemaOptions = { /** - * Whether the newer revision-based replication protocol is enabled for - * this collection. + * JSON Schema description of the validation schema for documents. */ - syncByRevision: boolean; + rule: any; /** - * (Enterprise Edition only.) Whether the collection is used in a SmartGraph or EnterpriseGraph. + * When validation should be applied. 
+ * + * Default: `"strict"` */ - isSmart?: boolean; + level?: ValidationLevel; /** - * (Enterprise Edition only.) Whether the SmartGraph this collection belongs to is disjoint. + * Message to be used if validation fails. */ - isDisjoint?: string; + message?: string; }; -// Options - /** * Options for creating a computed value. */ @@ -374,7 +285,7 @@ export type ComputedValueOptions = { * * Note that when passing an AQL query object, the `bindVars` will be ignored. */ - expression: string | AqlLiteral | AqlQuery; + expression: string | aql.AqlLiteral | aql.AqlQuery; /** * If set to `false`, the computed value will not be applied if the * expression evaluates to `null`. @@ -405,64 +316,6 @@ export type ComputedValueOptions = { failOnWarning?: boolean; }; -/** - * Options for validating collection documents. - */ -export type SchemaOptions = { - /** - * JSON Schema description of the validation schema for documents. - */ - rule: any; - /** - * When validation should be applied. - * - * Default: `"strict"` - */ - level?: ValidationLevel; - /** - * Message to be used if validation fails. - */ - message?: string; -}; - -/** - * Options for setting a collection's properties. - * - * See {@link DocumentCollection#properties} and {@link EdgeCollection#properties}. - */ -export type CollectionPropertiesOptions = { - /** - * Whether data should be synchronized to disk before returning from - * a document create, update, replace or removal operation. - */ - waitForSync?: boolean; - /** - * (Cluster only.) How many copies of each document should be kept in the - * cluster. - * - * Default: `1` - */ - replicationFactor?: number | "satellite"; - /** - * (Cluster only.) Write concern for this collection. - */ - writeConcern?: number; - /** - * Options for validating documents in this collection. - */ - schema?: SchemaOptions; - /** - * Computed values to apply to documents in this collection. - */ - computedValues?: ComputedValueOptions[]; - /** - * Whether the in-memory hash cache is enabled for this collection. - * - * Default: `false` - */ - cacheEnabled?: boolean; -}; - /** * Options for retrieving a collection checksum. */ @@ -511,571 +364,205 @@ export type CollectionDropOptions = { */ isSystem?: boolean; }; +//#endregion +//#region CollectionDescription /** - * An object defining the collection's key generation. - */ -export type CollectionKeyOptions = { - /** - * Type of key generator to use. - */ - type?: KeyGenerator; - /** - * Unless set to `false`, documents can be created with a user-specified - * `_key` attribute. - * - * Default: `true` - */ - allowUserKeys?: boolean; - /** - * (Autoincrement only.) How many steps to increment the key each time. - */ - increment?: number; - /** - * (Autoincrement only.) Initial offset for the key. - */ - offset?: number; -}; - -/** - * Options for creating a collection. - * - * See {@link database.Database#createCollection}, {@link database.Database#createEdgeCollection} - * and {@link DocumentCollection#create} or {@link EdgeCollection#create}. - */ -export type CreateCollectionOptions = { - /** - * If set to `true`, data will be synchronized to disk before returning from - * a document create, update, replace or removal operation. - * - * Default: `false` - */ - waitForSync?: boolean; - /** - * @internal - * - * Whether the collection should be created as a system collection. - * - * Default: `false` - */ - isSystem?: boolean; - /** - * An object defining the collection's key generation. 
- */ - keyOptions?: CollectionKeyOptions; - /** - * Options for validating documents in the collection. - */ - schema?: SchemaOptions; - /** - * (Cluster only.) Unless set to `false`, the server will wait for all - * replicas to create the collection before returning. - * - * Default: `true` - */ - waitForSyncReplication?: boolean; - /** - * (Cluster only.) Unless set to `false`, the server will check whether - * enough replicas are available at creation time and bail out otherwise. - * - * Default: `true` - */ - enforceReplicationFactor?: boolean; - /** - * (Cluster only.) Number of shards to distribute the collection across. - * - * Default: `1` - */ - numberOfShards?: number; - /** - * (Cluster only.) Document attributes to use to determine the target shard - * for each document. - * - * Default: `["_key"]` - */ - shardKeys?: string[]; - /** - * (Cluster only.) How many copies of each document should be kept in the - * cluster. - * - * Default: `1` - */ - replicationFactor?: number; - /** - * (Cluster only.) Write concern for this collection. - */ - writeConcern?: number; - /** - * (Cluster only.) Sharding strategy to use. - */ - shardingStrategy?: ShardingStrategy; - /** - * (Enterprise Edition cluster only.) If set to a collection name, sharding - * of the new collection will follow the rules for that collection. As long - * as the new collection exists, the indicated collection can not be dropped. - */ - distributeShardsLike?: string; - /** - * (Enterprise Edition cluster only.) Attribute containing the shard key - * value of the referred-to smart join collection. - */ - smartJoinAttribute?: string; - /** - * (Enterprise Edition cluster only.) Attribute used for sharding. - */ - smartGraphAttribute?: string; - /** - * Computed values to apply to documents in this collection. - */ - computedValues?: ComputedValueOptions[]; - /** - * Whether the in-memory hash cache is enabled for this collection. - */ - cacheEnabled?: boolean; -}; - -/** - * Options for checking whether a document exists in a collection. - */ -export type DocumentExistsOptions = { - /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. - */ - allowDirtyRead?: boolean; - /** - * If set to a document revision, the document will only match if its `_rev` - * matches the given revision. - */ - ifMatch?: string; - /** - * If set to a document revision, the document will only match if its `_rev` - * does not match the given revision. - */ - ifNoneMatch?: string; -}; - -/** - * Options for retrieving a document from a collection. - */ -export type CollectionReadOptions = { - /** - * If set to `true`, `null` is returned instead of an exception being thrown - * if the document does not exist. - */ - graceful?: boolean; - /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. - */ - allowDirtyRead?: boolean; - /** - * If set to a document revision, the request will fail with an error if the - * document exists but its `_rev` does not match the given revision. - */ - ifMatch?: string; - /** - * If set to a document revision, the request will fail with an error if the - * document exists and its `_rev` matches the given revision. 
Note that an - * `HttpError` with code 304 will be thrown instead of an `ArangoError`. - */ - ifNoneMatch?: string; -}; - -/** - * Options for retrieving multiple documents from a collection. - */ -export type CollectionBatchReadOptions = { - /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. - */ - allowDirtyRead?: boolean; - /** - * If set to `false`, the existing document will only be modified if its - * `_rev` property matches the same property on the new data. - * - * Default: `true` - */ - ignoreRevs?: boolean; -}; - -/** - * Options for inserting a new document into a collection. - */ -export type CollectionInsertOptions = { - /** - * If set to `true`, data will be synchronized to disk before returning. - * - * Default: `false` - */ - waitForSync?: boolean; - /** - * If set to `true`, no data will be returned by the server. This option can - * be used to reduce network traffic. - * - * Default: `false` - */ - silent?: boolean; - /** - * If set to `true`, the complete new document will be returned as the `new` - * property on the result object. Has no effect if `silent` is set to `true`. - * - * Default: `false` - */ - returnNew?: boolean; - /** - * If set to `true`, the complete old document will be returned as the `old` - * property on the result object. Has no effect if `silent` is set to `true`. - * This option is only available when `overwriteMode` is set to `"update"` or - * `"replace"`. - * - * Default: `false` - */ - returnOld?: boolean; - /** - * Defines what should happen if a document with the same `_key` or `_id` - * already exists, instead of throwing an exception. - * - * Default: `"conflict" - */ - overwriteMode?: "ignore" | "update" | "replace" | "conflict"; - /** - * If set to `false`, properties with a value of `null` will be removed from - * the new document. - * - * Default: `true` - */ - keepNull?: boolean; - /** - * If set to `false`, object properties that already exist in the old - * document will be overwritten rather than merged when an existing document - * with the same `_key` or `_id` is updated. This does not affect arrays. - * - * Default: `true` - */ - mergeObjects?: boolean; - /** - * If set to `true`, new entries will be added to in-memory index caches if - * document insertions affect the edge index or cache-enabled persistent - * indexes. - * - * Default: `false` - */ - refillIndexCaches?: boolean; - /** - * If set, the attribute with the name specified by the option is looked up - * in the stored document and the attribute value is compared numerically to - * the value of the versioning attribute in the supplied document that is - * supposed to update/replace it. - */ - versionAttribute?: string; -}; - -/** - * Options for replacing an existing document in a collection. + * General information about a collection. */ -export type CollectionReplaceOptions = { +export type CollectionDescription = { /** - * If set to `true`, data will be synchronized to disk before returning. - * - * Default: `false` - */ - waitForSync?: boolean; - /** - * If set to `true`, no data will be returned by the server. This option can - * be used to reduce network traffic. - * - * Default: `false` - */ - silent?: boolean; - /** - * If set to `true`, the complete new document will be returned as the `new` - * property on the result object. Has no effect if `silent` is set to `true`. 
- * - * Default: `false` - */ - returnNew?: boolean; - /** - * If set to `false`, the existing document will only be modified if its - * `_rev` property matches the same property on the new data. - * - * Default: `true` + * Collection name. */ - ignoreRevs?: boolean; + name: string; /** - * If set to `true`, the complete old document will be returned as the `old` - * property on the result object. Has no effect if `silent` is set to `true`. - * - * Default: `false` + * A globally unique identifier for this collection. */ - returnOld?: boolean; + globallyUniqueId: string; /** - * If set to a document revision, the document will only be replaced if its - * `_rev` matches the given revision. + * An integer indicating the collection loading status. */ - ifMatch?: string; + status: CollectionStatus; /** - * If set to `true`, existing entries in in-memory index caches will be - * updated if document replacements affect the edge index or cache-enabled - * persistent indexes. - * - * Default: `false` + * An integer indicating the collection type. */ - refillIndexCaches?: boolean; + type: CollectionType; /** - * If set, the attribute with the name specified by the option is looked up - * in the stored document and the attribute value is compared numerically to - * the value of the versioning attribute in the supplied document that is - * supposed to update/replace it. + * @internal + * + * Whether the collection is a system collection. */ - versionAttribute?: string; + isSystem: boolean; }; +//#endregion +//#region CollectionProperties /** - * Options for updating a document in a collection. + * An object defining the properties of a collection. */ -export type CollectionUpdateOptions = { +export type CollectionProperties = { /** - * If set to `true`, data will be synchronized to disk before returning. - * - * Default: `false` + * A human-readable representation of the collection loading status. */ - waitForSync?: boolean; + statusString: string; /** - * If set to `true`, no data will be returned by the server. This option can - * be used to reduce network traffic. - * - * Default: `false` + * Whether data should be synchronized to disk before returning from + * a document create, update, replace or removal operation. */ - silent?: boolean; + waitForSync: boolean; /** - * If set to `true`, the complete new document will be returned as the `new` - * property on the result object. Has no effect if `silent` is set to `true`. - * - * Default: `false` + * An object defining the collection's key generation. */ - returnNew?: boolean; + keyOptions: CollectionKeyProperties; /** - * If set to `false`, the existing document will only be modified if its - * `_rev` property matches the same property on the new data. - * - * Default: `true` + * Properties for validating documents in the collection. */ - ignoreRevs?: boolean; + schema: SchemaProperties | null; /** - * If set to `true`, the complete old document will be returned as the `old` - * property on the result object. Has no effect if `silent` is set to `true`. - * - * Default: `false` + * (Cluster only.) Write concern for this collection. */ - returnOld?: boolean; + writeConcern: number; /** - * If set to `false`, properties with a value of `null` will be removed from - * the new document. - * - * Default: `true` + * (Cluster only.) Number of shards of this collection. */ - keepNull?: boolean; + numberOfShards?: number; /** - * If set to `false`, object properties that already exist in the old - * document will be overwritten rather than merged. 
This does not affect - * arrays. - * - * Default: `true` + * (Cluster only.) Keys of this collection that will be used for + * sharding. */ - mergeObjects?: boolean; + shardKeys?: string[]; /** - * If set to a document revision, the document will only be updated if its - * `_rev` matches the given revision. + * (Cluster only.) Replication factor of the collection. */ - ifMatch?: string; + replicationFactor?: number | "satellite"; /** - * If set to `true`, existing entries in in-memory index caches will be - * updated if document updates affect the edge index or cache-enabled - * persistent indexes. - * - * Default: `false` + * (Cluster only.) Sharding strategy of the collection. */ - refillIndexCaches?: boolean; + shardingStrategy?: ShardingStrategy; /** - * If set, the attribute with the name specified by the option is looked up - * in the stored document and the attribute value is compared numerically to - * the value of the versioning attribute in the supplied document that is - * supposed to update/replace it. + * (Enterprise Edition cluster only.) If set to a collection name, sharding + * of the new collection will follow the rules for that collection. As long + * as the new collection exists, the indicated collection can not be dropped. */ - versionAttribute?: string; -}; - -/** - * Options for removing a document from a collection. - */ -export type CollectionRemoveOptions = { + distributeShardsLike?: string; /** - * If set to `true`, changes will be synchronized to disk before returning. - * - * Default: `false` + * (Enterprise Edition cluster only.) Attribute containing the shard key + * value of the referred-to smart join collection. */ - waitForSync?: boolean; + smartJoinAttribute?: string; /** - * If set to `true`, the complete old document will be returned as the `old` - * property on the result object. Has no effect if `silent` is set to `true`. - * - * Default: `false` + * (Enterprise Edition cluster only.) Attribute used for sharding. */ - returnOld?: boolean; + smartGraphAttribute?: string; /** - * If set to `true`, no data will be returned by the server. This option can - * be used to reduce network traffic. - * - * Default: `false` + * Computed values applied to documents in this collection. */ - silent?: boolean; + computedValues: ComputedValueProperties[]; /** - * If set to a document revision, the document will only be removed if its - * `_rev` matches the given revision. + * Whether the in-memory hash cache is enabled for this collection. */ - ifMatch?: string; + cacheEnabled: boolean; /** - * If set to `true`, existing entries in in-memory index caches will be - * deleted if document removals affect the edge index or cache-enabled - * persistent indexes. - * - * Default: `false` + * Whether the newer revision-based replication protocol is enabled for + * this collection. */ - refillIndexCaches?: boolean; -}; - -/** - * Options for bulk importing documents into a collection. - */ -export type CollectionImportOptions = { + syncByRevision: boolean; /** - * (Edge collections only.) Prefix to prepend to `_from` attribute values. + * (Enterprise Edition only.) Whether the collection is used in a SmartGraph or EnterpriseGraph. */ - fromPrefix?: string; + isSmart?: boolean; /** - * (Edge collections only.) Prefix to prepend to `_to` attribute values. + * (Enterprise Edition only.) Whether the SmartGraph this collection belongs to is disjoint. */ - toPrefix?: string; + isDisjoint?: string; +}; + +/** + * An object defining the collection's key generation. 
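+ *
+ * As a sketch of typical usage, these properties can be read back from the
+ * server via `collection.properties()` (the collection name below is
+ * illustrative only):
+ *
+ * @example
+ * ```js
+ * const db = new Database();
+ * const collection = db.collection("some-collection");
+ * // keyOptions is a CollectionKeyProperties object
+ * const { keyOptions } = await collection.properties();
+ * console.log(keyOptions.type, keyOptions.allowUserKeys);
+ * ```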
+ */ +export type CollectionKeyProperties = { /** - * If set to `true`, the collection is truncated before the data is imported. - * - * Default: `false` + * Type of key generator to use. */ - overwrite?: boolean; + type: KeyGenerator; /** - * Whether to wait for the documents to have been synced to disk. + * Whether documents can be created with a user-specified `_key` attribute. */ - waitForSync?: boolean; + allowUserKeys: boolean; /** - * Controls behavior when a unique constraint is violated on the document key. - * - * * `"error"`: the document will not be imported. - * * `"update`: the document will be merged into the existing document. - * * `"replace"`: the document will replace the existing document. - * * `"ignore"`: the document will not be imported and the unique constraint - * error will be ignored. - * - * Default: `"error"` + * (Autoincrement only.) How many steps to increment the key each time. */ - onDuplicate?: "error" | "update" | "replace" | "ignore"; + increment?: number; /** - * If set to `true`, the import will abort if any error occurs. + * (Autoincrement only.) Initial offset for the key. */ - complete?: boolean; + offset?: number; /** - * Whether the response should contain additional details about documents - * that could not be imported. + * Most recent key that has been generated. */ - details?: boolean; + lastValue: number; }; /** - * Options for retrieving a document's edges from a collection. + * Properties for validating documents in a collection. */ -export type CollectionEdgesOptions = { +export type SchemaProperties = { /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. + * Type of document validation. */ - allowDirtyRead?: boolean; -}; - -export type IndexListOptions = { + type: "json"; /** - * If set to `true`, includes additional information about each index. - * - * Default: `false` + * JSON Schema description of the validation schema for documents. */ - withStats?: boolean; + rule: any; /** - * If set to `true`, includes internal indexes as well as indexes that are - * not yet fully built but are in the building phase. - * - * You should cast the resulting indexes to `HiddenIndex` to ensure internal - * and incomplete indexes are accurately represented. - * - * Default: `false`. + * When validation should be applied. */ - withHidden?: boolean; + level: ValidationLevel; + /** + * Message to be used if validation fails. + */ + message: string; }; -// Results - /** - * Result of a collection bulk import. + * Properties defining a computed value. */ -export type CollectionImportResult = { - /** - * Whether the import failed. - */ - error: false; +export type ComputedValueProperties = { /** - * Number of new documents imported. + * Name of the target attribute of the computed value. */ - created: number; + name: string; /** - * Number of documents that failed with an error. + * AQL `RETURN` expression that computes the value. */ - errors: number; + expression: string; /** - * Number of empty documents. + * If set to `false`, the computed value will not be applied if the + * expression evaluates to `null`. */ - empty: number; + overwrite: boolean; /** - * Number of documents updated. + * Which operations should result in the value being computed. */ - updated: number; + computeOn: WriteOperation[]; /** - * Number of documents that failed with an error that is ignored. 
+ * If set to `false`, the field will be unset if the expression evaluates to + * `null`. Otherwise the field will be set to the value `null`. Has no effect + * if `overwrite` is set to `false`. */ - ignored: number; + keepNull: boolean; /** - * Additional details about any errors encountered during the import. + * Whether the write operation should fail if the expression produces a + * warning. */ - details?: string[]; -}; - -/** - * Result of retrieving edges in a collection. - */ -export type CollectionEdgesResult = any> = { - edges: Edge[]; - stats: { - scannedIndex: number; - filtered: number; - }; + failOnWarning: boolean; }; +//#endregion -// Collections - +//#region DocumentCollection interface /** - * Represents an document collection in a {@link database.Database}. + * Represents an document collection in a {@link databases.Database}. * * See {@link EdgeCollection} for a variant of this interface more suited for * edge collections. @@ -1083,7 +570,10 @@ export type CollectionEdgesResult = any> = { * When using TypeScript, collections can be cast to a specific document data * type to increase type safety. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent document contents returned by the + * server (including computed properties). + * @param EntryInputType - Type to represent document contents passed when + * inserting or replacing documents (without computed properties). * * @example * ```ts @@ -1101,7 +591,8 @@ export interface DocumentCollection< /** * Database this collection belongs to. */ - readonly database: Database; + readonly database: databases.Database; + //#region Collection operations /** * Checks whether the collection exists. * @@ -1125,12 +616,12 @@ export interface DocumentCollection< * // data contains general information about the collection * ``` */ - get(): Promise>; + get(): Promise>; /** * Creates a collection with the given `options` and the instance's name. * - * See also {@link database.Database#createCollection} and - * {@link database.Database#createEdgeCollection}. + * See also {@link databases.Database#createCollection} and + * {@link databases.Database#createEdgeCollection}. * * **Note**: When called on an {@link EdgeCollection} instance in TypeScript, * the `type` option must still be set to the correct {@link CollectionType}. @@ -1173,7 +664,7 @@ export interface DocumentCollection< options?: CreateCollectionOptions & { type?: CollectionType; } - ): Promise>; + ): Promise>; /** * Retrieves the collection's properties. * @@ -1186,7 +677,7 @@ export interface DocumentCollection< * ``` */ properties(): Promise< - ArangoApiResponse + connection.ArangoApiResponse >; /** * Replaces the properties of the collection. @@ -1202,7 +693,7 @@ export interface DocumentCollection< */ properties( properties: CollectionPropertiesOptions - ): Promise>; + ): Promise>; /** * Retrieves information about the number of documents in a collection. 
* @@ -1215,8 +706,8 @@ export interface DocumentCollection< * ``` */ count(): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { count: number } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { count: number } > >; /** @@ -1252,8 +743,8 @@ export interface DocumentCollection< figures( details?: boolean ): Promise< - ArangoApiResponse< - CollectionMetadata & + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { count: number; figures: Record } > >; @@ -1269,8 +760,8 @@ export interface DocumentCollection< * ``` */ revision(): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { revision: string } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { revision: string } > >; /** @@ -1289,27 +780,14 @@ export interface DocumentCollection< checksum( options?: CollectionChecksumOptions ): Promise< - ArangoApiResponse< - CollectionMetadata & { revision: string; checksum: string } + connection.ArangoApiResponse< + CollectionDescription & { revision: string; checksum: string } > >; - /** - * Instructs ArangoDB to load as many indexes of the collection into memory - * as permitted by the memory limit. - * - * @example - * ```js - * const db = new Database(); - * const collection = db.collection("indexed-collection"); - * await collection.loadIndexes(); - * // the indexes are now loaded into memory - * ``` - */ - loadIndexes(): Promise; /** * Renames the collection and updates the instance's `name` to `newName`. * - * Additionally removes the instance from the {@link database.Database}'s internal + * Additionally removes the instance from the {@link databases.Database}'s internal * cache. * * **Note**: Renaming collections may not be supported when ArangoDB is @@ -1328,7 +806,7 @@ export interface DocumentCollection< * // collection1 and collection3 represent the same ArangoDB collection! * ``` */ - rename(newName: string): Promise>; + rename(newName: string): Promise>; /** * Deletes all documents in the collection. * @@ -1341,7 +819,7 @@ export interface DocumentCollection< * // the collection "some-collection" is now empty * ``` */ - truncate(options?: CollectionTruncateOptions): Promise>; + truncate(options?: CollectionTruncateOptions): Promise>; /** * Deletes the collection from the database. * @@ -1357,9 +835,22 @@ export interface DocumentCollection< */ drop( options?: CollectionDropOptions - ): Promise>>; + ): Promise>; + /** + * Triggers compaction for a collection. + * + * @example + * ```js + * const db = new Database(); + * const collection = db.collection("some-collection"); + * await collection.compact(); + * // Background compaction is triggered on the collection + * ``` + */ + compact(): Promise>; + //#endregion - //#region crud + //#region Document operations /** * Retrieves the `shardId` of the shard responsible for the given document. * @@ -1373,7 +864,7 @@ export interface DocumentCollection< * ``` */ getResponsibleShard( - document: Partial> + document: Partial> ): Promise; /** * Derives a document `_id` from the given selector for this collection. @@ -1408,7 +899,7 @@ export interface DocumentCollection< * console.log(collection2.documentId(meta._key)); // ok but wrong collection * ``` */ - documentId(selector: DocumentSelector): string; + documentId(selector: documents.DocumentSelector): string; /** * Checks whether a document matching the given key or id exists in this * collection. 
@@ -1430,8 +921,8 @@ export interface DocumentCollection< * ``` */ documentExists( - selector: DocumentSelector, - options?: DocumentExistsOptions + selector: documents.DocumentSelector, + options?: documents.DocumentExistsOptions ): Promise; /** * Retrieves the document matching the given key or id. @@ -1468,9 +959,9 @@ export interface DocumentCollection< * ``` */ document( - selector: DocumentSelector, - options?: CollectionReadOptions - ): Promise>; + selector: documents.DocumentSelector, + options?: documents.ReadDocumentOptions + ): Promise>; /** * Retrieves the document matching the given key or id. * @@ -1507,9 +998,9 @@ export interface DocumentCollection< * ``` */ document( - selector: DocumentSelector, + selector: documents.DocumentSelector, graceful: boolean - ): Promise>; + ): Promise>; /** * Retrieves the documents matching the given key or id values. * @@ -1533,9 +1024,9 @@ export interface DocumentCollection< * ``` */ documents( - selectors: (string | ObjectWithKey)[], - options?: CollectionBatchReadOptions - ): Promise[]>; + selectors: (string | documents.ObjectWithDocumentKey)[], + options?: documents.BulkReadDocumentsOptions + ): Promise[]>; /** * Inserts a new document with the given `data` into the collection. * @@ -1554,12 +1045,12 @@ export interface DocumentCollection< * ``` */ save( - data: DocumentData, - options?: CollectionInsertOptions + data: documents.DocumentData, + options?: documents.InsertDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Document; - old?: Document; + documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; } >; /** @@ -1584,15 +1075,15 @@ export interface DocumentCollection< * ``` */ saveAll( - data: Array>, - options?: CollectionInsertOptions + data: Array>, + options?: documents.InsertDocumentOptions ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Document; - old?: Document; + | (documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -1620,13 +1111,13 @@ export interface DocumentCollection< * ``` */ replace( - selector: DocumentSelector, - newData: DocumentData, - options?: CollectionReplaceOptions + selector: documents.DocumentSelector, + newData: documents.DocumentData, + options?: documents.ReplaceDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Document; - old?: Document; + documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; } >; /** @@ -1655,16 +1146,16 @@ export interface DocumentCollection< */ replaceAll( newData: Array< - DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & ({ _key: string } | { _id: string }) >, - options?: Omit + options?: Omit ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Document; - old?: Document; + | (documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -1692,13 +1183,13 @@ export interface DocumentCollection< * ``` */ update( - selector: DocumentSelector, - newData: Patch>, - options?: CollectionUpdateOptions + selector: documents.DocumentSelector, + newData: documents.Patch>, + options?: documents.UpdateDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Document; - old?: Document; + documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: 
documents.Document; } >; /** @@ -1727,16 +1218,16 @@ export interface DocumentCollection< */ updateAll( newData: Array< - Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & ({ _key: string } | { _id: string }) >, - options?: Omit + options?: Omit ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Document; - old?: Document; + | (documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -1767,9 +1258,9 @@ export interface DocumentCollection< * ``` */ remove( - selector: DocumentSelector, - options?: CollectionRemoveOptions - ): Promise }>; + selector: documents.DocumentSelector, + options?: documents.RemoveDocumentOptions + ): Promise }>; /** * Removes existing documents from the collection. * @@ -1789,12 +1280,12 @@ export interface DocumentCollection< * ``` */ removeAll( - selectors: (string | ObjectWithKey)[], - options?: Omit + selectors: (string | documents.ObjectWithDocumentKey)[], + options?: Omit ): Promise< Array< - | (DocumentMetadata & { old?: Document }) - | DocumentOperationFailure + | (documents.DocumentMetadata & { old?: documents.Document }) + | documents.DocumentOperationFailure > >; /** @@ -1817,9 +1308,9 @@ export interface DocumentCollection< * ``` */ import( - data: DocumentData[], - options?: CollectionImportOptions - ): Promise; + data: documents.DocumentData[], + options?: documents.ImportDocumentsOptions + ): Promise; /** * Bulk imports the given `data` into the collection. * @@ -1844,8 +1335,8 @@ export interface DocumentCollection< */ import( data: any[][], - options?: CollectionImportOptions - ): Promise; + options?: documents.ImportDocumentsOptions + ): Promise; /** * Bulk imports the given `data` into the collection. * @@ -1904,13 +1395,26 @@ export interface DocumentCollection< */ import( data: Buffer | Blob | string, - options?: CollectionImportOptions & { + options?: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; } - ): Promise; + ): Promise; //#endregion - //#region indexes + //#region Index operations + /** + * Instructs ArangoDB to load as many indexes of the collection into memory + * as permitted by the memory limit. + * + * @example + * ```js + * const db = new Database(); + * const collection = db.collection("indexed-collection"); + * await collection.loadIndexes(); + * // the indexes are now loaded into memory + * ``` + */ + loadIndexes(): Promise; /** * Returns a list of all index descriptions for the collection. * @@ -1927,13 +1431,13 @@ export interface DocumentCollection< * ```js * const db = new Database(); * const collection = db.collection("some-collection"); - * const allIndexes = await collection.indexes({ + * const allIndexes = await collection.indexes({ * withHidden: true * }); * ``` */ - indexes( - options?: IndexListOptions + indexes( + options?: indexes.ListIndexesOptions ): Promise; /** * Returns an index description by name or `id` if it exists. @@ -1947,11 +1451,11 @@ export interface DocumentCollection< * const index = await collection.index("some-index"); * ``` */ - index(selector: IndexSelector): Promise; + index(selector: indexes.IndexSelector): Promise; /** * Creates a persistent index on the collection if it does not already exist. * - * @param details - Options for creating the persistent index. + * @param options - Options for creating the persistent index. 
* * @example * ```js @@ -1967,12 +1471,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsurePersistentIndexOptions - ): Promise>; + options: indexes.EnsurePersistentIndexOptions + ): Promise>; /** * Creates a TTL index on the collection if it does not already exist. * - * @param details - Options for creating the TTL index. + * @param options - Options for creating the TTL index. * * @example * ```js @@ -1999,12 +1503,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureTtlIndexOptions - ): Promise>; + options: indexes.EnsureTtlIndexOptions + ): Promise>; /** * Creates a multi-dimensional index on the collection if it does not already exist. * - * @param details - Options for creating the multi-dimensional index. + * @param options - Options for creating the multi-dimensional index. * * @example * ```js @@ -2020,12 +1524,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureMdiIndexOptions - ): Promise>; + options: indexes.EnsureMdiIndexOptions + ): Promise>; /** * Creates a geo index on the collection if it does not already exist. * - * @param details - Options for creating the geo index. + * @param options - Options for creating the geo index. * * @example * ```js @@ -2040,12 +1544,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureGeoIndexOptions - ): Promise>; + options: indexes.EnsureGeoIndexOptions + ): Promise>; /** * Creates a inverted index on the collection if it does not already exist. * - * @param details - Options for creating the inverted index. + * @param options - Options for creating the inverted index. * * @example * ```js @@ -2059,12 +1563,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureInvertedIndexOptions - ): Promise>; + options: indexes.EnsureInvertedIndexOptions + ): Promise>; /** * Creates an index on the collection if it does not already exist. * - * @param details - Options for creating the index. + * @param options - Options for creating the index. * * @example * ```js @@ -2080,8 +1584,8 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureIndexOptions - ): Promise>; + options: indexes.EnsureIndexOptions + ): Promise>; /** * Deletes the index with the given name or `id` from the database. * @@ -2096,36 +1600,29 @@ export interface DocumentCollection< * ``` */ dropIndex( - selector: IndexSelector - ): Promise>; - /** - * Triggers compaction for a collection. - * - * @example - * ```js - * const db = new Database(); - * const collection = db.collection("some-collection"); - * await collection.compact(); - * // Background compaction is triggered on the collection - * ``` - */ - compact(): Promise>>; + selector: indexes.IndexSelector + ): Promise>; //#endregion } +//#endregion +//#region EdgeCollection interface /** - * Represents an edge collection in a {@link database.Database}. + * Represents an edge collection in a {@link databases.Database}. * * See {@link DocumentCollection} for a more generic variant of this interface * more suited for regular document collections. * - * See also {@link graph.GraphEdgeCollection} for the type representing an edge - * collection in a {@link graph.Graph}. + * See also {@link graphs.GraphEdgeCollection} for the type representing an edge + * collection in a {@link graphs.Graph}. * * When using TypeScript, collections can be cast to a specific edge document * data type to increase type safety. * - * @param T - Type to use for edge document data. 
Defaults to `any`. + * @param EntryResultType - Type to represent edge document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent edge document contents passed when + * inserting or replacing edge documents (without computed properties). * * @example * ```ts @@ -2141,6 +1638,7 @@ export interface EdgeCollection< EntryResultType extends Record = any, EntryInputType extends Record = EntryResultType, > extends DocumentCollection { + //#region Document operations /** * Retrieves the document matching the given key or id. * @@ -2176,9 +1674,9 @@ export interface EdgeCollection< * ``` */ document( - selector: DocumentSelector, - options?: CollectionReadOptions - ): Promise>; + selector: documents.DocumentSelector, + options?: documents.ReadDocumentOptions + ): Promise>; /** * Retrieves the document matching the given key or id. * @@ -2215,9 +1713,9 @@ export interface EdgeCollection< * ``` */ document( - selector: DocumentSelector, + selector: documents.DocumentSelector, graceful: boolean - ): Promise>; + ): Promise>; /** * Retrieves the documents matching the given key or id values. * @@ -2241,9 +1739,9 @@ export interface EdgeCollection< * ``` */ documents( - selectors: (string | ObjectWithKey)[], - options?: CollectionBatchReadOptions - ): Promise[]>; + selectors: (string | documents.ObjectWithDocumentKey)[], + options?: documents.BulkReadDocumentsOptions + ): Promise[]>; /** * Inserts a new document with the given `data` into the collection. * @@ -2261,12 +1759,12 @@ export interface EdgeCollection< * ``` */ save( - data: EdgeData, - options?: CollectionInsertOptions + data: documents.EdgeData, + options?: documents.InsertDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; } >; /** @@ -2289,15 +1787,15 @@ export interface EdgeCollection< * ``` */ saveAll( - data: Array>, - options?: CollectionInsertOptions + data: Array>, + options?: documents.InsertDocumentOptions ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + | (documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -2333,13 +1831,13 @@ export interface EdgeCollection< * ``` */ replace( - selector: DocumentSelector, - newData: DocumentData, - options?: CollectionReplaceOptions + selector: documents.DocumentSelector, + newData: documents.DocumentData, + options?: documents.ReplaceDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; } >; /** @@ -2384,16 +1882,16 @@ export interface EdgeCollection< */ replaceAll( newData: Array< - DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & ({ _key: string } | { _id: string }) >, - options?: CollectionReplaceOptions + options?: documents.ReplaceDocumentOptions ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + | (documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -2429,13 +1927,13 @@ export interface EdgeCollection< * ``` */ update( - selector: DocumentSelector, - newData: Patch>, - options?: CollectionUpdateOptions + selector: documents.DocumentSelector, + 
newData: documents.Patch>, + options?: documents.UpdateDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; } >; /** @@ -2478,16 +1976,16 @@ export interface EdgeCollection< */ updateAll( newData: Array< - Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & ({ _key: string } | { _id: string }) >, - options?: CollectionUpdateOptions + options?: documents.UpdateDocumentOptions ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + | (documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -2510,9 +2008,9 @@ export interface EdgeCollection< * ``` */ remove( - selector: DocumentSelector, - options?: CollectionRemoveOptions - ): Promise }>; + selector: documents.DocumentSelector, + options?: documents.RemoveDocumentOptions + ): Promise }>; /** * Removes existing documents from the collection. * @@ -2532,12 +2030,12 @@ export interface EdgeCollection< * ``` */ removeAll( - selectors: DocumentSelector[], - options?: CollectionRemoveOptions + selectors: documents.DocumentSelector[], + options?: documents.RemoveDocumentOptions ): Promise< Array< - | (DocumentMetadata & { old?: Edge }) - | DocumentOperationFailure + | (documents.DocumentMetadata & { old?: documents.Edge }) + | documents.DocumentOperationFailure > >; /** @@ -2559,9 +2057,9 @@ export interface EdgeCollection< * ``` */ import( - data: EdgeData[], - options?: CollectionImportOptions - ): Promise; + data: documents.EdgeData[], + options?: documents.ImportDocumentsOptions + ): Promise; /** * Bulk imports the given `data` into the collection. * @@ -2585,8 +2083,8 @@ export interface EdgeCollection< */ import( data: any[][], - options?: CollectionImportOptions - ): Promise; + options?: documents.ImportDocumentsOptions + ): Promise; /** * Bulk imports the given `data` into the collection. * @@ -2642,16 +2140,16 @@ export interface EdgeCollection< */ import( data: Buffer | Blob | string, - options?: CollectionImportOptions & { + options?: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; } - ): Promise; + ): Promise; //#endregion - //#region edges + //#region Edge operations /** - * Retrieves a list of all edges of the document matching the given - * `selector`. + * Retrieves a list of all edges in this collection of the document matching + * the given `selector`. * * Throws an exception when passed a document or `_id` from a different * collection. @@ -2675,9 +2173,9 @@ export interface EdgeCollection< * ``` */ edges( - selector: DocumentSelector, - options?: CollectionEdgesOptions - ): Promise>>; + selector: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ): Promise>>; /** * Retrieves a list of all incoming edges of the document matching the given * `selector`. @@ -2704,9 +2202,9 @@ export interface EdgeCollection< * ``` */ inEdges( - selector: DocumentSelector, - options?: CollectionEdgesOptions - ): Promise>>; + selector: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ): Promise>>; /** * Retrieves a list of all outgoing edges of the document matching the given * `selector`. 
@@ -2733,13 +2231,14 @@ export interface EdgeCollection< * ``` */ outEdges( - selector: DocumentSelector, - options?: CollectionEdgesOptions - ): Promise>>; - + selector: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ): Promise>>; //#endregion } +//#endregion +//#region Collection class /** * @internal */ @@ -2750,20 +2249,17 @@ export class Collection< implements EdgeCollection, DocumentCollection { - //#region attributes protected _name: string; - protected _db: Database; - //#endregion + protected _db: databases.Database; /** * @internal */ - constructor(db: Database, name: string) { + constructor(db: databases.Database, name: string) { this._name = name; this._db = db; } - //#region metadata get isArangoCollection(): true { return true; } @@ -2776,6 +2272,7 @@ export class Collection< return this._name; } + //#region Collection operations get() { return this._db.request({ path: `/_api/collection/${encodeURIComponent(this._name)}`, @@ -2787,7 +2284,7 @@ export class Collection< await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === COLLECTION_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === COLLECTION_NOT_FOUND) { return false; } throw err; @@ -2806,13 +2303,13 @@ export class Collection< } = options; if (opts.computedValues) { opts.computedValues = opts.computedValues.map((computedValue) => { - if (isAqlLiteral(computedValue.expression)) { + if (aql.isAqlLiteral(computedValue.expression)) { return { ...computedValue, expression: computedValue.expression.toAQL(), }; } - if (isAqlQuery(computedValue.expression)) { + if (aql.isAqlQuery(computedValue.expression)) { return { ...computedValue, expression: computedValue.expression.query, @@ -2841,7 +2338,7 @@ export class Collection< properties( properties?: CollectionPropertiesOptions - ): Promise> { + ): Promise> { if (!properties) { return this._db.request({ path: `/_api/collection/${encodeURIComponent(this._name)}/properties`, @@ -2855,8 +2352,8 @@ export class Collection< } count(): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { count: number } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { count: number } > > { return this._db.request({ @@ -2879,9 +2376,8 @@ export class Collection< figures( details = false ): Promise< - CollectionMetadata & - ArangoApiResponse< - CollectionProperties & { count: number; figures: Record } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { count: number; figures: Record } > > { return this._db.request({ @@ -2891,8 +2387,8 @@ export class Collection< } revision(): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { revision: string } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { revision: string } > > { return this._db.request({ @@ -2903,8 +2399,8 @@ export class Collection< checksum( options?: CollectionChecksumOptions ): Promise< - ArangoApiResponse< - CollectionMetadata & { revision: string; checksum: string } + connection.ArangoApiResponse< + CollectionDescription & { revision: string; checksum: string } > > { return this._db.request({ @@ -2913,25 +2409,13 @@ export class Collection< }); } - async loadIndexes(): Promise { - return this._db.request( - { - method: "PUT", - path: `/_api/collection/${encodeURIComponent( - this._name - )}/loadIndexesIntoMemory`, - }, - (res) => res.parsedBody.result - ); - } - async rename(newName: string) { const result = await 
this._db.renameCollection(this._name, newName); this._name = newName; return result; } - truncate(options?: CollectionTruncateOptions): Promise> { + truncate(options?: CollectionTruncateOptions): Promise> { return this._db.request({ method: "PUT", path: `/_api/collection/${this._name}/truncate`, @@ -2946,11 +2430,20 @@ export class Collection< search: options, }); } + + compact() { + return this._db.request( + { + method: "PUT", + path: `/_api/collection/${this._name}/compact`, + } + ); + } //#endregion - //#region crud + //#region Document operations getResponsibleShard( - document: Partial> + document: Partial> ): Promise { return this._db.request( { @@ -2964,13 +2457,13 @@ export class Collection< ); } - documentId(selector: DocumentSelector): string { - return _documentHandle(selector, this._name); + documentId(selector: documents.DocumentSelector): string { + return documents._documentHandle(selector, this._name); } async documentExists( - selector: DocumentSelector, - options: DocumentExistsOptions = {} + selector: documents.DocumentSelector, + options: documents.DocumentExistsOptions = {} ): Promise { const { ifMatch = undefined, ifNoneMatch = undefined } = options; const headers = {} as Record; @@ -2981,13 +2474,13 @@ export class Collection< { method: "HEAD", path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + documents._documentHandle(selector, this._name) )}`, headers, }, (res) => { if (ifNoneMatch && res.status === 304) { - throw new HttpError(res); + throw new errors.HttpError(res); } return true; } @@ -3001,8 +2494,8 @@ export class Collection< } documents( - selectors: (string | ObjectWithKey)[], - options: CollectionBatchReadOptions = {} + selectors: (string | documents.ObjectWithDocumentKey)[], + options: documents.BulkReadDocumentsOptions = {} ) { const { allowDirtyRead = undefined } = options; return this._db.request({ @@ -3015,8 +2508,8 @@ export class Collection< } async document( - selector: DocumentSelector, - options: boolean | CollectionReadOptions = {} + selector: documents.DocumentSelector, + options: boolean | documents.ReadDocumentOptions = {} ) { if (typeof options === "boolean") { options = { graceful: options }; @@ -3033,14 +2526,14 @@ export class Collection< const result = this._db.request( { path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + documents._documentHandle(selector, this._name) )}`, headers, allowDirtyRead, }, (res) => { if (ifNoneMatch && res.status === 304) { - throw new HttpError(res); + throw new errors.HttpError(res); } return res.parsedBody; } @@ -3049,14 +2542,14 @@ export class Collection< try { return await result; } catch (err: any) { - if (isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { return null; } throw err; } } - save(data: DocumentData, options?: CollectionInsertOptions) { + save(data: documents.DocumentData, options?: documents.InsertDocumentOptions) { return this._db.request( { method: "POST", @@ -3069,8 +2562,8 @@ export class Collection< } saveAll( - data: Array>, - options?: CollectionInsertOptions + data: Array>, + options?: documents.InsertDocumentOptions ) { return this._db.request( { @@ -3084,9 +2577,9 @@ export class Collection< } replace( - selector: DocumentSelector, - newData: DocumentData, - options: CollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newData: documents.DocumentData, + options: documents.ReplaceDocumentOptions = {} ) { const { ifMatch = 
undefined, ...opts } = options; const headers = {} as Record; @@ -3095,7 +2588,7 @@ export class Collection< { method: "PUT", path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + documents._documentHandle(selector, this._name) )}`, headers, body: newData, @@ -3107,9 +2600,9 @@ export class Collection< replaceAll( newData: Array< - DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & ({ _key: string } | { _id: string }) >, - options?: CollectionReplaceOptions + options?: documents.ReplaceDocumentOptions ) { return this._db.request( { @@ -3123,9 +2616,9 @@ export class Collection< } update( - selector: DocumentSelector, - newData: Patch>, - options: CollectionUpdateOptions = {} + selector: documents.DocumentSelector, + newData: documents.Patch>, + options: documents.UpdateDocumentOptions = {} ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; @@ -3134,7 +2627,7 @@ export class Collection< { method: "PATCH", path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + documents._documentHandle(selector, this._name) )}`, headers, body: newData, @@ -3146,9 +2639,9 @@ export class Collection< updateAll( newData: Array< - Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & ({ _key: string } | { _id: string }) >, - options?: CollectionUpdateOptions + options?: documents.UpdateDocumentOptions ) { return this._db.request( { @@ -3161,7 +2654,7 @@ export class Collection< ); } - remove(selector: DocumentSelector, options: CollectionRemoveOptions = {}) { + remove(selector: documents.DocumentSelector, options: documents.RemoveDocumentOptions = {}) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; if (ifMatch) headers["if-match"] = ifMatch; @@ -3169,7 +2662,7 @@ export class Collection< { method: "DELETE", path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + documents._documentHandle(selector, this._name) )}`, headers, search: opts, @@ -3179,8 +2672,8 @@ export class Collection< } removeAll( - selectors: (string | ObjectWithKey)[], - options?: CollectionRemoveOptions + selectors: (string | documents.ObjectWithDocumentKey)[], + options?: documents.RemoveDocumentOptions ) { return this._db.request( { @@ -3195,10 +2688,10 @@ export class Collection< import( data: Buffer | Blob | string | any[], - options: CollectionImportOptions & { + options: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; } = {} - ): Promise { + ): Promise { const search = { ...options, collection: this._name }; if (Array.isArray(data)) { search.type = Array.isArray(data[0]) ? 
undefined : "documents"; @@ -3215,10 +2708,10 @@ export class Collection< } //#endregion - //#region edges + //#region Edge operations protected _edges( - selector: DocumentSelector, - options: CollectionEdgesOptions = {}, + selector: documents.DocumentSelector, + options: documents.DocumentEdgesOptions = {}, direction?: "in" | "out" ) { const { allowDirtyRead = undefined } = options; @@ -3227,26 +2720,38 @@ export class Collection< allowDirtyRead, search: { direction, - vertex: _documentHandle(selector, this._name, false), + vertex: documents._documentHandle(selector, this._name, false), }, }); } - edges(vertex: DocumentSelector, options?: CollectionEdgesOptions) { + edges(vertex: documents.DocumentSelector, options?: documents.DocumentEdgesOptions) { return this._edges(vertex, options); } - inEdges(vertex: DocumentSelector, options?: CollectionEdgesOptions) { + inEdges(vertex: documents.DocumentSelector, options?: documents.DocumentEdgesOptions) { return this._edges(vertex, options, "in"); } - outEdges(vertex: DocumentSelector, options?: CollectionEdgesOptions) { + outEdges(vertex: documents.DocumentSelector, options?: documents.DocumentEdgesOptions) { return this._edges(vertex, options, "out"); } //#endregion - //#region indexes - indexes(options?: IndexListOptions) { + //#region Index operations + async loadIndexes(): Promise { + return this._db.request( + { + method: "PUT", + path: `/_api/collection/${encodeURIComponent( + this._name + )}/loadIndexesIntoMemory`, + }, + (res) => res.parsedBody.result + ); + } + + indexes(options?: indexes.ListIndexesOptions) { return this._db.request( { path: "/_api/index", @@ -3256,20 +2761,13 @@ export class Collection< ); } - index(selector: IndexSelector) { + index(selector: indexes.IndexSelector) { return this._db.request({ - path: `/_api/index/${encodeURI(_indexHandle(selector, this._name))}`, + path: `/_api/index/${encodeURI(indexes._indexHandle(selector, this._name))}`, }); } - ensureIndex( - options: - | EnsurePersistentIndexOptions - | EnsureGeoIndexOptions - | EnsureTtlIndexOptions - | EnsureMdiIndexOptions - | EnsureInvertedIndexOptions - ) { + ensureIndex(options: indexes.EnsureIndexOptions) { return this._db.request({ method: "POST", path: "/_api/index", @@ -3278,21 +2776,12 @@ export class Collection< }); } - dropIndex(selector: IndexSelector) { + dropIndex(selector: indexes.IndexSelector) { return this._db.request({ method: "DELETE", - path: `/_api/index/${encodeURI(_indexHandle(selector, this._name))}`, + path: `/_api/index/${encodeURI(indexes._indexHandle(selector, this._name))}`, }); } - - compact() { - return this._db.request( - { - method: "PUT", - path: `/_api/collection/${this._name}/compact`, - }, - (res) => res.parsedBody - ); - } //#endregion } +//#endregion \ No newline at end of file diff --git a/src/connection.ts b/src/connection.ts index 645830acd..48efe1053 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -1,6 +1,6 @@ /** * ```ts - * import type { Config } from "arangojs/connection.js"; + * import type { Config } from "arangojs/connection"; * ``` * * The "connection" module provides connection and configuration related types @@ -8,20 +8,11 @@ * * @packageDocumentation */ +import * as administration from "./administration.js"; +import * as databases from "./databases.js"; +import * as errors from "./errors.js"; import { LinkedList } from "./lib/linkedList.js"; -import { Database } from "./database.js"; -import { - ArangoError, - HttpError, - NetworkError, - PropagationTimeoutError, - isArangoError, - 
isArangoErrorResponse, - isNetworkError, -} from "./error.js"; -import { - ERROR_ARANGO_CONFLICT, -} from "./lib/codes.js"; +import { ERROR_ARANGO_CONFLICT } from "./lib/codes.js"; import { normalizeUrl } from "./lib/normalizeUrl.js"; import { createRequest, @@ -233,7 +224,7 @@ export type Config = { /** * Base URL of the ArangoDB server or list of server URLs. * - * When working with a cluster, the method {@link database.Database#acquireHostList} + * When working with a cluster, the method {@link databases.Database#acquireHostList} * can be used to automatically pick up additional coordinators/followers at * any point. * @@ -263,8 +254,8 @@ export type Config = { /** * Credentials to use for authentication. * - * See also {@link database.Database#useBasicAuth} and - * {@link database.Database#useBearerAuth}. + * See also {@link databases.Database#useBasicAuth} and + * {@link databases.Database#useBearerAuth}. * * Default: `{ username: "root", password: "" }` */ @@ -374,7 +365,7 @@ export type Config = { * @param err - Error encountered when handling this request or `null`. * @param res - Response object for this request, if no error occurred. */ - afterResponse?: (err: NetworkError | null, res?: globalThis.Response & { request: globalThis.Request }) => void | Promise; + afterResponse?: (err: errors.NetworkError | null, res?: globalThis.Response & { request: globalThis.Request }) => void | Promise; /** * Callback that will be invoked when a request * @@ -392,7 +383,7 @@ export type Config = { * An object with additional headers to send with every request. * * If an `"authorization"` header is provided, it will be overridden when - * using {@link database.Database#useBasicAuth}, {@link database.Database#useBearerAuth} or + * using {@link databases.Database#useBasicAuth}, {@link databases.Database#useBearerAuth} or * the `auth` configuration option. */ headers?: Headers | Record; @@ -407,7 +398,7 @@ export type Config = { precaptureStackTraces?: boolean; /** * Limits the number of values of server-reported response queue times that - * will be stored and accessible using {@link database.Database#queueTime}. If set to + * will be stored and accessible using {@link databases.Database#queueTime}. If set to * a finite value, older values will be discarded to make room for new values * when that limit is reached. 
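+   *
+   * As a sketch of typical usage, the most recently collected value can then
+   * be read back via `db.queueTime.getLatest()`.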
* @@ -442,7 +433,7 @@ export class Connection { protected _requestConfig: RequestConfig; protected _retryOnConflict: number; protected _queue = new LinkedList(); - protected _databases = new Map(); + protected _databases = new Map(); protected _hosts: RequestFunction[] = []; protected _hostUrls: string[] = []; protected _activeHostUrl: string; @@ -530,7 +521,7 @@ export class Connection { return true; } - get queueTime() { + get queueTime(): administration.QueueTimeMetrics { return { getLatest: () => this._queueTimes.last?.value[1], getValues: () => Array.from(this._queueTimes.values()), @@ -595,12 +586,12 @@ export class Connection { } catch { // noop } - if (isArangoErrorResponse(errorBody)) { + if (errors.isArangoErrorResponse(errorBody)) { res.parsedBody = errorBody; - throw ArangoError.from(res); + throw errors.ArangoError.from(res); } } - throw new HttpError(res); + throw new errors.HttpError(res); } if (res.body) { if (task.options.expectBinary) { @@ -626,7 +617,7 @@ export class Connection { this._activeHostUrl = this._hostUrls[i % this._hostUrls.length]; } if ( - isArangoError(err) && + errors.isArangoError(err) && err.errorNum === ERROR_ARANGO_CONFLICT && task.retryOnConflict > 0 ) { @@ -635,7 +626,7 @@ export class Connection { return; } if ( - (isNetworkError(err) || isArangoError(err)) && + (errors.isNetworkError(err) || errors.isArangoError(err)) && err.isSafeToRetry && task.hostUrl === undefined && this._maxRetries !== false && @@ -689,26 +680,26 @@ export class Connection { /** * @internal * - * Fetches a {@link database.Database} instance for the given database name from the + * Fetches a {@link databases.Database} instance for the given database name from the * internal cache, if available. * * @param databaseName - Name of the database. */ - database(databaseName: string): Database | undefined; + database(databaseName: string): databases.Database | undefined; /** * @internal * - * Adds a {@link database.Database} instance for the given database name to the + * Adds a {@link databases.Database} instance for the given database name to the * internal cache. * * @param databaseName - Name of the database. * @param database - Database instance to add to the cache. */ - database(databaseName: string, database: Database): Database; + database(databaseName: string, database: databases.Database): databases.Database; /** * @internal * - * Clears any {@link database.Database} instance stored for the given database name + * Clears any {@link databases.Database} instance stored for the given database name * from the internal cache, if present. * * @param databaseName - Name of the database. @@ -717,8 +708,8 @@ export class Connection { database(databaseName: string, database: null): undefined; database( databaseName: string, - database?: Database | null - ): Database | undefined { + database?: databases.Database | null + ): databases.Database | undefined { if (database === null) { this._databases.delete(databaseName); return undefined; @@ -834,7 +825,7 @@ export class Connection { * * Closes all open connections. * - * See {@link database.Database#close}. + * See {@link databases.Database#close}. */ close() { for (const host of this._hosts) { @@ -847,7 +838,7 @@ export class Connection { * * Waits for propagation. * - * See {@link database.Database#waitForPropagation}. + * See {@link databases.Database#waitForPropagation}. * * @param request - Request to perform against each coordinator. * @param timeout - Maximum number of milliseconds to wait for propagation. 
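+   *
+   * @example
+   * ```js
+   * // A sketch of the public method backed by this helper; the analyzer
+   * // path shown here is illustrative only.
+   * const db = new Database();
+   * await db.waitForPropagation(
+   *   { path: "/_api/analyzer/some-analyzer" },
+   *   30000
+   * );
+   * ```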
@@ -874,7 +865,7 @@ export class Connection { }); } catch (e) { if (endOfTime < Date.now()) { - throw new PropagationTimeoutError( + throw new errors.PropagationTimeoutError( undefined, { cause: e as Error } ); diff --git a/src/cursor.ts b/src/cursors.ts similarity index 90% rename from src/cursor.ts rename to src/cursors.ts index 808bb8237..90ae6e587 100644 --- a/src/cursor.ts +++ b/src/cursors.ts @@ -1,19 +1,21 @@ /** * ```ts - * import type { ArrayCursor, BatchedArrayCursor } from "arangojs/cursor.js"; + * import type { Cursor, BatchCursor } from "arangojs/cursors"; * ``` * - * The "cursor" module provides cursor-related interfaces for TypeScript. + * The "cursors" module provides cursor-related types and interfaces for + * TypeScript. * * @packageDocumentation */ import { LinkedList } from "./lib/linkedList.js"; -import { Database } from "./database.js"; +import * as databases from "./databases.js"; +//#region Cursor properties /** * Additional information about the cursor. */ -export interface CursorExtras { +export type CursorExtras = { /** * Warnings encountered while executing the query. */ @@ -33,12 +35,12 @@ export interface CursorExtras { * Additional statistics about the query execution. */ stats?: CursorStats; -} +}; /** * Additional statics about the query execution of the cursor. */ -export interface CursorStats { +export type CursorStats = { /** * Total number of index entries read from in-memory caches for indexes of * type edge or persistent. @@ -115,27 +117,31 @@ export interface CursorStats { */ runtime: number; }[]; -} +}; -interface BatchView { - isEmpty: boolean; +/** + * A low-level interface for consuming the items of a {@link BatchCursor}. + */ +export interface BatchCursorItemsView { + readonly isEmpty: boolean; more(): Promise; - shift(): T | undefined; + shift(): ItemType | undefined; } +//#endregion /** - * The `BatchedArrayCursor` provides a batch-wise API to an {@link ArrayCursor}. + * The `BatchCursor` provides a batch-wise API to an {@link Cursor}. * * When using TypeScript, cursors can be cast to a specific item type in order * to increase type safety. * - * @param T - Type to use for each item. Defaults to `any`. + * @param ItemType - Type to use for each item. Defaults to `any`. 
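A short usage sketch for the renamed cursor types: casting the query result to `Cursor<ItemType>` gives type-safe item-wise iteration. The import path follows the module doc comment above.

```ts
import { aql, Database } from "arangojs";
import type { Cursor } from "arangojs/cursors";

const db = new Database();

// Cast the untyped cursor to a typed one for type-safe iteration
const cursor = (await db.query(
  aql`FOR x IN 1..5 RETURN x * 2`
)) as Cursor<number>;

for await (const value of cursor) {
  console.log(value); // value is inferred as number
}
```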
* * @example * ```ts * const db = new Database(); * const query = aql`FOR x IN 1..5 RETURN x`; - * const cursor = await db.query(query) as ArrayCursor; + * const cursor = await db.query(query) as Cursor; * const batches = cursor.batches; * ``` * @@ -152,9 +158,9 @@ interface BatchView { * } * ``` */ -export class BatchedArrayCursor { - protected _db: Database; - protected _batches: LinkedList>; +export class BatchCursor { + protected _db: databases.Database; + protected _batches: LinkedList>; protected _count?: number; protected _extra: CursorExtras; protected _hasMore: boolean; @@ -162,16 +168,16 @@ export class BatchedArrayCursor { protected _id: string | undefined; protected _hostUrl?: string; protected _allowDirtyRead?: boolean; - protected _itemsCursor: ArrayCursor; + protected _itemsCursor: Cursor; /** * @internal */ constructor( - db: Database, + db: databases.Database, body: { - extra: any; - result: T[]; + extra: CursorExtras; + result: ItemType[]; hasMore: boolean; nextBatchId?: string; id: string; @@ -192,23 +198,7 @@ export class BatchedArrayCursor { this._count = body.count; this._extra = body.extra; this._allowDirtyRead = allowDirtyRead; - this._itemsCursor = new ArrayCursor(this, { - get isEmpty() { - return !batches.length; - }, - more: () => this._more(), - shift: () => { - let batch = batches.first?.value; - while (batch && !batch.length) { - batches.shift(); - batch = batches.first?.value; - } - if (!batch) return undefined; - const value = batch.shift(); - if (!batch.length) batches.shift(); - return value; - }, - }); + this._itemsCursor = new Cursor(this, this.itemsView); } protected async _more(): Promise { @@ -241,14 +231,38 @@ export class BatchedArrayCursor { } /** - * An {@link ArrayCursor} providing item-wise access to the cursor result set. + * An {@link Cursor} providing item-wise access to the cursor result set. * - * See also {@link ArrayCursor#batches}. + * See also {@link Cursor#batches}. */ get items() { return this._itemsCursor; } + /** + * A low-level interface for consuming the items of this {@link BatchCursor}. + */ + get itemsView(): BatchCursorItemsView { + const batches = this._batches; + return { + get isEmpty() { + return !batches.length; + }, + more: () => this._more(), + shift: () => { + let batch = batches.first?.value; + while (batch && !batch.length) { + batches.shift(); + batch = batches.first?.value; + } + if (!batch) return undefined; + const value = batch.shift(); + if (!batch.length) batches.shift(); + return value; + }, + }; + } + /** * Additional information about the cursor. 
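For comparison, the same result set can be consumed batch-wise through the `batches` accessor described above; `batchSize` controls how many items each network roundtrip transfers.

```ts
import { aql, Database } from "arangojs";
import type { BatchCursor } from "arangojs/cursors";

const db = new Database();

const cursor = await db.query(aql`FOR x IN 1..1000 RETURN x`, {
  batchSize: 100, // fetch up to 100 items per network roundtrip
});

// Batch-wise access through the BatchCursor
const batches: BatchCursor<number> = cursor.batches;
while (batches.hasNext) {
  const batch = await batches.next(); // an array of up to 100 items
  console.log(batch?.length);
}
```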
*/ @@ -305,9 +319,9 @@ export class BatchedArrayCursor { * } * ``` */ - async *[Symbol.asyncIterator](): AsyncGenerator { + async *[Symbol.asyncIterator](): AsyncGenerator { while (this.hasNext) { - yield this.next() as Promise; + yield this.next() as Promise; } return undefined; } @@ -354,7 +368,7 @@ export class BatchedArrayCursor { * console.log(cursor.hasNext); // false * ``` */ - async all(): Promise { + async all(): Promise { return this.map((batch) => batch); } @@ -381,7 +395,7 @@ export class BatchedArrayCursor { * console.log(cursor.hasNext); // false * ``` */ - async next(): Promise { + async next(): Promise { while (!this._batches.length && this.hasNext) { await this._more(); } @@ -443,7 +457,7 @@ export class BatchedArrayCursor { * ``` */ async forEach( - callback: (currentBatch: T[], index: number, self: this) => false | void + callback: (currentBatch: ItemType[], index: number, self: this) => false | void ): Promise { let index = 0; while (this.hasNext) { @@ -463,8 +477,8 @@ export class BatchedArrayCursor { * * **Note**: This creates an array of all return values, which may impact * memory use when working with very large query result sets. Consider using - * {@link BatchedArrayCursor#forEach}, {@link BatchedArrayCursor#reduce} or - * {@link BatchedArrayCursor#flatMap} instead. + * {@link BatchCursor#forEach}, {@link BatchCursor#reduce} or + * {@link BatchCursor#flatMap} instead. * * See also: * [`Array.prototype.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map). @@ -486,7 +500,7 @@ export class BatchedArrayCursor { * ``` */ async map( - callback: (currentBatch: T[], index: number, self: this) => R + callback: (currentBatch: ItemType[], index: number, self: this) => R ): Promise { let index = 0; const result: any[] = []; @@ -543,7 +557,7 @@ export class BatchedArrayCursor { * ``` */ async flatMap( - callback: (currentBatch: T[], index: number, self: this) => R | R[] + callback: (currentBatch: ItemType[], index: number, self: this) => R | R[] ): Promise { let index = 0; const result: any[] = []; @@ -566,7 +580,7 @@ export class BatchedArrayCursor { * for the last batch. * * **Note**: Most complex uses of the `reduce` method can be replaced with - * simpler code using {@link BatchedArrayCursor#forEach} or the `for await` + * simpler code using {@link BatchCursor#forEach} or the `for await` * syntax. * * **Note**: If the result set spans multiple batches, any remaining batches @@ -632,7 +646,7 @@ export class BatchedArrayCursor { async reduce( reducer: ( accumulator: R, - currentBatch: T[], + currentBatch: ItemType[], index: number, self: this ) => R, @@ -672,8 +686,8 @@ export class BatchedArrayCursor { */ async reduce( reducer: ( - accumulator: T[] | R, - currentBatch: T[], + accumulator: ItemType[] | R, + currentBatch: ItemType[], index: number, self: this ) => R @@ -681,7 +695,7 @@ export class BatchedArrayCursor { async reduce( reducer: ( accumulator: R, - currentBatch: T[], + currentBatch: ItemType[], index: number, self: this ) => R, @@ -743,21 +757,21 @@ export class BatchedArrayCursor { } /** - * The `ArrayCursor` type represents a cursor returned from a - * {@link database.Database#query}. + * The `Cursor` type represents a cursor returned from a + * {@link databases.Database#query}. * * When using TypeScript, cursors can be cast to a specific item type in order * to increase type safety. * - * See also {@link BatchedArrayCursor}. + * See also {@link BatchCursor}. * - * @param T - Type to use for each item. 
Defaults to `any`. + * @param ItemType - Type to use for each item. Defaults to `any`. * * @example * ```ts * const db = new Database(); * const query = aql`FOR x IN 1..5 RETURN x`; - * const result = await db.query(query) as ArrayCursor; + * const result = await db.query(query) as Cursor; * ``` * * @example @@ -771,14 +785,14 @@ export class BatchedArrayCursor { * } * ``` */ -export class ArrayCursor { - protected _batches: BatchedArrayCursor; - protected _view: BatchView; +export class Cursor { + protected _batches: BatchCursor; + protected _view: BatchCursorItemsView; /** * @internal */ - constructor(batchedCursor: BatchedArrayCursor, view: BatchView) { + constructor(batchedCursor: BatchCursor, view: BatchCursorItemsView) { this._batches = batchedCursor; this._view = view; } @@ -798,10 +812,10 @@ export class ArrayCursor { } /** - * A {@link BatchedArrayCursor} providing batch-wise access to the cursor + * A {@link BatchCursor} providing batch-wise access to the cursor * result set. * - * See also {@link BatchedArrayCursor#items}. + * See also {@link BatchCursor#items}. */ get batches() { return this._batches; @@ -851,9 +865,9 @@ export class ArrayCursor { * } * ``` */ - async *[Symbol.asyncIterator](): AsyncGenerator { + async *[Symbol.asyncIterator](): AsyncGenerator { while (this.hasNext) { - yield this.next() as Promise; + yield this.next() as Promise; } return undefined; } @@ -869,7 +883,7 @@ export class ArrayCursor { * console.log(cursor.hasNext); // false * ``` */ - async all(): Promise { + async all(): Promise { return this.batches.flatMap((v) => v); } @@ -891,7 +905,7 @@ export class ArrayCursor { * const empty = await cursor.next(); // undefined * ``` */ - async next(): Promise { + async next(): Promise { while (this._view.isEmpty && this.batches.hasMore) { await this._view.more(); } @@ -939,7 +953,7 @@ export class ArrayCursor { * ``` */ async forEach( - callback: (currentValue: T, index: number, self: this) => false | void + callback: (currentValue: ItemType, index: number, self: this) => false | void ): Promise { let index = 0; while (this.hasNext) { @@ -958,8 +972,8 @@ export class ArrayCursor { * * **Note**: This creates an array of all return values, which may impact * memory use when working with very large query result sets. Consider using - * {@link ArrayCursor#forEach}, {@link ArrayCursor#reduce} or - * {@link ArrayCursor#flatMap} instead. + * {@link Cursor#forEach}, {@link Cursor#reduce} or + * {@link Cursor#flatMap} instead. * * See also: * [`Array.prototype.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map). @@ -978,7 +992,7 @@ export class ArrayCursor { * ``` */ async map( - callback: (currentValue: T, index: number, self: this) => R + callback: (currentValue: ItemType, index: number, self: this) => R ): Promise { let index = 0; const result: any[] = []; @@ -1029,7 +1043,7 @@ export class ArrayCursor { * ``` */ async flatMap( - callback: (currentValue: T, index: number, self: this) => R | R[] + callback: (currentValue: ItemType, index: number, self: this) => R | R[] ): Promise { let index = 0; const result: any[] = []; @@ -1052,7 +1066,7 @@ export class ArrayCursor { * for the last item. * * **Note**: Most complex uses of the `reduce` method can be replaced with - * simpler code using {@link ArrayCursor#forEach} or the `for await` syntax. + * simpler code using {@link Cursor#forEach} or the `for await` syntax. 
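The item-wise helpers mirror their `Array.prototype` counterparts. A brief sketch; note that a cursor is exhausted once consumed, so the second computation runs its own query.

```ts
import { aql, Database } from "arangojs";
import type { Cursor } from "arangojs/cursors";

const db = new Database();

// map: collects one return value per item (loads everything into memory)
const cursor1 = (await db.query(aql`FOR x IN 1..10 RETURN x`)) as Cursor<number>;
const doubled = await cursor1.map((value) => value * 2);

// reduce: folds the items without materializing them all at once
const cursor2 = (await db.query(aql`FOR x IN 1..10 RETURN x`)) as Cursor<number>;
const sum = await cursor2.reduce((acc, value) => acc + value, 0);

console.log(doubled.length, sum); // 10 55
```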
* * **Note**: If the result set spans multiple batches, any remaining batches * will only be fetched on demand. Depending on the cursor's TTL and the @@ -1109,7 +1123,7 @@ export class ArrayCursor { * ``` */ async reduce( - reducer: (accumulator: R, currentValue: T, index: number, self: this) => R, + reducer: (accumulator: R, currentValue: ItemType, index: number, self: this) => R, initialValue: R ): Promise; /** @@ -1143,14 +1157,14 @@ export class ArrayCursor { */ async reduce( reducer: ( - accumulator: T | R, - currentValue: T, + accumulator: ItemType | R, + currentValue: ItemType, index: number, self: this ) => R ): Promise; async reduce( - reducer: (accumulator: R, currentValue: T, index: number, self: this) => R, + reducer: (accumulator: R, currentValue: ItemType, index: number, self: this) => R, initialValue?: R ): Promise { let index = 0; diff --git a/src/database.ts b/src/databases.ts similarity index 65% rename from src/database.ts rename to src/databases.ts index d53f48623..9a9bad0be 100644 --- a/src/database.ts +++ b/src/databases.ts @@ -1,2058 +1,107 @@ /** * ```js - * import { Database } from "arangojs/database.js"; + * import { Database } from "arangojs/databases"; * ``` * - * The "database" module provides the {@link Database} class and associated + * The "databases" module provides the {@link Database} class and associated * types and interfaces for TypeScript. * * The Database class is also re-exported by the "index" module. * * @packageDocumentation */ -import { - Analyzer, - AnalyzerDescription, - CreateAnalyzerOptions, -} from "./analyzer.js"; -import { AqlLiteral, AqlQuery, isAqlLiteral, isAqlQuery } from "./aql.js"; -import { - ArangoCollection, - Collection, - CollectionMetadata, - collectionToString, - CollectionType, - CreateCollectionOptions, - DocumentCollection, - EdgeCollection, - isArangoCollection, -} from "./collection.js"; -import { - ArangoApiResponse, - ProcessedResponse, - Config, - Connection, - RequestOptions, -} from "./connection.js"; -import { ArrayCursor, BatchedArrayCursor } from "./cursor.js"; -import { HttpError, isArangoError } from "./error.js"; -import { FoxxManifest } from "./foxx-manifest.js"; -import { - CreateGraphOptions, - EdgeDefinitionOptions, - Graph, - GraphInfo, -} from "./graph.js"; -import { Job } from "./job.js"; +import * as administration from "./administration.js"; +import * as analyzers from "./analyzers.js"; +import * as aql from "./aql.js"; +import * as cluster from "./cluster.js"; +import * as collections from "./collections.js"; +import * as connection from "./connection.js"; +import * as cursors from "./cursors.js"; +import * as errors from "./errors.js"; +import * as graphs from "./graphs.js"; +import * as hotBackups from "./hot-backups.js"; +import * as jobs from "./jobs.js"; +import * as logs from "./logs.js"; +import * as queries from "./queries.js"; +import * as routes from "./routes.js"; +import * as services from "./services.js"; +import * as transactions from "./transactions.js"; +import * as users from "./users.js"; +import * as views from "./views.js"; import { DATABASE_NOT_FOUND } from "./lib/codes.js"; -import { Route } from "./route.js"; -import { Transaction } from "./transaction.js"; -import { CreateViewOptions, View, ViewDescription } from "./view.js"; +//#region Database operation options /** - * Indicates whether the given value represents a {@link Database}. - * - * @param database - A value that might be a database. 
- */ -export function isArangoDatabase(database: any): database is Database { - return Boolean(database && database.isArangoDatabase); -} - -/** - * @internal - */ -function coerceTransactionCollections( - collections: - | (TransactionCollections & { allowImplicit?: boolean }) - | (string | ArangoCollection)[] - | string - | ArangoCollection -): CoercedTransactionCollections { - if (typeof collections === "string") { - return { write: [collections] }; - } - if (Array.isArray(collections)) { - return { write: collections.map(collectionToString) }; - } - if (isArangoCollection(collections)) { - return { write: collectionToString(collections) }; - } - const cols: CoercedTransactionCollections = {}; - if (collections) { - if (collections.allowImplicit !== undefined) { - cols.allowImplicit = collections.allowImplicit; - } - if (collections.read) { - cols.read = Array.isArray(collections.read) - ? collections.read.map(collectionToString) - : collectionToString(collections.read); - } - if (collections.write) { - cols.write = Array.isArray(collections.write) - ? collections.write.map(collectionToString) - : collectionToString(collections.write); - } - if (collections.exclusive) { - cols.exclusive = Array.isArray(collections.exclusive) - ? collections.exclusive.map(collectionToString) - : collectionToString(collections.exclusive); - } - } - return cols; -} - -/** - * @internal - */ -type CoercedTransactionCollections = { - allowImplicit?: boolean; - exclusive?: string | string[]; - write?: string | string[]; - read?: string | string[]; -}; - -/** - * Collections involved in a transaction. - */ -export type TransactionCollections = { - /** - * An array of collections or a single collection that will be read from or - * written to during the transaction with no other writes being able to run - * in parallel. - */ - exclusive?: (string | ArangoCollection)[] | string | ArangoCollection; - /** - * An array of collections or a single collection that will be read from or - * written to during the transaction. - */ - write?: (string | ArangoCollection)[] | string | ArangoCollection; - /** - * An array of collections or a single collection that will be read from - * during the transaction. - */ - read?: (string | ArangoCollection)[] | string | ArangoCollection; -}; - -/** - * Options for how the transaction should be performed. - */ -export type TransactionOptions = { - /** - * Whether the transaction may read from collections not specified for this - * transaction. If set to `false`, accessing any collections not specified - * will result in the transaction being aborted to avoid potential deadlocks. - * - * Default: `true`. - */ - allowImplicit?: boolean; - /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. - */ - allowDirtyRead?: boolean; - /** - * Determines whether to force the transaction to write all data to disk - * before returning. - */ - waitForSync?: boolean; - /** - * Determines how long the database will wait while attempting to gain locks - * on collections used by the transaction before timing out. - */ - lockTimeout?: number; - /** - * Determines the transaction size limit in bytes. 
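To illustrate how the `TransactionCollections` and `TransactionOptions` shapes above are consumed, here is a hedged sketch using `db.beginTransaction` as in current arangojs releases; the collection names are hypothetical.

```ts
import { Database } from "arangojs";

const db = new Database();

// Collections can be given as names, collection objects, or arrays thereof;
// a bare string or array is coerced to the `write` set.
const trx = await db.beginTransaction(
  { write: ["orders"], read: ["customers"] },
  { waitForSync: true, lockTimeout: 5 }
);

try {
  await trx.step(() =>
    db.collection("orders").save({ customer: "customers/123", total: 42 })
  );
  await trx.commit();
} catch (err) {
  await trx.abort();
  throw err;
}
```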
- */ - maxTransactionSize?: number; - /** - * If set to `true`, the fast lock round will be skipped, which makes each - * locking operation take longer but guarantees deterministic locking order - * and may avoid deadlocks when many concurrent transactions are queued and - * try to access the same collection with an exclusive lock. - */ - skipFastLockRound?: boolean; -}; - -/** - * Options for executing a query. - * - * See {@link Database#query}. - */ -export type QueryOptions = { - /** - * If set to `true`, the query will be executed with support for dirty reads - * enabled, permitting ArangoDB to return a potentially dirty or stale result - * and arangojs will load balance the request without distinguishing between - * leaders and followers. - * - * Note that dirty reads are only supported for read-only queries, not data - * modification queries (e.g. using `INSERT`, `UPDATE`, `REPLACE` or - * `REMOVE`) and only when using ArangoDB 3.4 or later. - * - * Default: `false` - */ - allowDirtyRead?: boolean; - /** - * If set to `true`, cursor results will be stored by ArangoDB in such a way - * that batch reads can be retried in the case of a communication error. - * - * Default: `false` - */ - allowRetry?: boolean; - /** - * Maximum time in milliseconds arangojs will wait for a server response. - * Exceeding this value will result in the request being cancelled. - * - * **Note**: Setting a timeout for the client does not guarantee the query - * will be killed by ArangoDB if it is already being executed. See the - * `maxRuntime` option for limiting the execution time within ArangoDB. - */ - timeout?: number; - /** - * If set to a positive number, the query will automatically be retried at - * most this many times if it results in a write-write conflict. - * - * Default: `0` - */ - retryOnConflict?: number; - /** - * Unless set to `false`, the number of result values in the result set will - * be returned in the `count` attribute. This may be disabled by default in - * a future version of ArangoDB if calculating this value has a performance - * impact for some queries. - * - * Default: `true`. - */ - count?: boolean; - /** - * Number of result values to be transferred by the server in each - * network roundtrip (or "batch"). - * - * Must be greater than zero. - */ - batchSize?: number; - /** - * If set to `false`, the AQL query results cache lookup will be skipped for - * this query. - * - * Default: `true` - */ - cache?: boolean; - /** - * Maximum memory size in bytes that the query is allowed to use. - * Exceeding this value will result in the query failing with an error. - * - * If set to `0`, the memory limit is disabled. - * - * Default: `0` - */ - memoryLimit?: number; - /** - * Maximum allowed execution time before the query will be killed in seconds. - * - * If set to `0`, the query will be allowed to run indefinitely. - * - * Default: `0` - */ - maxRuntime?: number; - /** - * Time-to-live for the cursor in seconds. The cursor results may be - * garbage collected by ArangoDB after this much time has passed. - * - * Default: `30` - */ - ttl?: number; - /** - * If set to `true`, the query will throw an exception and abort if it would - otherwise produce a warning. - */ - failOnWarning?: boolean; - /** - * If set to `1` or `true`, additional query profiling information will be - * returned in the `extra.profile` attribute if the query is not served from - * the result cache. 
- * - * If set to `2`, the query will return execution stats per query plan node - * in the `extra.stats.nodes` attribute. Additionally the query plan is - * returned in `extra.plan`. - */ - profile?: boolean | number; - /** - * If set to `true`, the query will be executed as a streaming query. - */ - stream?: boolean; - /** - * Limits the maximum number of warnings a query will return. - */ - maxWarningsCount?: number; - /** - * If set to `true` and the query has a `LIMIT` clause, the total number of - * values matched before the last top-level `LIMIT` in the query was applied - * will be returned in the `extra.stats.fullCount` attribute. - */ - fullCount?: boolean; - /** - * If set to `false`, the query data will not be stored in the RocksDB block - * cache. This can be used to avoid thrashing he block cache when reading a - * lot of data. - */ - fillBlockCache?: boolean; - /** - * An object with a `rules` property specifying a list of optimizer rules to - * be included or excluded by the optimizer for this query. Prefix a rule - * name with `+` to include it, or `-` to exclude it. The name `all` acts as - * an alias matching all optimizer rules. - */ - optimizer?: { rules: string[] }; - /** - * Limits the maximum number of plans that will be created by the AQL query - * optimizer. - */ - maxPlans?: number; - /** - * Controls after how many execution nodes in a query a stack split should be - * performed. - * - * Default: `250` (`200` on macOS) - */ - maxNodesPerCallstack?: number; - /** - * Maximum size of transactions in bytes. - */ - maxTransactionSize?: number; - /** - * Maximum number of operations after which an intermediate commit is - * automatically performed. - */ - intermediateCommitCount?: number; - /** - * Maximum total size of operations in bytes after which an intermediate - * commit is automatically performed. - */ - intermediateCommitSize?: number; - /** - * (Enterprise Edition cluster only.) If set to `true`, collections - * inaccessible to current user will result in an access error instead - * of being treated as empty. - */ - skipInaccessibleCollections?: boolean; - /** - * (Enterprise Edition cluster only.) Limits the maximum time in seconds a - * DBServer will wait to bring satellite collections involved in the query - * into sync. Exceeding this value will result in the query being stopped. - * - * Default: `60` - */ - satelliteSyncWait?: number; -}; - -/** - * Options for explaining a query. - * - * See {@link Database#explain}. - */ -export type ExplainOptions = { - /** - * An object with a `rules` property specifying a list of optimizer rules to - * be included or excluded by the optimizer for this query. Prefix a rule - * name with `+` to include it, or `-` to exclude it. The name `all` acts as - * an alias matching all optimizer rules. - */ - optimizer?: { rules: string[] }; - /** - * Maximum number of plans that the optimizer is allowed to generate. - * Setting this to a low value limits the amount of work the optimizer does. - */ - maxNumberOfPlans?: number; - /** - * If set to true, all possible execution plans will be returned as the - * `plans` property. Otherwise only the optimal execution plan will be - * returned as the `plan` property. - * - * Default: `false` - */ - allPlans?: boolean; -}; - -/** - * Details for a transaction. - * - * See also {@link transaction.TransactionStatus}. - */ -export type TransactionDetails = { - /** - * Unique identifier of the transaction. - */ - id: string; - /** - * Status (or "state") of the transaction. 
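A brief sketch passing several of the query options above to `db.query`; the collection name is hypothetical and which options are useful depends on the deployment.

```ts
import { aql, Database } from "arangojs";

const db = new Database();

const cursor = await db.query(
  aql`FOR doc IN events SORT doc.time DESC LIMIT 10 RETURN doc`,
  {
    count: true,        // report the number of results in cursor.count
    batchSize: 5,       // values transferred per network roundtrip
    fullCount: true,    // total matches before the final LIMIT
    ttl: 60,            // keep the cursor alive for 60 seconds
    retryOnConflict: 2, // retry up to twice on write-write conflicts
  }
);

console.log(cursor.count);
```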
- */ - state: "running" | "committed" | "aborted"; -}; - -/** - * Plan explaining query execution. - */ -export type ExplainPlan = { - /** - * Execution nodes in this plan. - */ - nodes: { - [key: string]: any; - type: string; - id: number; - dependencies: number[]; - estimatedCost: number; - estimatedNrItems: number; - }[]; - /** - * Rules applied by the optimizer. - */ - rules: string[]; - /** - * Information about collections involved in the query. - */ - collections: { - name: string; - type: "read" | "write"; - }[]; - /** - * Variables used in the query. - */ - variables: { - id: number; - name: string; - }[]; - /** - * Total estimated cost of the plan. - */ - estimatedCost: number; - /** - * Estimated number of items returned by the query. - */ - estimatedNrItems: number; - /** - * Whether the query is a data modification query. - */ - isModificationQuery: boolean; -}; - -/** - * Optimizer statistics for an explained query. - */ -export type ExplainStats = { - /** - * Total number of rules executed for this query. - */ - rulesExecuted: number; - /** - * Number of rules skipped for this query. - */ - rulesSkipped: number; - /** - * Total number of plans created. - */ - plansCreated: number; - /** - * Maximum memory usage in bytes of the query during explain. - */ - peakMemoryUsage: number; - /** - * Time in seconds needed to explain the query. - */ - executionTime: number; -}; - -/** - * Result of explaining a query with a single plan. - */ -export type SingleExplainResult = { - /** - * Query plan. - */ - plan: ExplainPlan; - /** - * Whether it would be possible to cache the query. - */ - cacheable: boolean; - /** - * Warnings encountered while planning the query execution. - */ - warnings: { code: number; message: string }[]; - /** - * Optimizer statistics for the explained query. - */ - stats: ExplainStats; -}; - -/** - * Result of explaining a query with multiple plans. - */ -export type MultiExplainResult = { - /** - * Query plans. - */ - plans: ExplainPlan[]; - /** - * Whether it would be possible to cache the query. - */ - cacheable: boolean; - /** - * Warnings encountered while planning the query execution. - */ - warnings: { code: number; message: string }[]; - /** - * Optimizer statistics for the explained query. - */ - stats: ExplainStats; -}; - -/** - * Node in an AQL abstract syntax tree (AST). - */ -export type AstNode = { - [key: string]: any; - type: string; - subNodes: AstNode[]; -}; - -/** - * Result of parsing a query. - */ -export type ParseResult = { - /** - * Whether the query was parsed. - */ - parsed: boolean; - /** - * Names of all collections involved in the query. - */ - collections: string[]; - /** - * Names of all bind parameters used in the query. - */ - bindVars: string[]; - /** - * Abstract syntax tree (AST) of the query. - */ - ast: AstNode[]; -}; - -/** - * Optimizer rule for AQL queries. - */ -export type QueryOptimizerRule = { - name: string; - flags: { - hidden: boolean; - clusterOnly: boolean; - canBeDisabled: boolean; - canCreateAdditionalPlans: boolean; - disabledByDefault: boolean; - enterpriseOnly: boolean; - }; -}; - -/** - * Information about query tracking. - */ -export type QueryTracking = { - /** - * Whether query tracking is enabled. - */ - enabled: boolean; - /** - * Maximum query string length in bytes that is kept in the list. - */ - maxQueryStringLength: number; - /** - * Maximum number of slow queries that is kept in the list. 
- */ - maxSlowQueries: number; - /** - * Threshold execution time in seconds for when a query is - * considered slow. - */ - slowQueryThreshold: number; - /** - * Whether bind parameters are being tracked along with queries. - */ - trackBindVars: boolean; - /** - * Whether slow queries are being tracked. - */ - trackSlowQueries: boolean; -}; - -/** - * Options for query tracking. - * - * See {@link Database#queryTracking}. - */ -export type QueryTrackingOptions = { - /** - * If set to `false`, neither queries nor slow queries will be tracked. - */ - enabled?: boolean; - /** - * Maximum query string length in bytes that will be kept in the list. - */ - maxQueryStringLength?: number; - /** - * Maximum number of slow queries to be kept in the list. - */ - maxSlowQueries?: number; - /** - * Threshold execution time in seconds for when a query will be - * considered slow. - */ - slowQueryThreshold?: number; - /** - * If set to `true`, bind parameters will be tracked along with queries. - */ - trackBindVars?: boolean; - /** - * If set to `true` and `enabled` is also set to `true`, slow queries will be - * tracked if their execution time exceeds `slowQueryThreshold`. - */ - trackSlowQueries?: boolean; -}; - -/** - * Object describing a query. - */ -export type QueryInfo = { - /** - * Unique identifier for this query. - */ - id: string; - /** - * Name of the database the query runs in. - */ - database: string; - /** - * Name of the user that started the query. - */ - user: string; - /** - * Query string (potentially truncated). - */ - query: string; - /** - * Bind parameters used in the query. - */ - bindVars: Record; - /** - * Date and time the query was started. - */ - started: string; - /** - * Query's running time in seconds. - */ - runTime: number; - /** - * Maximum memory usage in bytes of the query. - */ - peakMemoryUsage: number; - /** - * Query's current execution state. - */ - state: "executing" | "finished" | "killed"; - /** - * Whether the query uses a streaming cursor. - */ - stream: boolean; -}; - -/** - * Information about a cluster imbalance. - */ -export type ClusterImbalanceInfo = { - /** - * Information about the leader imbalance. - */ - leader: { - /** - * The weight of leader shards per DB-Server. A leader has a weight of 1 by default but it is higher if collections can only be moved together because of `distributeShardsLike`. - */ - weightUsed: number[]; - /** - * The ideal weight of leader shards per DB-Server. - */ - targetWeight: number[]; - /** - * The number of leader shards per DB-Server. - */ - numberShards: number[]; - /** - * The measure of the leader shard distribution. The higher the number, the worse the distribution. - */ - leaderDupl: number[]; - /** - * The sum of all weights. - */ - totalWeight: number; - /** - * The measure of the total imbalance. A high value indicates a high imbalance. - */ - imbalance: number; - /** - * The sum of shards, counting leader shards only. - */ - totalShards: number; - }; - /** - * Information about the shard imbalance. - */ - shards: { - /** - * The size of shards per DB-Server. - */ - sizeUsed: number[]; - /** - * The ideal size of shards per DB-Server. - */ - targetSize: number[]; - /** - * The number of leader and follower shards per DB-Server. - */ - numberShards: number[]; - /** - * The sum of the sizes. - */ - totalUsed: number; - /** - * The sum of shards, counting leader and follower shards. - */ - totalShards: number; - /** - * The sum of system collection shards, counting leader shards only. 
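A sketch of reading and adjusting the tracking settings above via `db.queryTracking` (referenced in the doc comments) and listing running queries; `listRunningQueries` is the method current arangojs releases use to return `QueryInfo` objects, so treat its name as an assumption here.

```ts
import { Database } from "arangojs";

const db = new Database();

// Read the current tracking configuration
const tracking = await db.queryTracking();
console.log(tracking.enabled, tracking.slowQueryThreshold);

// Tighten the slow query threshold and track bind parameters
await db.queryTracking({
  slowQueryThreshold: 2, // seconds
  trackBindVars: true,
});

// Inspect currently running queries (QueryInfo objects)
const running = await db.listRunningQueries();
for (const query of running) {
  console.log(query.id, query.state, query.runTime);
}
```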
- */ - totalShardsFromSystemCollections: number; - /** - * The measure of the total imbalance. A high value indicates a high imbalance. - */ - imbalance: number; - }; -}; - -/** - * Information about the current state of the cluster imbalance. - */ -export type ClusterRebalanceState = ClusterImbalanceInfo & { - /** - * The number of pending move shard operations. - */ - pendingMoveShards: number; - /** - * The number of planned move shard operations. - */ - todoMoveShards: number; -}; - -/** - * Options for rebalancing the cluster. - */ -export type ClusterRebalanceOptions = { - /** - * Maximum number of moves to be computed. - * - * Default: `1000` - */ - maximumNumberOfMoves?: number; - /** - * Allow leader changes without moving data. - * - * Default: `true` - */ - leaderChanges?: boolean; - /** - * Allow moving leaders. - * - * Default: `false` - */ - moveLeaders?: boolean; - /** - * Allow moving followers. - * - * Default: `false` - */ - moveFollowers?: boolean; - /** - * Ignore system collections in the rebalance plan. - * - * Default: `false` - */ - excludeSystemCollections?: boolean; - /** - * Default: `256**6` - */ - piFactor?: number; - /** - * A list of database names to exclude from the analysis. - * - * Default: `[]` - */ - databasesExcluded?: string[]; -}; - -export type ClusterRebalanceMove = { - /** - * The server name from which to move. - */ - from: string; - /** - * The ID of the destination server. - */ - to: string; - /** - * Shard ID of the shard to be moved. - */ - shard: string; - /** - * Collection ID of the collection the shard belongs to. - */ - collection: number; - /** - * True if this is a leader move shard operation. - */ - isLeader: boolean; -}; - -export type ClusterRebalanceResult = { - /** - * Imbalance before the suggested move shard operations are applied. - */ - imbalanceBefore: ClusterImbalanceInfo; - /** - * Expected imbalance after the suggested move shard operations are applied. - */ - imbalanceAfter: ClusterImbalanceInfo; - /** - * Suggested move shard operations. - */ - moves: ClusterRebalanceMove[]; -}; - -/** - * Database user to create with a database. - */ -export type CreateDatabaseUser = { - /** - * Username of the user to create. - */ - username: string; - /** - * Password of the user to create. - * - * Default: `""` - */ - passwd?: string; - /** - * Whether the user is active. - * - * Default: `true` - */ - active?: boolean; - /** - * Additional data to store with the user object. - */ - extra?: Record; -}; - -/** - * Options for creating a database. - * - * See {@link Database#createDatabase}. - */ -export type CreateDatabaseOptions = { - /** - * Database users to create with the database. - */ - users?: CreateDatabaseUser[]; - /** - * (Cluster only.) The sharding method to use for new collections in the - * database. - */ - sharding?: "" | "flexible" | "single"; - /** - * (Cluster only.) Default replication factor for new collections in this - * database. - * - * Setting this to `1` disables replication. Setting this to `"satellite"` - * will replicate to every DBServer. - */ - replicationFactor?: "satellite" | number; - /** - * (Cluster only.) Default write concern for new collections created in this - * database. - */ - writeConcern?: number; -}; - -/** - * Object describing a database. - * - * See {@link Database#get}. - */ -export type DatabaseInfo = { - /** - * Name of the database. - */ - name: string; - /** - * Unique identifier of the database. - */ - id: string; - /** - * File system path of the database. 
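A sketch of `db.createDatabase` with the `CreateDatabaseUser` and `CreateDatabaseOptions` shapes above; the cluster-only options are ignored on a single server, and the names are placeholders.

```ts
import { Database } from "arangojs";

const db = new Database(); // connects to _system by default

const reportingDb = await db.createDatabase("reporting", {
  users: [{ username: "reporter", passwd: "secret", active: true }],
  // Cluster only: replicate new collections to two servers
  replicationFactor: 2,
  writeConcern: 1,
});

console.log(await reportingDb.get()); // description of the "reporting" database
```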
- */ - path: string; - /** - * Whether the database is the system database. - */ - isSystem: boolean; - /** - * (Cluster only.) The sharding method to use for new collections in the - * database. - */ - sharding?: "" | "flexible" | "single"; - /** - * (Cluster only.) Default replication factor for new collections in this - * database. - */ - replicationFactor?: "satellite" | number; - /** - * (Cluster only.) Default write concern for new collections created in this - * database. - */ - writeConcern?: number; -}; - -/** - * Result of retrieving database version information. - */ -export type VersionInfo = { - /** - * Value identifying the server type, i.e. `"arango"`. - */ - server: string; - /** - * ArangoDB license type or "edition". - */ - license: "community" | "enterprise"; - /** - * ArangoDB server version. - */ - version: string; - /** - * Additional information about the ArangoDB server. - */ - details?: { [key: string]: string }; -}; - -/** - * Information about the storage engine. - */ -export type EngineInfo = { - /** - * Endianness of the storage engine. - */ - endianness?: "little" | "big"; - /** - * Name of the storage engine. - */ - name: string; - /** - * Features supported by the storage engine. - */ - supports?: { - /** - * Index types supported by the storage engine. - */ - indexes?: string[]; - /** - * Aliases supported by the storage engine. - */ - aliases?: { - /** - * Index type aliases supported by the storage engine. - */ - indexes?: Record; - } - }; -}; - -/** - * Information about the server status. - */ -export type ServerStatusInformation = { - /** - * (Cluster Coordinators and DB-Servers only.) The address of the server. - */ - address?: string; - /** - * (Cluster Coordinators and DB-Servers only.) Information about the Agency. - */ - agency?: { - /** - * Information about the communication with the Agency. - */ - agencyComm: { - /** - * A list of possible Agency endpoints. - */ - endpoints: string[]; - }; - }; - /** - * (Cluster Agents only.) Information about the Agents. - */ - agent?: { - /** - * The endpoint of the queried Agent. - */ - endpoint: string; - /** - * Server ID of the queried Agent. - */ - id: string; - /** - * Server ID of the leading Agent. - */ - leaderId: string; - /** - * Whether the queried Agent is the leader. - */ - leading: boolean; - /** - * The current term number. - */ - term: number; - }; - /** - * (Cluster Coordinators only.) Information about the Coordinators. - */ - coordinator?: { - /** - * The server ID of the Coordinator that is the Foxx master. - */ - foxxmaster: string[]; - /** - * Whether the queried Coordinator is the Foxx master. - */ - isFoxxmaster: boolean[]; - }; - /** - * Whether the Foxx API is enabled. - */ - foxxApi: boolean; - /** - * A host identifier defined by the HOST or NODE_NAME environment variable, - * or a fallback value using a machine identifier or the cluster/Agency address. - */ - host: string; - /** - * A hostname defined by the HOSTNAME environment variable. - */ - hostname?: string; - /** - * ArangoDB Edition. - */ - license: "community" | "enterprise"; - /** - * Server operation mode. - * - * @deprecated use `operationMode` instead - */ - mode: "server" | "console"; - /** - * Server operation mode. - */ - operationMode: "server" | "console"; - /** - * The process ID of arangod. - */ - pid: number; - /** - * Server type. - */ - server: "arango"; - /** - * Information about the server status. - */ - serverInfo: { - /** - * Whether the maintenance mode is enabled. 
- */ - maintenance: boolean; - /** - * (Cluster only.) The persisted ID. - */ - persistedId?: string; - /** - * Startup and recovery information. - */ - progress: { - /** - * Internal name of the feature that is currently being prepared, started, stopped or unprepared. - */ - feature: string; - /** - * Name of the lifecycle phase the instance is currently in. - */ - phase: string; - /** - * Current recovery sequence number value. - */ - recoveryTick: number; - }; - /** - * Whether writes are disabled. - */ - readOnly: boolean; - /** - * (Cluster only.) The reboot ID. Changes on every restart. - */ - rebootId?: number; - /** - * Either "SINGLE", "COORDINATOR", "PRIMARY" (DB-Server), or "AGENT" - */ - role: "SINGLE" | "COORDINATOR" | "PRIMARY" | "AGENT"; - /** - * (Cluster Coordinators and DB-Servers only.) The server ID. - */ - serverId?: string; - /** - * (Cluster Coordinators and DB-Servers only.) Either "STARTUP", "SERVING", - * or "SHUTDOWN". - */ - state?: "STARTUP" | "SERVING" | "SHUTDOWN"; - /** - * The server version string. - */ - version: string; - /** - * Whether writes are enabled. - * - * @deprecated Use `readOnly` instead. - */ - writeOpsEnabled: boolean; - }; -}; - -/** - * Server availability. - * - * - `"default"`: The server is operational. - * - * - `"readonly"`: The server is in read-only mode. - * - * - `false`: The server is not available. - */ -export type ServerAvailability = "default" | "readonly" | false; - -/** - * Single server deployment information for support purposes. - */ -export type SingleServerSupportInfo = { - /** - * ISO 8601 datetime string of when the information was requested. - */ - date: string; - /** - * Information about the deployment. - */ - deployment: { - /** - * Deployment mode: - * - * - `"single"`: A single server deployment. - * - * - `"cluster"`: A cluster deployment. - */ - type: "single"; - }; -}; - -/** - * Cluster deployment information for support purposes. - */ -export type ClusterSupportInfo = { - /** - * ISO 8601 datetime string of when the information was requested. - */ - date: string; - /** - * Information about the deployment. - */ - deployment: { - /** - * Deployment mode: - * - * - `"single"`: A single server deployment. - * - * - `"cluster"`: A cluster deployment. - */ - type: "cluster"; - /** - * Information about the servers in the cluster. - */ - servers: Record>; - /** - * Number of agents in the cluster. - */ - agents: number; - /** - * Number of coordinators in the cluster. - */ - coordinators: number; - /** - * Number of DB-Servers in the cluster. - */ - dbServers: number; - /** - * Information about the shards in the cluster. - */ - shards: { - /** - * Number of collections in the cluster. - */ - collections: number; - /** - * Number of shards in the cluster. - */ - shards: number; - /** - * Number of leaders in the cluster. - */ - leaders: number; - /** - * Number of real leaders in the cluster. - */ - realLeaders: number; - /** - * Number of followers in the cluster. - */ - followers: number; - /** - * Number of servers in the cluster. - */ - servers: number; - } - }; - /** - * (Cluster only.) Information about the ArangoDB instance as well as the - * host machine. - */ - host: Record; -} -/** - * Definition of an AQL User Function. - */ -export type AqlUserFunction = { - /** - * Name of the AQL User Function. - */ - name: string; - /** - * Implementation of the AQL User Function. - */ - code: string; - /** - * Whether the function is deterministic. - * - * See {@link Database#createFunction}. 
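The `AqlUserFunction` shape above corresponds to functions managed via `db.createFunction` (referenced in the doc comment); a minimal sketch with a hypothetical namespace:

```ts
import { aql, Database } from "arangojs";

const db = new Database();

// Register a deterministic AQL user function under a custom namespace
await db.createFunction(
  "MYFUNCS::DOUBLE",
  "function (value) { return value * 2; }",
  true // isDeterministic
);

const cursor = await db.query(aql`RETURN MYFUNCS::DOUBLE(21)`);
console.log(await cursor.next()); // 42
```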
- */ - isDeterministic: boolean; -}; - -/** - * Options for installing the service. - * - * See {@link Database#installService}. - */ -export type InstallServiceOptions = { - /** - * An object mapping configuration option names to values. - * - * See also {@link Database#getServiceConfiguration}. - */ - configuration?: Record; - /** - * An object mapping dependency aliases to mount points. - * - * See also {@link Database#getServiceDependencies}. - */ - dependencies?: Record; - /** - * Whether the service should be installed in development mode. - * - * See also {@link Database#setServiceDevelopmentMode}. - * - * Default: `false` - */ - development?: boolean; - /** - * Whether the service should be installed in legacy compatibility mode - * - * This overrides the `engines` option in the service manifest (if any). - * - * Default: `false` - */ - legacy?: boolean; - /** - * Whether the "setup" script should be executed. - * - * Default: `true` - */ - setup?: boolean; -}; - -/** - * Options for replacing a service. - * - * See {@link Database#replaceService}. - */ -export type ReplaceServiceOptions = { - /** - * An object mapping configuration option names to values. - * - * See also {@link Database#getServiceConfiguration}. - */ - configuration?: Record; - /** - * An object mapping dependency aliases to mount points. - * - * See also {@link Database#getServiceDependencies}. - */ - dependencies?: Record; - /** - * Whether the service should be installed in development mode. - * - * See also {@link Database#setServiceDevelopmentMode}. - * - * Default: `false` - */ - development?: boolean; - /** - * Whether the service should be installed in legacy compatibility mode - * - * This overrides the `engines` option in the service manifest (if any). - * - * Default: `false` - */ - legacy?: boolean; - /** - * Whether the "setup" script should be executed. - * - * Default: `true` - */ - setup?: boolean; - /** - * Whether the existing service's "teardown" script should be executed - * prior to removing that service. - * - * Default: `true` - */ - teardown?: boolean; - /** - * If set to `true`, replacing a service that does not already exist will - * fall back to installing the new service. - * - * Default: `false` - */ - force?: boolean; -}; - -/** - * Options for upgrading a service. - * - * See {@link Database#upgradeService}. - */ -export type UpgradeServiceOptions = { - /** - * An object mapping configuration option names to values. - * - * See also {@link Database#getServiceConfiguration}. - */ - configuration?: Record; - /** - * An object mapping dependency aliases to mount points. - * - * See also {@link Database#getServiceDependencies}. - */ - dependencies?: Record; - /** - * Whether the service should be installed in development mode. - * - * See also {@link Database#setServiceDevelopmentMode}. - * - * Default: `false` - */ - development?: boolean; - /** - * Whether the service should be installed in legacy compatibility mode - * - * This overrides the `engines` option in the service manifest (if any). - * - * Default: `false` - */ - legacy?: boolean; - /** - * Whether the "setup" script should be executed. - * - * Default: `true` - */ - setup?: boolean; - /** - * Whether the existing service's "teardown" script should be executed - * prior to upgrading that service. - * - * Default: `false` - */ - teardown?: boolean; - /** - * Unless set to `true`, upgrading a service that does not already exist will - * fall back to installing the new service. 
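A hedged sketch of passing the `InstallServiceOptions` above to `db.installService`; the mount point and bundle path are hypothetical, and the accepted source types (buffer, blob or stream) may vary between driver versions.

```ts
import { Database } from "arangojs";
import { readFile } from "node:fs/promises";

const db = new Database();

// Load a Foxx service bundle and install it at the given mount point
const bundle = await readFile("./my-service.zip");
await db.installService("/my-service", new Blob([bundle]), {
  configuration: { greeting: "hello" },
  development: false,
  setup: true, // run the service's "setup" script
});
```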
- * - * Default: `false` - */ - force?: boolean; -}; - -/** - * Options for uninstalling a service. - * - * See {@link Database#uninstallService}. - */ -export type UninstallServiceOptions = { - /** - * Whether the service's "teardown" script should be executed - * prior to removing that service. - * - * Default: `true` - */ - teardown?: boolean; - /** - * If set to `true`, uninstalling a service that does not already exist - * will be considered successful. - * - * Default: `false` - */ - force?: boolean; -}; - -/** - * Object briefly describing a Foxx service. - */ -export type ServiceSummary = { - /** - * Service mount point, relative to the database. - */ - mount: string; - /** - * Name defined in the service manifest. - */ - name?: string; - /** - * Version defined in the service manifest. - */ - version?: string; - /** - * Service dependencies the service expects to be able to match as a mapping - * from dependency names to versions the service is compatible with. - */ - provides: Record; - /** - * Whether development mode is enabled for this service. - */ - development: boolean; - /** - * Whether the service is running in legacy compatibility mode. - */ - legacy: boolean; -}; - -/** - * Object describing a Foxx service in detail. - */ -export type ServiceInfo = { - /** - * Service mount point, relative to the database. - */ - mount: string; - /** - * File system path of the service. - */ - path: string; - /** - * Name defined in the service manifest. - */ - name?: string; - /** - * Version defined in the service manifest. - */ - version?: string; - /** - * Whether development mode is enabled for this service. - */ - development: boolean; - /** - * Whether the service is running in legacy compatibility mode. - */ - legacy: boolean; - /** - * Content of the service manifest of this service. - */ - manifest: FoxxManifest; - /** - * Internal checksum of the service's initial source bundle. - */ - checksum: string; - /** - * Options for this service. - */ - options: { - /** - * Configuration values set for this service. - */ - configuration: Record; - /** - * Service dependency configuration of this service. - */ - dependencies: Record; - }; -}; - -/** - * Object describing a configuration option of a Foxx service. - */ -export type ServiceConfiguration = { - /** - * Data type of the configuration value. - * - * **Note**: `"int"` and `"bool"` are historical synonyms for `"integer"` and - * `"boolean"`. The `"password"` type is synonymous with `"string"` but can - * be used to distinguish values which should not be displayed in plain text - * by software when managing the service. - */ - type: - | "integer" - | "boolean" - | "string" - | "number" - | "json" - | "password" - | "int" - | "bool"; - /** - * Current value of the configuration option as stored internally. - */ - currentRaw: any; - /** - * Processed current value of the configuration option as exposed in the - * service code. - */ - current: any; - /** - * Formatted name of the configuration option. - */ - title: string; - /** - * Human-readable description of the configuration option. - */ - description?: string; - /** - * Whether the configuration option must be set in order for the service - * to be operational. - */ - required: boolean; - /** - * Default value of the configuration option. - */ - default?: any; -}; - -/** - * Object describing a single-service dependency defined by a Foxx service. - */ -export type SingleServiceDependency = { - /** - * Whether this is a multi-service dependency. 
- */ - multiple: false; - /** - * Current mount point the dependency is resolved to. - */ - current?: string; - /** - * Formatted name of the dependency. - */ - title: string; - /** - * Name of the service the dependency expects to match. - */ - name: string; - /** - * Version of the service the dependency expects to match. - */ - version: string; - /** - * Human-readable description of the dependency. - */ - description?: string; - /** - * Whether the dependency must be matched in order for the service - * to be operational. - */ - required: boolean; -}; - -/** - * Object describing a multi-service dependency defined by a Foxx service. - */ -export type MultiServiceDependency = { - /** - * Whether this is a multi-service dependency. - */ - multiple: true; - /** - * Current mount points the dependency is resolved to. - */ - current?: string[]; - /** - * Formatted name of the dependency. - */ - title: string; - /** - * Name of the service the dependency expects to match. - */ - name: string; - /** - * Version of the service the dependency expects to match. - */ - version: string; - /** - * Human-readable description of the dependency. - */ - description?: string; - /** - * Whether the dependency must be matched in order for the service - * to be operational. - */ - required: boolean; -}; - -/** - * Test stats for a Foxx service's tests. - */ -export type ServiceTestStats = { - /** - * Total number of tests found. - */ - tests: number; - /** - * Number of tests that ran successfully. - */ - passes: number; - /** - * Number of tests that failed. - */ - failures: number; - /** - * Number of tests skipped or not executed. - */ - pending: number; - /** - * Total test duration in milliseconds. - */ - duration: number; -}; - -/** - * Test results for a single test case using the stream reporter. - */ -export type ServiceTestStreamTest = { - title: string; - fullTitle: string; - duration: number; - err?: string; -}; - -/** - * Test results for a Foxx service's tests using the stream reporter. - */ -export type ServiceTestStreamReport = ( - | ["start", { total: number }] - | ["pass", ServiceTestStreamTest] - | ["fail", ServiceTestStreamTest] - | ["end", ServiceTestStats] -)[]; - -/** - * Test results for a single test case using the suite reporter. - */ -export type ServiceTestSuiteTest = { - result: "pending" | "pass" | "fail"; - title: string; - duration: number; - err?: any; -}; - -/** - * Test results for a single test suite using the suite reporter. - */ -export type ServiceTestSuite = { - title: string; - suites: ServiceTestSuite[]; - tests: ServiceTestSuiteTest[]; -}; - -/** - * Test results for a Foxx service's tests using the suite reporter. - */ -export type ServiceTestSuiteReport = { - stats: ServiceTestStats; - suites: ServiceTestSuite[]; - tests: ServiceTestSuiteTest[]; -}; - -/** - * Test results for a single test case in XUnit format using the JSONML - * representation. - */ -export type ServiceTestXunitTest = - | ["testcase", { classname: string; name: string; time: number }] - | [ - "testcase", - { classname: string; name: string; time: number }, - ["failure", { message: string; type: string }, string], - ]; - -/** - * Test results for a Foxx service's tests in XUnit format using the JSONML - * representation. - */ -export type ServiceTestXunitReport = [ - "testsuite", - { - timestamp: number; - tests: number; - errors: number; - failures: number; - skip: number; - time: number; - }, - ...ServiceTestXunitTest[], -]; - -/** - * Test results for a Foxx service's tests in TAP format. 
- */ -export type ServiceTestTapReport = string[]; - -/** - * Test results for a single test case using the default reporter. - */ -export type ServiceTestDefaultTest = { - title: string; - fullTitle: string; - duration: number; - err?: string; -}; - -/** - * Test results for a Foxx service's tests using the default reporter. - */ -export type ServiceTestDefaultReport = { - stats: ServiceTestStats; - tests: ServiceTestDefaultTest[]; - pending: ServiceTestDefaultTest[]; - failures: ServiceTestDefaultTest[]; - passes: ServiceTestDefaultTest[]; -}; - -/** - * OpenAPI 2.0 description of a Foxx service. - */ -export type SwaggerJson = { - [key: string]: any; - info: { - title: string; - description: string; - version: string; - license: string; - }; - path: { - [key: string]: any; - }; -}; - -/** - * Access level for an ArangoDB user's access to a collection or database. - */ -export type AccessLevel = "rw" | "ro" | "none"; - -/** - * Properties of an ArangoDB user object. - */ -export type ArangoUser = { - /** - * ArangoDB username of the user. - */ - user: string; - /** - * Whether the ArangoDB user account is enabled and can authenticate. - */ - active: boolean; - /** - * Additional information to store about this user. - */ - extra: Record; -}; - -/** - * Options for creating an ArangoDB user. - */ -export type CreateUserOptions = { - /** - * ArangoDB username of the user. - */ - user: string; - /** - * Password the ArangoDB user will use for authentication. - */ - passwd: string; - /** - * Whether the ArangoDB user account is enabled and can authenticate. - * - * Default: `true` - */ - active?: boolean; - /** - * Additional information to store about this user. - * - * Default: `{}` - */ - extra?: Record; -}; - -/** - * Options for modifying an ArangoDB user. + * Options for creating a database. + * + * See {@link Database#createDatabase}. */ -export type UserOptions = { - /** - * Password the ArangoDB user will use for authentication. - */ - passwd: string; +export type CreateDatabaseOptions = { /** - * Whether the ArangoDB user account is enabled and can authenticate. - * - * Default: `true` + * Database users to create with the database. */ - active?: boolean; + users?: users.CreateDatabaseUserOptions[]; /** - * Additional information to store about this user. - * - * Default: `{}` + * (Cluster only.) The sharding method to use for new collections in the + * database. */ - extra?: Record; -}; - -/** - * Options for accessing or manipulating access levels. - */ -export type UserAccessLevelOptions = { + sharding?: "" | "flexible" | "single"; /** - * The database to access or manipulate the access level of. + * (Cluster only.) Default replication factor for new collections in this + * database. * - * If `collection` is an `ArangoCollection`, this option defaults to the - * database the collection is contained in. Otherwise this option defaults to - * the current database. - */ - database?: Database | string; - /** - * The collection to access or manipulate the access level of. - */ - collection?: ArangoCollection | string; -}; - -/** - * An object providing methods for accessing queue time metrics of the most - * recently received server responses if the server supports this feature. - */ -export type QueueTimeMetrics = { - /** - * Returns the queue time of the most recently received response in seconds. 
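The `QueueTimeMetrics` object above is exposed as `db.queueTime` per the doc links earlier in this patch; a short sketch of reading it after a request has completed:

```ts
import { aql, Database } from "arangojs";

const db = new Database();

await db.query(aql`RETURN 1`);

const metrics = db.queueTime;
console.log(metrics.getLatest()); // queue time of the most recent response, in seconds
console.log(metrics.getAvg());    // average over the retained responses
for (const [timestamp, queueTime] of metrics.getValues()) {
  console.log(new Date(timestamp).toISOString(), queueTime);
}
```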
- */ - getLatest: () => number | undefined; - /** - * Returns a list of the most recently received queue time values as tuples - * of the timestamp of the response being processed in milliseconds and the - * queue time in seconds. + * Setting this to `1` disables replication. Setting this to `"satellite"` + * will replicate to every DBServer. */ - getValues: () => [number, number][]; + replicationFactor?: "satellite" | number; /** - * Returns the average queue time of the most recently received responses - * in seconds. + * (Cluster only.) Default write concern for new collections created in this + * database. */ - getAvg: () => number; + writeConcern?: number; }; +//#endregion +//#region DatabaseDescription /** - * (Enterprise Edition only.) Options for creating a hot backup. + * Object describing a database. + * + * See {@link Database#get}. */ -export type HotBackupOptions = { - /** - * If set to `true` and no global transaction lock can be acquired within the - * given timeout, a possibly inconsistent backup is taken. - * - * Default: `false` - */ - allowInconsistent?: boolean; - /** - * (Enterprise Edition cluster only.) If set to `true` and no global - * transaction lock can be acquired within the given timeout, all running - * transactions are forcefully aborted to ensure that a consistent backup - * can be created. - * - * Default: `false`. - */ - force?: boolean; +export type DatabaseDescription = { /** - * Label to appended to the backup's identifier. - * - * Default: If omitted or empty, a UUID will be generated. + * Name of the database. */ - label?: string; + name: string; /** - * Time in seconds that the operation will attempt to get a consistent - * snapshot. - * - * Default: `120`. + * Unique identifier of the database. */ - timeout?: number; -}; - -/** - * (Enterprise Edition only.) Result of a hot backup. - */ -export type HotBackupResult = { id: string; - potentiallyInconsistent: boolean; - sizeInBytes: number; - datetime: string; - nrDBServers: number; - nrFiles: number; -}; - -/** - * (Enterprise Edition only.) List of known hot backups. - */ -export type HotBackupList = { - server: string; - list: Record< - string, - HotBackupResult & { - version: string; - keys: any[]; - available: boolean; - nrPiecesPresent: number; - countIncludesFilesOnly: boolean; - } - >; -}; - -/** - * Numeric representation of the logging level of a log entry. - */ -export enum LogLevel { - FATAL, - ERROR, - WARNING, - INFO, - DEBUG, -} - -/** - * String representation of the logging level of a log entry. - */ -export type LogLevelLabel = "FATAL" | "ERROR" | "WARNING" | "INFO" | "DEBUG"; - -/** - * Logging level setting. - */ -export type LogLevelSetting = LogLevelLabel | "DEFAULT"; - -/** - * Log sorting direction, ascending or descending. - */ -export type LogSortDirection = "asc" | "desc"; - -/** - * Options for retrieving log entries. - */ -export type LogEntriesOptions = { - /** - * Maximum log level of the entries to retrieve. - * - * Default: `INFO`. - */ - upto?: LogLevel | LogLevelLabel | Lowercase; - /** - * If set, only log entries with this log level will be returned. - */ - level?: LogLevel | LogLevelLabel | Lowercase; /** - * If set, only log entries with an `lid` greater than or equal to this value - * will be returned. + * File system path of the database. */ - start?: number; + path: string; /** - * If set, only this many entries will be returned. + * Whether the database is the system database. 
*/ - size?: number; + isSystem: boolean; /** - * If set, this many log entries will be skipped. + * (Cluster only.) The sharding method to use for new collections in the + * database. */ - offset?: number; + sharding?: "" | "flexible" | "single"; /** - * If set, only log entries containing the specified text will be returned. + * (Cluster only.) Default replication factor for new collections in this + * database. */ - search?: string; + replicationFactor?: "satellite" | number; /** - * If set to `"desc"`, log entries will be returned in reverse chronological - * order. - * - * Default: `"asc"`. + * (Cluster only.) Default write concern for new collections created in this + * database. */ - sort?: LogSortDirection; -}; - -/** - * An object representing a single log entry. - */ -export type LogMessage = { - id: number; - topic: string; - level: LogLevelLabel; - date: string; - message: string; -}; - -/** - * An object representing a list of log entries. - */ -export type LogEntries = { - totalAmount: number; - lid: number[]; - topic: string[]; - level: LogLevel[]; - timestamp: number[]; - text: string[]; + writeConcern?: number; }; +//#endregion /** * @internal @@ -2067,21 +116,31 @@ type TrappedError = { type TrappedRequest = { error?: false; jobId: string; - onResolve: (res: ProcessedResponse) => void; + onResolve: (res: connection.ProcessedResponse) => void; onReject: (error: any) => void; }; +//#region Database class +/** + * Indicates whether the given value represents a {@link Database}. + * + * @param database - A value that might be a database. + */ +export function isArangoDatabase(database: any): database is Database { + return Boolean(database && database.isArangoDatabase); +} + /** * An object representing a single ArangoDB database. All arangojs collections, * cursors, analyzers and so on are linked to a `Database` object. */ export class Database { - protected _connection: Connection; + protected _connection: connection.Connection; protected _name: string; - protected _analyzers = new Map(); - protected _collections = new Map(); - protected _graphs = new Map(); - protected _views = new Map(); + protected _analyzers = new Map(); + protected _collections = new Map(); + protected _graphs = new Map(); + protected _views = new Map(); protected _trapRequest?: ( trapped: TrappedError | TrappedRequest ) => void; @@ -2102,7 +161,7 @@ export class Database { * }); * ``` */ - constructor(config?: Config); + constructor(config?: connection.Config); /** * Creates a new `Database` instance with its own connection pool. * @@ -2123,7 +182,7 @@ export class Database { */ constructor(database: Database, name?: string); constructor( - configOrDatabase: string | string[] | Config | Database = {}, + configOrDatabase: string | string[] | connection.Config | Database = {}, name?: string ) { if (isArangoDatabase(configOrDatabase)) { @@ -2139,7 +198,7 @@ export class Database { typeof config === "string" || Array.isArray(config) ? { databaseName: name, url: config } : config; - this._connection = new Connection(options); + this._connection = new connection.Connection(options); this._name = databaseName || "_system"; } } @@ -2162,7 +221,7 @@ export class Database { } /** - * Returns a new {@link route.Route} instance for the given path (relative to the + * Returns a new {@link routes.Route} instance for the given path (relative to the * database) that can be used to perform arbitrary HTTP requests. * * @param path - The database-relative URL of the route. 
Defaults to the @@ -2183,8 +242,8 @@ export class Database { * // with JSON request body '{"username": "admin", "password": "hunter2"}' * ``` */ - route(path?: string, headers?: Headers | Record): Route { - return new Route(this, path, headers); + route(path?: string, headers?: Headers | Record): routes.Route { + return new routes.Route(this, path, headers); } /** @@ -2201,8 +260,8 @@ export class Database { * response object to a more useful return value. */ async request( - options: RequestOptions & { absolutePath?: boolean }, - transform?: (res: ProcessedResponse) => ReturnType + options: connection.RequestOptions & { absolutePath?: boolean }, + transform?: (res: connection.ProcessedResponse) => ReturnType ): Promise; /** * @internal @@ -2217,16 +276,16 @@ export class Database { * returned. */ async request( - options: RequestOptions & { absolutePath?: boolean }, + options: connection.RequestOptions & { absolutePath?: boolean }, transform: false - ): Promise>; + ): Promise>; async request( { absolutePath = false, basePath, ...opts - }: RequestOptions & { absolutePath?: boolean }, - transform: false | ((res: ProcessedResponse) => ReturnType) = (res) => res.parsedBody as ReturnType + }: connection.RequestOptions & { absolutePath?: boolean }, + transform: false | ((res: connection.ProcessedResponse) => ReturnType) = (res) => res.parsedBody as ReturnType ): Promise { if (!absolutePath) { basePath = `/_db/${encodeURIComponent(this._name)}${basePath || ""}`; @@ -2238,7 +297,7 @@ export class Database { const options = { ...opts }; options.headers = new Headers(options.headers); options.headers.set("x-arango-async", "store"); - let jobRes: ProcessedResponse; + let jobRes: connection.ProcessedResponse; try { jobRes = await this._connection.request({ basePath, ...options }); } catch (e) { @@ -2360,11 +419,11 @@ export class Database { * @param timeout - Maximum number of milliseconds to wait for propagation. */ async waitForPropagation( - request: RequestOptions, + request: connection.RequestOptions, timeout?: number ): Promise; async waitForPropagation( - { basePath, ...request }: RequestOptions, + { basePath, ...request }: connection.RequestOptions, timeout?: number ): Promise { await this._connection.waitForPropagation( @@ -2380,7 +439,7 @@ export class Database { * Methods for accessing the server-reported queue times of the mostly * recently received responses. 
*/ - get queueTime(): QueueTimeMetrics { + get queueTime(): administration.QueueTimeMetrics { return this._connection.queueTime; } @@ -2510,7 +569,7 @@ export class Database { * // server: description of the server * ``` */ - version(details?: boolean): Promise { + version(details?: boolean): Promise { return this.request({ method: "GET", path: "/_api/version", @@ -2529,7 +588,7 @@ export class Database { * // name: name of the storage engine * ``` */ - engine(): Promise { + engine(): Promise { return this.request({ method: "GET", path: "/_api/engine", @@ -2562,7 +621,7 @@ export class Database { * // serverInfo: detailed information about the server * ``` */ - status(): Promise { + status(): Promise { return this.request({ method: "GET", path: "/_admin/status", @@ -2583,7 +642,7 @@ export class Database { * // availability is either "default", "readonly", or false * ``` */ - async availability(graceful = false): Promise { + async availability(graceful = false): Promise { try { return this.request({ method: "GET", @@ -2591,7 +650,7 @@ export class Database { }, (res) => res.parsedBody.mode); } catch (e) { if (graceful) return false; - if ((isArangoError(e) || e instanceof HttpError) && e.code === 503) { + if ((errors.isArangoError(e) || e instanceof errors.HttpError) && e.code === 503) { return false; } throw e; @@ -2603,7 +662,7 @@ export class Database { * * Note that this API may reveal sensitive data about the deployment. */ - supportInfo(): Promise { + supportInfo(): Promise { return this.request({ method: "GET", path: "/_admin/support-info", @@ -2634,7 +693,7 @@ export class Database { * const imbalance = await db.getClusterImbalance(); * ``` */ - getClusterImbalance(): Promise { + getClusterImbalance(): Promise { return this.request( { path: "/_admin/cluster/rebalance" }, (res) => res.parsedBody.result @@ -2657,15 +716,15 @@ export class Database { * ``` */ computeClusterRebalance( - opts: ClusterRebalanceOptions - ): Promise { + options: cluster.ClusterRebalanceOptions + ): Promise { return this.request( { method: "POST", path: "/_admin/cluster/rebalance", body: { version: 1, - ...opts, + ...options, }, }, (res) => res.parsedBody.result @@ -2687,7 +746,7 @@ export class Database { * } * ``` */ - executeClusterRebalance(moves: ClusterRebalanceMove[]): Promise { + executeClusterRebalance(moves: cluster.ClusterRebalanceMove[]): Promise { return this.request({ method: "POST", path: "/_admin/cluster/rebalance/execute", @@ -2713,8 +772,8 @@ export class Database { * ``` */ rebalanceCluster( - opts: ClusterRebalanceOptions - ): Promise { + opts: cluster.ClusterRebalanceOptions + ): Promise { return this.request({ method: "PUT", path: "/_admin/cluster/rebalance", @@ -2755,7 +814,7 @@ export class Database { * // the database exists * ``` */ - get(): Promise { + get(): Promise { return this.request( { path: "/_api/database/current" }, (res) => res.parsedBody.result @@ -2777,7 +836,7 @@ export class Database { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === DATABASE_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === DATABASE_NOT_FOUND) { return false; } throw err; @@ -2820,11 +879,11 @@ export class Database { */ createDatabase( databaseName: string, - users: CreateDatabaseUser[] + users: users.CreateDatabaseUserOptions[] ): Promise; createDatabase( databaseName: string, - usersOrOptions: CreateDatabaseUser[] | CreateDatabaseOptions = {} + usersOrOptions: users.CreateDatabaseUserOptions[] | CreateDatabaseOptions = {} ): Promise { 
const { users, ...options } = Array.isArray(usersOrOptions) ? { users: usersOrOptions } @@ -2952,10 +1011,13 @@ export class Database { * Returns a `Collection` instance for the given collection name. * * In TypeScript the collection implements both the - * {@link collection.DocumentCollection} and {@link collection.EdgeCollection} + * {@link collections.DocumentCollection} and {@link collections.EdgeCollection} * interfaces and can be cast to either type to enforce a stricter API. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent document contents returned by + * the server (including computed properties). + * @param EntryInputType - Type to represent document contents passed when + * inserting or replacing documents (without computed properties). * @param collectionName - Name of the edge collection. * * @example @@ -2987,14 +1049,18 @@ export class Database { * const edges = db.collection("friends") as EdgeCollection; * ``` */ - collection = any>( + collection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string - ): DocumentCollection & EdgeCollection { + ): collections.DocumentCollection & + collections.EdgeCollection { collectionName = collectionName; if (!this._collections.has(collectionName)) { this._collections.set( collectionName, - new Collection(this, collectionName) + new collections.Collection(this, collectionName) ); } return this._collections.get(collectionName)!; @@ -3002,9 +1068,12 @@ export class Database { /** * Creates a new collection with the given `collectionName` and `options`, - * then returns a {@link collection.DocumentCollection} instance for the new collection. + * then returns a {@link collections.DocumentCollection} instance for the new collection. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent document contents returned by + * the server (including computed properties). + * @param EntryInputType - Type to represent document contents passed when + * inserting or replacing documents (without computed properties). * @param collectionName - Name of the new collection. * @param options - Options for creating the collection. * @@ -3023,18 +1092,24 @@ export class Database { * const documents = db.createCollection("persons"); * ``` */ - async createCollection = any>( + async createCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string, - options?: CreateCollectionOptions & { - type?: CollectionType.DOCUMENT_COLLECTION; + options?: collections.CreateCollectionOptions & { + type?: collections.CollectionType.DOCUMENT_COLLECTION; } - ): Promise>; + ): Promise>; /** * Creates a new edge collection with the given `collectionName` and - * `options`, then returns an {@link collection.EdgeCollection} instance for the new + * `options`, then returns an {@link collections.EdgeCollection} instance for the new * edge collection. * - * @param T - Type to use for edge document data. Defaults to `any`. + * @param EntryResultType - Type to represent edge document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent edge document contents passed + * when inserting or replacing documents (without computed properties). * @param collectionName - Name of the new collection. * @param options - Options for creating the collection. 
* @@ -3058,16 +1133,25 @@ export class Database { * }); * ``` */ - async createCollection = any>( + async createCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string, - options: CreateCollectionOptions & { - type: CollectionType.EDGE_COLLECTION; + options: collections.CreateCollectionOptions & { + type: collections.CollectionType.EDGE_COLLECTION; } - ): Promise>; - async createCollection = any>( + ): Promise>; + async createCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string, - options?: CreateCollectionOptions & { type?: CollectionType } - ): Promise & EdgeCollection> { + options?: collections.CreateCollectionOptions & { + type?: collections.CollectionType; + } + ): Promise & + collections.EdgeCollection> { const collection = this.collection(collectionName); await collection.create(options); return collection; @@ -3075,13 +1159,16 @@ export class Database { /** * Creates a new edge collection with the given `collectionName` and - * `options`, then returns an {@link collection.EdgeCollection} instance for the new + * `options`, then returns an {@link collections.EdgeCollection} instance for the new * edge collection. * * This is a convenience method for calling {@link Database#createCollection} * with `options.type` set to `EDGE_COLLECTION`. * - * @param T - Type to use for edge document data. Defaults to `any`. + * @param EntryResultType - Type to represent edge document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent edge document contents passed + * when inserting or replacing documents (without computed properties). * @param collectionName - Name of the new collection. * @param options - Options for creating the collection. * @@ -3101,13 +1188,17 @@ export class Database { * const edges = db.createEdgeCollection("friends"); * ``` */ - async createEdgeCollection = any>( + async createEdgeCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string, - options?: CreateCollectionOptions - ): Promise> { + options?: collections.CreateCollectionOptions + ): Promise< + collections.EdgeCollection> { return this.createCollection(collectionName, { ...options, - type: CollectionType.EDGE_COLLECTION, + type: collections.CollectionType.EDGE_COLLECTION, }); } @@ -3126,7 +1217,7 @@ export class Database { async renameCollection( collectionName: string, newName: string - ): Promise> { + ): Promise> { const result = await this.request({ method: "PUT", path: `/_api/collection/${encodeURIComponent(collectionName)}/rename`, @@ -3162,7 +1253,7 @@ export class Database { */ listCollections( excludeSystem: boolean = true - ): Promise { + ): Promise { return this.request( { path: "/_api/collection", @@ -3177,7 +1268,7 @@ export class Database { * `Collection` instances. * * In TypeScript these instances implement both the - * {@link collection.DocumentCollection} and {@link collection.EdgeCollection} + * {@link collections.DocumentCollection} and {@link collections.EdgeCollection} * interfaces and can be cast to either type to enforce a stricter API. * * See also {@link Database#listCollections}. 
@@ -3202,7 +1293,7 @@ export class Database { */ async collections( excludeSystem: boolean = true - ): Promise> { + ): Promise> { const collections = await this.listCollections(excludeSystem); return collections.map((data) => this.collection(data.name)); } @@ -3210,7 +1301,7 @@ export class Database { //#region graphs /** - * Returns a {@link graph.Graph} instance representing the graph with the given + * Returns a {@link graphs.Graph} instance representing the graph with the given * `graphName`. * * @param graphName - Name of the graph. @@ -3221,16 +1312,16 @@ export class Database { * const graph = db.graph("some-graph"); * ``` */ - graph(graphName: string): Graph { + graph(graphName: string): graphs.Graph { if (!this._graphs.has(graphName)) { - this._graphs.set(graphName, new Graph(this, graphName)); + this._graphs.set(graphName, new graphs.Graph(this, graphName)); } return this._graphs.get(graphName)!; } /** * Creates a graph with the given `graphName` and `edgeDefinitions`, then - * returns a {@link graph.Graph} instance for the new graph. + * returns a {@link graphs.Graph} instance for the new graph. * * @param graphName - Name of the graph to be created. * @param edgeDefinitions - An array of edge definitions. @@ -3238,9 +1329,9 @@ export class Database { */ async createGraph( graphName: string, - edgeDefinitions: EdgeDefinitionOptions[], - options?: CreateGraphOptions - ): Promise { + edgeDefinitions: graphs.EdgeDefinitionOptions[], + options?: graphs.CreateGraphOptions + ): Promise { const graph = this.graph(graphName); await graph.create(edgeDefinitions, options); return graph; @@ -3259,7 +1350,7 @@ export class Database { * // graphs is an array of graph descriptions * ``` */ - listGraphs(): Promise { + listGraphs(): Promise { return this.request( { path: "/_api/gharial" }, (res) => res.parsedBody.graphs @@ -3267,7 +1358,7 @@ export class Database { } /** - * Fetches all graphs from the database and returns an array of {@link graph.Graph} + * Fetches all graphs from the database and returns an array of {@link graphs.Graph} * instances for those graphs. * * See also {@link Database#listGraphs}. @@ -3279,7 +1370,7 @@ export class Database { * // graphs is an array of Graph instances * ``` */ - async graphs(): Promise { + async graphs(): Promise { const graphs = await this.listGraphs(); return graphs.map((data: any) => this.graph(data._key)); } @@ -3287,7 +1378,7 @@ export class Database { //#region views /** - * Returns a {@link view.View} instance for the given `viewName`. + * Returns a {@link views.View} instance for the given `viewName`. * * @param viewName - Name of the ArangoSearch or SearchAlias View. * @@ -3297,16 +1388,16 @@ export class Database { * const view = db.view("potatoes"); * ``` */ - view(viewName: string): View { + view(viewName: string): views.View { if (!this._views.has(viewName)) { - this._views.set(viewName, new View(this, viewName)); + this._views.set(viewName, new views.View(this, viewName)); } return this._views.get(viewName)!; } /** * Creates a new View with the given `viewName` and `options`, then returns a - * {@link view.View} instance for the new View. + * {@link views.View} instance for the new View. * * @param viewName - Name of the View. * @param options - An object defining the properties of the View. 
@@ -3320,8 +1411,8 @@ export class Database { */ async createView( viewName: string, - options: CreateViewOptions - ): Promise { + options: views.CreateViewOptions + ): Promise { const view = this.view(viewName); await view.create(options); return view; @@ -3330,7 +1421,7 @@ export class Database { /** * Renames the view `viewName` to `newName`. * - * Additionally removes any stored {@link view.View} instance for `viewName` from + * Additionally removes any stored {@link views.View} instance for `viewName` from * the `Database` instance's internal cache. * * **Note**: Renaming views may not be supported when ArangoDB is running in @@ -3342,7 +1433,7 @@ export class Database { async renameView( viewName: string, newName: string - ): Promise> { + ): Promise> { const result = await this.request({ method: "PUT", path: `/_api/view/${encodeURIComponent(viewName)}/rename`, @@ -3366,13 +1457,13 @@ export class Database { * // views is an array of View descriptions * ``` */ - listViews(): Promise { + listViews(): Promise { return this.request({ path: "/_api/view" }, (res) => res.parsedBody.result); } /** * Fetches all Views from the database and returns an array of - * {@link view.View} instances + * {@link views.View} instances * for the Views. * * See also {@link Database#listViews}. @@ -3384,7 +1475,7 @@ export class Database { * // views is an array of ArangoSearch View instances * ``` */ - async views(): Promise { + async views(): Promise { const views = await this.listViews(); return views.map((data) => this.view(data.name)); } @@ -3392,7 +1483,7 @@ export class Database { //#region analyzers /** - * Returns an {@link analyzer.Analyzer} instance representing the Analyzer with the + * Returns an {@link analyzers.Analyzer} instance representing the Analyzer with the * given `analyzerName`. * * @example @@ -3402,16 +1493,19 @@ export class Database { * const info = await analyzer.get(); * ``` */ - analyzer(analyzerName: string): Analyzer { + analyzer(analyzerName: string): analyzers.Analyzer { if (!this._analyzers.has(analyzerName)) { - this._analyzers.set(analyzerName, new Analyzer(this, analyzerName)); + this._analyzers.set( + analyzerName, + new analyzers.Analyzer(this, analyzerName) + ); } return this._analyzers.get(analyzerName)!; } /** * Creates a new Analyzer with the given `analyzerName` and `options`, then - * returns an {@link analyzer.Analyzer} instance for the new Analyzer. + * returns an {@link analyzers.Analyzer} instance for the new Analyzer. * * @param analyzerName - Name of the Analyzer. * @param options - An object defining the properties of the Analyzer. @@ -3425,8 +1519,8 @@ export class Database { */ async createAnalyzer( analyzerName: string, - options: CreateAnalyzerOptions - ): Promise { + options: analyzers.CreateAnalyzerOptions + ): Promise { const analyzer = this.analyzer(analyzerName); await analyzer.create(options); return analyzer; @@ -3445,7 +1539,7 @@ export class Database { * // analyzers is an array of Analyzer descriptions * ``` */ - listAnalyzers(): Promise { + listAnalyzers(): Promise { return this.request( { path: "/_api/analyzer" }, (res) => res.parsedBody.result @@ -3454,7 +1548,7 @@ export class Database { /** * Fetches all Analyzers visible in the database and returns an array of - * {@link analyzer.Analyzer} instances for those Analyzers. + * {@link analyzers.Analyzer} instances for those Analyzers. * * See also {@link Database#listAnalyzers}. 
* @@ -3465,7 +1559,7 @@ export class Database { * // analyzers is an array of Analyzer instances * ``` */ - async analyzers(): Promise { + async analyzers(): Promise { const analyzers = await this.listAnalyzers(); return analyzers.map((data) => this.analyzer(data.name)); } @@ -3483,7 +1577,7 @@ export class Database { * // users is an array of user objects * ``` */ - listUsers(): Promise { + listUsers(): Promise { return this.request( { path: "/_api/user", @@ -3504,7 +1598,7 @@ export class Database { * // user is the user object for the user named "steve" * ``` */ - getUser(username: string): Promise> { + getUser(username: string): Promise> { return this.request({ path: `/_api/user/${encodeURIComponent(username)}`, }); @@ -3526,7 +1620,7 @@ export class Database { createUser( username: string, passwd: string - ): Promise>; + ): Promise>; /** * Creates a new ArangoDB user with the given options. * @@ -3542,12 +1636,12 @@ export class Database { */ createUser( username: string, - options: UserOptions - ): Promise>; + options: users.UserOptions + ): Promise>; createUser( username: string, - options: string | UserOptions - ): Promise> { + options: string | users.UserOptions + ): Promise> { if (typeof options === "string") { options = { passwd: options }; } @@ -3577,7 +1671,7 @@ export class Database { updateUser( username: string, passwd: string - ): Promise>; + ): Promise>; /** * Updates the ArangoDB user with the new options. * @@ -3593,12 +1687,12 @@ export class Database { */ updateUser( username: string, - options: Partial - ): Promise>; + options: Partial + ): Promise>; updateUser( username: string, - options: string | Partial - ): Promise> { + options: string | Partial + ): Promise> { if (typeof options === "string") { options = { passwd: options }; } @@ -3627,8 +1721,8 @@ export class Database { */ replaceUser( username: string, - options: UserOptions - ): Promise> { + options: users.UserOptions + ): Promise> { if (typeof options === "string") { options = { passwd: options }; } @@ -3656,13 +1750,13 @@ export class Database { */ removeUser( username: string - ): Promise>> { + ): Promise { return this.request( { method: "DELETE", path: `/_api/user/${encodeURIComponent(username)}`, }, - (res) => res.parsedBody + () => undefined, ); } @@ -3737,17 +1831,17 @@ export class Database { */ getUserAccessLevel( username: string, - { database, collection }: UserAccessLevelOptions - ): Promise { + { database, collection }: users.UserAccessLevelOptions + ): Promise { const databaseName = isArangoDatabase(database) ? database.name : database ?? - (isArangoCollection(collection) + (collections.isArangoCollection(collection) ? ((collection as any)._db as Database).name : this._name); const suffix = collection ? `/${encodeURIComponent( - isArangoCollection(collection) ? collection.name : collection + collections.isArangoCollection(collection) ? collection.name : collection )}` : ""; return this.request( @@ -3838,17 +1932,17 @@ export class Database { database, collection, grant, - }: UserAccessLevelOptions & { grant: AccessLevel } - ): Promise>> { + }: users.UserAccessLevelOptions & { grant: users.AccessLevel } + ): Promise>> { const databaseName = isArangoDatabase(database) ? database.name : database ?? - (isArangoCollection(collection) - ? ((collection as any)._db as Database).name + (collection instanceof collections.Collection + ? collection.database.name : this._name); const suffix = collection ? `/${encodeURIComponent( - isArangoCollection(collection) ? 
collection.name : collection + collections.isArangoCollection(collection) ? collection.name : collection )}` : ""; return this.request( @@ -3928,17 +2022,17 @@ export class Database { */ clearUserAccessLevel( username: string, - { database, collection }: UserAccessLevelOptions - ): Promise>> { + { database, collection }: users.UserAccessLevelOptions + ): Promise>> { const databaseName = isArangoDatabase(database) ? database.name : database ?? - (isArangoCollection(collection) + (collection instanceof collections.Collection ? ((collection as any)._db as Database).name : this._name); const suffix = collection ? `/${encodeURIComponent( - isArangoCollection(collection) ? collection.name : collection + collections.isArangoCollection(collection) ? collection.name : collection )}` : ""; return this.request( @@ -3971,7 +2065,7 @@ export class Database { getUserDatabases( username: string, full?: false - ): Promise>; + ): Promise>; /** * Fetches an object mapping names of databases to the access level of the * given ArangoDB user for those databases and the collections within each @@ -3999,8 +2093,8 @@ export class Database { Record< string, { - permission: AccessLevel; - collections: Record; + permission: users.AccessLevel; + collections: Record; } > >; @@ -4021,9 +2115,9 @@ export class Database { * value. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * **Note**: The `action` function will be evaluated and executed on the * server inside ArangoDB's embedded JavaScript environment and can not @@ -4066,17 +2160,17 @@ export class Database { * ``` */ executeTransaction( - collections: TransactionCollections & { allowImplicit?: boolean }, + collections: transactions.TransactionCollectionOptions & { allowImplicit?: boolean }, action: string, - options?: TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any } ): Promise; /** * Performs a server-side transaction and returns its return value. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. 
* * **Note**: The `action` function will be evaluated and executed on the * server inside ArangoDB's embedded JavaScript environment and can not @@ -4115,17 +2209,17 @@ export class Database { * ``` */ executeTransaction( - collections: (string | ArangoCollection)[], + collections: (string | collections.ArangoCollection)[], action: string, - options?: TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any } ): Promise; /** * Performs a server-side transaction and returns its return value. * * The Collection can be specified as a collection name (string) or an object - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * **Note**: The `action` function will be evaluated and executed on the * server inside ArangoDB's embedded JavaScript environment and can not @@ -4164,18 +2258,18 @@ export class Database { * ``` */ executeTransaction( - collection: string | ArangoCollection, + collection: string | collections.ArangoCollection, action: string, - options?: TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any } ): Promise; executeTransaction( collections: - | (TransactionCollections & { allowImplicit?: boolean }) - | (string | ArangoCollection)[] + | (transactions.TransactionCollectionOptions & { allowImplicit?: boolean }) + | (string | collections.ArangoCollection)[] | string - | ArangoCollection, + | collections.ArangoCollection, action: string, - options: TransactionOptions & { params?: any } = {} + options: transactions.TransactionOptions & { params?: any } = {} ): Promise { const { allowDirtyRead = undefined, ...opts } = options; return this.request( @@ -4184,7 +2278,7 @@ export class Database { path: "/_api/transaction", allowDirtyRead, body: { - collections: coerceTransactionCollections(collections), + collections: transactions.coerceTransactionCollections(collections), action, ...opts, }, @@ -4194,7 +2288,7 @@ export class Database { } /** - * Returns a {@link transaction.Transaction} instance for an existing streaming + * Returns a {@link transactions.Transaction} instance for an existing streaming * transaction with the given `id`. * * See also {@link Database#beginTransaction}. @@ -4210,19 +2304,19 @@ export class Database { * await trx2.commit(); * ``` */ - transaction(transactionId: string): Transaction { - return new Transaction(this, transactionId); + transaction(transactionId: string): transactions.Transaction { + return new transactions.Transaction(this, transactionId); } /** * Begins a new streaming transaction for the given collections, then returns - * a {@link transaction.Transaction} instance for the transaction. + * a {@link transactions.Transaction} instance for the transaction. 
* * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as - * well as (in TypeScript) {@link collection.DocumentCollection} and - * {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as + * well as (in TypeScript) {@link collections.DocumentCollection} and + * {@link collections.EdgeCollection}. * * @param collections - Collections involved in the transaction. * @param options - Options for the transaction. @@ -4242,17 +2336,17 @@ export class Database { * ``` */ beginTransaction( - collections: TransactionCollections, - options?: TransactionOptions - ): Promise; + collections: transactions.TransactionCollectionOptions, + options?: transactions.TransactionOptions + ): Promise; /** * Begins a new streaming transaction for the given collections, then returns - * a {@link transaction.Transaction} instance for the transaction. + * a {@link transactions.Transaction} instance for the transaction. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * @param collections - Collections that can be read from and written to * during the transaction. @@ -4273,17 +2367,17 @@ export class Database { * ``` */ beginTransaction( - collections: (string | ArangoCollection)[], - options?: TransactionOptions - ): Promise; + collections: (string | collections.ArangoCollection)[], + options?: transactions.TransactionOptions + ): Promise; /** * Begins a new streaming transaction for the given collections, then returns - * a {@link transaction.Transaction} instance for the transaction. + * a {@link transactions.Transaction} instance for the transaction. * * The Collection can be specified as a collection name (string) or an object - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * @param collection - A collection that can be read from and written to * during the transaction. 
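The streaming transaction methods in this part of the diff only change their type references (`transactions.Transaction`, `transactions.TransactionCollectionOptions` and the `collections.*` interfaces); the call pattern itself is unchanged. A minimal sketch of that pattern, using a placeholder collection name:

```js
const db = new Database();
const accounts = db.collection("accounts");
// Begin a streaming transaction that may write to "accounts".
const trx = await db.beginTransaction({ write: [accounts] });
try {
  // Each request that should run inside the transaction is wrapped in a step.
  await trx.step(() => accounts.save({ _key: "alice", balance: 100 }));
  await trx.commit();
} catch (e) {
  await trx.abort();
  throw e;
}
```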
@@ -4303,17 +2397,17 @@ export class Database { * ``` */ beginTransaction( - collection: string | ArangoCollection, - options?: TransactionOptions - ): Promise; + collection: string | collections.ArangoCollection, + options?: transactions.TransactionOptions + ): Promise; beginTransaction( collections: - | TransactionCollections - | (string | ArangoCollection)[] + | transactions.TransactionCollectionOptions + | (string | collections.ArangoCollection)[] | string - | ArangoCollection, - options: TransactionOptions = {} - ): Promise { + | collections.ArangoCollection, + options: transactions.TransactionOptions = {} + ): Promise { const { allowDirtyRead = undefined, ...opts } = options; return this.request( { @@ -4321,11 +2415,11 @@ export class Database { path: "/_api/transaction/begin", allowDirtyRead, body: { - collections: coerceTransactionCollections(collections), + collections: transactions.coerceTransactionCollections(collections), ...opts, }, }, - (res) => new Transaction(this, res.parsedBody.result.id) + (res) => new transactions.Transaction(this, res.parsedBody.result.id) ); } @@ -4336,10 +2430,10 @@ export class Database { * is rejected, the transaction will be aborted. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as - * well as (in TypeScript) {@link collection.DocumentCollection} and - * {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as + * well as (in TypeScript) {@link collections.DocumentCollection} and + * {@link collections.EdgeCollection}. * * @param collections - Collections involved in the transaction. * @param callback - Callback function executing the transaction steps. @@ -4363,9 +2457,9 @@ export class Database { * ``` */ withTransaction( - collections: TransactionCollections, - callback: (step: Transaction["step"]) => Promise, - options?: TransactionOptions + collections: transactions.TransactionCollectionOptions, + callback: (step: transactions.Transaction["step"]) => Promise, + options?: transactions.TransactionOptions ): Promise; /** * Begins and commits a transaction using the given callback. Individual @@ -4374,9 +2468,9 @@ export class Database { * is rejected, the transaction will be aborted. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * @param collections - Collections that can be read from and written to * during the transaction. 
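`withTransaction` wraps the same begin/step/commit cycle: as described above, the transaction is committed when the callback's promise resolves and aborted when it rejects, so no explicit commit or abort call is needed. A short sketch, with a placeholder collection name:

```js
const db = new Database();
const accounts = db.collection("accounts");
const key = await db.withTransaction([accounts], async (step) => {
  // All requests inside the callback are wrapped in the provided step helper.
  const doc = await step(() => accounts.document("alice"));
  await step(() => accounts.update("alice", { balance: doc.balance - 10 }));
  return doc._key;
});
```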
@@ -4401,9 +2495,9 @@ export class Database { * ``` */ withTransaction( - collections: (string | ArangoCollection)[], - callback: (step: Transaction["step"]) => Promise, - options?: TransactionOptions + collections: (string | collections.ArangoCollection)[], + callback: (step: transactions.Transaction["step"]) => Promise, + options?: transactions.TransactionOptions ): Promise; /** * Begins and commits a transaction using the given callback. Individual @@ -4412,9 +2506,9 @@ export class Database { * is rejected, the transaction will be aborted. * * The Collection can be specified as a collection name (string) or an object - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * @param collection - A collection that can be read from and written to * during the transaction. @@ -4436,21 +2530,21 @@ export class Database { * ``` */ withTransaction( - collection: string | ArangoCollection, - callback: (step: Transaction["step"]) => Promise, - options?: TransactionOptions + collection: string | collections.ArangoCollection, + callback: (step: transactions.Transaction["step"]) => Promise, + options?: transactions.TransactionOptions ): Promise; async withTransaction( collections: - | TransactionCollections - | (string | ArangoCollection)[] + | transactions.TransactionCollectionOptions + | (string | collections.ArangoCollection)[] | string - | ArangoCollection, - callback: (step: Transaction["step"]) => Promise, - options: TransactionOptions = {} + | collections.ArangoCollection, + callback: (step: transactions.Transaction["step"]) => Promise, + options: transactions.TransactionOptions = {} ): Promise { const trx = await this.beginTransaction( - collections as TransactionCollections, + collections as transactions.TransactionCollectionOptions, options ); try { @@ -4478,7 +2572,7 @@ export class Database { * // transactions is an array of transaction descriptions * ``` */ - listTransactions(): Promise { + listTransactions(): Promise { return this._connection.request( { path: "/_api/transaction" }, (res) => res.parsedBody.transactions @@ -4487,7 +2581,7 @@ export class Database { /** * Fetches all active transactions from the database and returns an array of - * {@link transaction.Transaction} instances for those transactions. + * {@link transactions.Transaction} instances for those transactions. * * See also {@link Database#listTransactions}. * @@ -4498,7 +2592,7 @@ export class Database { * // transactions is an array of transactions * ``` */ - async transactions(): Promise { + async transactions(): Promise { const transactions = await this.listTransactions(); return transactions.map((data) => this.transaction(data.id)); } @@ -4507,9 +2601,9 @@ export class Database { //#region queries /** * Performs a database query using the given `query`, then returns a new - * {@link cursor.ArrayCursor} instance for the result set. + * {@link cursors.Cursor} instance for the result set. 
* - * See the {@link aql!aql} template string handler for information about how + * See the {@link aql.aql} template string handler for information about how * to create a query string without manually defining bind parameters nor * having to worry about escaping variables. * @@ -4518,7 +2612,7 @@ export class Database { * you do not need to use the `step` method to consume it. * * @param query - An object containing an AQL query string and bind - * parameters, e.g. the object returned from an {@link aql!aql} template string. + * parameters, e.g. the object returned from an {@link aql.aql} template string. * @param options - Options for the query execution. * * @example @@ -4556,14 +2650,14 @@ export class Database { * ``` */ query( - query: AqlQuery, - options?: QueryOptions - ): Promise>; + query: aql.AqlQuery, + options?: queries.QueryOptions + ): Promise>; /** * Performs a database query using the given `query` and `bindVars`, then - * returns a new {@link cursor.ArrayCursor} instance for the result set. + * returns a new {@link cursors.Cursor} instance for the result set. * - * See the {@link aql!aql} template string handler for a safer and easier + * See the {@link aql.aql} template string handler for a safer and easier * alternative to passing strings directly. * * **Note**: When executing a query in a streaming transaction using the @@ -4609,20 +2703,20 @@ export class Database { * ``` */ query( - query: string | AqlLiteral, + query: string | aql.AqlLiteral, bindVars?: Record, - options?: QueryOptions - ): Promise>; + options?: queries.QueryOptions + ): Promise>; query( - query: string | AqlQuery | AqlLiteral, + query: string | aql.AqlQuery | aql.AqlLiteral, bindVars?: Record, - options: QueryOptions = {} - ): Promise> { - if (isAqlQuery(query)) { + options: queries.QueryOptions = {} + ): Promise> { + if (aql.isAqlQuery(query)) { options = bindVars ?? {}; bindVars = query.bindVars; query = query.query; - } else if (isAqlLiteral(query)) { + } else if (aql.isAqlLiteral(query)) { query = query.toAQL(); } const { @@ -4655,7 +2749,7 @@ export class Database { timeout, }, (res) => - new BatchedArrayCursor( + new cursors.BatchCursor( this, res.parsedBody, res.arangojsHostUrl, @@ -4667,12 +2761,12 @@ export class Database { /** * Explains a database query using the given `query`. * - * See the {@link aql!aql} template string handler for information about how + * See the {@link aql.aql} template string handler for information about how * to create a query string without manually defining bind parameters nor * having to worry about escaping variables. * * @param query - An object containing an AQL query string and bind - * parameters, e.g. the object returned from an {@link aql!aql} template string. + * parameters, e.g. the object returned from an {@link aql.aql} template string. * @param options - Options for explaining the query. * * @example @@ -4687,18 +2781,18 @@ export class Database { * ``` */ explain( - query: AqlQuery, - options?: ExplainOptions & { allPlans?: false } - ): Promise>; + query: aql.AqlQuery, + options?: queries.ExplainOptions & { allPlans?: false } + ): Promise>; /** * Explains a database query using the given `query`. * - * See the {@link aql!aql} template string handler for information about how + * See the {@link aql.aql} template string handler for information about how * to create a query string without manually defining bind parameters nor * having to worry about escaping variables. 
* * @param query - An object containing an AQL query string and bind - * parameters, e.g. the object returned from an {@link aql!aql} template string. + * parameters, e.g. the object returned from an {@link aql.aql} template string. * @param options - Options for explaining the query. * * @example @@ -4716,13 +2810,13 @@ export class Database { * ``` */ explain( - query: AqlQuery, - options?: ExplainOptions & { allPlans: true } - ): Promise>; + query: aql.AqlQuery, + options?: queries.ExplainOptions & { allPlans: true } + ): Promise>; /** * Explains a database query using the given `query` and `bindVars`. * - * See the {@link aql!aql} template string handler for a safer and easier + * See the {@link aql.aql} template string handler for a safer and easier * alternative to passing strings directly. * * @param query - An AQL query string. @@ -4744,14 +2838,14 @@ export class Database { * ``` */ explain( - query: string | AqlLiteral, + query: string | aql.AqlLiteral, bindVars?: Record, - options?: ExplainOptions & { allPlans?: false } - ): Promise>; + options?: queries.ExplainOptions & { allPlans?: false } + ): Promise>; /** * Explains a database query using the given `query` and `bindVars`. * - * See the {@link aql!aql} template string handler for a safer and easier + * See the {@link aql.aql} template string handler for a safer and easier * alternative to passing strings directly. * * @param query - An AQL query string. @@ -4774,20 +2868,20 @@ export class Database { * ``` */ explain( - query: string | AqlLiteral, + query: string | aql.AqlLiteral, bindVars?: Record, - options?: ExplainOptions & { allPlans: true } - ): Promise>; + options?: queries.ExplainOptions & { allPlans: true } + ): Promise>; explain( - query: string | AqlQuery | AqlLiteral, + query: string | aql.AqlQuery | aql.AqlLiteral, bindVars?: Record, - options?: ExplainOptions - ): Promise> { - if (isAqlQuery(query)) { + options?: queries.ExplainOptions + ): Promise> { + if (aql.isAqlQuery(query)) { options = bindVars; bindVars = query.bindVars; query = query.query; - } else if (isAqlLiteral(query)) { + } else if (aql.isAqlLiteral(query)) { query = query.toAQL(); } return this.request({ @@ -4800,12 +2894,12 @@ export class Database { /** * Parses the given query and returns the result. * - * See the {@link aql!aql} template string handler for information about how + * See the {@link aql.aql} template string handler for information about how * to create a query string without manually defining bind parameters nor * having to worry about escaping variables. * * @param query - An AQL query string or an object containing an AQL query - * string and bind parameters, e.g. the object returned from an {@link aql!aql} + * string and bind parameters, e.g. the object returned from an {@link aql.aql} * template string. 
* * @example @@ -4818,11 +2912,11 @@ export class Database { * RETURN doc._key * `); * ``` - */ - parse(query: string | AqlQuery | AqlLiteral): Promise { - if (isAqlQuery(query)) { + aql.*/ + parse(query: string | aql.AqlQuery | aql.AqlLiteral): Promise { + if (aql.isAqlQuery(query)) { query = query.query; - } else if (isAqlLiteral(query)) { + } else if (aql.isAqlLiteral(query)) { query = query.toAQL(); } return this.request({ @@ -4844,7 +2938,7 @@ export class Database { * } * ``` */ - queryRules(): Promise { + queryRules(): Promise { return this.request({ path: "/_api/query/rules", }); @@ -4860,7 +2954,7 @@ export class Database { * console.log(tracking.enabled); * ``` */ - queryTracking(): Promise; + queryTracking(): Promise; /** * Modifies the query tracking properties. * @@ -4878,8 +2972,8 @@ export class Database { * }); * ``` */ - queryTracking(options: QueryTrackingOptions): Promise; - queryTracking(options?: QueryTrackingOptions): Promise { + queryTracking(options: queries.QueryTrackingOptions): Promise; + queryTracking(options?: queries.QueryTrackingOptions): Promise { return this.request( options ? { @@ -4905,7 +2999,7 @@ export class Database { * const queries = await db.listRunningQueries(); * ``` */ - listRunningQueries(): Promise { + listRunningQueries(): Promise { return this.request({ method: "GET", path: "/_api/query/current", @@ -4925,7 +3019,7 @@ export class Database { * // Only works if slow query tracking is enabled * ``` */ - listSlowQueries(): Promise { + listSlowQueries(): Promise { return this.request({ method: "GET", path: "/_api/query/slow", @@ -4985,18 +3079,18 @@ export class Database { } //#endregion - //#region functions + //#region user functions /** * Fetches a list of all AQL user functions registered with the database. 
* * @example * ```js * const db = new Database(); - * const functions = await db.listFunctions(); + * const functions = await db.listUserFunctions(); * const names = functions.map(fn => fn.name); * ``` */ - listFunctions(): Promise { + listUserFunctions(): Promise { return this.request( { path: "/_api/aqlfunction" }, (res) => res.parsedBody.result @@ -5019,7 +3113,7 @@ export class Database { * @example * ```js * const db = new Database(); - * await db.createFunction( + * await db.createUserFunction( * "ACME::ACCOUNTING::CALCULATE_VAT", * "(price) => price * 0.19" * ); @@ -5034,11 +3128,11 @@ export class Database { * // cursor is a cursor for the query result * ``` */ - createFunction( + createUserFunction( name: string, code: string, isDeterministic: boolean = false - ): Promise> { + ): Promise> { return this.request({ method: "POST", path: "/_api/aqlfunction", @@ -5057,14 +3151,14 @@ export class Database { * @example * ```js * const db = new Database(); - * await db.dropFunction("ACME::ACCOUNTING::CALCULATE_VAT"); + * await db.dropUserFunction("ACME::ACCOUNTING::CALCULATE_VAT"); * // the function no longer exists * ``` */ - dropFunction( + dropUserFunction( name: string, group: boolean = false - ): Promise> { + ): Promise> { return this.request({ method: "DELETE", path: `/_api/aqlfunction/${encodeURIComponent(name)}`, @@ -5091,7 +3185,7 @@ export class Database { * const services = await db.listServices(false); // all services * ``` */ - listServices(excludeSystem: boolean = true): Promise { + listServices(excludeSystem: boolean = true): Promise { return this.request({ path: "/_api/foxx", search: { excludeSystem }, @@ -5133,8 +3227,8 @@ export class Database { async installService( mount: string, source: File | Blob | string, - options: InstallServiceOptions = {} - ): Promise { + options: services.InstallServiceOptions = {} + ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); if (configuration) { @@ -5191,8 +3285,8 @@ export class Database { async replaceService( mount: string, source: File | Blob | string, - options: ReplaceServiceOptions = {} - ): Promise { + options: services.ReplaceServiceOptions = {} + ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); if (configuration) { @@ -5249,8 +3343,8 @@ export class Database { async upgradeService( mount: string, source: File | Blob | string, - options: UpgradeServiceOptions = {} - ): Promise { + options: services.UpgradeServiceOptions = {} + ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); if (configuration) { @@ -5285,7 +3379,7 @@ export class Database { */ uninstallService( mount: string, - options?: UninstallServiceOptions + options?: services.UninstallServiceOptions ): Promise { return this.request( { @@ -5309,7 +3403,7 @@ export class Database { * // info contains detailed information about the service * ``` */ - getService(mount: string): Promise { + getService(mount: string): Promise { return this.request({ path: "/_api/foxx/service", search: { mount }, @@ -5340,7 +3434,7 @@ export class Database { getServiceConfiguration( mount: string, minimal?: false - ): Promise>; + ): Promise>; /** * Retrieves information about the service's configuration options and their * current values. 
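For context, a minimal sketch of how the service configuration methods documented here are typically used. The mount point and option name are placeholders, and `updateServiceConfiguration` is assumed to be the companion update method:

```js
const db = new Database();
// Without the minimal flag, each entry describes the option and its current value.
const config = await db.getServiceConfiguration("/my-service");
console.log(config.someOption.current);
// Update a single option, leaving all other options untouched.
await db.updateServiceConfiguration("/my-service", { someOption: "new value" });
```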
@@ -5401,7 +3495,7 @@ export class Database { mount: string, cfg: Record, minimal?: false - ): Promise>; + ): Promise>; /** * Replaces the configuration of the given service, discarding any existing * values for options not specified. @@ -5475,7 +3569,7 @@ export class Database { mount: string, cfg: Record, minimal?: false - ): Promise>; + ): Promise>; /** * Updates the configuration of the given service while maintaining any * existing values for options not specified. @@ -5545,7 +3639,7 @@ export class Database { getServiceDependencies( mount: string, minimal?: false - ): Promise>; + ): Promise>; /** * Retrieves information about the service's dependencies and their current * mount points. @@ -5609,7 +3703,7 @@ export class Database { ): Promise< Record< string, - (SingleServiceDependency | MultiServiceDependency) & { warning?: string } + (services.SingleServiceDependency | services.MultiServiceDependency) & { warning?: string } > >; /** @@ -5692,7 +3786,7 @@ export class Database { ): Promise< Record< string, - (SingleServiceDependency | MultiServiceDependency) & { warning?: string } + (services.SingleServiceDependency | services.MultiServiceDependency) & { warning?: string } > >; /** @@ -5762,7 +3856,7 @@ export class Database { setServiceDevelopmentMode( mount: string, enabled: boolean = true - ): Promise { + ): Promise { return this.request({ method: enabled ? "POST" : "DELETE", path: "/_api/foxx/development", @@ -5771,21 +3865,21 @@ export class Database { } /** - * Retrieves a list of scripts defined in the service manifest's "scripts" - * section mapped to their human readable representations. + * Retrieves an object mapping script names to their human readable + * representations, as defined in the service manifest's "scripts" section. * * @param mount - The service's mount point, relative to the database. * * @example * ```js * const db = new Database(); - * const scripts = await db.listServiceScripts("/my-service"); + * const scripts = await db.getServiceScripts("/my-service"); * for (const [name, title] of Object.entries(scripts)) { * console.log(`${name}: ${title}`); * } * ``` */ - listServiceScripts(mount: string): Promise> { + getServiceScripts(mount: string): Promise> { return this.request({ path: "/_api/foxx/scripts", search: { mount }, @@ -5853,7 +3947,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results using the * "suite" reporter, which groups the test result by test suite. 
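// A hedged sketch of running a service's test suite, assuming the options object
// shown above also selects the reporter (here "suite") alongside `filter`.
import { Database } from "arangojs";

const db = new Database();

async function testService() {
  // Run only tests whose full name contains "users", grouped per test suite.
  const results = await db.runServiceTests("/my-service", {
    reporter: "suite",
    filter: "users",
  });
  console.log(JSON.stringify(results, null, 2));
}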
@@ -5885,7 +3979,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results using the * "stream" reporter, which represents the results as a sequence of tuples @@ -5918,7 +4012,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results using the * "tap" reporter, which represents the results as an array of strings using @@ -5951,7 +4045,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results using the * "xunit" reporter, which represents the results as an XML document using @@ -5984,7 +4078,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results as a string * using the "stream" reporter in "idiomatic" mode, which represents the @@ -6140,7 +4234,7 @@ export class Database { * // spec is a Swagger API description of the service * ``` */ - getServiceDocumentation(mount: string): Promise { + getServiceDocumentation(mount: string): Promise { return this.request({ path: "/_api/foxx/swagger", search: { mount }, @@ -6215,7 +4309,9 @@ export class Database { * // a hot backup has been created * ``` */ - createHotBackup(options: HotBackupOptions = {}): Promise { + createHotBackup( + options: hotBackups.HotBackupOptions = {} + ): Promise { return this.request( { method: "POST", @@ -6235,13 +4331,13 @@ export class Database { * * @example * ```js - * const backups = await db.listHotBackups(); - * for (const backup of backups) { + * const backups = await db.getHotBackups(); + * for (const backup of backups.list) { * console.log(backup.id); * } * ``` */ - listHotBackups(id?: string | string[]): Promise { + getHotBackups(id?: string | string[]): Promise { return this.request( { method: "POST", @@ -6314,7 +4410,7 @@ export class Database { * } * ``` */ - getLogEntries(options?: LogEntriesOptions): Promise { + getLogEntries(options?: logs.LogEntriesOptions): Promise { return this.request( { path: "/_admin/log/entries", @@ -6334,13 +4430,15 @@ export class Database { * * @example * ```js - * const messages = await db.getLogMessages(); + * const messages = await db.listLogMessages(); * for (const m of messages) { * console.log(`${m.date} - [${m.level}] ${m.message} (#${m.id})`); * } * ``` */ - getLogMessages(options?: LogEntriesOptions): Promise { + listLogMessages( + options?: logs.LogEntriesOptions + ): Promise { return this.request( { path: "/_admin/log", @@ -6359,7 +4457,7 @@ export class Database { * console.log(levels.request); // log level for incoming requests * ``` */ - getLogLevel(): Promise> { + getLogLevel(): Promise> { return this.request({ path: "/_admin/log/level", }); @@ -6379,8 +4477,8 @@ export class Database { * ``` */ setLogLevel( - levels: Record - ): Promise> { + levels: Record + ): Promise> { return this.request({ method: "PUT", path: "/_admin/log/level", @@ -6395,7 +4493,7 @@ export class Database { * database request performed by the callback will be marked for asynchronous * execution and its result will be made available as an async job. * - * Returns a {@link Job} instance that can be used to retrieve the result + * Returns a {@link jobs.Job} instance that can be used to retrieve the result * of the callback function once the request has been executed. * * @param callback - Callback function to execute as an async job. 
@@ -6411,7 +4509,7 @@ export class Database { * // job.result is a list of Collection instances * ``` */ - async createJob(callback: () => Promise): Promise> { + async createJob(callback: () => Promise): Promise> { const trap = new Promise>((resolveTrap) => { this._trapRequest = (trapped) => resolveTrap(trapped); }); @@ -6419,7 +4517,7 @@ export class Database { const trapped = await trap; if (trapped.error) return eventualResult as Promise; const { jobId, onResolve, onReject } = trapped; - return new Job( + return new jobs.Job( this, jobId, (res) => { @@ -6434,7 +4532,7 @@ export class Database { } /** - * Returns a {@link job.Job} instance for the given `jobId`. + * Returns a {@link jobs.Job} instance for the given `jobId`. * * @param jobId - ID of the async job. * @@ -6444,8 +4542,8 @@ export class Database { * const job = db.job("12345"); * ``` */ - job(jobId: string): Job { - return new Job(this, jobId); + job(jobId: string): jobs.Job { + return new jobs.Job(this, jobId); } /** @@ -6525,3 +4623,4 @@ export class Database { } //#endregion } +//#endregion diff --git a/src/documents.ts b/src/documents.ts index 1154068a8..ca9acfaa4 100644 --- a/src/documents.ts +++ b/src/documents.ts @@ -1,6 +1,6 @@ /** * ```ts - * import type { Document, Edge } from "arangojs/documents.js"; + * import type { Document, Edge } from "arangojs/documents"; * ``` * * The "documents" module provides document/edge related types for TypeScript. @@ -8,6 +8,7 @@ * @packageDocumentation */ +//#region Shared types /** * Common ArangoDB metadata properties of a document. */ @@ -41,7 +42,9 @@ export type EdgeMetadata = { */ _to: string; }; +//#endregion +//#region Document types /** * Type representing an object that can be stored in a collection. */ @@ -80,13 +83,473 @@ export type Edge = any> = T & export type Patch> = { [K in keyof T]?: T[K] | Patch; }; +//#endregion + +//#region Document operation options +/** + * Options for checking whether a document exists in a collection. + */ +export type DocumentExistsOptions = { + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; + /** + * If set to a document revision, the document will only match if its `_rev` + * matches the given revision. + */ + ifMatch?: string; + /** + * If set to a document revision, the document will only match if its `_rev` + * does not match the given revision. + */ + ifNoneMatch?: string; +}; + +/** + * Options for retrieving a document from a collection. + */ +export type ReadDocumentOptions = { + /** + * If set to `true`, `null` is returned instead of an exception being thrown + * if the document does not exist. + */ + graceful?: boolean; + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; + /** + * If set to a document revision, the request will fail with an error if the + * document exists but its `_rev` does not match the given revision. + */ + ifMatch?: string; + /** + * If set to a document revision, the request will fail with an error if the + * document exists and its `_rev` matches the given revision. Note that an + * `HttpError` with code 304 will be thrown instead of an `ArangoError`. 
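// A minimal sketch of the document read options above, assuming a collection
// named "users" already exists in the target database.
import { Database } from "arangojs";

const db = new Database();
const users = db.collection("users");

async function readUser(key: string, expectedRev?: string) {
  // graceful: resolve to null instead of throwing when the document is missing;
  // allowDirtyRead: permit a potentially stale answer from a follower.
  const doc = await users.document(key, { graceful: true, allowDirtyRead: true });
  if (doc && expectedRev) {
    // ifMatch: fail if the stored revision no longer matches the expected one.
    return users.document(key, { ifMatch: expectedRev });
  }
  return doc;
}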
+ */ + ifNoneMatch?: string; +}; + +/** + * Options for retrieving multiple documents from a collection. + */ +export type BulkReadDocumentsOptions = { + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; + /** + * If set to `false`, the existing document will only be modified if its + * `_rev` property matches the same property on the new data. + * + * Default: `true` + */ + ignoreRevs?: boolean; +}; + +/** + * Options for inserting a new document into a collection. + */ +export type InsertDocumentOptions = { + /** + * If set to `true`, data will be synchronized to disk before returning. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * If set to `true`, no data will be returned by the server. This option can + * be used to reduce network traffic. + * + * Default: `false` + */ + silent?: boolean; + /** + * If set to `true`, the complete new document will be returned as the `new` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnNew?: boolean; + /** + * If set to `true`, the complete old document will be returned as the `old` + * property on the result object. Has no effect if `silent` is set to `true`. + * This option is only available when `overwriteMode` is set to `"update"` or + * `"replace"`. + * + * Default: `false` + */ + returnOld?: boolean; + /** + * Defines what should happen if a document with the same `_key` or `_id` + * already exists, instead of throwing an exception. + * + * Default: `"conflict" + */ + overwriteMode?: "ignore" | "update" | "replace" | "conflict"; + /** + * If set to `false`, properties with a value of `null` will be removed from + * the new document. + * + * Default: `true` + */ + keepNull?: boolean; + /** + * If set to `false`, object properties that already exist in the old + * document will be overwritten rather than merged when an existing document + * with the same `_key` or `_id` is updated. This does not affect arrays. + * + * Default: `true` + */ + mergeObjects?: boolean; + /** + * If set to `true`, new entries will be added to in-memory index caches if + * document insertions affect the edge index or cache-enabled persistent + * indexes. + * + * Default: `false` + */ + refillIndexCaches?: boolean; + /** + * If set, the attribute with the name specified by the option is looked up + * in the stored document and the attribute value is compared numerically to + * the value of the versioning attribute in the supplied document that is + * supposed to update/replace it. + */ + versionAttribute?: string; +}; + +/** + * Options for replacing an existing document in a collection. + */ +export type ReplaceDocumentOptions = { + /** + * If set to `true`, data will be synchronized to disk before returning. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * If set to `true`, no data will be returned by the server. This option can + * be used to reduce network traffic. + * + * Default: `false` + */ + silent?: boolean; + /** + * If set to `true`, the complete new document will be returned as the `new` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnNew?: boolean; + /** + * If set to `false`, the existing document will only be modified if its + * `_rev` property matches the same property on the new data. 
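// A minimal sketch of the insert options defined above, assuming a "users"
// collection; overwriteMode "update" turns a key conflict into a patch.
import { Database } from "arangojs";

const db = new Database();
const users = db.collection("users");

async function upsertUser() {
  const result = await users.save(
    { _key: "jane", email: "jane@example.com", nickname: null },
    // keepNull: false drops the null-valued property instead of storing it;
    // returnNew/returnOld expose the stored and previous documents.
    { overwriteMode: "update", keepNull: false, returnNew: true, returnOld: true }
  );
  console.log(result.new?.email, result.old?._rev);
}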
+ * + * Default: `true` + */ + ignoreRevs?: boolean; + /** + * If set to `true`, the complete old document will be returned as the `old` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnOld?: boolean; + /** + * If set to a document revision, the document will only be replaced if its + * `_rev` matches the given revision. + */ + ifMatch?: string; + /** + * If set to `true`, existing entries in in-memory index caches will be + * updated if document replacements affect the edge index or cache-enabled + * persistent indexes. + * + * Default: `false` + */ + refillIndexCaches?: boolean; + /** + * If set, the attribute with the name specified by the option is looked up + * in the stored document and the attribute value is compared numerically to + * the value of the versioning attribute in the supplied document that is + * supposed to update/replace it. + */ + versionAttribute?: string; +}; + +/** + * Options for updating a document in a collection. + */ +export type UpdateDocumentOptions = { + /** + * If set to `true`, data will be synchronized to disk before returning. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * If set to `true`, no data will be returned by the server. This option can + * be used to reduce network traffic. + * + * Default: `false` + */ + silent?: boolean; + /** + * If set to `true`, the complete new document will be returned as the `new` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnNew?: boolean; + /** + * If set to `false`, the existing document will only be modified if its + * `_rev` property matches the same property on the new data. + * + * Default: `true` + */ + ignoreRevs?: boolean; + /** + * If set to `true`, the complete old document will be returned as the `old` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnOld?: boolean; + /** + * If set to `false`, properties with a value of `null` will be removed from + * the new document. + * + * Default: `true` + */ + keepNull?: boolean; + /** + * If set to `false`, object properties that already exist in the old + * document will be overwritten rather than merged. This does not affect + * arrays. + * + * Default: `true` + */ + mergeObjects?: boolean; + /** + * If set to a document revision, the document will only be updated if its + * `_rev` matches the given revision. + */ + ifMatch?: string; + /** + * If set to `true`, existing entries in in-memory index caches will be + * updated if document updates affect the edge index or cache-enabled + * persistent indexes. + * + * Default: `false` + */ + refillIndexCaches?: boolean; + /** + * If set, the attribute with the name specified by the option is looked up + * in the stored document and the attribute value is compared numerically to + * the value of the versioning attribute in the supplied document that is + * supposed to update/replace it. + */ + versionAttribute?: string; +}; + +/** + * Options for removing a document from a collection. + */ +export type RemoveDocumentOptions = { + /** + * If set to `true`, changes will be synchronized to disk before returning. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * If set to `true`, the complete old document will be returned as the `old` + * property on the result object. Has no effect if `silent` is set to `true`. 
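// A minimal sketch of the update options above, assuming a "users" collection
// and an already known revision to guard against concurrent writes.
import { Database } from "arangojs";

const db = new Database();
const users = db.collection("users");

async function renameUser(key: string, rev: string) {
  const result = await users.update(
    key,
    { name: "Jane Roe", obsoleteField: null },
    // ifMatch makes the update conditional on the current revision; returnOld
    // includes the previous document in the result for auditing.
    { ifMatch: rev, keepNull: false, returnOld: true, waitForSync: true }
  );
  console.log(result._rev, result.old?.name);
}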
+ * + * Default: `false` + */ + returnOld?: boolean; + /** + * If set to `true`, no data will be returned by the server. This option can + * be used to reduce network traffic. + * + * Default: `false` + */ + silent?: boolean; + /** + * If set to a document revision, the document will only be removed if its + * `_rev` matches the given revision. + */ + ifMatch?: string; + /** + * If set to `true`, existing entries in in-memory index caches will be + * deleted if document removals affect the edge index or cache-enabled + * persistent indexes. + * + * Default: `false` + */ + refillIndexCaches?: boolean; +}; + +/** + * Options for bulk importing documents into a collection. + */ +export type ImportDocumentsOptions = { + /** + * (Edge collections only.) Prefix to prepend to `_from` attribute values. + */ + fromPrefix?: string; + /** + * (Edge collections only.) Prefix to prepend to `_to` attribute values. + */ + toPrefix?: string; + /** + * If set to `true`, the collection is truncated before the data is imported. + * + * Default: `false` + */ + overwrite?: boolean; + /** + * Whether to wait for the documents to have been synced to disk. + */ + waitForSync?: boolean; + /** + * Controls behavior when a unique constraint is violated on the document key. + * + * * `"error"`: the document will not be imported. + * * `"update`: the document will be merged into the existing document. + * * `"replace"`: the document will replace the existing document. + * * `"ignore"`: the document will not be imported and the unique constraint + * error will be ignored. + * + * Default: `"error"` + */ + onDuplicate?: "error" | "update" | "replace" | "ignore"; + /** + * If set to `true`, the import will abort if any error occurs. + */ + complete?: boolean; + /** + * Whether the response should contain additional details about documents + * that could not be imported. + */ + details?: boolean; +}; + +/** + * Options for retrieving a document's edges from a collection. + */ +export type DocumentEdgesOptions = { + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; +}; +//#endregion + +//#region Document operation results +/** + * Represents a bulk operation failure for an individual document. + */ +export type DocumentOperationFailure = { + /** + * Indicates that the operation failed. + */ + error: true; + /** + * Human-readable description of the failure. + */ + errorMessage: string; + /** + * Numeric representation of the failure. + */ + errorNum: number; +}; + +/** + * Metadata returned by a document operation. + */ +export type DocumentOperationMetadata = DocumentMetadata & { + /** + * Revision of the document that was updated or replaced by this operation. + */ + _oldRev?: string; +}; + +/** + * Result of a collection bulk import. + */ +export type ImportDocumentsResult = { + /** + * Whether the import failed. + */ + error: false; + /** + * Number of new documents imported. + */ + created: number; + /** + * Number of documents that failed with an error. + */ + errors: number; + /** + * Number of empty documents. + */ + empty: number; + /** + * Number of documents updated. + */ + updated: number; + /** + * Number of documents that failed with an error that is ignored. + */ + ignored: number; + /** + * Additional details about any errors encountered during the import. 
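// A minimal sketch of the bulk import options and result shape above, assuming
// a "users" collection and an array of plain document objects.
import { Database } from "arangojs";

const db = new Database();
const users = db.collection("users");

async function importUsers(docs: Record<string, any>[]) {
  const result = await users.import(docs, {
    onDuplicate: "update", // merge into existing documents instead of failing
    details: true, // include per-document error descriptions in the result
  });
  console.log(`created=${result.created} updated=${result.updated} errors=${result.errors}`);
  if (result.details) {
    for (const line of result.details) console.warn(line);
  }
}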
+ */ + details?: string[]; +}; + +/** + * Result of retrieving edges in a collection. + */ +export type DocumentEdgesResult = any> = { + edges: Edge[]; + stats: { + scannedIndex: number; + filtered: number; + }; +}; +//#endregion + +//#region Document selectors +/** + * A value that can be used to identify a document within a collection in + * arangojs methods, i.e. a partial ArangoDB document or the value of a + * document's `_key` or `_id`. + * + * See {@link DocumentMetadata}. + */ +export type DocumentSelector = ObjectWithDocumentId | ObjectWithDocumentKey | string; /** * An object with an ArangoDB document `_id` property. * - * See {@link documents.DocumentMetadata}. + * See {@link DocumentMetadata}. */ -export type ObjectWithId = { +export type ObjectWithDocumentId = { [key: string]: any; _id: string; }; @@ -94,22 +557,13 @@ export type ObjectWithId = { /** * An object with an ArangoDB document `_key` property. * - * See {@link documents.DocumentMetadata}. + * See {@link DocumentMetadata}. */ -export type ObjectWithKey = { +export type ObjectWithDocumentKey = { [key: string]: any; _key: string; }; -/** - * A value that can be used to identify a document within a collection in - * arangojs methods, i.e. a partial ArangoDB document or the value of a - * document's `_key` or `_id`. - * - * See {@link documents.DocumentMetadata}. - */ -export type DocumentSelector = ObjectWithId | ObjectWithKey | string; - /** * @internal */ @@ -140,3 +594,4 @@ export function _documentHandle( } return `${collectionName}/${selector}`; } +//#endregion \ No newline at end of file diff --git a/src/error.ts b/src/errors.ts similarity index 94% rename from src/error.ts rename to src/errors.ts index 84bcd72b7..ffb2107d3 100644 --- a/src/error.ts +++ b/src/errors.ts @@ -1,15 +1,15 @@ /** * ```ts - * import type { ArangoError, HttpError } from "arangojs/error.js"; + * import type { ArangoError, HttpError } from "arangojs/errors"; * ``` * - * The "error" module provides types and interfaces for TypeScript related + * The "errors" module provides types and interfaces for TypeScript related * to arangojs error handling. * * @packageDocumentation */ -import { ProcessedResponse } from "./connection.js"; +import * as connection from "./connection.js"; import { ERROR_ARANGO_MAINTENANCE_MODE } from "./lib/codes.js"; const messages: { [key: number]: string } = { @@ -271,12 +271,12 @@ export class HttpError extends NetworkError { /** * Server response object. */ - response: ProcessedResponse; + response: connection.ProcessedResponse; /** * @internal */ - constructor(response: ProcessedResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + constructor(response: connection.ProcessedResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { const message = messages[response.status] ?? messages[500]; super(message, { ...options, request: response.request }); this.response = response; @@ -329,7 +329,7 @@ export class ArangoError extends Error { * * Creates a new `ArangoError` from a response object. */ - static from(response: ProcessedResponse): ArangoError { + static from(response: connection.ProcessedResponse): ArangoError { return new ArangoError(response.parsedBody!, { cause: new HttpError(response) }); @@ -355,7 +355,7 @@ export class ArangoError extends Error { /** * Server response object. 
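// A hedged sketch of telling the error categories from the renamed "errors"
// module apart; the import path follows the diff above, and `isArangoError`
// is assumed to remain a runtime export of that module.
import { Database } from "arangojs";
import { isArangoError } from "arangojs/errors";

const db = new Database();

async function safeCount() {
  try {
    return await db.collection("users").count();
  } catch (err: any) {
    if (isArangoError(err)) {
      // The server answered with an ArangoDB error body.
      console.error(`ArangoDB error ${err.errorNum}: ${err.message}`);
    } else {
      // Anything else failed before a usable response was produced; the
      // originating error, if any, is carried on the `cause` property.
      console.error("request failed:", err.message, err.cause);
    }
    return null;
  }
}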
*/ - get response(): ProcessedResponse | undefined { + get response(): connection.ProcessedResponse | undefined { const cause = this.cause; if (cause instanceof HttpError) { return cause.response; diff --git a/src/foxx-manifest.ts b/src/foxx-manifest.ts index ad4cd1d4b..f11a40d35 100644 --- a/src/foxx-manifest.ts +++ b/src/foxx-manifest.ts @@ -1,6 +1,6 @@ /** * ```ts - * import type { FoxxManifest } from "arangojs/foxx-manifest.js"; + * import type { FoxxManifest } from "arangojs/foxx-manifest"; * ``` * * The "foxx-manifest" module provides the Foxx manifest type for TypeScript. @@ -103,14 +103,14 @@ export type Configuration = { * The type of value expected for this option. */ type: - | "integer" - | "boolean" - | "number" - | "string" - | "json" - | "password" - | "int" - | "bool"; + | "integer" + | "boolean" + | "number" + | "string" + | "json" + | "password" + | "int" + | "bool"; /** * The default value for this option in plain JSON. Can be omitted to provide no default value. */ diff --git a/src/graph.ts b/src/graphs.ts similarity index 79% rename from src/graph.ts rename to src/graphs.ts index e90284d80..5c7b29181 100644 --- a/src/graph.ts +++ b/src/graphs.ts @@ -4,52 +4,26 @@ * Graph, * GraphVertexCollection, * GraphEdgeCollection, - * } from "arangojs/graph.js"; + * } from "arangojs/graphs"; * ``` * - * The "graph" module provides graph related types and interfaces + * The "graphs" module provides graph related types and interfaces * for TypeScript. * * @packageDocumentation */ -import { - ArangoCollection, - collectionToString, - DocumentCollection, - EdgeCollection, -} from "./collection.js"; -import { Database } from "./database.js"; -import { - Document, - DocumentData, - DocumentMetadata, - DocumentSelector, - Edge, - EdgeData, - Patch, - _documentHandle, -} from "./documents.js"; -import { isArangoError } from "./error.js"; +import * as collections from "./collections.js"; +import * as databases from "./databases.js"; +import * as documents from "./documents.js"; +import * as errors from "./errors.js"; import { DOCUMENT_NOT_FOUND, GRAPH_NOT_FOUND } from "./lib/codes.js"; -/** - * Indicates whether the given value represents a {@link graph.Graph}. - * - * @param graph - A value that might be a Graph. - */ -export function isArangoGraph(graph: any): graph is Graph { - return Boolean(graph && graph.isArangoGraph); -} - /** * @internal */ function mungeGharialResponse(body: any, prop: "vertex" | "edge" | "removed") { - const { new: newDoc, old: oldDoc, [prop]: doc, ...meta } = body; - const result = { ...meta, ...doc }; - if (typeof newDoc !== "undefined") result.new = newDoc; - if (typeof oldDoc !== "undefined") result.old = oldDoc; - return result; + const { [prop]: doc, ...meta } = body; + return { ...meta, ...doc }; } /** @@ -57,20 +31,21 @@ function mungeGharialResponse(body: any, prop: "vertex" | "edge" | "removed") { */ function coerceEdgeDefinition(options: EdgeDefinitionOptions): EdgeDefinition { const edgeDefinition = {} as EdgeDefinition; - edgeDefinition.collection = collectionToString(options.collection); + edgeDefinition.collection = collections.collectionToString(options.collection); edgeDefinition.from = Array.isArray(options.from) - ? options.from.map(collectionToString) - : [collectionToString(options.from)]; + ? options.from.map(collections.collectionToString) + : [collections.collectionToString(options.from)]; edgeDefinition.to = Array.isArray(options.to) - ? options.to.map(collectionToString) - : [collectionToString(options.to)]; + ? 
options.to.map(collections.collectionToString) + : [collections.collectionToString(options.to)]; return edgeDefinition; } +//#region Graph document operation options /** * Options for retrieving a document from a graph collection. */ -export type GraphCollectionReadOptions = { +export type ReadGraphDocumentOptions = { /** * If set to a document revision, the document will only be returned if its * `_rev` property matches this value. @@ -98,7 +73,7 @@ export type GraphCollectionReadOptions = { /** * Options for inserting a document into a graph collection. */ -export type GraphCollectionInsertOptions = { +export type InsertGraphDocumentOptions = { /** * If set to `true`, data will be synchronized to disk before returning. * @@ -117,7 +92,7 @@ export type GraphCollectionInsertOptions = { /** * Options for replacing a document in a graph collection. */ -export type GraphCollectionReplaceOptions = { +export type ReplaceGraphDocumentOptions = { /** * If set to a document revision, the document will only be modified if its * `_rev` property matches this value. @@ -157,7 +132,7 @@ export type GraphCollectionReplaceOptions = { /** * Options for removing a document from a graph collection. */ -export type GraphCollectionRemoveOptions = { +export type RemoveGraphDocumentOptions = { /** * If set to a document revision, the document will only be removed if its * `_rev` property matches this value. @@ -179,47 +154,33 @@ export type GraphCollectionRemoveOptions = { */ returnOld?: boolean; }; +//#endregion +//#region Edge definition operation options /** - * Definition of a relation in a {@link graph.Graph}. - */ -export type EdgeDefinition = { - /** - * Name of the collection containing the edges. - */ - collection: string; - /** - * Array of names of collections containing the start vertices. - */ - from: string[]; - /** - * Array of names of collections containing the end vertices. - */ - to: string[]; -}; - -/** - * An edge definition used to define a collection of edges in a {@link graph.Graph}. + * An edge definition used to define a collection of edges in a {@link Graph}. */ export type EdgeDefinitionOptions = { /** * Collection containing the edges. */ - collection: string | ArangoCollection; + collection: string | collections.ArangoCollection; /** * Collection or collections containing the start vertices. */ - from: (string | ArangoCollection)[] | string | ArangoCollection; + from: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; /** * Collection or collections containing the end vertices. */ - to: (string | ArangoCollection)[] | string | ArangoCollection; + to: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; }; +//#endregion +//#region GraphDescription /** * General information about a graph. */ -export type GraphInfo = { +export type GraphDescription = { /** * Key of the document internally representing this graph. * @@ -294,6 +255,26 @@ export type GraphInfo = { isDisjoint?: boolean; }; +/** + * Definition of a relation in a {@link Graph}. + */ +export type EdgeDefinition = { + /** + * Name of the collection containing the edges. + */ + collection: string; + /** + * Array of names of collections containing the start vertices. + */ + from: string[]; + /** + * Array of names of collections containing the end vertices. + */ + to: string[]; +}; +//#endregion + +//#region Graph operation options /** * Option for creating a graph. */ @@ -309,7 +290,7 @@ export type CreateGraphOptions = { * Additional vertex collections. 
Documents within these collections do not * have edges within this graph. */ - orphanCollections?: (string | ArangoCollection)[] | string | ArangoCollection; + orphanCollections?: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; /** * (Cluster only.) Number of shards that is used for every collection @@ -356,25 +337,34 @@ export type CreateGraphOptions = { * (Enterprise Edition cluster only.) Collections to be included in a Hybrid * SmartGraph. */ - satellites?: (string | ArangoCollection)[]; + satellites?: (string | collections.ArangoCollection)[]; }; +/** + * Options for adding a vertex collection to a graph. + */ export type AddVertexCollectionOptions = { /** * (Enterprise Edition cluster only.) Collections to be included in a Hybrid * SmartGraph. */ - satellites?: (string | ArangoCollection)[]; + satellites?: (string | collections.ArangoCollection)[]; }; +/** + * Options for adding an edge definition to a graph. + */ export type AddEdgeDefinitionOptions = { /** * (Enterprise Edition cluster only.) Collections to be included in a Hybrid * SmartGraph. */ - satellites?: (string | ArangoCollection)[]; + satellites?: (string | collections.ArangoCollection)[]; }; +/** + * Options for replacing an edge definition in a graph. + */ export type ReplaceEdgeDefinitionOptions = { /** * (Enterprise Edition cluster only.) Collections to be included in a Hybrid @@ -382,23 +372,31 @@ export type ReplaceEdgeDefinitionOptions = { */ satellites?: string[]; }; +//#endregion +//#region GraphVertexCollection class /** - * Represents a {@link collection.DocumentCollection} of vertices in a {@link graph.Graph}. + * Represents a {@link collections.DocumentCollection} of vertices in a {@link Graph}. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent vertex document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent vertex document contents passed + * when inserting or replacing vertex documents (without computed properties). */ -export class GraphVertexCollection = any> - implements ArangoCollection { - protected _db: Database; +export class GraphVertexCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, +> + implements collections.ArangoCollection { + protected _db: databases.Database; protected _name: string; protected _graph: Graph; - protected _collection: DocumentCollection; + protected _collection: collections.DocumentCollection; /** * @internal */ - constructor(db: Database, name: string, graph: Graph) { + constructor(db: databases.Database, name: string, graph: Graph) { this._db = db; this._collection = db.collection(name); this._name = this._collection.name; @@ -429,14 +427,14 @@ export class GraphVertexCollection = any> } /** - * A {@link collection.DocumentCollection} instance for this vertex collection. + * A {@link collections.DocumentCollection} instance for this vertex collection. */ get collection() { return this._collection; } /** - * The {@link graph.Graph} instance this vertex collection is bound to. + * The {@link Graph} instance this vertex collection is bound to. 
*/ get graph() { return this._graph; @@ -462,14 +460,14 @@ export class GraphVertexCollection = any> * } * ``` */ - async vertexExists(selector: DocumentSelector): Promise { + async vertexExists(selector: documents.DocumentSelector): Promise { try { return await this._db.request( { method: "HEAD", path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, }, () => true ); @@ -516,9 +514,9 @@ export class GraphVertexCollection = any> * ``` */ async vertex( - selector: DocumentSelector, - options?: GraphCollectionReadOptions - ): Promise>; + selector: documents.DocumentSelector, + options?: ReadGraphDocumentOptions + ): Promise>; /** * Retrieves the vertex matching the given key or id. * @@ -555,13 +553,13 @@ export class GraphVertexCollection = any> * ``` */ async vertex( - selector: DocumentSelector, + selector: documents.DocumentSelector, graceful: boolean - ): Promise>; + ): Promise>; async vertex( - selector: DocumentSelector, - options: boolean | GraphCollectionReadOptions = {} - ): Promise | null> { + selector: documents.DocumentSelector, + options: boolean | ReadGraphDocumentOptions = {} + ): Promise | null> { if (typeof options === "boolean") { options = { graceful: options }; } @@ -577,7 +575,7 @@ export class GraphVertexCollection = any> { path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, headers, search, allowDirtyRead, @@ -588,7 +586,7 @@ export class GraphVertexCollection = any> try { return await result; } catch (err: any) { - if (isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { return null; } throw err; @@ -613,10 +611,10 @@ export class GraphVertexCollection = any> * ``` */ save( - data: DocumentData, - options?: GraphCollectionInsertOptions - ): Promise }>; - save(data: DocumentData, options?: GraphCollectionInsertOptions) { + data: documents.DocumentData, + options?: InsertGraphDocumentOptions + ): Promise }>; + save(data: documents.DocumentData, options?: InsertGraphDocumentOptions) { return this._db.request( { method: "POST", @@ -655,14 +653,14 @@ export class GraphVertexCollection = any> * ``` */ replace( - selector: DocumentSelector, - newValue: DocumentData, - options?: GraphCollectionReplaceOptions - ): Promise; old?: Document }>; + selector: documents.DocumentSelector, + newValue: documents.DocumentData, + options?: ReplaceGraphDocumentOptions + ): Promise; old?: documents.Document }>; replace( - selector: DocumentSelector, - newValue: DocumentData, - options: GraphCollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newValue: documents.DocumentData, + options: ReplaceGraphDocumentOptions = {} ) { if (typeof options === "string") { options = { rev: options }; @@ -675,7 +673,7 @@ export class GraphVertexCollection = any> method: "PUT", path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, @@ -709,14 +707,14 @@ export class GraphVertexCollection = any> * ``` */ update( - selector: DocumentSelector, - newValue: Patch>, - options?: GraphCollectionReplaceOptions - ): Promise; old?: Document }>; + 
selector: documents.DocumentSelector, + newValue: documents.Patch>, + options?: ReplaceGraphDocumentOptions + ): Promise; old?: documents.Document }>; update( - selector: DocumentSelector, - newValue: Patch>, - options: GraphCollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newValue: documents.Patch>, + options: ReplaceGraphDocumentOptions = {} ) { if (typeof options === "string") { options = { rev: options }; @@ -729,7 +727,7 @@ export class GraphVertexCollection = any> method: "PATCH", path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, @@ -766,12 +764,12 @@ export class GraphVertexCollection = any> * ``` */ remove( - selector: DocumentSelector, - options?: GraphCollectionRemoveOptions - ): Promise }>; + selector: documents.DocumentSelector, + options?: RemoveGraphDocumentOptions + ): Promise }>; remove( - selector: DocumentSelector, - options: GraphCollectionRemoveOptions = {} + selector: documents.DocumentSelector, + options: RemoveGraphDocumentOptions = {} ) { if (typeof options === "string") { options = { rev: options }; @@ -784,7 +782,7 @@ export class GraphVertexCollection = any> method: "DELETE", path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, search, headers, }, @@ -792,23 +790,31 @@ export class GraphVertexCollection = any> ); } } +//#endregion +//#region GraphEdgeCollection class /** - * Represents a {@link collection.EdgeCollection} of edges in a {@link graph.Graph}. + * Represents a {@link collections.EdgeCollection} of edges in a {@link Graph}. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent edge document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent edge document contents passed + * when inserting or replacing edge documents (without computed properties). */ -export class GraphEdgeCollection = any> - implements ArangoCollection { - protected _db: Database; +export class GraphEdgeCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, +> + implements collections.ArangoCollection { + protected _db: databases.Database; protected _name: string; protected _graph: Graph; - protected _collection: EdgeCollection; + protected _collection: collections.EdgeCollection; /** * @internal */ - constructor(db: Database, name: string, graph: Graph) { + constructor(db: databases.Database, name: string, graph: Graph) { this._db = db; this._collection = db.collection(name); this._name = this._collection.name; @@ -839,14 +845,14 @@ export class GraphEdgeCollection = any> } /** - * A {@link collection.EdgeCollection} instance for this edge collection. + * A {@link collections.EdgeCollection} instance for this edge collection. */ get collection() { return this._collection; } /** - * The {@link graph.Graph} instance this edge collection is bound to. + * The {@link Graph} instance this edge collection is bound to. 
*/ get graph() { return this._graph; @@ -872,14 +878,14 @@ export class GraphEdgeCollection = any> * } * ``` */ - async edgeExists(selector: DocumentSelector): Promise { + async edgeExists(selector: documents.DocumentSelector): Promise { try { return await this._db.request( { method: "HEAD", path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, }, () => true ); @@ -926,9 +932,9 @@ export class GraphEdgeCollection = any> * ``` */ async edge( - selector: DocumentSelector, - options?: GraphCollectionReadOptions - ): Promise>; + selector: documents.DocumentSelector, + options?: ReadGraphDocumentOptions + ): Promise>; /** * Retrieves the edge matching the given key or id. * @@ -964,11 +970,11 @@ export class GraphEdgeCollection = any> * } * ``` */ - async edge(selector: DocumentSelector, graceful: boolean): Promise>; + async edge(selector: documents.DocumentSelector, graceful: boolean): Promise>; async edge( - selector: DocumentSelector, - options: boolean | GraphCollectionReadOptions = {} - ): Promise | null> { + selector: documents.DocumentSelector, + options: boolean | ReadGraphDocumentOptions = {} + ): Promise | null> { if (typeof options === "boolean") { options = { graceful: options }; } @@ -984,7 +990,7 @@ export class GraphEdgeCollection = any> { path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, search, allowDirtyRead, }, @@ -994,7 +1000,7 @@ export class GraphEdgeCollection = any> try { return await result; } catch (err: any) { - if (isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { return null; } throw err; @@ -1018,10 +1024,10 @@ export class GraphEdgeCollection = any> * ``` */ save( - data: EdgeData, - options?: GraphCollectionInsertOptions - ): Promise }>; - save(data: EdgeData, options?: GraphCollectionInsertOptions) { + data: documents.EdgeData, + options?: InsertGraphDocumentOptions + ): Promise }>; + save(data: documents.EdgeData, options?: InsertGraphDocumentOptions) { return this._db.request( { method: "POST", @@ -1068,14 +1074,14 @@ export class GraphEdgeCollection = any> * ``` */ replace( - selector: DocumentSelector, - newValue: EdgeData, - options?: GraphCollectionReplaceOptions - ): Promise; old?: Edge }>; + selector: documents.DocumentSelector, + newValue: documents.EdgeData, + options?: ReplaceGraphDocumentOptions + ): Promise; old?: documents.Edge }>; replace( - selector: DocumentSelector, - newValue: EdgeData, - options: GraphCollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newValue: documents.EdgeData, + options: ReplaceGraphDocumentOptions = {} ) { if (typeof options === "string") { options = { rev: options }; @@ -1088,7 +1094,7 @@ export class GraphEdgeCollection = any> method: "PUT", path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, @@ -1130,14 +1136,14 @@ export class GraphEdgeCollection = any> * ``` */ update( - selector: DocumentSelector, - newValue: Patch>, - options?: GraphCollectionReplaceOptions - ): Promise; old?: Edge }>; + selector: documents.DocumentSelector, + newValue: 
documents.Patch>, + options?: ReplaceGraphDocumentOptions + ): Promise; old?: documents.Edge }>; update( - selector: DocumentSelector, - newValue: Patch>, - options: GraphCollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newValue: documents.Patch>, + options: ReplaceGraphDocumentOptions = {} ) { if (typeof options === "string") { options = { rev: options }; @@ -1150,7 +1156,7 @@ export class GraphEdgeCollection = any> method: "PATCH", path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, @@ -1179,12 +1185,12 @@ export class GraphEdgeCollection = any> * ``` */ remove( - selector: DocumentSelector, - options?: GraphCollectionRemoveOptions - ): Promise }>; + selector: documents.DocumentSelector, + options?: RemoveGraphDocumentOptions + ): Promise }>; remove( - selector: DocumentSelector, - options: GraphCollectionRemoveOptions = {} + selector: documents.DocumentSelector, + options: RemoveGraphDocumentOptions = {} ) { if (typeof options === "string") { options = { rev: options }; @@ -1197,7 +1203,7 @@ export class GraphEdgeCollection = any> method: "DELETE", path: `/_api/gharial/${encodeURIComponent( this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, search, headers, }, @@ -1205,26 +1211,35 @@ export class GraphEdgeCollection = any> ); } } +//#endregion + +//#region Graph class +/** + * Indicates whether the given value represents a {@link Graph}. + * + * @param graph - A value that might be a Graph. + */ +export function isArangoGraph(graph: any): graph is Graph { + return Boolean(graph && graph.isArangoGraph); +} /** - * Represents a graph in a {@link database.Database}. + * Represents a graph in a {@link databases.Database}. */ export class Graph { protected _name: string; - protected _db: Database; + protected _db: databases.Database; /** * @internal */ - constructor(db: Database, name: string) { + constructor(db: databases.Database, name: string) { this._db = db; this._name = name; } /** - * @internal - * * Indicates that this object represents an ArangoDB Graph. */ get isArangoGraph(): true { @@ -1261,7 +1276,7 @@ export class Graph { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === GRAPH_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === GRAPH_NOT_FOUND) { return false; } throw err; @@ -1279,7 +1294,7 @@ export class Graph { * // data contains general information about the graph * ``` */ - get(): Promise { + get(): Promise { return this._db.request( { path: `/_api/gharial/${encodeURIComponent(this._name)}` }, (res) => res.parsedBody.graph @@ -1310,7 +1325,7 @@ export class Graph { create( edgeDefinitions: EdgeDefinitionOptions[], options: CreateGraphOptions = {} - ): Promise { + ): Promise { const { orphanCollections, satellites, waitForSync, isSmart, ...opts } = options; return this._db.request( @@ -1321,12 +1336,12 @@ export class Graph { orphanCollections: orphanCollections && (Array.isArray(orphanCollections) - ? orphanCollections.map(collectionToString) - : [collectionToString(orphanCollections)]), + ? 
orphanCollections.map(collections.collectionToString) + : [collections.collectionToString(orphanCollections)]), edgeDefinitions: edgeDefinitions.map(coerceEdgeDefinition), isSmart, name: this._name, - options: { ...opts, satellites: satellites?.map(collectionToString) }, + options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, }, search: { waitForSync }, }, @@ -1360,18 +1375,18 @@ export class Graph { } /** - * Returns a {@link graph.GraphVertexCollection} instance for the given collection + * Returns a {@link GraphVertexCollection} instance for the given collection * name representing the collection in this graph. * * @param T - Type to use for document data. Defaults to `any`. * @param collection - Name of the vertex collection. */ vertexCollection = any>( - collection: string | ArangoCollection + collection: string | collections.ArangoCollection ): GraphVertexCollection { return new GraphVertexCollection( this._db, - collectionToString(collection), + collections.collectionToString(collection), this ); } @@ -1380,7 +1395,7 @@ export class Graph { * Fetches all vertex collections of this graph from the database and returns * an array of their names. * - * See also {@link graph.Graph#vertexCollections}. + * See also {@link Graph#vertexCollections}. * * @example * ```js @@ -1406,9 +1421,9 @@ export class Graph { /** * Fetches all vertex collections of this graph from the database and returns - * an array of {@link graph.GraphVertexCollection} instances. + * an array of {@link GraphVertexCollection} instances. * - * See also {@link graph.Graph#listVertexCollections}. + * See also {@link Graph#listVertexCollections}. * * @example * ```js @@ -1451,17 +1466,17 @@ export class Graph { * ``` */ addVertexCollection( - collection: string | ArangoCollection, + collection: string | collections.ArangoCollection, options: AddVertexCollectionOptions = {} - ): Promise { + ): Promise { const { satellites, ...opts } = options; return this._db.request( { method: "POST", path: `/_api/gharial/${encodeURIComponent(this._name)}/vertex`, body: { - collection: collectionToString(collection), - options: { ...opts, satellites: satellites?.map(collectionToString) }, + collection: collections.collectionToString(collection), + options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, }, }, (res) => res.parsedBody.graph @@ -1491,15 +1506,15 @@ export class Graph { * ``` */ removeVertexCollection( - collection: string | ArangoCollection, + collection: string | collections.ArangoCollection, dropCollection: boolean = false - ): Promise { + ): Promise { return this._db.request( { method: "DELETE", path: `/_api/gharial/${encodeURIComponent( this._name - )}/vertex/${encodeURIComponent(collectionToString(collection))}`, + )}/vertex/${encodeURIComponent(collections.collectionToString(collection))}`, search: { dropCollection, }, @@ -1509,7 +1524,7 @@ export class Graph { } /** - * Returns a {@link graph.GraphEdgeCollection} instance for the given collection + * Returns a {@link GraphEdgeCollection} instance for the given collection * name representing the collection in this graph. * * @param T - Type to use for document data. Defaults to `any`. 
@@ -1532,11 +1547,11 @@ export class Graph { * ``` */ edgeCollection = any>( - collection: string | ArangoCollection + collection: string | collections.ArangoCollection ): GraphEdgeCollection { return new GraphEdgeCollection( this._db, - collectionToString(collection), + collections.collectionToString(collection), this ); } @@ -1545,7 +1560,7 @@ export class Graph { * Fetches all edge collections of this graph from the database and returns * an array of their names. * - * See also {@link graph.Graph#edgeCollections}. + * See also {@link Graph#edgeCollections}. * * @example * ```js @@ -1571,9 +1586,9 @@ export class Graph { /** * Fetches all edge collections of this graph from the database and returns - * an array of {@link graph.GraphEdgeCollection} instances. + * an array of {@link GraphEdgeCollection} instances. * - * See also {@link graph.Graph#listEdgeCollections}. + * See also {@link Graph#listEdgeCollections}. * * @example * ```js @@ -1618,7 +1633,7 @@ export class Graph { addEdgeDefinition( edgeDefinition: EdgeDefinitionOptions, options: AddEdgeDefinitionOptions = {} - ): Promise { + ): Promise { const { satellites, ...opts } = options; return this._db.request( { @@ -1626,7 +1641,7 @@ export class Graph { path: `/_api/gharial/${encodeURIComponent(this._name)}/edge`, body: { ...coerceEdgeDefinition(edgeDefinition), - options: { ...opts, satellites: satellites?.map(collectionToString) }, + options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, }, }, (res) => res.parsedBody.graph @@ -1661,7 +1676,7 @@ export class Graph { replaceEdgeDefinition( edgeDefinition: EdgeDefinitionOptions, options?: ReplaceEdgeDefinitionOptions - ): Promise; + ): Promise; /** * Replaces an edge definition in this graph. The existing edge definition * for the given edge collection will be overwritten. 
@@ -1689,14 +1704,14 @@ export class Graph { * ``` */ replaceEdgeDefinition( - collection: string | ArangoCollection, + collection: string | collections.ArangoCollection, edgeDefinition: EdgeDefinitionOptions, options?: ReplaceEdgeDefinitionOptions - ): Promise; + ): Promise; replaceEdgeDefinition( collectionOrEdgeDefinitionOptions: | string - | ArangoCollection + | collections.ArangoCollection | EdgeDefinitionOptions, edgeDefinitionOrOptions?: | EdgeDefinitionOptions @@ -1705,7 +1720,7 @@ export class Graph { ) { let collection = collectionOrEdgeDefinitionOptions as | string - | ArangoCollection; + | collections.ArangoCollection; let edgeDefinition = edgeDefinitionOrOptions as EdgeDefinitionOptions; if ( edgeDefinitionOrOptions && @@ -1725,10 +1740,10 @@ export class Graph { method: "PUT", path: `/_api/gharial/${encodeURIComponent( this._name - )}/edge/${encodeURIComponent(collectionToString(collection))}`, + )}/edge/${encodeURIComponent(collections.collectionToString(collection))}`, body: { ...coerceEdgeDefinition(edgeDefinition), - options: { ...opts, satellites: satellites?.map(collectionToString) }, + options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, }, }, (res) => res.parsedBody.graph @@ -1758,15 +1773,15 @@ export class Graph { * ``` */ removeEdgeDefinition( - collection: string | ArangoCollection, + collection: string | collections.ArangoCollection, dropCollection: boolean = false - ): Promise { + ): Promise { return this._db.request( { method: "DELETE", path: `/_api/gharial/${encodeURIComponent( this._name - )}/edge/${encodeURIComponent(collectionToString(collection))}`, + )}/edge/${encodeURIComponent(collections.collectionToString(collection))}`, search: { dropCollection, }, @@ -1775,3 +1790,4 @@ export class Graph { ); } } +//#endregion \ No newline at end of file diff --git a/src/hot-backups.ts b/src/hot-backups.ts new file mode 100644 index 000000000..5b587b170 --- /dev/null +++ b/src/hot-backups.ts @@ -0,0 +1,73 @@ +/** + * ```ts + * import type { HotBackupOptions } from "arangojs/hot-backups"; + * ``` + * + * The "hot-backups" module provides types for managing hot backups. + * + * @packageDocumentation + */ + +/** + * (Enterprise Edition only.) Options for creating a hot backup. + */ +export type HotBackupOptions = { + /** + * If set to `true` and no global transaction lock can be acquired within the + * given timeout, a possibly inconsistent backup is taken. + * + * Default: `false` + */ + allowInconsistent?: boolean; + /** + * (Enterprise Edition cluster only.) If set to `true` and no global + * transaction lock can be acquired within the given timeout, all running + * transactions are forcefully aborted to ensure that a consistent backup + * can be created. + * + * Default: `false`. + */ + force?: boolean; + /** + * Label to appended to the backup's identifier. + * + * Default: If omitted or empty, a UUID will be generated. + */ + label?: string; + /** + * Time in seconds that the operation will attempt to get a consistent + * snapshot. + * + * Default: `120`. + */ + timeout?: number; +}; + +/** + * (Enterprise Edition only.) Result of a hot backup. + */ +export type HotBackupResult = { + id: string; + potentiallyInconsistent: boolean; + sizeInBytes: number; + datetime: string; + nrDBServers: number; + nrFiles: number; +}; + +/** + * (Enterprise Edition only.) List of known hot backups. 
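// A hedged sketch of the hot backup types above (Enterprise Edition only);
// method names follow the renamed Database API shown earlier in this patch,
// and the shape of the listing result is the HotBackupList type just below.
import { Database } from "arangojs";

const db = new Database();

async function backupNow() {
  const backup = await db.createHotBackup({
    label: "nightly",
    allowInconsistent: false,
    timeout: 120,
  });
  console.log(backup.id, backup.sizeInBytes, backup.potentiallyInconsistent);

  // The listing maps backup ids to their metadata.
  const backups = await db.getHotBackups();
  for (const [id, info] of Object.entries(backups.list)) {
    console.log(id, info.datetime, info.available);
  }
}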
+ */ +export type HotBackupList = { + server: string; + list: Record< + string, + HotBackupResult & { + version: string; + keys: any[]; + available: boolean; + nrPiecesPresent: number; + countIncludesFilesOnly: boolean; + } + >; +}; diff --git a/src/index.ts b/src/index.ts index 8972b0434..4a814fddd 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,12 +8,12 @@ * * If you are just getting started, you probably want to use the * {@link arangojs} function, which is also the default export of this module, - * or the {@link database.Database} class for which it is a wrapper. + * or the {@link databases.Database} class for which it is a wrapper. * * @packageDocumentation */ -import { Config } from "./connection.js"; -import { Database } from "./database.js"; +import * as connection from "./connection.js"; +import * as databases from "./databases.js"; if (typeof module !== "undefined" && typeof exports !== "undefined") { module.exports = exports = arangojs; @@ -21,7 +21,7 @@ if (typeof module !== "undefined" && typeof exports !== "undefined") { /** * Creates a new `Database` instance with its own connection pool. * - * This is a wrapper function for the {@link database.Database:constructor}. + * This is a wrapper function for the {@link databases.Database:constructor}. * * @param config - An object with configuration options. * @@ -34,11 +34,11 @@ if (typeof module !== "undefined" && typeof exports !== "undefined") { * }); * ``` */ -export function arangojs(config?: Config): Database; +export function arangojs(config?: connection.Config): databases.Database; /** * Creates a new `Database` instance with its own connection pool. * - * This is a wrapper function for the {@link database.Database:constructor}. + * This is a wrapper function for the {@link databases.Database:constructor}. * * @param url - Base URL of the ArangoDB server or list of server URLs. * Equivalent to the `url` option in {@link connection.Config}. @@ -49,14 +49,14 @@ export function arangojs(config?: Config): Database; * db.useBasicAuth("admin", "hunter2"); * ``` */ -export function arangojs(url: string | string[], name?: string): Database; -export function arangojs(config?: string | string[] | Config, name?: string) { +export function arangojs(url: string | string[], name?: string): databases.Database; +export function arangojs(config?: string | string[] | connection.Config, name?: string) { if (typeof config === "string" || Array.isArray(config)) { const url = config; - return new Database(url, name); + return new databases.Database(url, name); } - return new Database(config); + return new databases.Database(config); } export default arangojs; export { aql } from "./aql.js"; -export { Database } from "./database.js"; +export { Database } from "./databases.js"; diff --git a/src/indexes.ts b/src/indexes.ts index 8be5833c5..2d63f6557 100644 --- a/src/indexes.ts +++ b/src/indexes.ts @@ -7,7 +7,7 @@ * PersistentIndex, * PrimaryIndex, * TtlIndex, - * } from "arangojs/indexes.js"; + * } from "arangojs/indexes"; * ``` * * The "indexes" module provides index-related types for TypeScript. @@ -15,53 +15,71 @@ * @packageDocumentation */ -import { AnalyzerFeature } from "./analyzer.js"; -import { Compression, Direction, TierConsolidationPolicy } from "./view.js"; +import * as analyzers from "./analyzers.js"; +import * as views from "./views.js"; +//#region Shared types /** - * Options for creating a persistent index. + * Type of an index. + */ +export type IndexType = IndexDescription["type"]; + +/** + * Type of an internal index. 
+ */ +export type InternalIndexType = InternalIndexDescription["type"]; +//#endregion + +//#region Index operation options +/** + * Options for listing indexes. */ -export type EnsurePersistentIndexOptions = { +export type ListIndexesOptions = { /** - * Type of this index. + * If set to `true`, includes additional information about each index. + * + * Default: `false` */ - type: "persistent"; + withStats?: boolean; /** - * An array of attribute paths. + * If set to `true`, includes internal indexes as well as indexes that are + * not yet fully built but are in the building phase. + * + * You should cast the resulting indexes to `HiddenIndex` to ensure internal + * and incomplete indexes are accurately represented. + * + * Default: `false`. */ - fields: string[]; + withHidden?: boolean; +}; + +/** + * Options for creating an index. + */ +export type EnsureIndexOptions = + | EnsurePersistentIndexOptions + | EnsureGeoIndexOptions + | EnsureTtlIndexOptions + | EnsureMdiIndexOptions + | EnsureInvertedIndexOptions; + +type EnsureIndexOptionsType< + Type extends IndexType, + Fields extends any[], + Extra extends {} = {} +> = { /** * A unique name for this index. */ name?: string; /** - * If set to `true`, a unique index will be created. - * - * Default: `false` - */ - unique?: boolean; - /** - * If set to `true`, the index will omit documents that do not contain at - * least one of the attribute paths in `fields` and these documents will be - * ignored for uniqueness checks. - * - * Default: `false` - */ - sparse?: boolean; - /** - * If set to `false`, inserting duplicate index values from the same - * document will lead to a unique constraint error if this is a unique index. - * - * Default: `true` + * Type of this index. */ - deduplicate?: boolean; + type: Type; /** - * If set to `false`, index selectivity estimates will be disabled for this - * index. - * - * Default: `true` + * An array of attribute paths. */ - estimates?: boolean; + fields: Fields; /** * If set to `true`, the index will be created in the background to reduce * the write-lock duration for the collection during index creation. @@ -69,64 +87,74 @@ export type EnsurePersistentIndexOptions = { * Default: `false` */ inBackground?: boolean; - /** - * If set to `true`, an in-memory hash cache will be put in front of the - * persistent index. - * - * Default: `false` - */ - cacheEnabled?: boolean; - /** - * An array of attribute paths that will be stored in the index but can not - * be used for index lookups or sorting but can avoid full document lookups. - */ - storedValues?: string[]; -}; +} & Extra; /** - * Options for creating a geo index. + * Options for creating a persistent index. */ -export type EnsureGeoIndexOptions = - | { - type: "geo"; +export type EnsurePersistentIndexOptions = EnsureIndexOptionsType< + "persistent", string[], + { /** - * If set to `true`, `fields` must be an array containing a single attribute - * path and the attribute value must be an array with two values, the first - * of which will be interpreted as the longitude and the second of which will - * be interpreted as the latitude of the document. + * If set to `true`, a unique index will be created. * * Default: `false` */ - geoJson?: false; + unique?: boolean; /** - * If set to `true`, the index will use pre-3.10 rules for parsing - * GeoJSON polygons. This option is always implicitly `true` when using - * ArangoDB 3.9 or lower. 
+ * If set to `true`, the index will omit documents that do not contain at + * least one of the attribute paths in `fields` and these documents will be + * ignored for uniqueness checks. + * + * Default: `false` */ - legacyPolygons?: boolean; + sparse?: boolean; /** - * Attribute paths for the document's latitude and longitude values. + * If set to `false`, inserting duplicate index values from the same + * document will lead to a unique constraint error if this is a unique index. + * + * Default: `true` */ - fields: [string, string]; + deduplicate?: boolean; /** - * A unique name for this index. + * If set to `false`, index selectivity estimates will be disabled for this + * index. + * + * Default: `true` */ - name?: string; + estimates?: boolean; /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. + * If set to `true`, an in-memory hash cache will be put in front of the + * persistent index. * * Default: `false` */ - inBackground?: boolean; + cacheEnabled?: boolean; + /** + * An array of attribute paths that will be stored in the index but can not + * be used for index lookups or sorting but can avoid full document lookups. + */ + storedValues?: string[]; } - | { - type: "geo"; +>; + +/** + * Options for creating a geo index. + */ +export type EnsureGeoIndexOptions = EnsureIndexOptionsType< + "geo", [string, string] | [string], + { /** * If set to `true`, `fields` must be an array containing a single attribute * path and the attribute value must be an array with two values, the first - * of which will be interpreted as the longitude and the second of which will - * be interpreted as the latitude of the document. + * of which will be interpreted as the longitude and the second of which + * will be interpreted as the latitude of the document. + * + * If set to `false`, `fields` can be either an array containing two + * attribute paths, the first of which will be interpreted as the latitude + * and the second as the longitude, or a single attribute path for an array + * containing two values, the first of which will be interpreted as the + * latitude, the second as the longitude. * * Default: `false` */ @@ -137,125 +165,170 @@ export type EnsureGeoIndexOptions = * ArangoDB 3.9 or lower. */ legacyPolygons?: boolean; - /** - * An array containing the attribute path for an array containing two values, - * the first of which will be interpreted as the latitude, the second as the - * longitude. If `geoJson` is set to `true`, the order is reversed to match - * the GeoJSON format. - */ - fields: [string]; - /** - * A unique name for this index. - */ - name?: string; - /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. - * - * Default: `false` - */ - inBackground?: boolean; - }; + } +>; /** * Options for creating a TTL index. */ -export type EnsureTtlIndexOptions = { - /** - * Type of this index. - */ - type: "ttl"; - /** - * An array containing exactly one attribute path. - */ - fields: [string]; - /** - * A unique name for this index. - */ - name?: string; - /** - * Duration in seconds after the attribute value at which the document will - * be considered as expired. - */ - expireAfter: number; - /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. 
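As a brief illustration of the consolidated `EnsureIndexOptionsType` helper, the persistent and geo option shapes above are the ones passed to the existing `collection.ensureIndex` method:

```ts
import { Database } from "arangojs";

const db = new Database();
const users = db.collection("users");

// Unique persistent index over the email attribute.
await users.ensureIndex({
  type: "persistent",
  fields: ["email"],
  unique: true,
  name: "idx-users-email",
});

// Geo index over a single GeoJSON attribute.
await users.ensureIndex({
  type: "geo",
  fields: ["location"],
  geoJson: true,
});
```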
- * - * Default: `false` - */ - inBackground?: boolean; -}; +export type EnsureTtlIndexOptions = EnsureIndexOptionsType< + "ttl", [string], + { + /** + * Duration in seconds after the attribute value at which the document will + * be considered as expired. + */ + expireAfter: number; + } +>; /** * Options for creating a MDI index. */ -export type EnsureMdiIndexOptions = { - /** - * Type of this index. - */ - type: "mdi"; - /** - * An array containing attribute paths for the dimensions. - */ - fields: string[]; - /** - * Data type of the dimension attributes. - */ - fieldValueTypes: "double"; - /** - * A unique name for this index. - */ - name?: string; - /** - * If set to `true`, a unique index will be created. - * - * Default: `false` - */ - unique?: boolean; - /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. - * - * Default: `false` - */ - inBackground?: boolean; -}; +export type EnsureMdiIndexOptions = EnsureIndexOptionsType< + "mdi", string[], + { + /** + * Data type of the dimension attributes. + */ + fieldValueTypes: "double"; + /** + * If set to `true`, a unique index will be created. + * + * Default: `false` + */ + unique?: boolean; + } +>; /** - * (Enterprise Edition only.) Options for a nested field in an inverted index. + * Options for creating an inverted index. */ -export type InvertedIndexNestedFieldOptions = { - /** - * An attribute path. - */ - name: string; - /** - * Name of the Analyzer to apply to the values of this field. - * - * Defaults to the `analyzer` specified on the parent options or on the index - * itself. - */ - analyzer?: string; - /** - * List of Analyzer features to enable for this field's Analyzer. - * - * Defaults to the features of the Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * If set to `true` array values will be indexed using the same behavior as - * ArangoSearch Views. This option only applies when using the index in a - * SearchAlias View. - * - * Defaults to the value of `searchField` specified on the index itself. - */ - searchField?: boolean; - /** - * Sub-objects to index to allow querying for co-occurring values. - */ - nested?: (string | InvertedIndexNestedFieldOptions)[]; -}; +export type EnsureInvertedIndexOptions = EnsureIndexOptionsType< + "inverted", (string | InvertedIndexFieldOptions)[], + { + /** + * If set to `true` array values will by default be indexed using the same + * behavior as ArangoSearch Views. This option only applies when using the + * index in a SearchAlias View. + * + * Default: `false` + */ + searchField?: boolean; + /** + * An array of attribute paths that will be stored in the index but can not + * be used for index lookups or sorting but can avoid full document lookups. + */ + storedValues?: InvertedIndexStoredValueOptions[]; + /** + * Primary sort order to optimize AQL queries using a matching sort order. + */ + primarySort?: InvertedIndexPrimarySortOptions; + /** + * (Enterprise Edition only.) If set to `true`, then the primary key column + * will always be cached in memory. + * + * Default: `false` + */ + primaryKeyCache?: boolean; + /** + * Name of the default Analyzer to apply to the values of indexed fields. + * + * Default: `"identity"` + */ + analyzer?: string; + /** + * List of Analyzer features to enable for the default Analyzer. + * + * Defaults to the Analyzer's features. 
+ */ + features?: analyzers.AnalyzerFeature[]; + /** + * If set to `true`, all document attributes are indexed, excluding any + * sub-attributes configured in the `fields` array. The `analyzer` and + * `features` properties apply to the sub-attributes. This option only + * applies when using the index in a SearchAlias View. + * + * Default: `false` + */ + includeAllFields?: boolean; + /** + * If set to `true`, the position of values in array values are tracked and + * need to be specified in queries. Otherwise all values in an array are + * treated as equivalent. This option only applies when using the index in a + * SearchAlias View. + * + * Default: `false` + */ + trackListPositions?: boolean; + /** + * The number of threads to use for indexing the fields. + * + * Default: `2` + */ + parallelism?: number; + /** + * Wait at least this many commits between removing unused files in the + * ArangoSearch data directory. + * + * Default: `2` + */ + cleanupIntervalStep?: number; + /** + * Wait at least this many milliseconds between committing View data store + * changes and making documents visible to queries. + * + * Default: `1000` + */ + commitIntervalMsec?: number; + /** + * Wait at least this many milliseconds between applying + * `consolidationPolicy` to consolidate View data store and possibly release + * space on the filesystem. + * + * Default: `1000` + */ + consolidationIntervalMsec?: number; + /** + * The consolidation policy to apply for selecting which segments should be + * merged. + * + * Default: `{ type: "tier" }` + */ + consolidationPolicy?: views.TierConsolidationPolicy; + /** + * Maximum number of writers (segments) cached in the pool. + * + * Default: `64` + */ + writeBufferIdle?: number; + /** + * Maximum number of concurrent active writers (segments) that perform a + * transaction. + * + * Default: `0` (disabled) + */ + writeBufferActive?: number; + /** + * Maximum memory byte size per writer (segment) before a writer (segment) + * flush is triggered. + * + * Default: `33554432` (32 MiB) + */ + writeBufferSizeMax?: number; + /** + * (Enterprise Edition only.) If set to `true`, then field normalization + * values will always be cached in memory. + * + * Default: `false` + */ + cache?: boolean; + /** + * An array of strings defining sort expressions to optimize. + */ + optimizeTopK?: string[]; + } +>; /** * Options for an attribute path in an inverted index. @@ -276,7 +349,7 @@ export type InvertedIndexFieldOptions = { * * Defaults to the features of the Analyzer. */ - features?: AnalyzerFeature[]; + features?: analyzers.AnalyzerFeature[]; /** * If set to `true`, all document attributes are indexed, excluding any * sub-attributes configured in the `fields` array. The `analyzer` and @@ -319,22 +392,22 @@ export type InvertedIndexFieldOptions = { }; /** - * Options for defining a stored value on an inverted index. + * Options for defining a primary sort field on an inverted index. */ -export type InvertedIndexStoredValueOptions = { +export type InvertedIndexPrimarySortOptions = { /** - * The attribute paths to store. + * An array of fields to sort the index by. */ - fields: string[]; + fields: InvertedIndexPrimarySortFieldOptions[]; /** - * How the attribute values should be compressed. + * How the primary sort data should be compressed. * * Default: `"lz4"` */ - compression?: Compression; + compression?: views.Compression; /** - * (Enterprise Edition only.) If set to `true`, then stored values will - * always be cached in memory. + * (Enterprise Edition only.) 
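A short sketch of creating an inverted index with the options defined above; the `text_en` Analyzer name is only an example:

```ts
import { Database } from "arangojs";

const db = new Database();
const articles = db.collection("articles");

await articles.ensureIndex({
  type: "inverted",
  fields: ["title", { name: "body", analyzer: "text_en" }],
  primarySort: { fields: [{ field: "title", direction: "asc" }] },
  storedValues: [{ fields: ["author"] }],
});
```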
If set to `true`, then primary sort columns + * will always be cached in memory. * * Default: `false` */ @@ -352,186 +425,90 @@ export type InvertedIndexPrimarySortFieldOptions = { /** * The sorting direction. */ - direction: Direction; + direction: views.Direction; }; /** - * Options for creating an inverted index. + * (Enterprise Edition only.) Options for a nested field in an inverted index. */ -export type EnsureInvertedIndexOptions = { - /** - * Type of this index. - */ - type: "inverted"; - /** - * An array of attribute paths or objects specifying options for the fields. - */ - fields: (string | InvertedIndexFieldOptions)[]; - /** - * A unique name for this index. - */ - name?: string; - /** - * If set to `true` array values will by default be indexed using the same - * behavior as ArangoSearch Views. This option only applies when using the - * index in a SearchAlias View. - * - * Default: `false` - */ - searchField?: boolean; - /** - * An array of attribute paths that will be stored in the index but can not - * be used for index lookups or sorting but can avoid full document lookups. - */ - storedValues?: InvertedIndexStoredValueOptions[]; - /** - * Primary sort order to optimize AQL queries using a matching sort order. - */ - primarySort?: { - /** - * An array of fields to sort the index by. - */ - fields: InvertedIndexPrimarySortFieldOptions[]; - /** - * How the primary sort data should be compressed. - * - * Default: `"lz4"` - */ - compression?: Compression; - /** - * (Enterprise Edition only.) If set to `true`, then primary sort columns - * will always be cached in memory. - * - * Default: `false` - */ - cache?: boolean; - }; +export type InvertedIndexNestedFieldOptions = { /** - * (Enterprise Edition only.) If set to `true`, then the primary key column - * will always be cached in memory. - * - * Default: `false` + * An attribute path. */ - primaryKeyCache?: boolean; + name: string; /** - * Name of the default Analyzer to apply to the values of indexed fields. + * Name of the Analyzer to apply to the values of this field. * - * Default: `"identity"` + * Defaults to the `analyzer` specified on the parent options or on the index + * itself. */ analyzer?: string; /** - * List of Analyzer features to enable for the default Analyzer. - * - * Defaults to the Analyzer's features. - */ - features?: AnalyzerFeature[]; - /** - * If set to `true`, all document attributes are indexed, excluding any - * sub-attributes configured in the `fields` array. The `analyzer` and - * `features` properties apply to the sub-attributes. This option only - * applies when using the index in a SearchAlias View. + * List of Analyzer features to enable for this field's Analyzer. * - * Default: `false` + * Defaults to the features of the Analyzer. */ - includeAllFields?: boolean; + features?: analyzers.AnalyzerFeature[]; /** - * If set to `true`, the position of values in array values are tracked and - * need to be specified in queries. Otherwise all values in an array are - * treated as equivalent. This option only applies when using the index in a + * If set to `true` array values will be indexed using the same behavior as + * ArangoSearch Views. This option only applies when using the index in a * SearchAlias View. * - * Default: `false` - */ - trackListPositions?: boolean; - /** - * The number of threads to use for indexing the fields. - * - * Default: `2` - */ - parallelism?: number; - /** - * Wait at least this many commits between removing unused files in the - * ArangoSearch data directory. 
- * - * Default: `2` - */ - cleanupIntervalStep?: number; - /** - * Wait at least this many milliseconds between committing View data store - * changes and making documents visible to queries. - * - * Default: `1000` - */ - commitIntervalMsec?: number; - /** - * Wait at least this many milliseconds between applying - * `consolidationPolicy` to consolidate View data store and possibly release - * space on the filesystem. - * - * Default: `1000` - */ - consolidationIntervalMsec?: number; - /** - * The consolidation policy to apply for selecting which segments should be - * merged. - * - * Default: `{ type: "tier" }` - */ - consolidationPolicy?: TierConsolidationPolicy; - /** - * Maximum number of writers (segments) cached in the pool. - * - * Default: `64` + * Defaults to the value of `searchField` specified on the index itself. */ - writeBufferIdle?: number; + searchField?: boolean; /** - * Maximum number of concurrent active writers (segments) that perform a - * transaction. - * - * Default: `0` (disabled) + * Sub-objects to index to allow querying for co-occurring values. */ - writeBufferActive?: number; + nested?: (string | InvertedIndexNestedFieldOptions)[]; +}; + +/** + * Options for defining a stored value on an inverted index. + */ +export type InvertedIndexStoredValueOptions = { /** - * Maximum memory byte size per writer (segment) before a writer (segment) - * flush is triggered. - * - * Default: `33554432` (32 MiB) + * The attribute paths to store. */ - writeBufferSizeMax?: number; + fields: string[]; /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. + * How the attribute values should be compressed. * - * Default: `false` + * Default: `"lz4"` */ - inBackground?: boolean; + compression?: views.Compression; /** - * (Enterprise Edition only.) If set to `true`, then field normalization - * values will always be cached in memory. + * (Enterprise Edition only.) If set to `true`, then stored values will + * always be cached in memory. * * Default: `false` */ cache?: boolean; - /** - * An array of strings defining sort expressions to optimize. - */ - optimizeTopK?: string[]; }; +//#endregion +//#region IndexDescription /** - * Options for creating an index. + * An object representing an index. */ -export type EnsureIndexOptions = - | EnsurePersistentIndexOptions - | EnsureGeoIndexOptions - | EnsureTtlIndexOptions - | EnsureMdiIndexOptions - | EnsureInvertedIndexOptions; +export type IndexDescription = + | GeoIndexDescription + | PersistentIndexDescription + | TtlIndexDescription + | MdiIndexDescription + | InvertedIndexDescription + | SystemIndexDescription; + +/** + * An object representing a system index. + */ +export type SystemIndexDescription = + | PrimaryIndexDescription; /** * Shared attributes of all index types. */ -export type GenericIndex = { +export type IndexDescriptionType = { /** * A unique name for this index. */ @@ -540,6 +517,14 @@ export type GenericIndex = { * A unique identifier for this index. */ id: string; + /** + * Type of this index. + */ + type: Type; + /** + * An array of attribute paths. + */ + fields: Fields; /** * Whether documents not containing at least one of the attribute paths * are omitted by this index. @@ -553,123 +538,134 @@ export type GenericIndex = { * Additional stats about this index. */ figures?: Record; -}; +} & Extra; /** * An object representing a persistent index. 
*/ -export type PersistentIndex = GenericIndex & { - type: "persistent"; - fields: string[]; - cacheEnabled: boolean; - deduplicate: boolean; - estimates: boolean; - storedValues?: string[]; -}; +export type PersistentIndexDescription = IndexDescriptionType< + "persistent", string[], + { + cacheEnabled: boolean; + deduplicate: boolean; + estimates: boolean; + storedValues?: string[]; + } +>; /** * An object representing a primary index. */ -export type PrimaryIndex = GenericIndex & { - type: "primary"; - fields: string[]; - selectivityEstimate: number; -}; +export type PrimaryIndexDescription = IndexDescriptionType< + "primary", string[], + { + selectivityEstimate: number; + } +>; /** * An object representing a geo index. */ -export type GeoIndex = GenericIndex & { - type: "geo"; - fields: [string] | [string, string]; - geoJson: boolean; - legacyPolygons: boolean; - bestIndexedLevel: number; - worstIndexedLevel: number; - maxNumCoverCells: number; -}; +export type GeoIndexDescription = IndexDescriptionType< + "geo", [string] | [string, string], + { + geoJson: boolean; + legacyPolygons: boolean; + bestIndexedLevel: number; + worstIndexedLevel: number; + maxNumCoverCells: number; + } +>; /** * An object representing a TTL index. */ -export type TtlIndex = GenericIndex & { - type: "ttl"; - fields: [string]; - expireAfter: number; - selectivityEstimate: number; -}; +export type TtlIndexDescription = IndexDescriptionType< + "ttl", [string], + { + expireAfter: number; + selectivityEstimate: number; + } +>; /** * An object representing a MDI index. */ -export type MdiIndex = GenericIndex & { - type: "mdi"; - fields: string[]; - fieldValueTypes: "double"; -}; +export type MdiIndexDescription = IndexDescriptionType< + "mdi", string[], + { + fieldValueTypes: "double"; + } +>; /** - * (Enterprise Edition only.) An object representing a nested field in an - * inverted index. + * An object representing an inverted index. */ -export type InvertedIndexNestedField = { +export type InvertedIndexDescription = IndexDescriptionType< + "inverted", InvertedIndexField[], + { + searchField: boolean; + cache?: boolean; + storedValues: { + fields: string[]; + compression: views.Compression; + cache?: boolean; + }[]; + primarySort: { + fields: { + field: string; + direction: views.Direction; + }[]; + compression: views.Compression; + cache?: boolean; + }; + primaryKeyCache?: boolean; + analyzer: string; + features: analyzers.AnalyzerFeature[]; + includeAllFields: boolean; + trackListPositions: boolean; + parallelism: number; + cleanupIntervalStep: number; + commitIntervalMsec: number; + consolidationIntervalMsec: number; + consolidationPolicy: Required; + writeBufferIdle: number; + writeBufferActive: number; + writeBufferSizeMax: number; + optimizeTopK: string[]; + } +>; + +/** + * An object representing a field in an inverted index. + */ +export type InvertedIndexField = { name: string; analyzer?: string; - features?: AnalyzerFeature[]; + features?: analyzers.AnalyzerFeature[]; + includeAllFields?: boolean; searchField?: boolean; + trackListPositions?: boolean; nested?: InvertedIndexNestedField[]; + cache?: boolean; }; /** - * An object representing an inverted index. + * (Enterprise Edition only.) An object representing a nested field in an + * inverted index. 
*/ -export type InvertedIndex = GenericIndex & { - type: "inverted"; - fields: { - name: string; - analyzer?: string; - features?: AnalyzerFeature[]; - includeAllFields?: boolean; - searchField?: boolean; - trackListPositions?: boolean; - nested?: InvertedIndexNestedField[]; - cache?: boolean; - }[]; - searchField: boolean; - cache?: boolean; - storedValues: { - fields: string[]; - compression: Compression; - cache?: boolean; - }[]; - primarySort: { - fields: { - field: string; - direction: Direction; - }[]; - compression: Compression; - cache?: boolean; - }; - primaryKeyCache?: boolean; - analyzer: string; - features: AnalyzerFeature[]; - includeAllFields: boolean; - trackListPositions: boolean; - parallelism: number; - cleanupIntervalStep: number; - commitIntervalMsec: number; - consolidationIntervalMsec: number; - consolidationPolicy: Required; - writeBufferIdle: number; - writeBufferActive: number; - writeBufferSizeMax: number; - optimizeTopK: string[]; +export type InvertedIndexNestedField = { + name: string; + analyzer?: string; + features?: analyzers.AnalyzerFeature[]; + searchField?: boolean; + nested?: InvertedIndexNestedField[]; }; /** * An object representing an arangosearch index. */ -export type InternalArangosearchIndex = { +export type ArangosearchIndexDescription = { id: string; type: "arangosearch"; view: string; @@ -681,21 +677,10 @@ export type InternalArangosearchIndex = { storeValues: "none" | "id"; }; -/** - * An object representing an index. - */ -export type Index = - | GeoIndex - | PersistentIndex - | PrimaryIndex - | TtlIndex - | MdiIndex - | InvertedIndex; - /** * An object representing an internal index. */ -export type InternalIndex = InternalArangosearchIndex; +export type InternalIndexDescription = ArangosearchIndexDescription; /** * An object representing a potentially hidden index. @@ -712,33 +697,36 @@ export type InternalIndex = InternalArangosearchIndex; * // property * ``` */ -export type HiddenIndex = (Index | InternalArangosearchIndex) & { +export type HiddenIndexDescription = (IndexDescription | InternalIndexDescription) & { /** * Progress of this index if it is still being created. */ progress?: number; }; +//#endregion -export type IndexDetails = Index & { - figures?: Record; - progress?: number; -}; +//#region Index selectors +/** + * Index name, id or object with a `name` or `id` property. + */ +export type IndexSelector = ObjectWithIndexId | ObjectWithName | string; -export type ObjectWithId = { +/** + * An object with an `id` property. + */ +export type ObjectWithIndexId = { [key: string]: any; id: string; }; +/** + * An object with a `name` property. + */ export type ObjectWithName = { [key: string]: any; name: string; }; -/** - * Index name, id or object with a `name` or `id` property. - */ -export type IndexSelector = ObjectWithId | ObjectWithName | string; - /** * @internal */ @@ -765,3 +753,4 @@ export function _indexHandle( } return `${collectionName}/${String(selector)}`; } +//#endregion \ No newline at end of file diff --git a/src/job.ts b/src/jobs.ts similarity index 75% rename from src/job.ts rename to src/jobs.ts index 92c2ada79..0fbedb771 100644 --- a/src/job.ts +++ b/src/jobs.ts @@ -1,25 +1,36 @@ -import { ProcessedResponse } from "./connection.js"; -import { Database } from "./database.js"; +/** + * ```ts + * import type { Job } from "arangojs/jobs"; + * ``` + * + * The "jobs" module provides job-related types for TypeScript. 
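To tie the renamed description types together, a sketch of listing indexes. It assumes `collection.indexes()` accepts the new `ListIndexesOptions` shape, and it follows the doc comment's advice to cast the result when `withHidden` is set:

```ts
import { Database } from "arangojs";
import type { HiddenIndexDescription } from "arangojs/indexes";

const db = new Database();
const users = db.collection("users");

const indexes = (await users.indexes({
  withHidden: true,
})) as HiddenIndexDescription[];

for (const index of indexes) {
  // Internal or still-building indexes may expose a progress percentage.
  console.log(index.id, index.type, index.progress);
}
```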
+ * + * @packageDocumentation + */ +import * as connection from "./connection.js"; +import * as databases from "./databases.js"; /** - * Represents an async job in a {@link database.Database}. + * Represents an async job in a {@link databases.Database}. + * + * @param ResultType - The type of the job's result. */ -export class Job { +export class Job { protected _id: string; - protected _db: Database; - protected _transformResponse?: (res: ProcessedResponse) => Promise; - protected _transformError?: (error: any) => Promise; + protected _db: databases.Database; + protected _transformResponse?: (res: connection.ProcessedResponse) => Promise; + protected _transformError?: (error: any) => Promise; protected _loaded: boolean = false; - protected _result: T | undefined; + protected _result: ResultType | undefined; /** * @internal */ constructor( - db: Database, + db: databases.Database, id: string, - transformResponse?: (res: ProcessedResponse) => Promise, - transformError?: (error: any) => Promise + transformResponse?: (res: connection.ProcessedResponse) => Promise, + transformError?: (error: any) => Promise ) { this._db = db; this._id = id; @@ -52,7 +63,7 @@ export class Job { /** * The job's result if it has been loaded or `undefined` otherwise. */ - get result(): T | undefined { + get result(): ResultType | undefined { return this._result; } @@ -71,9 +82,9 @@ export class Job { * console.log(job.result); * ``` */ - async load(): Promise { + async load(): Promise { if (!this.isLoaded) { - let res: ProcessedResponse; + let res: connection.ProcessedResponse; try { res = await this._db.request( { diff --git a/src/lib/request.ts b/src/lib/request.ts index 50daed6d6..ce31e7d48 100644 --- a/src/lib/request.ts +++ b/src/lib/request.ts @@ -5,7 +5,7 @@ * @internal */ -import { FetchFailedError, NetworkError, RequestAbortedError, ResponseTimeoutError } from "../error.js"; +import { FetchFailedError, NetworkError, RequestAbortedError, ResponseTimeoutError } from "../errors.js"; function timer(timeout: number, cb: () => void) { const t = setTimeout(cb, timeout); diff --git a/src/logs.ts b/src/logs.ts new file mode 100644 index 000000000..ea2bbca0f --- /dev/null +++ b/src/logs.ts @@ -0,0 +1,104 @@ +/** + * ```ts + * import type { LogLevel } from "arangojs/logs"; + * ``` + * + * The "logs" module provides types for ArangoDB logs. + * + * @packageDocumentation + */ + +//#region Shared types +/** + * Numeric representation of the logging level of a log entry. + */ +export enum LogLevel { + FATAL, + ERROR, + WARNING, + INFO, + DEBUG, +} + +/** + * String representation of the logging level of a log entry. + */ +export type LogLevelLabel = keyof typeof LogLevel; + +/** + * Logging level setting. + */ +export type LogLevelSetting = LogLevelLabel | "DEFAULT"; + +/** + * Log sorting direction, ascending or descending. + */ +export type LogSortDirection = "asc" | "desc"; +//#endregion + +//#region Log operation options +/** + * Options for retrieving log entries. + */ +export type LogEntriesOptions = { + /** + * Maximum log level of the entries to retrieve. + * + * Default: `INFO`. + */ + upto?: LogLevel | LogLevelLabel | Lowercase; + /** + * If set, only log entries with this log level will be returned. + */ + level?: LogLevel | LogLevelLabel | Lowercase; + /** + * If set, only log entries with an `lid` greater than or equal to this value + * will be returned. + */ + start?: number; + /** + * If set, only this many entries will be returned. 
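Since `Job` is now generic over `ResultType`, a consumer-side sketch using only the members visible in this diff (`isLoaded`, `load`, `result`) might poll until the result becomes available. The helper itself is hypothetical:

```ts
import type { Job } from "arangojs/jobs";

// Hypothetical helper: poll an async job until its result has been loaded.
async function awaitJob<T>(job: Job<T>, intervalMs = 1000): Promise<T | undefined> {
  while (!job.isLoaded) {
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    await job.load();
  }
  return job.result;
}
```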
+ */ + size?: number; + /** + * If set, this many log entries will be skipped. + */ + offset?: number; + /** + * If set, only log entries containing the specified text will be returned. + */ + search?: string; + /** + * If set to `"desc"`, log entries will be returned in reverse chronological + * order. + * + * Default: `"asc"`. + */ + sort?: LogSortDirection; +}; +//#endregion + +//#region Log operation results +/** + * An object representing a single log entry. + */ +export type LogMessage = { + id: number; + topic: string; + level: LogLevelLabel; + date: string; + message: string; +}; + +/** + * An object representing a list of log entries. + */ +export type LogEntries = { + totalAmount: number; + lid: number[]; + topic: string[]; + level: LogLevel[]; + timestamp: number[]; + text: string[]; +}; +//#endregion \ No newline at end of file diff --git a/src/queries.ts b/src/queries.ts new file mode 100644 index 000000000..9be481c95 --- /dev/null +++ b/src/queries.ts @@ -0,0 +1,513 @@ +/** + * ```ts + * import type { QueryOptions } from "arangojs/queries"; + * ``` + * + * The "query" module provides query related types for TypeScript. + * + * @packageDocumentation + */ + +//#region Query operation options +/** + * Options for executing a query. + * + * See {@link databases.Database#query}. + */ +export type QueryOptions = { + /** + * If set to `true`, the query will be executed with support for dirty reads + * enabled, permitting ArangoDB to return a potentially dirty or stale result + * and arangojs will load balance the request without distinguishing between + * leaders and followers. + * + * Note that dirty reads are only supported for read-only queries, not data + * modification queries (e.g. using `INSERT`, `UPDATE`, `REPLACE` or + * `REMOVE`) and only when using ArangoDB 3.4 or later. + * + * Default: `false` + */ + allowDirtyRead?: boolean; + /** + * If set to `true`, cursor results will be stored by ArangoDB in such a way + * that batch reads can be retried in the case of a communication error. + * + * Default: `false` + */ + allowRetry?: boolean; + /** + * Maximum time in milliseconds arangojs will wait for a server response. + * Exceeding this value will result in the request being cancelled. + * + * **Note**: Setting a timeout for the client does not guarantee the query + * will be killed by ArangoDB if it is already being executed. See the + * `maxRuntime` option for limiting the execution time within ArangoDB. + */ + timeout?: number; + /** + * If set to a positive number, the query will automatically be retried at + * most this many times if it results in a write-write conflict. + * + * Default: `0` + */ + retryOnConflict?: number; + /** + * Unless set to `false`, the number of result values in the result set will + * be returned in the `count` attribute. This may be disabled by default in + * a future version of ArangoDB if calculating this value has a performance + * impact for some queries. + * + * Default: `true`. + */ + count?: boolean; + /** + * Number of result values to be transferred by the server in each + * network roundtrip (or "batch"). + * + * Must be greater than zero. + */ + batchSize?: number; + /** + * If set to `false`, the AQL query results cache lookup will be skipped for + * this query. + * + * Default: `true` + */ + cache?: boolean; + /** + * Maximum memory size in bytes that the query is allowed to use. + * Exceeding this value will result in the query failing with an error. + * + * If set to `0`, the memory limit is disabled. 
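A sketch of how the log types might be consumed. The accessor name is an assumption about the `Database` API (recent releases expose `getLogMessages`), not something introduced by this patch:

```ts
import { Database } from "arangojs";
import { LogLevel } from "arangojs/logs";
import type { LogEntriesOptions } from "arangojs/logs";

const db = new Database();

const options: LogEntriesOptions = {
  upto: LogLevel.WARNING, // numeric enum; "WARNING" or "warning" also work
  size: 50,
  sort: "desc",
};

// Assumption: db.getLogMessages accepts LogEntriesOptions and returns LogMessage[].
const messages = await db.getLogMessages(options);
for (const message of messages) {
  console.log(message.level, message.date, message.message);
}
```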
+ * + * Default: `0` + */ + memoryLimit?: number; + /** + * Maximum allowed execution time before the query will be killed in seconds. + * + * If set to `0`, the query will be allowed to run indefinitely. + * + * Default: `0` + */ + maxRuntime?: number; + /** + * Time-to-live for the cursor in seconds. The cursor results may be + * garbage collected by ArangoDB after this much time has passed. + * + * Default: `30` + */ + ttl?: number; + /** + * If set to `true`, the query will throw an exception and abort if it would + otherwise produce a warning. + */ + failOnWarning?: boolean; + /** + * If set to `1` or `true`, additional query profiling information will be + * returned in the `extra.profile` attribute if the query is not served from + * the result cache. + * + * If set to `2`, the query will return execution stats per query plan node + * in the `extra.stats.nodes` attribute. Additionally the query plan is + * returned in `extra.plan`. + */ + profile?: boolean | number; + /** + * If set to `true`, the query will be executed as a streaming query. + */ + stream?: boolean; + /** + * Limits the maximum number of warnings a query will return. + */ + maxWarningsCount?: number; + /** + * If set to `true` and the query has a `LIMIT` clause, the total number of + * values matched before the last top-level `LIMIT` in the query was applied + * will be returned in the `extra.stats.fullCount` attribute. + */ + fullCount?: boolean; + /** + * If set to `false`, the query data will not be stored in the RocksDB block + * cache. This can be used to avoid thrashing he block cache when reading a + * lot of data. + */ + fillBlockCache?: boolean; + /** + * An object with a `rules` property specifying a list of optimizer rules to + * be included or excluded by the optimizer for this query. Prefix a rule + * name with `+` to include it, or `-` to exclude it. The name `all` acts as + * an alias matching all optimizer rules. + */ + optimizer?: { rules: string[] }; + /** + * Limits the maximum number of plans that will be created by the AQL query + * optimizer. + */ + maxPlans?: number; + /** + * Controls after how many execution nodes in a query a stack split should be + * performed. + * + * Default: `250` (`200` on macOS) + */ + maxNodesPerCallstack?: number; + /** + * Maximum size of transactions in bytes. + */ + maxTransactionSize?: number; + /** + * Maximum number of operations after which an intermediate commit is + * automatically performed. + */ + intermediateCommitCount?: number; + /** + * Maximum total size of operations in bytes after which an intermediate + * commit is automatically performed. + */ + intermediateCommitSize?: number; + /** + * (Enterprise Edition cluster only.) If set to `true`, collections + * inaccessible to current user will result in an access error instead + * of being treated as empty. + */ + skipInaccessibleCollections?: boolean; + /** + * (Enterprise Edition cluster only.) Limits the maximum time in seconds a + * DBServer will wait to bring satellite collections involved in the query + * into sync. Exceeding this value will result in the query being stopped. + * + * Default: `60` + */ + satelliteSyncWait?: number; +}; + +/** + * Options for explaining a query. + * + * See {@link Database#explain}. + */ +export type ExplainOptions = { + /** + * An object with a `rules` property specifying a list of optimizer rules to + * be included or excluded by the optimizer for this query. Prefix a rule + * name with `+` to include it, or `-` to exclude it. 
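Most of these options are passed straight through the second argument of `db.query`; a short sketch:

```ts
import { aql, Database } from "arangojs";

const db = new Database();

const cursor = await db.query(
  aql`FOR u IN users FILTER u.active LIMIT 10 RETURN u`,
  { count: true, batchSize: 5, fullCount: true, maxRuntime: 30 }
);

console.log(cursor.count, cursor.extra.stats?.fullCount);
const users = await cursor.all();
```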
The name `all` acts as + * an alias matching all optimizer rules. + */ + optimizer?: { rules: string[] }; + /** + * Maximum number of plans that the optimizer is allowed to generate. + * Setting this to a low value limits the amount of work the optimizer does. + */ + maxNumberOfPlans?: number; + /** + * If set to true, all possible execution plans will be returned as the + * `plans` property. Otherwise only the optimal execution plan will be + * returned as the `plan` property. + * + * Default: `false` + */ + allPlans?: boolean; +}; + +/** + * Options for query tracking. + * + * See {@link Database#queryTracking}. + */ +export type QueryTrackingOptions = { + /** + * If set to `false`, neither queries nor slow queries will be tracked. + */ + enabled?: boolean; + /** + * Maximum query string length in bytes that will be kept in the list. + */ + maxQueryStringLength?: number; + /** + * Maximum number of slow queries to be kept in the list. + */ + maxSlowQueries?: number; + /** + * Threshold execution time in seconds for when a query will be + * considered slow. + */ + slowQueryThreshold?: number; + /** + * If set to `true`, bind parameters will be tracked along with queries. + */ + trackBindVars?: boolean; + /** + * If set to `true` and `enabled` is also set to `true`, slow queries will be + * tracked if their execution time exceeds `slowQueryThreshold`. + */ + trackSlowQueries?: boolean; +}; + +//#endregion + +//#region Query operation results +/** + * Result of explaining a query with a single plan. + */ +export type SingleExplainResult = { + /** + * Query plan. + */ + plan: ExplainPlan; + /** + * Whether it would be possible to cache the query. + */ + cacheable: boolean; + /** + * Warnings encountered while planning the query execution. + */ + warnings: { code: number; message: string }[]; + /** + * Optimizer statistics for the explained query. + */ + stats: ExplainStats; +}; + +/** + * Result of explaining a query with multiple plans. + */ +export type MultiExplainResult = { + /** + * Query plans. + */ + plans: ExplainPlan[]; + /** + * Whether it would be possible to cache the query. + */ + cacheable: boolean; + /** + * Warnings encountered while planning the query execution. + */ + warnings: { code: number; message: string }[]; + /** + * Optimizer statistics for the explained query. + */ + stats: ExplainStats; +}; + +/** + * Plan explaining query execution. + */ +export type ExplainPlan = { + /** + * Execution nodes in this plan. + */ + nodes: { + [key: string]: any; + type: string; + id: number; + dependencies: number[]; + estimatedCost: number; + estimatedNrItems: number; + }[]; + /** + * Rules applied by the optimizer. + */ + rules: string[]; + /** + * Information about collections involved in the query. + */ + collections: { + name: string; + type: "read" | "write"; + }[]; + /** + * Variables used in the query. + */ + variables: { + id: number; + name: string; + }[]; + /** + * Total estimated cost of the plan. + */ + estimatedCost: number; + /** + * Estimated number of items returned by the query. + */ + estimatedNrItems: number; + /** + * Whether the query is a data modification query. + */ + isModificationQuery: boolean; +}; + +/** + * Optimizer statistics for an explained query. + */ +export type ExplainStats = { + /** + * Total number of rules executed for this query. + */ + rulesExecuted: number; + /** + * Number of rules skipped for this query. + */ + rulesSkipped: number; + /** + * Total number of plans created. 
+ */ + plansCreated: number; + /** + * Maximum memory usage in bytes of the query during explain. + */ + peakMemoryUsage: number; + /** + * Time in seconds needed to explain the query. + */ + executionTime: number; +}; + +/** + * Result of parsing a query. + */ +export type ParseResult = { + /** + * Whether the query was parsed. + */ + parsed: boolean; + /** + * Names of all collections involved in the query. + */ + collections: string[]; + /** + * Names of all bind parameters used in the query. + */ + bindVars: string[]; + /** + * Abstract syntax tree (AST) of the query. + */ + ast: AstNode[]; +}; + +/** + * Node in an AQL abstract syntax tree (AST). + */ +export type AstNode = { + [key: string]: any; + type: string; + subNodes: AstNode[]; +}; + +/** + * Optimizer rule for AQL queries. + */ +export type QueryOptimizerRule = { + name: string; + flags: { + hidden: boolean; + clusterOnly: boolean; + canBeDisabled: boolean; + canCreateAdditionalPlans: boolean; + disabledByDefault: boolean; + enterpriseOnly: boolean; + }; +}; + +/** + * Information about query tracking. + */ +export type QueryTrackingInfo = { + /** + * Whether query tracking is enabled. + */ + enabled: boolean; + /** + * Maximum query string length in bytes that is kept in the list. + */ + maxQueryStringLength: number; + /** + * Maximum number of slow queries that is kept in the list. + */ + maxSlowQueries: number; + /** + * Threshold execution time in seconds for when a query is + * considered slow. + */ + slowQueryThreshold: number; + /** + * Whether bind parameters are being tracked along with queries. + */ + trackBindVars: boolean; + /** + * Whether slow queries are being tracked. + */ + trackSlowQueries: boolean; +}; +//#endregion + +//#region QueryDescription +/** + * Object describing a query. + */ +export type QueryDescription = { + /** + * Unique identifier for this query. + */ + id: string; + /** + * Name of the database the query runs in. + */ + database: string; + /** + * Name of the user that started the query. + */ + user: string; + /** + * Query string (potentially truncated). + */ + query: string; + /** + * Bind parameters used in the query. + */ + bindVars: Record; + /** + * Date and time the query was started. + */ + started: string; + /** + * Query's running time in seconds. + */ + runTime: number; + /** + * Maximum memory usage in bytes of the query. + */ + peakMemoryUsage: number; + /** + * Query's current execution state. + */ + state: "executing" | "finished" | "killed"; + /** + * Whether the query uses a streaming cursor. + */ + stream: boolean; +}; +//#endregion + +//#region UserFunctionDescription +/** + * Definition of an AQL User Function. + */ +export type UserFunctionDescription = { + /** + * Name of the AQL User Function. + */ + name: string; + /** + * Implementation of the AQL User Function. + */ + code: string; + /** + * Whether the function is deterministic. + * + * See {@link Database#createFunction}. + */ + isDeterministic: boolean; +}; +//#endregion \ No newline at end of file diff --git a/src/route.ts b/src/routes.ts similarity index 90% rename from src/route.ts rename to src/routes.ts index fe52bfce7..62f877d24 100644 --- a/src/route.ts +++ b/src/routes.ts @@ -1,21 +1,22 @@ /** * ```ts - * import type { Route } from "arangojs/route.js"; + * import type { Route } from "arangojs/routes"; * ``` * - * The "route" module provides route related types and interfaces for TypeScript. + * The "routes" module provides route related types and interfaces for + * TypeScript. 
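Closing out the query types, a sketch of `db.explain` with the `ExplainOptions` defined above; the optimizer rule name is just an example:

```ts
import { aql, Database } from "arangojs";

const db = new Database();

const explanation = await db.explain(
  aql`FOR u IN users FILTER u.age >= 21 RETURN u`,
  { allPlans: false, optimizer: { rules: ["-use-indexes"] } }
);

console.log(explanation.plan.estimatedCost, explanation.cacheable);
console.log(explanation.stats.rulesExecuted, explanation.stats.executionTime);
```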
* * @packageDocumentation */ -import { ProcessedResponse, RequestOptions } from "./connection.js"; -import { Database } from "./database.js"; +import * as connections from "./connection.js"; +import * as databases from "./databases.js"; import { mergeHeaders } from "./lib/mergeHeaders.js"; /** * Represents an arbitrary route relative to an ArangoDB database. */ export class Route { - protected _db: Database; + protected _db: databases.Database; protected _path: string; protected _headers: Headers; @@ -23,7 +24,7 @@ export class Route { * @internal */ constructor( - db: Database, + db: databases.Database, path: string = "", headers: Headers | Record = {} ) { @@ -99,7 +100,7 @@ export class Route { * }); * ``` */ - request(options?: RequestOptions) { + request(options?: connections.RequestOptions) { const opts = { ...options }; if (!opts.path || opts.path === "/") opts.path = ""; else if (!this._path || opts.path.charAt(0) === "/") opts.path = opts.path; @@ -129,7 +130,7 @@ export class Route { path: string, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a DELETE request against the given path relative to this route * and returns the server response. @@ -148,8 +149,8 @@ export class Route { delete( search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - delete(...args: any[]): Promise { + ): Promise; + delete(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; return this.request({ method: "DELETE", path, search, headers }); @@ -174,7 +175,7 @@ export class Route { path: string, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a GET request against the given path relative to this route * and returns the server response. @@ -193,8 +194,8 @@ export class Route { get( search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - get(...args: any[]): Promise { + ): Promise; + get(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; return this.request({ method: "GET", path, search, headers }); @@ -219,7 +220,7 @@ export class Route { path: string, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a HEAD request against the given path relative to this route * and returns the server response. @@ -238,8 +239,8 @@ export class Route { head( search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - head(...args: any[]): Promise { + ): Promise; + head(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; return this.request({ method: "HEAD", path, search, headers }); @@ -266,7 +267,7 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a PATCH request against the given path relative to this route * and returns the server response. @@ -289,8 +290,8 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - patch(...args: any[]): Promise { + ): Promise; + patch(...args: any[]): Promise { const path = typeof args[0] === "string" ? 
args.shift() : undefined; const [body, search, headers] = args; return this.request({ method: "PATCH", path, body, search, headers }); @@ -320,7 +321,7 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a POST request against the given path relative to this route * and returns the server response. @@ -346,8 +347,8 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - post(...args: any[]): Promise { + ): Promise; + post(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; return this.request({ method: "POST", path, body, search, headers }); @@ -374,7 +375,7 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; + ): Promise; /** * Performs a PUT request against the given path relative to this route * and returns the server response. @@ -397,8 +398,8 @@ export class Route { body?: any, search?: URLSearchParams | Record, headers?: Headers | Record - ): Promise; - put(...args: any[]): Promise { + ): Promise; + put(...args: any[]): Promise { const path = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; return this.request({ method: "PUT", path, body, search, headers }); diff --git a/src/services.ts b/src/services.ts new file mode 100644 index 000000000..6e14f0a4f --- /dev/null +++ b/src/services.ts @@ -0,0 +1,538 @@ +/** + * ```ts + * import type { ServiceInfo } from "arangojs/services"; + * ``` + * + * The "services" module provides types for Foxx services. + * + * @packageDocumentation + */ +import { FoxxManifest } from "./foxx-manifest.js"; + +//#region Service operation options +/** + * Options for installing the service. + * + * See {@link Database#installService}. + */ +export type InstallServiceOptions = { + /** + * An object mapping configuration option names to values. + * + * See also {@link Database#getServiceConfiguration}. + */ + configuration?: Record; + /** + * An object mapping dependency aliases to mount points. + * + * See also {@link Database#getServiceDependencies}. + */ + dependencies?: Record; + /** + * Whether the service should be installed in development mode. + * + * See also {@link Database#setServiceDevelopmentMode}. + * + * Default: `false` + */ + development?: boolean; + /** + * Whether the service should be installed in legacy compatibility mode + * + * This overrides the `engines` option in the service manifest (if any). + * + * Default: `false` + */ + legacy?: boolean; + /** + * Whether the "setup" script should be executed. + * + * Default: `true` + */ + setup?: boolean; +}; + +/** + * Options for replacing a service. + * + * See {@link Database#replaceService}. + */ +export type ReplaceServiceOptions = { + /** + * An object mapping configuration option names to values. + * + * See also {@link Database#getServiceConfiguration}. + */ + configuration?: Record; + /** + * An object mapping dependency aliases to mount points. + * + * See also {@link Database#getServiceDependencies}. + */ + dependencies?: Record; + /** + * Whether the service should be installed in development mode. + * + * See also {@link Database#setServiceDevelopmentMode}. 
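The verb helpers above all funnel into `request()`; a small usage sketch against a mounted Foxx service, mirroring the examples in the doc comments (the mount path and payload are hypothetical):

```ts
import { Database } from "arangojs";

const db = new Database();
const route = db.route("my-foxx-service");

// GET my-foxx-service/users?group=admin
const listResponse = await route.get("users", { group: "admin" });
console.log(listResponse.parsedBody);

// POST my-foxx-service/users with a JSON body
const createResponse = await route.post("users", { username: "admin" });
console.log(createResponse.parsedBody);
```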
+ * + * Default: `false` + */ + development?: boolean; + /** + * Whether the service should be installed in legacy compatibility mode + * + * This overrides the `engines` option in the service manifest (if any). + * + * Default: `false` + */ + legacy?: boolean; + /** + * Whether the "setup" script should be executed. + * + * Default: `true` + */ + setup?: boolean; + /** + * Whether the existing service's "teardown" script should be executed + * prior to removing that service. + * + * Default: `true` + */ + teardown?: boolean; + /** + * If set to `true`, replacing a service that does not already exist will + * fall back to installing the new service. + * + * Default: `false` + */ + force?: boolean; +}; + +/** + * Options for upgrading a service. + * + * See {@link Database#upgradeService}. + */ +export type UpgradeServiceOptions = { + /** + * An object mapping configuration option names to values. + * + * See also {@link Database#getServiceConfiguration}. + */ + configuration?: Record; + /** + * An object mapping dependency aliases to mount points. + * + * See also {@link Database#getServiceDependencies}. + */ + dependencies?: Record; + /** + * Whether the service should be installed in development mode. + * + * See also {@link Database#setServiceDevelopmentMode}. + * + * Default: `false` + */ + development?: boolean; + /** + * Whether the service should be installed in legacy compatibility mode + * + * This overrides the `engines` option in the service manifest (if any). + * + * Default: `false` + */ + legacy?: boolean; + /** + * Whether the "setup" script should be executed. + * + * Default: `true` + */ + setup?: boolean; + /** + * Whether the existing service's "teardown" script should be executed + * prior to upgrading that service. + * + * Default: `false` + */ + teardown?: boolean; + /** + * Unless set to `true`, upgrading a service that does not already exist will + * fall back to installing the new service. + * + * Default: `false` + */ + force?: boolean; +}; + +/** + * Options for uninstalling a service. + * + * See {@link Database#uninstallService}. + */ +export type UninstallServiceOptions = { + /** + * Whether the service's "teardown" script should be executed + * prior to removing that service. + * + * Default: `true` + */ + teardown?: boolean; + /** + * If set to `true`, uninstalling a service that does not already exist + * will be considered successful. + * + * Default: `false` + */ + force?: boolean; +}; +//#endregion + +//#region Service operation results +/** + * Object briefly describing a Foxx service. + */ +export type ServiceSummary = { + /** + * Service mount point, relative to the database. + */ + mount: string; + /** + * Name defined in the service manifest. + */ + name?: string; + /** + * Version defined in the service manifest. + */ + version?: string; + /** + * Service dependencies the service expects to be able to match as a mapping + * from dependency names to versions the service is compatible with. + */ + provides: Record; + /** + * Whether development mode is enabled for this service. + */ + development: boolean; + /** + * Whether the service is running in legacy compatibility mode. + */ + legacy: boolean; +}; + +/** + * Object describing a configuration option of a Foxx service. + */ +export type ServiceConfiguration = { + /** + * Data type of the configuration value. + * + * **Note**: `"int"` and `"bool"` are historical synonyms for `"integer"` and + * `"boolean"`. 
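A sketch of feeding `InstallServiceOptions` into the existing service API. The bundle path and configuration keys are hypothetical, and depending on the arangojs release the service source may need to be a Blob or stream rather than a Buffer:

```ts
import fs from "node:fs";
import { Database } from "arangojs";

const db = new Database();

// Assumption: installService accepts a Buffer as the service source here.
await db.installService("/my-service", fs.readFileSync("my-service.zip"), {
  development: true,
  setup: true,
  configuration: { greeting: "hello" },
});

const services = await db.listServices();
console.log(services.map((service) => service.mount));
```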
The `"password"` type is synonymous with `"string"` but can + * be used to distinguish values which should not be displayed in plain text + * by software when managing the service. + */ + type: + | "integer" + | "boolean" + | "string" + | "number" + | "json" + | "password" + | "int" + | "bool"; + /** + * Current value of the configuration option as stored internally. + */ + currentRaw: any; + /** + * Processed current value of the configuration option as exposed in the + * service code. + */ + current: any; + /** + * Formatted name of the configuration option. + */ + title: string; + /** + * Human-readable description of the configuration option. + */ + description?: string; + /** + * Whether the configuration option must be set in order for the service + * to be operational. + */ + required: boolean; + /** + * Default value of the configuration option. + */ + default?: any; +}; + +/** + * Object describing a single-service dependency defined by a Foxx service. + */ +export type SingleServiceDependency = { + /** + * Whether this is a multi-service dependency. + */ + multiple: false; + /** + * Current mount point the dependency is resolved to. + */ + current?: string; + /** + * Formatted name of the dependency. + */ + title: string; + /** + * Name of the service the dependency expects to match. + */ + name: string; + /** + * Version of the service the dependency expects to match. + */ + version: string; + /** + * Human-readable description of the dependency. + */ + description?: string; + /** + * Whether the dependency must be matched in order for the service + * to be operational. + */ + required: boolean; +}; + +/** + * Object describing a multi-service dependency defined by a Foxx service. + */ +export type MultiServiceDependency = { + /** + * Whether this is a multi-service dependency. + */ + multiple: true; + /** + * Current mount points the dependency is resolved to. + */ + current?: string[]; + /** + * Formatted name of the dependency. + */ + title: string; + /** + * Name of the service the dependency expects to match. + */ + name: string; + /** + * Version of the service the dependency expects to match. + */ + version: string; + /** + * Human-readable description of the dependency. + */ + description?: string; + /** + * Whether the dependency must be matched in order for the service + * to be operational. + */ + required: boolean; +}; + +/** + * Test stats for a Foxx service's tests. + */ +export type ServiceTestStats = { + /** + * Total number of tests found. + */ + tests: number; + /** + * Number of tests that ran successfully. + */ + passes: number; + /** + * Number of tests that failed. + */ + failures: number; + /** + * Number of tests skipped or not executed. + */ + pending: number; + /** + * Total test duration in milliseconds. + */ + duration: number; +}; + +/** + * Test results for a Foxx service's tests using the stream reporter. + */ +export type ServiceTestStreamReport = ( + | ["start", { total: number }] + | ["pass", ServiceTestStreamTest] + | ["fail", ServiceTestStreamTest] + | ["end", ServiceTestStats] +)[]; + +/** + * Test results for a single test case using the stream reporter. + */ +export type ServiceTestStreamTest = { + title: string; + fullTitle: string; + duration: number; + err?: string; +}; + +/** + * Test results for a Foxx service's tests using the suite reporter. 
+ */ +export type ServiceTestSuiteReport = { + stats: ServiceTestStats; + suites: ServiceTestSuite[]; + tests: ServiceTestSuiteTest[]; +}; + +/** + * Test results for a single test suite using the suite reporter. + */ +export type ServiceTestSuite = { + title: string; + suites: ServiceTestSuite[]; + tests: ServiceTestSuiteTest[]; +}; + +/** + * Test results for a single test case using the suite reporter. + */ +export type ServiceTestSuiteTest = { + result: "pending" | "pass" | "fail"; + title: string; + duration: number; + err?: any; +}; + +/** + * Test results for a Foxx service's tests in XUnit format using the JSONML + * representation. + */ +export type ServiceTestXunitReport = [ + "testsuite", + { + timestamp: number; + tests: number; + errors: number; + failures: number; + skip: number; + time: number; + }, + ...ServiceTestXunitTest[], +]; + +/** + * Test results for a single test case in XUnit format using the JSONML + * representation. + */ +export type ServiceTestXunitTest = + | ["testcase", { classname: string; name: string; time: number }] + | [ + "testcase", + { classname: string; name: string; time: number }, + ["failure", { message: string; type: string }, string], + ]; + +/** + * Test results for a Foxx service's tests in TAP format. + */ +export type ServiceTestTapReport = string[]; + +/** + * Test results for a Foxx service's tests using the default reporter. + */ +export type ServiceTestDefaultReport = { + stats: ServiceTestStats; + tests: ServiceTestDefaultTest[]; + pending: ServiceTestDefaultTest[]; + failures: ServiceTestDefaultTest[]; + passes: ServiceTestDefaultTest[]; +}; + +/** + * Test results for a single test case using the default reporter. + */ +export type ServiceTestDefaultTest = { + title: string; + fullTitle: string; + duration: number; + err?: string; +}; + +/** + * OpenAPI 2.0 description of a Foxx service. + */ +export type SwaggerJson = { + [key: string]: any; + info: { + title: string; + description: string; + version: string; + license: string; + }; + path: { + [key: string]: any; + }; +}; +//#endregion + +//#region ServiceDescription +/** + * Object describing a Foxx service in detail. + */ +export type ServiceDescription = { + /** + * Service mount point, relative to the database. + */ + mount: string; + /** + * File system path of the service. + */ + path: string; + /** + * Name defined in the service manifest. + */ + name?: string; + /** + * Version defined in the service manifest. + */ + version?: string; + /** + * Whether development mode is enabled for this service. + */ + development: boolean; + /** + * Whether the service is running in legacy compatibility mode. + */ + legacy: boolean; + /** + * Content of the service manifest of this service. + */ + manifest: FoxxManifest; + /** + * Internal checksum of the service's initial source bundle. + */ + checksum: string; + /** + * Options for this service. + */ + options: { + /** + * Configuration values set for this service. + */ + configuration: Record; + /** + * Service dependency configuration of this service. 
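For example, the suite reporter's stats could be summarized as follows; this is a sketch assuming `db.runServiceTests` accepts a `reporter` option, and the mount point is hypothetical:

```js
const db = new Database();
const report = await db.runServiceTests("/my-foxx", { reporter: "suite" });
const { tests, passes, failures, pending, duration } = report.stats;
console.log(`${passes}/${tests} passed, ${failures} failed, ${pending} pending (${duration}ms)`);
```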
+ */ + dependencies: Record; + }; +}; +//#endregion \ No newline at end of file diff --git a/src/test/01-manipulating-databases.ts b/src/test/01-manipulating-databases.ts index bedd2541c..93eccf81d 100644 --- a/src/test/01-manipulating-databases.ts +++ b/src/test/01-manipulating-databases.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { ArangoError } from "../error.js"; +import { Database } from "../databases.js"; +import { ArangoError } from "../errors.js"; import { config } from "./_config.js"; describe("Manipulating databases", function () { @@ -77,7 +77,7 @@ describe("Manipulating databases", function () { after(async () => { try { await system.dropDatabase(name); - } catch {} + } catch { } }); it("deletes the given database from the server", async () => { await system.dropDatabase(name); diff --git a/src/test/02-accessing-collections.ts b/src/test/02-accessing-collections.ts index 811e6ca6e..12a687afa 100644 --- a/src/test/02-accessing-collections.ts +++ b/src/test/02-accessing-collections.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { isArangoCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { isArangoCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); diff --git a/src/test/03-accessing-graphs.ts b/src/test/03-accessing-graphs.ts index e65e50600..0fe202f0a 100644 --- a/src/test/03-accessing-graphs.ts +++ b/src/test/03-accessing-graphs.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { Graph } from "../graph.js"; +import { Database } from "../databases.js"; +import { Graph } from "../graphs.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); diff --git a/src/test/04-transactions.ts b/src/test/04-transactions.ts index 5b741d73d..aef369c4d 100644 --- a/src/test/04-transactions.ts +++ b/src/test/04-transactions.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; -import { Transaction } from "../transaction.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; +import { Transaction } from "../transactions.js"; import { config } from "./_config.js"; describe("Transactions", () => { @@ -127,7 +127,7 @@ describe("Transactions", () => { let doc: any; try { doc = await collection.document("test"); - } catch (e: any) {} + } catch (e: any) { } if (doc) expect.fail("Document should not exist yet."); const { id, status } = await trx.commit(); expect(id).to.equal(trx.id); @@ -142,11 +142,11 @@ describe("Transactions", () => { let doc: any; try { doc = await collection.document("test1"); - } catch (e: any) {} + } catch (e: any) { } if (doc) expect.fail("Document should not exist yet."); try { doc = await collection.document("test2"); - } catch (e: any) {} + } catch (e: any) { } if (doc) expect.fail("Document should not exist yet."); const { id, status } = await trx.commit(); expect(id).to.equal(trx.id); @@ -164,7 +164,7 @@ describe("Transactions", () => { let doc: any; try { doc = await collection.document("test"); - } catch (e: any) {} + } catch (e: any) { } if (doc) expect.fail("Document should not exist yet."); }); diff --git a/src/test/05-aql-helpers.ts b/src/test/05-aql-helpers.ts index 
9bc70c6e6..f65ab3e34 100644 --- a/src/test/05-aql-helpers.ts +++ b/src/test/05-aql-helpers.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; import { aql, join, literal } from "../aql.js"; -import { Database } from "../database.js"; +import { Database } from "../databases.js"; describe("AQL helpers", function () { describe("aql", () => { diff --git a/src/test/06-managing-functions.ts b/src/test/06-managing-functions.ts index 790e83642..19785aed3 100644 --- a/src/test/06-managing-functions.ts +++ b/src/test/06-managing-functions.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { Database } from "../database.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; describe("Managing functions", function () { @@ -20,15 +20,15 @@ describe("Managing functions", function () { }); describe("database.listFunctions", () => { it("should be empty per default", async () => { - const result = await db.listFunctions(); + const result = await db.listUserFunctions(); expect(result).to.be.instanceof(Array); expect(result).to.be.empty; }); it("should include before created function", async () => { const name = "myfunctions::temperature::celsiustofahrenheit"; const code = "function (celsius) { return celsius * 1.8 + 32; }"; - await db.createFunction(name, code); - const result = await db.listFunctions(); + await db.createUserFunction(name, code); + const result = await db.listUserFunctions(); expect(result).to.be.instanceof(Array); expect(result.length).to.equal(1); expect(result[0]).to.eql({ @@ -39,7 +39,7 @@ describe("Managing functions", function () { }); describe("database.createFunction", () => { it("should create a function", async () => { - const info = await db.createFunction( + const info = await db.createUserFunction( "myfunctions::temperature::celsiustofahrenheit2", "function (celsius) { return celsius * 1.8 + 32; }" ); @@ -50,11 +50,11 @@ describe("Managing functions", function () { describe("database.dropFunction", () => { it("should drop a existing function", async () => { const name = "myfunctions::temperature::celsiustofahrenheit"; - await db.createFunction( + await db.createUserFunction( name, "function (celsius) { return celsius * 1.8 + 32; }" ); - const info = await db.dropFunction(name); + const info = await db.dropUserFunction(name); expect(info).to.have.property("deletedCount", 1); }); }); diff --git a/src/test/07-routes.ts b/src/test/07-routes.ts index 5e9b8d000..acce96925 100644 --- a/src/test/07-routes.ts +++ b/src/test/07-routes.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; -import { Route } from "../route.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; +import { Route } from "../routes.js"; import { config } from "./_config.js"; describe("Arbitrary HTTP routes", () => { diff --git a/src/test/08-cursors.ts b/src/test/08-cursors.ts index 304bdd6b7..0f98397c6 100644 --- a/src/test/08-cursors.ts +++ b/src/test/08-cursors.ts @@ -1,8 +1,8 @@ import { expect } from "chai"; import { LinkedList } from "../lib/linkedList.js"; import { aql } from "../aql.js"; -import { ArrayCursor, BatchedArrayCursor } from "../cursor.js"; -import { Database } from "../database.js"; +import { Cursor, BatchCursor } from "../cursors.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const aqlQuery = aql`FOR i IN 0..10 RETURN i`; @@ -17,8 +17,8 @@ async function sleep(ms: 
number) { describe("Item-wise Cursor API", () => { const name = `testdb_${Date.now()}`; let system: Database, db: Database; - let cursor: ArrayCursor; - let allCursors: (ArrayCursor | BatchedArrayCursor)[]; + let cursor: Cursor; + let allCursors: (Cursor | BatchCursor)[]; before(async () => { allCursors = []; system = new Database(config); @@ -138,7 +138,7 @@ describe("Item-wise Cursor API", () => { }); it("returns false after last result is consumed (with large amount of results)", async () => { const EXPECTED_LENGTH = 10000; - async function loadMore(cursor: ArrayCursor, totalLength: number) { + async function loadMore(cursor: Cursor, totalLength: number) { await cursor.next(); totalLength++; expect(cursor.hasNext).to.equal(totalLength !== EXPECTED_LENGTH); @@ -241,8 +241,8 @@ describe("Item-wise Cursor API", () => { describe("Batch-wise Cursor API", () => { const name = `testdb_${Date.now()}`; let system: Database, db: Database; - let cursor: BatchedArrayCursor; - let allCursors: (ArrayCursor | BatchedArrayCursor)[]; + let cursor: BatchCursor; + let allCursors: (Cursor | BatchCursor)[]; before(async () => { allCursors = []; system = new Database(config); @@ -349,7 +349,7 @@ describe("Batch-wise Cursor API", () => { }); it("returns false after last result is consumed (with large amount of results)", async () => { const EXPECTED_LENGTH = 10000; - async function loadMore(cursor: ArrayCursor, totalLength: number) { + async function loadMore(cursor: Cursor, totalLength: number) { await cursor.next(); totalLength++; expect(cursor.hasNext).to.equal(totalLength !== EXPECTED_LENGTH); diff --git a/src/test/09-collection-metadata.ts b/src/test/09-collection-metadata.ts index e7e2359c2..3da01f478 100644 --- a/src/test/09-collection-metadata.ts +++ b/src/test/09-collection-metadata.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { COLLECTION_NOT_FOUND } from "../lib/codes.js"; import { config } from "./_config.js"; diff --git a/src/test/10-manipulating-collections.ts b/src/test/10-manipulating-collections.ts index af90b8915..cf7f0a2d8 100644 --- a/src/test/10-manipulating-collections.ts +++ b/src/test/10-manipulating-collections.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; describe("Manipulating collections", function () { diff --git a/src/test/11-managing-indexes.ts b/src/test/11-managing-indexes.ts index 0176e998c..4727180be 100644 --- a/src/test/11-managing-indexes.ts +++ b/src/test/11-managing-indexes.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const it312 = config.arangoVersion! >= 31200 ? 
it : it.skip; diff --git a/src/test/13-bulk-imports.ts b/src/test/13-bulk-imports.ts index 6e25712d5..3eac6c600 100644 --- a/src/test/13-bulk-imports.ts +++ b/src/test/13-bulk-imports.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; describe("Bulk imports", function () { diff --git a/src/test/14-document-collections.ts b/src/test/14-document-collections.ts index ea934edd6..a2f2baf48 100644 --- a/src/test/14-document-collections.ts +++ b/src/test/14-document-collections.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { DocumentMetadata } from "../documents.js"; import { config } from "./_config.js"; diff --git a/src/test/15-edge-collections.ts b/src/test/15-edge-collections.ts index c4f30a623..d1fc51e7c 100644 --- a/src/test/15-edge-collections.ts +++ b/src/test/15-edge-collections.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { EdgeCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { EdgeCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { DocumentMetadata } from "../documents.js"; import { config } from "./_config.js"; diff --git a/src/test/16-graphs.ts b/src/test/16-graphs.ts index c95d059a2..f09684a29 100644 --- a/src/test/16-graphs.ts +++ b/src/test/16-graphs.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { Graph } from "../graph.js"; +import { Database } from "../databases.js"; +import { Graph } from "../graphs.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -90,12 +90,12 @@ describe("Graph API", function () { ...edgeCollectionNames.map(async (name) => { try { await graph.removeEdgeDefinition(name, true); - } catch {} + } catch { } }), ...vertexCollectionNames.map(async (name) => { try { await graph.removeVertexCollection(name, true); - } catch {} + } catch { } }), ]); }); diff --git a/src/test/17-graph-vertices.ts b/src/test/17-graph-vertices.ts index f46bff192..dc7097315 100644 --- a/src/test/17-graph-vertices.ts +++ b/src/test/17-graph-vertices.ts @@ -1,8 +1,8 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; -import { ArangoError } from "../error.js"; -import { Graph, GraphVertexCollection } from "../graph.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; +import { ArangoError } from "../errors.js"; +import { Graph, GraphVertexCollection } from "../graphs.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); diff --git a/src/test/18-graph-edges.ts b/src/test/18-graph-edges.ts index 2aab1197c..4f8a59255 100644 --- a/src/test/18-graph-edges.ts +++ b/src/test/18-graph-edges.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { Graph } from "../graph.js"; +import { Database } from "../databases.js"; +import { Graph } from "../graphs.js"; import { config } from "./_config.js"; 
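The renamed AQL user function helpers used in the test changes above read as follows (the function name and body mirror the test fixtures and are purely illustrative):

```js
const db = new Database();
const name = "myfunctions::temperature::celsiustofahrenheit";
await db.createUserFunction(name, "function (celsius) { return celsius * 1.8 + 32; }");
const functions = await db.listUserFunctions(); // now includes the new function
await db.dropUserFunction(name);
```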
describe("Manipulating graph edges", function () { diff --git a/src/test/19-graph-vertex-collections.ts b/src/test/19-graph-vertex-collections.ts index 252dcf06a..4f66c4ef4 100644 --- a/src/test/19-graph-vertex-collections.ts +++ b/src/test/19-graph-vertex-collections.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; -import { Database } from "../database.js"; +import { Database } from "../databases.js"; import { DocumentMetadata } from "../documents.js"; -import { GraphVertexCollection } from "../graph.js"; +import { GraphVertexCollection } from "../graphs.js"; import { config } from "./_config.js"; describe("GraphVertexCollection API", function () { diff --git a/src/test/20-graph-edge-collections.ts b/src/test/20-graph-edge-collections.ts index 3de62de8a..33d798bfc 100644 --- a/src/test/20-graph-edge-collections.ts +++ b/src/test/20-graph-edge-collections.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; -import { Database } from "../database.js"; +import { Database } from "../databases.js"; import { DocumentMetadata } from "../documents.js"; -import { GraphEdgeCollection } from "../graph.js"; +import { GraphEdgeCollection } from "../graphs.js"; import { config } from "./_config.js"; describe("GraphEdgeCollection API", function () { diff --git a/src/test/22-foxx-api.ts b/src/test/22-foxx-api.ts index 9f636127a..cd1f397c8 100644 --- a/src/test/22-foxx-api.ts +++ b/src/test/22-foxx-api.ts @@ -2,8 +2,8 @@ import { expect } from "chai"; import * as fs from "fs"; import * as path from "path"; -import { Database } from "../database.js"; -import { ArangoError } from "../error.js"; +import { Database } from "../databases.js"; +import { ArangoError } from "../errors.js"; import { config } from "./_config.js"; const localAppsPath = path.resolve(".", "fixtures"); @@ -33,7 +33,7 @@ describe("Foxx service", () => { after(async () => { try { await db.uninstallService(serviceServiceMount, { force: true }); - } catch (e: any) {} + } catch (e: any) { } try { await system.dropDatabase(name); } finally { @@ -44,7 +44,7 @@ describe("Foxx service", () => { afterEach(async () => { try { await db.uninstallService(mount, { force: true }); - } catch (e: any) {} + } catch (e: any) { } }); const cases = [ @@ -125,7 +125,7 @@ describe("Foxx service", () => { try { await db.route(mount).get(); expect.fail(); - } catch (e: any) {} + } catch (e: any) { } }); it("empty configuration should be available", async () => { @@ -683,7 +683,7 @@ describe("Foxx service", () => { ), ]) ); - const scripts = await db.listServiceScripts(mount); + const scripts = await db.getServiceScripts(mount); expect(scripts).to.have.property("setup", "Setup"); expect(scripts).to.have.property("teardown", "Teardown"); }); @@ -853,7 +853,7 @@ describe("Foxx service", () => { "getServiceDependencies", (mount: string) => db.getServiceDependencies(mount), ], - ["listServiceScripts", (mount: string) => db.listServiceScripts(mount)], + ["listServiceScripts", (mount: string) => db.getServiceScripts(mount)], ["upgradeService", (mount: string) => db.upgradeService(mount, {} as any)], [ "updateServiceConfiguration", diff --git a/src/test/23-aql-queries-stream.ts b/src/test/23-aql-queries-stream.ts index cced8f0d9..5af17c061 100644 --- a/src/test/23-aql-queries-stream.ts +++ b/src/test/23-aql-queries-stream.ts @@ -1,13 +1,14 @@ import { expect } from "chai"; import { aql } from "../aql.js"; -import { ArrayCursor } from "../cursor.js"; -import { Database, QueryOptions } from "../database.js"; +import { Cursor } from "../cursors.js"; +import { Database } 
from "../databases.js"; import { config } from "./_config.js"; +import { QueryOptions } from "../queries.js"; describe("AQL Stream queries", function () { const name = `testdb_${Date.now()}`; let system: Database, db: Database; - let allCursors: ArrayCursor[]; + let allCursors: Cursor[]; before(async () => { allCursors = []; system = new Database(config); @@ -29,7 +30,7 @@ describe("AQL Stream queries", function () { it("returns a cursor for the query result", async () => { const cursor = await db.query("RETURN 23", {}, { stream: true }); allCursors.push(cursor); - expect(cursor).to.be.an.instanceof(ArrayCursor); + expect(cursor).to.be.an.instanceof(Cursor); }); it("supports bindVars", async () => { const cursor = await db.query("RETURN @x", { x: 5 }, { stream: true }); @@ -91,7 +92,7 @@ describe("AQL Stream queries", function () { allCursors.push(...cursors); await Promise.all( cursors.map((c) => - (c as ArrayCursor).forEach(() => { + (c as Cursor).forEach(() => { count++; }) ) diff --git a/src/test/24-accessing-views.ts b/src/test/24-accessing-views.ts index 7adfe43f9..7e77a56ae 100644 --- a/src/test/24-accessing-views.ts +++ b/src/test/24-accessing-views.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { View } from "../view.js"; +import { Database } from "../databases.js"; +import { View } from "../views.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); diff --git a/src/test/25-view-metadata.ts b/src/test/25-view-metadata.ts index 2a946a12e..711c6ce52 100644 --- a/src/test/25-view-metadata.ts +++ b/src/test/25-view-metadata.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { ArangoSearchViewProperties, View } from "../view.js"; +import { Database } from "../databases.js"; +import { ArangoSearchViewProperties, View } from "../views.js"; import { config } from "./_config.js"; describe("View metadata", function () { diff --git a/src/test/26-manipulating-views.ts b/src/test/26-manipulating-views.ts index 1e0942560..afceeed78 100644 --- a/src/test/26-manipulating-views.ts +++ b/src/test/26-manipulating-views.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { ArangoSearchViewProperties, View } from "../view.js"; +import { Database } from "../databases.js"; +import { ArangoSearchViewProperties, View } from "../views.js"; import { config } from "./_config.js"; // NOTE These tests will not reliably work in a cluster. diff --git a/src/test/27-query-management.ts b/src/test/27-query-management.ts index 8cf1b3d91..a9dff5c64 100644 --- a/src/test/27-query-management.ts +++ b/src/test/27-query-management.ts @@ -1,8 +1,8 @@ import { expect } from "chai"; import { aql } from "../aql.js"; -import { ArrayCursor } from "../cursor.js"; -import { Database } from "../database.js"; -import { ArangoError, ResponseTimeoutError } from "../error.js"; +import { Cursor } from "../cursors.js"; +import { Database } from "../databases.js"; +import { ArangoError, ResponseTimeoutError } from "../errors.js"; import { config } from "./_config.js"; // NOTE These tests will not reliably work with load balancing. 
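With the cursor classes renamed as above, query results are instances of `Cursor` (or `BatchCursor` for batch-wise access); a minimal sketch, assuming the public entry point follows the new internal module name `arangojs/cursors`:

```js
import { Cursor } from "arangojs/cursors";

const db = new Database();
const cursor = await db.query("FOR i IN 1..3 RETURN i");
console.log(cursor instanceof Cursor); // true
for await (const value of cursor) {
  console.log(value); // 1, 2, 3
}
```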
@@ -18,7 +18,7 @@ async function sleep(ms: number) { describe("Query Management API", function () { const dbName = `testdb_${Date.now()}`; let system: Database, db: Database; - let allCursors: ArrayCursor[]; + let allCursors: Cursor[]; before(async () => { allCursors = []; system = new Database(config); @@ -46,7 +46,7 @@ describe("Query Management API", function () { it("returns a cursor for the query result", async () => { const cursor = await db.query("RETURN 23"); allCursors.push(cursor); - expect(cursor).to.be.an.instanceof(ArrayCursor); + expect(cursor).to.be.an.instanceof(Cursor); }); it("throws an exception on error", async () => { try { diff --git a/src/test/28-accessing-analyzers.ts b/src/test/28-accessing-analyzers.ts index 06512c91c..540189a1e 100644 --- a/src/test/28-accessing-analyzers.ts +++ b/src/test/28-accessing-analyzers.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Analyzer } from "../analyzer.js"; -import { Database } from "../database.js"; +import { Analyzer } from "../analyzers.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); diff --git a/src/test/29-manipulating-analyzers.ts b/src/test/29-manipulating-analyzers.ts index 0cf02e879..d3324b560 100644 --- a/src/test/29-manipulating-analyzers.ts +++ b/src/test/29-manipulating-analyzers.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Analyzer } from "../analyzer.js"; -import { Database } from "../database.js"; +import { Analyzer } from "../analyzers.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; function waitForAnalyzer(db: Database, name: string) { @@ -42,7 +42,7 @@ describe("Manipulating analyzers", function () { after(async () => { try { await analyzer.drop(); - } catch {} + } catch { } }); it("fetches information about the analyzer", async () => { const data = await analyzer.get(); diff --git a/src/test/29-queue-time.ts b/src/test/29-queue-time.ts index ba2686e05..88527011a 100644 --- a/src/test/29-queue-time.ts +++ b/src/test/29-queue-time.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); diff --git a/src/test/30-concurrent-transactions.ts b/src/test/30-concurrent-transactions.ts index 9c8124a46..3a15e949f 100644 --- a/src/test/30-concurrent-transactions.ts +++ b/src/test/30-concurrent-transactions.ts @@ -1,9 +1,9 @@ /* eslint-disable no-console */ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; +import { DocumentCollection } from "../collections.js"; import { Connection } from "../connection.js"; -import { Database } from "../database.js"; -import { Transaction } from "../transaction.js"; +import { Database } from "../databases.js"; +import { Transaction } from "../transactions.js"; import { config } from "./_config.js"; const delay = (ms: number) => @@ -37,7 +37,7 @@ describe("Transactions", function () { ); try { await system.dropDatabase(name); - } catch {} + } catch { } }); beforeEach(async () => { collection = await db.createCollection(`collection-${Date.now()}`); diff --git a/src/test/31-conflicts.ts b/src/test/31-conflicts.ts index 824fc08b7..2a1700b7f 100644 --- a/src/test/31-conflicts.ts +++ 
b/src/test/31-conflicts.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; import { aql } from "../aql.js"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); diff --git a/src/transaction.ts b/src/transactions.ts similarity index 70% rename from src/transaction.ts rename to src/transactions.ts index 29a3d0b8a..9329fb14b 100644 --- a/src/transaction.ts +++ b/src/transactions.ts @@ -1,29 +1,136 @@ /** * ```ts - * import type { Transaction } from "arangojs/transaction.js"; + * import type { Transaction } from "arangojs/transactions"; * ``` * - * The "transaction" module provides transaction related types and interfaces + * The "transactions" module provides transaction related types and interfaces * for TypeScript. * * @packageDocumentation */ -import { Connection } from "./connection.js"; -import { Database } from "./database.js"; -import { isArangoError } from "./error.js"; +import * as collections from "./collections.js"; +import * as connection from "./connection.js"; +import * as databases from "./databases.js"; +import * as errors from "./errors.js"; import { TRANSACTION_NOT_FOUND } from "./lib/codes.js"; +//#region Transaction operation options /** - * Indicates whether the given value represents a {@link Transaction}. - * - * @param transaction - A value that might be a transaction. + * Collections involved in a transaction. */ -export function isArangoTransaction( - transaction: any -): transaction is Transaction { - return Boolean(transaction && transaction.isArangoTransaction); +export type TransactionCollectionOptions = { + /** + * An array of collections or a single collection that will be read from or + * written to during the transaction with no other writes being able to run + * in parallel. + */ + exclusive?: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; + /** + * An array of collections or a single collection that will be read from or + * written to during the transaction. + */ + write?: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; + /** + * An array of collections or a single collection that will be read from + * during the transaction. + */ + read?: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; +}; + +/** + * @internal + */ +export function coerceTransactionCollections( + options: + | (TransactionCollectionOptions & { allowImplicit?: boolean }) + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection +): CoercedTransactionCollections { + if (typeof options === "string") { + return { write: [options] }; + } + if (Array.isArray(options)) { + return { write: options.map(collections.collectionToString) }; + } + if (collections.isArangoCollection(options)) { + return { write: collections.collectionToString(options) }; + } + const opts: CoercedTransactionCollections = {}; + if (options) { + if (options.allowImplicit !== undefined) { + opts.allowImplicit = options.allowImplicit; + } + if (options.read) { + opts.read = Array.isArray(options.read) + ? options.read.map(collections.collectionToString) + : collections.collectionToString(options.read); + } + if (options.write) { + opts.write = Array.isArray(options.write) + ? 
options.write.map(collections.collectionToString) + : collections.collectionToString(options.write); + } + if (options.exclusive) { + opts.exclusive = Array.isArray(options.exclusive) + ? options.exclusive.map(collections.collectionToString) + : collections.collectionToString(options.exclusive); + } + } + return opts; } +/** + * @internal + */ +type CoercedTransactionCollections = { + allowImplicit?: boolean; + exclusive?: string | string[]; + write?: string | string[]; + read?: string | string[]; +}; + +/** + * Options for how the transaction should be performed. + */ +export type TransactionOptions = { + /** + * Whether the transaction may read from collections not specified for this + * transaction. If set to `false`, accessing any collections not specified + * will result in the transaction being aborted to avoid potential deadlocks. + * + * Default: `true`. + */ + allowImplicit?: boolean; + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; + /** + * Determines whether to force the transaction to write all data to disk + * before returning. + */ + waitForSync?: boolean; + /** + * Determines how long the database will wait while attempting to gain locks + * on collections used by the transaction before timing out. + */ + lockTimeout?: number; + /** + * Determines the transaction size limit in bytes. + */ + maxTransactionSize?: number; + /** + * If set to `true`, the fast lock round will be skipped, which makes each + * locking operation take longer but guarantees deterministic locking order + * and may avoid deadlocks when many concurrent transactions are queued and + * try to access the same collection with an exclusive lock. + */ + skipFastLockRound?: boolean; +}; + /** * Options for how the transaction should be committed. */ @@ -47,13 +154,31 @@ export type TransactionAbortOptions = { */ allowDirtyRead?: boolean; }; +//#endregion + +//#region Transaction operation results +/** + * Details for a transaction. + * + * See also {@link TransactionInfo}. + */ +export type TransactionDetails = { + /** + * Unique identifier of the transaction. + */ + id: string; + /** + * Status (or "state") of the transaction. + */ + state: "running" | "committed" | "aborted"; +}; /** * Status of a given transaction. * - * See also {@link database.TransactionDetails}. + * See also {@link TransactionDetails}. */ -export type TransactionStatus = { +export type TransactionInfo = { /** * Unique identifier of the transaction. */ @@ -63,18 +188,30 @@ export type TransactionStatus = { */ status: "running" | "committed" | "aborted"; }; +//#endregion +//#region Transaction class +/** + * Indicates whether the given value represents a {@link Transaction}. + * + * @param transaction - A value that might be a transaction. + */ +export function isArangoTransaction( + transaction: any +): transaction is Transaction { + return Boolean(transaction && transaction.isArangoTransaction); +} /** - * Represents a streaming transaction in a {@link database.Database}. + * Represents a streaming transaction in a {@link databases.Database}. 
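Putting the collection and transaction options above together, a streaming transaction might be driven roughly like this (a sketch assuming `db.beginTransaction` accepts these option objects; the collection name is hypothetical):

```js
const db = new Database();
const trx = await db.beginTransaction(
  { write: ["accounts"] },
  { allowImplicit: false, lockTimeout: 5 }
);
await trx.step(() => db.collection("accounts").save({ balance: 0 }));
const info = await trx.commit();
console.log(info.status); // "committed"
```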
*/ export class Transaction { - protected _db: Database; + protected _db: databases.Database; protected _id: string; /** * @internal */ - constructor(db: Database, id: string) { + constructor(db: databases.Database, id: string) { this._db = db; this._id = id; } @@ -98,7 +235,7 @@ export class Transaction { /** * Unique identifier of this transaction. * - * See {@link database.Database#transaction}. + * See {@link databases.Database#transaction}. */ get id() { return this._id; @@ -120,7 +257,7 @@ export class Transaction { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === TRANSACTION_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === TRANSACTION_NOT_FOUND) { return false; } throw err; @@ -140,7 +277,7 @@ export class Transaction { * // the transaction exists * ``` */ - get(): Promise { + get(): Promise { return this._db.request( { path: `/_api/transaction/${encodeURIComponent(this.id)}`, @@ -164,7 +301,7 @@ export class Transaction { * // result indicates the updated transaction status * ``` */ - commit(options: TransactionCommitOptions = {}): Promise { + commit(options: TransactionCommitOptions = {}): Promise { const { allowDirtyRead = undefined } = options; return this._db.request( { @@ -191,7 +328,7 @@ export class Transaction { * // result indicates the updated transaction status * ``` */ - abort(options: TransactionAbortOptions = {}): Promise { + abort(options: TransactionAbortOptions = {}): Promise { const { allowDirtyRead = undefined } = options; return this._db.request( { @@ -391,7 +528,7 @@ export class Transaction { * ``` */ step(callback: () => Promise): Promise { - const conn = (this._db as any)._connection as Connection; + const conn = (this._db as any)._connection as connection.Connection; conn.setTransactionId(this.id); try { const promise = callback(); @@ -406,3 +543,4 @@ export class Transaction { } } } +//#endregion \ No newline at end of file diff --git a/src/users.ts b/src/users.ts new file mode 100644 index 000000000..a59968579 --- /dev/null +++ b/src/users.ts @@ -0,0 +1,106 @@ +/** + * ```ts + * import type { ArangoUser } from "arangojs/users"; + * ``` + * + * The "users" module provides types for ArangoDB users. + * + * @packageDocumentation + */ +import * as collections from "./collections.js"; +import * as databases from "./databases.js"; + +//#region Shared types +/** + * Access level for an ArangoDB user's access to a collection or database. + */ +export type AccessLevel = "rw" | "ro" | "none"; +//#endregion + +//#region User operation options +/** + * Options for modifying an ArangoDB user. + */ +export type UserOptions = { + /** + * Password the ArangoDB user will use for authentication. + */ + passwd: string; + /** + * Whether the ArangoDB user account is enabled and can authenticate. + * + * Default: `true` + */ + active?: boolean; + /** + * Additional information to store about this user. + * + * Default: `{}` + */ + extra?: Record; +}; + +/** + * Options for accessing or manipulating access levels. + */ +export type UserAccessLevelOptions = { + /** + * The database to access or manipulate the access level of. + * + * If `collection` is an `ArangoCollection`, this option defaults to the + * database the collection is contained in. Otherwise this option defaults to + * the current database. + */ + database?: databases.Database | string; + /** + * The collection to access or manipulate the access level of. 
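A sketch of how these access level options might be used, assuming the `Database` class keeps its `setUserAccessLevel` helper and accepts a `grant` property alongside them (the user and collection names are hypothetical):

```js
const db = new Database();
// Grant read-only access to a single collection in the current database.
await db.setUserAccessLevel("ada", { collection: "orders", grant: "ro" });
```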
+ */ + collection?: collections.ArangoCollection | string; +}; + +/** + * Database user to create with a database. + */ +export type CreateDatabaseUserOptions = { + /** + * Username of the user to create. + */ + username: string; + /** + * Password of the user to create. + * + * Default: `""` + */ + passwd?: string; + /** + * Whether the user is active. + * + * Default: `true` + */ + active?: boolean; + /** + * Additional data to store with the user object. + */ + extra?: Record; +}; +//#endregion + +//#region User operation results +/** + * Properties of an ArangoDB user object. + */ +export type ArangoUser = { + /** + * ArangoDB username of the user. + */ + user: string; + /** + * Whether the ArangoDB user account is enabled and can authenticate. + */ + active: boolean; + /** + * Additional information to store about this user. + */ + extra: Record; +}; +//#endregion \ No newline at end of file diff --git a/src/view.ts b/src/views.ts similarity index 80% rename from src/view.ts rename to src/views.ts index 05c6bfaaf..38a23554d 100644 --- a/src/view.ts +++ b/src/views.ts @@ -1,30 +1,28 @@ /** * ```ts - * import type { ArangoSearchView } from "arangojs/view.js"; + * import type { ArangoSearchView } from "arangojs/views"; * ``` * - * The "view" module provides View related types and interfaces for TypeScript. + * The "views" module provides View related types and interfaces for + * TypeScript. * * @packageDocumentation */ -import { ArangoApiResponse } from "./connection.js"; -import { Database } from "./database.js"; -import { isArangoError } from "./error.js"; +import * as connections from "./connection.js"; +import * as databases from "./databases.js"; +import * as errors from "./errors.js"; import { VIEW_NOT_FOUND } from "./lib/codes.js"; +//#region Shared types /** - * Indicates whether the given value represents a {@link View}. - * - * @param view - A value that might be a View. + * Sorting direction. Descending or ascending. */ -export function isArangoView(view: any): view is View { - return Boolean(view && view.isArangoView); -} +export type Direction = "desc" | "asc"; /** - * Sorting direction. Descending or ascending. + * Compression for storing data. */ -export type Direction = "desc" | "asc"; +export type Compression = "lz4" | "none"; /** * Policy to consolidate based on segment byte size and live document count as @@ -89,10 +87,12 @@ export type TierConsolidationPolicy = { }; /** - * Compression for storing data. + * Type of a View. */ -export type Compression = "lz4" | "none"; +export type ViewType = ViewDescription["type"]; +//#endregion +//#region CreateViewOptions /** * Options for creating a View. */ @@ -100,152 +100,19 @@ export type CreateViewOptions = | CreateArangoSearchViewOptions | CreateSearchAliasViewOptions; -/** - * Options for replacing a View's properties. - */ -export type ViewPropertiesOptions = - | ArangoSearchViewPropertiesOptions - | SearchAliasViewPropertiesOptions; - -/** - * Options for partially modifying a View's properties. - */ -export type ViewPatchPropertiesOptions = - | ArangoSearchViewPropertiesOptions - | SearchAliasViewPatchPropertiesOptions; - -/** - * A link definition for an ArangoSearch View. - */ -export type ArangoSearchViewLinkOptions = { +type CreateViewOptionsType = { /** - * A list of names of Analyzers to apply to values of processed document - * attributes. 
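For example, an initial user could be supplied when creating a database; this is a sketch assuming `createDatabase` accepts a `users` array of the options above, with all names being illustrative:

```js
const system = new Database(); // connected to the _system database
await system.createDatabase("my-database", {
  users: [{ username: "ada", passwd: "hunter2", active: true }],
});
```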
- * - * Default: `["identity"]` - */ - analyzers?: string[]; - /** - * An object mapping names of attributes to process for each document to - * {@link ArangoSearchViewLinkOptions} definitions. - */ - fields?: Record; - /** - * If set to `true`, all document attributes will be processed, otherwise - * only the attributes in `fields` will be processed. - * - * Default: `false` - */ - includeAllFields?: boolean; - /** - * (Enterprise Edition only.) An object mapping attribute names to - * {@link ArangoSearchViewLinkOptions} definitions to index sub-objects - * stored in an array. - */ - nested?: Record; - /** - * If set to `true`, the position of values in array values will be tracked, - * otherwise all values in an array will be treated as equal alternatives. - */ - trackListPositions?: boolean; - /** - * Controls how the view should keep track of the attribute values. - * - * Default: `"none"` - */ - storeValues?: "none" | "id"; - /** - * If set to `true`, then no exclusive lock is used on the source collection - * during View index creation, so that it remains basically available. - * - * Default: `false` - */ - inBackground?: boolean; - /** - * (Enterprise Edition only.) If set to `true`, then field normalization - * values will always be cached in memory. - * - * Default: `false` - */ - cache?: boolean; -}; - -/** - * Options for modifying the properties of an ArangoSearch View. - */ -export type ArangoSearchViewPropertiesOptions = { - /** - * How many commits to wait between removing unused files. - * - * Default: `2` - */ - cleanupIntervalStep?: number; - /** - * How long to wait between applying the `consolidationPolicy`. - * - * Default: `10000` - */ - consolidationIntervalMsec?: number; - /** - * How long to wait between commiting View data store changes and making - * documents visible to queries. - * - * Default: `1000` - */ - commitIntervalMsec?: number; - /** - * Consolidation policy to apply for selecting which segments should be - * merged. - * - * Default: `{ type: "tier" }` - */ - consolidationPolicy?: TierConsolidationPolicy; - /** - * An object mapping names of linked collections to - * {@link ArangoSearchViewLinkOptions} definitions. - */ - links?: Record>; -}; - -/** - * Options for partially modifying the properties of an ArangoSearch View. - */ -export type ArangoSearchViewPatchPropertiesOptions = - ArangoSearchViewPropertiesOptions; - -/** - * Options for creating a stored value in an ArangoSearch View. - */ -export interface ArangoSearchViewStoredValueOptions { - /** - * Attribute paths for which values should be stored in the view index - * in addition to those used for sorting via `primarySort`. - */ - fields: string[]; - /** - * How the attribute values should be compressed. - * - * Default: `"lz4"` - */ - compression?: Compression; - /** - * (Enterprise Edition only.) If set to `true`, then stored values will - * always be cached in memory. - * - * Default: `false` + * Type of the View. */ - cache?: boolean; -} + type: Type; +} & extra; /** * Options for creating an ArangoSearch View. */ -export type CreateArangoSearchViewOptions = +export type CreateArangoSearchViewOptions = CreateViewOptionsType< + "arangosearch", ArangoSearchViewPropertiesOptions & { - /** - * Type of the View. - */ - type: "arangosearch"; /** * Maximum number of writers cached in the pool. * @@ -274,32 +141,7 @@ export type CreateArangoSearchViewOptions = * If `direction` is set to `"desc"` or `asc` is set to `false`, * the primary sorting order will be descending. 
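A minimal sketch of creating an ArangoSearch View with these options (the view, collection and field names are hypothetical):

```js
const db = new Database();
await db.createView("articles-view", {
  type: "arangosearch",
  primarySort: [{ field: "publishedAt", direction: "desc" }],
  storedValues: [{ fields: ["title"], compression: "lz4" }],
  links: {
    articles: { includeAllFields: true, analyzers: ["identity"] },
  },
});
```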
*/ - primarySort?: ( - | { - /** - * Attribute path for the value of each document to use for - * sorting. - */ - field: string; - /** - * If set to `"asc"`, the primary sorting order will be ascending. - * If set to `"desc"`, the primary sorting order will be descending. - */ - direction: Direction; - } - | { - /** - * Attribute path for the value of each document to use for - * sorting. - */ - field: string; - /** - * If set to `true`, the primary sorting order will be ascending. - * If set to `false`, the primary sorting order will be descending. - */ - asc: boolean; - } - )[]; + primarySort?: CreateArangoSearchViewPrimarySortOptions[]; /** * Compression to use for the primary sort data. * @@ -324,41 +166,105 @@ export type CreateArangoSearchViewOptions = * Attribute paths for which values should be stored in the view index * in addition to those used for sorting via `primarySort`. */ - storedValues?: ArangoSearchViewStoredValueOptions[] | string[] | string[][]; + storedValues?: CreateArangoSearchViewStoredValueOptions[] | string[] | string[][]; /** * An array of strings defining sort expressions to optimize. */ optimizeTopK?: string[]; - }; + } +>; /** - * Options defining an index used in a SearchAlias View. + * Options for creating a primary sort in an ArangoSearch View. */ -export type SearchAliasViewIndexOptions = { +export type CreateArangoSearchViewPrimarySortOptions = ( + | { + /** + * Attribute path for the value of each document to use for + * sorting. + */ + field: string; + /** + * If set to `"asc"`, the primary sorting order will be ascending. + * If set to `"desc"`, the primary sorting order will be descending. + */ + direction: Direction; + } + | { + /** + * Attribute path for the value of each document to use for + * sorting. + */ + field: string; + /** + * If set to `true`, the primary sorting order will be ascending. + * If set to `false`, the primary sorting order will be descending. + */ + asc: boolean; + } +); + +/** + * Options for creating a stored value in an ArangoSearch View. + */ +export interface CreateArangoSearchViewStoredValueOptions { /** - * Name of a collection. + * Attribute paths for which values should be stored in the view index + * in addition to those used for sorting via `primarySort`. */ - collection: string; + fields: string[]; /** - * Name of an inverted index in the collection. + * How the attribute values should be compressed. + * + * Default: `"lz4"` */ - index: string; -}; + compression?: Compression; + /** + * (Enterprise Edition only.) If set to `true`, then stored values will + * always be cached in memory. + * + * Default: `false` + */ + cache?: boolean; +} /** - * Options for modifying the properties of a SearchAlias View. + * Options for creating a SearchAlias View. */ -export type SearchAliasViewPropertiesOptions = { +export type CreateSearchAliasViewOptions = CreateViewOptionsType< + "search-alias", + SearchAliasViewPropertiesOptions +>; +//#endregion + +//#region UpdateViewPropertiesOptions +/** + * Options for partially modifying a View's properties. + */ +export type UpdateViewPropertiesOptions = + | UpdateArangoSearchViewPropertiesOptions + | UpdateSearchAliasViewPropertiesOptions; + +/** + * Options for partially modifying the properties of an ArangoSearch View. + */ +export type UpdateArangoSearchViewPropertiesOptions = + ArangoSearchViewPropertiesOptions; + +/** + * Options for partially modifying the properties of a SearchAlias View. 
+ */ +export type UpdateSearchAliasViewPropertiesOptions = { /** * An array of inverted indexes to add to the View. */ - indexes: SearchAliasViewIndexOptions[]; + indexes: UpdateSearchAliasViewIndexOptions[]; }; /** * Options defining an index to be modified in a SearchAlias View. */ -export type SearchAliasViewPatchIndexOptions = SearchAliasViewIndexOptions & { +export type UpdateSearchAliasViewIndexOptions = SearchAliasViewIndexOptions & { /** * Whether to add or remove the index. * @@ -367,30 +273,147 @@ export type SearchAliasViewPatchIndexOptions = SearchAliasViewIndexOptions & { operation?: "add" | "del"; }; +//#endregion + +//#region ViewPropertiesOptions /** - * Options for partially modifying the properties of a SearchAlias View. + * Options for replacing a View's properties. */ -export type SearchAliasViewPatchPropertiesOptions = { +export type ViewPropertiesOptions = + | ArangoSearchViewPropertiesOptions + | SearchAliasViewPropertiesOptions; + +/** + * Options for modifying the properties of an ArangoSearch View. + */ +export type ArangoSearchViewPropertiesOptions = { + /** + * How many commits to wait between removing unused files. + * + * Default: `2` + */ + cleanupIntervalStep?: number; + /** + * How long to wait between applying the `consolidationPolicy`. + * + * Default: `10000` + */ + consolidationIntervalMsec?: number; + /** + * How long to wait between commiting View data store changes and making + * documents visible to queries. + * + * Default: `1000` + */ + commitIntervalMsec?: number; + /** + * Consolidation policy to apply for selecting which segments should be + * merged. + * + * Default: `{ type: "tier" }` + */ + consolidationPolicy?: TierConsolidationPolicy; + /** + * An object mapping names of linked collections to + * {@link ArangoSearchViewLinkOptions} definitions. + */ + links?: Record>; +}; + +/** + * A link definition for an ArangoSearch View. + */ +export type ArangoSearchViewLinkOptions = { + /** + * A list of names of Analyzers to apply to values of processed document + * attributes. + * + * Default: `["identity"]` + */ + analyzers?: string[]; + /** + * An object mapping names of attributes to process for each document to + * {@link ArangoSearchViewLinkOptions} definitions. + */ + fields?: Record; + /** + * If set to `true`, all document attributes will be processed, otherwise + * only the attributes in `fields` will be processed. + * + * Default: `false` + */ + includeAllFields?: boolean; + /** + * (Enterprise Edition only.) An object mapping attribute names to + * {@link ArangoSearchViewLinkOptions} definitions to index sub-objects + * stored in an array. + */ + nested?: Record; + /** + * If set to `true`, the position of values in array values will be tracked, + * otherwise all values in an array will be treated as equal alternatives. + */ + trackListPositions?: boolean; + /** + * Controls how the view should keep track of the attribute values. + * + * Default: `"none"` + */ + storeValues?: "none" | "id"; + /** + * If set to `true`, then no exclusive lock is used on the source collection + * during View index creation, so that it remains basically available. + * + * Default: `false` + */ + inBackground?: boolean; + /** + * (Enterprise Edition only.) If set to `true`, then field normalization + * values will always be cached in memory. + * + * Default: `false` + */ + cache?: boolean; +}; + +/** + * Options for modifying the properties of a SearchAlias View. 
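For instance, the ArangoSearch properties above could be adjusted on an existing View like this (a sketch; the view name, collection name and `text_en` Analyzer are illustrative):

```js
const db = new Database();
const view = db.view("articles-view");
await view.updateProperties({
  consolidationIntervalMsec: 5000,
  links: {
    articles: { fields: { title: { analyzers: ["text_en"] } } },
  },
});
```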
+ */ +export type SearchAliasViewPropertiesOptions = { /** * An array of inverted indexes to add to the View. */ - indexes: SearchAliasViewPatchIndexOptions[]; + indexes: SearchAliasViewIndexOptions[]; }; /** - * Options for creating a SearchAlias View. + * Options defining an index used in a SearchAlias View. */ -export type CreateSearchAliasViewOptions = SearchAliasViewPropertiesOptions & { +export type SearchAliasViewIndexOptions = { /** - * Type of the View. + * Name of a collection. */ - type: "search-alias"; + collection: string; + /** + * Name of an inverted index in the collection. + */ + index: string; }; +//#endregion + +//#region ViewDescription +export type ViewDescription = + | ArangoSearchViewDescription + | SearchAliasViewDescription; /** * Generic description of a View. */ -export type GenericViewDescription = { +export type ViewDescriptionType = { + /** + * Type of the View. + */ + type: Type; /** * A globally unique identifier for this View. */ @@ -405,35 +428,16 @@ export type GenericViewDescription = { name: string; }; -export type ViewDescription = - | ArangoSearchViewDescription - | SearchAliasViewDescription; - -export type ArangoSearchViewDescription = GenericViewDescription & { - type: "arangosearch"; -}; +export type ArangoSearchViewDescription = ViewDescriptionType<"arangosearch">; -export type SearchAliasViewDescription = GenericViewDescription & { - type: "search-alias"; -}; +export type SearchAliasViewDescription = ViewDescriptionType<"search-alias">; +//#endregion +//#region ViewProperties export type ViewProperties = | ArangoSearchViewProperties | SearchAliasViewProperties; -/** - * A link definition for an ArangoSearch View. - */ -export type ArangoSearchViewLink = { - analyzers: string[]; - fields: Record; - includeAllFields: boolean; - nested?: Record; - trackListPositions: boolean; - storeValues: "none" | "id"; - cache: boolean; -}; - /** * Properties of an ArangoSearch View. */ @@ -461,24 +465,54 @@ export type ArangoSearchViewProperties = ArangoSearchViewDescription & { optimizeTopK: string[]; }; +/** + * A link definition for an ArangoSearch View. + */ +export type ArangoSearchViewLink = { + analyzers: string[]; + fields: Record; + includeAllFields: boolean; + nested?: Record; + trackListPositions: boolean; + storeValues: "none" | "id"; + cache: boolean; +}; + /** * Properties of a SearchAlias View. */ export type SearchAliasViewProperties = SearchAliasViewDescription & { indexes: { collection: string; index: string }[]; }; +//#endregion + +//#region View class +/** + * Indicates whether the given value represents a {@link View}. + * + * @param view - A value that might be a View. + */ +export function isArangoView(view: any): view is View { + return Boolean(view && view.isArangoView); +} + +// Note: Resist the urge to attempt to create separate ArangoSearchView +// and SearchAliasView classes or interfaces. The requirements for producing +// a meaningful typedoc documentation, providing a nice API in the `Database` +// class and having these as separate interfaces seem to be mutually +// incompatible. /** - * Represents a View in a {@link database.Database}. + * Represents a View in a {@link databases.Database}. 
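Similarly, a search-alias View's indexes can be added or removed through the update options above (a sketch; all names are hypothetical):

```js
const db = new Database();
const alias = db.view("articles-alias");
await alias.updateProperties({
  indexes: [
    { collection: "articles", index: "idx-title", operation: "add" },
    { collection: "drafts", index: "idx-title", operation: "del" },
  ],
});
```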
*/ export class View { protected _name: string; - protected _db: Database; + protected _db: databases.Database; /** * @internal */ - constructor(db: Database, name: string) { + constructor(db: databases.Database, name: string) { this._db = db; this._name = name; } @@ -517,7 +551,7 @@ export class View { * // data contains general information about the View * ``` */ - get(): Promise> { + get(): Promise> { return this._db.request({ path: `/_api/view/${encodeURIComponent(this._name)}`, }); @@ -539,7 +573,7 @@ export class View { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === VIEW_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === VIEW_NOT_FOUND) { return false; } throw err; @@ -549,7 +583,7 @@ export class View { /** * Creates a View with the given `options` and the instance's name. * - * See also {@link database.Database#createView}. + * See also {@link databases.Database#createView}. * * @example * ```js @@ -581,8 +615,8 @@ export class View { /** * Renames the View and updates the instance's `name` to `newName`. * - * Additionally removes the instance from the {@link database.Database}'s internal - * cache. + * Additionally removes the instance from the {@link databases.Database}'s + * internal cache. * * **Note**: Renaming Views may not be supported when ArangoDB is * running in a cluster configuration. @@ -600,7 +634,7 @@ export class View { * // view1 and view3 represent the same ArangoDB view! * ``` */ - async rename(newName: string): Promise> { + async rename(newName: string): Promise> { const result = this._db.renameView(this._name, newName); this._name = newName; return result; @@ -617,7 +651,7 @@ export class View { * // data contains the View's properties * ``` */ - properties(): Promise> { + properties(): Promise> { return this._db.request({ path: `/_api/view/${encodeURIComponent(this._name)}/properties`, }); @@ -638,12 +672,12 @@ export class View { * console.log(result.consolidationIntervalMsec); // 234 * ``` */ - updateProperties( + updateProperties( properties?: Properties ): Promise< - Properties extends ArangoSearchViewPatchPropertiesOptions + Properties extends UpdateArangoSearchViewPropertiesOptions ? ArangoSearchViewProperties - : Properties extends SearchAliasViewPatchPropertiesOptions + : Properties extends UpdateSearchAliasViewPropertiesOptions ? SearchAliasViewProperties : ViewProperties > { @@ -707,3 +741,4 @@ export class View { ); } } +//#endregion \ No newline at end of file From e1cd08ff3cba4c7a870c89dffa796a0a784fd50f Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Thu, 28 Nov 2024 14:16:29 +0100 Subject: [PATCH 04/21] 10.0.0-alpha.0 --- CHANGELOG.md | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 91cb1a0f3..08dc44613 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,7 +14,7 @@ This driver uses semantic versioning: - A change in the major version (e.g. 1.Y.Z -> 2.0.0) indicates _breaking_ changes that require changes in your code to upgrade. -## [Unreleased] +## [10.0.0-alpha.0] - 2024-11-28 This is a major release and breaks backwards compatibility. @@ -2288,7 +2288,7 @@ For a detailed list of changes between pre-release versions of v7 see the Graph methods now only return the relevant part of the response body. 
-[unreleased]: https://github.com/arangodb/arangojs/compare/v9.2.0...HEAD +[10.0.0-alpha.0]: https://github.com/arangodb/arangojs/compare/v9.2.0...v10.0.0-alpha.0 [9.2.0]: https://github.com/arangodb/arangojs/compare/v9.1.0...v9.2.0 [9.1.0]: https://github.com/arangodb/arangojs/compare/v9.0.0...v9.1.0 [9.0.0]: https://github.com/arangodb/arangojs/compare/v8.8.1...v9.0.0 diff --git a/package.json b/package.json index 89e98454e..91c62b3a7 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "private": true, "type": "module", "name": "arangojs", - "version": "9.2.0", + "version": "10.0.0-alpha.0", "engines": { "node": ">=18" }, From 62d17e924e075f7a337d88d8c91bbd6f8d6c6e69 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Thu, 28 Nov 2024 15:04:19 +0100 Subject: [PATCH 05/21] More renaming --- CHANGELOG.md | 7 +++++++ src/collections.ts | 12 ++++++------ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 08dc44613..76f33ea0b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,13 @@ This driver uses semantic versioning: - A change in the major version (e.g. 1.Y.Z -> 2.0.0) indicates _breaking_ changes that require changes in your code to upgrade. +## [Unreleased] + +### Changed + +- Renamed `CollectionDropOptions` type to `DropCollectionOptions` +- Renamed `CollectionTruncateOptions` type to `TruncateCollectionOptions` + ## [10.0.0-alpha.0] - 2024-11-28 This is a major release and breaks backwards compatibility. diff --git a/src/collections.ts b/src/collections.ts index 942c064ee..557bdcaaf 100644 --- a/src/collections.ts +++ b/src/collections.ts @@ -339,7 +339,7 @@ export type CollectionChecksumOptions = { /** * Options for truncating collections. */ -export type CollectionTruncateOptions = { +export type TruncateCollectionOptions = { /** * Whether the collection should be compacted after truncation. */ @@ -354,7 +354,7 @@ export type CollectionTruncateOptions = { /** * Options for dropping collections. */ -export type CollectionDropOptions = { +export type DropCollectionOptions = { /** * Whether the collection is a system collection. If the collection is a * system collection, this option must be set to `true` or ArangoDB will @@ -819,7 +819,7 @@ export interface DocumentCollection< * // the collection "some-collection" is now empty * ``` */ - truncate(options?: CollectionTruncateOptions): Promise>; + truncate(options?: TruncateCollectionOptions): Promise>; /** * Deletes the collection from the database. * @@ -834,7 +834,7 @@ export interface DocumentCollection< * ``` */ drop( - options?: CollectionDropOptions + options?: DropCollectionOptions ): Promise>; /** * Triggers compaction for a collection. 
@@ -2415,7 +2415,7 @@ export class Collection< return result; } - truncate(options?: CollectionTruncateOptions): Promise> { + truncate(options?: TruncateCollectionOptions): Promise> { return this._db.request({ method: "PUT", path: `/_api/collection/${this._name}/truncate`, @@ -2423,7 +2423,7 @@ export class Collection< }); } - drop(options?: CollectionDropOptions) { + drop(options?: DropCollectionOptions) { return this._db.request({ method: "DELETE", path: `/_api/collection/${encodeURIComponent(this._name)}`, From a2a07ca9e5e1eb2b1392c0294cd861cb80f964d0 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Thu, 28 Nov 2024 15:04:37 +0100 Subject: [PATCH 06/21] Change error constructor signatures --- CHANGELOG.md | 4 ++++ src/errors.ts | 35 +++++++++++++++++++++++------------ src/lib/request.ts | 8 ++++---- 3 files changed, 31 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 76f33ea0b..fcd32716a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ This driver uses semantic versioning: - Renamed `CollectionDropOptions` type to `DropCollectionOptions` - Renamed `CollectionTruncateOptions` type to `TruncateCollectionOptions` +- Changed error type constructor signatures + + The `request` property is now always positional and the `options` property + is always optional. ## [10.0.0-alpha.0] - 2024-11-28 diff --git a/src/errors.ts b/src/errors.ts index ffb2107d3..53f7d71da 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -175,7 +175,7 @@ interface SystemError extends Error { export class PropagationTimeoutError extends Error { name = "PropagationTimeoutError"; - constructor(message: string | undefined, options: { cause: Error }) { + constructor(message?: string, options: { cause?: Error } = {}) { super(message ?? 'Timed out while waiting for propagation', options); } } @@ -196,8 +196,8 @@ export class NetworkError extends Error { */ request: globalThis.Request; - constructor(message: string, options: { request: globalThis.Request, cause?: Error, isSafeToRetry?: boolean | null }) { - const { request, isSafeToRetry = null, ...opts } = options; + constructor(message: string, request: globalThis.Request, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + const { isSafeToRetry = null, ...opts } = options; super(message, opts); this.request = request; this.isSafeToRetry = isSafeToRetry; @@ -219,8 +219,8 @@ export class NetworkError extends Error { export class ResponseTimeoutError extends NetworkError { name = "ResponseTimeoutError"; - constructor(message: string | undefined, options: { request: globalThis.Request, cause?: Error, isSafeToRetry?: boolean | null }) { - super(message ?? 'Timed out while waiting for server response', options); + constructor(message: string | undefined, request: globalThis.Request, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + super(message ?? 'Timed out while waiting for server response', request, options); } } @@ -230,8 +230,8 @@ export class ResponseTimeoutError extends NetworkError { export class RequestAbortedError extends NetworkError { name = "RequestAbortedError"; - constructor(message: string | undefined, options: { request: globalThis.Request, cause?: Error, isSafeToRetry?: boolean | null }) { - super(message ?? 'Request aborted', options); + constructor(message: string | undefined, request: globalThis.Request, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + super(message ?? 
'Request aborted', request, options); } } @@ -243,9 +243,9 @@ export class RequestAbortedError extends NetworkError { export class FetchFailedError extends NetworkError { name = "FetchFailedError"; - constructor(message: string | undefined, options: { request: globalThis.Request, cause: TypeError, isSafeToRetry?: boolean | null }) { + constructor(message: string | undefined, request: globalThis.Request, options: { cause?: TypeError, isSafeToRetry?: boolean | null } = {}) { let isSafeToRetry = options.isSafeToRetry; - if (options.cause.cause instanceof Error) { + if (options.cause?.cause instanceof Error) { if (isSafeToRetry === undefined) { isSafeToRetry = isSafeToRetryFailedFetch(options.cause.cause) || undefined; } @@ -253,7 +253,7 @@ export class FetchFailedError extends NetworkError { message = `Fetch failed: ${options.cause.cause.message}`; } } - super(message ?? 'Fetch failed', { ...options, isSafeToRetry }); + super(message ?? 'Fetch failed', request, { ...options, isSafeToRetry }); } } @@ -278,7 +278,7 @@ export class HttpError extends NetworkError { */ constructor(response: connection.ProcessedResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { const message = messages[response.status] ?? messages[500]; - super(message, { ...options, request: response.request }); + super(message, response.request, options); this.response = response; this.code = response.status; } @@ -305,6 +305,13 @@ export class ArangoError extends Error { */ isSafeToRetry: boolean | null = null; + /** + * @internal + */ + get error(): true { + return true; + } + /** * ArangoDB error code. * @@ -338,7 +345,7 @@ export class ArangoError extends Error { /** * Creates a new `ArangoError` from an ArangoDB error response. */ - constructor(data: ArangoErrorResponse, options: { cause?: Error, isSafeToRetry?: boolean | null }) { + constructor(data: ArangoErrorResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { const { isSafeToRetry, ...opts } = options; super(data.errorMessage, opts); this.errorNum = data.errorNum; @@ -391,4 +398,8 @@ export class ArangoError extends Error { code: this.code, }; } + + toString() { + return `${this.name} ${this.errorNum}: ${this.message}`; + } } \ No newline at end of file diff --git a/src/lib/request.ts b/src/lib/request.ts index ce31e7d48..e8993acd3 100644 --- a/src/lib/request.ts +++ b/src/lib/request.ts @@ -128,14 +128,14 @@ export function createRequest( if (signal.aborted) { const reason = typeof signal.reason == 'string' ? 
signal.reason : undefined; if (reason === REASON_TIMEOUT) { - error = new ResponseTimeoutError(undefined, { request }); + error = new ResponseTimeoutError(undefined, request, { cause }); } else { - error = new RequestAbortedError(reason, { request, cause }); + error = new RequestAbortedError(reason, request, { cause }); } } else if (cause instanceof TypeError) { - error = new FetchFailedError(undefined, { request, cause }); + error = new FetchFailedError(undefined, request, { cause }); } else { - error = new NetworkError(cause.message, { request, cause }); + error = new NetworkError(cause.message, request, { cause }); } if (config.afterResponse) { const p = config.afterResponse(error); From be8cc34f970ec0f45c0acdc038a663239e04ed97 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Thu, 28 Nov 2024 17:58:37 +0100 Subject: [PATCH 07/21] More cleanup --- CHANGELOG.md | 23 + src/config.ts | 257 ++++++++++ src/connection.ts | 510 +++++++++----------- src/cursors.ts | 2 +- src/errors.ts | 22 +- src/lib/joinPath.ts | 19 - src/lib/mergeHeaders.ts | 20 - src/lib/normalizeUrl.ts | 17 - src/lib/request.ts | 160 ------ src/lib/util.ts | 94 ++++ src/lib/{linkedList.ts => x3-linkedlist.ts} | 0 src/routes.ts | 2 +- src/test/08-cursors.ts | 2 +- src/test/_config.ts | 7 +- 14 files changed, 604 insertions(+), 531 deletions(-) create mode 100644 src/config.ts delete mode 100644 src/lib/joinPath.ts delete mode 100644 src/lib/mergeHeaders.ts delete mode 100644 src/lib/normalizeUrl.ts delete mode 100644 src/lib/request.ts create mode 100644 src/lib/util.ts rename src/lib/{linkedList.ts => x3-linkedlist.ts} (100%) diff --git a/CHANGELOG.md b/CHANGELOG.md index fcd32716a..c374dbeac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,12 +19,35 @@ This driver uses semantic versioning: ### Changed - Renamed `CollectionDropOptions` type to `DropCollectionOptions` + - Renamed `CollectionTruncateOptions` type to `TruncateCollectionOptions` + - Changed error type constructor signatures The `request` property is now always positional and the `options` property is always optional. +- Moved configuration related types to new `config` module + + The following types were moved: `Config`, `LoadBalancingStrategy`, + `BasicAuthCredentials` and `BearerAuthCredentials`. + +- Moved `ArangoErrorResponse` type to `connection` module + + The type is now also no longer marked as internal. + +- Moved internal utility functions to new `lib/util` module + + These methods are all still marked as internal and should not be used + directly. + +- Closing a connection now closes all open requests + + Previously in certain situations only the most recent request would be + closed per server. Note that this still merely aborts the requests but + does not guarantee the underlying connections are closed as these are + handled by Node.js or the browser natively. + ## [10.0.0-alpha.0] - 2024-11-28 This is a major release and breaks backwards compatibility. diff --git a/src/config.ts b/src/config.ts new file mode 100644 index 000000000..c537cb9b1 --- /dev/null +++ b/src/config.ts @@ -0,0 +1,257 @@ +/** + * ```ts + * import type { Config } from "arangojs/config"; + * ``` + * + * The "config" module provides configuration related types for TypeScript. + * + * @packageDocumentation + */ +import * as errors from "./errors.js"; + +//#region Shared types +/** + * Determines the behavior when multiple URLs are used: + * + * - `"NONE"`: No load balancing. All requests will be handled by the first + * URL in the list until a network error is encountered. 
On network error, + * arangojs will advance to using the next URL in the list. + * + * - `"ONE_RANDOM"`: Randomly picks one URL from the list initially, then + * behaves like `"NONE"`. + * + * - `"ROUND_ROBIN"`: Every sequential request uses the next URL in the list. + */ +export type LoadBalancingStrategy = "NONE" | "ROUND_ROBIN" | "ONE_RANDOM"; +//#endregion + +//#region Credentials +/** + * Credentials for HTTP Basic authentication. + */ +export type BasicAuthCredentials = { + /** + * Username to use for authentication, e.g. `"root"`. + */ + username: string; + /** + * Password to use for authentication. Defaults to an empty string. + */ + password?: string; +}; + +/** + * Credentials for HTTP Bearer token authentication. + */ +export type BearerAuthCredentials = { + /** + * Bearer token to use for authentication. + */ + token: string; +}; + +/** + * Determines if the given credentials are for Bearer token authentication. + */ +export function isBearerAuth(auth: BasicAuthCredentials | BearerAuthCredentials): auth is BearerAuthCredentials { + return auth.hasOwnProperty("token"); +} +//#endregion + +//#region Config +/** + * Options for configuring arangojs. + */ +export type Config = { + /** + * Name of the database to use. + * + * Default: `"_system"` + */ + databaseName?: string; + /** + * Base URL of the ArangoDB server or list of server URLs. + * + * When working with a cluster, the method {@link databases.Database#acquireHostList} + * can be used to automatically pick up additional coordinators/followers at + * any point. + * + * When running ArangoDB on a unix socket, e.g. `/tmp/arangodb.sock`, the + * following URL formats are supported for unix sockets: + * + * - `unix:///tmp/arangodb.sock` (no SSL) + * - `http+unix:///tmp/arangodb.sock` (or `https+unix://` for SSL) + * - `http://unix:/tmp/arangodb.sock` (or `https://unix:` for SSL) + * + * Additionally `ssl` and `tls` are treated as synonymous with `https` and + * `tcp` is treated as synonymous with `http`, so the following URLs are + * considered identical: + * + * - `tcp://127.0.0.1:8529` and `http://127.0.0.1:8529` + * - `ssl://127.0.0.1:8529` and `https://127.0.0.1:8529` + * - `tcp+unix:///tmp/arangodb.sock` and `http+unix:///tmp/arangodb.sock` + * - `ssl+unix:///tmp/arangodb.sock` and `https+unix:///tmp/arangodb.sock` + * - `tcp://unix:/tmp/arangodb.sock` and `http://unix:/tmp/arangodb.sock` + * - `ssl://unix:/tmp/arangodb.sock` and `https://unix:/tmp/arangodb.sock` + * + * See also `auth` for passing authentication credentials. + * + * Default: `"http://127.0.0.1:8529"` + */ + url?: string | string[]; + /** + * Credentials to use for authentication. + * + * See also {@link databases.Database#useBasicAuth} and + * {@link databases.Database#useBearerAuth}. + * + * Default: `{ username: "root", password: "" }` + */ + auth?: BasicAuthCredentials | BearerAuthCredentials; + /** + * Numeric representation of the ArangoDB version the driver should expect. + * The format is defined as `XYYZZ` where `X` is the major version, `Y` is + * the zero-filled two-digit minor version and `Z` is the zero-filled two-digit + * bugfix version, e.g. `30102` for 3.1.2, `20811` for 2.8.11. + * + * Depending on this value certain methods may become unavailable or change + * their behavior to remain compatible with different versions of ArangoDB. + * + * Default: `31100` + */ + arangoVersion?: number; + /** + * Determines the behavior when multiple URLs are provided: + * + * - `"NONE"`: No load balancing. 
All requests will be handled by the first + * URL in the list until a network error is encountered. On network error, + * arangojs will advance to using the next URL in the list. + * + * - `"ONE_RANDOM"`: Randomly picks one URL from the list initially, then + * behaves like `"NONE"`. + * + * - `"ROUND_ROBIN"`: Every sequential request uses the next URL in the list. + * + * Default: `"NONE"` + */ + loadBalancingStrategy?: LoadBalancingStrategy; + /** + * Determines the behavior when a request fails because the underlying + * connection to the server could not be opened + * (i.e. [`ECONNREFUSED` in Node.js](https://nodejs.org/api/errors.html#errors_common_system_errors)): + * + * - `false`: the request fails immediately. + * + * - `0`: the request is retried until a server can be reached but only a + * total number of times matching the number of known servers (including + * the initial failed request). + * + * - any other number: the request is retried until a server can be reached + * or the request has been retried a total of `maxRetries` number of times + * (not including the initial failed request). + * + * When working with a single server, the retries (if any) will be made to + * the same server. + * + * This setting currently has no effect when using arangojs in a browser. + * + * **Note**: Requests bound to a specific server (e.g. fetching query results) + * will never be retried automatically and ignore this setting. + * + * **Note**: To set the number of retries when a write-write conflict is + * encountered, see `retryOnConflict` instead. + * + * Default: `0` + */ + maxRetries?: false | number; + /** + * Maximum number of parallel requests arangojs will perform. If any + * additional requests are attempted, they will be enqueued until one of the + * active requests has completed. + * + * **Note:** when using `ROUND_ROBIN` load balancing and passing an array of + * URLs in the `url` option, the default value of this option will be set to + * `3 * url.length` instead of `3`. + * + * Default: `3` + */ + poolSize?: number; + /** + * (Browser only.) Determines whether credentials (e.g. cookies) will be sent + * with requests to the ArangoDB server. + * + * If set to `same-origin`, credentials will only be included with requests + * on the same URL origin as the invoking script. If set to `include`, + * credentials will always be sent. If set to `omit`, credentials will be + * excluded from all requests. + * + * Default: `same-origin` + */ + credentials?: "omit" | "include" | "same-origin"; + /** + * If set to `true`, requests will keep the underlying connection open until + * it times out or is closed. In browsers this prevents requests from being + * cancelled when the user navigates away from the page. + * + * Default: `true` + */ + keepalive?: boolean; + /** + * Callback that will be invoked with the finished request object before it + * is finalized. In the browser the request may already have been sent. + * + * @param req - Request object or XHR instance used for this request. + */ + beforeRequest?: (req: globalThis.Request) => void | Promise; + /** + * Callback that will be invoked when the server response has been received + * and processed or when the request has been failed without a response. + * + * The originating request will be available as the `request` property + * on either the error or response object. + * + * @param err - Error encountered when handling this request or `null`. + * @param res - Response object for this request, if no error occurred. 
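+   *
+   * A minimal sketch of how this callback might be used; the URL shown is
+   * simply the default server address and both handlers are illustrative:
+   *
+   * @example
+   * ```js
+   * const db = new Database({
+   *   url: "http://127.0.0.1:8529",
+   *   afterResponse(err, res) {
+   *     // Log failures, otherwise log which request produced the response.
+   *     if (err) console.error("Request failed:", err.message);
+   *     else if (res) console.log("Response received for", res.request.url);
+   *   },
+   * });
+   * ```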
+ */ + afterResponse?: (err: errors.NetworkError | null, res?: globalThis.Response & { request: globalThis.Request; }) => void | Promise; + /** + * Callback that will be invoked when a request + * + * @param err - Error encountered when handling this request. + */ + onError?: (err: Error) => void | Promise; + /** + * If set to a positive number, requests will automatically be retried at + * most this many times if they result in a write-write conflict. + * + * Default: `0` + */ + retryOnConflict?: number; + /** + * An object with additional headers to send with every request. + * + * If an `"authorization"` header is provided, it will be overridden when + * using {@link databases.Database#useBasicAuth}, {@link databases.Database#useBearerAuth} or + * the `auth` configuration option. + */ + headers?: Headers | Record; + /** + * If set to `true`, arangojs will generate stack traces every time a request + * is initiated and augment the stack traces of any errors it generates. + * + * **Warning**: This will cause arangojs to generate stack traces in advance + * even if the request does not result in an error. Generating stack traces + * may negatively impact performance. + */ + precaptureStackTraces?: boolean; + /** + * Limits the number of values of server-reported response queue times that + * will be stored and accessible using {@link databases.Database#queueTime}. If set to + * a finite value, older values will be discarded to make room for new values + * when that limit is reached. + * + * Default: `10` + */ + responseQueueTimeSamples?: number; +}; +//#endregion \ No newline at end of file diff --git a/src/connection.ts b/src/connection.ts index 48efe1053..38348cf10 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -1,44 +1,184 @@ /** * ```ts - * import type { Config } from "arangojs/connection"; + * import type { ArangoApiResponse } from "arangojs/connection"; * ``` * - * The "connection" module provides connection and configuration related types - * for TypeScript. + * The "connection" module provides connection related types for TypeScript. * * @packageDocumentation */ import * as administration from "./administration.js"; +import * as configuration from "./config.js"; import * as databases from "./databases.js"; import * as errors from "./errors.js"; -import { LinkedList } from "./lib/linkedList.js"; +import * as util from "./lib/util.js"; +import { LinkedList } from "./lib/x3-linkedlist.js"; import { ERROR_ARANGO_CONFLICT } from "./lib/codes.js"; -import { normalizeUrl } from "./lib/normalizeUrl.js"; -import { - createRequest, - RequestConfig, - RequestFunction, -} from "./lib/request.js"; -import { joinPath } from "./lib/joinPath.js"; -import { mergeHeaders } from "./lib/mergeHeaders.js"; const MIME_JSON = /\/(json|javascript)(\W|$)/; const LEADER_ENDPOINT_HEADER = "x-arango-endpoint"; +const REASON_TIMEOUT = 'timeout'; +//#region ServerFetchFunction /** - * Determines the behavior when multiple URLs are used: - * - * - `"NONE"`: No load balancing. All requests will be handled by the first - * URL in the list until a network error is encountered. On network error, - * arangojs will advance to using the next URL in the list. 
+ * @internal + */ +type CreateServerFetchFunctionOptions = Omit & { + beforeRequest?: (req: globalThis.Request) => void | Promise; + afterResponse?: (err: errors.NetworkError | null, res?: globalThis.Response & { request: globalThis.Request }) => void | Promise; +}; + +/** + * @internal + */ +type ServerFetchFunction = { + /** + * @internal + * + * Perform a fetch request against this host. + * + * @param pathname - URL path, relative to the `basePath` and server domain. + * @param options - Options for this fetch request. + */ + (pathname: string, options: ServerFetchOptions): Promise; + /** + * @internal + * + * Close the pending request, if any. + */ + close: () => void; +}; + +/** + * @internal + */ +type ServerFetchOptions = Omit & { + search?: URLSearchParams; + timeout?: number; +}; + +/** + * @internal * - * - `"ONE_RANDOM"`: Randomly picks one URL from the list initially, then - * behaves like `"NONE"`. + * Create a function for performing fetch requests against a given host. * - * - `"ROUND_ROBIN"`: Every sequential request uses the next URL in the list. + * @param baseUrl - Base URL of the host, i.e. protocol, port and domain name. + * @param options - Options to use for all fetch requests. */ -export type LoadBalancingStrategy = "NONE" | "ROUND_ROBIN" | "ONE_RANDOM"; +function createServerFetchFunction( + baseUrl: URL, + { + beforeRequest, + afterResponse, + ...serverFetchOptions + }: CreateServerFetchFunctionOptions +): ServerFetchFunction { + const pending = new Map(); + return Object.assign( + async function serverFetch( + pathname: string, + { + search, + body, + timeout, + ...fetchOptions + }: ServerFetchOptions) { + const url = new URL(pathname + baseUrl.search, baseUrl); + if (search) { + for (const [key, value] of search) { + url.searchParams.append(key, value); + } + } + if (body instanceof FormData) { + const res = new Response(body); + const blob = await res.blob(); + // Workaround for ArangoDB 3.12.0-rc1 and earlier: + // Omitting the final CRLF results in "bad request body" fatal error + body = new Blob([blob, "\r\n"], { type: blob.type }); + } + const headers = util.mergeHeaders(serverFetchOptions.headers, fetchOptions.headers); + if (!headers.has("authorization")) { + headers.set( + "authorization", + `Basic ${btoa( + `${baseUrl.username || "root"}:${baseUrl.password || ""}` + )}` + ); + } + const abortController = new AbortController(); + const signal = abortController.signal; + const request = new Request(url, { + ...serverFetchOptions, + ...fetchOptions, + headers, + body, + signal, + }); + if (beforeRequest) { + const p = beforeRequest(request); + if (p instanceof Promise) await p; + } + const requestId = util.generateRequestId(); + pending.set(requestId, abortController); + let clearTimer: (() => void) | undefined; + if (timeout) { + clearTimer = util.createTimer(timeout, () => { + clearTimer = undefined; + abortController.abort(REASON_TIMEOUT); + }); + } + let response: globalThis.Response & { request: globalThis.Request }; + try { + response = Object.assign(await fetch(request), { request }); + } catch (e: unknown) { + const cause = e instanceof Error ? e : new Error(String(e)); + let error: errors.NetworkError; + if (signal.aborted) { + const reason = typeof signal.reason == 'string' ? 
signal.reason : undefined; + if (reason === REASON_TIMEOUT) { + error = new errors.ResponseTimeoutError(undefined, request, { cause }); + } else { + error = new errors.RequestAbortedError(reason, request, { cause }); + } + } else if (cause instanceof TypeError) { + error = new errors.FetchFailedError(undefined, request, { cause }); + } else { + error = new errors.NetworkError(cause.message, request, { cause }); + } + if (afterResponse) { + const p = afterResponse(error); + if (p instanceof Promise) await p; + } + throw error; + } finally { + clearTimer?.(); + pending.delete(requestId); + } + if (afterResponse) { + const p = afterResponse(null, response); + if (p instanceof Promise) await p; + } + return response; + }, + { + close() { + if (!pending.size) return; + const controllers = [...pending.values()]; + pending.clear(); + for (const controller of controllers) { + try { + controller.abort(); + } catch (e) { + // noop + } + } + }, + } + ); +} +//#endregion +//#region Response types /** * Generic properties shared by all ArangoDB HTTP API responses. */ @@ -59,48 +199,52 @@ export type ArangoResponseMetadata = { export type ArangoApiResponse = T & ArangoResponseMetadata; /** - * Credentials for HTTP Basic authentication. + * Interface representing an ArangoDB error response. */ -export type BasicAuthCredentials = { +export type ArangoErrorResponse = { /** - * Username to use for authentication, e.g. `"root"`. + * Indicates that the request resulted in an error. */ - username: string; + error: true; /** - * Password to use for authentication. Defaults to an empty string. + * Intended response status code as provided in the response body. */ - password?: string; -}; - -/** - * Credentials for HTTP Bearer token authentication. - */ -export type BearerAuthCredentials = { + code: number; /** - * Bearer token to use for authentication. + * Error message as provided in the response body. */ - token: string; -}; - -function isBearerAuth(auth: any): auth is BearerAuthCredentials { - return auth.hasOwnProperty("token"); + errorMessage: string; + /** + * ArangoDB error code as provided in the response body. + * + * See the [ArangoDB error documentation](https://docs.arangodb.com/stable/develop/error-codes-and-meanings/) + * for more information. + */ + errorNum: number; } /** - * @internal + * Processed response object. */ -function generateStackTrace() { - let err = new Error(); - if (!err.stack) { - try { - throw err; - } catch (e: any) { - err = e; - } - } - return err; -} +export interface ProcessedResponse extends globalThis.Response { + /** + * @internal + * + * Identifier of the ArangoDB host that served this request. + */ + arangojsHostUrl?: string; + /** + * Fetch request object. + */ + request: globalThis.Request; + /** + * Parsed response body. + */ + parsedBody?: T; +}; +//#endregion +//#region Request options /** * Options for performing a request with arangojs. */ @@ -167,27 +311,9 @@ export type RequestOptions = { */ search?: URLSearchParams | Record; }; +//#endregion -/** - * Processed response object. - */ -export interface ProcessedResponse extends globalThis.Response { - /** - * @internal - * - * Identifier of the ArangoDB host that served this request. - */ - arangojsHostUrl?: string; - /** - * Fetch request object. - */ - request: globalThis.Request; - /** - * Parsed response body. 
- */ - parsedBody?: T; -}; - +//#region Connection class /** * @internal */ @@ -200,211 +326,9 @@ type Task = { reject: (error: unknown) => void; transform?: (res: ProcessedResponse) => T; retries: number; - options: { - method: string; - expectBinary: boolean; - timeout?: number; - pathname: string; - search?: URLSearchParams; - headers: Headers; - body: any; - }; -}; - -/** - * Options for configuring arangojs. - */ -export type Config = { - /** - * Name of the database to use. - * - * Default: `"_system"` - */ - databaseName?: string; - /** - * Base URL of the ArangoDB server or list of server URLs. - * - * When working with a cluster, the method {@link databases.Database#acquireHostList} - * can be used to automatically pick up additional coordinators/followers at - * any point. - * - * When running ArangoDB on a unix socket, e.g. `/tmp/arangodb.sock`, the - * following URL formats are supported for unix sockets: - * - * - `unix:///tmp/arangodb.sock` (no SSL) - * - `http+unix:///tmp/arangodb.sock` (or `https+unix://` for SSL) - * - `http://unix:/tmp/arangodb.sock` (or `https://unix:` for SSL) - * - * Additionally `ssl` and `tls` are treated as synonymous with `https` and - * `tcp` is treated as synonymous with `http`, so the following URLs are - * considered identical: - * - * - `tcp://127.0.0.1:8529` and `http://127.0.0.1:8529` - * - `ssl://127.0.0.1:8529` and `https://127.0.0.1:8529` - * - `tcp+unix:///tmp/arangodb.sock` and `http+unix:///tmp/arangodb.sock` - * - `ssl+unix:///tmp/arangodb.sock` and `https+unix:///tmp/arangodb.sock` - * - `tcp://unix:/tmp/arangodb.sock` and `http://unix:/tmp/arangodb.sock` - * - `ssl://unix:/tmp/arangodb.sock` and `https://unix:/tmp/arangodb.sock` - * - * See also `auth` for passing authentication credentials. - * - * Default: `"http://127.0.0.1:8529"` - */ - url?: string | string[]; - /** - * Credentials to use for authentication. - * - * See also {@link databases.Database#useBasicAuth} and - * {@link databases.Database#useBearerAuth}. - * - * Default: `{ username: "root", password: "" }` - */ - auth?: BasicAuthCredentials | BearerAuthCredentials; - /** - * Numeric representation of the ArangoDB version the driver should expect. - * The format is defined as `XYYZZ` where `X` is the major version, `Y` is - * the zero-filled two-digit minor version and `Z` is the zero-filled two-digit - * bugfix version, e.g. `30102` for 3.1.2, `20811` for 2.8.11. - * - * Depending on this value certain methods may become unavailable or change - * their behavior to remain compatible with different versions of ArangoDB. - * - * Default: `31100` - */ - arangoVersion?: number; - /** - * Determines the behavior when multiple URLs are provided: - * - * - `"NONE"`: No load balancing. All requests will be handled by the first - * URL in the list until a network error is encountered. On network error, - * arangojs will advance to using the next URL in the list. - * - * - `"ONE_RANDOM"`: Randomly picks one URL from the list initially, then - * behaves like `"NONE"`. - * - * - `"ROUND_ROBIN"`: Every sequential request uses the next URL in the list. - * - * Default: `"NONE"` - */ - loadBalancingStrategy?: LoadBalancingStrategy; - /** - * Determines the behavior when a request fails because the underlying - * connection to the server could not be opened - * (i.e. [`ECONNREFUSED` in Node.js](https://nodejs.org/api/errors.html#errors_common_system_errors)): - * - * - `false`: the request fails immediately. 
- * - * - `0`: the request is retried until a server can be reached but only a - * total number of times matching the number of known servers (including - * the initial failed request). - * - * - any other number: the request is retried until a server can be reached - * or the request has been retried a total of `maxRetries` number of times - * (not including the initial failed request). - * - * When working with a single server, the retries (if any) will be made to - * the same server. - * - * This setting currently has no effect when using arangojs in a browser. - * - * **Note**: Requests bound to a specific server (e.g. fetching query results) - * will never be retried automatically and ignore this setting. - * - * **Note**: To set the number of retries when a write-write conflict is - * encountered, see `retryOnConflict` instead. - * - * Default: `0` - */ - maxRetries?: false | number; - /** - * Maximum number of parallel requests arangojs will perform. If any - * additional requests are attempted, they will be enqueued until one of the - * active requests has completed. - * - * **Note:** when using `ROUND_ROBIN` load balancing and passing an array of - * URLs in the `url` option, the default value of this option will be set to - * `3 * url.length` instead of `3`. - * - * Default: `3` - */ - poolSize?: number; - /** - * (Browser only.) Determines whether credentials (e.g. cookies) will be sent - * with requests to the ArangoDB server. - * - * If set to `same-origin`, credentials will only be included with requests - * on the same URL origin as the invoking script. If set to `include`, - * credentials will always be sent. If set to `omit`, credentials will be - * excluded from all requests. - * - * Default: `same-origin` - */ - credentials?: "omit" | "include" | "same-origin"; - /** - * If set to `true`, requests will keep the underlying connection open until - * it times out or is closed. In browsers this prevents requests from being - * cancelled when the user navigates away from the page. - * - * Default: `true` - */ - keepalive?: boolean; - /** - * Callback that will be invoked with the finished request object before it - * is finalized. In the browser the request may already have been sent. - * - * @param req - Request object or XHR instance used for this request. - */ - beforeRequest?: (req: globalThis.Request) => void | Promise; - /** - * Callback that will be invoked when the server response has been received - * and processed or when the request has been failed without a response. - * - * The originating request will be available as the `request` property - * on either the error or response object. - * - * @param err - Error encountered when handling this request or `null`. - * @param res - Response object for this request, if no error occurred. - */ - afterResponse?: (err: errors.NetworkError | null, res?: globalThis.Response & { request: globalThis.Request }) => void | Promise; - /** - * Callback that will be invoked when a request - * - * @param err - Error encountered when handling this request. - */ - onError?: (err: Error) => void | Promise; - /** - * If set to a positive number, requests will automatically be retried at - * most this many times if they result in a write-write conflict. - * - * Default: `0` - */ - retryOnConflict?: number; - /** - * An object with additional headers to send with every request. 
- * - * If an `"authorization"` header is provided, it will be overridden when - * using {@link databases.Database#useBasicAuth}, {@link databases.Database#useBearerAuth} or - * the `auth` configuration option. - */ - headers?: Headers | Record; - /** - * If set to `true`, arangojs will generate stack traces every time a request - * is initiated and augment the stack traces of any errors it generates. - * - * **Warning**: This will cause arangojs to generate stack traces in advance - * even if the request does not result in an error. Generating stack traces - * may negatively impact performance. - */ - precaptureStackTraces?: boolean; - /** - * Limits the number of values of server-reported response queue times that - * will be stored and accessible using {@link databases.Database#queueTime}. If set to - * a finite value, older values will be discarded to make room for new values - * when that limit is reached. - * - * Default: `10` - */ - responseQueueTimeSamples?: number; + expectBinary: boolean; + pathname: string; + options: ServerFetchOptions; }; /** @@ -427,14 +351,14 @@ export class Connection { protected _activeTasks: number = 0; protected _arangoVersion: number = 31100; protected _headers: Headers; - protected _loadBalancingStrategy: LoadBalancingStrategy; + protected _loadBalancingStrategy: configuration.LoadBalancingStrategy; protected _maxRetries: number | false; protected _taskPoolSize: number; - protected _requestConfig: RequestConfig; + protected _requestConfig: CreateServerFetchFunctionOptions; protected _retryOnConflict: number; protected _queue = new LinkedList(); protected _databases = new Map(); - protected _hosts: RequestFunction[] = []; + protected _hosts: ServerFetchFunction[] = []; protected _hostUrls: string[] = []; protected _activeHostUrl: string; protected _activeDirtyHostUrl: string; @@ -452,7 +376,7 @@ export class Connection { * @param config - An object with configuration options. * */ - constructor(config: Omit = {}) { + constructor(config: Omit = {}) { const URLS = config.url ? Array.isArray(config.url) ? config.url @@ -494,7 +418,7 @@ export class Connection { this.addToHostList(URLS); if (config.auth) { - if (isBearerAuth(config.auth)) { + if (configuration.isBearerAuth(config.auth)) { this.setBearerAuth(config.auth); } else { this.setBasicAuth(config.auth); @@ -557,6 +481,7 @@ export class Connection { arangojsHostUrl: string; parsedBody?: any; } = Object.assign(await this._hosts[this._hostUrls.indexOf(hostUrl)]( + task.pathname, task.options ), { arangojsHostUrl: hostUrl }); const leaderEndpoint = res.headers.get(LEADER_ENDPOINT_HEADER); @@ -594,7 +519,7 @@ export class Connection { throw new errors.HttpError(res); } if (res.body) { - if (task.options.expectBinary) { + if (task.expectBinary) { res.parsedBody = await res.blob(); } else if (contentType?.match(MIME_JSON)) { res.parsedBody = await res.json(); @@ -656,11 +581,11 @@ export class Connection { } } - setBearerAuth(auth: BearerAuthCredentials) { + setBearerAuth(auth: configuration.BearerAuthCredentials) { this.setHeader("authorization", `Bearer ${auth.token}`); } - setBasicAuth(auth: BasicAuthCredentials) { + setBasicAuth(auth: configuration.BasicAuthCredentials) { this.setHeader( "authorization", `Basic ${btoa(`${auth.username}:${auth.password}`)}` @@ -731,7 +656,7 @@ export class Connection { * @param urls - URLs to use as host list. 
*/ setHostList(urls: string[]): void { - const cleanUrls = urls.map((url) => normalizeUrl(url)); + const cleanUrls = urls.map((url) => util.normalizeUrl(url)); this._hosts.splice( 0, this._hosts.length, @@ -742,7 +667,7 @@ export class Connection { if (!parsedUrl.pathname.endsWith("/")) { parsedUrl.pathname += "/"; } - return createRequest(parsedUrl, this._requestConfig); + return createServerFetchFunction(parsedUrl, this._requestConfig); }) ); this._hostUrls.splice(0, this._hostUrls.length, ...cleanUrls); @@ -759,7 +684,7 @@ export class Connection { */ addToHostList(urls: string | string[]): string[] { const cleanUrls = (Array.isArray(urls) ? urls : [urls]).map((url) => - normalizeUrl(url) + util.normalizeUrl(url) ); const newUrls = cleanUrls.filter( (url) => this._hostUrls.indexOf(url) === -1 @@ -771,7 +696,7 @@ export class Connection { if (!parsedUrl.pathname.endsWith("/")) { parsedUrl.pathname += "/"; } - return createRequest(parsedUrl, this._requestConfig); + return createServerFetchFunction(parsedUrl, this._requestConfig); }) ); return cleanUrls; @@ -902,7 +827,7 @@ export class Connection { transform?: (res: globalThis.Response & { request: globalThis.Request; parsedBody?: any }) => T ): Promise { return new Promise((resolve, reject) => { - const headers = mergeHeaders(this._headers, requestHeaders ?? {}); + const headers = util.mergeHeaders(this._headers, requestHeaders ?? {}); if (body && !(body instanceof FormData)) { let contentType; @@ -933,8 +858,9 @@ export class Connection { hostUrl, allowDirtyRead, retryOnConflict, + expectBinary, + pathname: util.joinPath(basePath, path) ?? "", options: { - pathname: joinPath(basePath, path) ?? "", search: params && (params instanceof URLSearchParams @@ -943,7 +869,6 @@ export class Connection { headers, timeout, method, - expectBinary, body, }, reject, @@ -958,7 +883,7 @@ export class Connection { task.stack = () => `\n${capture.stack.split("\n").slice(3).join("\n")}`; } else { - const capture = generateStackTrace() as { readonly stack: string }; + const capture = util.generateStackTrace() as { readonly stack: string }; if (Object.prototype.hasOwnProperty.call(capture, "stack")) { task.stack = () => `\n${capture.stack.split("\n").slice(4).join("\n")}`; @@ -971,3 +896,4 @@ export class Connection { }); } } +//#endregion \ No newline at end of file diff --git a/src/cursors.ts b/src/cursors.ts index 90ae6e587..0a31d5221 100644 --- a/src/cursors.ts +++ b/src/cursors.ts @@ -8,7 +8,7 @@ * * @packageDocumentation */ -import { LinkedList } from "./lib/linkedList.js"; +import { LinkedList } from "./lib/x3-linkedlist.js"; import * as databases from "./databases.js"; //#region Cursor properties diff --git a/src/errors.ts b/src/errors.ts index 53f7d71da..c11e44099 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -82,7 +82,7 @@ export function isNetworkError(error: any): error is NetworkError { * * Indicates whether the given value represents an ArangoDB error response. */ -export function isArangoErrorResponse(body: any): body is ArangoErrorResponse { +export function isArangoErrorResponse(body: any): body is connection.ArangoErrorResponse { return ( body && body.error === true && @@ -136,18 +136,6 @@ function isSafeToRetryFailedFetch(cause: Error): boolean | null { return null; } -/** - * @internal -* - * Interface representing an ArangoDB error response. - */ -export interface ArangoErrorResponse { - error: true; - code: number; - errorMessage: string; - errorNum: number; -} - /** * Interface representing a Node.js `UndiciError`. 
* @@ -336,7 +324,7 @@ export class ArangoError extends Error { * * Creates a new `ArangoError` from a response object. */ - static from(response: connection.ProcessedResponse): ArangoError { + static from(response: connection.ProcessedResponse): ArangoError { return new ArangoError(response.parsedBody!, { cause: new HttpError(response) }); @@ -345,7 +333,7 @@ export class ArangoError extends Error { /** * Creates a new `ArangoError` from an ArangoDB error response. */ - constructor(data: ArangoErrorResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + constructor(data: connection.ArangoErrorResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { const { isSafeToRetry, ...opts } = options; super(data.errorMessage, opts); this.errorNum = data.errorNum; @@ -362,7 +350,7 @@ export class ArangoError extends Error { /** * Server response object. */ - get response(): connection.ProcessedResponse | undefined { + get response(): connection.ProcessedResponse | undefined { const cause = this.cause; if (cause instanceof HttpError) { return cause.response; @@ -390,7 +378,7 @@ export class ArangoError extends Error { return true; } - toJSON(): ArangoErrorResponse { + toJSON(): connection.ArangoErrorResponse { return { error: true, errorMessage: this.errorMessage, diff --git a/src/lib/joinPath.ts b/src/lib/joinPath.ts deleted file mode 100644 index e43f268cd..000000000 --- a/src/lib/joinPath.ts +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Helper to merge two path segments. - * - * @packageDocumentation - * @internal - */ - -/** - * @internal - */ -export function joinPath( - basePath: string | undefined, - path: string | undefined -): string | undefined { - if (!basePath) return path; - if (!path) return basePath; - if (!basePath.endsWith("/")) basePath += "/"; - return basePath + path.replace(/^\//g, ""); -} diff --git a/src/lib/mergeHeaders.ts b/src/lib/mergeHeaders.ts deleted file mode 100644 index 9f9f20953..000000000 --- a/src/lib/mergeHeaders.ts +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Utility function for merging headers. - * - * @packageDocumentation - * @internal - */ - -/** - * @internal - */ -export function mergeHeaders( - base: Headers, - extra: Headers | Record | undefined -) { - if (!extra) return base; - return new Headers([ - ...base, - ...(extra instanceof Headers ? extra : Object.entries(extra)), - ]); -} diff --git a/src/lib/normalizeUrl.ts b/src/lib/normalizeUrl.ts deleted file mode 100644 index c70b257b0..000000000 --- a/src/lib/normalizeUrl.ts +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Utility function for normalizing URLs. - * - * @packageDocumentation - * @internal - */ - -/** - * @internal - */ -export function normalizeUrl(url: string): string { - const raw = url.match(/^(tcp|ssl|tls)((?::|\+).+)/); - if (raw) url = (raw[1] === "tcp" ? "http" : "https") + raw[2]; - const unix = url.match(/^(?:(http|https)\+)?unix:\/\/(\/.+)/); - if (unix) url = `${unix[1] || "http"}://unix:${unix[2]}`; - return url; -} diff --git a/src/lib/request.ts b/src/lib/request.ts deleted file mode 100644 index e8993acd3..000000000 --- a/src/lib/request.ts +++ /dev/null @@ -1,160 +0,0 @@ -/** - * Request handling internals. 
- * - * @packageDocumentation - * @internal - */ - -import { FetchFailedError, NetworkError, RequestAbortedError, ResponseTimeoutError } from "../errors.js"; - -function timer(timeout: number, cb: () => void) { - const t = setTimeout(cb, timeout); - return () => clearTimeout(t); -} - -export const REASON_TIMEOUT = 'timeout'; - -/** - * @internal - */ -export type RequestOptions = { - method: string; - pathname: string; - search?: URLSearchParams; - headers: Headers; - body: any; - expectBinary: boolean; - timeout?: number; -}; - -/** - * @internal - */ -export type RequestConfig = { - credentials: "omit" | "include" | "same-origin"; - keepalive: boolean; - beforeRequest?: (req: globalThis.Request) => void | Promise; - afterResponse?: (err: NetworkError | null, res?: globalThis.Response & { request: globalThis.Request }) => void | Promise; -}; - -/** - * @internal - */ -export type RequestFunction = { - (options: RequestOptions): Promise; - close?: () => void; -}; - -/** - * @internal - */ -export const isBrowser = false; - -/** - * Create a function for performing requests against a given host. - * - * @param baseUrl - Base URL of the host, i.e. protocol, port and domain name. - * @param config - Options to use for creating the agent. - * @param agent - Agent to use for performing requests. - * - * @internal - */ -export function createRequest( - baseUrl: URL, - config: RequestConfig -): RequestFunction { - let abort: () => void | undefined; - return Object.assign( - async function request({ - method, - search: searchParams, - pathname: requestPath, - headers: requestHeaders, - body, - timeout, - }: RequestOptions) { - const headers = new Headers(requestHeaders); - const url = new URL( - baseUrl.search ? requestPath + baseUrl.search : requestPath, - baseUrl - ); - if (searchParams) { - for (const [key, value] of searchParams) { - url.searchParams.append(key, value); - } - } - if (body instanceof FormData) { - const res = new Response(body); - const blob = await res.blob(); - // Workaround for ArangoDB 3.12.0-rc1 and earlier: - // Omitting the final CRLF results in "bad request body" fatal error - body = new Blob([blob, "\r\n"], { type: blob.type }); - } - if (!headers.has("authorization")) { - headers.set( - "authorization", - `Basic ${btoa( - `${baseUrl.username || "root"}:${baseUrl.password || ""}` - )}` - ); - } - const request = new Request(url, { - method, - headers, - body, - credentials: config.credentials, - keepalive: config.keepalive, - }); - if (config.beforeRequest) { - const p = config.beforeRequest(request); - if (p instanceof Promise) await p; - } - const abortController = new AbortController(); - const signal = abortController.signal; - abort = () => abortController.abort(); - let clearTimer: (() => void) | undefined; - if (timeout) { - clearTimer = timer(timeout, () => { - clearTimer = undefined; - abortController.abort(REASON_TIMEOUT); - }); - } - let response: globalThis.Response & { request: globalThis.Request }; - try { - response = Object.assign(await fetch(request, { signal }), { request }); - } catch (e: unknown) { - const cause = e instanceof Error ? e : new Error(String(e)); - let error: NetworkError; - if (signal.aborted) { - const reason = typeof signal.reason == 'string' ? 
signal.reason : undefined; - if (reason === REASON_TIMEOUT) { - error = new ResponseTimeoutError(undefined, request, { cause }); - } else { - error = new RequestAbortedError(reason, request, { cause }); - } - } else if (cause instanceof TypeError) { - error = new FetchFailedError(undefined, request, { cause }); - } else { - error = new NetworkError(cause.message, request, { cause }); - } - if (config.afterResponse) { - const p = config.afterResponse(error); - if (p instanceof Promise) await p; - } - throw error; - } finally { - clearTimer?.(); - } - if (config.afterResponse) { - const p = config.afterResponse(null, response); - if (p instanceof Promise) await p; - } - return response; - }, - { - close() { - abort?.(); - }, - } - ); -} diff --git a/src/lib/util.ts b/src/lib/util.ts new file mode 100644 index 000000000..507c6a170 --- /dev/null +++ b/src/lib/util.ts @@ -0,0 +1,94 @@ +/** + * Utility functions for arangojs. + * + * @packageDocumentation + * @internal + */ + +const THIRTY_MINUTES = 30 * 60_000; + +/** + * @internal + * + * Helper to merge two path segments. + */ +export function joinPath( + basePath: string | undefined, + path: string | undefined +): string | undefined { + if (!basePath) return path; + if (!path) return basePath; + if (!basePath.endsWith("/")) basePath += "/"; + return basePath + path.replace(/^\//g, ""); +} + +/** + * @internal + * + * Utility function for merging headers. + */ +export function mergeHeaders( + ...headerses: (Headers | string[][] | Record> | undefined)[] +) { + if (!headerses.length) return new Headers(); + return new Headers([ + ...headerses.flatMap(item => item ? [ + ...((item instanceof Headers || Array.isArray(item)) ? item : new Headers(item)) + ] : []), + ]); +} + +/** + * @internal + * + * Utility function for normalizing URLs. + */ +export function normalizeUrl(url: string): string { + const raw = url.match(/^(tcp|ssl|tls)((?::|\+).+)/); + if (raw) url = (raw[1] === "tcp" ? "http" : "https") + raw[2]; + const unix = url.match(/^(?:(http|https)\+)?unix:\/\/(\/.+)/); + if (unix) url = `${unix[1] || "http"}://unix:${unix[2]}`; + return url; +} + +/** + * @internal + * + * Generate a unique request ID. + */ +export function generateRequestId() { + return `${Date.now() % THIRTY_MINUTES}_${Math.random().toString(36).substring(2, 15)}`; +} + +/** + * @internal + * + * Creates a timer that will call the given callback after the specified + * timeout. + * + * @param timeout - Number of milliseconds after which the callback will be + * called. + * @param callback - Callback to call after the timeout. + * @returns A function that clears the timer. + */ +export function createTimer(timeout: number, callback: () => void) { + const t = setTimeout(callback, timeout); + return () => clearTimeout(t); +} + +/** + * @internal + * + * Generates a stack trace. 
+ */ +export function generateStackTrace() { + let err = new Error(); + if (!err.stack) { + try { + throw err; + } catch (e: any) { + err = e; + } + } + return err; +} diff --git a/src/lib/linkedList.ts b/src/lib/x3-linkedlist.ts similarity index 100% rename from src/lib/linkedList.ts rename to src/lib/x3-linkedlist.ts diff --git a/src/routes.ts b/src/routes.ts index 62f877d24..3f52adf55 100644 --- a/src/routes.ts +++ b/src/routes.ts @@ -10,7 +10,7 @@ */ import * as connections from "./connection.js"; import * as databases from "./databases.js"; -import { mergeHeaders } from "./lib/mergeHeaders.js"; +import { mergeHeaders } from "./lib/util.js"; /** * Represents an arbitrary route relative to an ArangoDB database. diff --git a/src/test/08-cursors.ts b/src/test/08-cursors.ts index 0f98397c6..493c5d7ab 100644 --- a/src/test/08-cursors.ts +++ b/src/test/08-cursors.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { LinkedList } from "../lib/linkedList.js"; +import { LinkedList } from "../lib/x3-linkedlist.js"; import { aql } from "../aql.js"; import { Cursor, BatchCursor } from "../cursors.js"; import { Database } from "../databases.js"; diff --git a/src/test/_config.ts b/src/test/_config.ts index 472d940eb..055096de0 100644 --- a/src/test/_config.ts +++ b/src/test/_config.ts @@ -1,4 +1,5 @@ -import { Config, LoadBalancingStrategy } from "../connection.js"; +import { LoadBalancingStrategy } from "../connection.js"; +import { Config } from "../config.js"; const ARANGO_URL = process.env.TEST_ARANGODB_URL || "http://127.0.0.1:8529"; const ARANGO_VERSION = Number( @@ -20,13 +21,13 @@ const ARANGO_LOAD_BALANCING_STRATEGY = process.env export const config: Config & { arangoVersion: NonNullable; } = ARANGO_URL.includes(",") - ? { + ? { url: ARANGO_URL.split(",").filter((s) => Boolean(s)), arangoVersion, precaptureStackTraces: true, loadBalancingStrategy: ARANGO_LOAD_BALANCING_STRATEGY || "ROUND_ROBIN", } - : { + : { url: ARANGO_URL, arangoVersion, precaptureStackTraces: true, From 5e14077463d6468346a1e6b7a91a327f49b9fb22 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Mon, 9 Dec 2024 12:54:04 +0100 Subject: [PATCH 08/21] Add optional undici support --- CHANGELOG.md | 28 + README.md | 66 ++- package.json | 3 + src/administration.ts | 16 +- src/analyzers.ts | 6 +- src/collections.ts | 62 +- src/{config.ts => configuration.ts} | 96 +--- src/connection.ts | 726 ++++++++++++++++-------- src/cursors.ts | 4 +- src/databases.ts | 228 ++++---- src/errors.ts | 134 ++--- src/graphs.ts | 44 +- src/index.ts | 8 +- src/jobs.ts | 8 +- src/lib/util.ts | 30 +- src/queries.ts | 2 +- src/routes.ts | 93 ++- src/test/00-basics.ts | 34 +- src/test/02-accessing-collections.ts | 10 +- src/test/03-accessing-graphs.ts | 12 +- src/test/04-transactions.ts | 2 +- src/test/07-routes.ts | 6 +- src/test/08-cursors.ts | 4 +- src/test/09-collection-metadata.ts | 2 +- src/test/10-manipulating-collections.ts | 6 +- src/test/11-managing-indexes.ts | 2 +- src/test/13-bulk-imports.ts | 2 +- src/test/14-document-collections.ts | 2 +- src/test/15-edge-collections.ts | 2 +- src/test/16-graphs.ts | 6 +- src/test/17-graph-vertices.ts | 12 +- src/test/18-graph-edges.ts | 2 +- src/test/19-graph-vertex-collections.ts | 2 +- src/test/20-graph-edge-collections.ts | 2 +- src/test/23-aql-queries-stream.ts | 6 +- src/test/24-accessing-views.ts | 4 +- src/test/25-view-metadata.ts | 2 +- src/test/26-manipulating-views.ts | 4 +- src/test/27-query-management.ts | 6 +- src/test/28-accessing-analyzers.ts | 4 +- src/test/29-manipulating-analyzers.ts 
| 2 +- src/test/29-queue-time.ts | 2 +- src/test/30-concurrent-transactions.ts | 6 +- src/test/31-conflicts.ts | 2 +- src/test/_config.ts | 7 +- src/transactions.ts | 6 +- src/views.ts | 12 +- 47 files changed, 972 insertions(+), 753 deletions(-) rename src/{config.ts => configuration.ts} (61%) diff --git a/CHANGELOG.md b/CHANGELOG.md index c374dbeac..534e366e2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,18 @@ This driver uses semantic versioning: - Renamed `CollectionTruncateOptions` type to `TruncateCollectionOptions` +- Renamed `Config` type to `ConfigOptions` + +- Renamed `path` option to `pathname` in `RequestOptions` type + + This affects the `db.waitForPropagation` and `route.request` methods. + +- Removed `basePath` option from `RequestOptions` type + + This affects the `db.waitForPropagation` and `route.request` methods. + +- Renamed `route.path` property to `route.pathname` + - Changed error type constructor signatures The `request` property is now always positional and the `options` property @@ -36,6 +48,11 @@ This driver uses semantic versioning: The type is now also no longer marked as internal. +- Moved configuration related types to new `configuration` module + + The following types were moved: `ConfigOptions`, `LoadBalancingStrategy`, + `BasicAuthCredentials` and `BearerAuthCredentials`. + - Moved internal utility functions to new `lib/util` module These methods are all still marked as internal and should not be used @@ -48,6 +65,17 @@ This driver uses semantic versioning: does not guarantee the underlying connections are closed as these are handled by Node.js or the browser natively. +### Added + +- Restored support for Unix domain sockets + + Using Unix domain sockets requires the `undici` library to be installed. + +- Restored support for `config.agentOptions` + + The `config.agentOptions` option can now be used to create a custom `undici` + agent if the `undici` library is installed. + ## [10.0.0-alpha.0] - 2024-11-28 This is a major release and breaks backwards compatibility. diff --git a/README.md b/README.md index 5c48a1724..edf1c0392 100644 --- a/README.md +++ b/README.md @@ -255,7 +255,7 @@ allowing arangojs to provide more meaningful stack traces at the cost of an impact to performance even when no error occurs. ```diff - const { Database } = require("arangojs"); + import { Database } from "arangojs"; const db = new Database({ url: ARANGODB_SERVER, @@ -269,15 +269,47 @@ that do not support the `stack` property on error objects, this option will still impact performance but not result in any additional information becoming available. +### Unix domain sockets + +If you want to use Unix domain sockets, you need to install the `undici` module, +which is an optional dependency of arangojs. + +```sh +npm install --save undici +``` + +If the `undici` module is not installed and arangojs attempts to make a request +over a Unix domain socket, the request will fail with a plain `Error` with a +message indicating that the `undici` module is unavailable. + ### Node.js with self-signed HTTPS certificates -If you need to support self-signed HTTPS certificates in Node.js, you may have -to override the global fetch agent. At the time of this writing, there is no -official way to do this for the native `fetch` implementation in Node.js. +If you need to support self-signed HTTPS certificates in Node.js, you will need +to install the `undici` module, which is an optional dependency of arangojs. 
+ +```sh +npm install --save undici +``` + +You can instruct arangojs to use the `undici` module by setting the +`config.agentOptions` option: + +```diff + import { Database } from "arangojs"; + + const db = new Database({ + url: ARANGODB_SERVER, ++ agentOptions: { ++ ca: [ ++ fs.readFileSync(".ssl/sub.class1.server.ca.pem"), ++ fs.readFileSync(".ssl/ca.pem"), ++ ], ++ }, + }); +``` -However as Node.js uses the `undici` module for its `fetch` implementation -internally, you can override the global agent by adding `undici` as a -dependency to your project and using its `setGlobalDispatcher` as follows: +To override the global fetch agent instead, you can use the `undici` module's +`setGlobalDispatcher` method as follows: ```js import { Agent, setGlobalDispatcher } from "undici"; @@ -293,20 +325,22 @@ setGlobalDispatcher( ``` Although this is **strongly discouraged**, it's also possible to disable -HTTPS certificate validation entirely, but note this has +HTTPS certificate validation entirely this way, but note this has **extremely dangerous** security implications: -```js -import { Agent, setGlobalDispatcher } from "undici"; +```diff + import { Database } from "arangojs"; -setGlobalDispatcher( - new Agent({ - rejectUnauthorized: false, - }) -); + const db = new Database({ + url: ARANGODB_SERVER, ++ agentOptions: { ++ rejectUnauthorized: false, ++ }, + }); ``` -This is a [known limitation](https://github.com/orgs/nodejs/discussions/44038#discussioncomment-5701073) +The requirement to use the `undici` module to override these settings is a +[known limitation](https://github.com/orgs/nodejs/discussions/44038#discussioncomment-5701073) of Node.js at the time of this writing. When using arangojs in the browser, self-signed HTTPS certificates need to diff --git a/package.json b/package.json index 91c62b3a7..3091a46ce 100644 --- a/package.json +++ b/package.json @@ -102,5 +102,8 @@ "source-map-support": "^0.5.21", "typedoc": "^0.25.12", "typescript": "^5.4.2" + }, + "optionalDependencies": { + "undici": ">=5.21.0" } } diff --git a/src/administration.ts b/src/administration.ts index 4a764c0ee..6009e8feb 100644 --- a/src/administration.ts +++ b/src/administration.ts @@ -141,7 +141,7 @@ export type ServerStatusInformation = { license: "community" | "enterprise"; /** * Server operation mode. - * + * * @deprecated use `operationMode` instead */ mode: "server" | "console"; @@ -213,7 +213,7 @@ export type ServerStatusInformation = { version: string; /** * Whether writes are enabled. - * + * * @deprecated Use `readOnly` instead. */ writeOpsEnabled: boolean; @@ -224,9 +224,9 @@ export type ServerStatusInformation = { * Server availability. * * - `"default"`: The server is operational. - * + * * - `"readonly"`: The server is in read-only mode. - * + * * - `false`: The server is not available. */ export type ServerAvailability = "default" | "readonly" | false; @@ -245,9 +245,9 @@ export type SingleServerSupportInfo = { deployment: { /** * Deployment mode: - * + * * - `"single"`: A single server deployment. - * + * * - `"cluster"`: A cluster deployment. */ type: "single"; @@ -268,9 +268,9 @@ export type ClusterSupportInfo = { deployment: { /** * Deployment mode: - * + * * - `"single"`: A single server deployment. - * + * * - `"cluster"`: A cluster deployment. 
*/ type: "cluster"; diff --git a/src/analyzers.ts b/src/analyzers.ts index 727b42456..8d7dc17ff 100644 --- a/src/analyzers.ts +++ b/src/analyzers.ts @@ -912,7 +912,7 @@ export class Analyzer { */ get(): Promise> { return this._db.request({ - path: `/_api/analyzer/${encodeURIComponent(this._name)}`, + pathname: `/_api/analyzer/${encodeURIComponent(this._name)}`, }); } @@ -972,7 +972,7 @@ export class Analyzer { > { return this._db.request({ method: "POST", - path: "/_api/analyzer", + pathname: "/_api/analyzer", body: { name: this._name, ...options }, }); } @@ -994,7 +994,7 @@ export class Analyzer { drop(force: boolean = false): Promise> { return this._db.request({ method: "DELETE", - path: `/_api/analyzer/${encodeURIComponent(this._name)}`, + pathname: `/_api/analyzer/${encodeURIComponent(this._name)}`, search: { force }, }); } diff --git a/src/collections.ts b/src/collections.ts index 557bdcaaf..f88180469 100644 --- a/src/collections.ts +++ b/src/collections.ts @@ -2275,7 +2275,7 @@ export class Collection< //#region Collection operations get() { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}`, }); } @@ -2327,7 +2327,7 @@ export class Collection< } return this._db.request({ method: "POST", - path: "/_api/collection", + pathname: "/_api/collection", search, body: { ...opts, @@ -2341,12 +2341,12 @@ export class Collection< ): Promise> { if (!properties) { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/properties`, }); } return this._db.request({ method: "PUT", - path: `/_api/collection/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/properties`, body: properties, }); } @@ -2357,7 +2357,7 @@ export class Collection< > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/count`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/count`, }); } @@ -2365,7 +2365,7 @@ export class Collection< return this._db.request( { method: "PUT", - path: `/_api/collection/${encodeURIComponent( + pathname: `/_api/collection/${encodeURIComponent( this._name )}/recalculateCount`, }, @@ -2381,7 +2381,7 @@ export class Collection< > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/figures`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/figures`, search: { details }, }); } @@ -2392,7 +2392,7 @@ export class Collection< > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/revision`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/revision`, }); } @@ -2404,7 +2404,7 @@ export class Collection< > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/checksum`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/checksum`, search: options, }); } @@ -2418,7 +2418,7 @@ export class Collection< truncate(options?: TruncateCollectionOptions): Promise> { return this._db.request({ method: "PUT", - path: `/_api/collection/${this._name}/truncate`, + pathname: `/_api/collection/${this._name}/truncate`, search: options, }); } @@ -2426,7 +2426,7 @@ export class Collection< drop(options?: DropCollectionOptions) { return this._db.request({ method: "DELETE", - path: `/_api/collection/${encodeURIComponent(this._name)}`, + pathname: 
`/_api/collection/${encodeURIComponent(this._name)}`, search: options, }); } @@ -2435,7 +2435,7 @@ export class Collection< return this._db.request( { method: "PUT", - path: `/_api/collection/${this._name}/compact`, + pathname: `/_api/collection/${this._name}/compact`, } ); } @@ -2448,7 +2448,7 @@ export class Collection< return this._db.request( { method: "PUT", - path: `/_api/collection/${encodeURIComponent( + pathname: `/_api/collection/${encodeURIComponent( this._name )}/responsibleShard`, body: document, @@ -2473,7 +2473,7 @@ export class Collection< return await this._db.request( { method: "HEAD", - path: `/_api/document/${encodeURI( + pathname: `/_api/document/${encodeURI( documents._documentHandle(selector, this._name) )}`, headers, @@ -2500,7 +2500,7 @@ export class Collection< const { allowDirtyRead = undefined } = options; return this._db.request({ method: "PUT", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, search: { onlyget: true }, allowDirtyRead, body: selectors, @@ -2525,7 +2525,7 @@ export class Collection< if (ifNoneMatch) headers["if-none-match"] = ifNoneMatch; const result = this._db.request( { - path: `/_api/document/${encodeURI( + pathname: `/_api/document/${encodeURI( documents._documentHandle(selector, this._name) )}`, headers, @@ -2553,7 +2553,7 @@ export class Collection< return this._db.request( { method: "POST", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: data, search: options, }, @@ -2568,7 +2568,7 @@ export class Collection< return this._db.request( { method: "POST", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: data, search: options, }, @@ -2587,7 +2587,7 @@ export class Collection< return this._db.request( { method: "PUT", - path: `/_api/document/${encodeURI( + pathname: `/_api/document/${encodeURI( documents._documentHandle(selector, this._name) )}`, headers, @@ -2607,7 +2607,7 @@ export class Collection< return this._db.request( { method: "PUT", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: newData, search: options, }, @@ -2626,7 +2626,7 @@ export class Collection< return this._db.request( { method: "PATCH", - path: `/_api/document/${encodeURI( + pathname: `/_api/document/${encodeURI( documents._documentHandle(selector, this._name) )}`, headers, @@ -2646,7 +2646,7 @@ export class Collection< return this._db.request( { method: "PATCH", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: newData, search: options, }, @@ -2661,7 +2661,7 @@ export class Collection< return this._db.request( { method: "DELETE", - path: `/_api/document/${encodeURI( + pathname: `/_api/document/${encodeURI( documents._documentHandle(selector, this._name) )}`, headers, @@ -2678,7 +2678,7 @@ export class Collection< return this._db.request( { method: "DELETE", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: selectors, search: options, }, @@ -2700,7 +2700,7 @@ export class Collection< } return this._db.request({ method: "POST", - path: "/_api/import", + pathname: "/_api/import", body: data, isBinary: true, search, @@ -2716,7 +2716,7 @@ export class Collection< ) { const { allowDirtyRead = undefined } = 
options; return this._db.request({ - path: `/_api/edges/${encodeURIComponent(this._name)}`, + pathname: `/_api/edges/${encodeURIComponent(this._name)}`, allowDirtyRead, search: { direction, @@ -2743,7 +2743,7 @@ export class Collection< return this._db.request( { method: "PUT", - path: `/_api/collection/${encodeURIComponent( + pathname: `/_api/collection/${encodeURIComponent( this._name )}/loadIndexesIntoMemory`, }, @@ -2754,7 +2754,7 @@ export class Collection< indexes(options?: indexes.ListIndexesOptions) { return this._db.request( { - path: "/_api/index", + pathname: "/_api/index", search: { collection: this._name, ...options }, }, (res) => res.parsedBody.indexes @@ -2763,14 +2763,14 @@ export class Collection< index(selector: indexes.IndexSelector) { return this._db.request({ - path: `/_api/index/${encodeURI(indexes._indexHandle(selector, this._name))}`, + pathname: `/_api/index/${encodeURI(indexes._indexHandle(selector, this._name))}`, }); } ensureIndex(options: indexes.EnsureIndexOptions) { return this._db.request({ method: "POST", - path: "/_api/index", + pathname: "/_api/index", body: options, search: { collection: this._name }, }); @@ -2779,7 +2779,7 @@ export class Collection< dropIndex(selector: indexes.IndexSelector) { return this._db.request({ method: "DELETE", - path: `/_api/index/${encodeURI(indexes._indexHandle(selector, this._name))}`, + pathname: `/_api/index/${encodeURI(indexes._indexHandle(selector, this._name))}`, }); } //#endregion diff --git a/src/config.ts b/src/configuration.ts similarity index 61% rename from src/config.ts rename to src/configuration.ts index c537cb9b1..54e703cda 100644 --- a/src/config.ts +++ b/src/configuration.ts @@ -1,13 +1,14 @@ /** * ```ts - * import type { Config } from "arangojs/config"; + * import type { ConfigOptions } from "arangojs/configuration"; * ``` * - * The "config" module provides configuration related types for TypeScript. + * The "configuration" module provides configuration related types for + * TypeScript. * * @packageDocumentation */ -import * as errors from "./errors.js"; +import * as connection from "./connection.js"; //#region Shared types /** @@ -62,7 +63,7 @@ export function isBearerAuth(auth: BasicAuthCredentials | BearerAuthCredentials) /** * Options for configuring arangojs. */ -export type Config = { +export type ConfigOptions = connection.CommonRequestOptions & { /** * Name of the database to use. * @@ -135,35 +136,6 @@ export type Config = { * Default: `"NONE"` */ loadBalancingStrategy?: LoadBalancingStrategy; - /** - * Determines the behavior when a request fails because the underlying - * connection to the server could not be opened - * (i.e. [`ECONNREFUSED` in Node.js](https://nodejs.org/api/errors.html#errors_common_system_errors)): - * - * - `false`: the request fails immediately. - * - * - `0`: the request is retried until a server can be reached but only a - * total number of times matching the number of known servers (including - * the initial failed request). - * - * - any other number: the request is retried until a server can be reached - * or the request has been retried a total of `maxRetries` number of times - * (not including the initial failed request). - * - * When working with a single server, the retries (if any) will be made to - * the same server. - * - * This setting currently has no effect when using arangojs in a browser. - * - * **Note**: Requests bound to a specific server (e.g. fetching query results) - * will never be retried automatically and ignore this setting. 
- * - * **Note**: To set the number of retries when a write-write conflict is - * encountered, see `retryOnConflict` instead. - * - * Default: `0` - */ - maxRetries?: false | number; /** * Maximum number of parallel requests arangojs will perform. If any * additional requests are attempted, they will be enqueued until one of the @@ -177,64 +149,28 @@ export type Config = { */ poolSize?: number; /** - * (Browser only.) Determines whether credentials (e.g. cookies) will be sent - * with requests to the ArangoDB server. - * - * If set to `same-origin`, credentials will only be included with requests - * on the same URL origin as the invoking script. If set to `include`, - * credentials will always be sent. If set to `omit`, credentials will be - * excluded from all requests. - * - * Default: `same-origin` - */ - credentials?: "omit" | "include" | "same-origin"; - /** - * If set to `true`, requests will keep the underlying connection open until - * it times out or is closed. In browsers this prevents requests from being - * cancelled when the user navigates away from the page. - * - * Default: `true` - */ - keepalive?: boolean; - /** - * Callback that will be invoked with the finished request object before it - * is finalized. In the browser the request may already have been sent. + * Default options to pass to the `fetch` function when making requests. * - * @param req - Request object or XHR instance used for this request. + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. */ - beforeRequest?: (req: globalThis.Request) => void | Promise; + fetchOptions?: connection.CommonFetchOptions; /** - * Callback that will be invoked when the server response has been received - * and processed or when the request has been failed without a response. - * - * The originating request will be available as the `request` property - * on either the error or response object. + * If set, arangojs will use the [`undici`](https://www.npmjs.com/package/undici) + * package to make requests and the provided options will be used to create + * the `undici` agent. * - * @param err - Error encountered when handling this request or `null`. - * @param res - Response object for this request, if no error occurred. + * See [the `undici` documentation](https://undici.nodejs.org/#/docs/api/Agent?id=parameter-agentoptions) + * for more information on the available options. */ - afterResponse?: (err: errors.NetworkError | null, res?: globalThis.Response & { request: globalThis.Request; }) => void | Promise; + agentOptions?: any; /** * Callback that will be invoked when a request * * @param err - Error encountered when handling this request. */ onError?: (err: Error) => void | Promise; - /** - * If set to a positive number, requests will automatically be retried at - * most this many times if they result in a write-write conflict. - * - * Default: `0` - */ - retryOnConflict?: number; - /** - * An object with additional headers to send with every request. - * - * If an `"authorization"` header is provided, it will be overridden when - * using {@link databases.Database#useBasicAuth}, {@link databases.Database#useBearerAuth} or - * the `auth` configuration option. - */ - headers?: Headers | Record; /** * If set to `true`, arangojs will generate stack traces every time a request * is initiated and augment the stack traces of any errors it generates. 
diff --git a/src/connection.ts b/src/connection.ts index 38348cf10..6831d07f2 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -8,7 +8,7 @@ * @packageDocumentation */ import * as administration from "./administration.js"; -import * as configuration from "./config.js"; +import * as configuration from "./configuration.js"; import * as databases from "./databases.js"; import * as errors from "./errors.js"; import * as util from "./lib/util.js"; @@ -19,84 +19,95 @@ const MIME_JSON = /\/(json|javascript)(\W|$)/; const LEADER_ENDPOINT_HEADER = "x-arango-endpoint"; const REASON_TIMEOUT = 'timeout'; -//#region ServerFetchFunction +//#region Host /** * @internal */ -type CreateServerFetchFunctionOptions = Omit & { - beforeRequest?: (req: globalThis.Request) => void | Promise; - afterResponse?: (err: errors.NetworkError | null, res?: globalThis.Response & { request: globalThis.Request }) => void | Promise; -}; - -/** - * @internal - */ -type ServerFetchFunction = { +type Host = { /** * @internal - * + * * Perform a fetch request against this host. - * - * @param pathname - URL path, relative to the `basePath` and server domain. + * + * @param pathname - URL path, relative to the server URL. * @param options - Options for this fetch request. */ - (pathname: string, options: ServerFetchOptions): Promise; + fetch: (options: Omit) => Promise; /** * @internal - * + * * Close the pending request, if any. */ close: () => void; }; -/** - * @internal - */ -type ServerFetchOptions = Omit & { - search?: URLSearchParams; - timeout?: number; -}; - /** * @internal * * Create a function for performing fetch requests against a given host. * - * @param baseUrl - Base URL of the host, i.e. protocol, port and domain name. + * @param arangojsHostUrl - Base URL of the host, i.e. protocol, port and domain name. * @param options - Options to use for all fetch requests. */ -function createServerFetchFunction( - baseUrl: URL, - { - beforeRequest, - afterResponse, - ...serverFetchOptions - }: CreateServerFetchFunctionOptions -): ServerFetchFunction { +function createHost(arangojsHostUrl: string, agentOptions?: any): Host { + const baseUrl = new URL(arangojsHostUrl); + let fetch = globalThis.fetch; + let createDispatcher: (() => Promise) | undefined; + let dispatcher: any; + let socketPath: string | undefined; + if (arangojsHostUrl.match(/^\w+:\/\/unix:\//)) { + socketPath = baseUrl.pathname; + baseUrl.hostname = "localhost"; + baseUrl.pathname = "/"; + agentOptions = { + ...agentOptions, + connect: { + ...agentOptions?.connect, + socketPath, + }, + }; + } + if (agentOptions) { + createDispatcher = (async () => { + let undici: any; + try { + undici = await import("undici"); + } catch (cause) { + if (socketPath) { + throw new Error('Undici is required for Unix domain sockets', { cause }); + } + throw new Error('Undici is required when using config.agentOptions', { cause }); + } + fetch = undici.fetch; + return new undici.Agent(agentOptions); + }); + } const pending = new Map(); - return Object.assign( - async function serverFetch( - pathname: string, + return { + async fetch( { + method, + pathname, search, + headers: requestHeaders, body, timeout, - ...fetchOptions - }: ServerFetchOptions) { + fetchOptions, + beforeRequest, + afterResponse, + }: Omit) { const url = new URL(pathname + baseUrl.search, baseUrl); if (search) { - for (const [key, value] of search) { + const searchParams = ( + search instanceof URLSearchParams + ? 
search + : new URLSearchParams(search) + ); + for (const [key, value] of searchParams) { url.searchParams.append(key, value); } } - if (body instanceof FormData) { - const res = new Response(body); - const blob = await res.blob(); - // Workaround for ArangoDB 3.12.0-rc1 and earlier: - // Omitting the final CRLF results in "bad request body" fatal error - body = new Blob([blob, "\r\n"], { type: blob.type }); - } - const headers = util.mergeHeaders(serverFetchOptions.headers, fetchOptions.headers); + const headers = new Headers(requestHeaders); if (!headers.has("authorization")) { headers.set( "authorization", @@ -107,13 +118,18 @@ function createServerFetchFunction( } const abortController = new AbortController(); const signal = abortController.signal; + if (createDispatcher) { + dispatcher = await createDispatcher(); + createDispatcher = undefined; + } const request = new Request(url, { - ...serverFetchOptions, ...fetchOptions, + dispatcher, + method, headers, body, signal, - }); + } as globalThis.RequestInit); if (beforeRequest) { const p = beforeRequest(request); if (p instanceof Promise) await p; @@ -129,7 +145,7 @@ function createServerFetchFunction( } let response: globalThis.Response & { request: globalThis.Request }; try { - response = Object.assign(await fetch(request), { request }); + response = Object.assign(await fetch(request), { request, arangojsHostUrl }); } catch (e: unknown) { const cause = e instanceof Error ? e : new Error(String(e)); let error: errors.NetworkError; @@ -160,25 +176,110 @@ function createServerFetchFunction( } return response; }, - { - close() { - if (!pending.size) return; - const controllers = [...pending.values()]; - pending.clear(); - for (const controller of controllers) { - try { - controller.abort(); - } catch (e) { - // noop - } + close() { + if (!pending.size) return; + const controllers = [...pending.values()]; + pending.clear(); + for (const controller of controllers) { + try { + controller.abort(); + } catch (e) { + // noop } - }, - } - ); + } + }, + }; } //#endregion //#region Response types +const STATUS_CODE_DEFAULT_MESSAGES = { + 0: "Network Error", + 300: "Multiple Choices", + 301: "Moved Permanently", + 302: "Found", + 303: "See Other", + 304: "Not Modified", + 307: "Temporary Redirect", + 308: "Permanent Redirect", + 400: "Bad Request", + 401: "Unauthorized", + 402: "Payment Required", + 403: "Forbidden", + 404: "Not Found", + 405: "Method Not Allowed", + 406: "Not Acceptable", + 407: "Proxy Authentication Required", + 408: "Request Timeout", + 409: "Conflict", + 410: "Gone", + 411: "Length Required", + 412: "Precondition Failed", + 413: "Payload Too Large", + 414: "Request-URI Too Long", + 415: "Unsupported Media Type", + 416: "Requested Range Not Satisfiable", + 417: "Expectation Failed", + 418: "I'm a teapot", + 421: "Misdirected Request", + 422: "Unprocessable Entity", + 423: "Locked", + 424: "Failed Dependency", + 426: "Upgrade Required", + 428: "Precondition Required", + 429: "Too Many Requests", + 431: "Request Header Fields Too Large", + 444: "Connection Closed Without Response", + 451: "Unavailable For Legal Reasons", + 499: "Client Closed Request", + 500: "Internal Server Error", + 501: "Not Implemented", + 502: "Bad Gateway", + 503: "Service Unavailable", + 504: "Gateway Timeout", + 505: "HTTP Version Not Supported", + 506: "Variant Also Negotiates", + 507: "Insufficient Storage", + 508: "Loop Detected", + 510: "Not Extended", + 511: "Network Authentication Required", + 599: "Network Connect Timeout Error", +}; + +type 
KnownStatusCode = keyof typeof STATUS_CODE_DEFAULT_MESSAGES; +const KNOWN_STATUS_CODES = Object.keys(STATUS_CODE_DEFAULT_MESSAGES).map((k) => Number(k)) as KnownStatusCode[]; +const REDIRECT_CODES = [301, 302, 303, 307, 308] satisfies KnownStatusCode[]; +type RedirectStatusCode = typeof REDIRECT_CODES[number]; + +/** + * @internal + * + * Indicates whether the given status code can be translated to a known status + * message. + */ +function isKnownStatusCode(code: number): code is KnownStatusCode { + return KNOWN_STATUS_CODES.includes(code as KnownStatusCode); +} + +/** + * Indicates whether the given status code represents a redirect. + */ +export function isRedirect(response: ProcessedResponse): boolean { + return REDIRECT_CODES.includes(response.status as RedirectStatusCode); +} + +/** + * Returns the status message for the given response's status code or the + * status text of the response. + */ +export function getStatusMessage(response: ProcessedResponse): string { + if (isKnownStatusCode(response.status)) { + return STATUS_CODE_DEFAULT_MESSAGES[response.status]; + } + if (response.statusText) return response.statusText; + return "Unknown response status"; +} + /** * Generic properties shared by all ArangoDB HTTP API responses. */ @@ -198,6 +299,19 @@ export type ArangoResponseMetadata = { */ export type ArangoApiResponse = T & ArangoResponseMetadata; +/** + * Indicates whether the given value represents an ArangoDB error response. + */ +export function isArangoErrorResponse(body: any): body is ArangoErrorResponse { + if (!body || typeof body !== 'object') return false; + return ( + body.error === true && + typeof body.code === 'number' && + typeof body.errorMessage === 'string' && + typeof body.errorNum === 'number' + ); +} + /** * Interface representing an ArangoDB error response. */ @@ -246,70 +360,236 @@ export interface ProcessedResponse extends globalThis.Response { //#region Request options /** - * Options for performing a request with arangojs. + * Options available for requests made with the Fetch API. */ -export type RequestOptions = { +export type CommonFetchOptions = { /** - * @internal + * Headers object containing any additional headers to send with the request. * - * Identifier of a specific ArangoDB host to use when more than one is known. + * Note that the `Authorization` header will be overridden if the `auth` + * configuration option is set. */ - hostUrl?: string; + headers?: string[][] | Record> | Headers; /** - * HTTP method to use in order to perform the request. + * Controls whether the socket should be reused for subsequent requests. * - * Default: `"GET"` + * Default: `false` */ - method?: string; + keepalive?: boolean; /** - * Request body data. + * Controls what to do when the response status code is a redirect. + * + * - `"error"`: Abort with a network error. + * - `"follow"`: Automatically follow redirects. + * - `"manual"`: Abort with an `HttpError`. + * + * Default: `"follow"` */ - body?: any; + redirect?: 'error' | 'follow' | 'manual'; /** - * If set to `true`, the response body will not be interpreted as JSON and - * instead passed as-is. + * Value to use for the `Referer` header. + * + * If set to `"about:client"`, the default value for the context in which the + * request is made will be used. + * + * Default: `"about:client"` */ - expectBinary?: boolean; + referrer?: string; /** - * If set to `true`, the request body will not be converted to JSON and - * instead passed as-is. + * (Browser only.) 
Controls the Attribution Reporting API specific behavior. + * + * See the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. */ - isBinary?: boolean; + attributionReporting?: any; /** - * Whether ArangoDB is allowed to perform a dirty read to respond to this - * request. If set to `true`, the response may reflect a dirty state from - * a non-authoritative server. + * (Browser only.) Cache mode to use for the request. + * + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. */ - allowDirtyRead?: boolean; + cache?: string; /** - * If set to a positive number, the request will automatically be retried at - * most this many times if it results in a write-write conflict. + * (Browser only.) Controls sending of credentials and cookies. * - * Default: `config.retryOnConflict` + * - `"omit"`: Never send cookies. + * - `"include"`: Always send cookies. + * - `"same-origin"`: Only send cookies if the request is to the same origin. + * + * Default: `"same-origin"` */ - retryOnConflict?: number; + credentials?: 'omit' | 'include' | 'same-origin'; + /** + * (Node.js only.) Undici `Dispatcher` instance to use for the request. + * + * Defaults to the global dispatcher. + */ + dispatcher?: any; + /** + * (Browser only.) Sets cross-origin behavior for the request. + * + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. + * + * Default: `"cors"` + */ + mode?: string; + /** + * (Browser only.) Request priority relative to other requests of the same type. + * + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. + * + * Default: `"auto"` + */ + priority?: 'low' | 'high' | 'auto'; + /** + * (Browser only.) Policy to use for the `Referer` header, equivalent to the + * semantics of the `Referrer-Policy` header. + * + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. + */ + referrerPolicy?: string; +}; + +/** + * Fetch-specific options for performing a request with arangojs. + */ +export type FetchOptions = CommonFetchOptions & { + /** + * Subresource integrity value to use for the request, formatted as + * `-`. + */ + integrity?: `${string}-${string}`; +}; + +/** + * Options that can be shared globally for all requests made with arangojs. + */ +export type CommonRequestOptions = { + /** + * Determines the behavior when a request fails because the underlying + * connection to the server could not be opened + * (e.g. [`ECONNREFUSED` in Node.js](https://nodejs.org/api/errors.html#errors_common_system_errors)): + * + * - `false`: the request fails immediately. + * + * - `0`: the request is retried until a server can be reached but only a + * total number of times matching the number of known servers (including + * the initial failed request). 
+ * + * - any other number: the request is retried until a server can be reached + * or the request has been retried a total of `maxRetries` number of times + * (not including the initial failed request). + * + * When working with a single server, the retries (if any) will be made to + * the same server. + * + * This setting currently has no effect when using arangojs in a browser. + * + * **Note**: Requests bound to a specific server (e.g. fetching query results) + * will never be retried automatically and ignore this setting. + * + * **Note**: To set the number of retries when a write-write conflict is + * encountered, see `retryOnConflict` instead. + * + * Default: `0` + */ + maxRetries?: false | number; /** - * HTTP headers to pass along with this request in addition to the default - * headers generated by arangojs. + * If set to a positive number, requests will automatically be retried at + * most this many times if they result in a write-write conflict. + * + * Default: `0` */ - headers?: Headers | Record; + retryOnConflict?: number; /** * Time in milliseconds after which arangojs will abort the request if the * socket has not already timed out. */ timeout?: number; /** - * Optional prefix path to prepend to the `path`. + * Whether ArangoDB is allowed to perform a dirty read to respond to the + * request. If set to `true`, the response may reflect a dirty state from + * a non-authoritative server. + * + * Default: `false` + */ + allowDirtyRead?: boolean; + /** + * Callback that will be invoked with the finished request object before it + * is finalized. In the browser the request may already have been sent. + * + * @param req - Request object or XHR instance used for this request. + */ + beforeRequest?: (req: globalThis.Request) => void | Promise; + /** + * Callback that will be invoked when the server response has been received + * and processed or when the request has been failed without a response. + * + * The originating request will be available as the `request` property + * on either the error or response object. + * + * @param err - Error encountered when handling this request or `null`. + * @param res - Response object for this request, if no error occurred. + */ + afterResponse?: (err: errors.NetworkError | null, res?: globalThis.Response & { request: globalThis.Request; }) => void | Promise; +}; + +/** + * Options for performing a request with arangojs. + */ +export type RequestOptions = CommonRequestOptions & { + /** + * @internal + * + * Identifier of a specific ArangoDB host to use when more than one is known. */ - basePath?: string; + hostUrl?: string; /** - * URL path, relative to the `basePath` and server domain. + * HTTP method to use in order to perform the request. + * + * Default: `"GET"` */ - path?: string; + method?: string; + /** + * URL path, relative to the server domain. + */ + pathname?: string; /** * URL parameters to pass as part of the query string. */ search?: URLSearchParams | Record; + /** + * Headers object containing any additional headers to send with the request. + * + * Note that the `Authorization` header will be overridden if the `auth` + * configuration option is set. + */ + headers?: string[][] | Record> | Headers; + /** + * Request body data. + */ + body?: any; + /** + * Additional options to pass to the `fetch` function. + */ + fetchOptions?: Omit; + /** + * If set to `true`, the request body will not be converted to JSON and + * instead passed as-is. 
+ */ + isBinary?: boolean; + /** + * If set to `true`, the response body will not be interpreted as JSON and + * instead passed as-is. + */ + expectBinary?: boolean; }; //#endregion @@ -318,17 +598,13 @@ export type RequestOptions = { * @internal */ type Task = { - hostUrl?: string; stack?: () => string; - allowDirtyRead: boolean; - retryOnConflict: number; resolve: (result: T) => void; reject: (error: unknown) => void; transform?: (res: ProcessedResponse) => T; retries: number; - expectBinary: boolean; - pathname: string; - options: ServerFetchOptions; + conflicts: number; + options: RequestOptions; }; /** @@ -349,16 +625,14 @@ export function isArangoConnection(connection: any): connection is Connection { */ export class Connection { protected _activeTasks: number = 0; - protected _arangoVersion: number = 31100; - protected _headers: Headers; + protected _arangoVersion: number; protected _loadBalancingStrategy: configuration.LoadBalancingStrategy; - protected _maxRetries: number | false; protected _taskPoolSize: number; - protected _requestConfig: CreateServerFetchFunctionOptions; - protected _retryOnConflict: number; + protected _commonRequestOptions: CommonRequestOptions; + protected _commonFetchOptions: CommonFetchOptions & { headers: Headers }; protected _queue = new LinkedList(); protected _databases = new Map(); - protected _hosts: ServerFetchFunction[] = []; + protected _hosts: Host[] = []; protected _hostUrls: string[] = []; protected _activeHostUrl: string; protected _activeDirtyHostUrl: string; @@ -376,52 +650,54 @@ export class Connection { * @param config - An object with configuration options. * */ - constructor(config: Omit = {}) { - const URLS = config.url - ? Array.isArray(config.url) - ? config.url - : [config.url] - : ["http://127.0.0.1:8529"]; - const DEFAULT_POOL_SIZE = - 3 * (config.loadBalancingStrategy === "ROUND_ROBIN" ? URLS.length : 1); + constructor(config: Omit = {}) { + const { + url = "http://127.0.0.1:8529", + auth, + arangoVersion = 31100, + loadBalancingStrategy = "NONE", + maxRetries = 0, + poolSize = 3 * ( + loadBalancingStrategy === "ROUND_ROBIN" && Array.isArray(url) ? url.length : 1 + ), + fetchOptions: { headers, ...commonFetchOptions } = {}, + onError, + precaptureStackTraces = false, + responseQueueTimeSamples = 10, + ...commonRequestOptions + } = config; + const URLS = Array.isArray(url) ? url : [url]; + this._loadBalancingStrategy = loadBalancingStrategy; + this._precaptureStackTraces = precaptureStackTraces; + this._responseQueueTimeSamples = ( + responseQueueTimeSamples < 0 ? Infinity : responseQueueTimeSamples + ); + this._arangoVersion = arangoVersion; + this._taskPoolSize = poolSize; + this._onError = onError; - if (config.arangoVersion !== undefined) { - this._arangoVersion = config.arangoVersion; - } - this._taskPoolSize = config.poolSize ?? DEFAULT_POOL_SIZE; - this._requestConfig = { - credentials: config.credentials ?? "same-origin", - keepalive: config.keepalive ?? 
true, - beforeRequest: config.beforeRequest, - afterResponse: config.afterResponse, + this._commonRequestOptions = commonRequestOptions; + this._commonFetchOptions = { + headers: new Headers(headers), + ...commonFetchOptions, }; - this._headers = new Headers(config.headers); - this._headers.set("x-arango-version", String(this._arangoVersion)); - this._headers.set( + + this._commonFetchOptions.headers.set( + "x-arango-version", + String(arangoVersion) + ); + this._commonFetchOptions.headers.set( "x-arango-driver", `arangojs/${process.env.ARANGOJS_VERSION} (cloud)` ); - this._loadBalancingStrategy = config.loadBalancingStrategy ?? "NONE"; - this._precaptureStackTraces = Boolean(config.precaptureStackTraces); - this._responseQueueTimeSamples = config.responseQueueTimeSamples ?? 10; - this._retryOnConflict = config.retryOnConflict ?? 0; - this._onError = config.onError; - if (this._responseQueueTimeSamples < 0) { - this._responseQueueTimeSamples = Infinity; - } - if (config.maxRetries === false) { - this._maxRetries = false; - } else { - this._maxRetries = Number(config.maxRetries ?? 0); - } this.addToHostList(URLS); - if (config.auth) { - if (configuration.isBearerAuth(config.auth)) { - this.setBearerAuth(config.auth); + if (auth) { + if (configuration.isBearerAuth(auth)) { + this.setBearerAuth(auth); } else { - this.setBasicAuth(config.auth); + this.setBasicAuth(auth); } } @@ -466,9 +742,9 @@ export class Connection { let hostUrl = this._activeHostUrl; try { this._activeTasks += 1; - if (task.hostUrl !== undefined) { - hostUrl = task.hostUrl; - } else if (task.allowDirtyRead) { + if (task.options.hostUrl !== undefined) { + hostUrl = task.options.hostUrl; + } else if (task.options.allowDirtyRead) { hostUrl = this._activeDirtyHostUrl; const i = this._hostUrls.indexOf(this._activeDirtyHostUrl) + 1; this._activeDirtyHostUrl = this._hostUrls[i % this._hostUrls.length]; @@ -476,18 +752,19 @@ export class Connection { const i = this._hostUrls.indexOf(this._activeHostUrl) + 1; this._activeHostUrl = this._hostUrls[i % this._hostUrls.length]; } + const host = this._hosts[this._hostUrls.indexOf(hostUrl)]; const res: globalThis.Response & { request: globalThis.Request; arangojsHostUrl: string; parsedBody?: any; - } = Object.assign(await this._hosts[this._hostUrls.indexOf(hostUrl)]( - task.pathname, - task.options - ), { arangojsHostUrl: hostUrl }); + } = Object.assign( + await host.fetch(task.options), + { arangojsHostUrl: hostUrl } + ); const leaderEndpoint = res.headers.get(LEADER_ENDPOINT_HEADER); if (res.status === 503 && leaderEndpoint) { const [cleanUrl] = this.addToHostList(leaderEndpoint); - task.hostUrl = cleanUrl; + task.options.hostUrl = cleanUrl; if (this._activeHostUrl === hostUrl) { this._activeHostUrl = cleanUrl; } @@ -511,7 +788,7 @@ export class Connection { } catch { // noop } - if (errors.isArangoErrorResponse(errorBody)) { + if (isArangoErrorResponse(errorBody)) { res.parsedBody = errorBody; throw errors.ArangoError.from(res); } @@ -519,7 +796,7 @@ export class Connection { throw new errors.HttpError(res); } if (res.body) { - if (task.expectBinary) { + if (task.options.expectBinary) { res.parsedBody = await res.blob(); } else if (contentType?.match(MIME_JSON)) { res.parsedBody = await res.json(); @@ -533,7 +810,7 @@ export class Connection { } catch (e: unknown) { const err = e as Error; if ( - !task.allowDirtyRead && + !task.options.allowDirtyRead && this._hosts.length > 1 && this._activeHostUrl === hostUrl && this._loadBalancingStrategy !== "ROUND_ROBIN" @@ -544,18 +821,19 @@ 
export class Connection { if ( errors.isArangoError(err) && err.errorNum === ERROR_ARANGO_CONFLICT && - task.retryOnConflict > 0 + task.options.retryOnConflict && + task.conflicts < task.options.retryOnConflict ) { - task.retryOnConflict -= 1; + task.conflicts += 1; this._queue.push(task); return; } if ( (errors.isNetworkError(err) || errors.isArangoError(err)) && err.isSafeToRetry && - task.hostUrl === undefined && - this._maxRetries !== false && - task.retries < (this._maxRetries || this._hosts.length - 1) + task.options.hostUrl === undefined && + this._commonRequestOptions.maxRetries !== false && + task.retries < (this._commonRequestOptions.maxRetries || this._hosts.length - 1) ) { task.retries += 1; this._queue.push(task); @@ -663,11 +941,7 @@ export class Connection { ...cleanUrls.map((url) => { const i = this._hostUrls.indexOf(url); if (i !== -1) return this._hosts[i]; - const parsedUrl = new URL(url); - if (!parsedUrl.pathname.endsWith("/")) { - parsedUrl.pathname += "/"; - } - return createServerFetchFunction(parsedUrl, this._requestConfig); + return createHost(url); }) ); this._hostUrls.splice(0, this._hostUrls.length, ...cleanUrls); @@ -691,13 +965,7 @@ export class Connection { ); this._hostUrls.push(...newUrls); this._hosts.push( - ...newUrls.map((url: string) => { - const parsedUrl = new URL(url); - if (!parsedUrl.pathname.endsWith("/")) { - parsedUrl.pathname += "/"; - } - return createServerFetchFunction(parsedUrl, this._requestConfig); - }) + ...newUrls.map(url => createHost(url)) ); return cleanUrls; } @@ -739,9 +1007,9 @@ export class Connection { */ setHeader(headerName: string, value: string | null) { if (value === null) { - this._headers.delete(headerName); + this._commonFetchOptions.headers.delete(headerName); } else { - this._headers.set(headerName, value); + this._commonFetchOptions.headers.set(headerName, value); } } @@ -809,71 +1077,79 @@ export class Connection { * * Performs a request using the arangojs connection pool. */ - request( - { + async request( + requestOptions: RequestOptions & { isBinary?: boolean }, + transform?: (res: globalThis.Response & { request: globalThis.Request; parsedBody?: any }) => T + ): Promise { + const { hostUrl, - method = "GET", - body, - expectBinary = false, - isBinary = false, allowDirtyRead = false, - retryOnConflict = this._retryOnConflict, + isBinary = false, + maxRetries = 0, + method = "GET", + retryOnConflict = 0, timeout = 0, headers: requestHeaders, - basePath, - path, - search: params, - }: RequestOptions, - transform?: (res: globalThis.Response & { request: globalThis.Request; parsedBody?: any }) => T - ): Promise { - return new Promise((resolve, reject) => { - const headers = util.mergeHeaders(this._headers, requestHeaders ?? 
{}); - - if (body && !(body instanceof FormData)) { - let contentType; - if (isBinary) { - contentType = "application/octet-stream"; - } else if (typeof body === "object") { - body = JSON.stringify(body); - contentType = "application/json"; - } else { - body = String(body); - contentType = "text/plain"; - } - if (!headers.has("content-type")) { - headers.set("content-type", contentType); - } - } + body: requestBody, + fetchOptions, + ...taskOptions + } = { ...this._commonRequestOptions, ...requestOptions }; - if (this._transactionId) { - headers.set("x-arango-trx-id", this._transactionId); - } + const headers = util.mergeHeaders( + this._commonFetchOptions.headers, + requestHeaders + ); - if (allowDirtyRead) { - headers.set("x-arango-allow-dirty-read", "true"); + let body = requestBody; + if (body instanceof FormData) { + const res = new Response(body); + const blob = await res.blob(); + // Workaround for ArangoDB 3.12.0-rc1 and earlier: + // Omitting the final CRLF results in "bad request body" fatal error + body = new Blob([blob, "\r\n"], { type: blob.type }); + } else if (body) { + let contentType; + if (isBinary) { + contentType = "application/octet-stream"; + } else if (typeof body === "object") { + body = JSON.stringify(body); + contentType = "application/json"; + } else { + body = String(body); + contentType = "text/plain"; + } + if (!headers.has("content-type")) { + headers.set("content-type", contentType); } + } + + if (this._transactionId) { + headers.set("x-arango-trx-id", this._transactionId); + } + + if (allowDirtyRead) { + headers.set("x-arango-allow-dirty-read", "true"); + } + return new Promise((resolve, reject) => { const task: Task = { + resolve, + reject, + transform, retries: 0, - hostUrl, - allowDirtyRead, - retryOnConflict, - expectBinary, - pathname: util.joinPath(basePath, path) ?? "", + conflicts: 0, options: { - search: - params && - (params instanceof URLSearchParams - ? params - : new URLSearchParams(params)), - headers, - timeout, + ...taskOptions, + hostUrl, method, + headers, body, + allowDirtyRead, + retryOnConflict, + maxRetries, + fetchOptions, + timeout, }, - reject, - resolve, - transform, }; if (this._precaptureStackTraces) { diff --git a/src/cursors.ts b/src/cursors.ts index 0a31d5221..bff892b78 100644 --- a/src/cursors.ts +++ b/src/cursors.ts @@ -205,7 +205,7 @@ export class BatchCursor { if (!this._id || !this.hasMore) return; const body = await this._db.request({ method: "POST", - path: this._nextBatchId + pathname: this._nextBatchId ? 
`/_api/cursor/${encodeURIComponent(this._id)}/${this._nextBatchId}` : `/_api/cursor/${encodeURIComponent(this._id)}`, hostUrl: this._hostUrl, @@ -746,7 +746,7 @@ export class BatchCursor { return this._db.request( { method: "DELETE", - path: `/_api/cursor/${encodeURIComponent(this._id!)}`, + pathname: `/_api/cursor/${encodeURIComponent(this._id!)}`, }, () => { this._hasMore = false; diff --git a/src/databases.ts b/src/databases.ts index 9a9bad0be..62a7bc6dd 100644 --- a/src/databases.ts +++ b/src/databases.ts @@ -15,6 +15,7 @@ import * as analyzers from "./analyzers.js"; import * as aql from "./aql.js"; import * as cluster from "./cluster.js"; import * as collections from "./collections.js"; +import * as configuration from "./configuration.js"; import * as connection from "./connection.js"; import * as cursors from "./cursors.js"; import * as errors from "./errors.js"; @@ -27,6 +28,7 @@ import * as routes from "./routes.js"; import * as services from "./services.js"; import * as transactions from "./transactions.js"; import * as users from "./users.js"; +import * as util from "./lib/util.js"; import * as views from "./views.js"; import { DATABASE_NOT_FOUND } from "./lib/codes.js"; @@ -161,14 +163,14 @@ export class Database { * }); * ``` */ - constructor(config?: connection.Config); + constructor(config?: configuration.ConfigOptions); /** * Creates a new `Database` instance with its own connection pool. * * See also {@link Database#database}. * * @param url - Base URL of the ArangoDB server or list of server URLs. - * Equivalent to the `url` option in {@link connection.Config}. + * Equivalent to the `url` option in {@link configuration.ConfigOptions}. * * @example * ```js @@ -182,7 +184,7 @@ export class Database { */ constructor(database: Database, name?: string); constructor( - configOrDatabase: string | string[] | connection.Config | Database = {}, + configOrDatabase: string | string[] | configuration.ConfigOptions | Database = {}, name?: string ) { if (isArangoDatabase(configOrDatabase)) { @@ -251,16 +253,14 @@ export class Database { * * Performs an arbitrary HTTP request against the database. * - * If `absolutePath` is set to `true`, the database path will not be - * automatically prepended to the `basePath`. - * - * @param T - Return type to use. Defaults to the response object type. + * @param BodyType - Type of the expected response body. + * @param ReturnType - Type the response body will be transformed to. * @param options - Options for this request. * @param transform - An optional function to transform the low-level * response object to a more useful return value. */ async request( - options: connection.RequestOptions & { absolutePath?: boolean }, + options: connection.RequestOptions, transform?: (res: connection.ProcessedResponse) => ReturnType ): Promise; /** @@ -268,38 +268,32 @@ export class Database { * * Performs an arbitrary HTTP request against the database. * - * If `absolutePath` is set to `true`, the database path will not be - * automatically prepended to the `basePath`. - * + * @param BodyType - Type of the expected response body. * @param options - Options for this request. * @param transform - If set to `false`, the raw response object will be * returned. 
*/ async request( - options: connection.RequestOptions & { absolutePath?: boolean }, + options: connection.RequestOptions, transform: false ): Promise>; async request( { - absolutePath = false, - basePath, + pathname, ...opts - }: connection.RequestOptions & { absolutePath?: boolean }, + }: connection.RequestOptions, transform: false | ((res: connection.ProcessedResponse) => ReturnType) = (res) => res.parsedBody as ReturnType ): Promise { - if (!absolutePath) { - basePath = `/_db/${encodeURIComponent(this._name)}${basePath || ""}`; - } + pathname = util.joinPath('_db', encodeURIComponent(this._name), pathname); if (this._trapRequest) { const trap = this._trapRequest; this._trapRequest = undefined; return new Promise(async (resolveRequest, rejectRequest) => { - const options = { ...opts }; - options.headers = new Headers(options.headers); - options.headers.set("x-arango-async", "store"); + opts.headers = new Headers(opts.headers); + opts.headers.set("x-arango-async", "store"); let jobRes: connection.ProcessedResponse; try { - jobRes = await this._connection.request({ basePath, ...options }); + jobRes = await this._connection.request({ pathname, ...opts }); } catch (e) { trap({ error: true }); rejectRequest(e); @@ -321,7 +315,7 @@ export class Database { }); } return this._connection.request( - { basePath, ...opts }, + { pathname, ...opts }, transform || undefined ); } @@ -329,7 +323,7 @@ export class Database { /** * Updates the URL list by requesting a list of all coordinators in the * cluster and adding any endpoints not initially specified in the - * {@link connection.Config}. + * {@link configuration.ConfigOptions}. * * For long-running processes communicating with an ArangoDB cluster it is * recommended to run this method periodically (e.g. once per hour) to make @@ -354,7 +348,7 @@ export class Database { */ async acquireHostList(overwrite = false): Promise { const urls: string[] = await this.request( - { path: "/_api/cluster/endpoints" }, + { pathname: "/_api/cluster/endpoints" }, (res) => res.parsedBody.endpoints.map((endpoint: any) => endpoint.endpoint) ); @@ -409,7 +403,7 @@ export class Database { * const analyzer = db.analyzer("my-analyzer"); * await analyzer.create(); * await db.waitForPropagation( - * { path: `/_api/analyzer/${encodeURIComponent(analyzer.name)}` }, + * { pathname: `/_api/analyzer/${encodeURIComponent(analyzer.name)}` }, * 30000 * ); * // Analyzer has been propagated to all coordinators and can safely be used @@ -423,13 +417,13 @@ export class Database { timeout?: number ): Promise; async waitForPropagation( - { basePath, ...request }: connection.RequestOptions, + { pathname, ...request }: connection.RequestOptions, timeout?: number ): Promise { await this._connection.waitForPropagation( { ...request, - basePath: `/_db/${encodeURIComponent(this._name)}${basePath || ""}`, + pathname: util.joinPath('_db', encodeURIComponent(this._name), pathname), }, timeout ); @@ -513,7 +507,7 @@ export class Database { return this.request( { method: "POST", - path: "/_open/auth", + pathname: "/_open/auth", body: { username, password }, }, (res) => { @@ -541,7 +535,7 @@ export class Database { return this.request( { method: "POST", - path: "/_open/auth/renew", + pathname: "/_open/auth/renew", }, (res) => { if (!res.parsedBody.jwt) return null; @@ -572,7 +566,7 @@ export class Database { version(details?: boolean): Promise { return this.request({ method: "GET", - path: "/_api/version", + pathname: "/_api/version", search: { details }, }); } @@ -591,7 +585,7 @@ export class 
Database { engine(): Promise { return this.request({ method: "GET", - path: "/_api/engine", + pathname: "/_api/engine", }); } @@ -603,7 +597,7 @@ export class Database { return this.request( { method: "GET", - path: "/_admin/time", + pathname: "/_admin/time", }, (res) => res.parsedBody.time * 1000 ); @@ -624,13 +618,13 @@ export class Database { status(): Promise { return this.request({ method: "GET", - path: "/_admin/status", + pathname: "/_admin/status", }); } /** * Fetches availability information about the server. - * + * * @param graceful - If set to `true`, the method will always return `false` * instead of throwing an error; otherwise `false` will only be returned * when the server responds with a 503 status code or an ArangoDB error with @@ -646,7 +640,7 @@ export class Database { try { return this.request({ method: "GET", - path: "/_admin/server/availability", + pathname: "/_admin/server/availability", }, (res) => res.parsedBody.mode); } catch (e) { if (graceful) return false; @@ -659,13 +653,13 @@ export class Database { /** * Fetches deployment information about the server for support purposes. - * + * * Note that this API may reveal sensitive data about the deployment. */ supportInfo(): Promise { return this.request({ method: "GET", - path: "/_admin/support-info", + pathname: "/_admin/support-info", }); } @@ -676,7 +670,7 @@ export class Database { return this.request( { method: "DELETE", - path: "/_admin/shutdown", + pathname: "/_admin/shutdown", }, () => undefined ); @@ -695,7 +689,7 @@ export class Database { */ getClusterImbalance(): Promise { return this.request( - { path: "/_admin/cluster/rebalance" }, + { pathname: "/_admin/cluster/rebalance" }, (res) => res.parsedBody.result ); } @@ -721,7 +715,7 @@ export class Database { return this.request( { method: "POST", - path: "/_admin/cluster/rebalance", + pathname: "/_admin/cluster/rebalance", body: { version: 1, ...options, @@ -749,7 +743,7 @@ export class Database { executeClusterRebalance(moves: cluster.ClusterRebalanceMove[]): Promise { return this.request({ method: "POST", - path: "/_admin/cluster/rebalance/execute", + pathname: "/_admin/cluster/rebalance/execute", body: { version: 1, moves, @@ -776,7 +770,7 @@ export class Database { ): Promise { return this.request({ method: "PUT", - path: "/_admin/cluster/rebalance", + pathname: "/_admin/cluster/rebalance", body: { version: 1, ...opts, @@ -816,7 +810,7 @@ export class Database { */ get(): Promise { return this.request( - { path: "/_api/database/current" }, + { pathname: "/_api/database/current" }, (res) => res.parsedBody.result ); } @@ -891,7 +885,7 @@ export class Database { return this.request( { method: "POST", - path: "/_api/database", + pathname: "/_api/database", body: { name: databaseName, users, options }, }, () => this.database(databaseName) @@ -913,7 +907,7 @@ export class Database { */ listDatabases(): Promise { return this.request( - { path: "/_api/database" }, + { pathname: "/_api/database" }, (res) => res.parsedBody.result ); } @@ -934,7 +928,7 @@ export class Database { */ listUserDatabases(): Promise { return this.request( - { path: "/_api/database/user" }, + { pathname: "/_api/database/user" }, (res) => res.parsedBody.result ); } @@ -954,7 +948,7 @@ export class Database { * ``` */ databases(): Promise { - return this.request({ path: "/_api/database" }, (res) => + return this.request({ pathname: "/_api/database" }, (res) => (res.parsedBody.result as string[]).map((databaseName) => this.database(databaseName) ) @@ -976,7 +970,7 @@ export class Database 
{ * ``` */ userDatabases(): Promise { - return this.request({ path: "/_api/database/user" }, (res) => + return this.request({ pathname: "/_api/database/user" }, (res) => (res.parsedBody.result as string[]).map((databaseName) => this.database(databaseName) ) @@ -999,7 +993,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/database/${encodeURIComponent(databaseName)}`, + pathname: `/_api/database/${encodeURIComponent(databaseName)}`, }, (res) => res.parsedBody.result ); @@ -1220,7 +1214,7 @@ export class Database { ): Promise> { const result = await this.request({ method: "PUT", - path: `/_api/collection/${encodeURIComponent(collectionName)}/rename`, + pathname: `/_api/collection/${encodeURIComponent(collectionName)}/rename`, body: { name: newName }, }); this._collections.delete(collectionName); @@ -1256,7 +1250,7 @@ export class Database { ): Promise { return this.request( { - path: "/_api/collection", + pathname: "/_api/collection", search: { excludeSystem }, }, (res) => res.parsedBody.result @@ -1352,7 +1346,7 @@ export class Database { */ listGraphs(): Promise { return this.request( - { path: "/_api/gharial" }, + { pathname: "/_api/gharial" }, (res) => res.parsedBody.graphs ); } @@ -1436,7 +1430,7 @@ export class Database { ): Promise> { const result = await this.request({ method: "PUT", - path: `/_api/view/${encodeURIComponent(viewName)}/rename`, + pathname: `/_api/view/${encodeURIComponent(viewName)}/rename`, body: { name: newName }, }); this._views.delete(viewName); @@ -1458,7 +1452,7 @@ export class Database { * ``` */ listViews(): Promise { - return this.request({ path: "/_api/view" }, (res) => res.parsedBody.result); + return this.request({ pathname: "/_api/view" }, (res) => res.parsedBody.result); } /** @@ -1541,7 +1535,7 @@ export class Database { */ listAnalyzers(): Promise { return this.request( - { path: "/_api/analyzer" }, + { pathname: "/_api/analyzer" }, (res) => res.parsedBody.result ); } @@ -1580,7 +1574,7 @@ export class Database { listUsers(): Promise { return this.request( { - path: "/_api/user", + pathname: "/_api/user", }, (res) => res.parsedBody.result ); @@ -1600,7 +1594,7 @@ export class Database { */ getUser(username: string): Promise> { return this.request({ - path: `/_api/user/${encodeURIComponent(username)}`, + pathname: `/_api/user/${encodeURIComponent(username)}`, }); } @@ -1648,7 +1642,7 @@ export class Database { return this.request( { method: "POST", - path: "/_api/user", + pathname: "/_api/user", body: { user: username, ...options }, }, (res) => res.parsedBody @@ -1699,7 +1693,7 @@ export class Database { return this.request( { method: "PATCH", - path: `/_api/user/${encodeURIComponent(username)}`, + pathname: `/_api/user/${encodeURIComponent(username)}`, body: options, }, (res) => res.parsedBody @@ -1729,7 +1723,7 @@ export class Database { return this.request( { method: "PUT", - path: `/_api/user/${encodeURIComponent(username)}`, + pathname: `/_api/user/${encodeURIComponent(username)}`, body: options, }, (res) => res.parsedBody @@ -1754,7 +1748,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/user/${encodeURIComponent(username)}`, + pathname: `/_api/user/${encodeURIComponent(username)}`, }, () => undefined, ); @@ -1836,8 +1830,8 @@ export class Database { const databaseName = isArangoDatabase(database) ? database.name : database ?? - (collections.isArangoCollection(collection) - ? ((collection as any)._db as Database).name + (collection instanceof collections.Collection + ? 
collection.database.name : this._name); const suffix = collection ? `/${encodeURIComponent( @@ -1846,7 +1840,7 @@ export class Database { : ""; return this.request( { - path: `/_api/user/${encodeURIComponent( + pathname: `/_api/user/${encodeURIComponent( username )}/database/${encodeURIComponent(databaseName)}${suffix}`, }, @@ -1948,7 +1942,7 @@ export class Database { return this.request( { method: "PUT", - path: `/_api/user/${encodeURIComponent( + pathname: `/_api/user/${encodeURIComponent( username )}/database/${encodeURIComponent(databaseName)}${suffix}`, body: { grant }, @@ -2028,7 +2022,7 @@ export class Database { ? database.name : database ?? (collection instanceof collections.Collection - ? ((collection as any)._db as Database).name + ? collection.database.name : this._name); const suffix = collection ? `/${encodeURIComponent( @@ -2038,7 +2032,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/user/${encodeURIComponent( + pathname: `/_api/user/${encodeURIComponent( username )}/database/${encodeURIComponent(databaseName)}${suffix}`, }, @@ -2101,7 +2095,7 @@ export class Database { getUserDatabases(username: string, full?: boolean) { return this.request( { - path: `/_api/user/${encodeURIComponent(username)}/database`, + pathname: `/_api/user/${encodeURIComponent(username)}/database`, search: { full }, }, (res) => res.parsedBody.result @@ -2275,7 +2269,7 @@ export class Database { return this.request( { method: "POST", - path: "/_api/transaction", + pathname: "/_api/transaction", allowDirtyRead, body: { collections: transactions.coerceTransactionCollections(collections), @@ -2412,7 +2406,7 @@ export class Database { return this.request( { method: "POST", - path: "/_api/transaction/begin", + pathname: "/_api/transaction/begin", allowDirtyRead, body: { collections: transactions.coerceTransactionCollections(collections), @@ -2574,7 +2568,7 @@ export class Database { */ listTransactions(): Promise { return this._connection.request( - { path: "/_api/transaction" }, + { pathname: "/_api/transaction" }, (res) => res.parsedBody.transactions ); } @@ -2733,7 +2727,7 @@ export class Database { return this.request( { method: "POST", - path: "/_api/cursor", + pathname: "/_api/cursor", body: { query, bindVars, @@ -2886,7 +2880,7 @@ export class Database { } return this.request({ method: "POST", - path: "/_api/explain", + pathname: "/_api/explain", body: { query, bindVars, options }, }); } @@ -2921,7 +2915,7 @@ export class Database { } return this.request({ method: "POST", - path: "/_api/query", + pathname: "/_api/query", body: { query }, }); } @@ -2940,7 +2934,7 @@ export class Database { */ queryRules(): Promise { return this.request({ - path: "/_api/query/rules", + pathname: "/_api/query/rules", }); } @@ -2978,12 +2972,12 @@ export class Database { options ? 
{ method: "PUT", - path: "/_api/query/properties", + pathname: "/_api/query/properties", body: options, } : { method: "GET", - path: "/_api/query/properties", + pathname: "/_api/query/properties", } ); } @@ -3002,7 +2996,7 @@ export class Database { listRunningQueries(): Promise { return this.request({ method: "GET", - path: "/_api/query/current", + pathname: "/_api/query/current", }); } @@ -3022,7 +3016,7 @@ export class Database { listSlowQueries(): Promise { return this.request({ method: "GET", - path: "/_api/query/slow", + pathname: "/_api/query/slow", }); } @@ -3042,7 +3036,7 @@ export class Database { return this.request( { method: "DELETE", - path: "/_api/query/slow", + pathname: "/_api/query/slow", }, () => undefined ); @@ -3072,7 +3066,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/query/${encodeURIComponent(queryId)}`, + pathname: `/_api/query/${encodeURIComponent(queryId)}`, }, () => undefined ); @@ -3092,7 +3086,7 @@ export class Database { */ listUserFunctions(): Promise { return this.request( - { path: "/_api/aqlfunction" }, + { pathname: "/_api/aqlfunction" }, (res) => res.parsedBody.result ); } @@ -3135,7 +3129,7 @@ export class Database { ): Promise> { return this.request({ method: "POST", - path: "/_api/aqlfunction", + pathname: "/_api/aqlfunction", body: { name, code, isDeterministic }, }); } @@ -3161,7 +3155,7 @@ export class Database { ): Promise> { return this.request({ method: "DELETE", - path: `/_api/aqlfunction/${encodeURIComponent(name)}`, + pathname: `/_api/aqlfunction/${encodeURIComponent(name)}`, search: { group }, }); } @@ -3187,7 +3181,7 @@ export class Database { */ listServices(excludeSystem: boolean = true): Promise { return this.request({ - path: "/_api/foxx", + pathname: "/_api/foxx", search: { excludeSystem }, }); } @@ -3244,7 +3238,7 @@ export class Database { return await this.request({ body: form, method: "POST", - path: "/_api/foxx", + pathname: "/_api/foxx", search: { ...search, mount }, }); } @@ -3302,7 +3296,7 @@ export class Database { return await this.request({ body: form, method: "PUT", - path: "/_api/foxx/service", + pathname: "/_api/foxx/service", search: { ...search, mount }, }); } @@ -3360,7 +3354,7 @@ export class Database { return await this.request({ body: form, method: "PATCH", - path: "/_api/foxx/service", + pathname: "/_api/foxx/service", search: { ...search, mount }, }); } @@ -3384,7 +3378,7 @@ export class Database { return this.request( { method: "DELETE", - path: "/_api/foxx/service", + pathname: "/_api/foxx/service", search: { ...options, mount }, }, () => undefined @@ -3405,7 +3399,7 @@ export class Database { */ getService(mount: string): Promise { return this.request({ - path: "/_api/foxx/service", + pathname: "/_api/foxx/service", search: { mount }, }); } @@ -3462,7 +3456,7 @@ export class Database { ): Promise>; getServiceConfiguration(mount: string, minimal: boolean = false) { return this.request({ - path: "/_api/foxx/configuration", + pathname: "/_api/foxx/configuration", search: { mount, minimal }, }); } @@ -3535,7 +3529,7 @@ export class Database { ) { return this.request({ method: "PUT", - path: "/_api/foxx/configuration", + pathname: "/_api/foxx/configuration", body: cfg, search: { mount, minimal }, }); @@ -3609,7 +3603,7 @@ export class Database { ) { return this.request({ method: "PATCH", - path: "/_api/foxx/configuration", + pathname: "/_api/foxx/configuration", body: cfg, search: { mount, minimal }, }); @@ -3667,7 +3661,7 @@ export class Database { ): Promise>; 
getServiceDependencies(mount: string, minimal: boolean = false) { return this.request({ - path: "/_api/foxx/dependencies", + pathname: "/_api/foxx/dependencies", search: { mount, minimal }, }); } @@ -3749,7 +3743,7 @@ export class Database { ) { return this.request({ method: "PUT", - path: "/_api/foxx/dependencies", + pathname: "/_api/foxx/dependencies", body: deps, search: { mount, minimal }, }); @@ -3832,7 +3826,7 @@ export class Database { ) { return this.request({ method: "PATCH", - path: "/_api/foxx/dependencies", + pathname: "/_api/foxx/dependencies", body: deps, search: { mount, minimal }, }); @@ -3859,7 +3853,7 @@ export class Database { ): Promise { return this.request({ method: enabled ? "POST" : "DELETE", - path: "/_api/foxx/development", + pathname: "/_api/foxx/development", search: { mount }, }); } @@ -3881,7 +3875,7 @@ export class Database { */ getServiceScripts(mount: string): Promise> { return this.request({ - path: "/_api/foxx/scripts", + pathname: "/_api/foxx/scripts", search: { mount }, }); } @@ -3913,7 +3907,7 @@ export class Database { runServiceScript(mount: string, name: string, params?: any): Promise { return this.request({ method: "POST", - path: `/_api/foxx/scripts/${encodeURIComponent(name)}`, + pathname: `/_api/foxx/scripts/${encodeURIComponent(name)}`, body: params, search: { mount }, }); @@ -4191,7 +4185,7 @@ export class Database { ) { return this.request({ method: "POST", - path: "/_api/foxx/tests", + pathname: "/_api/foxx/tests", search: { ...options, mount, @@ -4216,7 +4210,7 @@ export class Database { */ getServiceReadme(mount: string): Promise { return this.request({ - path: "/_api/foxx/readme", + pathname: "/_api/foxx/readme", search: { mount }, }); } @@ -4236,7 +4230,7 @@ export class Database { */ getServiceDocumentation(mount: string): Promise { return this.request({ - path: "/_api/foxx/swagger", + pathname: "/_api/foxx/swagger", search: { mount }, }); } @@ -4257,7 +4251,7 @@ export class Database { downloadService(mount: string): Promise { return this.request({ method: "POST", - path: "/_api/foxx/download", + pathname: "/_api/foxx/download", search: { mount }, expectBinary: true, }); @@ -4287,7 +4281,7 @@ export class Database { return this.request( { method: "POST", - path: "/_api/foxx/commit", + pathname: "/_api/foxx/commit", search: { replace }, }, () => undefined @@ -4315,7 +4309,7 @@ export class Database { return this.request( { method: "POST", - path: "/_admin/backup/create", + pathname: "/_admin/backup/create", body: options, }, (res) => res.parsedBody.result @@ -4341,7 +4335,7 @@ export class Database { return this.request( { method: "POST", - path: "/_admin/backup/list", + pathname: "/_admin/backup/list", body: id ? 
{ id } : undefined, }, (res) => res.parsedBody.result @@ -4365,7 +4359,7 @@ export class Database { return this.request( { method: "POST", - path: "/_admin/backup/restore", + pathname: "/_admin/backup/restore", body: { id }, }, (res) => res.parsedBody.result.previous @@ -4387,7 +4381,7 @@ export class Database { return this.request( { method: "POST", - path: "/_admin/backup/delete", + pathname: "/_admin/backup/delete", body: { id }, }, () => undefined @@ -4413,7 +4407,7 @@ export class Database { getLogEntries(options?: logs.LogEntriesOptions): Promise { return this.request( { - path: "/_admin/log/entries", + pathname: "/_admin/log/entries", search: options, }, (res) => res.parsedBody @@ -4441,7 +4435,7 @@ export class Database { ): Promise { return this.request( { - path: "/_admin/log", + pathname: "/_admin/log", search: options, }, (res) => res.parsedBody.messages @@ -4459,7 +4453,7 @@ export class Database { */ getLogLevel(): Promise> { return this.request({ - path: "/_admin/log/level", + pathname: "/_admin/log/level", }); } @@ -4481,7 +4475,7 @@ export class Database { ): Promise> { return this.request({ method: "PUT", - path: "/_admin/log/level", + pathname: "/_admin/log/level", body: levels, }); } @@ -4559,7 +4553,7 @@ export class Database { listPendingJobs(): Promise { return this.request( { - path: "/_api/job/pending", + pathname: "/_api/job/pending", }, (res) => res.parsedBody ); @@ -4578,7 +4572,7 @@ export class Database { listCompletedJobs(): Promise { return this.request( { - path: "/_api/job/done", + pathname: "/_api/job/done", }, (res) => res.parsedBody ); @@ -4602,7 +4596,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/job/expired`, + pathname: `/_api/job/expired`, search: { stamp: threshold / 1000 }, }, () => undefined @@ -4616,7 +4610,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/job/all`, + pathname: `/_api/job/all`, }, () => undefined ); diff --git a/src/errors.ts b/src/errors.ts index c11e44099..99fecd07c 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -12,53 +12,6 @@ import * as connection from "./connection.js"; import { ERROR_ARANGO_MAINTENANCE_MODE } from "./lib/codes.js"; -const messages: { [key: number]: string } = { - 0: "Network Error", - 304: "Not Modified", - 400: "Bad Request", - 401: "Unauthorized", - 402: "Payment Required", - 403: "Forbidden", - 404: "Not Found", - 405: "Method Not Allowed", - 406: "Not Acceptable", - 407: "Proxy Authentication Required", - 408: "Request Timeout", - 409: "Conflict", - 410: "Gone", - 411: "Length Required", - 412: "Precondition Failed", - 413: "Payload Too Large", - 414: "Request-URI Too Long", - 415: "Unsupported Media Type", - 416: "Requested Range Not Satisfiable", - 417: "Expectation Failed", - 418: "I'm a teapot", - 421: "Misdirected Request", - 422: "Unprocessable Entity", - 423: "Locked", - 424: "Failed Dependency", - 426: "Upgrade Required", - 428: "Precondition Required", - 429: "Too Many Requests", - 431: "Request Header Fields Too Large", - 444: "Connection Closed Without Response", - 451: "Unavailable For Legal Reasons", - 499: "Client Closed Request", - 500: "Internal Server Error", - 501: "Not Implemented", - 502: "Bad Gateway", - 503: "Service Unavailable", - 504: "Gateway Timeout", - 505: "HTTP Version Not Supported", - 506: "Variant Also Negotiates", - 507: "Insufficient Storage", - 508: "Loop Detected", - 510: "Not Extended", - 511: "Network Authentication Required", - 599: "Network Connect Timeout Error", -}; - /** * Indicates 
whether the given value represents an {@link ArangoError}. * @@ -79,46 +32,28 @@ export function isNetworkError(error: any): error is NetworkError { /** * @internal -* - * Indicates whether the given value represents an ArangoDB error response. - */ -export function isArangoErrorResponse(body: any): body is connection.ArangoErrorResponse { - return ( - body && - body.error === true && - typeof body.code === 'number' && - typeof body.errorMessage === 'string' && - typeof body.errorNum === 'number' - ); -} - -/** - * @internal - * + * * Indicates whether the given value represents a Node.js `SystemError`. */ -function isSystemError(err: any): err is SystemError { - return ( - err && - Object.getPrototypeOf(err) === Error.prototype && - typeof err.code === 'string' && - typeof err.errno !== 'undefined' && - typeof err.syscall === 'string' - ); +export function isSystemError(err: any): err is SystemError { + if (!err || !(err instanceof Error)) return false; + if (Object.getPrototypeOf(err) !== Error.prototype) return false; + const error = err as SystemError; + if (typeof error.code !== 'string') return false; + if (typeof error.syscall !== 'string') return false; + return typeof error.errno === 'number' || typeof error.errno === 'string'; } /** * @internal - * + * * Indicates whether the given value represents a Node.js `UndiciError`. */ -function isUndiciError(err: any): err is UndiciError { - return ( - err && - err instanceof Error && - typeof (err as UndiciError).code === 'string' && - (err as UndiciError).code.startsWith('UND_') - ); +export function isUndiciError(err: any): err is UndiciError { + if (!err || !(err instanceof Error)) return false; + const error = err as UndiciError; + if (typeof error.code !== 'string') return false; + return error.code.startsWith('UND_'); } /** @@ -126,31 +61,36 @@ function isUndiciError(err: any): err is UndiciError { * * Determines whether the given failed fetch error cause is safe to retry. */ -function isSafeToRetryFailedFetch(cause: Error): boolean | null { +function isSafeToRetryFailedFetch(error?: Error): boolean | null { + if (!error || !error.cause) return null; + let cause = error.cause as Error; + if (isArangoError(cause) || isNetworkError(cause)) { + return cause.isSafeToRetry; + } if (isSystemError(cause) && cause.syscall === 'connect' && cause.code === 'ECONNREFUSED') { return true; } if (isUndiciError(cause) && cause.code === 'UND_ERR_CONNECT_TIMEOUT') { return true; } - return null; + return isSafeToRetryFailedFetch(cause); } /** * Interface representing a Node.js `UndiciError`. - * + * * @internal */ -interface UndiciError extends Error { +export interface UndiciError extends Error { code: `UND_${string}`; } /** * Interface representing a Node.js `SystemError`. - * + * * @internal */ -interface SystemError extends Error { +export interface SystemError extends Error { code: string; errno: number | string; syscall: string; @@ -225,21 +165,16 @@ export class RequestAbortedError extends NetworkError { /** * Represents an error from a failed fetch request. - * + * * The root cause is often extremely difficult to determine. 
*/ export class FetchFailedError extends NetworkError { name = "FetchFailedError"; - constructor(message: string | undefined, request: globalThis.Request, options: { cause?: TypeError, isSafeToRetry?: boolean | null } = {}) { - let isSafeToRetry = options.isSafeToRetry; - if (options.cause?.cause instanceof Error) { - if (isSafeToRetry === undefined) { - isSafeToRetry = isSafeToRetryFailedFetch(options.cause.cause) || undefined; - } - if (message === undefined) { - message = `Fetch failed: ${options.cause.cause.message}`; - } + constructor(message: string | undefined, request: globalThis.Request, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + let isSafeToRetry = options.isSafeToRetry ?? isSafeToRetryFailedFetch(options.cause); + if (options.cause?.cause instanceof Error && options.cause.cause.message) { + message = `Fetch failed: ${options.cause.cause.message}`; } super(message ?? 'Fetch failed', request, { ...options, isSafeToRetry }); } @@ -265,8 +200,7 @@ export class HttpError extends NetworkError { * @internal */ constructor(response: connection.ProcessedResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { - const message = messages[response.status] ?? messages[500]; - super(message, response.request, options); + super(connection.getStatusMessage(response), response.request, options); this.response = response; this.code = response.status; } @@ -278,6 +212,10 @@ export class HttpError extends NetworkError { code: this.code, }; } + + toString() { + return `${this.name} ${this.code}: ${this.message}`; + } } /** @@ -288,7 +226,7 @@ export class ArangoError extends Error { /** * Indicates whether the request that caused this error can be safely retried. - * + * * @internal */ isSafeToRetry: boolean | null = null; diff --git a/src/graphs.ts b/src/graphs.ts index 5c7b29181..8ae1700ab 100644 --- a/src/graphs.ts +++ b/src/graphs.ts @@ -465,7 +465,7 @@ export class GraphVertexCollection< return await this._db.request( { method: "HEAD", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, }, @@ -573,7 +573,7 @@ export class GraphVertexCollection< if (rev) headers["if-match"] = rev; const result = this._db.request( { - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, headers, @@ -618,7 +618,7 @@ export class GraphVertexCollection< return this._db.request( { method: "POST", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/vertex/${encodeURIComponent(this._name)}`, body: data, @@ -671,7 +671,7 @@ export class GraphVertexCollection< return this._db.request( { method: "PUT", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, @@ -725,7 +725,7 @@ export class GraphVertexCollection< return this._db.request( { method: "PATCH", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, @@ -780,7 +780,7 @@ export class GraphVertexCollection< return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent( + pathname: 
`/_api/gharial/${encodeURIComponent( this.graph.name )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, search, @@ -883,7 +883,7 @@ export class GraphEdgeCollection< return await this._db.request( { method: "HEAD", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, }, @@ -988,7 +988,7 @@ export class GraphEdgeCollection< if (rev) headers["if-match"] = rev; const result = this._db.request( { - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, search, @@ -1031,7 +1031,7 @@ export class GraphEdgeCollection< return this._db.request( { method: "POST", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/edge/${encodeURIComponent(this._name)}`, body: data, @@ -1092,7 +1092,7 @@ export class GraphEdgeCollection< return this._db.request( { method: "PUT", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, @@ -1154,7 +1154,7 @@ export class GraphEdgeCollection< return this._db.request( { method: "PATCH", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, @@ -1201,7 +1201,7 @@ export class GraphEdgeCollection< return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this.graph.name )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, search, @@ -1296,7 +1296,7 @@ export class Graph { */ get(): Promise { return this._db.request( - { path: `/_api/gharial/${encodeURIComponent(this._name)}` }, + { pathname: `/_api/gharial/${encodeURIComponent(this._name)}` }, (res) => res.parsedBody.graph ); } @@ -1331,7 +1331,7 @@ export class Graph { return this._db.request( { method: "POST", - path: "/_api/gharial", + pathname: "/_api/gharial", body: { orphanCollections: orphanCollections && @@ -1367,7 +1367,7 @@ export class Graph { return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent(this._name)}`, + pathname: `/_api/gharial/${encodeURIComponent(this._name)}`, search: { dropCollections }, }, (res) => res.parsedBody.removed @@ -1414,7 +1414,7 @@ export class Graph { */ listVertexCollections(): Promise { return this._db.request( - { path: `/_api/gharial/${encodeURIComponent(this._name)}/vertex` }, + { pathname: `/_api/gharial/${encodeURIComponent(this._name)}/vertex` }, (res) => res.parsedBody.collections ); } @@ -1473,7 +1473,7 @@ export class Graph { return this._db.request( { method: "POST", - path: `/_api/gharial/${encodeURIComponent(this._name)}/vertex`, + pathname: `/_api/gharial/${encodeURIComponent(this._name)}/vertex`, body: { collection: collections.collectionToString(collection), options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, @@ -1512,7 +1512,7 @@ export class Graph { return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this._name )}/vertex/${encodeURIComponent(collections.collectionToString(collection))}`, search: { @@ -1579,7 
+1579,7 @@ export class Graph { */ listEdgeCollections(): Promise { return this._db.request( - { path: `/_api/gharial/${encodeURIComponent(this._name)}/edge` }, + { pathname: `/_api/gharial/${encodeURIComponent(this._name)}/edge` }, (res) => res.parsedBody.collections ); } @@ -1638,7 +1638,7 @@ export class Graph { return this._db.request( { method: "POST", - path: `/_api/gharial/${encodeURIComponent(this._name)}/edge`, + pathname: `/_api/gharial/${encodeURIComponent(this._name)}/edge`, body: { ...coerceEdgeDefinition(edgeDefinition), options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, @@ -1738,7 +1738,7 @@ export class Graph { return this._db.request( { method: "PUT", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this._name )}/edge/${encodeURIComponent(collections.collectionToString(collection))}`, body: { @@ -1779,7 +1779,7 @@ export class Graph { return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent( + pathname: `/_api/gharial/${encodeURIComponent( this._name )}/edge/${encodeURIComponent(collections.collectionToString(collection))}`, search: { diff --git a/src/index.ts b/src/index.ts index 4a814fddd..e29f74c42 100644 --- a/src/index.ts +++ b/src/index.ts @@ -12,7 +12,7 @@ * * @packageDocumentation */ -import * as connection from "./connection.js"; +import * as configuration from "./configuration.js"; import * as databases from "./databases.js"; if (typeof module !== "undefined" && typeof exports !== "undefined") { @@ -34,14 +34,14 @@ if (typeof module !== "undefined" && typeof exports !== "undefined") { * }); * ``` */ -export function arangojs(config?: connection.Config): databases.Database; +export function arangojs(config?: configuration.ConfigOptions): databases.Database; /** * Creates a new `Database` instance with its own connection pool. * * This is a wrapper function for the {@link databases.Database:constructor}. * * @param url - Base URL of the ArangoDB server or list of server URLs. - * Equivalent to the `url` option in {@link connection.Config}. + * Equivalent to the `url` option in {@link configuration.ConfigOptions}. 
* * @example * ```js @@ -50,7 +50,7 @@ export function arangojs(config?: connection.Config): databases.Database; * ``` */ export function arangojs(url: string | string[], name?: string): databases.Database; -export function arangojs(config?: string | string[] | connection.Config, name?: string) { +export function arangojs(config?: string | string[] | configuration.ConfigOptions, name?: string) { if (typeof config === "string" || Array.isArray(config)) { const url = config; return new databases.Database(url, name); diff --git a/src/jobs.ts b/src/jobs.ts index 0fbedb771..a315f64a3 100644 --- a/src/jobs.ts +++ b/src/jobs.ts @@ -89,7 +89,7 @@ export class Job { res = await this._db.request( { method: "PUT", - path: `/_api/job/${this._id}`, + pathname: `/_api/job/${this._id}`, }, false ); @@ -119,7 +119,7 @@ export class Job { return this._db.request( { method: "PUT", - path: `/_api/job/${this._id}/cancel`, + pathname: `/_api/job/${this._id}/cancel`, }, () => undefined ); @@ -132,7 +132,7 @@ export class Job { return this._db.request( { method: "DELETE", - path: `/_api/job/${this._id}`, + pathname: `/_api/job/${this._id}`, }, () => undefined ); @@ -157,7 +157,7 @@ export class Job { getCompleted(): Promise { return this._db.request( { - path: `/_api/job/${this._id}`, + pathname: `/_api/job/${this._id}`, }, (res) => res.status !== 204 ); diff --git a/src/lib/util.ts b/src/lib/util.ts index 507c6a170..357215abb 100644 --- a/src/lib/util.ts +++ b/src/lib/util.ts @@ -10,16 +10,21 @@ const THIRTY_MINUTES = 30 * 60_000; /** * @internal * - * Helper to merge two path segments. + * Helper to merge path segments. */ export function joinPath( - basePath: string | undefined, - path: string | undefined -): string | undefined { - if (!basePath) return path; - if (!path) return basePath; - if (!basePath.endsWith("/")) basePath += "/"; - return basePath + path.replace(/^\//g, ""); + ...pathList: (string | undefined)[] +): string { + if (!pathList.length) return ""; + return pathList.flatMap((path, i) => { + if (!path) return []; + if (i === pathList.length - 1) { + if (i === 0) return [path]; + return [path.replace(/^\/+/, "")]; + } + if (i === 0) return [path.replace(/\/+$/, "")]; + return [path.replace(/^\/+|\/+$/, "")]; + }).join("/"); } /** @@ -28,12 +33,12 @@ export function joinPath( * Utility function for merging headers. */ export function mergeHeaders( - ...headerses: (Headers | string[][] | Record> | undefined)[] + ...headersList: (Headers | string[][] | Record> | undefined)[] ) { - if (!headerses.length) return new Headers(); + if (!headersList.length) return new Headers(); return new Headers([ - ...headerses.flatMap(item => item ? [ - ...((item instanceof Headers || Array.isArray(item)) ? item : new Headers(item)) + ...headersList.flatMap(headers => headers ? [ + ...((headers instanceof Headers || Array.isArray(headers)) ? headers : new Headers(headers)) ] : []), ]); } @@ -48,6 +53,7 @@ export function normalizeUrl(url: string): string { if (raw) url = (raw[1] === "tcp" ? "http" : "https") + raw[2]; const unix = url.match(/^(?:(http|https)\+)?unix:\/\/(\/.+)/); if (unix) url = `${unix[1] || "http"}://unix:${unix[2]}`; + else if (!url.endsWith('/')) url += '/'; return url; } diff --git a/src/queries.ts b/src/queries.ts index 9be481c95..b0769424b 100644 --- a/src/queries.ts +++ b/src/queries.ts @@ -4,7 +4,7 @@ * ``` * * The "query" module provides query related types for TypeScript. 
- * + * * @packageDocumentation */ diff --git a/src/routes.ts b/src/routes.ts index 3f52adf55..97c7c1f3c 100644 --- a/src/routes.ts +++ b/src/routes.ts @@ -10,14 +10,14 @@ */ import * as connections from "./connection.js"; import * as databases from "./databases.js"; -import { mergeHeaders } from "./lib/util.js"; +import * as util from "./lib/util.js"; /** * Represents an arbitrary route relative to an ArangoDB database. */ export class Route { protected _db: databases.Database; - protected _path: string; + protected _pathname: string; protected _headers: Headers; /** @@ -25,13 +25,13 @@ export class Route { */ constructor( db: databases.Database, - path: string = "", + pathname: string = "", headers: Headers | Record = {} ) { - if (!path) path = ""; - else if (path.charAt(0) !== "/") path = `/${path}`; + if (!pathname) pathname = ""; + else if (pathname.charAt(0) !== "/") pathname = `/${pathname}`; this._db = db; - this._path = path; + this._pathname = pathname; this._headers = headers instanceof Headers ? headers : new Headers(headers); } @@ -45,8 +45,8 @@ export class Route { /** * Path of this route. */ - get path() { - return this._path; + get pathname() { + return this._pathname; } /** @@ -60,7 +60,7 @@ export class Route { * Creates a new route relative to this route that inherits any of its default * HTTP headers. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param headers - Additional headers that will be sent with each request. * * @example @@ -70,13 +70,11 @@ export class Route { * const users = foxx.route("/users"); * ``` */ - route(path: string, headers?: Headers | Record) { - if (!path) path = ""; - else if (path.charAt(0) !== "/") path = `/${path}`; + route(pathname: string, headers?: Headers | Record) { return new Route( this._db, - this._path + path, - mergeHeaders(this._headers, headers) + util.joinPath(this._pathname, pathname), + util.mergeHeaders(this._headers, headers) ); } @@ -92,7 +90,7 @@ export class Route { * const foxx = db.route("/my-foxx-service"); * const res = await foxx.request({ * method: "POST", - * path: "/users", + * pathname: "/users", * body: { * username: "admin", * password: "hunter2" @@ -100,22 +98,21 @@ export class Route { * }); * ``` */ - request(options?: connections.RequestOptions) { - const opts = { ...options }; - if (!opts.path || opts.path === "/") opts.path = ""; - else if (!this._path || opts.path.charAt(0) === "/") opts.path = opts.path; - else opts.path = `/${opts.path}`; - opts.basePath = this._path; - opts.headers = mergeHeaders(this._headers, opts.headers); - opts.method = opts.method ? opts.method.toUpperCase() : "GET"; - return this._db.request(opts, false); + request(options: connections.RequestOptions = {}) { + const { method = "GET", pathname, headers, ...opts } = options; + return this._db.request({ + ...opts, + method: method.toUpperCase(), + pathname: util.joinPath(this._pathname, pathname), + headers: util.mergeHeaders(this._headers, headers), + }, false); } /** * Performs a DELETE request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. 
* @@ -127,7 +124,7 @@ export class Route { * ``` */ delete( - path: string, + pathname: string, search?: URLSearchParams | Record, headers?: Headers | Record ): Promise; @@ -151,16 +148,16 @@ export class Route { headers?: Headers | Record ): Promise; delete(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; - return this.request({ method: "DELETE", path, search, headers }); + return this.request({ method: "DELETE", pathname, search, headers }); } /** * Performs a GET request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. * @@ -172,7 +169,7 @@ export class Route { * ``` */ get( - path: string, + pathname: string, search?: URLSearchParams | Record, headers?: Headers | Record ): Promise; @@ -196,16 +193,16 @@ export class Route { headers?: Headers | Record ): Promise; get(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; - return this.request({ method: "GET", path, search, headers }); + return this.request({ method: "GET", pathname, search, headers }); } /** * Performs a HEAD request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. * @@ -217,7 +214,7 @@ export class Route { * ``` */ head( - path: string, + pathname: string, search?: URLSearchParams | Record, headers?: Headers | Record ): Promise; @@ -241,16 +238,16 @@ export class Route { headers?: Headers | Record ): Promise; head(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; - return this.request({ method: "HEAD", path, search, headers }); + return this.request({ method: "HEAD", pathname, search, headers }); } /** * Performs a PATCH request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param body - Body of the request object. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. @@ -263,7 +260,7 @@ export class Route { * ``` */ patch( - path: string, + pathname: string, body?: any, search?: URLSearchParams | Record, headers?: Headers | Record @@ -292,16 +289,16 @@ export class Route { headers?: Headers | Record ): Promise; patch(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + const pathname = typeof args[0] === "string" ? 
args.shift() : undefined; const [body, search, headers] = args; - return this.request({ method: "PATCH", path, body, search, headers }); + return this.request({ method: "PATCH", pathname, body, search, headers }); } /** * Performs a POST request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param body - Body of the request object. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. @@ -317,7 +314,7 @@ export class Route { * ``` */ post( - path: string, + pathname: string, body?: any, search?: URLSearchParams | Record, headers?: Headers | Record @@ -349,16 +346,16 @@ export class Route { headers?: Headers | Record ): Promise; post(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; - return this.request({ method: "POST", path, body, search, headers }); + return this.request({ method: "POST", pathname, body, search, headers }); } /** * Performs a PUT request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param body - Body of the request object. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. @@ -371,7 +368,7 @@ export class Route { * ``` */ put( - path: string, + pathname: string, body?: any, search?: URLSearchParams | Record, headers?: Headers | Record @@ -400,8 +397,8 @@ export class Route { headers?: Headers | Record ): Promise; put(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + const pathname = typeof args[0] === "string" ? 
args.shift() : undefined; const [body, search, headers] = args; - return this.request({ method: "PUT", path, body, search, headers }); + return this.request({ method: "PUT", pathname, body, search, headers }); } } diff --git a/src/test/00-basics.ts b/src/test/00-basics.ts index 8a0c24adb..43de2a9b9 100644 --- a/src/test/00-basics.ts +++ b/src/test/00-basics.ts @@ -34,31 +34,39 @@ describe("Configuring the driver", () => { describe("with headers", () => { it("applies the headers", (done) => { const db = new Database({ - headers: { - "x-one": "1", - "x-two": "2", - }, + fetchOptions: { + headers: { + "x-one": "1", + "x-two": "2", + }, + } }); (db as any)._connection._hosts = [ - ({ headers }: any) => { - expect(headers.get("x-one")).to.equal("1"); - expect(headers.get("x-two")).to.equal("2"); - done(); + { + fetch: ({ headers }: any) => { + expect(headers.get("x-one")).to.equal("1"); + expect(headers.get("x-two")).to.equal("2"); + done(); + }, + close: () => { }, }, ]; - db.request({ headers: {} }, () => {}); + db.request({ headers: {} }, () => { }); }); }); describe("with an arangoVersion", () => { it("sets the x-arango-version header", (done) => { const db = new Database({ arangoVersion: 99999 }); (db as any)._connection._hosts = [ - ({ headers }: any) => { - expect(headers.get("x-arango-version")).to.equal("99999"); - done(); + { + fetch: ({ headers }: any) => { + expect(headers.get("x-arango-version")).to.equal("99999"); + done(); + }, + close: () => { }, }, ]; - db.request({ headers: {} }, () => {}); + db.request({ headers: {} }, () => { }); }); }); }); diff --git a/src/test/02-accessing-collections.ts b/src/test/02-accessing-collections.ts index 12a687afa..2217ff2f0 100644 --- a/src/test/02-accessing-collections.ts +++ b/src/test/02-accessing-collections.ts @@ -42,7 +42,7 @@ describe("Accessing collections", function () { ...nonSystemCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), @@ -50,7 +50,7 @@ describe("Accessing collections", function () { const collection = db.collection(name); await collection.create({ isSystem: true }); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), @@ -94,14 +94,14 @@ describe("Accessing collections", function () { ...documentCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), @@ -109,7 +109,7 @@ describe("Accessing collections", function () { const collection = db.collection(name); await collection.create({ isSystem: true }); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), diff --git a/src/test/03-accessing-graphs.ts b/src/test/03-accessing-graphs.ts index 0fe202f0a..249d78bef 100644 --- a/src/test/03-accessing-graphs.ts +++ b/src/test/03-accessing-graphs.ts @@ -38,14 +38,14 @@ describe("Accessing graphs", function () { 
...vertexCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), @@ -61,7 +61,7 @@ describe("Accessing graphs", function () { })) ); await db.waitForPropagation( - { path: `/_api/gharial/${graph.name}` }, + { pathname: `/_api/gharial/${graph.name}` }, 10000 ); }), @@ -90,14 +90,14 @@ describe("Accessing graphs", function () { ...vertexCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), @@ -113,7 +113,7 @@ describe("Accessing graphs", function () { })) ); await db.waitForPropagation( - { path: `/_api/gharial/${graph.name}` }, + { pathname: `/_api/gharial/${graph.name}` }, 10000 ); }), diff --git a/src/test/04-transactions.ts b/src/test/04-transactions.ts index aef369c4d..d853e2afe 100644 --- a/src/test/04-transactions.ts +++ b/src/test/04-transactions.ts @@ -52,7 +52,7 @@ describe("Transactions", () => { beforeEach(async () => { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); diff --git a/src/test/07-routes.ts b/src/test/07-routes.ts index acce96925..ab12e49e2 100644 --- a/src/test/07-routes.ts +++ b/src/test/07-routes.ts @@ -17,7 +17,7 @@ describe("Arbitrary HTTP routes", () => { it("creates a route for the given path", () => { const path = "/hi"; const route = db.route(path); - expect((route as any)._path).to.equal(path); + expect((route as any)._pathname).to.equal(path); }); it("passes the given headers to the new route", () => { const route = db.route("/hello", { "x-magic": "awesome" }); @@ -38,7 +38,7 @@ describe("Route API", function () { db = await system.createDatabase(name); collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); @@ -55,7 +55,7 @@ describe("Route API", function () { describe("route.route", () => { it("should concat path", () => { const route = db.route("/_api").route("/version"); - expect(route).to.have.property("_path", "/_api/version"); + expect(route).to.have.property("_pathname", "/_api/version"); }); }); describe("route.get", () => { diff --git a/src/test/08-cursors.ts b/src/test/08-cursors.ts index 493c5d7ab..dabdb0162 100644 --- a/src/test/08-cursors.ts +++ b/src/test/08-cursors.ts @@ -226,7 +226,7 @@ describe("Item-wise Cursor API", () => { try { await db.request({ method: "PUT", - path: `/_api/cursor/${id}`, + pathname: `/_api/cursor/${id}`, hostUrl: hostUrl, }); } catch (e: any) { @@ -442,7 +442,7 @@ describe("Batch-wise Cursor API", () => { try { await db.request({ method: "PUT", - path: 
`/_api/cursor/${id}`, + pathname: `/_api/cursor/${id}`, hostUrl: hostUrl, }); } catch (e: any) { diff --git a/src/test/09-collection-metadata.ts b/src/test/09-collection-metadata.ts index 3da01f478..3317ed738 100644 --- a/src/test/09-collection-metadata.ts +++ b/src/test/09-collection-metadata.ts @@ -17,7 +17,7 @@ describe("Collection metadata", function () { db = system.database(dbName); collection = await db.createCollection(collectionName); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); diff --git a/src/test/10-manipulating-collections.ts b/src/test/10-manipulating-collections.ts index cf7f0a2d8..3de70f40d 100644 --- a/src/test/10-manipulating-collections.ts +++ b/src/test/10-manipulating-collections.ts @@ -23,7 +23,7 @@ describe("Manipulating collections", function () { beforeEach(async () => { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); @@ -41,7 +41,7 @@ describe("Manipulating collections", function () { `document-collection-${Date.now()}` ); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); const info = await db.collection(collection.name).get(); @@ -55,7 +55,7 @@ describe("Manipulating collections", function () { `edge-collection-${Date.now()}` ); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); const info = await db.collection(collection.name).get(); diff --git a/src/test/11-managing-indexes.ts b/src/test/11-managing-indexes.ts index 4727180be..1ddeea9f4 100644 --- a/src/test/11-managing-indexes.ts +++ b/src/test/11-managing-indexes.ts @@ -18,7 +18,7 @@ describe("Managing indexes", function () { db = system.database(dbName); collection = await db.createCollection(collectionName); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); diff --git a/src/test/13-bulk-imports.ts b/src/test/13-bulk-imports.ts index 3eac6c600..fbed560d6 100644 --- a/src/test/13-bulk-imports.ts +++ b/src/test/13-bulk-imports.ts @@ -16,7 +16,7 @@ describe("Bulk imports", function () { db = system.database(dbName); collection = await db.createCollection(collectionName); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); diff --git a/src/test/14-document-collections.ts b/src/test/14-document-collections.ts index a2f2baf48..4bd44bf0e 100644 --- a/src/test/14-document-collections.ts +++ b/src/test/14-document-collections.ts @@ -24,7 +24,7 @@ describe("DocumentCollection API", function () { beforeEach(async () => { collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); diff --git a/src/test/15-edge-collections.ts b/src/test/15-edge-collections.ts index d1fc51e7c..2dd3c5f5f 100644 --- a/src/test/15-edge-collections.ts +++ b/src/test/15-edge-collections.ts @@ -28,7 +28,7 @@ describe("EdgeCollection API", function () { beforeEach(async () => { collection = await db.createEdgeCollection(`c_${Date.now()}`); await 
db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); diff --git a/src/test/16-graphs.ts b/src/test/16-graphs.ts index f09684a29..e60019cac 100644 --- a/src/test/16-graphs.ts +++ b/src/test/16-graphs.ts @@ -12,14 +12,14 @@ async function createCollections(db: Database) { ...vertexCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), @@ -109,7 +109,7 @@ describe("Graph API", function () { })) ); await db.waitForPropagation( - { path: `/_api/gharial/${graph.name}` }, + { pathname: `/_api/gharial/${graph.name}` }, 10000 ); const data = await graph.get(); diff --git a/src/test/17-graph-vertices.ts b/src/test/17-graph-vertices.ts index dc7097315..fa884e4c4 100644 --- a/src/test/17-graph-vertices.ts +++ b/src/test/17-graph-vertices.ts @@ -14,14 +14,14 @@ async function createCollections(db: Database) { ...vertexCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }), @@ -41,8 +41,8 @@ async function createGraph( to: vertexCollectionNames, })) ); - await (graph as any)._db.waitForPropagation( - { path: `/_api/gharial/${graph.name}` }, + await graph.database.waitForPropagation( + { pathname: `/_api/gharial/${graph.name}` }, 10000 ); return result; @@ -91,7 +91,7 @@ describe("Manipulating graph vertices", function () { beforeEach(async () => { vertexCollection = await db.createCollection(`xc_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${vertexCollection.name}` }, + { pathname: `/_api/collection/${vertexCollection.name}` }, 10000 ); }); @@ -108,7 +108,7 @@ describe("Manipulating graph vertices", function () { beforeEach(async () => { vertexCollection = await db.createCollection(`xc_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${vertexCollection.name}` }, + { pathname: `/_api/collection/${vertexCollection.name}` }, 10000 ); await graph.addVertexCollection(vertexCollection.name); diff --git a/src/test/18-graph-edges.ts b/src/test/18-graph-edges.ts index 4f8a59255..1c412813b 100644 --- a/src/test/18-graph-edges.ts +++ b/src/test/18-graph-edges.ts @@ -31,7 +31,7 @@ describe("Manipulating graph edges", function () { to: ["person"], }, ]); - await db.waitForPropagation({ path: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation({ pathname: `/_api/gharial/${graph.name}` }, 10000); }); afterEach(async () => { await graph.drop(); diff --git a/src/test/19-graph-vertex-collections.ts b/src/test/19-graph-vertex-collections.ts index 4f66c4ef4..a70386237 100644 --- a/src/test/19-graph-vertex-collections.ts +++ b/src/test/19-graph-vertex-collections.ts @@ -22,7 +22,7 @@ describe("GraphVertexCollection 
API", function () { to: ["person"], }, ]); - await db.waitForPropagation({ path: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation({ pathname: `/_api/gharial/${graph.name}` }, 10000); collection = graph.vertexCollection("person"); }); after(async () => { diff --git a/src/test/20-graph-edge-collections.ts b/src/test/20-graph-edge-collections.ts index 33d798bfc..089f7e8ff 100644 --- a/src/test/20-graph-edge-collections.ts +++ b/src/test/20-graph-edge-collections.ts @@ -22,7 +22,7 @@ describe("GraphEdgeCollection API", function () { to: ["person"], }, ]); - await db.waitForPropagation({ path: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation({ pathname: `/_api/gharial/${graph.name}` }, 10000); collection = graph.edgeCollection("knows"); await graph .vertexCollection("person") diff --git a/src/test/23-aql-queries-stream.ts b/src/test/23-aql-queries-stream.ts index 5af17c061..2f0dd8d75 100644 --- a/src/test/23-aql-queries-stream.ts +++ b/src/test/23-aql-queries-stream.ts @@ -46,7 +46,7 @@ describe("AQL Stream queries", function () { }); allCursors.push(cursor); expect(cursor.count).to.equal(undefined); - expect((cursor as any).batches.hasMore).to.equal(true); + expect(cursor.batches.hasMore).to.equal(true); }); it("supports compact queries with options", async () => { const query: any = { @@ -60,7 +60,7 @@ describe("AQL Stream queries", function () { }); allCursors.push(cursor); expect(cursor.count).to.equal(undefined); // count will be ignored - expect((cursor as any).batches.hasMore).to.equal(true); + expect(cursor.batches.hasMore).to.equal(true); }); }); describe("with some data", () => { @@ -68,7 +68,7 @@ describe("AQL Stream queries", function () { before(async () => { const collection = await db.createCollection(cname); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); await Promise.all( diff --git a/src/test/24-accessing-views.ts b/src/test/24-accessing-views.ts index 7e77a56ae..a1892fc3b 100644 --- a/src/test/24-accessing-views.ts +++ b/src/test/24-accessing-views.ts @@ -37,7 +37,7 @@ describe("Accessing views", function () { const view = db.view(name); await view.create({ type: "arangosearch" }); await db.waitForPropagation( - { path: `/_api/view/${view.name}` }, + { pathname: `/_api/view/${view.name}` }, 10000 ); }) @@ -60,7 +60,7 @@ describe("Accessing views", function () { const view = db.view(name); await view.create({ type: "arangosearch" }); await db.waitForPropagation( - { path: `/_api/view/${view.name}` }, + { pathname: `/_api/view/${view.name}` }, 10000 ); }) diff --git a/src/test/25-view-metadata.ts b/src/test/25-view-metadata.ts index 711c6ce52..1c1eda411 100644 --- a/src/test/25-view-metadata.ts +++ b/src/test/25-view-metadata.ts @@ -16,7 +16,7 @@ describe("View metadata", function () { db = system.database(dbName); view = db.view(viewName); await view.create({ type: "arangosearch" }); - await db.waitForPropagation({ path: `/_api/view/${view.name}` }, 10000); + await db.waitForPropagation({ pathname: `/_api/view/${view.name}` }, 10000); }); after(async () => { await system.dropDatabase(dbName); diff --git a/src/test/26-manipulating-views.ts b/src/test/26-manipulating-views.ts index afceeed78..027fa6b31 100644 --- a/src/test/26-manipulating-views.ts +++ b/src/test/26-manipulating-views.ts @@ -27,7 +27,7 @@ describe("Manipulating views", function () { beforeEach(async () => { view = db.view(`v-${Date.now()}`); await view.create({ type: 
"arangosearch" }); - await db.waitForPropagation({ path: `/_api/view/${view.name}` }, 10000); + await db.waitForPropagation({ pathname: `/_api/view/${view.name}` }, 10000); }); afterEach(async () => { try { @@ -41,7 +41,7 @@ describe("Manipulating views", function () { it("creates a new arangosearch view", async () => { const view = db.view(`asv-${Date.now()}`); await view.create({ type: "arangosearch" }); - await db.waitForPropagation({ path: `/_api/view/${view.name}` }, 10000); + await db.waitForPropagation({ pathname: `/_api/view/${view.name}` }, 10000); const info = await view.get(); expect(info).to.have.property("name", view.name); expect(info).to.have.property("type", "arangosearch"); diff --git a/src/test/27-query-management.ts b/src/test/27-query-management.ts index a9dff5c64..3b80c7c85 100644 --- a/src/test/27-query-management.ts +++ b/src/test/27-query-management.ts @@ -28,7 +28,7 @@ describe("Query Management API", function () { // the following makes calls to /_db/${name} on all coordinators, thus waiting // long enough for the database to become available on all instances if (Array.isArray(config.url)) { - await db.waitForPropagation({ path: `/_api/version` }, 10000); + await db.waitForPropagation({ pathname: `/_api/version` }, 10000); } }); after(async () => { @@ -95,7 +95,7 @@ describe("Query Management API", function () { }); allCursors.push(cursor); expect(cursor.count).to.equal(10); - expect((cursor as any).batches.hasMore).to.equal(true); + expect(cursor.batches.hasMore).to.equal(true); }); it("supports AQB queries", async () => { const cursor = await db.query({ toAQL: () => "RETURN 42" }); @@ -126,7 +126,7 @@ describe("Query Management API", function () { const cursor = await db.query(query, { batchSize: 2, count: true }); allCursors.push(cursor); expect(cursor.count).to.equal(10); - expect((cursor as any).batches.hasMore).to.equal(true); + expect(cursor.batches.hasMore).to.equal(true); }); }); diff --git a/src/test/28-accessing-analyzers.ts b/src/test/28-accessing-analyzers.ts index 540189a1e..760323dab 100644 --- a/src/test/28-accessing-analyzers.ts +++ b/src/test/28-accessing-analyzers.ts @@ -42,7 +42,7 @@ describe("Accessing analyzers", function () { const analyzer = db.analyzer(name.replace(/^[^:]+::/, "")); await analyzer.create({ type: "identity" }); await db.waitForPropagation( - { path: `/_api/analyzer/${analyzer.name}` }, + { pathname: `/_api/analyzer/${analyzer.name}` }, 65000 ); }) @@ -70,7 +70,7 @@ describe("Accessing analyzers", function () { const analyzer = db.analyzer(name.replace(/^[^:]+::/, "")); await analyzer.create({ type: "identity" }); await db.waitForPropagation( - { path: `/_api/analyzer/${analyzer.name}` }, + { pathname: `/_api/analyzer/${analyzer.name}` }, 65000 ); }) diff --git a/src/test/29-manipulating-analyzers.ts b/src/test/29-manipulating-analyzers.ts index d3324b560..51fd3f437 100644 --- a/src/test/29-manipulating-analyzers.ts +++ b/src/test/29-manipulating-analyzers.ts @@ -4,7 +4,7 @@ import { Database } from "../databases.js"; import { config } from "./_config.js"; function waitForAnalyzer(db: Database, name: string) { - return db.waitForPropagation({ path: `/_api/analyzer/${name}` }, 30000); + return db.waitForPropagation({ pathname: `/_api/analyzer/${name}` }, 30000); } describe("Manipulating analyzers", function () { diff --git a/src/test/29-queue-time.ts b/src/test/29-queue-time.ts index 88527011a..e1cf72dca 100644 --- a/src/test/29-queue-time.ts +++ b/src/test/29-queue-time.ts @@ -18,7 +18,7 @@ describe("Queue time metrics", 
function () { db = system.database(dbName); collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); diff --git a/src/test/30-concurrent-transactions.ts b/src/test/30-concurrent-transactions.ts index 3a15e949f..38d33a801 100644 --- a/src/test/30-concurrent-transactions.ts +++ b/src/test/30-concurrent-transactions.ts @@ -42,7 +42,7 @@ describe("Transactions", function () { beforeEach(async () => { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); @@ -57,7 +57,7 @@ describe("Transactions", function () { it("can run concurrent transactions in parallel", async () => { const conn = (db as any)._connection as Connection; - const range = Array.from(Array((conn as any)._maxTasks).keys()).map( + const range = Array.from(Array((conn as any)._taskPoolSize).keys()).map( (i) => i + 1 ); let failed = 0; @@ -112,7 +112,7 @@ describe("Transactions", function () { }); it("respects transactional guarantees", async () => { const conn = (db as any)._connection as Connection; - const range = Array.from(Array((conn as any)._maxTasks).keys()).map( + const range = Array.from(Array((conn as any)._taskPoolSize).keys()).map( (i) => i + 1 ); const key = "test"; diff --git a/src/test/31-conflicts.ts b/src/test/31-conflicts.ts index 2a1700b7f..711faf46e 100644 --- a/src/test/31-conflicts.ts +++ b/src/test/31-conflicts.ts @@ -19,7 +19,7 @@ describe("config.maxRetries", () => { db = await system.createDatabase(dbName); collection = await db.createCollection(collectionName); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, + { pathname: `/_api/collection/${collection.name}` }, 10000 ); }); diff --git a/src/test/_config.ts b/src/test/_config.ts index 055096de0..71d0aa205 100644 --- a/src/test/_config.ts +++ b/src/test/_config.ts @@ -1,5 +1,4 @@ -import { LoadBalancingStrategy } from "../connection.js"; -import { Config } from "../config.js"; +import { ConfigOptions, LoadBalancingStrategy } from "../configuration.js"; const ARANGO_URL = process.env.TEST_ARANGODB_URL || "http://127.0.0.1:8529"; const ARANGO_VERSION = Number( @@ -18,8 +17,8 @@ else if (ARANGO_RELEASE.includes(".")) { const ARANGO_LOAD_BALANCING_STRATEGY = process.env .TEST_ARANGO_LOAD_BALANCING_STRATEGY as LoadBalancingStrategy | undefined; -export const config: Config & { - arangoVersion: NonNullable; +export const config: ConfigOptions & { + arangoVersion: NonNullable; } = ARANGO_URL.includes(",") ? 
{ url: ARANGO_URL.split(",").filter((s) => Boolean(s)), diff --git a/src/transactions.ts b/src/transactions.ts index 9329fb14b..45107adcf 100644 --- a/src/transactions.ts +++ b/src/transactions.ts @@ -280,7 +280,7 @@ export class Transaction { get(): Promise { return this._db.request( { - path: `/_api/transaction/${encodeURIComponent(this.id)}`, + pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, }, (res) => res.parsedBody.result ); @@ -306,7 +306,7 @@ export class Transaction { return this._db.request( { method: "PUT", - path: `/_api/transaction/${encodeURIComponent(this.id)}`, + pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, allowDirtyRead, }, (res) => res.parsedBody.result @@ -333,7 +333,7 @@ export class Transaction { return this._db.request( { method: "DELETE", - path: `/_api/transaction/${encodeURIComponent(this.id)}`, + pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, allowDirtyRead, }, (res) => res.parsedBody.result diff --git a/src/views.ts b/src/views.ts index 38a23554d..82d4f62fa 100644 --- a/src/views.ts +++ b/src/views.ts @@ -553,7 +553,7 @@ export class View { */ get(): Promise> { return this._db.request({ - path: `/_api/view/${encodeURIComponent(this._name)}`, + pathname: `/_api/view/${encodeURIComponent(this._name)}`, }); } @@ -604,7 +604,7 @@ export class View { > { return this._db.request({ method: "POST", - path: "/_api/view", + pathname: "/_api/view", body: { ...options, name: this._name, @@ -653,7 +653,7 @@ export class View { */ properties(): Promise> { return this._db.request({ - path: `/_api/view/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/view/${encodeURIComponent(this._name)}/properties`, }); } @@ -683,7 +683,7 @@ export class View { > { return this._db.request({ method: "PATCH", - path: `/_api/view/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/view/${encodeURIComponent(this._name)}/properties`, body: properties ?? {}, }); } @@ -714,7 +714,7 @@ export class View { > { return this._db.request({ method: "PUT", - path: `/_api/view/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/view/${encodeURIComponent(this._name)}/properties`, body: properties ?? {}, }); } @@ -735,7 +735,7 @@ export class View { return this._db.request( { method: "DELETE", - path: `/_api/view/${encodeURIComponent(this._name)}`, + pathname: `/_api/view/${encodeURIComponent(this._name)}`, }, (res) => res.parsedBody.result ); From ea20d2cc59497ad54edc4326a44d9268d36fb4dc Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Mon, 9 Dec 2024 12:55:11 +0100 Subject: [PATCH 09/21] 10.0.0-alpha.1 --- CHANGELOG.md | 3 ++- package.json | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 534e366e2..891e61934 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,7 +14,7 @@ This driver uses semantic versioning: - A change in the major version (e.g. 1.Y.Z -> 2.0.0) indicates _breaking_ changes that require changes in your code to upgrade. -## [Unreleased] +## [10.0.0-alpha.1] - 2024-12-09 ### Changed @@ -2350,6 +2350,7 @@ For a detailed list of changes between pre-release versions of v7 see the Graph methods now only return the relevant part of the response body. 
+[10.0.0-alpha.1]: https://github.com/arangodb/arangojs/compare/v10.0.0-alpha.0...v10.0.0-alpha.1 [10.0.0-alpha.0]: https://github.com/arangodb/arangojs/compare/v9.2.0...v10.0.0-alpha.0 [9.2.0]: https://github.com/arangodb/arangojs/compare/v9.1.0...v9.2.0 [9.1.0]: https://github.com/arangodb/arangojs/compare/v9.0.0...v9.1.0 diff --git a/package.json b/package.json index 3091a46ce..9b539c722 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "private": true, "type": "module", "name": "arangojs", - "version": "10.0.0-alpha.0", + "version": "10.0.0-alpha.1", "engines": { "node": ">=18" }, From 3fbd3b512d49b33fb55281720982068d88254f64 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Mon, 9 Dec 2024 12:58:22 +0100 Subject: [PATCH 10/21] Update CHANGELOG.md --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 891e61934..25bb8897c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,11 @@ This driver uses semantic versioning: ## [10.0.0-alpha.1] - 2024-12-09 +This is a major release and breaks backwards compatibility. + +See [the migration guide](./MIGRATING.md#v9-to-v10) for detailed instructions +for upgrading your code to arangojs v10. + ### Changed - Renamed `CollectionDropOptions` type to `DropCollectionOptions` From d269f53fb095ef10760c21f23130832371fb7206 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Mon, 9 Dec 2024 15:17:46 +0100 Subject: [PATCH 11/21] Prevent overzealous bundling --- src/connection.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/connection.ts b/src/connection.ts index 6831d07f2..5e5155b0d 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -71,7 +71,9 @@ function createHost(arangojsHostUrl: string, agentOptions?: any): Host { createDispatcher = (async () => { let undici: any; try { - undici = await import("undici"); + // Prevent overzealous bundlers from attempting to bundle undici + const undiciName = "undici"; + undici = await import(undiciName); } catch (cause) { if (socketPath) { throw new Error('Undici is required for Unix domain sockets', { cause }); From 460cf2bb941d7537762359c18e327cc03522a5f2 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Mon, 9 Dec 2024 15:18:15 +0100 Subject: [PATCH 12/21] Remove unused web.js --- src/web.js | 25 ------------------------- 1 file changed, 25 deletions(-) delete mode 100644 src/web.js diff --git a/src/web.js b/src/web.js deleted file mode 100644 index e7c6d2aaf..000000000 --- a/src/web.js +++ /dev/null @@ -1,25 +0,0 @@ -/* eslint-disable @typescript-eslint/no-var-requires */ -"use strict"; -const { aql } = require("./aql"); -const { CollectionStatus, CollectionType } = require("./collection"); -const { ViewType } = require("./view"); -const { Database } = require("./database"); - -module.exports = exports = arangojs; - -function arangojs(config) { - if (typeof config === "string" || Array.isArray(config)) { - const url = config; - return new Database(url); - } - return new Database(config); -} - -Object.assign(arangojs, { - aql, - arangojs, - CollectionStatus, - CollectionType, - Database, - ViewType, -}); From 49edbcf06126ceb58332d52a94e8a08872fe3717 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Mon, 9 Dec 2024 15:27:47 +0100 Subject: [PATCH 13/21] npm pkg fix --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9b539c722..74cce3919 100644 --- a/package.json +++ b/package.json @@ -14,7 +14,7 @@ }, "repository": { "type": "git", - "url": 
"https://github.com/arangodb/arangojs.git" + "url": "git+https://github.com/arangodb/arangojs.git" }, "author": "ArangoDB GmbH", "contributors": [ From 23b89ad7ac6707eb2e17cbc1ac2c4c581f21d90e Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 10 Dec 2024 11:41:32 +0100 Subject: [PATCH 14/21] Looser ArangoDB error response handling --- src/connection.ts | 17 ++++++++++------- src/errors.ts | 7 +++++-- 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/src/connection.ts b/src/connection.ts index 5e5155b0d..5cd5883f1 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -304,13 +304,16 @@ export type ArangoApiResponse = T & ArangoResponseMetadata; /** * Indicates whether the given value represents an ArangoDB error response. */ -export function isArangoErrorResponse(body: any): body is ArangoErrorResponse { - if (!body || typeof body !== 'object') return false; +export function isArangoErrorResponse( + body: unknown, +): body is ArangoErrorResponse { + if (!body || typeof body !== "object") return false; + const obj = body as Record; return ( - body.error === true && - typeof body.code === 'number' && - typeof body.errorMessage === 'string' && - typeof body.errorNum === 'number' + obj.error === true && + typeof obj.errorMessage === "string" && + typeof obj.errorNum === "number" && + (obj.code === undefined || typeof obj.code === "number") ); } @@ -325,7 +328,7 @@ export type ArangoErrorResponse = { /** * Intended response status code as provided in the response body. */ - code: number; + code?: number; /** * Error message as provided in the response body. */ diff --git a/src/errors.ts b/src/errors.ts index 99fecd07c..2b38982f6 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -255,7 +255,7 @@ export class ArangoError extends Error { /** * HTTP status code included in the server error response object. */ - code: number; + code?: number; /** * @internal @@ -271,7 +271,10 @@ export class ArangoError extends Error { /** * Creates a new `ArangoError` from an ArangoDB error response. 
*/ - constructor(data: connection.ArangoErrorResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + constructor( + data: Omit, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { const { isSafeToRetry, ...opts } = options; super(data.errorMessage, opts); this.errorNum = data.errorNum; From 897e7b8f0f7c219a0c0c2e3ef4a3e53898b95da4 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 10 Dec 2024 11:41:37 +0100 Subject: [PATCH 15/21] Prettier --- src/administration.ts | 8 +- src/analyzers.ts | 149 +++---- src/aql.ts | 4 +- src/collections.ts | 369 ++++++++++------- src/configuration.ts | 6 +- src/connection.ts | 182 +++++---- src/cursors.ts | 77 ++-- src/databases.ts | 502 ++++++++++++++---------- src/documents.ts | 13 +- src/errors.ts | 74 +++- src/foxx-manifest.ts | 16 +- src/graphs.ts | 254 +++++++----- src/index.ts | 14 +- src/indexes.ts | 65 +-- src/jobs.ts | 18 +- src/lib/util.ts | 45 ++- src/lib/x3-linkedlist.ts | 70 ++-- src/logs.ts | 2 +- src/queries.ts | 2 +- src/routes.ts | 43 +- src/services.ts | 26 +- src/test/00-basics.ts | 10 +- src/test/01-manipulating-databases.ts | 2 +- src/test/02-accessing-collections.ts | 28 +- src/test/03-accessing-graphs.ts | 20 +- src/test/04-transactions.ts | 16 +- src/test/05-aql-helpers.ts | 18 +- src/test/06-managing-functions.ts | 4 +- src/test/07-routes.ts | 2 +- src/test/08-cursors.ts | 8 +- src/test/09-collection-metadata.ts | 2 +- src/test/10-manipulating-collections.ts | 10 +- src/test/11-managing-indexes.ts | 2 +- src/test/13-bulk-imports.ts | 8 +- src/test/14-document-collections.ts | 4 +- src/test/15-edge-collections.ts | 10 +- src/test/16-graphs.ts | 22 +- src/test/17-graph-vertices.ts | 18 +- src/test/18-graph-edges.ts | 25 +- src/test/19-graph-vertex-collections.ts | 7 +- src/test/20-graph-edge-collections.ts | 7 +- src/test/22-foxx-api.ts | 146 +++---- src/test/23-aql-queries-stream.ts | 14 +- src/test/24-accessing-views.ts | 12 +- src/test/26-manipulating-views.ts | 7 +- src/test/27-query-management.ts | 12 +- src/test/28-accessing-analyzers.ts | 16 +- src/test/29-manipulating-analyzers.ts | 2 +- src/test/29-queue-time.ts | 18 +- src/test/30-concurrent-transactions.ts | 26 +- src/test/31-conflicts.ts | 16 +- src/test/_config.ts | 6 +- src/transactions.ts | 29 +- src/users.ts | 2 +- src/views.ts | 90 +++-- 55 files changed, 1474 insertions(+), 1084 deletions(-) diff --git a/src/administration.ts b/src/administration.ts index 6009e8feb..262989829 100644 --- a/src/administration.ts +++ b/src/administration.ts @@ -58,7 +58,7 @@ export type EngineInfo = { * Index type aliases supported by the storage engine. */ indexes?: Record; - } + }; }; }; @@ -127,7 +127,7 @@ export type ServerStatusInformation = { */ foxxApi: boolean; /** - * A host identifier defined by the HOST or NODE_NAME environment variable, + * A host identifier defined by the HOST or NODE_NAME environment variable, * or a fallback value using a machine identifier or the cluster/Agency address. */ host: string; @@ -318,7 +318,7 @@ export type ClusterSupportInfo = { * Number of servers in the cluster. */ servers: number; - } + }; }; /** * (Cluster only.) Information about the ArangoDB instance as well as the @@ -347,4 +347,4 @@ export interface QueueTimeMetrics { * in seconds. 
*/ getAvg(): number; -}; +} diff --git a/src/analyzers.ts b/src/analyzers.ts index 8d7dc17ff..05a5f31a9 100644 --- a/src/analyzers.ts +++ b/src/analyzers.ts @@ -77,43 +77,42 @@ export type CreateAnalyzerOptions = type CreateAnalyzerOptionsType< Type extends AnalyzerType, - Properties = void + Properties = void, > = Properties extends void ? { - /** - * Type of the Analyzer. - */ - type: Type; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * This Analyzer does not take additional properties. - */ - properties?: Record; - } + /** + * Type of the Analyzer. + */ + type: Type; + /** + * Features to enable for this Analyzer. + */ + features?: AnalyzerFeature[]; + /** + * This Analyzer does not take additional properties. + */ + properties?: Record; + } : { - /** - * Type of the Analyzer. - */ - type: Type; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: Properties; - }; + /** + * Type of the Analyzer. + */ + type: Type; + /** + * Features to enable for this Analyzer. + */ + features?: AnalyzerFeature[]; + /** + * Additional properties for the Analyzer. + */ + properties: Properties; + }; /** * Options for creating an Identity Analyzer. */ -export type CreateIdentityAnalyzerOptions = CreateAnalyzerOptionsType< - "identity" ->; +export type CreateIdentityAnalyzerOptions = + CreateAnalyzerOptionsType<"identity">; /** * Options for creating a Delimiter Analyzer. @@ -122,12 +121,12 @@ export type CreateDelimiterAnalyzerOptions = CreateAnalyzerOptionsType< "delimiter", | string | { - /** - * This value will be used as delimiter to split text into tokens as - * specified in RFC 4180, without starting new records on newlines. - */ - delimiter: string; - } + /** + * This value will be used as delimiter to split text into tokens as + * specified in RFC 4180, without starting new records on newlines. + */ + delimiter: string; + } >; /** @@ -583,7 +582,7 @@ export type AnalyzerDescription = */ type AnalyzerDescriptionType< Type extends string, - Properties = Record + Properties = Record, > = { /** * A unique name for this Analyzer. @@ -932,43 +931,43 @@ export class Analyzer { * ``` */ create( - options: Options + options: Options, ): Promise< Options extends CreateIdentityAnalyzerOptions - ? IdentityAnalyzerDescription - : Options extends CreateDelimiterAnalyzerOptions - ? DelimiterAnalyzerDescription - : Options extends CreateStemAnalyzerOptions - ? StemAnalyzerDescription - : Options extends CreateNormAnalyzerOptions - ? NormAnalyzerDescription - : Options extends CreateNgramAnalyzerOptions - ? NgramAnalyzerDescription - : Options extends CreateTextAnalyzerOptions - ? TextAnalyzerDescription - : Options extends CreateSegmentationAnalyzerOptions - ? SegmentationAnalyzerDescription - : Options extends CreateAqlAnalyzerOptions - ? AqlAnalyzerDescription - : Options extends CreatePipelineAnalyzerOptions - ? PipelineAnalyzerDescription - : Options extends CreateStopwordsAnalyzerOptions - ? StopwordsAnalyzerDescription - : Options extends CreateCollationAnalyzerOptions - ? CollationAnalyzerDescription - : Options extends CreateMinHashAnalyzerOptions - ? MinHashAnalyzerDescription - : Options extends CreateClassificationAnalyzerOptions - ? ClassificationAnalyzerDescription - : Options extends CreateNearestNeighborsAnalyzerOptions - ? NearestNeighborsAnalyzerDescription - : Options extends CreateGeoJsonAnalyzerOptions - ? 
GeoJsonAnalyzerDescription - : Options extends CreateGeoPointAnalyzerOptions - ? GeoPointAnalyzerDescription - : Options extends CreateGeoS2AnalyzerOptions - ? GeoS2AnalyzerDescription - : AnalyzerDescription + ? IdentityAnalyzerDescription + : Options extends CreateDelimiterAnalyzerOptions + ? DelimiterAnalyzerDescription + : Options extends CreateStemAnalyzerOptions + ? StemAnalyzerDescription + : Options extends CreateNormAnalyzerOptions + ? NormAnalyzerDescription + : Options extends CreateNgramAnalyzerOptions + ? NgramAnalyzerDescription + : Options extends CreateTextAnalyzerOptions + ? TextAnalyzerDescription + : Options extends CreateSegmentationAnalyzerOptions + ? SegmentationAnalyzerDescription + : Options extends CreateAqlAnalyzerOptions + ? AqlAnalyzerDescription + : Options extends CreatePipelineAnalyzerOptions + ? PipelineAnalyzerDescription + : Options extends CreateStopwordsAnalyzerOptions + ? StopwordsAnalyzerDescription + : Options extends CreateCollationAnalyzerOptions + ? CollationAnalyzerDescription + : Options extends CreateMinHashAnalyzerOptions + ? MinHashAnalyzerDescription + : Options extends CreateClassificationAnalyzerOptions + ? ClassificationAnalyzerDescription + : Options extends CreateNearestNeighborsAnalyzerOptions + ? NearestNeighborsAnalyzerDescription + : Options extends CreateGeoJsonAnalyzerOptions + ? GeoJsonAnalyzerDescription + : Options extends CreateGeoPointAnalyzerOptions + ? GeoPointAnalyzerDescription + : Options extends CreateGeoS2AnalyzerOptions + ? GeoS2AnalyzerDescription + : AnalyzerDescription > { return this._db.request({ method: "POST", @@ -991,7 +990,9 @@ export class Analyzer { * // the Analyzer "some-analyzer" no longer exists * ``` */ - drop(force: boolean = false): Promise> { + drop( + force: boolean = false, + ): Promise> { return this._db.request({ method: "DELETE", pathname: `/_api/analyzer/${encodeURIComponent(this._name)}`, @@ -999,4 +1000,4 @@ export class Analyzer { }); } } -//#endregion \ No newline at end of file +//#endregion diff --git a/src/aql.ts b/src/aql.ts index a45ae84cb..5229ce3e4 100644 --- a/src/aql.ts +++ b/src/aql.ts @@ -224,7 +224,7 @@ export function aql( 2, strings[i] + src.strings[0], ...src.strings.slice(1, src.args.length), - src.strings[src.args.length] + strings[i + 1] + src.strings[src.args.length] + strings[i + 1], ); } else { query += rawValue.query + strings[i + 1]; @@ -319,7 +319,7 @@ export function aql( * ``` */ export function literal( - value: string | number | boolean | AqlLiteral | null | undefined + value: string | number | boolean | AqlLiteral | null | undefined, ): AqlLiteral { if (isAqlLiteral(value)) { return value; diff --git a/src/collections.ts b/src/collections.ts index f88180469..38929f214 100644 --- a/src/collections.ts +++ b/src/collections.ts @@ -26,7 +26,7 @@ import { COLLECTION_NOT_FOUND, DOCUMENT_NOT_FOUND } from "./lib/codes.js"; * @param collection - A value that might be a collection. */ export function isArangoCollection( - collection: any + collection: any, ): collection is ArangoCollection { return Boolean(collection && collection.isArangoCollection); } @@ -38,7 +38,7 @@ export function isArangoCollection( * @param collection - Collection name or {@link ArangoCollection} object. 
*/ export function collectionToString( - collection: string | ArangoCollection + collection: string | ArangoCollection, ): string { if (isArangoCollection(collection)) { return String(collection.name); @@ -663,8 +663,10 @@ export interface DocumentCollection< create( options?: CreateCollectionOptions & { type?: CollectionType; - } - ): Promise>; + }, + ): Promise< + connection.ArangoApiResponse + >; /** * Retrieves the collection's properties. * @@ -692,8 +694,10 @@ export interface DocumentCollection< * ``` */ properties( - properties: CollectionPropertiesOptions - ): Promise>; + properties: CollectionPropertiesOptions, + ): Promise< + connection.ArangoApiResponse + >; /** * Retrieves information about the number of documents in a collection. * @@ -741,11 +745,11 @@ export interface DocumentCollection< * ``` */ figures( - details?: boolean + details?: boolean, ): Promise< connection.ArangoApiResponse< CollectionDescription & - CollectionProperties & { count: number; figures: Record } + CollectionProperties & { count: number; figures: Record } > >; /** @@ -778,7 +782,7 @@ export interface DocumentCollection< * ``` */ checksum( - options?: CollectionChecksumOptions + options?: CollectionChecksumOptions, ): Promise< connection.ArangoApiResponse< CollectionDescription & { revision: string; checksum: string } @@ -806,7 +810,9 @@ export interface DocumentCollection< * // collection1 and collection3 represent the same ArangoDB collection! * ``` */ - rename(newName: string): Promise>; + rename( + newName: string, + ): Promise>; /** * Deletes all documents in the collection. * @@ -819,7 +825,9 @@ export interface DocumentCollection< * // the collection "some-collection" is now empty * ``` */ - truncate(options?: TruncateCollectionOptions): Promise>; + truncate( + options?: TruncateCollectionOptions, + ): Promise>; /** * Deletes the collection from the database. * @@ -834,7 +842,7 @@ export interface DocumentCollection< * ``` */ drop( - options?: DropCollectionOptions + options?: DropCollectionOptions, ): Promise>; /** * Triggers compaction for a collection. @@ -864,7 +872,7 @@ export interface DocumentCollection< * ``` */ getResponsibleShard( - document: Partial> + document: Partial>, ): Promise; /** * Derives a document `_id` from the given selector for this collection. @@ -922,7 +930,7 @@ export interface DocumentCollection< */ documentExists( selector: documents.DocumentSelector, - options?: documents.DocumentExistsOptions + options?: documents.DocumentExistsOptions, ): Promise; /** * Retrieves the document matching the given key or id. @@ -960,7 +968,7 @@ export interface DocumentCollection< */ document( selector: documents.DocumentSelector, - options?: documents.ReadDocumentOptions + options?: documents.ReadDocumentOptions, ): Promise>; /** * Retrieves the document matching the given key or id. @@ -999,7 +1007,7 @@ export interface DocumentCollection< */ document( selector: documents.DocumentSelector, - graceful: boolean + graceful: boolean, ): Promise>; /** * Retrieves the documents matching the given key or id values. @@ -1025,7 +1033,7 @@ export interface DocumentCollection< */ documents( selectors: (string | documents.ObjectWithDocumentKey)[], - options?: documents.BulkReadDocumentsOptions + options?: documents.BulkReadDocumentsOptions, ): Promise[]>; /** * Inserts a new document with the given `data` into the collection. 
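The `save` signature above pairs naturally with a short usage sketch; the `Database` instance, the `"users"` collection name and the document contents are illustrative assumptions, not part of this patch:

```ts
import { Database } from "arangojs";

const db = new Database();
const users = db.collection("users");

// Insert a document and ask the server to return the stored version.
const meta = await users.save({ _key: "alice", age: 42 }, { returnNew: true });
console.log(meta._id); // "users/alice"
console.log(meta.new); // only present because returnNew was requested
```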
@@ -1046,7 +1054,7 @@ export interface DocumentCollection< */ save( data: documents.DocumentData, - options?: documents.InsertDocumentOptions + options?: documents.InsertDocumentOptions, ): Promise< documents.DocumentOperationMetadata & { new?: documents.Document; @@ -1076,13 +1084,13 @@ export interface DocumentCollection< */ saveAll( data: Array>, - options?: documents.InsertDocumentOptions + options?: documents.InsertDocumentOptions, ): Promise< Array< | (documents.DocumentOperationMetadata & { - new?: documents.Document; - old?: documents.Document; - }) + new?: documents.Document; + old?: documents.Document; + }) | documents.DocumentOperationFailure > >; @@ -1113,7 +1121,7 @@ export interface DocumentCollection< replace( selector: documents.DocumentSelector, newData: documents.DocumentData, - options?: documents.ReplaceDocumentOptions + options?: documents.ReplaceDocumentOptions, ): Promise< documents.DocumentOperationMetadata & { new?: documents.Document; @@ -1146,15 +1154,16 @@ export interface DocumentCollection< */ replaceAll( newData: Array< - documents.DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & + ({ _key: string } | { _id: string }) >, - options?: Omit + options?: Omit, ): Promise< Array< | (documents.DocumentOperationMetadata & { - new?: documents.Document; - old?: documents.Document; - }) + new?: documents.Document; + old?: documents.Document; + }) | documents.DocumentOperationFailure > >; @@ -1185,7 +1194,7 @@ export interface DocumentCollection< update( selector: documents.DocumentSelector, newData: documents.Patch>, - options?: documents.UpdateDocumentOptions + options?: documents.UpdateDocumentOptions, ): Promise< documents.DocumentOperationMetadata & { new?: documents.Document; @@ -1218,15 +1227,16 @@ export interface DocumentCollection< */ updateAll( newData: Array< - documents.Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & + ({ _key: string } | { _id: string }) >, - options?: Omit + options?: Omit, ): Promise< Array< | (documents.DocumentOperationMetadata & { - new?: documents.Document; - old?: documents.Document; - }) + new?: documents.Document; + old?: documents.Document; + }) | documents.DocumentOperationFailure > >; @@ -1259,8 +1269,10 @@ export interface DocumentCollection< */ remove( selector: documents.DocumentSelector, - options?: documents.RemoveDocumentOptions - ): Promise }>; + options?: documents.RemoveDocumentOptions, + ): Promise< + documents.DocumentMetadata & { old?: documents.Document } + >; /** * Removes existing documents from the collection. * @@ -1281,10 +1293,12 @@ export interface DocumentCollection< */ removeAll( selectors: (string | documents.ObjectWithDocumentKey)[], - options?: Omit + options?: Omit, ): Promise< Array< - | (documents.DocumentMetadata & { old?: documents.Document }) + | (documents.DocumentMetadata & { + old?: documents.Document; + }) | documents.DocumentOperationFailure > >; @@ -1309,7 +1323,7 @@ export interface DocumentCollection< */ import( data: documents.DocumentData[], - options?: documents.ImportDocumentsOptions + options?: documents.ImportDocumentsOptions, ): Promise; /** * Bulk imports the given `data` into the collection. @@ -1335,7 +1349,7 @@ export interface DocumentCollection< */ import( data: any[][], - options?: documents.ImportDocumentsOptions + options?: documents.ImportDocumentsOptions, ): Promise; /** * Bulk imports the given `data` into the collection. 
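A sketch of the two bulk `import` input shapes typed above; the collection name and document contents are assumptions for illustration:

```ts
import { Database } from "arangojs";

const db = new Database();
const users = db.collection("users");

// Object form: one object per document.
await users.import([
  { _key: "jcd", active: true },
  { _key: "arb", active: false },
]);

// Tuple form: the first array names the fields, each following array is one document.
await users.import([
  ["_key", "active"],
  ["jcd", true],
  ["arb", false],
]);
```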
@@ -1397,7 +1411,7 @@ export interface DocumentCollection< data: Buffer | Blob | string, options?: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; - } + }, ): Promise; //#endregion @@ -1436,8 +1450,12 @@ export interface DocumentCollection< * }); * ``` */ - indexes( - options?: indexes.ListIndexesOptions + indexes< + IndexType extends + | indexes.IndexDescription + | indexes.HiddenIndexDescription = indexes.IndexDescription, + >( + options?: indexes.ListIndexesOptions, ): Promise; /** * Returns an index description by name or `id` if it exists. @@ -1471,8 +1489,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsurePersistentIndexOptions - ): Promise>; + options: indexes.EnsurePersistentIndexOptions, + ): Promise< + connection.ArangoApiResponse< + indexes.PersistentIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a TTL index on the collection if it does not already exist. * @@ -1503,8 +1525,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureTtlIndexOptions - ): Promise>; + options: indexes.EnsureTtlIndexOptions, + ): Promise< + connection.ArangoApiResponse< + indexes.TtlIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a multi-dimensional index on the collection if it does not already exist. * @@ -1524,8 +1550,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureMdiIndexOptions - ): Promise>; + options: indexes.EnsureMdiIndexOptions, + ): Promise< + connection.ArangoApiResponse< + indexes.MdiIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a geo index on the collection if it does not already exist. * @@ -1544,8 +1574,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureGeoIndexOptions - ): Promise>; + options: indexes.EnsureGeoIndexOptions, + ): Promise< + connection.ArangoApiResponse< + indexes.GeoIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a inverted index on the collection if it does not already exist. * @@ -1563,8 +1597,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureInvertedIndexOptions - ): Promise>; + options: indexes.EnsureInvertedIndexOptions, + ): Promise< + connection.ArangoApiResponse< + indexes.InvertedIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates an index on the collection if it does not already exist. * @@ -1584,8 +1622,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureIndexOptions - ): Promise>; + options: indexes.EnsureIndexOptions, + ): Promise< + connection.ArangoApiResponse< + indexes.IndexDescription & { isNewlyCreated: boolean } + > + >; /** * Deletes the index with the given name or `id` from the database. * @@ -1600,7 +1642,7 @@ export interface DocumentCollection< * ``` */ dropIndex( - selector: indexes.IndexSelector + selector: indexes.IndexSelector, ): Promise>; //#endregion } @@ -1675,7 +1717,7 @@ export interface EdgeCollection< */ document( selector: documents.DocumentSelector, - options?: documents.ReadDocumentOptions + options?: documents.ReadDocumentOptions, ): Promise>; /** * Retrieves the document matching the given key or id. @@ -1714,7 +1756,7 @@ export interface EdgeCollection< */ document( selector: documents.DocumentSelector, - graceful: boolean + graceful: boolean, ): Promise>; /** * Retrieves the documents matching the given key or id values. 
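A sketch of fetching several documents in one call via `documents`; the `"knows"` edge collection and the keys are assumptions:

```ts
import { Database } from "arangojs";

const db = new Database();
const knows = db.collection("knows");

// Selectors can be plain keys or objects with a _key property.
const edges = await knows.documents([
  "alice-knows-bob",
  { _key: "bob-knows-carol" },
]);
console.log(edges.map((edge) => `${edge._from} -> ${edge._to}`));
```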
@@ -1740,7 +1782,7 @@ export interface EdgeCollection< */ documents( selectors: (string | documents.ObjectWithDocumentKey)[], - options?: documents.BulkReadDocumentsOptions + options?: documents.BulkReadDocumentsOptions, ): Promise[]>; /** * Inserts a new document with the given `data` into the collection. @@ -1760,7 +1802,7 @@ export interface EdgeCollection< */ save( data: documents.EdgeData, - options?: documents.InsertDocumentOptions + options?: documents.InsertDocumentOptions, ): Promise< documents.DocumentOperationMetadata & { new?: documents.Edge; @@ -1788,13 +1830,13 @@ export interface EdgeCollection< */ saveAll( data: Array>, - options?: documents.InsertDocumentOptions + options?: documents.InsertDocumentOptions, ): Promise< Array< | (documents.DocumentOperationMetadata & { - new?: documents.Edge; - old?: documents.Edge; - }) + new?: documents.Edge; + old?: documents.Edge; + }) | documents.DocumentOperationFailure > >; @@ -1833,7 +1875,7 @@ export interface EdgeCollection< replace( selector: documents.DocumentSelector, newData: documents.DocumentData, - options?: documents.ReplaceDocumentOptions + options?: documents.ReplaceDocumentOptions, ): Promise< documents.DocumentOperationMetadata & { new?: documents.Edge; @@ -1882,15 +1924,16 @@ export interface EdgeCollection< */ replaceAll( newData: Array< - documents.DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & + ({ _key: string } | { _id: string }) >, - options?: documents.ReplaceDocumentOptions + options?: documents.ReplaceDocumentOptions, ): Promise< Array< | (documents.DocumentOperationMetadata & { - new?: documents.Edge; - old?: documents.Edge; - }) + new?: documents.Edge; + old?: documents.Edge; + }) | documents.DocumentOperationFailure > >; @@ -1929,7 +1972,7 @@ export interface EdgeCollection< update( selector: documents.DocumentSelector, newData: documents.Patch>, - options?: documents.UpdateDocumentOptions + options?: documents.UpdateDocumentOptions, ): Promise< documents.DocumentOperationMetadata & { new?: documents.Edge; @@ -1976,15 +2019,16 @@ export interface EdgeCollection< */ updateAll( newData: Array< - documents.Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & + ({ _key: string } | { _id: string }) >, - options?: documents.UpdateDocumentOptions + options?: documents.UpdateDocumentOptions, ): Promise< Array< | (documents.DocumentOperationMetadata & { - new?: documents.Edge; - old?: documents.Edge; - }) + new?: documents.Edge; + old?: documents.Edge; + }) | documents.DocumentOperationFailure > >; @@ -2009,8 +2053,10 @@ export interface EdgeCollection< */ remove( selector: documents.DocumentSelector, - options?: documents.RemoveDocumentOptions - ): Promise }>; + options?: documents.RemoveDocumentOptions, + ): Promise< + documents.DocumentMetadata & { old?: documents.Edge } + >; /** * Removes existing documents from the collection. * @@ -2031,7 +2077,7 @@ export interface EdgeCollection< */ removeAll( selectors: documents.DocumentSelector[], - options?: documents.RemoveDocumentOptions + options?: documents.RemoveDocumentOptions, ): Promise< Array< | (documents.DocumentMetadata & { old?: documents.Edge }) @@ -2058,7 +2104,7 @@ export interface EdgeCollection< */ import( data: documents.EdgeData[], - options?: documents.ImportDocumentsOptions + options?: documents.ImportDocumentsOptions, ): Promise; /** * Bulk imports the given `data` into the collection. 
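Edges go through the same bulk `import` API; a sketch of the `_from`/`_to` shape (collection names and keys are assumptions):

```ts
import { Database } from "arangojs";

const db = new Database();
const knows = db.collection("knows");

// Each edge document references existing vertices by their _id.
await knows.import(
  [
    { _from: "users/alice", _to: "users/bob" },
    { _from: "users/bob", _to: "users/carol" },
  ],
  { onDuplicate: "ignore" } // how to handle documents with conflicting keys
);
```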
@@ -2083,7 +2129,7 @@ export interface EdgeCollection< */ import( data: any[][], - options?: documents.ImportDocumentsOptions + options?: documents.ImportDocumentsOptions, ): Promise; /** * Bulk imports the given `data` into the collection. @@ -2142,7 +2188,7 @@ export interface EdgeCollection< data: Buffer | Blob | string, options?: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; - } + }, ): Promise; //#endregion @@ -2174,8 +2220,10 @@ export interface EdgeCollection< */ edges( selector: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions - ): Promise>>; + options?: documents.DocumentEdgesOptions, + ): Promise< + connection.ArangoApiResponse> + >; /** * Retrieves a list of all incoming edges of the document matching the given * `selector`. @@ -2203,8 +2251,10 @@ export interface EdgeCollection< */ inEdges( selector: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions - ): Promise>>; + options?: documents.DocumentEdgesOptions, + ): Promise< + connection.ArangoApiResponse> + >; /** * Retrieves a list of all outgoing edges of the document matching the given * `selector`. @@ -2232,8 +2282,10 @@ export interface EdgeCollection< */ outEdges( selector: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions - ): Promise>>; + options?: documents.DocumentEdgesOptions, + ): Promise< + connection.ArangoApiResponse> + >; //#endregion } //#endregion @@ -2243,12 +2295,13 @@ export interface EdgeCollection< * @internal */ export class Collection< - EntryResultType extends Record = any, - EntryInputType extends Record = EntryResultType, -> + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + > implements - EdgeCollection, - DocumentCollection { + EdgeCollection, + DocumentCollection +{ protected _name: string; protected _db: databases.Database; @@ -2294,7 +2347,7 @@ export class Collection< create( options: CreateCollectionOptions & { type?: CollectionType; - } = {} + } = {}, ) { const { waitForSyncReplication = undefined, @@ -2337,8 +2390,10 @@ export class Collection< } properties( - properties?: CollectionPropertiesOptions - ): Promise> { + properties?: CollectionPropertiesOptions, + ): Promise< + connection.ArangoApiResponse + > { if (!properties) { return this._db.request({ pathname: `/_api/collection/${encodeURIComponent(this._name)}/properties`, @@ -2366,18 +2421,19 @@ export class Collection< { method: "PUT", pathname: `/_api/collection/${encodeURIComponent( - this._name + this._name, )}/recalculateCount`, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } figures( - details = false + details = false, ): Promise< connection.ArangoApiResponse< - CollectionDescription & CollectionProperties & { count: number; figures: Record } + CollectionDescription & + CollectionProperties & { count: number; figures: Record } > > { return this._db.request({ @@ -2397,7 +2453,7 @@ export class Collection< } checksum( - options?: CollectionChecksumOptions + options?: CollectionChecksumOptions, ): Promise< connection.ArangoApiResponse< CollectionDescription & { revision: string; checksum: string } @@ -2415,7 +2471,9 @@ export class Collection< return result; } - truncate(options?: TruncateCollectionOptions): Promise> { + truncate( + options?: TruncateCollectionOptions, + ): Promise> { return this._db.request({ method: "PUT", pathname: `/_api/collection/${this._name}/truncate`, @@ -2432,28 +2490,26 @@ export class Collection< } compact() { - return this._db.request( - { - 
method: "PUT", - pathname: `/_api/collection/${this._name}/compact`, - } - ); + return this._db.request({ + method: "PUT", + pathname: `/_api/collection/${this._name}/compact`, + }); } //#endregion //#region Document operations getResponsibleShard( - document: Partial> + document: Partial>, ): Promise { return this._db.request( { method: "PUT", pathname: `/_api/collection/${encodeURIComponent( - this._name + this._name, )}/responsibleShard`, body: document, }, - (res) => res.parsedBody.shardId + (res) => res.parsedBody.shardId, ); } @@ -2463,7 +2519,7 @@ export class Collection< async documentExists( selector: documents.DocumentSelector, - options: documents.DocumentExistsOptions = {} + options: documents.DocumentExistsOptions = {}, ): Promise { const { ifMatch = undefined, ifNoneMatch = undefined } = options; const headers = {} as Record; @@ -2474,7 +2530,7 @@ export class Collection< { method: "HEAD", pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name) + documents._documentHandle(selector, this._name), )}`, headers, }, @@ -2483,7 +2539,7 @@ export class Collection< throw new errors.HttpError(res); } return true; - } + }, ); } catch (err: any) { if (err.code === 404) { @@ -2495,7 +2551,7 @@ export class Collection< documents( selectors: (string | documents.ObjectWithDocumentKey)[], - options: documents.BulkReadDocumentsOptions = {} + options: documents.BulkReadDocumentsOptions = {}, ) { const { allowDirtyRead = undefined } = options; return this._db.request({ @@ -2509,7 +2565,7 @@ export class Collection< async document( selector: documents.DocumentSelector, - options: boolean | documents.ReadDocumentOptions = {} + options: boolean | documents.ReadDocumentOptions = {}, ) { if (typeof options === "boolean") { options = { graceful: options }; @@ -2526,7 +2582,7 @@ export class Collection< const result = this._db.request( { pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name) + documents._documentHandle(selector, this._name), )}`, headers, allowDirtyRead, @@ -2536,7 +2592,7 @@ export class Collection< throw new errors.HttpError(res); } return res.parsedBody; - } + }, ); if (!graceful) return result; try { @@ -2549,7 +2605,10 @@ export class Collection< } } - save(data: documents.DocumentData, options?: documents.InsertDocumentOptions) { + save( + data: documents.DocumentData, + options?: documents.InsertDocumentOptions, + ) { return this._db.request( { method: "POST", @@ -2557,13 +2616,13 @@ export class Collection< body: data, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody) + (res) => (options?.silent ? undefined : res.parsedBody), ); } saveAll( data: Array>, - options?: documents.InsertDocumentOptions + options?: documents.InsertDocumentOptions, ) { return this._db.request( { @@ -2572,14 +2631,14 @@ export class Collection< body: data, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody) + (res) => (options?.silent ? 
undefined : res.parsedBody), ); } replace( selector: documents.DocumentSelector, newData: documents.DocumentData, - options: documents.ReplaceDocumentOptions = {} + options: documents.ReplaceDocumentOptions = {}, ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; @@ -2588,21 +2647,22 @@ export class Collection< { method: "PUT", pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name) + documents._documentHandle(selector, this._name), )}`, headers, body: newData, search: opts, }, - (res) => (options?.silent ? undefined : res.parsedBody) + (res) => (options?.silent ? undefined : res.parsedBody), ); } replaceAll( newData: Array< - documents.DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & + ({ _key: string } | { _id: string }) >, - options?: documents.ReplaceDocumentOptions + options?: documents.ReplaceDocumentOptions, ) { return this._db.request( { @@ -2611,14 +2671,14 @@ export class Collection< body: newData, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody) + (res) => (options?.silent ? undefined : res.parsedBody), ); } update( selector: documents.DocumentSelector, newData: documents.Patch>, - options: documents.UpdateDocumentOptions = {} + options: documents.UpdateDocumentOptions = {}, ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; @@ -2627,21 +2687,22 @@ export class Collection< { method: "PATCH", pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name) + documents._documentHandle(selector, this._name), )}`, headers, body: newData, search: opts, }, - (res) => (options?.silent ? undefined : res.parsedBody) + (res) => (options?.silent ? undefined : res.parsedBody), ); } updateAll( newData: Array< - documents.Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & + ({ _key: string } | { _id: string }) >, - options?: documents.UpdateDocumentOptions + options?: documents.UpdateDocumentOptions, ) { return this._db.request( { @@ -2650,11 +2711,14 @@ export class Collection< body: newData, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody) + (res) => (options?.silent ? undefined : res.parsedBody), ); } - remove(selector: documents.DocumentSelector, options: documents.RemoveDocumentOptions = {}) { + remove( + selector: documents.DocumentSelector, + options: documents.RemoveDocumentOptions = {}, + ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; if (ifMatch) headers["if-match"] = ifMatch; @@ -2662,18 +2726,18 @@ export class Collection< { method: "DELETE", pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name) + documents._documentHandle(selector, this._name), )}`, headers, search: opts, }, - (res) => (options?.silent ? undefined : res.parsedBody) + (res) => (options?.silent ? undefined : res.parsedBody), ); } removeAll( selectors: (string | documents.ObjectWithDocumentKey)[], - options?: documents.RemoveDocumentOptions + options?: documents.RemoveDocumentOptions, ) { return this._db.request( { @@ -2682,7 +2746,7 @@ export class Collection< body: selectors, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody) + (res) => (options?.silent ? 
undefined : res.parsedBody), ); } @@ -2690,7 +2754,7 @@ export class Collection< data: Buffer | Blob | string | any[], options: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; - } = {} + } = {}, ): Promise { const search = { ...options, collection: this._name }; if (Array.isArray(data)) { @@ -2712,7 +2776,7 @@ export class Collection< protected _edges( selector: documents.DocumentSelector, options: documents.DocumentEdgesOptions = {}, - direction?: "in" | "out" + direction?: "in" | "out", ) { const { allowDirtyRead = undefined } = options; return this._db.request({ @@ -2725,15 +2789,24 @@ export class Collection< }); } - edges(vertex: documents.DocumentSelector, options?: documents.DocumentEdgesOptions) { + edges( + vertex: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions, + ) { return this._edges(vertex, options); } - inEdges(vertex: documents.DocumentSelector, options?: documents.DocumentEdgesOptions) { + inEdges( + vertex: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions, + ) { return this._edges(vertex, options, "in"); } - outEdges(vertex: documents.DocumentSelector, options?: documents.DocumentEdgesOptions) { + outEdges( + vertex: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions, + ) { return this._edges(vertex, options, "out"); } //#endregion @@ -2744,10 +2817,10 @@ export class Collection< { method: "PUT", pathname: `/_api/collection/${encodeURIComponent( - this._name + this._name, )}/loadIndexesIntoMemory`, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -2757,7 +2830,7 @@ export class Collection< pathname: "/_api/index", search: { collection: this._name, ...options }, }, - (res) => res.parsedBody.indexes + (res) => res.parsedBody.indexes, ); } @@ -2784,4 +2857,4 @@ export class Collection< } //#endregion } -//#endregion \ No newline at end of file +//#endregion diff --git a/src/configuration.ts b/src/configuration.ts index 54e703cda..69ee050c5 100644 --- a/src/configuration.ts +++ b/src/configuration.ts @@ -54,7 +54,9 @@ export type BearerAuthCredentials = { /** * Determines if the given credentials are for Bearer token authentication. */ -export function isBearerAuth(auth: BasicAuthCredentials | BearerAuthCredentials): auth is BearerAuthCredentials { +export function isBearerAuth( + auth: BasicAuthCredentials | BearerAuthCredentials, +): auth is BearerAuthCredentials { return auth.hasOwnProperty("token"); } //#endregion @@ -190,4 +192,4 @@ export type ConfigOptions = connection.CommonRequestOptions & { */ responseQueueTimeSamples?: number; }; -//#endregion \ No newline at end of file +//#endregion diff --git a/src/connection.ts b/src/connection.ts index 5cd5883f1..336678988 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -11,13 +11,13 @@ import * as administration from "./administration.js"; import * as configuration from "./configuration.js"; import * as databases from "./databases.js"; import * as errors from "./errors.js"; +import { ERROR_ARANGO_CONFLICT } from "./lib/codes.js"; import * as util from "./lib/util.js"; import { LinkedList } from "./lib/x3-linkedlist.js"; -import { ERROR_ARANGO_CONFLICT } from "./lib/codes.js"; const MIME_JSON = /\/(json|javascript)(\W|$)/; const LEADER_ENDPOINT_HEADER = "x-arango-endpoint"; -const REASON_TIMEOUT = 'timeout'; +const REASON_TIMEOUT = "timeout"; //#region Host /** @@ -32,7 +32,17 @@ type Host = { * @param pathname - URL path, relative to the server URL. 
* @param options - Options for this fetch request. */ - fetch: (options: Omit) => Promise; + fetch: ( + options: Omit< + RequestOptions, + | "maxRetries" + | "retryOnConflict" + | "allowDirtyRead" + | "hostUrl" + | "expectBinary" + | "isBinary" + >, + ) => Promise; /** * @internal * @@ -68,7 +78,7 @@ function createHost(arangojsHostUrl: string, agentOptions?: any): Host { }; } if (agentOptions) { - createDispatcher = (async () => { + createDispatcher = async () => { let undici: any; try { // Prevent overzealous bundlers from attempting to bundle undici @@ -76,35 +86,45 @@ function createHost(arangojsHostUrl: string, agentOptions?: any): Host { undici = await import(undiciName); } catch (cause) { if (socketPath) { - throw new Error('Undici is required for Unix domain sockets', { cause }); + throw new Error("Undici is required for Unix domain sockets", { + cause, + }); } - throw new Error('Undici is required when using config.agentOptions', { cause }); + throw new Error("Undici is required when using config.agentOptions", { + cause, + }); } fetch = undici.fetch; return new undici.Agent(agentOptions); - }); + }; } const pending = new Map(); return { - async fetch( - { - method, - pathname, - search, - headers: requestHeaders, - body, - timeout, - fetchOptions, - beforeRequest, - afterResponse, - }: Omit) { + async fetch({ + method, + pathname, + search, + headers: requestHeaders, + body, + timeout, + fetchOptions, + beforeRequest, + afterResponse, + }: Omit< + RequestOptions, + | "maxRetries" + | "retryOnConflict" + | "allowDirtyRead" + | "hostUrl" + | "expectBinary" + | "isBinary" + >) { const url = new URL(pathname + baseUrl.search, baseUrl); if (search) { - const searchParams = ( + const searchParams = search instanceof URLSearchParams ? search - : new URLSearchParams(search) - ); + : new URLSearchParams(search); for (const [key, value] of searchParams) { url.searchParams.append(key, value); } @@ -114,8 +134,8 @@ function createHost(arangojsHostUrl: string, agentOptions?: any): Host { headers.set( "authorization", `Basic ${btoa( - `${baseUrl.username || "root"}:${baseUrl.password || ""}` - )}` + `${baseUrl.username || "root"}:${baseUrl.password || ""}`, + )}`, ); } const abortController = new AbortController(); @@ -147,14 +167,20 @@ function createHost(arangojsHostUrl: string, agentOptions?: any): Host { } let response: globalThis.Response & { request: globalThis.Request }; try { - response = Object.assign(await fetch(request), { request, arangojsHostUrl }); + response = Object.assign(await fetch(request), { + request, + arangojsHostUrl, + }); } catch (e: unknown) { const cause = e instanceof Error ? e : new Error(String(e)); let error: errors.NetworkError; if (signal.aborted) { - const reason = typeof signal.reason == 'string' ? signal.reason : undefined; + const reason = + typeof signal.reason == "string" ? 
signal.reason : undefined; if (reason === REASON_TIMEOUT) { - error = new errors.ResponseTimeoutError(undefined, request, { cause }); + error = new errors.ResponseTimeoutError(undefined, request, { + cause, + }); } else { error = new errors.RequestAbortedError(reason, request, { cause }); } @@ -249,9 +275,11 @@ const STATUS_CODE_DEFAULT_MESSAGES = { }; type KnownStatusCode = keyof typeof STATUS_CODE_DEFAULT_MESSAGES; -const KNOWN_STATUS_CODES = Object.keys(STATUS_CODE_DEFAULT_MESSAGES).map((k) => Number(k)) as KnownStatusCode[]; +const KNOWN_STATUS_CODES = Object.keys(STATUS_CODE_DEFAULT_MESSAGES).map((k) => + Number(k), +) as KnownStatusCode[]; const REDIRECT_CODES = [301, 302, 303, 307, 308] satisfies KnownStatusCode[]; -type RedirectStatusCode = typeof REDIRECT_CODES[number]; +type RedirectStatusCode = (typeof REDIRECT_CODES)[number]; /** * @internal @@ -340,7 +368,7 @@ export type ArangoErrorResponse = { * for more information. */ errorNum: number; -} +}; /** * Processed response object. @@ -360,7 +388,7 @@ export interface ProcessedResponse extends globalThis.Response { * Parsed response body. */ parsedBody?: T; -}; +} //#endregion //#region Request options @@ -374,7 +402,10 @@ export type CommonFetchOptions = { * Note that the `Authorization` header will be overridden if the `auth` * configuration option is set. */ - headers?: string[][] | Record> | Headers; + headers?: + | string[][] + | Record> + | Headers; /** * Controls whether the socket should be reused for subsequent requests. * @@ -390,7 +421,7 @@ export type CommonFetchOptions = { * * Default: `"follow"` */ - redirect?: 'error' | 'follow' | 'manual'; + redirect?: "error" | "follow" | "manual"; /** * Value to use for the `Referer` header. * @@ -424,7 +455,7 @@ export type CommonFetchOptions = { * * Default: `"same-origin"` */ - credentials?: 'omit' | 'include' | 'same-origin'; + credentials?: "omit" | "include" | "same-origin"; /** * (Node.js only.) Undici `Dispatcher` instance to use for the request. * @@ -450,7 +481,7 @@ export type CommonFetchOptions = { * * Default: `"auto"` */ - priority?: 'low' | 'high' | 'auto'; + priority?: "low" | "high" | "auto"; /** * (Browser only.) Policy to use for the `Referer` header, equivalent to the * semantics of the `Referrer-Policy` header. @@ -543,7 +574,10 @@ export type CommonRequestOptions = { * @param err - Error encountered when handling this request or `null`. * @param res - Response object for this request, if no error occurred. */ - afterResponse?: (err: errors.NetworkError | null, res?: globalThis.Response & { request: globalThis.Request; }) => void | Promise; + afterResponse?: ( + err: errors.NetworkError | null, + res?: globalThis.Response & { request: globalThis.Request }, + ) => void | Promise; }; /** @@ -576,7 +610,10 @@ export type RequestOptions = CommonRequestOptions & { * Note that the `Authorization` header will be overridden if the `auth` * configuration option is set. */ - headers?: string[][] | Record> | Headers; + headers?: + | string[][] + | Record> + | Headers; /** * Request body data. */ @@ -662,9 +699,10 @@ export class Connection { arangoVersion = 31100, loadBalancingStrategy = "NONE", maxRetries = 0, - poolSize = 3 * ( - loadBalancingStrategy === "ROUND_ROBIN" && Array.isArray(url) ? url.length : 1 - ), + poolSize = 3 * + (loadBalancingStrategy === "ROUND_ROBIN" && Array.isArray(url) + ? 
url.length + : 1), fetchOptions: { headers, ...commonFetchOptions } = {}, onError, precaptureStackTraces = false, @@ -674,9 +712,8 @@ export class Connection { const URLS = Array.isArray(url) ? url : [url]; this._loadBalancingStrategy = loadBalancingStrategy; this._precaptureStackTraces = precaptureStackTraces; - this._responseQueueTimeSamples = ( - responseQueueTimeSamples < 0 ? Infinity : responseQueueTimeSamples - ); + this._responseQueueTimeSamples = + responseQueueTimeSamples < 0 ? Infinity : responseQueueTimeSamples; this._arangoVersion = arangoVersion; this._taskPoolSize = poolSize; this._onError = onError; @@ -689,11 +726,11 @@ export class Connection { this._commonFetchOptions.headers.set( "x-arango-version", - String(arangoVersion) + String(arangoVersion), ); this._commonFetchOptions.headers.set( "x-arango-driver", - `arangojs/${process.env.ARANGOJS_VERSION} (cloud)` + `arangojs/${process.env.ARANGOJS_VERSION} (cloud)`, ); this.addToHostList(URLS); @@ -762,10 +799,9 @@ export class Connection { request: globalThis.Request; arangojsHostUrl: string; parsedBody?: any; - } = Object.assign( - await host.fetch(task.options), - { arangojsHostUrl: hostUrl } - ); + } = Object.assign(await host.fetch(task.options), { + arangojsHostUrl: hostUrl, + }); const leaderEndpoint = res.headers.get(LEADER_ENDPOINT_HEADER); if (res.status === 503 && leaderEndpoint) { const [cleanUrl] = this.addToHostList(leaderEndpoint); @@ -838,7 +874,8 @@ export class Connection { err.isSafeToRetry && task.options.hostUrl === undefined && this._commonRequestOptions.maxRetries !== false && - task.retries < (this._commonRequestOptions.maxRetries || this._hosts.length - 1) + task.retries < + (this._commonRequestOptions.maxRetries || this._hosts.length - 1) ) { task.retries += 1; this._queue.push(task); @@ -871,7 +908,7 @@ export class Connection { setBasicAuth(auth: configuration.BasicAuthCredentials) { this.setHeader( "authorization", - `Basic ${btoa(`${auth.username}:${auth.password}`)}` + `Basic ${btoa(`${auth.username}:${auth.password}`)}`, ); } @@ -903,7 +940,10 @@ export class Connection { * @param databaseName - Name of the database. * @param database - Database instance to add to the cache. */ - database(databaseName: string, database: databases.Database): databases.Database; + database( + databaseName: string, + database: databases.Database, + ): databases.Database; /** * @internal * @@ -916,7 +956,7 @@ export class Connection { database(databaseName: string, database: null): undefined; database( databaseName: string, - database?: databases.Database | null + database?: databases.Database | null, ): databases.Database | undefined { if (database === null) { this._databases.delete(databaseName); @@ -947,7 +987,7 @@ export class Connection { const i = this._hostUrls.indexOf(url); if (i !== -1) return this._hosts[i]; return createHost(url); - }) + }), ); this._hostUrls.splice(0, this._hostUrls.length, ...cleanUrls); } @@ -963,15 +1003,13 @@ export class Connection { */ addToHostList(urls: string | string[]): string[] { const cleanUrls = (Array.isArray(urls) ? 
urls : [urls]).map((url) => - util.normalizeUrl(url) + util.normalizeUrl(url), ); const newUrls = cleanUrls.filter( - (url) => this._hostUrls.indexOf(url) === -1 + (url) => this._hostUrls.indexOf(url) === -1, ); this._hostUrls.push(...newUrls); - this._hosts.push( - ...newUrls.map(url => createHost(url)) - ); + this._hosts.push(...newUrls.map((url) => createHost(url))); return cleanUrls; } @@ -1063,10 +1101,9 @@ export class Connection { }); } catch (e) { if (endOfTime < Date.now()) { - throw new errors.PropagationTimeoutError( - undefined, - { cause: e as Error } - ); + throw new errors.PropagationTimeoutError(undefined, { + cause: e as Error, + }); } await new Promise((resolve) => setTimeout(resolve, 1000)); continue; @@ -1082,9 +1119,16 @@ export class Connection { * * Performs a request using the arangojs connection pool. */ - async request( + async request< + T = globalThis.Response & { request: globalThis.Request; parsedBody?: any }, + >( requestOptions: RequestOptions & { isBinary?: boolean }, - transform?: (res: globalThis.Response & { request: globalThis.Request; parsedBody?: any }) => T + transform?: ( + res: globalThis.Response & { + request: globalThis.Request; + parsedBody?: any; + }, + ) => T, ): Promise { const { hostUrl, @@ -1102,7 +1146,7 @@ export class Connection { const headers = util.mergeHeaders( this._commonFetchOptions.headers, - requestHeaders + requestHeaders, ); let body = requestBody; @@ -1164,7 +1208,9 @@ export class Connection { task.stack = () => `\n${capture.stack.split("\n").slice(3).join("\n")}`; } else { - const capture = util.generateStackTrace() as { readonly stack: string }; + const capture = util.generateStackTrace() as { + readonly stack: string; + }; if (Object.prototype.hasOwnProperty.call(capture, "stack")) { task.stack = () => `\n${capture.stack.split("\n").slice(4).join("\n")}`; @@ -1177,4 +1223,4 @@ export class Connection { }); } } -//#endregion \ No newline at end of file +//#endregion diff --git a/src/cursors.ts b/src/cursors.ts index bff892b78..1e9c1adc7 100644 --- a/src/cursors.ts +++ b/src/cursors.ts @@ -184,10 +184,10 @@ export class BatchCursor { count: number; }, hostUrl?: string, - allowDirtyRead?: boolean + allowDirtyRead?: boolean, ) { const batches = new LinkedList( - body.result.length ? [new LinkedList(body.result)] : [] + body.result.length ? 
[new LinkedList(body.result)] : [], ); this._db = db; this._batches = batches; @@ -319,7 +319,11 @@ export class BatchCursor { * } * ``` */ - async *[Symbol.asyncIterator](): AsyncGenerator { + async *[Symbol.asyncIterator](): AsyncGenerator< + ItemType[], + undefined, + undefined + > { while (this.hasNext) { yield this.next() as Promise; } @@ -457,7 +461,11 @@ export class BatchCursor { * ``` */ async forEach( - callback: (currentBatch: ItemType[], index: number, self: this) => false | void + callback: ( + currentBatch: ItemType[], + index: number, + self: this, + ) => false | void, ): Promise { let index = 0; while (this.hasNext) { @@ -500,7 +508,7 @@ export class BatchCursor { * ``` */ async map( - callback: (currentBatch: ItemType[], index: number, self: this) => R + callback: (currentBatch: ItemType[], index: number, self: this) => R, ): Promise { let index = 0; const result: any[] = []; @@ -557,7 +565,7 @@ export class BatchCursor { * ``` */ async flatMap( - callback: (currentBatch: ItemType[], index: number, self: this) => R | R[] + callback: (currentBatch: ItemType[], index: number, self: this) => R | R[], ): Promise { let index = 0; const result: any[] = []; @@ -648,9 +656,9 @@ export class BatchCursor { accumulator: R, currentBatch: ItemType[], index: number, - self: this + self: this, ) => R, - initialValue: R + initialValue: R, ): Promise; /** @@ -689,17 +697,17 @@ export class BatchCursor { accumulator: ItemType[] | R, currentBatch: ItemType[], index: number, - self: this - ) => R + self: this, + ) => R, ): Promise; async reduce( reducer: ( accumulator: R, currentBatch: ItemType[], index: number, - self: this + self: this, ) => R, - initialValue?: R + initialValue?: R, ): Promise { let index = 0; if (!this.hasNext) return initialValue; @@ -751,7 +759,7 @@ export class BatchCursor { () => { this._hasMore = false; return undefined; - } + }, ); } } @@ -792,7 +800,10 @@ export class Cursor { /** * @internal */ - constructor(batchedCursor: BatchCursor, view: BatchCursorItemsView) { + constructor( + batchedCursor: BatchCursor, + view: BatchCursorItemsView, + ) { this._batches = batchedCursor; this._view = view; } @@ -865,7 +876,11 @@ export class Cursor { * } * ``` */ - async *[Symbol.asyncIterator](): AsyncGenerator { + async *[Symbol.asyncIterator](): AsyncGenerator< + ItemType, + undefined, + undefined + > { while (this.hasNext) { yield this.next() as Promise; } @@ -953,7 +968,11 @@ export class Cursor { * ``` */ async forEach( - callback: (currentValue: ItemType, index: number, self: this) => false | void + callback: ( + currentValue: ItemType, + index: number, + self: this, + ) => false | void, ): Promise { let index = 0; while (this.hasNext) { @@ -992,7 +1011,7 @@ export class Cursor { * ``` */ async map( - callback: (currentValue: ItemType, index: number, self: this) => R + callback: (currentValue: ItemType, index: number, self: this) => R, ): Promise { let index = 0; const result: any[] = []; @@ -1043,7 +1062,7 @@ export class Cursor { * ``` */ async flatMap( - callback: (currentValue: ItemType, index: number, self: this) => R | R[] + callback: (currentValue: ItemType, index: number, self: this) => R | R[], ): Promise { let index = 0; const result: any[] = []; @@ -1123,8 +1142,13 @@ export class Cursor { * ``` */ async reduce( - reducer: (accumulator: R, currentValue: ItemType, index: number, self: this) => R, - initialValue: R + reducer: ( + accumulator: R, + currentValue: ItemType, + index: number, + self: this, + ) => R, + initialValue: R, ): Promise; /** * Depletes the 
cursor by applying the `reducer` function to each item in @@ -1160,12 +1184,17 @@ export class Cursor { accumulator: ItemType | R, currentValue: ItemType, index: number, - self: this - ) => R + self: this, + ) => R, ): Promise; async reduce( - reducer: (accumulator: R, currentValue: ItemType, index: number, self: this) => R, - initialValue?: R + reducer: ( + accumulator: R, + currentValue: ItemType, + index: number, + self: this, + ) => R, + initialValue?: R, ): Promise { let index = 0; if (!this.hasNext) return initialValue; diff --git a/src/databases.ts b/src/databases.ts index 62a7bc6dd..930b262c0 100644 --- a/src/databases.ts +++ b/src/databases.ts @@ -144,7 +144,7 @@ export class Database { protected _graphs = new Map(); protected _views = new Map(); protected _trapRequest?: ( - trapped: TrappedError | TrappedRequest + trapped: TrappedError | TrappedRequest, ) => void; /** @@ -184,8 +184,12 @@ export class Database { */ constructor(database: Database, name?: string); constructor( - configOrDatabase: string | string[] | configuration.ConfigOptions | Database = {}, - name?: string + configOrDatabase: + | string + | string[] + | configuration.ConfigOptions + | Database = {}, + name?: string, ) { if (isArangoDatabase(configOrDatabase)) { const connection = configOrDatabase._connection; @@ -244,7 +248,10 @@ export class Database { * // with JSON request body '{"username": "admin", "password": "hunter2"}' * ``` */ - route(path?: string, headers?: Headers | Record): routes.Route { + route( + path?: string, + headers?: Headers | Record, + ): routes.Route { return new routes.Route(this, path, headers); } @@ -261,7 +268,7 @@ export class Database { */ async request( options: connection.RequestOptions, - transform?: (res: connection.ProcessedResponse) => ReturnType + transform?: (res: connection.ProcessedResponse) => ReturnType, ): Promise; /** * @internal @@ -275,16 +282,16 @@ export class Database { */ async request( options: connection.RequestOptions, - transform: false + transform: false, ): Promise>; async request( - { - pathname, - ...opts - }: connection.RequestOptions, - transform: false | ((res: connection.ProcessedResponse) => ReturnType) = (res) => res.parsedBody as ReturnType + { pathname, ...opts }: connection.RequestOptions, + transform: + | false + | ((res: connection.ProcessedResponse) => ReturnType) = (res) => + res.parsedBody as ReturnType, ): Promise { - pathname = util.joinPath('_db', encodeURIComponent(this._name), pathname); + pathname = util.joinPath("_db", encodeURIComponent(this._name), pathname); if (this._trapRequest) { const trap = this._trapRequest; this._trapRequest = undefined; @@ -316,7 +323,7 @@ export class Database { } return this._connection.request( { pathname, ...opts }, - transform || undefined + transform || undefined, ); } @@ -350,7 +357,7 @@ export class Database { const urls: string[] = await this.request( { pathname: "/_api/cluster/endpoints" }, (res) => - res.parsedBody.endpoints.map((endpoint: any) => endpoint.endpoint) + res.parsedBody.endpoints.map((endpoint: any) => endpoint.endpoint), ); if (urls.length > 0) { if (overwrite) this._connection.setHostList(urls); @@ -414,18 +421,22 @@ export class Database { */ async waitForPropagation( request: connection.RequestOptions, - timeout?: number + timeout?: number, ): Promise; async waitForPropagation( { pathname, ...request }: connection.RequestOptions, - timeout?: number + timeout?: number, ): Promise { await this._connection.waitForPropagation( { ...request, - pathname: util.joinPath('_db', 
encodeURIComponent(this._name), pathname), + pathname: util.joinPath( + "_db", + encodeURIComponent(this._name), + pathname, + ), }, - timeout + timeout, ); } @@ -513,7 +524,7 @@ export class Database { (res) => { this.useBearerAuth(res.parsedBody.jwt); return res.parsedBody.jwt; - } + }, ); } @@ -541,7 +552,7 @@ export class Database { if (!res.parsedBody.jwt) return null; this.useBearerAuth(res.parsedBody.jwt); return res.parsedBody.jwt; - } + }, ); } //#endregion @@ -599,7 +610,7 @@ export class Database { method: "GET", pathname: "/_admin/time", }, - (res) => res.parsedBody.time * 1000 + (res) => res.parsedBody.time * 1000, ); } @@ -636,15 +647,23 @@ export class Database { * // availability is either "default", "readonly", or false * ``` */ - async availability(graceful = false): Promise { + async availability( + graceful = false, + ): Promise { try { - return this.request({ - method: "GET", - pathname: "/_admin/server/availability", - }, (res) => res.parsedBody.mode); + return this.request( + { + method: "GET", + pathname: "/_admin/server/availability", + }, + (res) => res.parsedBody.mode, + ); } catch (e) { if (graceful) return false; - if ((errors.isArangoError(e) || e instanceof errors.HttpError) && e.code === 503) { + if ( + (errors.isArangoError(e) || e instanceof errors.HttpError) && + e.code === 503 + ) { return false; } throw e; @@ -656,7 +675,9 @@ export class Database { * * Note that this API may reveal sensitive data about the deployment. */ - supportInfo(): Promise { + supportInfo(): Promise< + administration.SingleServerSupportInfo | administration.ClusterSupportInfo + > { return this.request({ method: "GET", pathname: "/_admin/support-info", @@ -672,7 +693,7 @@ export class Database { method: "DELETE", pathname: "/_admin/shutdown", }, - () => undefined + () => undefined, ); } //#endregion @@ -690,7 +711,7 @@ export class Database { getClusterImbalance(): Promise { return this.request( { pathname: "/_admin/cluster/rebalance" }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -710,7 +731,7 @@ export class Database { * ``` */ computeClusterRebalance( - options: cluster.ClusterRebalanceOptions + options: cluster.ClusterRebalanceOptions, ): Promise { return this.request( { @@ -721,7 +742,7 @@ export class Database { ...options, }, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -740,7 +761,9 @@ export class Database { * } * ``` */ - executeClusterRebalance(moves: cluster.ClusterRebalanceMove[]): Promise { + executeClusterRebalance( + moves: cluster.ClusterRebalanceMove[], + ): Promise { return this.request({ method: "POST", pathname: "/_admin/cluster/rebalance/execute", @@ -766,7 +789,7 @@ export class Database { * ``` */ rebalanceCluster( - opts: cluster.ClusterRebalanceOptions + opts: cluster.ClusterRebalanceOptions, ): Promise { return this.request({ method: "PUT", @@ -811,7 +834,7 @@ export class Database { get(): Promise { return this.request( { pathname: "/_api/database/current" }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -855,7 +878,7 @@ export class Database { */ createDatabase( databaseName: string, - options?: CreateDatabaseOptions + options?: CreateDatabaseOptions, ): Promise; /** * Creates a new database with the given `databaseName` with the given @@ -873,11 +896,13 @@ export class Database { */ createDatabase( databaseName: string, - users: users.CreateDatabaseUserOptions[] + users: users.CreateDatabaseUserOptions[], ): Promise; createDatabase( databaseName: string, - 
usersOrOptions: users.CreateDatabaseUserOptions[] | CreateDatabaseOptions = {} + usersOrOptions: + | users.CreateDatabaseUserOptions[] + | CreateDatabaseOptions = {}, ): Promise { const { users, ...options } = Array.isArray(usersOrOptions) ? { users: usersOrOptions } @@ -888,7 +913,7 @@ export class Database { pathname: "/_api/database", body: { name: databaseName, users, options }, }, - () => this.database(databaseName) + () => this.database(databaseName), ); } @@ -908,7 +933,7 @@ export class Database { listDatabases(): Promise { return this.request( { pathname: "/_api/database" }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -929,7 +954,7 @@ export class Database { listUserDatabases(): Promise { return this.request( { pathname: "/_api/database/user" }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -950,8 +975,8 @@ export class Database { databases(): Promise { return this.request({ pathname: "/_api/database" }, (res) => (res.parsedBody.result as string[]).map((databaseName) => - this.database(databaseName) - ) + this.database(databaseName), + ), ); } @@ -972,8 +997,8 @@ export class Database { userDatabases(): Promise { return this.request({ pathname: "/_api/database/user" }, (res) => (res.parsedBody.result as string[]).map((databaseName) => - this.database(databaseName) - ) + this.database(databaseName), + ), ); } @@ -995,7 +1020,7 @@ export class Database { method: "DELETE", pathname: `/_api/database/${encodeURIComponent(databaseName)}`, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } //#endregion @@ -1047,14 +1072,14 @@ export class Database { EntryResultType extends Record = any, EntryInputType extends Record = EntryResultType, >( - collectionName: string + collectionName: string, ): collections.DocumentCollection & collections.EdgeCollection { collectionName = collectionName; if (!this._collections.has(collectionName)) { this._collections.set( collectionName, - new collections.Collection(this, collectionName) + new collections.Collection(this, collectionName), ); } return this._collections.get(collectionName)!; @@ -1093,7 +1118,7 @@ export class Database { collectionName: string, options?: collections.CreateCollectionOptions & { type?: collections.CollectionType.DOCUMENT_COLLECTION; - } + }, ): Promise>; /** * Creates a new edge collection with the given `collectionName` and @@ -1134,7 +1159,7 @@ export class Database { collectionName: string, options: collections.CreateCollectionOptions & { type: collections.CollectionType.EDGE_COLLECTION; - } + }, ): Promise>; async createCollection< EntryResultType extends Record = any, @@ -1143,9 +1168,11 @@ export class Database { collectionName: string, options?: collections.CreateCollectionOptions & { type?: collections.CollectionType; - } - ): Promise & - collections.EdgeCollection> { + }, + ): Promise< + collections.DocumentCollection & + collections.EdgeCollection + > { const collection = this.collection(collectionName); await collection.create(options); return collection; @@ -1187,9 +1214,8 @@ export class Database { EntryInputType extends Record = EntryResultType, >( collectionName: string, - options?: collections.CreateCollectionOptions - ): Promise< - collections.EdgeCollection> { + options?: collections.CreateCollectionOptions, + ): Promise> { return this.createCollection(collectionName, { ...options, type: collections.CollectionType.EDGE_COLLECTION, @@ -1210,7 +1236,7 @@ export class Database { */ async renameCollection( collectionName: string, - 
newName: string + newName: string, ): Promise> { const result = await this.request({ method: "PUT", @@ -1246,14 +1272,14 @@ export class Database { * ``` */ listCollections( - excludeSystem: boolean = true + excludeSystem: boolean = true, ): Promise { return this.request( { pathname: "/_api/collection", search: { excludeSystem }, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -1286,8 +1312,10 @@ export class Database { * ``` */ async collections( - excludeSystem: boolean = true - ): Promise> { + excludeSystem: boolean = true, + ): Promise< + Array + > { const collections = await this.listCollections(excludeSystem); return collections.map((data) => this.collection(data.name)); } @@ -1324,7 +1352,7 @@ export class Database { async createGraph( graphName: string, edgeDefinitions: graphs.EdgeDefinitionOptions[], - options?: graphs.CreateGraphOptions + options?: graphs.CreateGraphOptions, ): Promise { const graph = this.graph(graphName); await graph.create(edgeDefinitions, options); @@ -1347,7 +1375,7 @@ export class Database { listGraphs(): Promise { return this.request( { pathname: "/_api/gharial" }, - (res) => res.parsedBody.graphs + (res) => res.parsedBody.graphs, ); } @@ -1405,7 +1433,7 @@ export class Database { */ async createView( viewName: string, - options: views.CreateViewOptions + options: views.CreateViewOptions, ): Promise { const view = this.view(viewName); await view.create(options); @@ -1426,7 +1454,7 @@ export class Database { */ async renameView( viewName: string, - newName: string + newName: string, ): Promise> { const result = await this.request({ method: "PUT", @@ -1452,7 +1480,10 @@ export class Database { * ``` */ listViews(): Promise { - return this.request({ pathname: "/_api/view" }, (res) => res.parsedBody.result); + return this.request( + { pathname: "/_api/view" }, + (res) => res.parsedBody.result, + ); } /** @@ -1491,7 +1522,7 @@ export class Database { if (!this._analyzers.has(analyzerName)) { this._analyzers.set( analyzerName, - new analyzers.Analyzer(this, analyzerName) + new analyzers.Analyzer(this, analyzerName), ); } return this._analyzers.get(analyzerName)!; @@ -1513,7 +1544,7 @@ export class Database { */ async createAnalyzer( analyzerName: string, - options: analyzers.CreateAnalyzerOptions + options: analyzers.CreateAnalyzerOptions, ): Promise { const analyzer = this.analyzer(analyzerName); await analyzer.create(options); @@ -1536,7 +1567,7 @@ export class Database { listAnalyzers(): Promise { return this.request( { pathname: "/_api/analyzer" }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -1576,7 +1607,7 @@ export class Database { { pathname: "/_api/user", }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -1592,7 +1623,9 @@ export class Database { * // user is the user object for the user named "steve" * ``` */ - getUser(username: string): Promise> { + getUser( + username: string, + ): Promise> { return this.request({ pathname: `/_api/user/${encodeURIComponent(username)}`, }); @@ -1613,7 +1646,7 @@ export class Database { */ createUser( username: string, - passwd: string + passwd: string, ): Promise>; /** * Creates a new ArangoDB user with the given options. 
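
The user-management hunks above and below are signature reformatting only; the overloads themselves are unchanged. As a quick illustration of the two `createUser`/`updateUser` call shapes touched here, a minimal sketch (the server URL, credentials, username and `extra` payload are placeholders, not part of this patch):

```ts
import { Database } from "arangojs";

// Placeholder connection details for illustration only.
const db = new Database({
  url: "http://127.0.0.1:8529",
  auth: { username: "root", password: "" },
});

async function provisionUser(): Promise<void> {
  // String overload: the second argument is the password.
  await db.createUser("steve", "hunter2");
  // Options overload: a partial update keeps any fields that are not specified.
  await db.updateUser("steve", { active: true, extra: { team: "qa" } });
}

provisionUser().catch(console.error);
```
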
@@ -1630,11 +1663,11 @@ export class Database { */ createUser( username: string, - options: users.UserOptions + options: users.UserOptions, ): Promise>; createUser( username: string, - options: string | users.UserOptions + options: string | users.UserOptions, ): Promise> { if (typeof options === "string") { options = { passwd: options }; @@ -1645,7 +1678,7 @@ export class Database { pathname: "/_api/user", body: { user: username, ...options }, }, - (res) => res.parsedBody + (res) => res.parsedBody, ); } @@ -1664,7 +1697,7 @@ export class Database { */ updateUser( username: string, - passwd: string + passwd: string, ): Promise>; /** * Updates the ArangoDB user with the new options. @@ -1681,11 +1714,11 @@ export class Database { */ updateUser( username: string, - options: Partial + options: Partial, ): Promise>; updateUser( username: string, - options: string | Partial + options: string | Partial, ): Promise> { if (typeof options === "string") { options = { passwd: options }; @@ -1696,7 +1729,7 @@ export class Database { pathname: `/_api/user/${encodeURIComponent(username)}`, body: options, }, - (res) => res.parsedBody + (res) => res.parsedBody, ); } @@ -1715,7 +1748,7 @@ export class Database { */ replaceUser( username: string, - options: users.UserOptions + options: users.UserOptions, ): Promise> { if (typeof options === "string") { options = { passwd: options }; @@ -1726,7 +1759,7 @@ export class Database { pathname: `/_api/user/${encodeURIComponent(username)}`, body: options, }, - (res) => res.parsedBody + (res) => res.parsedBody, ); } @@ -1742,9 +1775,7 @@ export class Database { * // The user "steve" has been removed * ``` */ - removeUser( - username: string - ): Promise { + removeUser(username: string): Promise { return this.request( { method: "DELETE", @@ -1825,26 +1856,28 @@ export class Database { */ getUserAccessLevel( username: string, - { database, collection }: users.UserAccessLevelOptions + { database, collection }: users.UserAccessLevelOptions, ): Promise { const databaseName = isArangoDatabase(database) ? database.name - : database ?? - (collection instanceof collections.Collection - ? collection.database.name - : this._name); + : (database ?? + (collection instanceof collections.Collection + ? collection.database.name + : this._name)); const suffix = collection ? `/${encodeURIComponent( - collections.isArangoCollection(collection) ? collection.name : collection - )}` + collections.isArangoCollection(collection) + ? collection.name + : collection, + )}` : ""; return this.request( { pathname: `/_api/user/${encodeURIComponent( - username + username, )}/database/${encodeURIComponent(databaseName)}${suffix}`, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -1926,28 +1959,30 @@ export class Database { database, collection, grant, - }: users.UserAccessLevelOptions & { grant: users.AccessLevel } + }: users.UserAccessLevelOptions & { grant: users.AccessLevel }, ): Promise>> { const databaseName = isArangoDatabase(database) ? database.name - : database ?? - (collection instanceof collections.Collection - ? collection.database.name - : this._name); + : (database ?? + (collection instanceof collections.Collection + ? collection.database.name + : this._name)); const suffix = collection ? `/${encodeURIComponent( - collections.isArangoCollection(collection) ? collection.name : collection - )}` + collections.isArangoCollection(collection) + ? 
collection.name + : collection, + )}` : ""; return this.request( { method: "PUT", pathname: `/_api/user/${encodeURIComponent( - username + username, )}/database/${encodeURIComponent(databaseName)}${suffix}`, body: { grant }, }, - (res) => res.parsedBody + (res) => res.parsedBody, ); } @@ -2016,27 +2051,29 @@ export class Database { */ clearUserAccessLevel( username: string, - { database, collection }: users.UserAccessLevelOptions + { database, collection }: users.UserAccessLevelOptions, ): Promise>> { const databaseName = isArangoDatabase(database) ? database.name - : database ?? - (collection instanceof collections.Collection - ? collection.database.name - : this._name); + : (database ?? + (collection instanceof collections.Collection + ? collection.database.name + : this._name)); const suffix = collection ? `/${encodeURIComponent( - collections.isArangoCollection(collection) ? collection.name : collection - )}` + collections.isArangoCollection(collection) + ? collection.name + : collection, + )}` : ""; return this.request( { method: "DELETE", pathname: `/_api/user/${encodeURIComponent( - username + username, )}/database/${encodeURIComponent(databaseName)}${suffix}`, }, - (res) => res.parsedBody + (res) => res.parsedBody, ); } @@ -2058,7 +2095,7 @@ export class Database { */ getUserDatabases( username: string, - full?: false + full?: false, ): Promise>; /** * Fetches an object mapping names of databases to the access level of the @@ -2082,7 +2119,7 @@ export class Database { */ getUserDatabases( username: string, - full: true + full: true, ): Promise< Record< string, @@ -2098,7 +2135,7 @@ export class Database { pathname: `/_api/user/${encodeURIComponent(username)}/database`, search: { full }, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } //#endregion @@ -2154,9 +2191,11 @@ export class Database { * ``` */ executeTransaction( - collections: transactions.TransactionCollectionOptions & { allowImplicit?: boolean }, + collections: transactions.TransactionCollectionOptions & { + allowImplicit?: boolean; + }, action: string, - options?: transactions.TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any }, ): Promise; /** * Performs a server-side transaction and returns its return value. @@ -2205,7 +2244,7 @@ export class Database { executeTransaction( collections: (string | collections.ArangoCollection)[], action: string, - options?: transactions.TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any }, ): Promise; /** * Performs a server-side transaction and returns its return value. 
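
Because the `executeTransaction` overloads reformatted in the surrounding hunks are easy to misread, here is a minimal sketch of the string-`action` form; the `users` collection, the AQL query and the `status` parameter are made up for illustration:

```ts
import { Database } from "arangojs";

const db = new Database(); // assumes a locally reachable server

// The action runs server-side; `params` is passed to it as its argument.
const action = `function (params) {
  const { db } = require("@arangodb");
  return db
    ._query(
      "FOR u IN users FILTER u.status == @status COLLECT WITH COUNT INTO n RETURN n",
      { status: params.status }
    )
    .next();
}`;

async function countActiveUsers(): Promise<number> {
  // Collections read inside the action must be declared up front.
  return db.executeTransaction({ read: ["users"] }, action, {
    params: { status: "active" },
  });
}
```

The value resolved by `executeTransaction` is whatever the server-side function returns.
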
@@ -2254,16 +2293,18 @@ export class Database { executeTransaction( collection: string | collections.ArangoCollection, action: string, - options?: transactions.TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any }, ): Promise; executeTransaction( collections: - | (transactions.TransactionCollectionOptions & { allowImplicit?: boolean }) + | (transactions.TransactionCollectionOptions & { + allowImplicit?: boolean; + }) | (string | collections.ArangoCollection)[] | string | collections.ArangoCollection, action: string, - options: transactions.TransactionOptions & { params?: any } = {} + options: transactions.TransactionOptions & { params?: any } = {}, ): Promise { const { allowDirtyRead = undefined, ...opts } = options; return this.request( @@ -2277,7 +2318,7 @@ export class Database { ...opts, }, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -2331,7 +2372,7 @@ export class Database { */ beginTransaction( collections: transactions.TransactionCollectionOptions, - options?: transactions.TransactionOptions + options?: transactions.TransactionOptions, ): Promise; /** * Begins a new streaming transaction for the given collections, then returns @@ -2362,7 +2403,7 @@ export class Database { */ beginTransaction( collections: (string | collections.ArangoCollection)[], - options?: transactions.TransactionOptions + options?: transactions.TransactionOptions, ): Promise; /** * Begins a new streaming transaction for the given collections, then returns @@ -2392,7 +2433,7 @@ export class Database { */ beginTransaction( collection: string | collections.ArangoCollection, - options?: transactions.TransactionOptions + options?: transactions.TransactionOptions, ): Promise; beginTransaction( collections: @@ -2400,7 +2441,7 @@ export class Database { | (string | collections.ArangoCollection)[] | string | collections.ArangoCollection, - options: transactions.TransactionOptions = {} + options: transactions.TransactionOptions = {}, ): Promise { const { allowDirtyRead = undefined, ...opts } = options; return this.request( @@ -2413,7 +2454,7 @@ export class Database { ...opts, }, }, - (res) => new transactions.Transaction(this, res.parsedBody.result.id) + (res) => new transactions.Transaction(this, res.parsedBody.result.id), ); } @@ -2453,7 +2494,7 @@ export class Database { withTransaction( collections: transactions.TransactionCollectionOptions, callback: (step: transactions.Transaction["step"]) => Promise, - options?: transactions.TransactionOptions + options?: transactions.TransactionOptions, ): Promise; /** * Begins and commits a transaction using the given callback. Individual @@ -2491,7 +2532,7 @@ export class Database { withTransaction( collections: (string | collections.ArangoCollection)[], callback: (step: transactions.Transaction["step"]) => Promise, - options?: transactions.TransactionOptions + options?: transactions.TransactionOptions, ): Promise; /** * Begins and commits a transaction using the given callback. 
Individual @@ -2526,7 +2567,7 @@ export class Database { withTransaction( collection: string | collections.ArangoCollection, callback: (step: transactions.Transaction["step"]) => Promise, - options?: transactions.TransactionOptions + options?: transactions.TransactionOptions, ): Promise; async withTransaction( collections: @@ -2535,11 +2576,11 @@ export class Database { | string | collections.ArangoCollection, callback: (step: transactions.Transaction["step"]) => Promise, - options: transactions.TransactionOptions = {} + options: transactions.TransactionOptions = {}, ): Promise { const trx = await this.beginTransaction( collections as transactions.TransactionCollectionOptions, - options + options, ); try { const result = await callback((fn) => trx.step(fn)); @@ -2548,7 +2589,7 @@ export class Database { } catch (e) { try { await trx.abort(); - } catch { } + } catch {} throw e; } } @@ -2569,7 +2610,7 @@ export class Database { listTransactions(): Promise { return this._connection.request( { pathname: "/_api/transaction" }, - (res) => res.parsedBody.transactions + (res) => res.parsedBody.transactions, ); } @@ -2645,7 +2686,7 @@ export class Database { */ query( query: aql.AqlQuery, - options?: queries.QueryOptions + options?: queries.QueryOptions, ): Promise>; /** * Performs a database query using the given `query` and `bindVars`, then @@ -2699,12 +2740,12 @@ export class Database { query( query: string | aql.AqlLiteral, bindVars?: Record, - options?: queries.QueryOptions + options?: queries.QueryOptions, ): Promise>; query( query: string | aql.AqlQuery | aql.AqlLiteral, bindVars?: Record, - options: queries.QueryOptions = {} + options: queries.QueryOptions = {}, ): Promise> { if (aql.isAqlQuery(query)) { options = bindVars ?? {}; @@ -2747,8 +2788,8 @@ export class Database { this, res.parsedBody, res.arangojsHostUrl, - allowDirtyRead - ).items + allowDirtyRead, + ).items, ); } @@ -2776,7 +2817,7 @@ export class Database { */ explain( query: aql.AqlQuery, - options?: queries.ExplainOptions & { allPlans?: false } + options?: queries.ExplainOptions & { allPlans?: false }, ): Promise>; /** * Explains a database query using the given `query`. @@ -2805,7 +2846,7 @@ export class Database { */ explain( query: aql.AqlQuery, - options?: queries.ExplainOptions & { allPlans: true } + options?: queries.ExplainOptions & { allPlans: true }, ): Promise>; /** * Explains a database query using the given `query` and `bindVars`. @@ -2834,7 +2875,7 @@ export class Database { explain( query: string | aql.AqlLiteral, bindVars?: Record, - options?: queries.ExplainOptions & { allPlans?: false } + options?: queries.ExplainOptions & { allPlans?: false }, ): Promise>; /** * Explains a database query using the given `query` and `bindVars`. 
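
The `explain` overloads in the following hunks differ only in the `allPlans` option, which changes the shape of the result. A minimal sketch of both shapes (the `users` collection and the filter value are placeholders):

```ts
import { Database, aql } from "arangojs";

const db = new Database(); // assumes a locally reachable server

async function explainQueries(): Promise<void> {
  // Default: a single optimized plan is returned.
  const single = await db.explain(aql`
    FOR u IN users
    FILTER u.status == ${"active"}
    RETURN u
  `);
  console.log(single.plan.estimatedCost);

  // With allPlans: true the result contains an array of candidate plans.
  const multi = await db.explain(aql`FOR u IN users RETURN u`, {
    allPlans: true,
  });
  console.log(multi.plans.length);
}
```
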
@@ -2864,13 +2905,17 @@ export class Database { explain( query: string | aql.AqlLiteral, bindVars?: Record, - options?: queries.ExplainOptions & { allPlans: true } + options?: queries.ExplainOptions & { allPlans: true }, ): Promise>; explain( query: string | aql.AqlQuery | aql.AqlLiteral, bindVars?: Record, - options?: queries.ExplainOptions - ): Promise> { + options?: queries.ExplainOptions, + ): Promise< + connection.ArangoApiResponse< + queries.SingleExplainResult | queries.MultiExplainResult + > + > { if (aql.isAqlQuery(query)) { options = bindVars; bindVars = query.bindVars; @@ -2907,7 +2952,9 @@ export class Database { * `); * ``` aql.*/ - parse(query: string | aql.AqlQuery | aql.AqlLiteral): Promise { + parse( + query: string | aql.AqlQuery | aql.AqlLiteral, + ): Promise { if (aql.isAqlQuery(query)) { query = query.query; } else if (aql.isAqlLiteral(query)) { @@ -2966,19 +3013,23 @@ export class Database { * }); * ``` */ - queryTracking(options: queries.QueryTrackingOptions): Promise; - queryTracking(options?: queries.QueryTrackingOptions): Promise { + queryTracking( + options: queries.QueryTrackingOptions, + ): Promise; + queryTracking( + options?: queries.QueryTrackingOptions, + ): Promise { return this.request( options ? { - method: "PUT", - pathname: "/_api/query/properties", - body: options, - } + method: "PUT", + pathname: "/_api/query/properties", + body: options, + } : { - method: "GET", - pathname: "/_api/query/properties", - } + method: "GET", + pathname: "/_api/query/properties", + }, ); } @@ -3038,7 +3089,7 @@ export class Database { method: "DELETE", pathname: "/_api/query/slow", }, - () => undefined + () => undefined, ); } @@ -3068,7 +3119,7 @@ export class Database { method: "DELETE", pathname: `/_api/query/${encodeURIComponent(queryId)}`, }, - () => undefined + () => undefined, ); } //#endregion @@ -3087,7 +3138,7 @@ export class Database { listUserFunctions(): Promise { return this.request( { pathname: "/_api/aqlfunction" }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -3125,7 +3176,7 @@ export class Database { createUserFunction( name: string, code: string, - isDeterministic: boolean = false + isDeterministic: boolean = false, ): Promise> { return this.request({ method: "POST", @@ -3151,7 +3202,7 @@ export class Database { */ dropUserFunction( name: string, - group: boolean = false + group: boolean = false, ): Promise> { return this.request({ method: "DELETE", @@ -3179,7 +3230,9 @@ export class Database { * const services = await db.listServices(false); // all services * ``` */ - listServices(excludeSystem: boolean = true): Promise { + listServices( + excludeSystem: boolean = true, + ): Promise { return this.request({ pathname: "/_api/foxx", search: { excludeSystem }, @@ -3221,7 +3274,7 @@ export class Database { async installService( mount: string, source: File | Blob | string, - options: services.InstallServiceOptions = {} + options: services.InstallServiceOptions = {}, ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); @@ -3233,7 +3286,7 @@ export class Database { } form.append( "source", - typeof source === "string" ? JSON.stringify(source) : source + typeof source === "string" ? 
JSON.stringify(source) : source, ); return await this.request({ body: form, @@ -3279,7 +3332,7 @@ export class Database { async replaceService( mount: string, source: File | Blob | string, - options: services.ReplaceServiceOptions = {} + options: services.ReplaceServiceOptions = {}, ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); @@ -3291,7 +3344,7 @@ export class Database { } form.append( "source", - typeof source === "string" ? JSON.stringify(source) : source + typeof source === "string" ? JSON.stringify(source) : source, ); return await this.request({ body: form, @@ -3337,7 +3390,7 @@ export class Database { async upgradeService( mount: string, source: File | Blob | string, - options: services.UpgradeServiceOptions = {} + options: services.UpgradeServiceOptions = {}, ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); @@ -3349,7 +3402,7 @@ export class Database { } form.append( "source", - typeof source === "string" ? JSON.stringify(source) : source + typeof source === "string" ? JSON.stringify(source) : source, ); return await this.request({ body: form, @@ -3373,7 +3426,7 @@ export class Database { */ uninstallService( mount: string, - options?: services.UninstallServiceOptions + options?: services.UninstallServiceOptions, ): Promise { return this.request( { @@ -3381,7 +3434,7 @@ export class Database { pathname: "/_api/foxx/service", search: { ...options, mount }, }, - () => undefined + () => undefined, ); } @@ -3427,7 +3480,7 @@ export class Database { */ getServiceConfiguration( mount: string, - minimal?: false + minimal?: false, ): Promise>; /** * Retrieves information about the service's configuration options and their @@ -3452,7 +3505,7 @@ export class Database { */ getServiceConfiguration( mount: string, - minimal: true + minimal: true, ): Promise>; getServiceConfiguration(mount: string, minimal: boolean = false) { return this.request({ @@ -3488,8 +3541,10 @@ export class Database { replaceServiceConfiguration( mount: string, cfg: Record, - minimal?: false - ): Promise>; + minimal?: false, + ): Promise< + Record + >; /** * Replaces the configuration of the given service, discarding any existing * values for options not specified. @@ -3517,7 +3572,7 @@ export class Database { replaceServiceConfiguration( mount: string, cfg: Record, - minimal: true + minimal: true, ): Promise<{ values: Record; warnings: Record; @@ -3525,7 +3580,7 @@ export class Database { replaceServiceConfiguration( mount: string, cfg: Record, - minimal: boolean = false + minimal: boolean = false, ) { return this.request({ method: "PUT", @@ -3562,8 +3617,10 @@ export class Database { updateServiceConfiguration( mount: string, cfg: Record, - minimal?: false - ): Promise>; + minimal?: false, + ): Promise< + Record + >; /** * Updates the configuration of the given service while maintaining any * existing values for options not specified. 
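
To make the `minimal` flag in the reformatted service-configuration overloads concrete, a small sketch; the mount point `/my-service` and the `logLevel` option are hypothetical and depend entirely on the installed Foxx service:

```ts
import { Database } from "arangojs";

const db = new Database(); // assumes a locally reachable server
const mount = "/my-service"; // hypothetical Foxx mount point

async function tweakConfiguration(): Promise<void> {
  // Full form: a map of option definitions including their current values.
  const definitions = await db.getServiceConfiguration(mount);
  console.log(definitions.logLevel?.current);

  // Minimal form: only the resulting values and any warnings are returned.
  // Options not mentioned in the update keep their existing values.
  const { values, warnings } = await db.updateServiceConfiguration(
    mount,
    { logLevel: "debug" },
    true,
  );
  console.log(values.logLevel, warnings);
}
```
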
@@ -3591,7 +3648,7 @@ export class Database { updateServiceConfiguration( mount: string, cfg: Record, - minimal: true + minimal: true, ): Promise<{ values: Record; warnings: Record; @@ -3599,7 +3656,7 @@ export class Database { updateServiceConfiguration( mount: string, cfg: Record, - minimal: boolean = false + minimal: boolean = false, ) { return this.request({ method: "PATCH", @@ -3632,8 +3689,13 @@ export class Database { */ getServiceDependencies( mount: string, - minimal?: false - ): Promise>; + minimal?: false, + ): Promise< + Record< + string, + services.SingleServiceDependency | services.MultiServiceDependency + > + >; /** * Retrieves information about the service's dependencies and their current * mount points. @@ -3657,7 +3719,7 @@ export class Database { */ getServiceDependencies( mount: string, - minimal: true + minimal: true, ): Promise>; getServiceDependencies(mount: string, minimal: boolean = false) { return this.request({ @@ -3693,11 +3755,13 @@ export class Database { replaceServiceDependencies( mount: string, deps: Record, - minimal?: false + minimal?: false, ): Promise< Record< string, - (services.SingleServiceDependency | services.MultiServiceDependency) & { warning?: string } + (services.SingleServiceDependency | services.MultiServiceDependency) & { + warning?: string; + } > >; /** @@ -3731,7 +3795,7 @@ export class Database { replaceServiceDependencies( mount: string, deps: Record, - minimal: true + minimal: true, ): Promise<{ values: Record; warnings: Record; @@ -3739,7 +3803,7 @@ export class Database { replaceServiceDependencies( mount: string, deps: Record, - minimal: boolean = false + minimal: boolean = false, ) { return this.request({ method: "PUT", @@ -3776,11 +3840,13 @@ export class Database { updateServiceDependencies( mount: string, deps: Record, - minimal?: false + minimal?: false, ): Promise< Record< string, - (services.SingleServiceDependency | services.MultiServiceDependency) & { warning?: string } + (services.SingleServiceDependency | services.MultiServiceDependency) & { + warning?: string; + } > >; /** @@ -3814,7 +3880,7 @@ export class Database { updateServiceDependencies( mount: string, deps: Record, - minimal: true + minimal: true, ): Promise<{ values: Record; warnings: Record; @@ -3822,7 +3888,7 @@ export class Database { updateServiceDependencies( mount: string, deps: Record, - minimal: boolean = false + minimal: boolean = false, ) { return this.request({ method: "PATCH", @@ -3849,7 +3915,7 @@ export class Database { */ setServiceDevelopmentMode( mount: string, - enabled: boolean = true + enabled: boolean = true, ): Promise { return this.request({ method: enabled ? "POST" : "DELETE", @@ -3940,7 +4006,7 @@ export class Database { * executed. */ filter?: string; - } + }, ): Promise; /** * Runs the tests of a given service and returns the results using the @@ -3972,7 +4038,7 @@ export class Database { * executed. */ filter?: string; - } + }, ): Promise; /** * Runs the tests of a given service and returns the results using the @@ -4005,7 +4071,7 @@ export class Database { * executed. */ filter?: string; - } + }, ): Promise; /** * Runs the tests of a given service and returns the results using the @@ -4038,7 +4104,7 @@ export class Database { * executed. */ filter?: string; - } + }, ): Promise; /** * Runs the tests of a given service and returns the results using the @@ -4071,7 +4137,7 @@ export class Database { * executed. 
*/ filter?: string; - } + }, ): Promise; /** * Runs the tests of a given service and returns the results as a string @@ -4105,7 +4171,7 @@ export class Database { * executed. */ filter?: string; - } + }, ): Promise; /** * Runs the tests of a given service and returns the results as a string @@ -4139,7 +4205,7 @@ export class Database { * executed. */ filter?: string; - } + }, ): Promise; /** * Runs the tests of a given service and returns the results as a string @@ -4173,7 +4239,7 @@ export class Database { * executed. */ filter?: string; - } + }, ): Promise; runServiceTests( mount: string, @@ -4181,7 +4247,7 @@ export class Database { reporter?: string; idiomatic?: boolean; filter?: string; - } + }, ) { return this.request({ method: "POST", @@ -4284,7 +4350,7 @@ export class Database { pathname: "/_api/foxx/commit", search: { replace }, }, - () => undefined + () => undefined, ); } //#endregion @@ -4304,7 +4370,7 @@ export class Database { * ``` */ createHotBackup( - options: hotBackups.HotBackupOptions = {} + options: hotBackups.HotBackupOptions = {}, ): Promise { return this.request( { @@ -4312,7 +4378,7 @@ export class Database { pathname: "/_admin/backup/create", body: options, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -4338,7 +4404,7 @@ export class Database { pathname: "/_admin/backup/list", body: id ? { id } : undefined, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -4362,7 +4428,7 @@ export class Database { pathname: "/_admin/backup/restore", body: { id }, }, - (res) => res.parsedBody.result.previous + (res) => res.parsedBody.result.previous, ); } @@ -4384,7 +4450,7 @@ export class Database { pathname: "/_admin/backup/delete", body: { id }, }, - () => undefined + () => undefined, ); } //#endregion @@ -4410,7 +4476,7 @@ export class Database { pathname: "/_admin/log/entries", search: options, }, - (res) => res.parsedBody + (res) => res.parsedBody, ); } @@ -4431,14 +4497,14 @@ export class Database { * ``` */ listLogMessages( - options?: logs.LogEntriesOptions + options?: logs.LogEntriesOptions, ): Promise { return this.request( { pathname: "/_admin/log", search: options, }, - (res) => res.parsedBody.messages + (res) => res.parsedBody.messages, ); } @@ -4471,7 +4537,7 @@ export class Database { * ``` */ setLogLevel( - levels: Record + levels: Record, ): Promise> { return this.request({ method: "PUT", @@ -4504,9 +4570,11 @@ export class Database { * ``` */ async createJob(callback: () => Promise): Promise> { - const trap = new Promise>((resolveTrap) => { - this._trapRequest = (trapped) => resolveTrap(trapped); - }); + const trap = new Promise>( + (resolveTrap) => { + this._trapRequest = (trapped) => resolveTrap(trapped); + }, + ); const eventualResult = callback(); const trapped = await trap; if (trapped.error) return eventualResult as Promise; @@ -4521,7 +4589,7 @@ export class Database { (e) => { onReject(e); return eventualResult; - } + }, ); } @@ -4555,7 +4623,7 @@ export class Database { { pathname: "/_api/job/pending", }, - (res) => res.parsedBody + (res) => res.parsedBody, ); } @@ -4574,7 +4642,7 @@ export class Database { { pathname: "/_api/job/done", }, - (res) => res.parsedBody + (res) => res.parsedBody, ); } @@ -4599,7 +4667,7 @@ export class Database { pathname: `/_api/job/expired`, search: { stamp: threshold / 1000 }, }, - () => undefined + () => undefined, ); } @@ -4612,7 +4680,7 @@ export class Database { method: "DELETE", pathname: `/_api/job/all`, }, - () => undefined + () => undefined, ); } 
//#endregion diff --git a/src/documents.ts b/src/documents.ts index ca9acfaa4..4f396e8d3 100644 --- a/src/documents.ts +++ b/src/documents.ts @@ -542,7 +542,10 @@ export type DocumentEdgesResult = any> = { * * See {@link DocumentMetadata}. */ -export type DocumentSelector = ObjectWithDocumentId | ObjectWithDocumentKey | string; +export type DocumentSelector = + | ObjectWithDocumentId + | ObjectWithDocumentKey + | string; /** * An object with an ArangoDB document `_id` property. @@ -570,7 +573,7 @@ export type ObjectWithDocumentKey = { export function _documentHandle( selector: DocumentSelector, collectionName: string, - strict: boolean = true + strict: boolean = true, ): string { if (typeof selector !== "string") { if (selector._id) { @@ -580,18 +583,18 @@ export function _documentHandle( return _documentHandle(selector._key, collectionName); } throw new Error( - "Document handle must be a string or an object with a _key or _id attribute" + "Document handle must be a string or an object with a _key or _id attribute", ); } if (selector.includes("/")) { const [head] = selector.split("/"); if (strict && head !== collectionName) { throw new Error( - `Document ID "${selector}" does not match collection name "${collectionName}"` + `Document ID "${selector}" does not match collection name "${collectionName}"`, ); } return selector; } return `${collectionName}/${selector}`; } -//#endregion \ No newline at end of file +//#endregion diff --git a/src/errors.ts b/src/errors.ts index 2b38982f6..eae50925c 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -39,9 +39,9 @@ export function isSystemError(err: any): err is SystemError { if (!err || !(err instanceof Error)) return false; if (Object.getPrototypeOf(err) !== Error.prototype) return false; const error = err as SystemError; - if (typeof error.code !== 'string') return false; - if (typeof error.syscall !== 'string') return false; - return typeof error.errno === 'number' || typeof error.errno === 'string'; + if (typeof error.code !== "string") return false; + if (typeof error.syscall !== "string") return false; + return typeof error.errno === "number" || typeof error.errno === "string"; } /** @@ -52,8 +52,8 @@ export function isSystemError(err: any): err is SystemError { export function isUndiciError(err: any): err is UndiciError { if (!err || !(err instanceof Error)) return false; const error = err as UndiciError; - if (typeof error.code !== 'string') return false; - return error.code.startsWith('UND_'); + if (typeof error.code !== "string") return false; + return error.code.startsWith("UND_"); } /** @@ -67,10 +67,14 @@ function isSafeToRetryFailedFetch(error?: Error): boolean | null { if (isArangoError(cause) || isNetworkError(cause)) { return cause.isSafeToRetry; } - if (isSystemError(cause) && cause.syscall === 'connect' && cause.code === 'ECONNREFUSED') { + if ( + isSystemError(cause) && + cause.syscall === "connect" && + cause.code === "ECONNREFUSED" + ) { return true; } - if (isUndiciError(cause) && cause.code === 'UND_ERR_CONNECT_TIMEOUT') { + if (isUndiciError(cause) && cause.code === "UND_ERR_CONNECT_TIMEOUT") { return true; } return isSafeToRetryFailedFetch(cause); @@ -104,7 +108,7 @@ export class PropagationTimeoutError extends Error { name = "PropagationTimeoutError"; constructor(message?: string, options: { cause?: Error } = {}) { - super(message ?? 'Timed out while waiting for propagation', options); + super(message ?? 
"Timed out while waiting for propagation", options); } } @@ -124,7 +128,11 @@ export class NetworkError extends Error { */ request: globalThis.Request; - constructor(message: string, request: globalThis.Request, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + constructor( + message: string, + request: globalThis.Request, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { const { isSafeToRetry = null, ...opts } = options; super(message, opts); this.request = request; @@ -147,8 +155,16 @@ export class NetworkError extends Error { export class ResponseTimeoutError extends NetworkError { name = "ResponseTimeoutError"; - constructor(message: string | undefined, request: globalThis.Request, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { - super(message ?? 'Timed out while waiting for server response', request, options); + constructor( + message: string | undefined, + request: globalThis.Request, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + super( + message ?? "Timed out while waiting for server response", + request, + options, + ); } } @@ -158,8 +174,12 @@ export class ResponseTimeoutError extends NetworkError { export class RequestAbortedError extends NetworkError { name = "RequestAbortedError"; - constructor(message: string | undefined, request: globalThis.Request, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { - super(message ?? 'Request aborted', request, options); + constructor( + message: string | undefined, + request: globalThis.Request, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + super(message ?? "Request aborted", request, options); } } @@ -171,12 +191,17 @@ export class RequestAbortedError extends NetworkError { export class FetchFailedError extends NetworkError { name = "FetchFailedError"; - constructor(message: string | undefined, request: globalThis.Request, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { - let isSafeToRetry = options.isSafeToRetry ?? isSafeToRetryFailedFetch(options.cause); + constructor( + message: string | undefined, + request: globalThis.Request, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + let isSafeToRetry = + options.isSafeToRetry ?? isSafeToRetryFailedFetch(options.cause); if (options.cause?.cause instanceof Error && options.cause.cause.message) { message = `Fetch failed: ${options.cause.cause.message}`; } - super(message ?? 'Fetch failed', request, { ...options, isSafeToRetry }); + super(message ?? "Fetch failed", request, { ...options, isSafeToRetry }); } } @@ -199,7 +224,10 @@ export class HttpError extends NetworkError { /** * @internal */ - constructor(response: connection.ProcessedResponse, options: { cause?: Error, isSafeToRetry?: boolean | null } = {}) { + constructor( + response: connection.ProcessedResponse, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { super(connection.getStatusMessage(response), response.request, options); this.response = response; this.code = response.status; @@ -262,9 +290,11 @@ export class ArangoError extends Error { * * Creates a new `ArangoError` from a response object. 
*/ - static from(response: connection.ProcessedResponse): ArangoError { + static from( + response: connection.ProcessedResponse, + ): ArangoError { return new ArangoError(response.parsedBody!, { - cause: new HttpError(response) + cause: new HttpError(response), }); } @@ -291,7 +321,9 @@ export class ArangoError extends Error { /** * Server response object. */ - get response(): connection.ProcessedResponse | undefined { + get response(): + | connection.ProcessedResponse + | undefined { const cause = this.cause; if (cause instanceof HttpError) { return cause.response; @@ -331,4 +363,4 @@ export class ArangoError extends Error { toString() { return `${this.name} ${this.errorNum}: ${this.message}`; } -} \ No newline at end of file +} diff --git a/src/foxx-manifest.ts b/src/foxx-manifest.ts index f11a40d35..76a91afb4 100644 --- a/src/foxx-manifest.ts +++ b/src/foxx-manifest.ts @@ -103,14 +103,14 @@ export type Configuration = { * The type of value expected for this option. */ type: - | "integer" - | "boolean" - | "number" - | "string" - | "json" - | "password" - | "int" - | "bool"; + | "integer" + | "boolean" + | "number" + | "string" + | "json" + | "password" + | "int" + | "bool"; /** * The default value for this option in plain JSON. Can be omitted to provide no default value. */ diff --git a/src/graphs.ts b/src/graphs.ts index 8ae1700ab..f82eb9373 100644 --- a/src/graphs.ts +++ b/src/graphs.ts @@ -31,7 +31,9 @@ function mungeGharialResponse(body: any, prop: "vertex" | "edge" | "removed") { */ function coerceEdgeDefinition(options: EdgeDefinitionOptions): EdgeDefinition { const edgeDefinition = {} as EdgeDefinition; - edgeDefinition.collection = collections.collectionToString(options.collection); + edgeDefinition.collection = collections.collectionToString( + options.collection, + ); edgeDefinition.from = Array.isArray(options.from) ? options.from.map(collections.collectionToString) : [collections.collectionToString(options.from)]; @@ -168,11 +170,17 @@ export type EdgeDefinitionOptions = { /** * Collection or collections containing the start vertices. */ - from: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; + from: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; /** * Collection or collections containing the end vertices. */ - to: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; + to: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; }; //#endregion @@ -290,7 +298,10 @@ export type CreateGraphOptions = { * Additional vertex collections. Documents within these collections do not * have edges within this graph. */ - orphanCollections?: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; + orphanCollections?: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; /** * (Cluster only.) 
Number of shards that is used for every collection @@ -386,12 +397,15 @@ export type ReplaceEdgeDefinitionOptions = { export class GraphVertexCollection< EntryResultType extends Record = any, EntryInputType extends Record = EntryResultType, -> - implements collections.ArangoCollection { +> implements collections.ArangoCollection +{ protected _db: databases.Database; protected _name: string; protected _graph: Graph; - protected _collection: collections.DocumentCollection; + protected _collection: collections.DocumentCollection< + EntryResultType, + EntryInputType + >; /** * @internal @@ -466,10 +480,10 @@ export class GraphVertexCollection< { method: "HEAD", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, }, - () => true + () => true, ); } catch (err: any) { if (err.code === 404) { @@ -515,7 +529,7 @@ export class GraphVertexCollection< */ async vertex( selector: documents.DocumentSelector, - options?: ReadGraphDocumentOptions + options?: ReadGraphDocumentOptions, ): Promise>; /** * Retrieves the vertex matching the given key or id. @@ -554,11 +568,11 @@ export class GraphVertexCollection< */ async vertex( selector: documents.DocumentSelector, - graceful: boolean + graceful: boolean, ): Promise>; async vertex( selector: documents.DocumentSelector, - options: boolean | ReadGraphDocumentOptions = {} + options: boolean | ReadGraphDocumentOptions = {}, ): Promise | null> { if (typeof options === "boolean") { options = { graceful: options }; @@ -574,13 +588,13 @@ export class GraphVertexCollection< const result = this._db.request( { pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, headers, search, allowDirtyRead, }, - (res) => res.parsedBody.vertex + (res) => res.parsedBody.vertex, ); if (!graceful) return result; try { @@ -612,19 +626,24 @@ export class GraphVertexCollection< */ save( data: documents.DocumentData, - options?: InsertGraphDocumentOptions - ): Promise }>; - save(data: documents.DocumentData, options?: InsertGraphDocumentOptions) { + options?: InsertGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { new?: documents.Document } + >; + save( + data: documents.DocumentData, + options?: InsertGraphDocumentOptions, + ) { return this._db.request( { method: "POST", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/vertex/${encodeURIComponent(this._name)}`, body: data, search: options, }, - (res) => mungeGharialResponse(res.parsedBody, "vertex") + (res) => mungeGharialResponse(res.parsedBody, "vertex"), ); } @@ -655,12 +674,17 @@ export class GraphVertexCollection< replace( selector: documents.DocumentSelector, newValue: documents.DocumentData, - options?: ReplaceGraphDocumentOptions - ): Promise; old?: documents.Document }>; + options?: ReplaceGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { + new?: documents.Document; + old?: documents.Document; + } + >; replace( selector: documents.DocumentSelector, newValue: documents.DocumentData, - options: ReplaceGraphDocumentOptions = {} + options: ReplaceGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -672,13 +696,13 @@ export class GraphVertexCollection< { method: "PUT", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/vertex/${encodeURI(documents._documentHandle(selector, 
this._name))}`, body: newValue, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "vertex") + (res) => mungeGharialResponse(res.parsedBody, "vertex"), ); } @@ -709,12 +733,17 @@ export class GraphVertexCollection< update( selector: documents.DocumentSelector, newValue: documents.Patch>, - options?: ReplaceGraphDocumentOptions - ): Promise; old?: documents.Document }>; + options?: ReplaceGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { + new?: documents.Document; + old?: documents.Document; + } + >; update( selector: documents.DocumentSelector, newValue: documents.Patch>, - options: ReplaceGraphDocumentOptions = {} + options: ReplaceGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -726,13 +755,13 @@ export class GraphVertexCollection< { method: "PATCH", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "vertex") + (res) => mungeGharialResponse(res.parsedBody, "vertex"), ); } @@ -765,11 +794,13 @@ export class GraphVertexCollection< */ remove( selector: documents.DocumentSelector, - options?: RemoveGraphDocumentOptions - ): Promise }>; + options?: RemoveGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { old?: documents.Document } + >; remove( selector: documents.DocumentSelector, - options: RemoveGraphDocumentOptions = {} + options: RemoveGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -781,12 +812,12 @@ export class GraphVertexCollection< { method: "DELETE", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "removed") + (res) => mungeGharialResponse(res.parsedBody, "removed"), ); } } @@ -804,12 +835,15 @@ export class GraphVertexCollection< export class GraphEdgeCollection< EntryResultType extends Record = any, EntryInputType extends Record = EntryResultType, -> - implements collections.ArangoCollection { +> implements collections.ArangoCollection +{ protected _db: databases.Database; protected _name: string; protected _graph: Graph; - protected _collection: collections.EdgeCollection; + protected _collection: collections.EdgeCollection< + EntryResultType, + EntryInputType + >; /** * @internal @@ -884,10 +918,10 @@ export class GraphEdgeCollection< { method: "HEAD", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, }, - () => true + () => true, ); } catch (err: any) { if (err.code === 404) { @@ -933,7 +967,7 @@ export class GraphEdgeCollection< */ async edge( selector: documents.DocumentSelector, - options?: ReadGraphDocumentOptions + options?: ReadGraphDocumentOptions, ): Promise>; /** * Retrieves the edge matching the given key or id. 
@@ -970,10 +1004,13 @@ export class GraphEdgeCollection< * } * ``` */ - async edge(selector: documents.DocumentSelector, graceful: boolean): Promise>; async edge( selector: documents.DocumentSelector, - options: boolean | ReadGraphDocumentOptions = {} + graceful: boolean, + ): Promise>; + async edge( + selector: documents.DocumentSelector, + options: boolean | ReadGraphDocumentOptions = {}, ): Promise | null> { if (typeof options === "boolean") { options = { graceful: options }; @@ -989,12 +1026,12 @@ export class GraphEdgeCollection< const result = this._db.request( { pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, search, allowDirtyRead, }, - (res) => res.parsedBody.edge + (res) => res.parsedBody.edge, ); if (!graceful) return result; try { @@ -1025,19 +1062,24 @@ export class GraphEdgeCollection< */ save( data: documents.EdgeData, - options?: InsertGraphDocumentOptions - ): Promise }>; - save(data: documents.EdgeData, options?: InsertGraphDocumentOptions) { + options?: InsertGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { new?: documents.Edge } + >; + save( + data: documents.EdgeData, + options?: InsertGraphDocumentOptions, + ) { return this._db.request( { method: "POST", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/edge/${encodeURIComponent(this._name)}`, body: data, search: options, }, - (res) => mungeGharialResponse(res.parsedBody, "edge") + (res) => mungeGharialResponse(res.parsedBody, "edge"), ); } @@ -1076,12 +1118,17 @@ export class GraphEdgeCollection< replace( selector: documents.DocumentSelector, newValue: documents.EdgeData, - options?: ReplaceGraphDocumentOptions - ): Promise; old?: documents.Edge }>; + options?: ReplaceGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { + new?: documents.Edge; + old?: documents.Edge; + } + >; replace( selector: documents.DocumentSelector, newValue: documents.EdgeData, - options: ReplaceGraphDocumentOptions = {} + options: ReplaceGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -1093,13 +1140,13 @@ export class GraphEdgeCollection< { method: "PUT", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "edge") + (res) => mungeGharialResponse(res.parsedBody, "edge"), ); } @@ -1138,12 +1185,17 @@ export class GraphEdgeCollection< update( selector: documents.DocumentSelector, newValue: documents.Patch>, - options?: ReplaceGraphDocumentOptions - ): Promise; old?: documents.Edge }>; + options?: ReplaceGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { + new?: documents.Edge; + old?: documents.Edge; + } + >; update( selector: documents.DocumentSelector, newValue: documents.Patch>, - options: ReplaceGraphDocumentOptions = {} + options: ReplaceGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -1155,13 +1207,13 @@ export class GraphEdgeCollection< { method: "PATCH", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "edge") + (res) => mungeGharialResponse(res.parsedBody, "edge"), ); } @@ 
-1186,11 +1238,13 @@ export class GraphEdgeCollection< */ remove( selector: documents.DocumentSelector, - options?: RemoveGraphDocumentOptions - ): Promise }>; + options?: RemoveGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { old?: documents.Edge } + >; remove( selector: documents.DocumentSelector, - options: RemoveGraphDocumentOptions = {} + options: RemoveGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -1202,12 +1256,12 @@ export class GraphEdgeCollection< { method: "DELETE", pathname: `/_api/gharial/${encodeURIComponent( - this.graph.name + this.graph.name, )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "removed") + (res) => mungeGharialResponse(res.parsedBody, "removed"), ); } } @@ -1297,7 +1351,7 @@ export class Graph { get(): Promise { return this._db.request( { pathname: `/_api/gharial/${encodeURIComponent(this._name)}` }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1324,7 +1378,7 @@ export class Graph { */ create( edgeDefinitions: EdgeDefinitionOptions[], - options: CreateGraphOptions = {} + options: CreateGraphOptions = {}, ): Promise { const { orphanCollections, satellites, waitForSync, isSmart, ...opts } = options; @@ -1341,11 +1395,14 @@ export class Graph { edgeDefinitions: edgeDefinitions.map(coerceEdgeDefinition), isSmart, name: this._name, - options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, + options: { + ...opts, + satellites: satellites?.map(collections.collectionToString), + }, }, search: { waitForSync }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1370,7 +1427,7 @@ export class Graph { pathname: `/_api/gharial/${encodeURIComponent(this._name)}`, search: { dropCollections }, }, - (res) => res.parsedBody.removed + (res) => res.parsedBody.removed, ); } @@ -1382,12 +1439,12 @@ export class Graph { * @param collection - Name of the vertex collection. 
*/ vertexCollection = any>( - collection: string | collections.ArangoCollection + collection: string | collections.ArangoCollection, ): GraphVertexCollection { return new GraphVertexCollection( this._db, collections.collectionToString(collection), - this + this, ); } @@ -1415,7 +1472,7 @@ export class Graph { listVertexCollections(): Promise { return this._db.request( { pathname: `/_api/gharial/${encodeURIComponent(this._name)}/vertex` }, - (res) => res.parsedBody.collections + (res) => res.parsedBody.collections, ); } @@ -1467,7 +1524,7 @@ export class Graph { */ addVertexCollection( collection: string | collections.ArangoCollection, - options: AddVertexCollectionOptions = {} + options: AddVertexCollectionOptions = {}, ): Promise { const { satellites, ...opts } = options; return this._db.request( @@ -1476,10 +1533,13 @@ export class Graph { pathname: `/_api/gharial/${encodeURIComponent(this._name)}/vertex`, body: { collection: collections.collectionToString(collection), - options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, + options: { + ...opts, + satellites: satellites?.map(collections.collectionToString), + }, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1507,19 +1567,19 @@ export class Graph { */ removeVertexCollection( collection: string | collections.ArangoCollection, - dropCollection: boolean = false + dropCollection: boolean = false, ): Promise { return this._db.request( { method: "DELETE", pathname: `/_api/gharial/${encodeURIComponent( - this._name + this._name, )}/vertex/${encodeURIComponent(collections.collectionToString(collection))}`, search: { dropCollection, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1547,12 +1607,12 @@ export class Graph { * ``` */ edgeCollection = any>( - collection: string | collections.ArangoCollection + collection: string | collections.ArangoCollection, ): GraphEdgeCollection { return new GraphEdgeCollection( this._db, collections.collectionToString(collection), - this + this, ); } @@ -1580,7 +1640,7 @@ export class Graph { listEdgeCollections(): Promise { return this._db.request( { pathname: `/_api/gharial/${encodeURIComponent(this._name)}/edge` }, - (res) => res.parsedBody.collections + (res) => res.parsedBody.collections, ); } @@ -1632,7 +1692,7 @@ export class Graph { */ addEdgeDefinition( edgeDefinition: EdgeDefinitionOptions, - options: AddEdgeDefinitionOptions = {} + options: AddEdgeDefinitionOptions = {}, ): Promise { const { satellites, ...opts } = options; return this._db.request( @@ -1641,10 +1701,13 @@ export class Graph { pathname: `/_api/gharial/${encodeURIComponent(this._name)}/edge`, body: { ...coerceEdgeDefinition(edgeDefinition), - options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, + options: { + ...opts, + satellites: satellites?.map(collections.collectionToString), + }, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1675,7 +1738,7 @@ export class Graph { */ replaceEdgeDefinition( edgeDefinition: EdgeDefinitionOptions, - options?: ReplaceEdgeDefinitionOptions + options?: ReplaceEdgeDefinitionOptions, ): Promise; /** * Replaces an edge definition in this graph. 
The existing edge definition @@ -1706,7 +1769,7 @@ export class Graph { replaceEdgeDefinition( collection: string | collections.ArangoCollection, edgeDefinition: EdgeDefinitionOptions, - options?: ReplaceEdgeDefinitionOptions + options?: ReplaceEdgeDefinitionOptions, ): Promise; replaceEdgeDefinition( collectionOrEdgeDefinitionOptions: @@ -1716,7 +1779,7 @@ export class Graph { edgeDefinitionOrOptions?: | EdgeDefinitionOptions | ReplaceEdgeDefinitionOptions, - options: ReplaceEdgeDefinitionOptions = {} + options: ReplaceEdgeDefinitionOptions = {}, ) { let collection = collectionOrEdgeDefinitionOptions as | string @@ -1739,14 +1802,17 @@ export class Graph { { method: "PUT", pathname: `/_api/gharial/${encodeURIComponent( - this._name + this._name, )}/edge/${encodeURIComponent(collections.collectionToString(collection))}`, body: { ...coerceEdgeDefinition(edgeDefinition), - options: { ...opts, satellites: satellites?.map(collections.collectionToString) }, + options: { + ...opts, + satellites: satellites?.map(collections.collectionToString), + }, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1774,20 +1840,20 @@ export class Graph { */ removeEdgeDefinition( collection: string | collections.ArangoCollection, - dropCollection: boolean = false + dropCollection: boolean = false, ): Promise { return this._db.request( { method: "DELETE", pathname: `/_api/gharial/${encodeURIComponent( - this._name + this._name, )}/edge/${encodeURIComponent(collections.collectionToString(collection))}`, search: { dropCollection, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } } -//#endregion \ No newline at end of file +//#endregion diff --git a/src/index.ts b/src/index.ts index e29f74c42..af5e3c1af 100644 --- a/src/index.ts +++ b/src/index.ts @@ -34,7 +34,9 @@ if (typeof module !== "undefined" && typeof exports !== "undefined") { * }); * ``` */ -export function arangojs(config?: configuration.ConfigOptions): databases.Database; +export function arangojs( + config?: configuration.ConfigOptions, +): databases.Database; /** * Creates a new `Database` instance with its own connection pool. * @@ -49,8 +51,14 @@ export function arangojs(config?: configuration.ConfigOptions): databases.Databa * db.useBasicAuth("admin", "hunter2"); * ``` */ -export function arangojs(url: string | string[], name?: string): databases.Database; -export function arangojs(config?: string | string[] | configuration.ConfigOptions, name?: string) { +export function arangojs( + url: string | string[], + name?: string, +): databases.Database; +export function arangojs( + config?: string | string[] | configuration.ConfigOptions, + name?: string, +) { if (typeof config === "string" || Array.isArray(config)) { const url = config; return new databases.Database(url, name); diff --git a/src/indexes.ts b/src/indexes.ts index 2d63f6557..e360ee000 100644 --- a/src/indexes.ts +++ b/src/indexes.ts @@ -66,7 +66,7 @@ export type EnsureIndexOptions = type EnsureIndexOptionsType< Type extends IndexType, Fields extends any[], - Extra extends {} = {} + Extra extends {} = {}, > = { /** * A unique name for this index. @@ -93,7 +93,8 @@ type EnsureIndexOptionsType< * Options for creating a persistent index. */ export type EnsurePersistentIndexOptions = EnsureIndexOptionsType< - "persistent", string[], + "persistent", + string[], { /** * If set to `true`, a unique index will be created. @@ -142,7 +143,8 @@ export type EnsurePersistentIndexOptions = EnsureIndexOptionsType< * Options for creating a geo index. 
*/ export type EnsureGeoIndexOptions = EnsureIndexOptionsType< - "geo", [string, string] | [string], + "geo", + [string, string] | [string], { /** * If set to `true`, `fields` must be an array containing a single attribute @@ -172,7 +174,8 @@ export type EnsureGeoIndexOptions = EnsureIndexOptionsType< * Options for creating a TTL index. */ export type EnsureTtlIndexOptions = EnsureIndexOptionsType< - "ttl", [string], + "ttl", + [string], { /** * Duration in seconds after the attribute value at which the document will @@ -186,7 +189,8 @@ export type EnsureTtlIndexOptions = EnsureIndexOptionsType< * Options for creating a MDI index. */ export type EnsureMdiIndexOptions = EnsureIndexOptionsType< - "mdi", string[], + "mdi", + string[], { /** * Data type of the dimension attributes. @@ -205,7 +209,8 @@ export type EnsureMdiIndexOptions = EnsureIndexOptionsType< * Options for creating an inverted index. */ export type EnsureInvertedIndexOptions = EnsureIndexOptionsType< - "inverted", (string | InvertedIndexFieldOptions)[], + "inverted", + (string | InvertedIndexFieldOptions)[], { /** * If set to `true` array values will by default be indexed using the same @@ -502,13 +507,16 @@ export type IndexDescription = /** * An object representing a system index. */ -export type SystemIndexDescription = - | PrimaryIndexDescription; +export type SystemIndexDescription = PrimaryIndexDescription; /** * Shared attributes of all index types. */ -export type IndexDescriptionType = { +export type IndexDescriptionType< + Type extends string, + Fields extends any[], + Extra extends {} = {}, +> = { /** * A unique name for this index. */ @@ -518,12 +526,12 @@ export type IndexDescriptionType { protected _id: string; protected _db: databases.Database; - protected _transformResponse?: (res: connection.ProcessedResponse) => Promise; + protected _transformResponse?: ( + res: connection.ProcessedResponse, + ) => Promise; protected _transformError?: (error: any) => Promise; protected _loaded: boolean = false; protected _result: ResultType | undefined; @@ -29,8 +31,10 @@ export class Job { constructor( db: databases.Database, id: string, - transformResponse?: (res: connection.ProcessedResponse) => Promise, - transformError?: (error: any) => Promise + transformResponse?: ( + res: connection.ProcessedResponse, + ) => Promise, + transformError?: (error: any) => Promise, ) { this._db = db; this._id = id; @@ -91,7 +95,7 @@ export class Job { method: "PUT", pathname: `/_api/job/${this._id}`, }, - false + false, ); } catch (e) { if (this._transformError) { @@ -121,7 +125,7 @@ export class Job { method: "PUT", pathname: `/_api/job/${this._id}/cancel`, }, - () => undefined + () => undefined, ); } @@ -134,7 +138,7 @@ export class Job { method: "DELETE", pathname: `/_api/job/${this._id}`, }, - () => undefined + () => undefined, ); } @@ -159,7 +163,7 @@ export class Job { { pathname: `/_api/job/${this._id}`, }, - (res) => res.status !== 204 + (res) => res.status !== 204, ); } } diff --git a/src/lib/util.ts b/src/lib/util.ts index 357215abb..3f6fa71a5 100644 --- a/src/lib/util.ts +++ b/src/lib/util.ts @@ -12,19 +12,19 @@ const THIRTY_MINUTES = 30 * 60_000; * * Helper to merge path segments. 
*/ -export function joinPath( - ...pathList: (string | undefined)[] -): string { +export function joinPath(...pathList: (string | undefined)[]): string { if (!pathList.length) return ""; - return pathList.flatMap((path, i) => { - if (!path) return []; - if (i === pathList.length - 1) { - if (i === 0) return [path]; - return [path.replace(/^\/+/, "")]; - } - if (i === 0) return [path.replace(/\/+$/, "")]; - return [path.replace(/^\/+|\/+$/, "")]; - }).join("/"); + return pathList + .flatMap((path, i) => { + if (!path) return []; + if (i === pathList.length - 1) { + if (i === 0) return [path]; + return [path.replace(/^\/+/, "")]; + } + if (i === 0) return [path.replace(/\/+$/, "")]; + return [path.replace(/^\/+|\/+$/, "")]; + }) + .join("/"); } /** @@ -33,13 +33,24 @@ export function joinPath( * Utility function for merging headers. */ export function mergeHeaders( - ...headersList: (Headers | string[][] | Record> | undefined)[] + ...headersList: ( + | Headers + | string[][] + | Record> + | undefined + )[] ) { if (!headersList.length) return new Headers(); return new Headers([ - ...headersList.flatMap(headers => headers ? [ - ...((headers instanceof Headers || Array.isArray(headers)) ? headers : new Headers(headers)) - ] : []), + ...headersList.flatMap((headers) => + headers + ? [ + ...(headers instanceof Headers || Array.isArray(headers) + ? headers + : new Headers(headers)), + ] + : [], + ), ]); } @@ -53,7 +64,7 @@ export function normalizeUrl(url: string): string { if (raw) url = (raw[1] === "tcp" ? "http" : "https") + raw[2]; const unix = url.match(/^(?:(http|https)\+)?unix:\/\/(\/.+)/); if (unix) url = `${unix[1] || "http"}://unix:${unix[2]}`; - else if (!url.endsWith('/')) url += '/'; + else if (!url.endsWith("/")) url += "/"; return url; } diff --git a/src/lib/x3-linkedlist.ts b/src/lib/x3-linkedlist.ts index 7678bdb0e..66a1cdd7f 100644 --- a/src/lib/x3-linkedlist.ts +++ b/src/lib/x3-linkedlist.ts @@ -51,7 +51,7 @@ export class LinkedListItem { /** *Function to run on unlink() call. Usually used by LinkedList to fix first and last pointers and reduce length. */ - protected unlinkCleanup?: (item: LinkedListItem) => void + protected unlinkCleanup?: (item: LinkedListItem) => void, ) {} /** @@ -60,7 +60,7 @@ export class LinkedListItem { */ public insertBehind( /** LinkListItem to be inserted behind this one */ - item: LinkedListItem + item: LinkedListItem, ): void { item.insertBefore(this); @@ -80,7 +80,7 @@ export class LinkedListItem { */ public unlink( /** If true, additionally removes the reference to the item before and behind */ - unchain = false + unchain = false, ): void { if (this.before) this.before.behind = this.behind; @@ -105,7 +105,7 @@ export class LinkedListItem { */ protected insertBefore( /** LinkListItem to be inserted before this one */ - before: LinkedListItem + before: LinkedListItem, ): void { this.before = before; if (!this.unlinkCleanup) { @@ -137,7 +137,7 @@ export class LinkedList { constructor( /** Values to be added initially into list */ - values?: Iterable | LinkedList + values?: Iterable | LinkedList, ) { if (values) { if (values instanceof LinkedList) values = values.values(); @@ -160,7 +160,7 @@ export class LinkedList { */ public clear( /** If `true`, remove link info from every item. Changes complexity to O(n)! 
*/ - unchain = false + unchain = false, ): void { if (unchain) { while (this.first) { @@ -181,7 +181,7 @@ export class LinkedList { /** Runs for every item in the LinkedList */ callback: (value: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): boolean { if (thisArg) { callback = callback.bind(thisArg); @@ -203,7 +203,7 @@ export class LinkedList { /** decides wether given element should be part of new LinkedList */ callback: (value: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): LinkedList { if (thisArg) { callback = callback.bind(thisArg); @@ -226,7 +226,7 @@ export class LinkedList { /** runs for every value in LinkedList. If it returns truthy, current value is returned. */ callback: (value: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): T | undefined { if (thisArg) { callback = callback.bind(thisArg); @@ -249,7 +249,7 @@ export class LinkedList { /** runs for every LinkedListItem in LinkedList. If it returns truthy, current LinkedListItem is returned. */ callback: (value: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): LinkedListItem | undefined { if (thisArg) { callback = callback.bind(thisArg); @@ -272,7 +272,7 @@ export class LinkedList { /** Gets every value in LinkedList once with corresponding LinkedListItem and LinkedList */ callback: (value: T, item: LinkedListItem, list: this) => void, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): void { if (thisArg) { callback = callback.bind(thisArg); @@ -291,7 +291,7 @@ export class LinkedList { /** value to be found in this */ value: T, /** Starting index. Supports negative values for which `this.size - 1 + fromIndex` will be used as starting point. */ - fromIndex = 0 + fromIndex = 0, ): boolean { let current = this.getItemByIndex(fromIndex); while (current) { @@ -311,7 +311,7 @@ export class LinkedList { /** Value to be found */ searchedValue: T, /** Index to start from */ - fromIndex = 0 + fromIndex = 0, ): LinkedListItem | undefined { let current = this.getItemByIndex(fromIndex); while (current) { @@ -331,7 +331,7 @@ export class LinkedList { /** Value to be found */ searchedValue: T, /** Index to start from */ - fromIndex = -1 + fromIndex = -1, ): LinkedListItem | undefined { let current = this.getItemByIndex(fromIndex); while (current) { @@ -351,7 +351,7 @@ export class LinkedList { /** Gets value, LinkedListeItem and LinkedList. 
The response will be used as value in the new LinkedList */ callback: (value: T, item: LinkedListItem, list: this) => V, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): LinkedList { if (thisArg) { callback = callback.bind(thisArg); @@ -378,8 +378,8 @@ export class LinkedList { accumulator: T, currentValue: T, currentItem: LinkedListItem, - list: this - ) => V + list: this, + ) => V, ): V; public reduce( /** @@ -390,25 +390,25 @@ export class LinkedList { accumulator: V, currentValue: T, currentItem: LinkedListItem, - list: this + list: this, ) => V, /** Value for the first call of callback */ - initialValue: V + initialValue: V, ): V; public reduce( callback: ( accumulator: V | T, currentValue: T, currentItem: LinkedListItem, - list: this + list: this, ) => V, - initialValue?: V | T + initialValue?: V | T, ): V | T { let current = this.first; if (!current) { if (!initialValue) { throw new TypeError( - "Empty accumulator on empty LinkedList is not allowed." + "Empty accumulator on empty LinkedList is not allowed.", ); } return initialValue; @@ -445,8 +445,8 @@ export class LinkedList { accumulator: T, currentValue: T, currentItem: LinkedListItem, - list: this - ) => V + list: this, + ) => V, ): V; public reduceRight( /** @@ -457,25 +457,25 @@ export class LinkedList { accumulator: V, currentValue: T, currentItem: LinkedListItem, - list: this + list: this, ) => V, /** Value for the first call of callback */ - initialValue: V + initialValue: V, ): V; public reduceRight( callback: ( accumulator: V | T, currentValue: T, currentItem: LinkedListItem, - list: this + list: this, ) => V, - initialValue?: V | T + initialValue?: V | T, ): V | T { let current = this.last; if (!current) { if (!initialValue) { throw new TypeError( - "Empty accumulator on empty LinkedList is not allowed." + "Empty accumulator on empty LinkedList is not allowed.", ); } return initialValue; @@ -503,7 +503,7 @@ export class LinkedList { /** called for every element. If response is truthy, this currentvalue will be returned by `.some()`. 
*/ callback: (currentValue: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): boolean { if (thisArg) { callback = callback.bind(thisArg); @@ -522,7 +522,7 @@ export class LinkedList { */ public join( /** separator between items in the resulting string */ - separator?: string + separator?: string, ): string { return [...this.values()].join(separator); } @@ -604,7 +604,7 @@ export class LinkedList { */ public remove( /** value to remove once */ - value: T + value: T, ): boolean { for (const item of this.keys()) { if (item.value === value) { @@ -621,7 +621,7 @@ export class LinkedList { */ public removeAllOccurrences( /** value to remove completely */ - value: T + value: T, ): boolean { let foundSomethingToDelete = false; @@ -704,7 +704,7 @@ export class LinkedList { */ private getItemByIndex( /** Index of item to get from list */ - index: number + index: number, ): LinkedListItem | undefined { if (index === undefined) { throw new Error("index must be a number!"); @@ -738,7 +738,7 @@ export class LinkedList { */ private unlinkCleanup = ( /** Item that has been unlinked */ - item: LinkedListItem + item: LinkedListItem, ): void => { if (this.first === item) { this.first = this.first.behind; diff --git a/src/logs.ts b/src/logs.ts index ea2bbca0f..f4f654196 100644 --- a/src/logs.ts +++ b/src/logs.ts @@ -101,4 +101,4 @@ export type LogEntries = { timestamp: number[]; text: string[]; }; -//#endregion \ No newline at end of file +//#endregion diff --git a/src/queries.ts b/src/queries.ts index b0769424b..04fe41a0b 100644 --- a/src/queries.ts +++ b/src/queries.ts @@ -510,4 +510,4 @@ export type UserFunctionDescription = { */ isDeterministic: boolean; }; -//#endregion \ No newline at end of file +//#endregion diff --git a/src/routes.ts b/src/routes.ts index 97c7c1f3c..c63f62c47 100644 --- a/src/routes.ts +++ b/src/routes.ts @@ -26,7 +26,7 @@ export class Route { constructor( db: databases.Database, pathname: string = "", - headers: Headers | Record = {} + headers: Headers | Record = {}, ) { if (!pathname) pathname = ""; else if (pathname.charAt(0) !== "/") pathname = `/${pathname}`; @@ -74,7 +74,7 @@ export class Route { return new Route( this._db, util.joinPath(this._pathname, pathname), - util.mergeHeaders(this._headers, headers) + util.mergeHeaders(this._headers, headers), ); } @@ -100,12 +100,15 @@ export class Route { */ request(options: connections.RequestOptions = {}) { const { method = "GET", pathname, headers, ...opts } = options; - return this._db.request({ - ...opts, - method: method.toUpperCase(), - pathname: util.joinPath(this._pathname, pathname), - headers: util.mergeHeaders(this._headers, headers), - }, false); + return this._db.request( + { + ...opts, + method: method.toUpperCase(), + pathname: util.joinPath(this._pathname, pathname), + headers: util.mergeHeaders(this._headers, headers), + }, + false, + ); } /** @@ -126,7 +129,7 @@ export class Route { delete( pathname: string, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; /** * Performs a DELETE request against the given path relative to this route @@ -145,7 +148,7 @@ export class Route { */ delete( search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; delete(...args: any[]): Promise { const pathname = typeof args[0] === "string" ? 
args.shift() : undefined; @@ -171,7 +174,7 @@ export class Route { get( pathname: string, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; /** * Performs a GET request against the given path relative to this route @@ -190,7 +193,7 @@ export class Route { */ get( search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; get(...args: any[]): Promise { const pathname = typeof args[0] === "string" ? args.shift() : undefined; @@ -216,7 +219,7 @@ export class Route { head( pathname: string, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; /** * Performs a HEAD request against the given path relative to this route @@ -235,7 +238,7 @@ export class Route { */ head( search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; head(...args: any[]): Promise { const pathname = typeof args[0] === "string" ? args.shift() : undefined; @@ -263,7 +266,7 @@ export class Route { pathname: string, body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; /** * Performs a PATCH request against the given path relative to this route @@ -286,7 +289,7 @@ export class Route { patch( body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; patch(...args: any[]): Promise { const pathname = typeof args[0] === "string" ? args.shift() : undefined; @@ -317,7 +320,7 @@ export class Route { pathname: string, body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; /** * Performs a POST request against the given path relative to this route @@ -343,7 +346,7 @@ export class Route { post( body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; post(...args: any[]): Promise { const pathname = typeof args[0] === "string" ? args.shift() : undefined; @@ -371,7 +374,7 @@ export class Route { pathname: string, body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; /** * Performs a PUT request against the given path relative to this route @@ -394,7 +397,7 @@ export class Route { put( body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record + headers?: Headers | Record, ): Promise; put(...args: any[]): Promise { const pathname = typeof args[0] === "string" ? args.shift() : undefined; diff --git a/src/services.ts b/src/services.ts index 6e14f0a4f..0a646b332 100644 --- a/src/services.ts +++ b/src/services.ts @@ -232,14 +232,14 @@ export type ServiceConfiguration = { * by software when managing the service. */ type: - | "integer" - | "boolean" - | "string" - | "number" - | "json" - | "password" - | "int" - | "bool"; + | "integer" + | "boolean" + | "string" + | "number" + | "json" + | "password" + | "int" + | "bool"; /** * Current value of the configuration option as stored internally. 
*/ @@ -436,10 +436,10 @@ export type ServiceTestXunitReport = [ export type ServiceTestXunitTest = | ["testcase", { classname: string; name: string; time: number }] | [ - "testcase", - { classname: string; name: string; time: number }, - ["failure", { message: string; type: string }, string], - ]; + "testcase", + { classname: string; name: string; time: number }, + ["failure", { message: string; type: string }, string], + ]; /** * Test results for a Foxx service's tests in TAP format. @@ -535,4 +535,4 @@ export type ServiceDescription = { dependencies: Record; }; }; -//#endregion \ No newline at end of file +//#endregion diff --git a/src/test/00-basics.ts b/src/test/00-basics.ts index 43de2a9b9..cf7d4d695 100644 --- a/src/test/00-basics.ts +++ b/src/test/00-basics.ts @@ -39,7 +39,7 @@ describe("Configuring the driver", () => { "x-one": "1", "x-two": "2", }, - } + }, }); (db as any)._connection._hosts = [ { @@ -48,10 +48,10 @@ describe("Configuring the driver", () => { expect(headers.get("x-two")).to.equal("2"); done(); }, - close: () => { }, + close: () => {}, }, ]; - db.request({ headers: {} }, () => { }); + db.request({ headers: {} }, () => {}); }); }); describe("with an arangoVersion", () => { @@ -63,10 +63,10 @@ describe("Configuring the driver", () => { expect(headers.get("x-arango-version")).to.equal("99999"); done(); }, - close: () => { }, + close: () => {}, }, ]; - db.request({ headers: {} }, () => { }); + db.request({ headers: {} }, () => {}); }); }); }); diff --git a/src/test/01-manipulating-databases.ts b/src/test/01-manipulating-databases.ts index 93eccf81d..fb3e84645 100644 --- a/src/test/01-manipulating-databases.ts +++ b/src/test/01-manipulating-databases.ts @@ -77,7 +77,7 @@ describe("Manipulating databases", function () { after(async () => { try { await system.dropDatabase(name); - } catch { } + } catch {} }); it("deletes the given database from the server", async () => { await system.dropDatabase(name); diff --git a/src/test/02-accessing-collections.ts b/src/test/02-accessing-collections.ts index 2217ff2f0..b3b119ca0 100644 --- a/src/test/02-accessing-collections.ts +++ b/src/test/02-accessing-collections.ts @@ -34,7 +34,7 @@ describe("Accessing collections", function () { }); describe("database.listCollections", () => { const nonSystemCollectionNames = range(4).map( - (i) => `c_${Date.now()}_${i}` + (i) => `c_${Date.now()}_${i}`, ); const systemCollectionNames = range(4).map((i) => `_c_${Date.now()}_${i}`); before(async () => { @@ -43,7 +43,7 @@ describe("Accessing collections", function () { const collection = await db.createCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ...systemCollectionNames.map(async (name) => { @@ -51,7 +51,7 @@ describe("Accessing collections", function () { await collection.create({ isSystem: true }); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ] as Promise[]); @@ -60,7 +60,7 @@ describe("Accessing collections", function () { await Promise.all([ ...nonSystemCollectionNames.map((name) => db.collection(name).drop()), ...systemCollectionNames.map((name) => - db.collection(name).drop({ isSystem: true }) + db.collection(name).drop({ isSystem: true }), ), ]); }); @@ -68,7 +68,7 @@ describe("Accessing collections", function () { const collections = await db.listCollections(); expect(collections.length).to.equal(nonSystemCollectionNames.length); expect(collections.map((c: any) => c.name).sort()).to.eql( - 
nonSystemCollectionNames + nonSystemCollectionNames, ); }); it("includes system collections if explicitly passed false", async () => { @@ -79,13 +79,13 @@ describe("Accessing collections", function () { .sort(); expect(collections.length).to.be.at.least(allCollectionNames.length); expect(collections.map((c: any) => c.name).sort()).to.eql( - allCollectionNames + allCollectionNames, ); }); }); describe("database.collections", () => { const documentCollectionNames = range(4).map( - (i) => `dc_${Date.now()}_${i}` + (i) => `dc_${Date.now()}_${i}`, ); const edgeCollectionNames = range(4).map((i) => `ec_${Date.now()}_${i}`); const systemCollectionNames = range(4).map((i) => `_c_${Date.now()}_${i}`); @@ -95,14 +95,14 @@ describe("Accessing collections", function () { const collection = await db.createCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ...systemCollectionNames.map(async (name) => { @@ -110,7 +110,7 @@ describe("Accessing collections", function () { await collection.create({ isSystem: true }); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ] as Promise[]); @@ -120,17 +120,17 @@ describe("Accessing collections", function () { ...documentCollectionNames.map((name) => db.collection(name).drop()), ...edgeCollectionNames.map((name) => db.collection(name).drop()), ...systemCollectionNames.map((name) => - db.collection(name).drop({ isSystem: true }) + db.collection(name).drop({ isSystem: true }), ), ]); }); it("creates Collection instances", async () => { const collections = await db.collections(); expect(collections.length).to.equal( - documentCollectionNames.length + edgeCollectionNames.length + documentCollectionNames.length + edgeCollectionNames.length, ); expect(collections.map((c) => c.name).sort()).to.eql( - [...documentCollectionNames, ...edgeCollectionNames].sort() + [...documentCollectionNames, ...edgeCollectionNames].sort(), ); }); it("includes system collections if explicitly passed false", async () => { @@ -142,7 +142,7 @@ describe("Accessing collections", function () { ...builtinSystemCollections, ].sort(); expect(collections.map((c: any) => c.name).sort()).to.eql( - allCollectionNames + allCollectionNames, ); }); }); diff --git a/src/test/03-accessing-graphs.ts b/src/test/03-accessing-graphs.ts index 249d78bef..849016241 100644 --- a/src/test/03-accessing-graphs.ts +++ b/src/test/03-accessing-graphs.ts @@ -39,14 +39,14 @@ describe("Accessing graphs", function () { const collection = await db.createCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ] as Promise[]); @@ -58,11 +58,11 @@ describe("Accessing graphs", function () { collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); await db.waitForPropagation( { pathname: `/_api/gharial/${graph.name}` }, - 10000 + 10000, ); }), ]); @@ -72,7 +72,7 @@ describe("Accessing graphs", function () { await Promise.all( vertexCollectionNames .concat(edgeCollectionNames) - .map((name) => 
db.collection(name).drop()) + .map((name) => db.collection(name).drop()), ); }); it("fetches information about all graphs", async () => { @@ -91,14 +91,14 @@ describe("Accessing graphs", function () { const collection = await db.createCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ] as Promise[]); @@ -110,11 +110,11 @@ describe("Accessing graphs", function () { collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); await db.waitForPropagation( { pathname: `/_api/gharial/${graph.name}` }, - 10000 + 10000, ); }), ]); @@ -124,7 +124,7 @@ describe("Accessing graphs", function () { await Promise.all( vertexCollectionNames .concat(edgeCollectionNames) - .map((name) => db.collection(name).drop()) + .map((name) => db.collection(name).drop()), ); }); it("creates Graph instances", async () => { diff --git a/src/test/04-transactions.ts b/src/test/04-transactions.ts index d853e2afe..f97bf3132 100644 --- a/src/test/04-transactions.ts +++ b/src/test/04-transactions.ts @@ -27,7 +27,7 @@ describe("Transactions", () => { const result = await db.executeTransaction( [], "function (params) {return params;}", - { params: "test" } + { params: "test" }, ); expect(result).to.equal("test"); }); @@ -44,8 +44,8 @@ describe("Transactions", () => { after(async () => { await Promise.all( allTransactions.map((transaction) => - transaction.abort().catch(() => undefined) - ) + transaction.abort().catch(() => undefined), + ), ); await system.dropDatabase(name); }); @@ -53,7 +53,7 @@ describe("Transactions", () => { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); afterEach(async () => { @@ -127,7 +127,7 @@ describe("Transactions", () => { let doc: any; try { doc = await collection.document("test"); - } catch (e: any) { } + } catch (e: any) {} if (doc) expect.fail("Document should not exist yet."); const { id, status } = await trx.commit(); expect(id).to.equal(trx.id); @@ -142,11 +142,11 @@ describe("Transactions", () => { let doc: any; try { doc = await collection.document("test1"); - } catch (e: any) { } + } catch (e: any) {} if (doc) expect.fail("Document should not exist yet."); try { doc = await collection.document("test2"); - } catch (e: any) { } + } catch (e: any) {} if (doc) expect.fail("Document should not exist yet."); const { id, status } = await trx.commit(); expect(id).to.equal(trx.id); @@ -164,7 +164,7 @@ describe("Transactions", () => { let doc: any; try { doc = await collection.document("test"); - } catch (e: any) { } + } catch (e: any) {} if (doc) expect.fail("Document should not exist yet."); }); diff --git a/src/test/05-aql-helpers.ts b/src/test/05-aql-helpers.ts index f65ab3e34..8c2edc306 100644 --- a/src/test/05-aql-helpers.ts +++ b/src/test/05-aql-helpers.ts @@ -20,10 +20,10 @@ describe("AQL helpers", function () { ]; const query = aql`A ${values[0]} B ${values[1]} C ${values[2]} D ${values[3]} E ${values[4]} F ${values[5]} G ${values[6]} H ${values[7]} I ${values[8]} J ${values[9]} K EOF`; expect(query.query).to.equal( - `A @value0 B @value1 C @value2 D @value3 E @value4 F @value5 G @value6 H @value7 I @value8 J @value9 K EOF` + `A @value0 B @value1 C @value2 D 
@value3 E @value4 F @value5 G @value6 H @value7 I @value8 J @value9 K EOF`, ); const bindVarNames = Object.keys(query.bindVars).sort((a, b) => - +a.substr(5) > +b.substr(5) ? 1 : -1 + +a.substr(5) > +b.substr(5) ? 1 : -1, ); expect(bindVarNames).to.eql([ "value0", @@ -41,7 +41,7 @@ describe("AQL helpers", function () { }); it("omits undefined bindvars and empty queries", () => { const query = aql`A ${undefined} B ${aql``} C ${join([])} D ${literal( - "" + "", )} E`; expect(query.query).to.equal("A B C D E"); expect(query.bindVars).to.eql({}); @@ -111,20 +111,20 @@ describe("AQL helpers", function () { it("supports nesting simple queries", () => { const query = aql`FOR x IN (${aql`FOR a IN 1..3 RETURN a`}) RETURN x`; expect(query.query).to.equal( - "FOR x IN (FOR a IN 1..3 RETURN a) RETURN x" + "FOR x IN (FOR a IN 1..3 RETURN a) RETURN x", ); }); it("supports deeply nesting simple queries", () => { const query = aql`FOR x IN (${aql`FOR a IN (${aql`FOR b IN 1..3 RETURN b`}) RETURN a`}) RETURN x`; expect(query.query).to.equal( - "FOR x IN (FOR a IN (FOR b IN 1..3 RETURN b) RETURN a) RETURN x" + "FOR x IN (FOR a IN (FOR b IN 1..3 RETURN b) RETURN a) RETURN x", ); }); it("supports nesting with bindVars", () => { const collection = db.collection("paprika"); const query = aql`A ${collection} B ${aql`X ${collection} Y ${aql`J ${collection} K ${9} L`} Z`} C ${4}`; expect(query.query).to.equal( - "A @@value0 B X @@value0 Y J @@value0 K @value1 L Z C @value2" + "A @@value0 B X @@value0 Y J @@value0 K @value1 L Z C @value2", ); expect(query.bindVars).to.eql({ "@value0": "paprika", @@ -138,7 +138,7 @@ describe("AQL helpers", function () { const filter = aql`FILTER u.role == ${role}`; const query = aql`FOR u IN ${users} ${filter} RETURN u`; expect(query.query).to.equal( - "FOR u IN @@value0 FILTER u.role == @value1 RETURN u" + "FOR u IN @@value0 FILTER u.role == @value1 RETURN u", ); expect(query.bindVars).to.eql({ "@value0": users.name, @@ -153,7 +153,7 @@ describe("AQL helpers", function () { it("supports deep nesting", () => { const query = aql`A ${1} ${aql`a ${2} ${aql`X ${3} ${aql`x ${4} y`} ${5} Y`} ${6} b`} ${7} B`; expect(query.query).to.equal( - "A @value0 a @value1 X @value2 x @value3 y @value4 Y @value5 b @value6 B" + "A @value0 a @value1 X @value2 x @value3 y @value4 Y @value5 b @value6 B", ); expect(query.bindVars).to.eql({ value0: 1, @@ -185,7 +185,7 @@ describe("AQL helpers", function () { ]; for (const [value, result] of pairs) { it(`returns an AQL literal of "${result}" for ${String( - JSON.stringify(value) + JSON.stringify(value), )}`, () => { expect(literal(value).toAQL()).to.equal(result); }); diff --git a/src/test/06-managing-functions.ts b/src/test/06-managing-functions.ts index 19785aed3..4e8f30dcb 100644 --- a/src/test/06-managing-functions.ts +++ b/src/test/06-managing-functions.ts @@ -41,7 +41,7 @@ describe("Managing functions", function () { it("should create a function", async () => { const info = await db.createUserFunction( "myfunctions::temperature::celsiustofahrenheit2", - "function (celsius) { return celsius * 1.8 + 32; }" + "function (celsius) { return celsius * 1.8 + 32; }", ); expect(info).to.have.property("code", 201); expect(info).to.have.property("error", false); @@ -52,7 +52,7 @@ describe("Managing functions", function () { const name = "myfunctions::temperature::celsiustofahrenheit"; await db.createUserFunction( name, - "function (celsius) { return celsius * 1.8 + 32; }" + "function (celsius) { return celsius * 1.8 + 32; }", ); const info = await 
db.dropUserFunction(name); expect(info).to.have.property("deletedCount", 1); diff --git a/src/test/07-routes.ts b/src/test/07-routes.ts index ab12e49e2..12853c770 100644 --- a/src/test/07-routes.ts +++ b/src/test/07-routes.ts @@ -39,7 +39,7 @@ describe("Route API", function () { collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); after(async () => { diff --git a/src/test/08-cursors.ts b/src/test/08-cursors.ts index dabdb0162..6d1ccba36 100644 --- a/src/test/08-cursors.ts +++ b/src/test/08-cursors.ts @@ -28,7 +28,7 @@ describe("Item-wise Cursor API", () => { }); after(async () => { await Promise.all( - allCursors.map((cursor) => cursor.kill().catch(() => undefined)) + allCursors.map((cursor) => cursor.kill().catch(() => undefined)), ); try { await system.dropDatabase(name); @@ -199,7 +199,7 @@ describe("Item-wise Cursor API", () => { .reduce((acc, next) => { acc.push(...next); return acc; - }, [] as number[]) + }, [] as number[]), ); }); it("doesn't choke on non-arrays", async () => { @@ -252,7 +252,7 @@ describe("Batch-wise Cursor API", () => { }); after(async () => { await Promise.all( - allCursors.map((cursor) => cursor.kill().catch(() => undefined)) + allCursors.map((cursor) => cursor.kill().catch(() => undefined)), ); try { await system.dropDatabase(name); @@ -395,7 +395,7 @@ describe("Batch-wise Cursor API", () => { .reduce((acc, next) => { acc.push(...next); return acc; - }, [] as number[]) + }, [] as number[]), ); }); it("doesn't choke on non-arrays", async () => { diff --git a/src/test/09-collection-metadata.ts b/src/test/09-collection-metadata.ts index 3317ed738..bbbe7fe64 100644 --- a/src/test/09-collection-metadata.ts +++ b/src/test/09-collection-metadata.ts @@ -18,7 +18,7 @@ describe("Collection metadata", function () { collection = await db.createCollection(collectionName); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); after(async () => { diff --git a/src/test/10-manipulating-collections.ts b/src/test/10-manipulating-collections.ts index 3de70f40d..f53908c86 100644 --- a/src/test/10-manipulating-collections.ts +++ b/src/test/10-manipulating-collections.ts @@ -24,7 +24,7 @@ describe("Manipulating collections", function () { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); afterEach(async () => { @@ -38,11 +38,11 @@ describe("Manipulating collections", function () { describe("collection.create", () => { it("creates a new document collection", async () => { const collection = await db.createCollection( - `document-collection-${Date.now()}` + `document-collection-${Date.now()}`, ); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); const info = await db.collection(collection.name).get(); expect(info).to.have.property("name", collection.name); @@ -52,11 +52,11 @@ describe("Manipulating collections", function () { }); it("creates a new edge collection", async () => { const collection = await db.createEdgeCollection( - `edge-collection-${Date.now()}` + `edge-collection-${Date.now()}`, ); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); const info = await db.collection(collection.name).get(); expect(info).to.have.property("name", collection.name); diff --git 
a/src/test/11-managing-indexes.ts b/src/test/11-managing-indexes.ts index 1ddeea9f4..f3ca332d8 100644 --- a/src/test/11-managing-indexes.ts +++ b/src/test/11-managing-indexes.ts @@ -19,7 +19,7 @@ describe("Managing indexes", function () { collection = await db.createCollection(collectionName); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); after(async () => { diff --git a/src/test/13-bulk-imports.ts b/src/test/13-bulk-imports.ts index fbed560d6..31494d2b5 100644 --- a/src/test/13-bulk-imports.ts +++ b/src/test/13-bulk-imports.ts @@ -17,7 +17,7 @@ describe("Bulk imports", function () { collection = await db.createCollection(collectionName); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); after(async () => { @@ -77,7 +77,7 @@ describe("Bulk imports", function () { }); it("should accept buffer of LDJSON arrays", async () => { const data = Buffer.from( - '["_key", "data"]\r\n["tb1", "banana"]\r\n["tb2", "peach"]\r\n["tb3", "apricot"]\r\n' + '["_key", "data"]\r\n["tb1", "banana"]\r\n["tb2", "peach"]\r\n["tb3", "apricot"]\r\n', ); const info = await collection.import(data); expect(info).to.eql({ @@ -106,7 +106,7 @@ describe("Bulk imports", function () { }); it("should accept buffer of LDJSON documents", async () => { const data = Buffer.from( - `{"_key": "db1-${type}", "data": "banana"}\r\n{"_key": "db2-${type}", "data": "peach"}\r\n{"_key": "db3-${type}", "data": "apricot"}\r\n` + `{"_key": "db1-${type}", "data": "banana"}\r\n{"_key": "db2-${type}", "data": "peach"}\r\n{"_key": "db3-${type}", "data": "apricot"}\r\n`, ); const info = await collection.import(data, { type }); expect(info).to.eql({ @@ -144,7 +144,7 @@ describe("Bulk imports", function () { { _key: `jb1-${String(type)}`, data: "banana" }, { _key: `jb2-${String(type)}`, data: "peach" }, { _key: `jb3-${String(type)}`, data: "apricot" }, - ]) + ]), ); const info = await collection.import(data, { type }); expect(info).to.eql({ diff --git a/src/test/14-document-collections.ts b/src/test/14-document-collections.ts index 4bd44bf0e..9ebede187 100644 --- a/src/test/14-document-collections.ts +++ b/src/test/14-document-collections.ts @@ -25,7 +25,7 @@ describe("DocumentCollection API", function () { collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); afterEach(async () => { @@ -177,7 +177,7 @@ describe("DocumentCollection API", function () { await collection.update( doc, { sup: "dawg", empty: null }, - { keepNull: false } + { keepNull: false }, ); const newData = await collection.document(doc._key); expect(newData).to.have.property("potato").that.equals(doc.potato); diff --git a/src/test/15-edge-collections.ts b/src/test/15-edge-collections.ts index 2dd3c5f5f..5a8811ed0 100644 --- a/src/test/15-edge-collections.ts +++ b/src/test/15-edge-collections.ts @@ -29,7 +29,7 @@ describe("EdgeCollection API", function () { collection = await db.createEdgeCollection(`c_${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); afterEach(async () => { @@ -110,7 +110,7 @@ describe("EdgeCollection API", function () { "_id", "_rev", "_from", - "_to" + "_to", ); expect(doc._id).to.equal(meta._id); expect(doc._key).to.equal(meta._key); @@ -138,7 +138,7 @@ describe("EdgeCollection API", function () { "_id", "_rev", "_from", - "_to" + "_to", ); 
expect(doc._id).to.equal(meta._id); expect(doc._rev).to.equal(meta._rev); @@ -163,7 +163,7 @@ describe("EdgeCollection API", function () { "_id", "_rev", "_from", - "_to" + "_to", ); expect(doc.something).to.equal(data.something); expect(doc._id).to.equal(meta._id); @@ -216,7 +216,7 @@ describe("EdgeCollection API", function () { await collection.update( doc, { more: "peanuts", empty: null }, - { keepNull: false } + { keepNull: false }, ); const newData = await collection.document(doc._key); expect(newData).to.have.property("something", doc.something); diff --git a/src/test/16-graphs.ts b/src/test/16-graphs.ts index e60019cac..8cbba1800 100644 --- a/src/test/16-graphs.ts +++ b/src/test/16-graphs.ts @@ -13,14 +13,14 @@ async function createCollections(db: Database) { const collection = await db.createCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ] as Promise[]); @@ -30,14 +30,14 @@ async function createCollections(db: Database) { async function createGraph( graph: Graph, vertexCollectionNames: string[], - edgeCollectionNames: string[] + edgeCollectionNames: string[], ) { return await graph.create( edgeCollectionNames.map((name) => ({ collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); } @@ -69,7 +69,7 @@ describe("Graph API", function () { after(async () => { await graph.drop(); await Promise.all( - collectionNames.map((name) => db.collection(name).drop()) + collectionNames.map((name) => db.collection(name).drop()), ); }); it("fetches information about the graph", async () => { @@ -90,12 +90,12 @@ describe("Graph API", function () { ...edgeCollectionNames.map(async (name) => { try { await graph.removeEdgeDefinition(name, true); - } catch { } + } catch {} }), ...vertexCollectionNames.map(async (name) => { try { await graph.removeVertexCollection(name, true); - } catch { } + } catch {} }), ]); }); @@ -106,11 +106,11 @@ describe("Graph API", function () { collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); await db.waitForPropagation( { pathname: `/_api/gharial/${graph.name}` }, - 10000 + 10000, ); const data = await graph.get(); expect(data).to.have.property("name", graph.name); @@ -132,8 +132,8 @@ describe("Graph API", function () { db .collection(name) .drop() - .catch(() => null) - ) + .catch(() => null), + ), ); }); it("destroys the graph if not passed true", async () => { diff --git a/src/test/17-graph-vertices.ts b/src/test/17-graph-vertices.ts index fa884e4c4..f83c786a9 100644 --- a/src/test/17-graph-vertices.ts +++ b/src/test/17-graph-vertices.ts @@ -15,14 +15,14 @@ async function createCollections(db: Database) { const collection = await db.createCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }), ] as Promise[]); @@ -32,18 +32,18 @@ async function createCollections(db: Database) { async function createGraph( graph: Graph, vertexCollectionNames: string[], - edgeCollectionNames: string[] + edgeCollectionNames: string[], ) { const result = await 
graph.create( edgeCollectionNames.map((name) => ({ collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); await graph.database.waitForPropagation( { pathname: `/_api/gharial/${graph.name}` }, - 10000 + 10000, ); return result; } @@ -75,7 +75,7 @@ describe("Manipulating graph vertices", function () { afterEach(async () => { await graph.drop(); await Promise.all( - collectionNames.map((name) => db.collection(name).drop()) + collectionNames.map((name) => db.collection(name).drop()), ); }); describe("graph.vertexCollection", () => { @@ -92,7 +92,7 @@ describe("Manipulating graph vertices", function () { vertexCollection = await db.createCollection(`xc_${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${vertexCollection.name}` }, - 10000 + 10000, ); }); afterEach(async () => { @@ -109,7 +109,7 @@ describe("Manipulating graph vertices", function () { vertexCollection = await db.createCollection(`xc_${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${vertexCollection.name}` }, - 10000 + 10000, ); await graph.addVertexCollection(vertexCollection.name); }); @@ -121,7 +121,7 @@ describe("Manipulating graph vertices", function () { it("destroys the collection if explicitly passed true", async () => { const data = await graph.removeVertexCollection( vertexCollection.name, - true + true, ); expect(data.orphanCollections).not.to.contain(vertexCollection.name); try { diff --git a/src/test/18-graph-edges.ts b/src/test/18-graph-edges.ts index 1c412813b..de99a5356 100644 --- a/src/test/18-graph-edges.ts +++ b/src/test/18-graph-edges.ts @@ -31,7 +31,10 @@ describe("Manipulating graph edges", function () { to: ["person"], }, ]); - await db.waitForPropagation({ pathname: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation( + { pathname: `/_api/gharial/${graph.name}` }, + 10000, + ); }); afterEach(async () => { await graph.drop(); @@ -45,13 +48,13 @@ describe("Manipulating graph edges", function () { expect(info.edgeDefinitions.map((e) => e.collection)).to.contain("knows"); expect(info.edgeDefinitions.length).to.equal(1); const edgeDefinition = info.edgeDefinitions.filter( - (e) => e.collection === "knows" + (e) => e.collection === "knows", ); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]), ).to.contain("person"); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]), ).to.contain("person"); }); }); @@ -90,17 +93,17 @@ describe("Manipulating graph edges", function () { expect(info).to.have.property("edgeDefinitions"); expect(info.edgeDefinitions).to.be.instanceOf(Array); expect(info.edgeDefinitions.map((e) => e.collection)).to.contain( - "works_in" + "works_in", ); expect(info.edgeDefinitions.length).to.equal(2); const edgeDefinition = info.edgeDefinitions.filter( - (e) => e.collection === "works_in" + (e) => e.collection === "works_in", ); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]), ).to.contain("person"); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]), ).to.contain("city"); }); }); @@ -117,13 +120,13 @@ describe("Manipulating graph edges", function () { expect(info.edgeDefinitions.map((e) => 
e.collection)).to.contain("knows"); expect(info.edgeDefinitions.length).to.equal(1); const edgeDefinition = info.edgeDefinitions.filter( - (e) => e.collection === "knows" + (e) => e.collection === "knows", ); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]), ).to.contain("person"); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]), ).to.contain("city"); }); }); diff --git a/src/test/19-graph-vertex-collections.ts b/src/test/19-graph-vertex-collections.ts index a70386237..1340d50ed 100644 --- a/src/test/19-graph-vertex-collections.ts +++ b/src/test/19-graph-vertex-collections.ts @@ -22,7 +22,10 @@ describe("GraphVertexCollection API", function () { to: ["person"], }, ]); - await db.waitForPropagation({ pathname: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation( + { pathname: `/_api/gharial/${graph.name}` }, + 10000, + ); collection = graph.vertexCollection("person"); }); after(async () => { @@ -132,7 +135,7 @@ describe("GraphVertexCollection API", function () { await collection.update( doc, { sup: "dawg", empty: null }, - { keepNull: false } + { keepNull: false }, ); const newData = await collection.vertex(doc._key); expect(newData).to.have.property("potato").that.equals(doc.potato); diff --git a/src/test/20-graph-edge-collections.ts b/src/test/20-graph-edge-collections.ts index 089f7e8ff..71310f218 100644 --- a/src/test/20-graph-edge-collections.ts +++ b/src/test/20-graph-edge-collections.ts @@ -22,7 +22,10 @@ describe("GraphEdgeCollection API", function () { to: ["person"], }, ]); - await db.waitForPropagation({ pathname: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation( + { pathname: `/_api/gharial/${graph.name}` }, + 10000, + ); collection = graph.edgeCollection("knows"); await graph .vertexCollection("person") @@ -163,7 +166,7 @@ describe("GraphEdgeCollection API", function () { await collection.update( doc, { sup: "dawg", empty: null }, - { keepNull: false } + { keepNull: false }, ); const newData = await collection.edge(doc._key); expect(newData).to.have.property("potato", doc.potato); diff --git a/src/test/22-foxx-api.ts b/src/test/22-foxx-api.ts index cd1f397c8..51e9c0606 100644 --- a/src/test/22-foxx-api.ts +++ b/src/test/22-foxx-api.ts @@ -23,9 +23,9 @@ describe("Foxx service", () => { serviceServiceMount, new Blob([ fs.readFileSync( - path.resolve("fixtures", "service-service-service.zip") + path.resolve("fixtures", "service-service-service.zip"), ), - ]) + ]), ); arangoPaths = (await db.route(serviceServiceMount).get()).parsedBody; }); @@ -33,7 +33,7 @@ describe("Foxx service", () => { after(async () => { try { await db.uninstallService(serviceServiceMount, { force: true }); - } catch (e: any) { } + } catch (e: any) {} try { await system.dropDatabase(name); } finally { @@ -44,7 +44,7 @@ describe("Foxx service", () => { afterEach(async () => { try { await db.uninstallService(mount, { force: true }); - } catch (e: any) { } + } catch (e: any) {} }); const cases = [ @@ -65,7 +65,7 @@ describe("Foxx service", () => { source: () => new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.js") + path.resolve(localAppsPath, "minimal-working-service.js"), ), ]), }, @@ -74,7 +74,7 @@ describe("Foxx service", () => { source: () => new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") 
+ path.resolve(localAppsPath, "minimal-working-service.zip"), ), ]), }, @@ -92,7 +92,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "itzpapalotl.zip")), - ]) + ]), ); await db.replaceService(mount, c.source(arangoPaths)); const resp = await db.route(mount).get(); @@ -104,7 +104,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "itzpapalotl.zip")), - ]) + ]), ); await db.upgradeService(mount, c.source(arangoPaths)); const resp = await db.route(mount).get(); @@ -117,15 +117,15 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); await db.uninstallService(mount); try { await db.route(mount).get(); expect.fail(); - } catch (e: any) { } + } catch (e: any) {} }); it("empty configuration should be available", async () => { @@ -133,9 +133,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceConfiguration(mount); expect(resp).to.eql({}); @@ -146,9 +146,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceConfiguration(mount, true); expect(resp).to.eql({}); @@ -159,7 +159,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const resp = await db.getServiceConfiguration(mount); expect(resp).to.have.property("test1"); @@ -173,7 +173,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const resp = await db.getServiceConfiguration(mount, true); expect(resp).to.have.eql({}); @@ -184,7 +184,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const updateResp = await db.updateServiceConfiguration(mount, { test1: "test", @@ -207,14 +207,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const updateResp = await db.updateServiceConfiguration( mount, { test1: "test", }, - true + true, ); expect(updateResp).to.have.property("values"); expect(updateResp.values).to.have.property("test1", "test"); @@ -230,7 +230,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceConfiguration(mount, { test1: "test", @@ -253,14 +253,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceConfiguration( mount, { test1: "test", }, - true + true, ); expect(replaceResp).to.have.property("values"); expect(replaceResp.values).to.have.property("test1", "test"); @@ -277,7 +277,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); await db.replaceServiceConfiguration(mount, { test2: "test2" }); await 
db.updateServiceConfiguration(mount, { test1: "test1" }); @@ -293,7 +293,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); await db.replaceServiceConfiguration(mount, { test2: "test2" }, true); await db.updateServiceConfiguration(mount, { test1: "test1" }, true); @@ -307,7 +307,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); await db.updateServiceConfiguration(mount, { test2: "test2" }); await db.replaceServiceConfiguration(mount, { test1: "test" }); @@ -323,7 +323,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); await db.updateServiceConfiguration(mount, { test2: "test2" }, true); await db.replaceServiceConfiguration(mount, { test1: "test" }, true); @@ -337,9 +337,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceDependencies(mount); expect(resp).to.eql({}); @@ -350,9 +350,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceDependencies(mount, true); expect(resp).to.eql({}); @@ -363,7 +363,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const resp = await db.getServiceDependencies(mount); expect(resp).to.have.property("test1"); @@ -377,7 +377,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const resp = await db.getServiceDependencies(mount, true); expect(resp).to.eql({}); @@ -388,7 +388,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const updateResp = await db.updateServiceDependencies(mount, { test1: "/test", @@ -411,14 +411,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const updateResp = await db.updateServiceDependencies( mount, { test1: "/test", }, - true + true, ); expect(updateResp).to.have.property("values"); expect(updateResp.values).to.have.property("test1", "/test"); @@ -434,7 +434,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceDependencies(mount, { test1: "/test", @@ -457,14 +457,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceDependencies( mount, { test1: "/test", }, - true + true, ); expect(replaceResp).to.have.property("values"); expect(replaceResp.values).to.have.property("test1", "/test"); @@ -481,7 +481,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceDependencies(mount, { test2: "/test2", @@ -511,12 +511,12 @@ describe("Foxx service", () => { 
mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceDependencies( mount, { test2: "/test2" }, - true + true, ); expect(replaceResp).to.have.property("values"); expect(replaceResp.values).to.have.property("test2", "/test2"); @@ -528,7 +528,7 @@ describe("Foxx service", () => { { test1: "/test1", }, - true + true, ); expect(updateResp).to.have.property("values"); expect(updateResp.values).to.have.property("test1", "/test1"); @@ -543,7 +543,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const updateResp = await db.updateServiceDependencies(mount, { test2: "/test2", @@ -575,14 +575,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const updateResp = await db.updateServiceDependencies( mount, { test2: "/test2", }, - true + true, ); expect(updateResp).to.have.property("values"); expect(updateResp.values).to.not.have.property("test1"); @@ -593,7 +593,7 @@ describe("Foxx service", () => { { test1: "/test1", }, - true + true, ); expect(replaceResp).to.have.property("values"); expect(replaceResp.values).to.have.property("test1", "/test1"); @@ -610,9 +610,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.downloadService(mount); expect(resp).to.be.instanceof(Blob); @@ -623,9 +623,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const services = await db.listServices(); expect(services).to.be.instanceOf(Array); @@ -637,9 +637,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const services = await db.listServices(); const service = services.find((service) => service.mount === mount)!; @@ -656,9 +656,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const service = await db.getService(mount); expect(service).to.have.property("mount", mount); @@ -679,9 +679,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-setup-teardown.zip") + path.resolve(localAppsPath, "minimal-working-setup-teardown.zip"), ), - ]) + ]), ); const scripts = await db.getServiceScripts(mount); expect(scripts).to.have.property("setup", "Setup"); @@ -693,9 +693,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-setup-teardown.zip") + path.resolve(localAppsPath, "minimal-working-setup-teardown.zip"), ), - ]) + ]), ); const col = `${mount}_setup_teardown`.replace(/\//, "").replace(/-/g, "_"); expect(await db.collection(col).get()).to.be.instanceOf(Object); @@ -714,7 +714,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "echo-script.zip")), - ]) + ]), ); try { await db.runServiceScript(mount, "no", {}); @@ -730,7 
+730,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "echo-script.zip")), - ]) + ]), ); const argv = { hello: "world" }; const resp = await db.runServiceScript(mount, "echo", argv); @@ -742,7 +742,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "echo-script.zip")), - ]) + ]), ); const argv = ["yes", "please"]; const resp = await db.runServiceScript(mount, "echo", argv); @@ -754,9 +754,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getService(mount); expect(resp.development).to.equal(false); @@ -771,10 +771,10 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), ]), - { development: true } + { development: true }, ); const resp = await db.getService(mount); expect(resp.development).to.equal(true); @@ -787,7 +787,9 @@ describe("Foxx service", () => { it("tests should run", async () => { await db.installService( mount, - new Blob([fs.readFileSync(path.resolve(localAppsPath, "with-tests.zip"))]) + new Blob([ + fs.readFileSync(path.resolve(localAppsPath, "with-tests.zip")), + ]), ); const resp = await db.runServiceTests(mount, {}); expect(resp).to.have.property("stats"); @@ -802,7 +804,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-readme.zip")), - ]) + ]), ); const resp = await db.getServiceReadme(mount); expect(resp).to.equal("Please read this."); @@ -813,9 +815,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceReadme(mount); expect(resp).to.equal(undefined); @@ -826,9 +828,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceDocumentation(mount); expect(resp).to.have.property("swagger", "2.0"); diff --git a/src/test/23-aql-queries-stream.ts b/src/test/23-aql-queries-stream.ts index 2f0dd8d75..55a39e5e9 100644 --- a/src/test/23-aql-queries-stream.ts +++ b/src/test/23-aql-queries-stream.ts @@ -18,7 +18,7 @@ describe("AQL Stream queries", function () { }); after(async () => { await Promise.all( - allCursors.map((cursor) => cursor.kill().catch(() => undefined)) + allCursors.map((cursor) => cursor.kill().catch(() => undefined)), ); try { await system.dropDatabase(name); @@ -69,12 +69,12 @@ describe("AQL Stream queries", function () { const collection = await db.createCollection(cname); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); await Promise.all( Array.from(Array(1000).keys()).map((i: number) => - collection.save({ hallo: i }) - ) + collection.save({ hallo: i }), + ), ); }); /*after(async () => { @@ -87,15 +87,15 @@ describe("AQL Stream queries", function () { let count = 0; const cursors = await Promise.all( - Array.from(Array(25)).map(() => db.query(query, options)) + Array.from(Array(25)).map(() => db.query(query, options)), ); allCursors.push(...cursors); await 
Promise.all( cursors.map((c) => (c as Cursor).forEach(() => { count++; - }) - ) + }), + ), ); expect(count).to.equal(25 * 1000); }); diff --git a/src/test/24-accessing-views.ts b/src/test/24-accessing-views.ts index a1892fc3b..0cdc063bd 100644 --- a/src/test/24-accessing-views.ts +++ b/src/test/24-accessing-views.ts @@ -38,9 +38,9 @@ describe("Accessing views", function () { await view.create({ type: "arangosearch" }); await db.waitForPropagation( { pathname: `/_api/view/${view.name}` }, - 10000 + 10000, ); - }) + }), ); }); after(async () => { @@ -61,14 +61,14 @@ describe("Accessing views", function () { await view.create({ type: "arangosearch" }); await db.waitForPropagation( { pathname: `/_api/view/${view.name}` }, - 10000 + 10000, ); - }) + }), ); }); after(async () => { await Promise.all( - arangoSearchViewNames.map((name) => db.view(name).drop()) + arangoSearchViewNames.map((name) => db.view(name).drop()), ); }); it("creates View instances", async () => { @@ -76,7 +76,7 @@ describe("Accessing views", function () { const arangoSearchViews = views.filter((v) => v instanceof View).sort(); expect(arangoSearchViews.length).to.equal(arangoSearchViewNames.length); expect(arangoSearchViews.map((v) => v.name).sort()).to.eql( - arangoSearchViewNames + arangoSearchViewNames, ); }); }); diff --git a/src/test/26-manipulating-views.ts b/src/test/26-manipulating-views.ts index 027fa6b31..512344267 100644 --- a/src/test/26-manipulating-views.ts +++ b/src/test/26-manipulating-views.ts @@ -41,7 +41,10 @@ describe("Manipulating views", function () { it("creates a new arangosearch view", async () => { const view = db.view(`asv-${Date.now()}`); await view.create({ type: "arangosearch" }); - await db.waitForPropagation({ pathname: `/_api/view/${view.name}` }, 10000); + await db.waitForPropagation( + { pathname: `/_api/view/${view.name}` }, + 10000, + ); const info = await view.get(); expect(info).to.have.property("name", view.name); expect(info).to.have.property("type", "arangosearch"); @@ -76,7 +79,7 @@ describe("Manipulating views", function () { commitIntervalMsec: 30000, }); expect(properties.consolidationIntervalMsec).to.equal( - initial.consolidationIntervalMsec + initial.consolidationIntervalMsec, ); expect(properties.commitIntervalMsec).to.equal(30000); }); diff --git a/src/test/27-query-management.ts b/src/test/27-query-management.ts index 3b80c7c85..3e25cd5bc 100644 --- a/src/test/27-query-management.ts +++ b/src/test/27-query-management.ts @@ -33,7 +33,7 @@ describe("Query Management API", function () { }); after(async () => { await Promise.all( - allCursors.map((cursor) => cursor.kill().catch(() => undefined)) + allCursors.map((cursor) => cursor.kill().catch(() => undefined)), ); try { await system.dropDatabase(dbName); @@ -200,7 +200,7 @@ describe("Query Management API", function () { // must filter the list here, as there could be other (system) queries // ongoing at the same time queries = (await db.listRunningQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); if (queries.length > 0) { break; @@ -237,7 +237,7 @@ describe("Query Management API", function () { allCursors.push(cursor); // must filter the list here, as there could have been other (system) queries const queries = (await db.listSlowQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); expect(queries).to.have.lengthOf(1); expect(queries[0]).to.have.property("query", query); @@ -266,12 +266,12 @@ describe("Query Management API", function () { 
allCursors.push(cursor); // must filter the list here, as there could have been other (system) queries const queries1 = (await db.listSlowQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); expect(queries1).to.have.lengthOf(1); await db.clearSlowQueries(); const queries2 = (await db.listSlowQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); expect(queries2).to.have.lengthOf(0); }); @@ -285,7 +285,7 @@ describe("Query Management API", function () { const p1 = db.query(query); p1.then((cursor) => allCursors.push(cursor)); const queries = (await db.listSlowQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); expect(queries).to.have.lengthOf(1); expect(queries[0]).to.have.property("bindVars"); diff --git a/src/test/28-accessing-analyzers.ts b/src/test/28-accessing-analyzers.ts index 760323dab..f524cebfb 100644 --- a/src/test/28-accessing-analyzers.ts +++ b/src/test/28-accessing-analyzers.ts @@ -43,16 +43,16 @@ describe("Accessing analyzers", function () { await analyzer.create({ type: "identity" }); await db.waitForPropagation( { pathname: `/_api/analyzer/${analyzer.name}` }, - 65000 + 65000, ); - }) + }), ); }); after(async () => { await Promise.all( analyzerNames.map((name) => - db.analyzer(name.replace(/^[^:]+::/, "")).drop() - ) + db.analyzer(name.replace(/^[^:]+::/, "")).drop(), + ), ); }); it("fetches information about all analyzers", async () => { @@ -71,16 +71,16 @@ describe("Accessing analyzers", function () { await analyzer.create({ type: "identity" }); await db.waitForPropagation( { pathname: `/_api/analyzer/${analyzer.name}` }, - 65000 + 65000, ); - }) + }), ); }); after(async () => { await Promise.all( analyzerNames.map((name) => - db.analyzer(name.replace(/^[^:]+::/, "")).drop() - ) + db.analyzer(name.replace(/^[^:]+::/, "")).drop(), + ), ); }); it("creates Analyzer instances", async () => { diff --git a/src/test/29-manipulating-analyzers.ts b/src/test/29-manipulating-analyzers.ts index 51fd3f437..f5726c145 100644 --- a/src/test/29-manipulating-analyzers.ts +++ b/src/test/29-manipulating-analyzers.ts @@ -42,7 +42,7 @@ describe("Manipulating analyzers", function () { after(async () => { try { await analyzer.drop(); - } catch { } + } catch {} }); it("fetches information about the analyzer", async () => { const data = await analyzer.get(); diff --git a/src/test/29-queue-time.ts b/src/test/29-queue-time.ts index e1cf72dca..06cc6d5f5 100644 --- a/src/test/29-queue-time.ts +++ b/src/test/29-queue-time.ts @@ -19,7 +19,7 @@ describe("Queue time metrics", function () { collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); after(async () => { @@ -34,7 +34,7 @@ describe("Queue time metrics", function () { }); it("should trim existing queue times when set to a lower value", async () => { await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(10); db.setResponseQueueTimeSamples(5); @@ -42,23 +42,23 @@ describe("Queue time metrics", function () { }); it("should allow more values when set to a higher value", async () => { await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(10); 
db.setResponseQueueTimeSamples(20); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(20); }); it("should allow fewer values when set to a lower value", async () => { await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(10); db.setResponseQueueTimeSamples(5); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(5); }); @@ -67,7 +67,7 @@ describe("Queue time metrics", function () { it("should return the latest value", async () => { expect(db.queueTime.getLatest()).to.equal(undefined); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); const values = db.queueTime.getValues(); expect(values.length).to.be.greaterThan(0); @@ -79,7 +79,7 @@ describe("Queue time metrics", function () { const min = Date.now(); expect(db.queueTime.getValues()).to.eql([]); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); const max = Date.now(); const values = db.queueTime.getValues(); @@ -96,7 +96,7 @@ describe("Queue time metrics", function () { it("should return the arithmetic average of all current values", async () => { expect(db.queueTime.getAvg()).to.equal(0); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); const values = db.queueTime.getValues(); expect(values.length).to.be.greaterThan(0); diff --git a/src/test/30-concurrent-transactions.ts b/src/test/30-concurrent-transactions.ts index 38d33a801..0ded03b89 100644 --- a/src/test/30-concurrent-transactions.ts +++ b/src/test/30-concurrent-transactions.ts @@ -32,18 +32,18 @@ describe("Transactions", function () { after(async () => { await Promise.all( allTransactions.map((transaction) => - transaction.abort().catch(() => undefined) - ) + transaction.abort().catch(() => undefined), + ), ); try { await system.dropDatabase(name); - } catch { } + } catch {} }); beforeEach(async () => { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); afterEach(async () => { @@ -58,7 +58,7 @@ describe("Transactions", function () { it("can run concurrent transactions in parallel", async () => { const conn = (db as any)._connection as Connection; const range = Array.from(Array((conn as any)._taskPoolSize).keys()).map( - (i) => i + 1 + (i) => i + 1, ); let failed = 0; await Promise.all( @@ -73,7 +73,7 @@ describe("Transactions", function () { trx.id, "completed begin after", Date.now() - started, - "ms elapsed" + "ms elapsed", ); await trx.step(() => collection.save({ enabled: true })); console.log( @@ -82,7 +82,7 @@ describe("Transactions", function () { trx.id, "completed save after", Date.now() - started, - "ms elapsed" + "ms elapsed", ); await delay(Math.random() * 10); await trx.commit(); @@ -92,7 +92,7 @@ describe("Transactions", function () { trx.id, "completed commit after", Date.now() - started, - "ms 
elapsed" + "ms elapsed", ); } catch (e: any) { console.error( @@ -102,18 +102,18 @@ describe("Transactions", function () { "failed after", Date.now() - started, "ms elapsed:", - String(e) + String(e), ); failed++; } - }) + }), ); expect(failed).to.equal(0); }); it("respects transactional guarantees", async () => { const conn = (db as any)._connection as Connection; const range = Array.from(Array((conn as any)._taskPoolSize).keys()).map( - (i) => i + 1 + (i) => i + 1, ); const key = "test"; await collection.save({ _key: key, i: 0 }); @@ -134,7 +134,7 @@ describe("Transactions", function () { "adding", value, "=", - doc.i + value + doc.i + value, ); await trx.step(() => collection.update(key, { i: doc.i + value })); console.log(value, "committing"); @@ -144,7 +144,7 @@ describe("Transactions", function () { console.error(value, "failed:", String(e)); failed++; } - }) + }), ); const doc = await collection.document(key); expect(doc.i).to.equal(range.reduce((a, b) => a + b)); diff --git a/src/test/31-conflicts.ts b/src/test/31-conflicts.ts index 711faf46e..6bd73a07f 100644 --- a/src/test/31-conflicts.ts +++ b/src/test/31-conflicts.ts @@ -20,7 +20,7 @@ describe("config.maxRetries", () => { collection = await db.createCollection(collectionName); await db.waitForPropagation( { pathname: `/_api/collection/${collection.name}` }, - 10000 + 10000, ); }); after(async () => { @@ -45,12 +45,12 @@ describe("config.maxRetries", () => { LET doc = DOCUMENT(${collection}, ${docKey}) UPDATE doc WITH { data: doc.data + 1 } IN ${collection} `, - { retryOnConflict: 0 } - ) - ) + { retryOnConflict: 0 }, + ), + ), ); expect( - result.filter(({ status }) => status === "rejected") + result.filter(({ status }) => status === "rejected"), ).not.to.have.lengthOf(0); const { data } = await collection.document(docKey); expect(data).not.to.equal(1_000); @@ -65,9 +65,9 @@ describe("config.maxRetries", () => { LET doc = DOCUMENT(${collection}, ${docKey}) UPDATE doc WITH { data: doc.data + 1 } IN ${collection} `, - { retryOnConflict: 100 } - ) - ) + { retryOnConflict: 100 }, + ), + ), ); const { data } = await collection.document(docKey); expect(data).to.equal(1_000); diff --git a/src/test/_config.ts b/src/test/_config.ts index 71d0aa205..53419dcbb 100644 --- a/src/test/_config.ts +++ b/src/test/_config.ts @@ -2,7 +2,7 @@ import { ConfigOptions, LoadBalancingStrategy } from "../configuration.js"; const ARANGO_URL = process.env.TEST_ARANGODB_URL || "http://127.0.0.1:8529"; const ARANGO_VERSION = Number( - process.env.ARANGO_VERSION || process.env.ARANGOJS_DEVEL_VERSION || 0 + process.env.ARANGO_VERSION || process.env.ARANGOJS_DEVEL_VERSION || 0, ); const ARANGO_RELEASE = process.env.ARANGO_RELEASE || ""; let arangoVersion: number = 39999; @@ -20,13 +20,13 @@ const ARANGO_LOAD_BALANCING_STRATEGY = process.env export const config: ConfigOptions & { arangoVersion: NonNullable; } = ARANGO_URL.includes(",") - ? { + ? { url: ARANGO_URL.split(",").filter((s) => Boolean(s)), arangoVersion, precaptureStackTraces: true, loadBalancingStrategy: ARANGO_LOAD_BALANCING_STRATEGY || "ROUND_ROBIN", } - : { + : { url: ARANGO_URL, arangoVersion, precaptureStackTraces: true, diff --git a/src/transactions.ts b/src/transactions.ts index 45107adcf..1d97463c9 100644 --- a/src/transactions.ts +++ b/src/transactions.ts @@ -24,17 +24,26 @@ export type TransactionCollectionOptions = { * written to during the transaction with no other writes being able to run * in parallel. 
*/ - exclusive?: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; + exclusive?: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; /** * An array of collections or a single collection that will be read from or * written to during the transaction. */ - write?: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; + write?: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; /** * An array of collections or a single collection that will be read from * during the transaction. */ - read?: (string | collections.ArangoCollection)[] | string | collections.ArangoCollection; + read?: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; }; /** @@ -45,7 +54,7 @@ export function coerceTransactionCollections( | (TransactionCollectionOptions & { allowImplicit?: boolean }) | (string | collections.ArangoCollection)[] | string - | collections.ArangoCollection + | collections.ArangoCollection, ): CoercedTransactionCollections { if (typeof options === "string") { return { write: [options] }; @@ -196,7 +205,7 @@ export type TransactionInfo = { * @param transaction - A value that might be a transaction. */ export function isArangoTransaction( - transaction: any + transaction: any, ): transaction is Transaction { return Boolean(transaction && transaction.isArangoTransaction); } @@ -282,7 +291,7 @@ export class Transaction { { pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -309,7 +318,7 @@ export class Transaction { pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, allowDirtyRead, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -336,7 +345,7 @@ export class Transaction { pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, allowDirtyRead, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } @@ -534,7 +543,7 @@ export class Transaction { const promise = callback(); if (!promise) { throw new Error( - "Transaction callback was not an async function or did not return a promise!" + "Transaction callback was not an async function or did not return a promise!", ); } return Promise.resolve(promise); @@ -543,4 +552,4 @@ export class Transaction { } } } -//#endregion \ No newline at end of file +//#endregion diff --git a/src/users.ts b/src/users.ts index a59968579..1baebea1e 100644 --- a/src/users.ts +++ b/src/users.ts @@ -103,4 +103,4 @@ export type ArangoUser = { */ extra: Record; }; -//#endregion \ No newline at end of file +//#endregion diff --git a/src/views.ts b/src/views.ts index 82d4f62fa..9b7f5fa67 100644 --- a/src/views.ts +++ b/src/views.ts @@ -166,7 +166,10 @@ export type CreateArangoSearchViewOptions = CreateViewOptionsType< * Attribute paths for which values should be stored in the view index * in addition to those used for sorting via `primarySort`. */ - storedValues?: CreateArangoSearchViewStoredValueOptions[] | string[] | string[][]; + storedValues?: + | CreateArangoSearchViewStoredValueOptions[] + | string[] + | string[][]; /** * An array of strings defining sort expressions to optimize. */ @@ -177,32 +180,31 @@ export type CreateArangoSearchViewOptions = CreateViewOptionsType< /** * Options for creating a primary sort in an ArangoSearch View. 
*/ -export type CreateArangoSearchViewPrimarySortOptions = ( +export type CreateArangoSearchViewPrimarySortOptions = | { - /** - * Attribute path for the value of each document to use for - * sorting. - */ - field: string; - /** - * If set to `"asc"`, the primary sorting order will be ascending. - * If set to `"desc"`, the primary sorting order will be descending. - */ - direction: Direction; - } + /** + * Attribute path for the value of each document to use for + * sorting. + */ + field: string; + /** + * If set to `"asc"`, the primary sorting order will be ascending. + * If set to `"desc"`, the primary sorting order will be descending. + */ + direction: Direction; + } | { - /** - * Attribute path for the value of each document to use for - * sorting. - */ - field: string; - /** - * If set to `true`, the primary sorting order will be ascending. - * If set to `false`, the primary sorting order will be descending. - */ - asc: boolean; - } -); + /** + * Attribute path for the value of each document to use for + * sorting. + */ + field: string; + /** + * If set to `true`, the primary sorting order will be ascending. + * If set to `false`, the primary sorting order will be descending. + */ + asc: boolean; + }; /** * Options for creating a stored value in an ArangoSearch View. @@ -594,13 +596,13 @@ export class View { * ``` */ create( - options: CreateViewOptions + options: CreateViewOptions, ): Promise< typeof options extends CreateArangoSearchViewOptions - ? ArangoSearchViewDescription - : Options extends CreateSearchAliasViewOptions - ? SearchAliasViewDescription - : ViewDescription + ? ArangoSearchViewDescription + : Options extends CreateSearchAliasViewOptions + ? SearchAliasViewDescription + : ViewDescription > { return this._db.request({ method: "POST", @@ -634,7 +636,9 @@ export class View { * // view1 and view3 represent the same ArangoDB view! * ``` */ - async rename(newName: string): Promise> { + async rename( + newName: string, + ): Promise> { const result = this._db.renameView(this._name, newName); this._name = newName; return result; @@ -673,13 +677,13 @@ export class View { * ``` */ updateProperties( - properties?: Properties + properties?: Properties, ): Promise< Properties extends UpdateArangoSearchViewPropertiesOptions - ? ArangoSearchViewProperties - : Properties extends UpdateSearchAliasViewPropertiesOptions - ? SearchAliasViewProperties - : ViewProperties + ? ArangoSearchViewProperties + : Properties extends UpdateSearchAliasViewPropertiesOptions + ? SearchAliasViewProperties + : ViewProperties > { return this._db.request({ method: "PATCH", @@ -704,13 +708,13 @@ export class View { * ``` */ replaceProperties( - properties?: Properties + properties?: Properties, ): Promise< Properties extends ArangoSearchViewPropertiesOptions - ? ArangoSearchViewProperties - : Properties extends SearchAliasViewPropertiesOptions - ? SearchAliasViewProperties - : ViewProperties + ? ArangoSearchViewProperties + : Properties extends SearchAliasViewPropertiesOptions + ? 
SearchAliasViewProperties + : ViewProperties > { return this._db.request({ method: "PUT", @@ -737,8 +741,8 @@ export class View { method: "DELETE", pathname: `/_api/view/${encodeURIComponent(this._name)}`, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } } -//#endregion \ No newline at end of file +//#endregion From 860985b56d4c5efc850b98d68379a24e8fc7b240 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 10 Dec 2024 11:51:37 +0100 Subject: [PATCH 16/21] Make undici an optional peer dep --- CHANGELOG.md | 16 +++++++++++++++- README.md | 5 +++-- package.json | 7 ++++++- 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 25bb8897c..ede451448 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,16 @@ This driver uses semantic versioning: - A change in the major version (e.g. 1.Y.Z -> 2.0.0) indicates _breaking_ changes that require changes in your code to upgrade. +## [Unreleased] + +### Changed + +- Changed `undici` from an optional dependency to an optional peer dependency + + The `undici` module is only required when using Unix domain sockets or + overriding the fetch agent in Node.js and does not need to be installed + otherwise. + ## [10.0.0-alpha.1] - 2024-12-09 This is a major release and breaks backwards compatibility. @@ -146,7 +156,7 @@ for upgrading your code to arangojs v10. `arangojs/documents` module The following types were moved: `DocumentOperationFailure`, - `DocumentOperationMetadata`, `DocumentExistsOptions`, + `DocumentOperationMetadata`, `DocumentExistsOptions`, `CollectionReadOptions`, `CollectionBatchReadOptions`, `CollectionInsertOptions`, `CollectionReplaceOptions`, `CollectionUpdateOptions`, `CollectionRemoveOptions`, @@ -243,9 +253,11 @@ for upgrading your code to arangojs v10. - `CreateDatabaseUser` -> `CreateDatabaseUserOptions` - Index operations: + - `IndexListOptions` -> `ListIndexesOptions` - Collection document operations: + - `DocumentExistsOptions` -> `DocumentExistsOptions` - `CollectionReadOptions` -> `ReadDocumentOptions` - `CollectionBatchReadOptions` -> `BulkReadDocumentsOptions` @@ -259,6 +271,7 @@ for upgrading your code to arangojs v10. - `CollectionEdgesResult` -> `DocumentEdgesResult` - Graph collection document operation: + - `GraphCollectionReadOptions` -> `ReadGraphDocumentOptions` - `GraphCollectionInsertOptions` -> `CreateGraphDocumentOptions` - `GraphCollectionReplaceOptions` -> `ReplaceGraphDocumentOptions` @@ -2355,6 +2368,7 @@ For a detailed list of changes between pre-release versions of v7 see the Graph methods now only return the relevant part of the response body. +[unreleased]: https://github.com/arangodb/arangojs/compare/v10.0.0-alpha.1...v10 [10.0.0-alpha.1]: https://github.com/arangodb/arangojs/compare/v10.0.0-alpha.0...v10.0.0-alpha.1 [10.0.0-alpha.0]: https://github.com/arangodb/arangojs/compare/v9.2.0...v10.0.0-alpha.0 [9.2.0]: https://github.com/arangodb/arangojs/compare/v9.1.0...v9.2.0 diff --git a/README.md b/README.md index edf1c0392..2f72ea4fc 100644 --- a/README.md +++ b/README.md @@ -272,7 +272,7 @@ available. ### Unix domain sockets If you want to use Unix domain sockets, you need to install the `undici` module, -which is an optional dependency of arangojs. +which is an optional peer dependency of arangojs. ```sh npm install --save undici @@ -285,7 +285,8 @@ message indicating that the `undici` module is unavailable. 
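As a rough sketch of what connecting over a Unix domain socket might look like once the optional `undici` peer dependency is installed: the socket URL scheme and path shown here are assumptions for illustration only, so check the arangojs documentation for the exact format supported by your version.

```ts
import { Database } from "arangojs";

// Sketch only: connect to an ArangoDB server listening on a Unix domain
// socket. This requires the optional `undici` peer dependency to be
// installed; the URL scheme below is an assumed example format.
const db = new Database({
  url: "unix:///tmp/arangodb.sock", // assumed socket URL format
  databaseName: "_system",
});

// Any request made through this Database instance would then go over the
// Unix domain socket instead of TCP.
const version = await db.version();
console.log(version.server, version.version);
```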
### Node.js with self-signed HTTPS certificates If you need to support self-signed HTTPS certificates in Node.js, you will need -to install the `undici` module, which is an optional dependency of arangojs. +to install the `undici` module, which is an optional peer dependency of +arangojs. ```sh npm install --save undici diff --git a/package.json b/package.json index 74cce3919..dd2f0b357 100644 --- a/package.json +++ b/package.json @@ -103,7 +103,12 @@ "typedoc": "^0.25.12", "typescript": "^5.4.2" }, - "optionalDependencies": { + "peerDependencies": { "undici": ">=5.21.0" + }, + "peerDependenciesMeta": { + "undici": { + "optional": true + } } } From 5868177131cfbb8b21204a29efc53e5413a2ebfe Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 10 Dec 2024 17:27:42 +0100 Subject: [PATCH 17/21] Update CHANGELOG.md --- CHANGELOG.md | 291 +++++++++++++++++++------------------ src/databases.ts | 340 ++++++++++++++++++++++---------------------- src/transactions.ts | 18 +-- 3 files changed, 332 insertions(+), 317 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ede451448..8e71842a0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,122 +16,138 @@ This driver uses semantic versioning: ## [Unreleased] -### Changed +This is a major release and breaks backwards compatibility. -- Changed `undici` from an optional dependency to an optional peer dependency +See [the migration guide](./MIGRATING.md#v9-to-v10) for detailed instructions +for upgrading your code to arangojs v10. - The `undici` module is only required when using Unix domain sockets or - overriding the fetch agent in Node.js and does not need to be installed - otherwise. +### Removed -## [10.0.0-alpha.1] - 2024-12-09 +- Removed unused `CreateUserOptions` type -This is a major release and breaks backwards compatibility. + The actual type used by the `db.createUser` method is still `UserOptions`. -See [the migration guide](./MIGRATING.md#v9-to-v10) for detailed instructions -for upgrading your code to arangojs v10. +- Removed unused `IndexDetails` type + + This type was intended to be returned by `collection.indexes` when the + `withStats` option is set to `true` but the `figures` property is already + included in the current return type. ### Changed -- Renamed `CollectionDropOptions` type to `DropCollectionOptions` +- Closing a connection now closes all open requests -- Renamed `CollectionTruncateOptions` type to `TruncateCollectionOptions` + Previously in certain situations only the most recent request would be + closed per server. Note that this still merely aborts the requests but + does not guarantee the underlying connections are closed as these are + handled by Node.js or the browser natively. need to be installed + otherwise. -- Renamed `Config` type to `ConfigOptions` +- Moved fetch-specific `config` options from into `config.fetchOptions` -- Renamed `path` option to `pathname` in `RequestOptions` type + The following options were moved: `credentials`, `headers` and `keepalive`. - This affects the `db.waitForPropagation` and `route.request` methods. +#### Error handling -- Removed `basePath` option from `RequestOptions` type +- Errors encountered before a request completes are now wrapped in a + `NetworkError` or a subclass thereof - This affects the `db.waitForPropagation` and `route.request` methods. + This should help making it easier to diagnose network issues and distinguish + the relevant error conditions. 
-- Renamed `route.path` property to `route.pathname` + The originating error can still be accessed using the `cause` property of the + `NetworkError` error. -- Changed error type constructor signatures +- `HttpError` now extends the `NetworkError` class - The `request` property is now always positional and the `options` property - is always optional. + This allows treating all non-`ArangoError` errors as one category of errors, + even when there is no server response available. -- Moved configuration related types to new `config` module +- `db.waitForPropagation` now throws a `PropagationTimeoutError` error when + invoked with a `timeout` option and the timeout duration is exceeded - The following types were moved: `Config`, `LoadBalancingStrategy`, - `BasicAuthCredentials` and `BearerAuthCredentials`. + The method would previously throw the most recent error encountered while + waiting for replication. The originating error can still be accessed using + the `cause` property of the `PropagationTimeoutError` error. -- Moved `ArangoErrorResponse` type to `connection` module +- `db.waitForPropagation` now respects the `timeout` option more strictly - The type is now also no longer marked as internal. + Previously the method would only time out if the timeout duration was + exceeded after the most recent request failed. Now the timeout is + recalculated and passed on to each request, preventing it from exceeding + the specified duration. -- Moved configuration related types to new `configuration` module + If the propagation timed out due to an underlying request exceeding the + timeout duration, the `cause` property of the `PropagationTimeoutError` + error will be a `ResponseTimeoutError` error. - The following types were moved: `ConfigOptions`, `LoadBalancingStrategy`, - `BasicAuthCredentials` and `BearerAuthCredentials`. +- `config.beforeRequest` and `config.afterResponse` callbacks can now return + promises -- Moved internal utility functions to new `lib/util` module + If the callback returns a promise, it will be awaited before the request + and response cycle proceeds. If either callback throws an error or returns + a promise that is rejected, that error will be thrown instead. - These methods are all still marked as internal and should not be used - directly. +- `config.afterResponse` callback signature changed -- Closing a connection now closes all open requests + The callback signature previously used the internal `ArangojsResponse` type. + The new signature uses the `Response` type of the Fetch API with an + additional `request` property to more accurately represent the actual value + it receives as the `parsedBody` property will never be present. - Previously in certain situations only the most recent request would be - closed per server. Note that this still merely aborts the requests but - does not guarantee the underlying connections are closed as these are - handled by Node.js or the browser natively. +- `response` property on `ArangoError` is now optional -### Added + This property should always be present but this allows using the error in + situations where a response might not be available. -- Restored support for Unix domain sockets +#### General type changes - Using Unix domain sockets requires the `undici` library to be installed. 
+- Changed `GraphVertexCollection` and `GraphEdgeCollection` generic types to + take separate `EntryResultType` and `EntryInputType` type parameters -- Restored support for `config.agentOptions` +- Changed `db.collection`, `db.createCollection` and `db.createEdgeCollection` + methods to take separate `EntryResultType` and `EntryInputType` type + parameters - The `config.agentOptions` option can now be used to create a custom `undici` - agent if the `undici` library is installed. + These type parameters are used to narrow the the returned collection type. -## [10.0.0-alpha.0] - 2024-11-28 +- Changed `db.removeUser` method return type to `Promise` -This is a major release and breaks backwards compatibility. + The previous return type served no purpose. -See [the migration guide](./MIGRATING.md#v9-to-v10) for detailed instructions -for upgrading your code to arangojs v10. +- Changed `QueueTimeMetrics` type to an interface -### Removed +- Changed `CursorExtras` and `CursorStats` interfaces to types -- Removed unused `CreateUserOptions` type +#### Low-level request/route changes - The actual type used by the `db.createUser` method is still `UserOptions`. +- Renamed `path` option to `pathname` in `RequestOptions` type -- Removed unused `IndexDetails` type + This affects the `db.waitForPropagation` and `route.request` methods. - This type was intended to be returned by `collection.indexes` when the - `withStats` option is set to `true` but the `figures` property is already - included in the current return type. +- Removed `basePath` option from `RequestOptions` type -### Changed + This affects the `db.waitForPropagation` and `route.request` methods. -- Changed `QueueTimeMetrics` type to an interface +- Renamed `route.path` property to `route.pathname` -- Changed `CursorExtras` and `CursorStats` interfaces to types +#### Renamed methods -- Changed `GraphVertexCollection` and `GraphEdgeCollection` generic types to - take separate `EntryResultType` and `EntryInputType` type parameters +- Renamed various methods for consistency: -- Changed `db.collection`, `db.createCollection` and `db.createEdgeCollection` - methods to take separate `EntryResultType` and `EntryInputType` type - parameters + Methods that return an array now follow the `listNouns` pattern, methods that + return a "list of nouns" wrapped in an object have been renamed to follow the + `getNouns` pattern to avoid confusion: - These type parameters are used to narrow the the returned collection type. + - `db.listServiceScripts` -> `db.getServiceScripts` + - `db.listHotBackups` -> `db.getHotBackups` + - `db.listFunctions` -> `db.listUserFunctions` + - `db.getLogMessages` -> `db.listLogMessages` + +- Renamed AQL user function management methods: -- Renamed `db.listServiceScripts` method to `db.getServiceScripts` -- Renamed `db.listHotBackups` method to `db.getHotBackups` -- Renamed `db.getLogMessages` method to `db.listLogMessages` -- Renamed `db.listFunctions` method to `db.listUserFunctions` -- Renamed `db.createFunction` method to `db.createUserFunction` -- Renamed `db.dropFunction` method to `db.dropUserFunction` -- Changed `db.removeUser` method to return `void` + - `db.createFunction` -> `db.createUserFunction` + - `db.dropFunction` -> `db.dropUserFunction` #### Module renaming @@ -150,6 +166,11 @@ for upgrading your code to arangojs v10. 
- `arangojs/transaction` -> `arangojs/transactions` - `arangojs/view` -> `arangojs/views` +- Moved internal utility functions to new `arangojs/lib/util` module + + These methods are all still marked as internal and should not be used + directly. + #### Moved types - Moved document related types from `arangojs/collection` module to @@ -218,6 +239,20 @@ for upgrading your code to arangojs v10. The following types were moved: `QueueTimeMetrics` and `VersionInfo`. +- Moved configuration related types to new `arangojs/config` module + + The following types were moved: `Config`, `LoadBalancingStrategy`, + `BasicAuthCredentials` and `BearerAuthCredentials`. + +- Moved `ArangoErrorResponse` type to `arangojs/connection` module + + The type is now also no longer marked as internal. + +- Moved configuration related types to new `arangojs/configuration` module + + The following types were moved: `ConfigOptions`, `LoadBalancingStrategy`, + `BasicAuthCredentials` and `BearerAuthCredentials`. + #### Renamed types - Renamed `Index` types to `IndexDescription` for consistency @@ -241,20 +276,39 @@ for upgrading your code to arangojs v10. - Renamed various types for consistency: + Types representing an instance of a specific entity type in ArangoDB like a + collection, graph or query now follow the `NounDescription` naming pattern: + - `AqlUserFunction` -> `UserFunctionDescription` - `CollectionMetadata` -> `CollectionDescription` - `DatabaseInfo` -> `DatabaseDescription` - `GraphInfo` -> `GraphDescription` - `ServiceInfo` -> `ServiceDescription` - `QueryInfo` -> `QueryDescription` + - `TransactionDetails` -> `TransactionDescription` + + Note that the `TransactionDescription` type used by `db.listTransactions` + is slightly different from the `TransactionInfo` type used by methods of + `Transaction` objects due to implementation details of ArangoDB. + + Types representing general information rather than an instance of something + now generally follow the `NounInfo` naming pattern, whereas types + representing the result of an operation generally follow the `NounResult` + or `VerbNounResult` naming pattern: + - `QueryTracking` -> `QueryTrackingInfo` - - `TransactionDetails` -> `TransactionInfo` - - `TransactionCollections` -> `TransactionCollectionOptions` - - `CreateDatabaseUser` -> `CreateDatabaseUserOptions` + - `CollectionImportResult` -> `ImportDocumentsResult` + - `CollectionEdgesResult` -> `DocumentEdgesResult` - - Index operations: + Types for options passed to methods now generally follow the `NounOptions`, + `VerbNounOptions` or `VerbNounAttributeOptions` naming patterns: - - `IndexListOptions` -> `ListIndexesOptions` + - `Config` -> `ConfigOptions` + - `TransactionCollections` -> `TransactionCollectionOptions` + - `CreateDatabaseUser` -> `CreateDatabaseUserOptions` + - `CollectionDropOptions` -> `DropCollectionOptions` + - `CollectionTruncateOptions` -> `TruncateCollectionOptions` + - `IndexListOptions` -> `ListIndexesOptions` - Collection document operations: @@ -267,8 +321,6 @@ for upgrading your code to arangojs v10. - `CollectionRemoveOptions` -> `RemoveDocumentOptions` - `CollectionImportOptions` -> `ImportDocumentsOptions` - `CollectionEdgesOptions` -> `DocumentEdgesOptions` - - `CollectionImportResult` -> `ImportDocumentsResult` - - `CollectionEdgesResult` -> `DocumentEdgesResult` - Graph collection document operation: @@ -279,6 +331,7 @@ for upgrading your code to arangojs v10. 
- `ViewPatchPropertiesOptions` -> `UpdateViewPropertiesOptions` - View operations: + - `ArangoSearchViewPatchPropertiesOptions` -> `UpdateArangoSearchViewPropertiesOptions` - `SearchAliasViewPatchPropertiesOptions` -> `UpdateSearchAliasViewPropertiesOptions` - `SearchAliasViewPatchIndexOptions` -> `UpdateSearchAliasViewIndexOptions` @@ -297,67 +350,36 @@ for upgrading your code to arangojs v10. - `ObjectWithId` (in `documents` module) -> `ObjectWithDocumentId` - `ObjectWithKey` (in `documents` module) -> `ObjectWithDocumentKey` -#### Error handling - -- Errors encountered before a request completes are now wrapped in a - `NetworkError` or a subclass thereof - - This should help making it easier to diagnose network issues and distinguish - the relevant error conditions. - - The originating error can still be accessed using the `cause` property of the - `NetworkError` error. - -- `HttpError` now extends the `NetworkError` class - - This allows treating all non-`ArangoError` errors as one category of errors, - even when there is no server response available. - -- `db.waitForPropagation` now throws a `PropagationTimeoutError` error when - invoked with a `timeout` option and the timeout duration is exceeded - - The method would previously throw the most recent error encountered while - waiting for replication. The originating error can still be accessed using - the `cause` property of the `PropagationTimeoutError` error. - -- `db.waitForPropagation` now respects the `timeout` option more strictly - - Previously the method would only time out if the timeout duration was - exceeded after the most recent request failed. Now the timeout is - recalculated and passed on to each request, preventing it from exceeding - the specified duration. - - If the propagation timed out due to an underlying request exceeding the - timeout duration, the `cause` property of the `PropagationTimeoutError` - error will be a `ResponseTimeoutError` error. - -- `config.beforeRequest` and `config.afterResponse` callbacks can now return - promises +### Added - If the callback returns a promise, it will be awaited before the request - and response cycle proceeds. If either callback throws an error or returns - a promise that is rejected, that error will be thrown instead. +- Restored support for Unix domain sockets -- `config.afterResponse` callback signature changed + Using Unix domain sockets requires the `undici` library to be installed. - The callback signature previously used the internal `ArangojsResponse` type. - The new signature uses the `Response` type of the Fetch API with an - additional `request` property to more accurately represent the actual value - it receives as the `parsedBody` property will never be present. +- Restored support for `config.agentOptions` -- `response` property on `ArangoError` is now optional + The `config.agentOptions` option can now be used to create a custom `undici` + agent if the `undici` library is installed. - This property should always be present but this allows using the error in - situations where a response might not be available. +- Added `config.fetchOptions` option -### Added + This option can now be used to specify default options for the `fetch` + function used by arangojs like `headers`, `credentials`, `keepalive` and + `redirect`. - Added `BatchCursor#itemsView` property and `BatchCursorItemsView` interface This property provides a low-level interface for consuming the items of the cursor and is used by the regular item-wise `Cursor` class internally. 
-- Added `onError` option to `Config` (DE-955) +- Added `ProcessedResponse` type + + This type replaces the previously internal `ArangojsResponse` type and + extends the native `Response` type with additional properties. + +#### Error handling + +- Added `config.onError` option (DE-955) This option can be used to specify a callback function that will be invoked whenever a request results in an error. Unlike `afterResponse`, this callback @@ -368,6 +390,11 @@ for upgrading your code to arangojs v10. If the `onError` callback throws an error or returns a promise that is rejected, that error will be thrown instead. +- Added optional `ArangoError#request` property + + This property is always present if the error has a `response` property. In + normal use this should always be the case. + - Added `NetworkError` class This is the common base class for all errors (including `HttpError`) that @@ -408,16 +435,6 @@ for upgrading your code to arangojs v10. `cause` property. This error is only thrown when `db.waitForPropagation` is invoked with a `timeout` option and the timeout duration is exceeded. -- Added `ProcessedResponse` type - - This type replaces the previously internal `ArangojsResponse` type and - extends the native `Response` type with additional properties. - -- Added optional `ArangoError#request` property - - This property is always present if the error has a `response` property. In - normal use this should always be the case. - ## [9.2.0] - 2024-11-27 ### Added @@ -2368,9 +2385,7 @@ For a detailed list of changes between pre-release versions of v7 see the Graph methods now only return the relevant part of the response body. -[unreleased]: https://github.com/arangodb/arangojs/compare/v10.0.0-alpha.1...v10 -[10.0.0-alpha.1]: https://github.com/arangodb/arangojs/compare/v10.0.0-alpha.0...v10.0.0-alpha.1 -[10.0.0-alpha.0]: https://github.com/arangodb/arangojs/compare/v9.2.0...v10.0.0-alpha.0 +[unreleased]: https://github.com/arangodb/arangojs/compare/v9.2.0...v10 [9.2.0]: https://github.com/arangodb/arangojs/compare/v9.1.0...v9.2.0 [9.1.0]: https://github.com/arangodb/arangojs/compare/v9.0.0...v9.1.0 [9.0.0]: https://github.com/arangodb/arangojs/compare/v8.8.1...v9.0.0 diff --git a/src/databases.ts b/src/databases.ts index 930b262c0..d1022b0c3 100644 --- a/src/databases.ts +++ b/src/databases.ts @@ -22,15 +22,15 @@ import * as errors from "./errors.js"; import * as graphs from "./graphs.js"; import * as hotBackups from "./hot-backups.js"; import * as jobs from "./jobs.js"; +import { DATABASE_NOT_FOUND } from "./lib/codes.js"; +import * as util from "./lib/util.js"; import * as logs from "./logs.js"; import * as queries from "./queries.js"; import * as routes from "./routes.js"; import * as services from "./services.js"; import * as transactions from "./transactions.js"; import * as users from "./users.js"; -import * as util from "./lib/util.js"; import * as views from "./views.js"; -import { DATABASE_NOT_FOUND } from "./lib/codes.js"; //#region Database operation options /** @@ -144,7 +144,7 @@ export class Database { protected _graphs = new Map(); protected _views = new Map(); protected _trapRequest?: ( - trapped: TrappedError | TrappedRequest, + trapped: TrappedError | TrappedRequest ) => void; /** @@ -189,7 +189,7 @@ export class Database { | string[] | configuration.ConfigOptions | Database = {}, - name?: string, + name?: string ) { if (isArangoDatabase(configOrDatabase)) { const connection = configOrDatabase._connection; @@ -250,7 +250,7 @@ export class Database { */ route( path?: 
string, - headers?: Headers | Record, + headers?: Headers | Record ): routes.Route { return new routes.Route(this, path, headers); } @@ -268,7 +268,7 @@ export class Database { */ async request( options: connection.RequestOptions, - transform?: (res: connection.ProcessedResponse) => ReturnType, + transform?: (res: connection.ProcessedResponse) => ReturnType ): Promise; /** * @internal @@ -282,14 +282,14 @@ export class Database { */ async request( options: connection.RequestOptions, - transform: false, + transform: false ): Promise>; async request( { pathname, ...opts }: connection.RequestOptions, transform: | false | ((res: connection.ProcessedResponse) => ReturnType) = (res) => - res.parsedBody as ReturnType, + res.parsedBody as ReturnType ): Promise { pathname = util.joinPath("_db", encodeURIComponent(this._name), pathname); if (this._trapRequest) { @@ -323,7 +323,7 @@ export class Database { } return this._connection.request( { pathname, ...opts }, - transform || undefined, + transform || undefined ); } @@ -357,7 +357,7 @@ export class Database { const urls: string[] = await this.request( { pathname: "/_api/cluster/endpoints" }, (res) => - res.parsedBody.endpoints.map((endpoint: any) => endpoint.endpoint), + res.parsedBody.endpoints.map((endpoint: any) => endpoint.endpoint) ); if (urls.length > 0) { if (overwrite) this._connection.setHostList(urls); @@ -421,11 +421,11 @@ export class Database { */ async waitForPropagation( request: connection.RequestOptions, - timeout?: number, + timeout?: number ): Promise; async waitForPropagation( { pathname, ...request }: connection.RequestOptions, - timeout?: number, + timeout?: number ): Promise { await this._connection.waitForPropagation( { @@ -433,10 +433,10 @@ export class Database { pathname: util.joinPath( "_db", encodeURIComponent(this._name), - pathname, + pathname ), }, - timeout, + timeout ); } @@ -524,7 +524,7 @@ export class Database { (res) => { this.useBearerAuth(res.parsedBody.jwt); return res.parsedBody.jwt; - }, + } ); } @@ -552,7 +552,7 @@ export class Database { if (!res.parsedBody.jwt) return null; this.useBearerAuth(res.parsedBody.jwt); return res.parsedBody.jwt; - }, + } ); } //#endregion @@ -610,7 +610,7 @@ export class Database { method: "GET", pathname: "/_admin/time", }, - (res) => res.parsedBody.time * 1000, + (res) => res.parsedBody.time * 1000 ); } @@ -648,7 +648,7 @@ export class Database { * ``` */ async availability( - graceful = false, + graceful = false ): Promise { try { return this.request( @@ -656,7 +656,7 @@ export class Database { method: "GET", pathname: "/_admin/server/availability", }, - (res) => res.parsedBody.mode, + (res) => res.parsedBody.mode ); } catch (e) { if (graceful) return false; @@ -693,7 +693,7 @@ export class Database { method: "DELETE", pathname: "/_admin/shutdown", }, - () => undefined, + () => undefined ); } //#endregion @@ -711,7 +711,7 @@ export class Database { getClusterImbalance(): Promise { return this.request( { pathname: "/_admin/cluster/rebalance" }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -731,7 +731,7 @@ export class Database { * ``` */ computeClusterRebalance( - options: cluster.ClusterRebalanceOptions, + options: cluster.ClusterRebalanceOptions ): Promise { return this.request( { @@ -742,7 +742,7 @@ export class Database { ...options, }, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -762,7 +762,7 @@ export class Database { * ``` */ executeClusterRebalance( - moves: cluster.ClusterRebalanceMove[], + moves: 
cluster.ClusterRebalanceMove[] ): Promise { return this.request({ method: "POST", @@ -789,7 +789,7 @@ export class Database { * ``` */ rebalanceCluster( - opts: cluster.ClusterRebalanceOptions, + opts: cluster.ClusterRebalanceOptions ): Promise { return this.request({ method: "PUT", @@ -834,7 +834,7 @@ export class Database { get(): Promise { return this.request( { pathname: "/_api/database/current" }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -878,7 +878,7 @@ export class Database { */ createDatabase( databaseName: string, - options?: CreateDatabaseOptions, + options?: CreateDatabaseOptions ): Promise; /** * Creates a new database with the given `databaseName` with the given @@ -896,13 +896,13 @@ export class Database { */ createDatabase( databaseName: string, - users: users.CreateDatabaseUserOptions[], + users: users.CreateDatabaseUserOptions[] ): Promise; createDatabase( databaseName: string, usersOrOptions: | users.CreateDatabaseUserOptions[] - | CreateDatabaseOptions = {}, + | CreateDatabaseOptions = {} ): Promise { const { users, ...options } = Array.isArray(usersOrOptions) ? { users: usersOrOptions } @@ -913,7 +913,7 @@ export class Database { pathname: "/_api/database", body: { name: databaseName, users, options }, }, - () => this.database(databaseName), + () => this.database(databaseName) ); } @@ -933,7 +933,7 @@ export class Database { listDatabases(): Promise { return this.request( { pathname: "/_api/database" }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -954,7 +954,7 @@ export class Database { listUserDatabases(): Promise { return this.request( { pathname: "/_api/database/user" }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -975,8 +975,8 @@ export class Database { databases(): Promise { return this.request({ pathname: "/_api/database" }, (res) => (res.parsedBody.result as string[]).map((databaseName) => - this.database(databaseName), - ), + this.database(databaseName) + ) ); } @@ -997,8 +997,8 @@ export class Database { userDatabases(): Promise { return this.request({ pathname: "/_api/database/user" }, (res) => (res.parsedBody.result as string[]).map((databaseName) => - this.database(databaseName), - ), + this.database(databaseName) + ) ); } @@ -1020,7 +1020,7 @@ export class Database { method: "DELETE", pathname: `/_api/database/${encodeURIComponent(databaseName)}`, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } //#endregion @@ -1072,14 +1072,14 @@ export class Database { EntryResultType extends Record = any, EntryInputType extends Record = EntryResultType, >( - collectionName: string, + collectionName: string ): collections.DocumentCollection & collections.EdgeCollection { collectionName = collectionName; if (!this._collections.has(collectionName)) { this._collections.set( collectionName, - new collections.Collection(this, collectionName), + new collections.Collection(this, collectionName) ); } return this._collections.get(collectionName)!; @@ -1118,7 +1118,7 @@ export class Database { collectionName: string, options?: collections.CreateCollectionOptions & { type?: collections.CollectionType.DOCUMENT_COLLECTION; - }, + } ): Promise>; /** * Creates a new edge collection with the given `collectionName` and @@ -1159,7 +1159,7 @@ export class Database { collectionName: string, options: collections.CreateCollectionOptions & { type: collections.CollectionType.EDGE_COLLECTION; - }, + } ): Promise>; async createCollection< EntryResultType extends Record = any, 
@@ -1168,7 +1168,7 @@ export class Database { collectionName: string, options?: collections.CreateCollectionOptions & { type?: collections.CollectionType; - }, + } ): Promise< collections.DocumentCollection & collections.EdgeCollection @@ -1214,7 +1214,7 @@ export class Database { EntryInputType extends Record = EntryResultType, >( collectionName: string, - options?: collections.CreateCollectionOptions, + options?: collections.CreateCollectionOptions ): Promise> { return this.createCollection(collectionName, { ...options, @@ -1236,7 +1236,7 @@ export class Database { */ async renameCollection( collectionName: string, - newName: string, + newName: string ): Promise> { const result = await this.request({ method: "PUT", @@ -1272,14 +1272,14 @@ export class Database { * ``` */ listCollections( - excludeSystem: boolean = true, + excludeSystem: boolean = true ): Promise { return this.request( { pathname: "/_api/collection", search: { excludeSystem }, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -1312,7 +1312,7 @@ export class Database { * ``` */ async collections( - excludeSystem: boolean = true, + excludeSystem: boolean = true ): Promise< Array > { @@ -1352,7 +1352,7 @@ export class Database { async createGraph( graphName: string, edgeDefinitions: graphs.EdgeDefinitionOptions[], - options?: graphs.CreateGraphOptions, + options?: graphs.CreateGraphOptions ): Promise { const graph = this.graph(graphName); await graph.create(edgeDefinitions, options); @@ -1375,7 +1375,7 @@ export class Database { listGraphs(): Promise { return this.request( { pathname: "/_api/gharial" }, - (res) => res.parsedBody.graphs, + (res) => res.parsedBody.graphs ); } @@ -1433,7 +1433,7 @@ export class Database { */ async createView( viewName: string, - options: views.CreateViewOptions, + options: views.CreateViewOptions ): Promise { const view = this.view(viewName); await view.create(options); @@ -1454,7 +1454,7 @@ export class Database { */ async renameView( viewName: string, - newName: string, + newName: string ): Promise> { const result = await this.request({ method: "PUT", @@ -1482,7 +1482,7 @@ export class Database { listViews(): Promise { return this.request( { pathname: "/_api/view" }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -1522,7 +1522,7 @@ export class Database { if (!this._analyzers.has(analyzerName)) { this._analyzers.set( analyzerName, - new analyzers.Analyzer(this, analyzerName), + new analyzers.Analyzer(this, analyzerName) ); } return this._analyzers.get(analyzerName)!; @@ -1544,7 +1544,7 @@ export class Database { */ async createAnalyzer( analyzerName: string, - options: analyzers.CreateAnalyzerOptions, + options: analyzers.CreateAnalyzerOptions ): Promise { const analyzer = this.analyzer(analyzerName); await analyzer.create(options); @@ -1567,7 +1567,7 @@ export class Database { listAnalyzers(): Promise { return this.request( { pathname: "/_api/analyzer" }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -1607,7 +1607,7 @@ export class Database { { pathname: "/_api/user", }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -1624,7 +1624,7 @@ export class Database { * ``` */ getUser( - username: string, + username: string ): Promise> { return this.request({ pathname: `/_api/user/${encodeURIComponent(username)}`, @@ -1646,7 +1646,7 @@ export class Database { */ createUser( username: string, - passwd: string, + passwd: string ): Promise>; /** * Creates a new ArangoDB user with the given 
options. @@ -1663,11 +1663,11 @@ export class Database { */ createUser( username: string, - options: users.UserOptions, + options: users.UserOptions ): Promise>; createUser( username: string, - options: string | users.UserOptions, + options: string | users.UserOptions ): Promise> { if (typeof options === "string") { options = { passwd: options }; @@ -1678,7 +1678,7 @@ export class Database { pathname: "/_api/user", body: { user: username, ...options }, }, - (res) => res.parsedBody, + (res) => res.parsedBody ); } @@ -1697,7 +1697,7 @@ export class Database { */ updateUser( username: string, - passwd: string, + passwd: string ): Promise>; /** * Updates the ArangoDB user with the new options. @@ -1714,11 +1714,11 @@ export class Database { */ updateUser( username: string, - options: Partial, + options: Partial ): Promise>; updateUser( username: string, - options: string | Partial, + options: string | Partial ): Promise> { if (typeof options === "string") { options = { passwd: options }; @@ -1729,7 +1729,7 @@ export class Database { pathname: `/_api/user/${encodeURIComponent(username)}`, body: options, }, - (res) => res.parsedBody, + (res) => res.parsedBody ); } @@ -1748,7 +1748,7 @@ export class Database { */ replaceUser( username: string, - options: users.UserOptions, + options: users.UserOptions ): Promise> { if (typeof options === "string") { options = { passwd: options }; @@ -1759,7 +1759,7 @@ export class Database { pathname: `/_api/user/${encodeURIComponent(username)}`, body: options, }, - (res) => res.parsedBody, + (res) => res.parsedBody ); } @@ -1781,7 +1781,7 @@ export class Database { method: "DELETE", pathname: `/_api/user/${encodeURIComponent(username)}`, }, - () => undefined, + () => undefined ); } @@ -1856,7 +1856,7 @@ export class Database { */ getUserAccessLevel( username: string, - { database, collection }: users.UserAccessLevelOptions, + { database, collection }: users.UserAccessLevelOptions ): Promise { const databaseName = isArangoDatabase(database) ? database.name @@ -1868,16 +1868,16 @@ export class Database { ? `/${encodeURIComponent( collections.isArangoCollection(collection) ? collection.name - : collection, + : collection )}` : ""; return this.request( { pathname: `/_api/user/${encodeURIComponent( - username, + username )}/database/${encodeURIComponent(databaseName)}${suffix}`, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -1959,7 +1959,7 @@ export class Database { database, collection, grant, - }: users.UserAccessLevelOptions & { grant: users.AccessLevel }, + }: users.UserAccessLevelOptions & { grant: users.AccessLevel } ): Promise>> { const databaseName = isArangoDatabase(database) ? database.name @@ -1971,18 +1971,18 @@ export class Database { ? `/${encodeURIComponent( collections.isArangoCollection(collection) ? collection.name - : collection, + : collection )}` : ""; return this.request( { method: "PUT", pathname: `/_api/user/${encodeURIComponent( - username, + username )}/database/${encodeURIComponent(databaseName)}${suffix}`, body: { grant }, }, - (res) => res.parsedBody, + (res) => res.parsedBody ); } @@ -2051,7 +2051,7 @@ export class Database { */ clearUserAccessLevel( username: string, - { database, collection }: users.UserAccessLevelOptions, + { database, collection }: users.UserAccessLevelOptions ): Promise>> { const databaseName = isArangoDatabase(database) ? database.name @@ -2063,17 +2063,17 @@ export class Database { ? `/${encodeURIComponent( collections.isArangoCollection(collection) ? 
collection.name - : collection, + : collection )}` : ""; return this.request( { method: "DELETE", pathname: `/_api/user/${encodeURIComponent( - username, + username )}/database/${encodeURIComponent(databaseName)}${suffix}`, }, - (res) => res.parsedBody, + (res) => res.parsedBody ); } @@ -2095,7 +2095,7 @@ export class Database { */ getUserDatabases( username: string, - full?: false, + full?: false ): Promise>; /** * Fetches an object mapping names of databases to the access level of the @@ -2119,7 +2119,7 @@ export class Database { */ getUserDatabases( username: string, - full: true, + full: true ): Promise< Record< string, @@ -2135,7 +2135,7 @@ export class Database { pathname: `/_api/user/${encodeURIComponent(username)}/database`, search: { full }, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } //#endregion @@ -2195,7 +2195,7 @@ export class Database { allowImplicit?: boolean; }, action: string, - options?: transactions.TransactionOptions & { params?: any }, + options?: transactions.TransactionOptions & { params?: any } ): Promise; /** * Performs a server-side transaction and returns its return value. @@ -2244,7 +2244,7 @@ export class Database { executeTransaction( collections: (string | collections.ArangoCollection)[], action: string, - options?: transactions.TransactionOptions & { params?: any }, + options?: transactions.TransactionOptions & { params?: any } ): Promise; /** * Performs a server-side transaction and returns its return value. @@ -2293,7 +2293,7 @@ export class Database { executeTransaction( collection: string | collections.ArangoCollection, action: string, - options?: transactions.TransactionOptions & { params?: any }, + options?: transactions.TransactionOptions & { params?: any } ): Promise; executeTransaction( collections: @@ -2304,7 +2304,7 @@ export class Database { | string | collections.ArangoCollection, action: string, - options: transactions.TransactionOptions & { params?: any } = {}, + options: transactions.TransactionOptions & { params?: any } = {} ): Promise { const { allowDirtyRead = undefined, ...opts } = options; return this.request( @@ -2318,7 +2318,7 @@ export class Database { ...opts, }, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -2372,7 +2372,7 @@ export class Database { */ beginTransaction( collections: transactions.TransactionCollectionOptions, - options?: transactions.TransactionOptions, + options?: transactions.TransactionOptions ): Promise; /** * Begins a new streaming transaction for the given collections, then returns @@ -2403,7 +2403,7 @@ export class Database { */ beginTransaction( collections: (string | collections.ArangoCollection)[], - options?: transactions.TransactionOptions, + options?: transactions.TransactionOptions ): Promise; /** * Begins a new streaming transaction for the given collections, then returns @@ -2433,7 +2433,7 @@ export class Database { */ beginTransaction( collection: string | collections.ArangoCollection, - options?: transactions.TransactionOptions, + options?: transactions.TransactionOptions ): Promise; beginTransaction( collections: @@ -2441,7 +2441,7 @@ export class Database { | (string | collections.ArangoCollection)[] | string | collections.ArangoCollection, - options: transactions.TransactionOptions = {}, + options: transactions.TransactionOptions = {} ): Promise { const { allowDirtyRead = undefined, ...opts } = options; return this.request( @@ -2454,7 +2454,7 @@ export class Database { ...opts, }, }, - (res) => new transactions.Transaction(this, 
res.parsedBody.result.id), + (res) => new transactions.Transaction(this, res.parsedBody.result.id) ); } @@ -2494,7 +2494,7 @@ export class Database { withTransaction( collections: transactions.TransactionCollectionOptions, callback: (step: transactions.Transaction["step"]) => Promise, - options?: transactions.TransactionOptions, + options?: transactions.TransactionOptions ): Promise; /** * Begins and commits a transaction using the given callback. Individual @@ -2532,7 +2532,7 @@ export class Database { withTransaction( collections: (string | collections.ArangoCollection)[], callback: (step: transactions.Transaction["step"]) => Promise, - options?: transactions.TransactionOptions, + options?: transactions.TransactionOptions ): Promise; /** * Begins and commits a transaction using the given callback. Individual @@ -2567,7 +2567,7 @@ export class Database { withTransaction( collection: string | collections.ArangoCollection, callback: (step: transactions.Transaction["step"]) => Promise, - options?: transactions.TransactionOptions, + options?: transactions.TransactionOptions ): Promise; async withTransaction( collections: @@ -2576,11 +2576,11 @@ export class Database { | string | collections.ArangoCollection, callback: (step: transactions.Transaction["step"]) => Promise, - options: transactions.TransactionOptions = {}, + options: transactions.TransactionOptions = {} ): Promise { const trx = await this.beginTransaction( collections as transactions.TransactionCollectionOptions, - options, + options ); try { const result = await callback((fn) => trx.step(fn)); @@ -2607,10 +2607,10 @@ export class Database { * // transactions is an array of transaction descriptions * ``` */ - listTransactions(): Promise { + listTransactions(): Promise { return this._connection.request( { pathname: "/_api/transaction" }, - (res) => res.parsedBody.transactions, + (res) => res.parsedBody.transactions ); } @@ -2686,7 +2686,7 @@ export class Database { */ query( query: aql.AqlQuery, - options?: queries.QueryOptions, + options?: queries.QueryOptions ): Promise>; /** * Performs a database query using the given `query` and `bindVars`, then @@ -2740,12 +2740,12 @@ export class Database { query( query: string | aql.AqlLiteral, bindVars?: Record, - options?: queries.QueryOptions, + options?: queries.QueryOptions ): Promise>; query( query: string | aql.AqlQuery | aql.AqlLiteral, bindVars?: Record, - options: queries.QueryOptions = {}, + options: queries.QueryOptions = {} ): Promise> { if (aql.isAqlQuery(query)) { options = bindVars ?? {}; @@ -2788,8 +2788,8 @@ export class Database { this, res.parsedBody, res.arangojsHostUrl, - allowDirtyRead, - ).items, + allowDirtyRead + ).items ); } @@ -2817,7 +2817,7 @@ export class Database { */ explain( query: aql.AqlQuery, - options?: queries.ExplainOptions & { allPlans?: false }, + options?: queries.ExplainOptions & { allPlans?: false } ): Promise>; /** * Explains a database query using the given `query`. @@ -2846,7 +2846,7 @@ export class Database { */ explain( query: aql.AqlQuery, - options?: queries.ExplainOptions & { allPlans: true }, + options?: queries.ExplainOptions & { allPlans: true } ): Promise>; /** * Explains a database query using the given `query` and `bindVars`. @@ -2875,7 +2875,7 @@ export class Database { explain( query: string | aql.AqlLiteral, bindVars?: Record, - options?: queries.ExplainOptions & { allPlans?: false }, + options?: queries.ExplainOptions & { allPlans?: false } ): Promise>; /** * Explains a database query using the given `query` and `bindVars`. 
@@ -2905,12 +2905,12 @@ export class Database { explain( query: string | aql.AqlLiteral, bindVars?: Record, - options?: queries.ExplainOptions & { allPlans: true }, + options?: queries.ExplainOptions & { allPlans: true } ): Promise>; explain( query: string | aql.AqlQuery | aql.AqlLiteral, bindVars?: Record, - options?: queries.ExplainOptions, + options?: queries.ExplainOptions ): Promise< connection.ArangoApiResponse< queries.SingleExplainResult | queries.MultiExplainResult @@ -2953,7 +2953,7 @@ export class Database { * ``` aql.*/ parse( - query: string | aql.AqlQuery | aql.AqlLiteral, + query: string | aql.AqlQuery | aql.AqlLiteral ): Promise { if (aql.isAqlQuery(query)) { query = query.query; @@ -3014,10 +3014,10 @@ export class Database { * ``` */ queryTracking( - options: queries.QueryTrackingOptions, + options: queries.QueryTrackingOptions ): Promise; queryTracking( - options?: queries.QueryTrackingOptions, + options?: queries.QueryTrackingOptions ): Promise { return this.request( options @@ -3029,7 +3029,7 @@ export class Database { : { method: "GET", pathname: "/_api/query/properties", - }, + } ); } @@ -3089,7 +3089,7 @@ export class Database { method: "DELETE", pathname: "/_api/query/slow", }, - () => undefined, + () => undefined ); } @@ -3119,7 +3119,7 @@ export class Database { method: "DELETE", pathname: `/_api/query/${encodeURIComponent(queryId)}`, }, - () => undefined, + () => undefined ); } //#endregion @@ -3138,7 +3138,7 @@ export class Database { listUserFunctions(): Promise { return this.request( { pathname: "/_api/aqlfunction" }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -3176,7 +3176,7 @@ export class Database { createUserFunction( name: string, code: string, - isDeterministic: boolean = false, + isDeterministic: boolean = false ): Promise> { return this.request({ method: "POST", @@ -3202,7 +3202,7 @@ export class Database { */ dropUserFunction( name: string, - group: boolean = false, + group: boolean = false ): Promise> { return this.request({ method: "DELETE", @@ -3231,7 +3231,7 @@ export class Database { * ``` */ listServices( - excludeSystem: boolean = true, + excludeSystem: boolean = true ): Promise { return this.request({ pathname: "/_api/foxx", @@ -3274,7 +3274,7 @@ export class Database { async installService( mount: string, source: File | Blob | string, - options: services.InstallServiceOptions = {}, + options: services.InstallServiceOptions = {} ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); @@ -3286,7 +3286,7 @@ export class Database { } form.append( "source", - typeof source === "string" ? JSON.stringify(source) : source, + typeof source === "string" ? JSON.stringify(source) : source ); return await this.request({ body: form, @@ -3332,7 +3332,7 @@ export class Database { async replaceService( mount: string, source: File | Blob | string, - options: services.ReplaceServiceOptions = {}, + options: services.ReplaceServiceOptions = {} ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); @@ -3344,7 +3344,7 @@ export class Database { } form.append( "source", - typeof source === "string" ? JSON.stringify(source) : source, + typeof source === "string" ? 
JSON.stringify(source) : source ); return await this.request({ body: form, @@ -3390,7 +3390,7 @@ export class Database { async upgradeService( mount: string, source: File | Blob | string, - options: services.UpgradeServiceOptions = {}, + options: services.UpgradeServiceOptions = {} ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); @@ -3402,7 +3402,7 @@ export class Database { } form.append( "source", - typeof source === "string" ? JSON.stringify(source) : source, + typeof source === "string" ? JSON.stringify(source) : source ); return await this.request({ body: form, @@ -3426,7 +3426,7 @@ export class Database { */ uninstallService( mount: string, - options?: services.UninstallServiceOptions, + options?: services.UninstallServiceOptions ): Promise { return this.request( { @@ -3434,7 +3434,7 @@ export class Database { pathname: "/_api/foxx/service", search: { ...options, mount }, }, - () => undefined, + () => undefined ); } @@ -3480,7 +3480,7 @@ export class Database { */ getServiceConfiguration( mount: string, - minimal?: false, + minimal?: false ): Promise>; /** * Retrieves information about the service's configuration options and their @@ -3505,7 +3505,7 @@ export class Database { */ getServiceConfiguration( mount: string, - minimal: true, + minimal: true ): Promise>; getServiceConfiguration(mount: string, minimal: boolean = false) { return this.request({ @@ -3541,7 +3541,7 @@ export class Database { replaceServiceConfiguration( mount: string, cfg: Record, - minimal?: false, + minimal?: false ): Promise< Record >; @@ -3572,7 +3572,7 @@ export class Database { replaceServiceConfiguration( mount: string, cfg: Record, - minimal: true, + minimal: true ): Promise<{ values: Record; warnings: Record; @@ -3580,7 +3580,7 @@ export class Database { replaceServiceConfiguration( mount: string, cfg: Record, - minimal: boolean = false, + minimal: boolean = false ) { return this.request({ method: "PUT", @@ -3617,7 +3617,7 @@ export class Database { updateServiceConfiguration( mount: string, cfg: Record, - minimal?: false, + minimal?: false ): Promise< Record >; @@ -3648,7 +3648,7 @@ export class Database { updateServiceConfiguration( mount: string, cfg: Record, - minimal: true, + minimal: true ): Promise<{ values: Record; warnings: Record; @@ -3656,7 +3656,7 @@ export class Database { updateServiceConfiguration( mount: string, cfg: Record, - minimal: boolean = false, + minimal: boolean = false ) { return this.request({ method: "PATCH", @@ -3689,7 +3689,7 @@ export class Database { */ getServiceDependencies( mount: string, - minimal?: false, + minimal?: false ): Promise< Record< string, @@ -3719,7 +3719,7 @@ export class Database { */ getServiceDependencies( mount: string, - minimal: true, + minimal: true ): Promise>; getServiceDependencies(mount: string, minimal: boolean = false) { return this.request({ @@ -3755,7 +3755,7 @@ export class Database { replaceServiceDependencies( mount: string, deps: Record, - minimal?: false, + minimal?: false ): Promise< Record< string, @@ -3795,7 +3795,7 @@ export class Database { replaceServiceDependencies( mount: string, deps: Record, - minimal: true, + minimal: true ): Promise<{ values: Record; warnings: Record; @@ -3803,7 +3803,7 @@ export class Database { replaceServiceDependencies( mount: string, deps: Record, - minimal: boolean = false, + minimal: boolean = false ) { return this.request({ method: "PUT", @@ -3840,7 +3840,7 @@ export class Database { updateServiceDependencies( mount: string, deps: Record, - 
minimal?: false, + minimal?: false ): Promise< Record< string, @@ -3880,7 +3880,7 @@ export class Database { updateServiceDependencies( mount: string, deps: Record, - minimal: true, + minimal: true ): Promise<{ values: Record; warnings: Record; @@ -3888,7 +3888,7 @@ export class Database { updateServiceDependencies( mount: string, deps: Record, - minimal: boolean = false, + minimal: boolean = false ) { return this.request({ method: "PATCH", @@ -3915,7 +3915,7 @@ export class Database { */ setServiceDevelopmentMode( mount: string, - enabled: boolean = true, + enabled: boolean = true ): Promise { return this.request({ method: enabled ? "POST" : "DELETE", @@ -4006,7 +4006,7 @@ export class Database { * executed. */ filter?: string; - }, + } ): Promise; /** * Runs the tests of a given service and returns the results using the @@ -4038,7 +4038,7 @@ export class Database { * executed. */ filter?: string; - }, + } ): Promise; /** * Runs the tests of a given service and returns the results using the @@ -4071,7 +4071,7 @@ export class Database { * executed. */ filter?: string; - }, + } ): Promise; /** * Runs the tests of a given service and returns the results using the @@ -4104,7 +4104,7 @@ export class Database { * executed. */ filter?: string; - }, + } ): Promise; /** * Runs the tests of a given service and returns the results using the @@ -4137,7 +4137,7 @@ export class Database { * executed. */ filter?: string; - }, + } ): Promise; /** * Runs the tests of a given service and returns the results as a string @@ -4171,7 +4171,7 @@ export class Database { * executed. */ filter?: string; - }, + } ): Promise; /** * Runs the tests of a given service and returns the results as a string @@ -4205,7 +4205,7 @@ export class Database { * executed. */ filter?: string; - }, + } ): Promise; /** * Runs the tests of a given service and returns the results as a string @@ -4239,7 +4239,7 @@ export class Database { * executed. */ filter?: string; - }, + } ): Promise; runServiceTests( mount: string, @@ -4247,7 +4247,7 @@ export class Database { reporter?: string; idiomatic?: boolean; filter?: string; - }, + } ) { return this.request({ method: "POST", @@ -4350,7 +4350,7 @@ export class Database { pathname: "/_api/foxx/commit", search: { replace }, }, - () => undefined, + () => undefined ); } //#endregion @@ -4370,7 +4370,7 @@ export class Database { * ``` */ createHotBackup( - options: hotBackups.HotBackupOptions = {}, + options: hotBackups.HotBackupOptions = {} ): Promise { return this.request( { @@ -4378,7 +4378,7 @@ export class Database { pathname: "/_admin/backup/create", body: options, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -4404,7 +4404,7 @@ export class Database { pathname: "/_admin/backup/list", body: id ? 
{ id } : undefined, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -4428,7 +4428,7 @@ export class Database { pathname: "/_admin/backup/restore", body: { id }, }, - (res) => res.parsedBody.result.previous, + (res) => res.parsedBody.result.previous ); } @@ -4450,7 +4450,7 @@ export class Database { pathname: "/_admin/backup/delete", body: { id }, }, - () => undefined, + () => undefined ); } //#endregion @@ -4476,7 +4476,7 @@ export class Database { pathname: "/_admin/log/entries", search: options, }, - (res) => res.parsedBody, + (res) => res.parsedBody ); } @@ -4497,14 +4497,14 @@ export class Database { * ``` */ listLogMessages( - options?: logs.LogEntriesOptions, + options?: logs.LogEntriesOptions ): Promise { return this.request( { pathname: "/_admin/log", search: options, }, - (res) => res.parsedBody.messages, + (res) => res.parsedBody.messages ); } @@ -4537,7 +4537,7 @@ export class Database { * ``` */ setLogLevel( - levels: Record, + levels: Record ): Promise> { return this.request({ method: "PUT", @@ -4573,7 +4573,7 @@ export class Database { const trap = new Promise>( (resolveTrap) => { this._trapRequest = (trapped) => resolveTrap(trapped); - }, + } ); const eventualResult = callback(); const trapped = await trap; @@ -4589,7 +4589,7 @@ export class Database { (e) => { onReject(e); return eventualResult; - }, + } ); } @@ -4623,7 +4623,7 @@ export class Database { { pathname: "/_api/job/pending", }, - (res) => res.parsedBody, + (res) => res.parsedBody ); } @@ -4642,7 +4642,7 @@ export class Database { { pathname: "/_api/job/done", }, - (res) => res.parsedBody, + (res) => res.parsedBody ); } @@ -4667,7 +4667,7 @@ export class Database { pathname: `/_api/job/expired`, search: { stamp: threshold / 1000 }, }, - () => undefined, + () => undefined ); } @@ -4680,7 +4680,7 @@ export class Database { method: "DELETE", pathname: `/_api/job/all`, }, - () => undefined, + () => undefined ); } //#endregion diff --git a/src/transactions.ts b/src/transactions.ts index 1d97463c9..7f222187f 100644 --- a/src/transactions.ts +++ b/src/transactions.ts @@ -54,7 +54,7 @@ export function coerceTransactionCollections( | (TransactionCollectionOptions & { allowImplicit?: boolean }) | (string | collections.ArangoCollection)[] | string - | collections.ArangoCollection, + | collections.ArangoCollection ): CoercedTransactionCollections { if (typeof options === "string") { return { write: [options] }; @@ -167,11 +167,11 @@ export type TransactionAbortOptions = { //#region Transaction operation results /** - * Details for a transaction. + * Description of a transaction in a list of transactions. * * See also {@link TransactionInfo}. */ -export type TransactionDetails = { +export type TransactionDescription = { /** * Unique identifier of the transaction. */ @@ -185,7 +185,7 @@ export type TransactionDetails = { /** * Status of a given transaction. * - * See also {@link TransactionDetails}. + * See also {@link TransactionDescription}. */ export type TransactionInfo = { /** @@ -205,7 +205,7 @@ export type TransactionInfo = { * @param transaction - A value that might be a transaction. 
*/ export function isArangoTransaction( - transaction: any, + transaction: any ): transaction is Transaction { return Boolean(transaction && transaction.isArangoTransaction); } @@ -291,7 +291,7 @@ export class Transaction { { pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -318,7 +318,7 @@ export class Transaction { pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, allowDirtyRead, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -345,7 +345,7 @@ export class Transaction { pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, allowDirtyRead, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -543,7 +543,7 @@ export class Transaction { const promise = callback(); if (!promise) { throw new Error( - "Transaction callback was not an async function or did not return a promise!", + "Transaction callback was not an async function or did not return a promise!" ); } return Promise.resolve(promise); From ab5887c856799b56108e1f7b5d483f96c9d1fdc3 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 10 Dec 2024 17:41:27 +0100 Subject: [PATCH 18/21] Add support for redirect=manual Fixes #613. --- CHANGELOG.md | 11 +++++++++++ src/connection.ts | 45 ++++++++++++++++++++++++++------------------- 2 files changed, 37 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e71842a0..751da27bf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -390,6 +390,17 @@ for upgrading your code to arangojs v10. If the `onError` callback throws an error or returns a promise that is rejected, that error will be thrown instead. +- Added support for `config.fetchOptions.redirect` option ([#613](https://github.com/arangodb/arangojs/issues/613)) + + This option can now be used to specify the redirect mode for requests. + + When set to `"manual"`, arangojs will throw an `HttpError` wrapping the + redirect response instead of automatically following redirects. + + Note that when set to `"error"`, the native fetch API will throw a + non-specific error (usually a `TypeError`) that arangojs will wrap in a + `FetchFailedError` instead. + - Added optional `ArangoError#request` property This property is always present if the error has a `response` property. In diff --git a/src/connection.ts b/src/connection.ts index 336678988..a73557f98 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -41,7 +41,7 @@ type Host = { | "hostUrl" | "expectBinary" | "isBinary" - >, + > ) => Promise; /** * @internal @@ -134,8 +134,8 @@ function createHost(arangojsHostUrl: string, agentOptions?: any): Host { headers.set( "authorization", `Basic ${btoa( - `${baseUrl.username || "root"}:${baseUrl.password || ""}`, - )}`, + `${baseUrl.username || "root"}:${baseUrl.password || ""}` + )}` ); } const abortController = new AbortController(); @@ -171,10 +171,15 @@ function createHost(arangojsHostUrl: string, agentOptions?: any): Host { request, arangojsHostUrl, }); + if (fetchOptions?.redirect === "manual" && isRedirect(response)) { + throw new errors.HttpError(response); + } } catch (e: unknown) { const cause = e instanceof Error ? e : new Error(String(e)); let error: errors.NetworkError; - if (signal.aborted) { + if (cause instanceof errors.NetworkError) { + error = cause; + } else if (signal.aborted) { const reason = typeof signal.reason == "string" ? 
signal.reason : undefined; if (reason === REASON_TIMEOUT) { @@ -276,7 +281,7 @@ const STATUS_CODE_DEFAULT_MESSAGES = { type KnownStatusCode = keyof typeof STATUS_CODE_DEFAULT_MESSAGES; const KNOWN_STATUS_CODES = Object.keys(STATUS_CODE_DEFAULT_MESSAGES).map((k) => - Number(k), + Number(k) ) as KnownStatusCode[]; const REDIRECT_CODES = [301, 302, 303, 307, 308] satisfies KnownStatusCode[]; type RedirectStatusCode = (typeof REDIRECT_CODES)[number]; @@ -292,9 +297,11 @@ function isKnownStatusCode(code: number): code is KnownStatusCode { } /** + * @internal + * * Indicates whether the given status code represents a redirect. */ -export function isRedirect(response: ProcessedResponse): boolean { +function isRedirect(response: ProcessedResponse): boolean { return REDIRECT_CODES.includes(response.status as RedirectStatusCode); } @@ -333,7 +340,7 @@ export type ArangoApiResponse = T & ArangoResponseMetadata; * Indicates whether the given value represents an ArangoDB error response. */ export function isArangoErrorResponse( - body: unknown, + body: unknown ): body is ArangoErrorResponse { if (!body || typeof body !== "object") return false; const obj = body as Record; @@ -576,7 +583,7 @@ export type CommonRequestOptions = { */ afterResponse?: ( err: errors.NetworkError | null, - res?: globalThis.Response & { request: globalThis.Request }, + res?: globalThis.Response & { request: globalThis.Request } ) => void | Promise; }; @@ -726,11 +733,11 @@ export class Connection { this._commonFetchOptions.headers.set( "x-arango-version", - String(arangoVersion), + String(arangoVersion) ); this._commonFetchOptions.headers.set( "x-arango-driver", - `arangojs/${process.env.ARANGOJS_VERSION} (cloud)`, + `arangojs/${process.env.ARANGOJS_VERSION} (cloud)` ); this.addToHostList(URLS); @@ -908,7 +915,7 @@ export class Connection { setBasicAuth(auth: configuration.BasicAuthCredentials) { this.setHeader( "authorization", - `Basic ${btoa(`${auth.username}:${auth.password}`)}`, + `Basic ${btoa(`${auth.username}:${auth.password}`)}` ); } @@ -942,7 +949,7 @@ export class Connection { */ database( databaseName: string, - database: databases.Database, + database: databases.Database ): databases.Database; /** * @internal @@ -956,7 +963,7 @@ export class Connection { database(databaseName: string, database: null): undefined; database( databaseName: string, - database?: databases.Database | null, + database?: databases.Database | null ): databases.Database | undefined { if (database === null) { this._databases.delete(databaseName); @@ -987,7 +994,7 @@ export class Connection { const i = this._hostUrls.indexOf(url); if (i !== -1) return this._hosts[i]; return createHost(url); - }), + }) ); this._hostUrls.splice(0, this._hostUrls.length, ...cleanUrls); } @@ -1003,10 +1010,10 @@ export class Connection { */ addToHostList(urls: string | string[]): string[] { const cleanUrls = (Array.isArray(urls) ? 
urls : [urls]).map((url) => - util.normalizeUrl(url), + util.normalizeUrl(url) ); const newUrls = cleanUrls.filter( - (url) => this._hostUrls.indexOf(url) === -1, + (url) => this._hostUrls.indexOf(url) === -1 ); this._hostUrls.push(...newUrls); this._hosts.push(...newUrls.map((url) => createHost(url))); @@ -1127,8 +1134,8 @@ export class Connection { res: globalThis.Response & { request: globalThis.Request; parsedBody?: any; - }, - ) => T, + } + ) => T ): Promise { const { hostUrl, @@ -1146,7 +1153,7 @@ export class Connection { const headers = util.mergeHeaders( this._commonFetchOptions.headers, - requestHeaders, + requestHeaders ); let body = requestBody; From f558557e6eb6876a95551a8dfeac0b76188f1987 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 10 Dec 2024 17:43:04 +0100 Subject: [PATCH 19/21] 10.0.0-rc.0 --- CHANGELOG.md | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 751da27bf..39361466c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,7 +14,7 @@ This driver uses semantic versioning: - A change in the major version (e.g. 1.Y.Z -> 2.0.0) indicates _breaking_ changes that require changes in your code to upgrade. -## [Unreleased] +## [10.0.0-rc.0] - 2024-12-10 This is a major release and breaks backwards compatibility. @@ -2396,7 +2396,7 @@ For a detailed list of changes between pre-release versions of v7 see the Graph methods now only return the relevant part of the response body. -[unreleased]: https://github.com/arangodb/arangojs/compare/v9.2.0...v10 +[10.0.0-rc.0]: https://github.com/arangodb/arangojs/compare/v9.2.0...v10.0.0-rc.0 [9.2.0]: https://github.com/arangodb/arangojs/compare/v9.1.0...v9.2.0 [9.1.0]: https://github.com/arangodb/arangojs/compare/v9.0.0...v9.1.0 [9.0.0]: https://github.com/arangodb/arangojs/compare/v8.8.1...v9.0.0 diff --git a/package.json b/package.json index dd2f0b357..8e0adca00 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "private": true, "type": "module", "name": "arangojs", - "version": "10.0.0-alpha.1", + "version": "10.0.0-rc.0", "engines": { "node": ">=18" }, From 99d469ffea6170fcb5d87d5e63b6574d691b7b37 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Wed, 18 Dec 2024 18:03:01 +0100 Subject: [PATCH 20/21] Add index types Fixes DE-956. Fixes DE-957. Fixes DE-958. --- CHANGELOG.md | 16 ++++ src/administration.ts | 2 +- src/collections.ts | 215 ++++++++++++++++++++++++------------------ src/indexes.ts | 124 +++++++++++++++++++++++- 4 files changed, 257 insertions(+), 100 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 39361466c..f5e1efa10 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,22 @@ This driver uses semantic versioning: - A change in the major version (e.g. 1.Y.Z -> 2.0.0) indicates _breaking_ changes that require changes in your code to upgrade. +## [Unreleased] + +### Added + +- Added support for `mdi-prefixed` indexes (DE-956) + +- Restored `fulltext` index type support (DE-957) + + The `fulltext` index type is still no longer supported for creating new + indexes but can be used to cast existing indexes from `IndexDescription`. + +- Added support for `edge` indexes (DE-958) + + The `SystemIndexDescription` type now includes the `EdgeIndexDescription` + type as well as the `PrimaryIndexDescription` type. + ## [10.0.0-rc.0] - 2024-12-10 This is a major release and breaks backwards compatibility. 
diff --git a/src/administration.ts b/src/administration.ts index 262989829..b8ba2ac10 100644 --- a/src/administration.ts +++ b/src/administration.ts @@ -142,7 +142,7 @@ export type ServerStatusInformation = { /** * Server operation mode. * - * @deprecated use `operationMode` instead + * @deprecated Use `operationMode` instead */ mode: "server" | "console"; /** diff --git a/src/collections.ts b/src/collections.ts index 38929f214..bc5abb348 100644 --- a/src/collections.ts +++ b/src/collections.ts @@ -26,7 +26,7 @@ import { COLLECTION_NOT_FOUND, DOCUMENT_NOT_FOUND } from "./lib/codes.js"; * @param collection - A value that might be a collection. */ export function isArangoCollection( - collection: any, + collection: any ): collection is ArangoCollection { return Boolean(collection && collection.isArangoCollection); } @@ -38,7 +38,7 @@ export function isArangoCollection( * @param collection - Collection name or {@link ArangoCollection} object. */ export function collectionToString( - collection: string | ArangoCollection, + collection: string | ArangoCollection ): string { if (isArangoCollection(collection)) { return String(collection.name); @@ -663,7 +663,7 @@ export interface DocumentCollection< create( options?: CreateCollectionOptions & { type?: CollectionType; - }, + } ): Promise< connection.ArangoApiResponse >; @@ -694,7 +694,7 @@ export interface DocumentCollection< * ``` */ properties( - properties: CollectionPropertiesOptions, + properties: CollectionPropertiesOptions ): Promise< connection.ArangoApiResponse >; @@ -745,7 +745,7 @@ export interface DocumentCollection< * ``` */ figures( - details?: boolean, + details?: boolean ): Promise< connection.ArangoApiResponse< CollectionDescription & @@ -782,7 +782,7 @@ export interface DocumentCollection< * ``` */ checksum( - options?: CollectionChecksumOptions, + options?: CollectionChecksumOptions ): Promise< connection.ArangoApiResponse< CollectionDescription & { revision: string; checksum: string } @@ -811,7 +811,7 @@ export interface DocumentCollection< * ``` */ rename( - newName: string, + newName: string ): Promise>; /** * Deletes all documents in the collection. @@ -826,7 +826,7 @@ export interface DocumentCollection< * ``` */ truncate( - options?: TruncateCollectionOptions, + options?: TruncateCollectionOptions ): Promise>; /** * Deletes the collection from the database. @@ -842,7 +842,7 @@ export interface DocumentCollection< * ``` */ drop( - options?: DropCollectionOptions, + options?: DropCollectionOptions ): Promise>; /** * Triggers compaction for a collection. @@ -872,7 +872,7 @@ export interface DocumentCollection< * ``` */ getResponsibleShard( - document: Partial>, + document: Partial> ): Promise; /** * Derives a document `_id` from the given selector for this collection. @@ -930,7 +930,7 @@ export interface DocumentCollection< */ documentExists( selector: documents.DocumentSelector, - options?: documents.DocumentExistsOptions, + options?: documents.DocumentExistsOptions ): Promise; /** * Retrieves the document matching the given key or id. @@ -968,7 +968,7 @@ export interface DocumentCollection< */ document( selector: documents.DocumentSelector, - options?: documents.ReadDocumentOptions, + options?: documents.ReadDocumentOptions ): Promise>; /** * Retrieves the document matching the given key or id. @@ -1007,7 +1007,7 @@ export interface DocumentCollection< */ document( selector: documents.DocumentSelector, - graceful: boolean, + graceful: boolean ): Promise>; /** * Retrieves the documents matching the given key or id values. 
@@ -1033,7 +1033,7 @@ export interface DocumentCollection< */ documents( selectors: (string | documents.ObjectWithDocumentKey)[], - options?: documents.BulkReadDocumentsOptions, + options?: documents.BulkReadDocumentsOptions ): Promise[]>; /** * Inserts a new document with the given `data` into the collection. @@ -1054,7 +1054,7 @@ export interface DocumentCollection< */ save( data: documents.DocumentData, - options?: documents.InsertDocumentOptions, + options?: documents.InsertDocumentOptions ): Promise< documents.DocumentOperationMetadata & { new?: documents.Document; @@ -1084,7 +1084,7 @@ export interface DocumentCollection< */ saveAll( data: Array>, - options?: documents.InsertDocumentOptions, + options?: documents.InsertDocumentOptions ): Promise< Array< | (documents.DocumentOperationMetadata & { @@ -1121,7 +1121,7 @@ export interface DocumentCollection< replace( selector: documents.DocumentSelector, newData: documents.DocumentData, - options?: documents.ReplaceDocumentOptions, + options?: documents.ReplaceDocumentOptions ): Promise< documents.DocumentOperationMetadata & { new?: documents.Document; @@ -1157,7 +1157,7 @@ export interface DocumentCollection< documents.DocumentData & ({ _key: string } | { _id: string }) >, - options?: Omit, + options?: Omit ): Promise< Array< | (documents.DocumentOperationMetadata & { @@ -1194,7 +1194,7 @@ export interface DocumentCollection< update( selector: documents.DocumentSelector, newData: documents.Patch>, - options?: documents.UpdateDocumentOptions, + options?: documents.UpdateDocumentOptions ): Promise< documents.DocumentOperationMetadata & { new?: documents.Document; @@ -1230,7 +1230,7 @@ export interface DocumentCollection< documents.Patch> & ({ _key: string } | { _id: string }) >, - options?: Omit, + options?: Omit ): Promise< Array< | (documents.DocumentOperationMetadata & { @@ -1269,7 +1269,7 @@ export interface DocumentCollection< */ remove( selector: documents.DocumentSelector, - options?: documents.RemoveDocumentOptions, + options?: documents.RemoveDocumentOptions ): Promise< documents.DocumentMetadata & { old?: documents.Document } >; @@ -1293,7 +1293,7 @@ export interface DocumentCollection< */ removeAll( selectors: (string | documents.ObjectWithDocumentKey)[], - options?: Omit, + options?: Omit ): Promise< Array< | (documents.DocumentMetadata & { @@ -1323,7 +1323,7 @@ export interface DocumentCollection< */ import( data: documents.DocumentData[], - options?: documents.ImportDocumentsOptions, + options?: documents.ImportDocumentsOptions ): Promise; /** * Bulk imports the given `data` into the collection. @@ -1349,7 +1349,7 @@ export interface DocumentCollection< */ import( data: any[][], - options?: documents.ImportDocumentsOptions, + options?: documents.ImportDocumentsOptions ): Promise; /** * Bulk imports the given `data` into the collection. @@ -1411,7 +1411,7 @@ export interface DocumentCollection< data: Buffer | Blob | string, options?: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; - }, + } ): Promise; //#endregion @@ -1455,7 +1455,7 @@ export interface DocumentCollection< | indexes.IndexDescription | indexes.HiddenIndexDescription = indexes.IndexDescription, >( - options?: indexes.ListIndexesOptions, + options?: indexes.ListIndexesOptions ): Promise; /** * Returns an index description by name or `id` if it exists. 
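A brief sketch of how the batch document methods documented in this hunk combine; the collection name, keys, and option values are illustrative.

```js
const db = new Database();
const users = db.collection("users");
// Insert a batch and receive the stored documents via `returnNew`
const results = await users.saveAll(
  [
    { _key: "a", name: "Ada" },
    { _key: "b", name: "Bob" },
  ],
  { returnNew: true }
);
console.log(results.map((result) => result.new?.name));
// Patch one document, then remove the whole batch again
await users.update("a", { active: true });
await users.removeAll(["a", "b"]);
```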
@@ -1489,7 +1489,7 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsurePersistentIndexOptions, + options: indexes.EnsurePersistentIndexOptions ): Promise< connection.ArangoApiResponse< indexes.PersistentIndexDescription & { isNewlyCreated: boolean } @@ -1525,7 +1525,7 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureTtlIndexOptions, + options: indexes.EnsureTtlIndexOptions ): Promise< connection.ArangoApiResponse< indexes.TtlIndexDescription & { isNewlyCreated: boolean } @@ -1550,12 +1550,39 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureMdiIndexOptions, + options: indexes.EnsureMdiIndexOptions ): Promise< connection.ArangoApiResponse< indexes.MdiIndexDescription & { isNewlyCreated: boolean } > >; + /** + * Creates a prefixed multi-dimensional index on the collection if it does + * not already exist. + * + * @param options - Options for creating the prefixed multi-dimensional index. + * + * @example + * ```js + * const db = new Database(); + * const collection = db.collection("some-points"); + * // Create a multi-dimensional index for the attributes x, y and z + * await collection.ensureIndex({ + * type: "mdi-prefixed", + * fields: ["x", "y", "z"], + * prefixFields: ["x"], + * fieldValueTypes: "double" + * }); + * ``` + * ``` + */ + ensureIndex( + options: indexes.EnsureMdiPrefixedIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.MdiPrefixedIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a geo index on the collection if it does not already exist. * @@ -1574,7 +1601,7 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureGeoIndexOptions, + options: indexes.EnsureGeoIndexOptions ): Promise< connection.ArangoApiResponse< indexes.GeoIndexDescription & { isNewlyCreated: boolean } @@ -1597,7 +1624,7 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureInvertedIndexOptions, + options: indexes.EnsureInvertedIndexOptions ): Promise< connection.ArangoApiResponse< indexes.InvertedIndexDescription & { isNewlyCreated: boolean } @@ -1622,7 +1649,7 @@ export interface DocumentCollection< * ``` */ ensureIndex( - options: indexes.EnsureIndexOptions, + options: indexes.EnsureIndexOptions ): Promise< connection.ArangoApiResponse< indexes.IndexDescription & { isNewlyCreated: boolean } @@ -1642,7 +1669,7 @@ export interface DocumentCollection< * ``` */ dropIndex( - selector: indexes.IndexSelector, + selector: indexes.IndexSelector ): Promise>; //#endregion } @@ -1717,7 +1744,7 @@ export interface EdgeCollection< */ document( selector: documents.DocumentSelector, - options?: documents.ReadDocumentOptions, + options?: documents.ReadDocumentOptions ): Promise>; /** * Retrieves the document matching the given key or id. @@ -1756,7 +1783,7 @@ export interface EdgeCollection< */ document( selector: documents.DocumentSelector, - graceful: boolean, + graceful: boolean ): Promise>; /** * Retrieves the documents matching the given key or id values. @@ -1782,7 +1809,7 @@ export interface EdgeCollection< */ documents( selectors: (string | documents.ObjectWithDocumentKey)[], - options?: documents.BulkReadDocumentsOptions, + options?: documents.BulkReadDocumentsOptions ): Promise[]>; /** * Inserts a new document with the given `data` into the collection. 
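To round out the `ensureIndex` overloads above, a minimal sketch of creating and later dropping an index; the collection name and fields are placeholders.

```js
const db = new Database();
const users = db.collection("users");
// `ensureIndex` is idempotent; the response reports whether the index was created
const index = await users.ensureIndex({
  type: "persistent",
  fields: ["email"],
  unique: true,
});
console.log(index.isNewlyCreated);
// The index can later be removed via its id
await users.dropIndex(index.id);
```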
@@ -1802,7 +1829,7 @@ export interface EdgeCollection< */ save( data: documents.EdgeData, - options?: documents.InsertDocumentOptions, + options?: documents.InsertDocumentOptions ): Promise< documents.DocumentOperationMetadata & { new?: documents.Edge; @@ -1830,7 +1857,7 @@ export interface EdgeCollection< */ saveAll( data: Array>, - options?: documents.InsertDocumentOptions, + options?: documents.InsertDocumentOptions ): Promise< Array< | (documents.DocumentOperationMetadata & { @@ -1875,7 +1902,7 @@ export interface EdgeCollection< replace( selector: documents.DocumentSelector, newData: documents.DocumentData, - options?: documents.ReplaceDocumentOptions, + options?: documents.ReplaceDocumentOptions ): Promise< documents.DocumentOperationMetadata & { new?: documents.Edge; @@ -1927,7 +1954,7 @@ export interface EdgeCollection< documents.DocumentData & ({ _key: string } | { _id: string }) >, - options?: documents.ReplaceDocumentOptions, + options?: documents.ReplaceDocumentOptions ): Promise< Array< | (documents.DocumentOperationMetadata & { @@ -1972,7 +1999,7 @@ export interface EdgeCollection< update( selector: documents.DocumentSelector, newData: documents.Patch>, - options?: documents.UpdateDocumentOptions, + options?: documents.UpdateDocumentOptions ): Promise< documents.DocumentOperationMetadata & { new?: documents.Edge; @@ -2022,7 +2049,7 @@ export interface EdgeCollection< documents.Patch> & ({ _key: string } | { _id: string }) >, - options?: documents.UpdateDocumentOptions, + options?: documents.UpdateDocumentOptions ): Promise< Array< | (documents.DocumentOperationMetadata & { @@ -2053,7 +2080,7 @@ export interface EdgeCollection< */ remove( selector: documents.DocumentSelector, - options?: documents.RemoveDocumentOptions, + options?: documents.RemoveDocumentOptions ): Promise< documents.DocumentMetadata & { old?: documents.Edge } >; @@ -2077,7 +2104,7 @@ export interface EdgeCollection< */ removeAll( selectors: documents.DocumentSelector[], - options?: documents.RemoveDocumentOptions, + options?: documents.RemoveDocumentOptions ): Promise< Array< | (documents.DocumentMetadata & { old?: documents.Edge }) @@ -2104,7 +2131,7 @@ export interface EdgeCollection< */ import( data: documents.EdgeData[], - options?: documents.ImportDocumentsOptions, + options?: documents.ImportDocumentsOptions ): Promise; /** * Bulk imports the given `data` into the collection. @@ -2129,7 +2156,7 @@ export interface EdgeCollection< */ import( data: any[][], - options?: documents.ImportDocumentsOptions, + options?: documents.ImportDocumentsOptions ): Promise; /** * Bulk imports the given `data` into the collection. 
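A minimal sketch of the edge collection write methods covered in this hunk; the collection name and document handles are placeholders.

```js
const db = new Database();
const knows = db.collection("knows");
// Edge documents must include `_from` and `_to` document handles
const meta = await knows.save(
  { _from: "users/a", _to: "users/b", since: 2024 },
  { returnNew: true }
);
console.log(meta.new._from); // "users/a"
// Clean up again using the metadata returned by `save`
await knows.remove(meta._key);
```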
@@ -2188,7 +2215,7 @@ export interface EdgeCollection< data: Buffer | Blob | string, options?: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; - }, + } ): Promise; //#endregion @@ -2220,7 +2247,7 @@ export interface EdgeCollection< */ edges( selector: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions, + options?: documents.DocumentEdgesOptions ): Promise< connection.ArangoApiResponse> >; @@ -2251,7 +2278,7 @@ export interface EdgeCollection< */ inEdges( selector: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions, + options?: documents.DocumentEdgesOptions ): Promise< connection.ArangoApiResponse> >; @@ -2282,7 +2309,7 @@ export interface EdgeCollection< */ outEdges( selector: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions, + options?: documents.DocumentEdgesOptions ): Promise< connection.ArangoApiResponse> >; @@ -2347,7 +2374,7 @@ export class Collection< create( options: CreateCollectionOptions & { type?: CollectionType; - } = {}, + } = {} ) { const { waitForSyncReplication = undefined, @@ -2390,7 +2417,7 @@ export class Collection< } properties( - properties?: CollectionPropertiesOptions, + properties?: CollectionPropertiesOptions ): Promise< connection.ArangoApiResponse > { @@ -2421,15 +2448,15 @@ export class Collection< { method: "PUT", pathname: `/_api/collection/${encodeURIComponent( - this._name, + this._name )}/recalculateCount`, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } figures( - details = false, + details = false ): Promise< connection.ArangoApiResponse< CollectionDescription & @@ -2453,7 +2480,7 @@ export class Collection< } checksum( - options?: CollectionChecksumOptions, + options?: CollectionChecksumOptions ): Promise< connection.ArangoApiResponse< CollectionDescription & { revision: string; checksum: string } @@ -2472,7 +2499,7 @@ export class Collection< } truncate( - options?: TruncateCollectionOptions, + options?: TruncateCollectionOptions ): Promise> { return this._db.request({ method: "PUT", @@ -2499,17 +2526,17 @@ export class Collection< //#region Document operations getResponsibleShard( - document: Partial>, + document: Partial> ): Promise { return this._db.request( { method: "PUT", pathname: `/_api/collection/${encodeURIComponent( - this._name, + this._name )}/responsibleShard`, body: document, }, - (res) => res.parsedBody.shardId, + (res) => res.parsedBody.shardId ); } @@ -2519,7 +2546,7 @@ export class Collection< async documentExists( selector: documents.DocumentSelector, - options: documents.DocumentExistsOptions = {}, + options: documents.DocumentExistsOptions = {} ): Promise { const { ifMatch = undefined, ifNoneMatch = undefined } = options; const headers = {} as Record; @@ -2530,7 +2557,7 @@ export class Collection< { method: "HEAD", pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name), + documents._documentHandle(selector, this._name) )}`, headers, }, @@ -2539,7 +2566,7 @@ export class Collection< throw new errors.HttpError(res); } return true; - }, + } ); } catch (err: any) { if (err.code === 404) { @@ -2551,7 +2578,7 @@ export class Collection< documents( selectors: (string | documents.ObjectWithDocumentKey)[], - options: documents.BulkReadDocumentsOptions = {}, + options: documents.BulkReadDocumentsOptions = {} ) { const { allowDirtyRead = undefined } = options; return this._db.request({ @@ -2565,7 +2592,7 @@ export class Collection< async document( selector: documents.DocumentSelector, - 
options: boolean | documents.ReadDocumentOptions = {}, + options: boolean | documents.ReadDocumentOptions = {} ) { if (typeof options === "boolean") { options = { graceful: options }; @@ -2582,7 +2609,7 @@ export class Collection< const result = this._db.request( { pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name), + documents._documentHandle(selector, this._name) )}`, headers, allowDirtyRead, @@ -2592,7 +2619,7 @@ export class Collection< throw new errors.HttpError(res); } return res.parsedBody; - }, + } ); if (!graceful) return result; try { @@ -2607,7 +2634,7 @@ export class Collection< save( data: documents.DocumentData, - options?: documents.InsertDocumentOptions, + options?: documents.InsertDocumentOptions ) { return this._db.request( { @@ -2616,13 +2643,13 @@ export class Collection< body: data, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody), + (res) => (options?.silent ? undefined : res.parsedBody) ); } saveAll( data: Array>, - options?: documents.InsertDocumentOptions, + options?: documents.InsertDocumentOptions ) { return this._db.request( { @@ -2631,14 +2658,14 @@ export class Collection< body: data, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody), + (res) => (options?.silent ? undefined : res.parsedBody) ); } replace( selector: documents.DocumentSelector, newData: documents.DocumentData, - options: documents.ReplaceDocumentOptions = {}, + options: documents.ReplaceDocumentOptions = {} ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; @@ -2647,13 +2674,13 @@ export class Collection< { method: "PUT", pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name), + documents._documentHandle(selector, this._name) )}`, headers, body: newData, search: opts, }, - (res) => (options?.silent ? undefined : res.parsedBody), + (res) => (options?.silent ? undefined : res.parsedBody) ); } @@ -2662,7 +2689,7 @@ export class Collection< documents.DocumentData & ({ _key: string } | { _id: string }) >, - options?: documents.ReplaceDocumentOptions, + options?: documents.ReplaceDocumentOptions ) { return this._db.request( { @@ -2671,14 +2698,14 @@ export class Collection< body: newData, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody), + (res) => (options?.silent ? undefined : res.parsedBody) ); } update( selector: documents.DocumentSelector, newData: documents.Patch>, - options: documents.UpdateDocumentOptions = {}, + options: documents.UpdateDocumentOptions = {} ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; @@ -2687,13 +2714,13 @@ export class Collection< { method: "PATCH", pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name), + documents._documentHandle(selector, this._name) )}`, headers, body: newData, search: opts, }, - (res) => (options?.silent ? undefined : res.parsedBody), + (res) => (options?.silent ? undefined : res.parsedBody) ); } @@ -2702,7 +2729,7 @@ export class Collection< documents.Patch> & ({ _key: string } | { _id: string }) >, - options?: documents.UpdateDocumentOptions, + options?: documents.UpdateDocumentOptions ) { return this._db.request( { @@ -2711,13 +2738,13 @@ export class Collection< body: newData, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody), + (res) => (options?.silent ? 
undefined : res.parsedBody) ); } remove( selector: documents.DocumentSelector, - options: documents.RemoveDocumentOptions = {}, + options: documents.RemoveDocumentOptions = {} ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; @@ -2726,18 +2753,18 @@ export class Collection< { method: "DELETE", pathname: `/_api/document/${encodeURI( - documents._documentHandle(selector, this._name), + documents._documentHandle(selector, this._name) )}`, headers, search: opts, }, - (res) => (options?.silent ? undefined : res.parsedBody), + (res) => (options?.silent ? undefined : res.parsedBody) ); } removeAll( selectors: (string | documents.ObjectWithDocumentKey)[], - options?: documents.RemoveDocumentOptions, + options?: documents.RemoveDocumentOptions ) { return this._db.request( { @@ -2746,7 +2773,7 @@ export class Collection< body: selectors, search: options, }, - (res) => (options?.silent ? undefined : res.parsedBody), + (res) => (options?.silent ? undefined : res.parsedBody) ); } @@ -2754,7 +2781,7 @@ export class Collection< data: Buffer | Blob | string | any[], options: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; - } = {}, + } = {} ): Promise { const search = { ...options, collection: this._name }; if (Array.isArray(data)) { @@ -2776,7 +2803,7 @@ export class Collection< protected _edges( selector: documents.DocumentSelector, options: documents.DocumentEdgesOptions = {}, - direction?: "in" | "out", + direction?: "in" | "out" ) { const { allowDirtyRead = undefined } = options; return this._db.request({ @@ -2791,21 +2818,21 @@ export class Collection< edges( vertex: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions, + options?: documents.DocumentEdgesOptions ) { return this._edges(vertex, options); } inEdges( vertex: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions, + options?: documents.DocumentEdgesOptions ) { return this._edges(vertex, options, "in"); } outEdges( vertex: documents.DocumentSelector, - options?: documents.DocumentEdgesOptions, + options?: documents.DocumentEdgesOptions ) { return this._edges(vertex, options, "out"); } @@ -2817,10 +2844,10 @@ export class Collection< { method: "PUT", pathname: `/_api/collection/${encodeURIComponent( - this._name, + this._name )}/loadIndexesIntoMemory`, }, - (res) => res.parsedBody.result, + (res) => res.parsedBody.result ); } @@ -2830,7 +2857,7 @@ export class Collection< pathname: "/_api/index", search: { collection: this._name, ...options }, }, - (res) => res.parsedBody.indexes, + (res) => res.parsedBody.indexes ); } diff --git a/src/indexes.ts b/src/indexes.ts index e360ee000..7ef54bb9f 100644 --- a/src/indexes.ts +++ b/src/indexes.ts @@ -61,6 +61,7 @@ export type EnsureIndexOptions = | EnsureGeoIndexOptions | EnsureTtlIndexOptions | EnsureMdiIndexOptions + | EnsureMdiPrefixedIndexOptions | EnsureInvertedIndexOptions; type EnsureIndexOptionsType< @@ -202,6 +203,70 @@ export type EnsureMdiIndexOptions = EnsureIndexOptionsType< * Default: `false` */ unique?: boolean; + /** + * If set to `true`, the index will omit documents that do not contain at + * least one of the attribute paths in `fields` and these documents will be + * ignored for uniqueness checks. + * + * Default: `false` + */ + sparse?: boolean; + /** + * If set to `false`, index selectivity estimates will be disabled for this + * index. 
+ * + * Default: `true` + */ + estimates?: boolean; + /** + * An array of attribute paths that will be stored in the index but can not + * be used for index lookups or sorting but can avoid full document lookups. + */ + storedValues?: string[]; + } +>; + +/** + * Options for creating a prefixed MDI index. + */ +export type EnsureMdiPrefixedIndexOptions = EnsureIndexOptionsType< + "mdi-prefixed", + string[], + { + /** + * An array of attribute names used as a search prefix. + */ + prefixFields: string[]; + /** + * Data type of the dimension attributes. + */ + fieldValueTypes: "double"; + /** + * If set to `true`, a unique index will be created. + * + * Default: `false` + */ + unique?: boolean; + /** + * If set to `true`, the index will omit documents that do not contain at + * least one of the attribute paths in `fields` and these documents will be + * ignored for uniqueness checks. + * + * Default: `false` + */ + sparse?: boolean; + /** + * If set to `false`, index selectivity estimates will be disabled for this + * index. + * + * Default: `true` + */ + estimates?: boolean; + /** + * An array of attribute paths that will be stored in the index but can not + * be used for index lookups or sorting but can avoid full document lookups. + */ + storedValues?: string[]; } >; @@ -497,17 +562,21 @@ export type InvertedIndexStoredValueOptions = { * An object representing an index. */ export type IndexDescription = + | FulltextIndexDescription | GeoIndexDescription | PersistentIndexDescription | TtlIndexDescription | MdiIndexDescription + | MdiPrefixedIndexDescription | InvertedIndexDescription | SystemIndexDescription; /** * An object representing a system index. */ -export type SystemIndexDescription = PrimaryIndexDescription; +export type SystemIndexDescription = + | PrimaryIndexDescription + | EdgeIndexDescription; /** * Shared attributes of all index types. @@ -558,6 +627,7 @@ export type PersistentIndexDescription = IndexDescriptionType< cacheEnabled: boolean; deduplicate: boolean; estimates: boolean; + selectivityEstimate: number; storedValues?: string[]; } >; @@ -567,12 +637,37 @@ export type PersistentIndexDescription = IndexDescriptionType< */ export type PrimaryIndexDescription = IndexDescriptionType< "primary", - string[], + ["_key"], { selectivityEstimate: number; } >; +/** + * An object representing an edge index. + */ +export type EdgeIndexDescription = IndexDescriptionType< + "edge", + ["_from", "_to"], + { + selectivityEstimate: number; + } +>; + +/** + * An object representing a fulltext index. + * + * @deprecated The `fulltext` index type was deprecated in ArangoDB 3.10. Use + * {@link views.ArangoSearchView} instead. + */ +export type FulltextIndexDescription = IndexDescriptionType< + "fulltext", + [string], + { + minLength: number; + } +>; + /** * An object representing a geo index. */ @@ -596,6 +691,7 @@ export type TtlIndexDescription = IndexDescriptionType< [string], { expireAfter: number; + estimates: boolean; selectivityEstimate: number; } >; @@ -608,6 +704,24 @@ export type MdiIndexDescription = IndexDescriptionType< string[], { fieldValueTypes: "double"; + estimates: boolean; + selectivityEstimate: number; + storedValues?: string[]; + } +>; + +/** + * An object representing a prefixed MDI index. 
+ */ +export type MdiPrefixedIndexDescription = IndexDescriptionType< + "mdi-prefixed", + string[], + { + fieldValueTypes: "double"; + estimates: boolean; + selectivityEstimate: number; + storedValues?: string[]; + prefixFields: string[]; } >; @@ -749,21 +863,21 @@ export type ObjectWithName = { */ export function _indexHandle( selector: IndexSelector, - collectionName: string, + collectionName: string ): string { if (typeof selector !== "string") { if (selector.id) { return _indexHandle(selector.id, collectionName); } throw new Error( - "Index handle must be a string or an object with an id attribute", + "Index handle must be a string or an object with an id attribute" ); } if (selector.includes("/")) { const [head] = selector.split("/"); if (head !== collectionName) { throw new Error( - `Index ID "${selector}" does not match collection name "${collectionName}"`, + `Index ID "${selector}" does not match collection name "${collectionName}"` ); } return selector; From ffac1e86d0a265681f9e46884325a63512bacebb Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Mon, 6 Jan 2025 12:08:19 +0100 Subject: [PATCH 21/21] Implement missing HTTP API methods Fixes DE-148. Fixes DE-149. Fixes DE-150. Fixes DE-151. Fixes DE-906. Fixes DE-932. Fixes DE-939. Fixes DE-949. --- CHANGELOG.md | 38 +++++++---- src/administration.ts | 77 +++++++++++++++++++++++ src/collections.ts | 41 ++++++++++++ src/databases.ts | 143 +++++++++++++++++++++++++++++++++++++++++- src/queries.ts | 102 ++++++++++++++++++++++++++++++ 5 files changed, 388 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f5e1efa10..24981ec42 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,22 @@ This driver uses semantic versioning: ### Added +- Added `db.compact` method (DE-906) + +- Added `db.engineStats` method (DE-932) + +- Added `db.getLicense` and `db.setLicense` methods (DE-949) + +- Added `db.listQueryCacheEntries` method (DE-149) + +- Added `db.clearQueryCache` method (DE-148) + +- Added `db.getQueryCacheProperties` method (DE-150) + +- Added `db.setQueryCacheProperties` method (DE-151) + +- Added `collection.shards` method (DE-939) + - Added support for `mdi-prefixed` indexes (DE-956) - Restored `fulltext` index type support (DE-957) @@ -466,13 +482,13 @@ for upgrading your code to arangojs v10. ### Added -- Added `database.availability` method +- Added `db.availability` method -- Added `database.engine` method (DE-931) +- Added `db.engine` method (DE-931) -- Added `database.status` method ([#811](https://github.com/arangodb/arangojs/issues/811)) +- Added `db.status` method ([#811](https://github.com/arangodb/arangojs/issues/811)) -- Added `database.supportInfo` method +- Added `db.supportInfo` method - Added `keepNull` option to `CollectionInsertOptions` type (DE-946) @@ -1567,7 +1583,7 @@ For a detailed list of changes between pre-release versions of v7 see the - Changed `db.createDatabase` return type to `Database` -- Renamed `database.setQueryTracking` to `database.queryTracking` +- Renamed `db.setQueryTracking` to `db.queryTracking` The method will now return the existing query tracking properties or set the new query tracking properties depending on whether an argument is provided. 
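As a sketch of how the newly added administrative methods listed in this changelog hunk might be used together; it assumes an Enterprise Edition server for the license call, and the collection name is a placeholder.

```js
const db = new Database();
// Trigger compaction across all databases (this can take a long time)
await db.compact({ compactBottomMostLevel: true });
// Inspect storage engine statistics and the server license
const stats = await db.engineStats();
const license = await db.getLicense();
console.log(license.status, Object.keys(stats).length);
// Shard layout of a collection, including the responsible servers
const shardInfo = await db.collection("users").shards(true);
console.log(shardInfo.shards);
```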
@@ -1963,7 +1979,7 @@ For a detailed list of changes between pre-release versions of v7 see the - Added support for ArangoDB 3.5 Analyzers API - See the documentation of the `database.analyzer` method and the `Analyzer` + See the documentation of the `db.analyzer` method and the `Analyzer` instances for information on using this API. - Added `collection.getResponsibleShard` method @@ -2137,7 +2153,7 @@ For a detailed list of changes between pre-release versions of v7 see the - Fixed `edgeCollection.save` not respecting options ([#554](https://github.com/arangodb/arangojs/issues/554)) -- Fixed `database.createDatabase` TypeScript signature ([#561](https://github.com/arangodb/arangojs/issues/561)) +- Fixed `db.createDatabase` TypeScript signature ([#561](https://github.com/arangodb/arangojs/issues/561)) ## [6.5.0] - 2018-08-03 @@ -2178,7 +2194,7 @@ For a detailed list of changes between pre-release versions of v7 see the - Added `ArangoError` and `CollectionType` to public exports -- Added `database.close` method +- Added `db.close` method - Added `opts` parameter to `EdgeCollection#save` @@ -2186,11 +2202,11 @@ For a detailed list of changes between pre-release versions of v7 see the ### Added -- Added `database.version` method +- Added `db.version` method -- Added `database.login` method +- Added `db.login` method -- Added `database.exists` method +- Added `db.exists` method - Added `collection.exists` method diff --git a/src/administration.ts b/src/administration.ts index b8ba2ac10..9d148f228 100644 --- a/src/administration.ts +++ b/src/administration.ts @@ -8,6 +8,27 @@ * @packageDocumentation */ +//#region Administrative operation options +/** + * Options for compacting all databases on the server. + */ +export type CompactOptions = { + /** + * Whether compacted data should be moved to the minimum possible level. + * + * Default: `false`. + */ + changeLevel?: boolean; + /** + * Whether to compact the bottom-most level of data. + * + * Default: `false`. + */ + compactBottomMostLevel?: boolean; +}; +//#endregion + +//#region Administrative operation results /** * Result of retrieving database version information. */ @@ -62,6 +83,59 @@ export type EngineInfo = { }; }; +/** + * Performance and resource usage information about the storage engine. + */ +export type EngineStatsInfo = Record< + string, + string | number | Record +>; + +/** + * Information about the server license. + */ +export type LicenseInfo = { + /** + * Properties of the license. + */ + features: { + /** + * The timestamp of the expiration date of the license in seconds since the + * Unix epoch. + */ + expires?: number; + }; + /** + * The hash value of the license. + */ + hash: string; + /** + * The encrypted license key in base 64 encoding, or `"none"` when running + * in the Community Edition. + */ + license?: string; + /** + * The status of the installed license. + * + * - `"good"`: The license is valid for more than 2 weeks. + * + * - `"expiring"`: The license is valid for less than 2 weeks. + * + * - `"expired"`: The license has expired. + * + * - `"read-only"`: The license has been expired for more than 2 weeks. + */ + status: "good" | "expiring" | "expired" | "read-only"; + /** + * Whether the server is performing a database upgrade. + */ + upgrading: boolean; + /** + * The license version number. + */ + version: number; +}; + /** * Information about the server status. 
*/ @@ -326,7 +400,9 @@ export type ClusterSupportInfo = { */ host: Record; }; +//#endregion +//#region Queue time metrics /** * An object providing methods for accessing queue time metrics of the most * recently received server responses if the server supports this feature. @@ -348,3 +424,4 @@ export interface QueueTimeMetrics { */ getAvg(): number; } +//#endregion diff --git a/src/collections.ts b/src/collections.ts index bc5abb348..dd941ab13 100644 --- a/src/collections.ts +++ b/src/collections.ts @@ -788,6 +788,34 @@ export interface DocumentCollection< CollectionDescription & { revision: string; checksum: string } > >; + /** + * Retrieves the collection's shard IDs. + * + * @param details - If set to `true`, the response will include the responsible + * servers for each shard. + */ + shards( + details?: false + ): Promise< + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { shards: string[] } + > + >; + /** + * Retrieves the collection's shard IDs and the responsible servers for each + * shard. + * + * @param details - If set to `false`, the response will only include the + * shard IDs without the responsible servers for each shard. + */ + shards( + details: true + ): Promise< + connection.ArangoApiResponse< + CollectionDescription & + CollectionProperties & { shards: Record } + > + >; /** * Renames the collection and updates the instance's `name` to `newName`. * @@ -2492,6 +2520,19 @@ export class Collection< }); } + shards( + details?: boolean + ): Promise< + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { shards: any } + > + > { + return this._db.request({ + pathname: `/_api/collection/${encodeURIComponent(this._name)}/shards`, + search: { details }, + }); + } + async rename(newName: string) { const result = await this._db.renameCollection(this._name, newName); this._name = newName; diff --git a/src/databases.ts b/src/databases.ts index d1022b0c3..b4b8e7d12 100644 --- a/src/databases.ts +++ b/src/databases.ts @@ -600,6 +600,24 @@ export class Database { }); } + /** + * Fetches detailed storage engine performance and resource usage information + * from the ArangoDB server. + * + * @example + * ```js + * const db = new Database(); + * const stats = await db.engineStats(); + * // the stats object contains the storage engine stats + * ``` + */ + engineStats(): Promise { + return this.request({ + method: "GET", + pathname: "/_api/engine/stats", + }); + } + /** * Retrives the server's current system time in milliseconds with microsecond * precision. @@ -684,6 +702,51 @@ export class Database { }); } + /** + * Fetches the license information and status of an Enterprise Edition server. + */ + getLicense(): Promise { + return this.request({ + method: "GET", + pathname: "/_admin/license", + }); + } + + /** + * Set a new license for an Enterprise Edition server. + * + * @param license - The license as a base 64 encoded string. + * @param force - If set to `true`, the license will be changed even if it + * expires sooner than the current license. + */ + setLicense(license: string, force = false): Promise { + return this.request( + { + method: "PUT", + pathname: "/_admin/license", + body: license, + search: { force }, + }, + () => undefined + ); + } + + /** + * Compacts all databases on the server. + * + * @param options - Options for compacting the databases. 
+ */ + compact(options: administration.CompactOptions = {}): Promise { + return this.request( + { + method: "PUT", + pathname: "/_admin/compact", + body: options, + }, + () => undefined + ); + } + /** * Attempts to initiate a clean shutdown of the server. */ @@ -778,6 +841,8 @@ export class Database { * Computes a set of move shard operations to rebalance the cluster and * executes them. * + * @param options - Options for rebalancing the cluster. + * * @example * ```js * const db = new Database(); @@ -789,14 +854,14 @@ export class Database { * ``` */ rebalanceCluster( - opts: cluster.ClusterRebalanceOptions + options: cluster.ClusterRebalanceOptions ): Promise { return this.request({ method: "PUT", pathname: "/_admin/cluster/rebalance", body: { version: 1, - ...opts, + ...options, }, }); } @@ -3122,6 +3187,80 @@ export class Database { () => undefined ); } + + /** + * Fetches a list of all entries in the AQL query results cache of the + * current database. + * + * @example + * ```js + * const db = new Database(); + * const entries = await db.listQueryCacheEntries(); + * console.log(entries); + * ``` + */ + listQueryCacheEntries(): Promise { + return this.request({ + pathname: "/_api/query-cache/entries", + }); + } + + /** + * Clears the AQL query results cache of the current database. + * + * @example + * ```js + * const db = new Database(); + * await db.clearQueryCache(); + * // Cache is now cleared + * ``` + */ + clearQueryCache(): Promise { + return this.request( + { + method: "DELETE", + pathname: "/_api/query-cache", + }, + () => undefined + ); + } + + /** + * Fetches the global properties for the AQL query results cache. + * + * @example + * ```js + * const db = new Database(); + * const properties = await db.getQueryCacheProperties(); + * console.log(properties); + * ``` + */ + getQueryCacheProperties(): Promise { + return this.request({ + pathname: "/_api/query-cache/properties", + }); + } + + /** + * Updates the global properties for the AQL query results cache. + * + * @param properties - The new properties for the AQL query results cache. + * + * @example + * ```js + * const db = new Database(); + * await db.setQueryCacheProperties({ maxResults: 9000 }); + * ``` + */ + setQueryCacheProperties( + properties: queries.QueryCachePropertiesOptions + ): Promise { + return this.request({ + method: "PUT", + pathname: "/_api/query-cache/properties", + body: properties, + }); + } //#endregion //#region user functions diff --git a/src/queries.ts b/src/queries.ts index 04fe41a0b..8f9901787 100644 --- a/src/queries.ts +++ b/src/queries.ts @@ -244,6 +244,35 @@ export type QueryTrackingOptions = { trackSlowQueries?: boolean; }; +/** + * Options for adjusting the global properties for the AQL query results cache. + */ +export type QueryCachePropertiesOptions = { + /** + * If set to `true`, the query cache will include queries that involve + * system collections. + */ + includeSystem?: boolean; + /** + * Maximum individual size of query results that will be stored per + * database-specific cache. + */ + maxEntrySize?: number; + /** + * Maximum number of query results that will be stored per database-specific + * cache. + */ + maxResults?: number; + /** + * Maximum cumulated size of query results that will be stored per + * database-specific cache. + */ + maxResultsSize?: number; + /** + * Mode the AQL query cache should operate in. 
+ */ + mode?: "off" | "on" | "demand"; +}; //#endregion //#region Query operation results @@ -440,6 +469,79 @@ export type QueryTrackingInfo = { */ trackSlowQueries: boolean; }; + +/** + * Entry in the AQL query results cache. + */ +export type QueryCacheEntry = { + /** + * Hash of the query results. + */ + hash: string; + /** + * Query string. + */ + query: string; + /** + * Bind parameters used in the query. Only shown if tracking for bind + * variables was enabled at server start. + */ + bindVars: Record; + /** + * Size of the query results and bind parameters in bytes. + */ + size: number; + /** + * Number of documents/rows in the query results. + */ + results: number; + /** + * Date and time the query was started as an ISO 8601 timestamp. + */ + started: string; + /** + * Number of times the result was served from the cache. + */ + hits: number; + /** + * Running time of the query in seconds. + */ + runTime: number; + /** + * Collections and views involved in the query. + */ + dataSources: string[]; +}; + +/** + * Properties of the global AQL query results cache configuration. + */ +export type QueryCacheProperties = { + /** + * If set to `true`, the query cache will include queries that involve + * system collections. + */ + includeSystem: boolean; + /** + * Maximum individual size of query results that will be stored per + * database-specific cache. + */ + maxEntrySize: number; + /** + * Maximum number of query results that will be stored per database-specific + * cache. + */ + maxResults: number; + /** + * Maximum cumulated size of query results that will be stored per + * database-specific cache. + */ + maxResultsSize: number; + /** + * Mode the AQL query cache should operate in. + */ + mode: "off" | "on" | "demand"; +}; //#endregion //#region QueryDescription
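Finally, a sketch of the query results cache workflow enabled by the types above; the property values are illustrative.

```js
const db = new Database();
// Enable the AQL query results cache and cap the number of cached results
await db.setQueryCacheProperties({ mode: "on", maxResults: 256 });
// Inspect which query results are currently cached
const entries = await db.listQueryCacheEntries();
for (const entry of entries) {
  console.log(`${entry.hits} hits, ${entry.runTime}s: ${entry.query}`);
}
// Reset the cache when done
await db.clearQueryCache();
```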