diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d6f3aba6..830dbf222 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -45,6 +45,438 @@ This driver uses semantic versioning: The `Index` type now can also be cast to the `EdgeIndex` type. +## [10.0.0-rc.0] - 2024-12-10 + +This is a major release and breaks backwards compatibility. + +See [the migration guide](./MIGRATING.md#v9-to-v10) for detailed instructions +for upgrading your code to arangojs v10. + +### Removed + +- Removed unused `CreateUserOptions` type + + The actual type used by the `db.createUser` method is still `UserOptions`. + +- Removed unused `IndexDetails` type + + This type was intended to be returned by `collection.indexes` when the + `withStats` option is set to `true` but the `figures` property is already + included in the current return type. + +### Changed + +- Closing a connection now closes all open requests + + Previously in certain situations only the most recent request would be + closed per server. Note that this still merely aborts the requests but + does not guarantee the underlying connections are closed as these are + handled by Node.js or the browser natively. + +- Moved fetch-specific `config` options into `config.fetchOptions` + + The following options were moved: `credentials`, `headers` and `keepalive`. + +#### Error handling + +- Errors encountered before a request completes are now wrapped in a + `NetworkError` or a subclass thereof + + This should help make it easier to diagnose network issues and distinguish + the relevant error conditions. + + The originating error can still be accessed using the `cause` property of the + `NetworkError` error. + +- `HttpError` now extends the `NetworkError` class + + This allows treating all non-`ArangoError` errors as one category of errors, + even when there is no server response available. 
+ +- `db.waitForPropagation` now throws a `PropagationTimeoutError` error when + invoked with a `timeout` option and the timeout duration is exceeded + + The method would previously throw the most recent error encountered while + waiting for replication. The originating error can still be accessed using + the `cause` property of the `PropagationTimeoutError` error. + +- `db.waitForPropagation` now respects the `timeout` option more strictly + + Previously the method would only time out if the timeout duration was + exceeded after the most recent request failed. Now the timeout is + recalculated and passed on to each request, preventing it from exceeding + the specified duration. + + If the propagation timed out due to an underlying request exceeding the + timeout duration, the `cause` property of the `PropagationTimeoutError` + error will be a `ResponseTimeoutError` error. + +- `config.beforeRequest` and `config.afterResponse` callbacks can now return + promises + + If the callback returns a promise, it will be awaited before the request + and response cycle proceeds. If either callback throws an error or returns + a promise that is rejected, that error will be thrown instead. + +- `config.afterResponse` callback signature changed + + The callback signature previously used the internal `ArangojsResponse` type. + The new signature uses the `Response` type of the Fetch API with an + additional `request` property to more accurately represent the actual value + it receives as the `parsedBody` property will never be present. + +- `response` property on `ArangoError` is now optional + + This property should always be present but this allows using the error in + situations where a response might not be available. 
+ +#### General type changes + +- Changed `GraphVertexCollection` and `GraphEdgeCollection` generic types to + take separate `EntryResultType` and `EntryInputType` type parameters + +- Changed `db.collection`, `db.createCollection` and `db.createEdgeCollection` + methods to take separate `EntryResultType` and `EntryInputType` type + parameters + + These type parameters are used to narrow the returned collection type. + +- Changed `db.removeUser` method return type to `Promise` + + The previous return type served no purpose. + +- Changed `QueueTimeMetrics` type to an interface + +- Changed `CursorExtras` and `CursorStats` interfaces to types + +#### Low-level request/route changes + +- Renamed `path` option to `pathname` in `RequestOptions` type + + This affects the `db.waitForPropagation` and `route.request` methods. + +- Removed `basePath` option from `RequestOptions` type + + This affects the `db.waitForPropagation` and `route.request` methods. + +- Renamed `route.path` property to `route.pathname` + +#### Renamed methods + +- Renamed various methods for consistency: + + Methods that return an array now follow the `listNouns` pattern, methods that + return a "list of nouns" wrapped in an object have been renamed to follow the + `getNouns` pattern to avoid confusion: + + - `db.listServiceScripts` -> `db.getServiceScripts` + - `db.listHotBackups` -> `db.getHotBackups` + - `db.listFunctions` -> `db.listUserFunctions` + - `db.getLogMessages` -> `db.listLogMessages` + +- Renamed AQL user function management methods: + + - `db.createFunction` -> `db.createUserFunction` + - `db.dropFunction` -> `db.dropUserFunction` + +#### Module renaming + +- Renamed most modules to plural form for consistency + + The following modules were renamed: + + - `arangojs/analyzer` -> `arangojs/analyzers` + - `arangojs/collection` -> `arangojs/collections` + - `arangojs/cursor` -> `arangojs/cursors` + - `arangojs/database` -> `arangojs/databases` + - `arangojs/error` -> `arangojs/errors` 
+ - `arangojs/graph` -> `arangojs/graphs` + - `arangojs/job` -> `arangojs/jobs` + - `arangojs/route` -> `arangojs/routes` + - `arangojs/transaction` -> `arangojs/transactions` + - `arangojs/view` -> `arangojs/views` + +- Moved internal utility functions to new `arangojs/lib/util` module + + These methods are all still marked as internal and should not be used + directly. + +#### Moved types + +- Moved document related types from `arangojs/collection` module to + `arangojs/documents` module + + The following types were moved: `DocumentOperationFailure`, + `DocumentOperationMetadata`, `DocumentExistsOptions`, + `CollectionReadOptions`, `CollectionBatchReadOptions`, + `CollectionInsertOptions`, `CollectionReplaceOptions`, + `CollectionUpdateOptions`, `CollectionRemoveOptions`, + `CollectionImportOptions`, `CollectionEdgesOptions`, + `CollectionImportResult` and `CollectionEdgesResult` + +- Moved index related types from `arangojs/collection` module to + `arangojs/indexes` module + + The following types were moved: `IndexListOptions`. + +- Moved transaction related types from `arangojs/database` module to + `arangojs/transactions` module + + The following types were moved: `TransactionCollections`, + `TransactionOptions` and `TransactionDetails`. + +- Moved cluster related types from `arangojs/database` module to new + `arangojs/clusters` module + + The following types were moved: `ClusterImbalanceInfo`, + `ClusterRebalanceState`, `ClusterRebalanceOptions`, `ClusterRebalanceMove` + and `ClusterRebalanceResult`. + +- Moved hot backup related types from `arangojs/database` module to new + `arangojs/hot-backups` module + + The following types were moved: `HotBackupOptions`, `HotBackupResult` and + `HotBackupList`. 
+ +- Moved query related types from `arangojs/database` module to new + `arangojs/queries` module + + The following types were moved: `QueryOptions`, `ExplainOptions`, + `ExplainPlan`, `ExplainStats`, `SingleExplainResult`, `MultiExplainResult`, + `AstNode`, `ParseResult`, `QueryOptimizerRule`, `QueryTracking`, + `QueryTrackingOptions`, `QueryInfo` and `AqlUserFunction`. + +- Moved service related types from `arangojs/database` module to new + `arangojs/services` module + + The following types were moved: `InstallServiceOptions`, + `ReplaceServiceOptions`, `UpgradeServiceOptions`, `UninstallServiceOptions`, + `ServiceSummary`, `ServiceInfo`, `ServiceConfiguration`, + `SingleServiceDependency`, `MultiServiceDependency`, `ServiceTestStats`, + `ServiceTestStreamTest`, `ServiceTestStreamReport`, `ServiceTestSuiteTest`, + `ServiceTestSuite`, `ServiceTestSuiteReport`, `ServiceTestXunitTest`, + `ServiceTestXunitReport`, `ServiceTestTapReport`, `ServiceTestDefaultTest`, + `ServiceTestDefaultReport` and `SwaggerJson`. + +- Moved user related types from `arangojs/database` module to new + `arangojs/users` module + + The following types were moved: `AccessLevel`, `ArangoUser`, `UserOptions`, + `UserAccessLevelOptions` and `CreateDatabaseUser`. + +- Moved server administration related types from `arangojs/database` module to + new `arangojs/administration` module + + The following types were moved: `QueueTimeMetrics` and `VersionInfo`. + +- Moved configuration related types to new `arangojs/config` module + + The following types were moved: `Config`, `LoadBalancingStrategy`, + `BasicAuthCredentials` and `BearerAuthCredentials`. + +- Moved `ArangoErrorResponse` type to `arangojs/connection` module + + The type is now also no longer marked as internal. + +- Moved configuration related types to new `arangojs/configuration` module + + The following types were moved: `ConfigOptions`, `LoadBalancingStrategy`, + `BasicAuthCredentials` and `BearerAuthCredentials`. 
+ +#### Renamed types + +- Renamed `Index` types to `IndexDescription` for consistency + + The specific index types were also renamed accordingly: + + - `Index` -> `IndexDescription` + - `GeoIndex` -> `GeoIndexDescription` + - `PersistentIndex` -> `PersistentIndexDescription` + - `PrimaryIndex` -> `PrimaryIndexDescription` + - `TtlIndex` -> `TtlIndexDescription` + - `MdiIndex` -> `MdiIndexDescription` + - `InvertedIndex` -> `InvertedIndexDescription` + - `InternalArangosearchIndex` -> `ArangosearchIndexDescription` + - `InternalIndex` -> `InternalIndexDescription` + - `HiddenIndex` -> `HiddenIndexDescription` + + Note that the "Internal" prefix was dropped from `ArangosearchIndexDescription` + to more accurately reflect the index type name. The index type still refers + to an internal index, however. + +- Renamed various types for consistency: + + Types representing an instance of a specific entity type in ArangoDB like a + collection, graph or query now follow the `NounDescription` naming pattern: + + - `AqlUserFunction` -> `UserFunctionDescription` + - `CollectionMetadata` -> `CollectionDescription` + - `DatabaseInfo` -> `DatabaseDescription` + - `GraphInfo` -> `GraphDescription` + - `ServiceInfo` -> `ServiceDescription` + - `QueryInfo` -> `QueryDescription` + - `TransactionDetails` -> `TransactionDescription` + + Note that the `TransactionDescription` type used by `db.listTransactions` + is slightly different from the `TransactionInfo` type used by methods of + `Transaction` objects due to implementation details of ArangoDB. 
+ + Types representing general information rather than an instance of something + now generally follow the `NounInfo` naming pattern, whereas types + representing the result of an operation generally follow the `NounResult` + or `VerbNounResult` naming pattern: + + - `QueryTracking` -> `QueryTrackingInfo` + - `CollectionImportResult` -> `ImportDocumentsResult` + - `CollectionEdgesResult` -> `DocumentEdgesResult` + + Types for options passed to methods now generally follow the `NounOptions`, + `VerbNounOptions` or `VerbNounAttributeOptions` naming patterns: + + - `Config` -> `ConfigOptions` + - `TransactionCollections` -> `TransactionCollectionOptions` + - `CreateDatabaseUser` -> `CreateDatabaseUserOptions` + - `CollectionDropOptions` -> `DropCollectionOptions` + - `CollectionTruncateOptions` -> `TruncateCollectionOptions` + - `IndexListOptions` -> `ListIndexesOptions` + + - Collection document operations: + + - `DocumentExistsOptions` -> `DocumentExistsOptions` + - `CollectionReadOptions` -> `ReadDocumentOptions` + - `CollectionBatchReadOptions` -> `BulkReadDocumentsOptions` + - `CollectionInsertOptions` -> `InsertDocumentOptions` + - `CollectionReplaceOptions` -> `ReplaceDocumentOptions` + - `CollectionUpdateOptions` -> `UpdateDocumentOptions` + - `CollectionRemoveOptions` -> `RemoveDocumentOptions` + - `CollectionImportOptions` -> `ImportDocumentsOptions` + - `CollectionEdgesOptions` -> `DocumentEdgesOptions` + + - Graph collection document operation: + + - `GraphCollectionReadOptions` -> `ReadGraphDocumentOptions` + - `GraphCollectionInsertOptions` -> `CreateGraphDocumentOptions` + - `GraphCollectionReplaceOptions` -> `ReplaceGraphDocumentOptions` + - `GraphCollectionRemoveOptions` -> `RemoveGraphDocumentOptions` + - `ViewPatchPropertiesOptions` -> `UpdateViewPropertiesOptions` + + - View operations: + + - `ArangoSearchViewPatchPropertiesOptions` -> `UpdateArangoSearchViewPropertiesOptions` + - `SearchAliasViewPatchPropertiesOptions` -> 
`UpdateSearchAliasViewPropertiesOptions` + - `SearchAliasViewPatchIndexOptions` -> `UpdateSearchAliasViewIndexOptions` + - `ArangoSearchViewStoredValueOptions` -> `CreateArangoSearchViewStoredValueOptions` + +- Renamed `ArrayCursor` and `BatchedArrayCursor` classes to `Cursor` and + `BatchCursor` respectively + + The previous name was misleading because it conflicted with how ArangoDB + distinguishes between array cursors and streaming cursors in the interactive + shell. This distinction does not apply to the driver. + +- Renamed various types to reduce ambiguity: + + - `ObjectWithId` (in `indexes` module) -> `ObjectWithIndexId` + - `ObjectWithId` (in `documents` module) -> `ObjectWithDocumentId` + - `ObjectWithKey` (in `documents` module) -> `ObjectWithDocumentKey` + +### Added + +- Restored support for Unix domain sockets + + Using Unix domain sockets requires the `undici` library to be installed. + +- Restored support for `config.agentOptions` + + The `config.agentOptions` option can now be used to create a custom `undici` + agent if the `undici` library is installed. + +- Added `config.fetchOptions` option + + This option can now be used to specify default options for the `fetch` + function used by arangojs like `headers`, `credentials`, `keepalive` and + `redirect`. + +- Added `BatchCursor#itemsView` property and `BatchCursorItemsView` interface + + This property provides a low-level interface for consuming the items of the + cursor and is used by the regular item-wise `Cursor` class internally. + +- Added `ProcessedResponse` type + + This type replaces the previously internal `ArangojsResponse` type and + extends the native `Response` type with additional properties. + +#### Error handling + +- Added `config.onError` option (DE-955) + + This option can be used to specify a callback function that will be invoked + whenever a request results in an error. 
Unlike `afterResponse`, this callback + will be invoked even if the request completed but returned an error status. + In this case the error will be the `HttpError` or `ArangoError` representing + the error response. + + If the `onError` callback throws an error or returns a promise that is + rejected, that error will be thrown instead. + +- Added support for `config.fetchOptions.redirect` option ([#613](https://github.com/arangodb/arangojs/issues/613)) + + This option can now be used to specify the redirect mode for requests. + + When set to `"manual"`, arangojs will throw an `HttpError` wrapping the + redirect response instead of automatically following redirects. + + Note that when set to `"error"`, the native fetch API will throw a + non-specific error (usually a `TypeError`) that arangojs will wrap in a + `FetchFailedError` instead. + +- Added optional `ArangoError#request` property + + This property is always present if the error has a `response` property. In + normal use this should always be the case. + +- Added `NetworkError` class + + This is the common base class for all errors (including `HttpError`) that + occur while making a request. The originating error can be accessed using the + `cause` property. The request object can be accessed using the `request` + property. + + Note that `ArangoError` and the new `PropagationTimeoutError` error type + do not extend `NetworkError` but may wrap an underlying error, which can + be accessed using the `cause` property. + +- Added `ResponseTimeoutError` class + + This error extends `NetworkError` and is thrown when a request deliberately + times out using the `timeout` option. + +- Added `RequestAbortedError` class + + This error extends `NetworkError` and is thrown when a request is aborted + by using the `db.close` method. + +- Added `FetchFailedError` class + + This error extends `NetworkError` and is thrown when a request fails because + the underlying `fetch` call fails (usually with a `TypeError`). 
+ + In Node.js the root cause of this error (e.g. a network failure) can often be + found in the `cause` property of the originating error, i.e. the `cause` + property of the `cause` property of this error. + + In browsers the root cause is usually not exposed directly but can often + be diagnosed by examining the developer console or network tab. + +- Added `PropagationTimeoutError` class + + This error does not extend `NetworkError` but wraps the most recent error + encountered while waiting for replication, which can be accessed using the + `cause` property. This error is only thrown when `db.waitForPropagation` + is invoked with a `timeout` option and the timeout duration is exceeded. + ## [9.2.0] - 2024-11-27 ### Added @@ -97,6 +529,8 @@ This driver uses semantic versioning: This property is only available when fetching indexes with the `withHidden` option set to `true`. +### Added + - Added `HiddenIndex` type (DE-849) This type is used to represent an index returned by `collection.indexes` when @@ -1994,6 +2428,7 @@ For a detailed list of changes between pre-release versions of v7 see the Graph methods now only return the relevant part of the response body. [9.3.0]: https://github.com/arangodb/arangojs/compare/v9.2.0...v9.3.0 +[10.0.0-rc.0]: https://github.com/arangodb/arangojs/compare/v9.2.0...v10.0.0-rc.0 [9.2.0]: https://github.com/arangodb/arangojs/compare/v9.1.0...v9.2.0 [9.1.0]: https://github.com/arangodb/arangojs/compare/v9.0.0...v9.1.0 [9.0.0]: https://github.com/arangodb/arangojs/compare/v8.8.1...v9.0.0 diff --git a/MIGRATING.md b/MIGRATING.md index c27238a8e..f9df2657b 100644 --- a/MIGRATING.md +++ b/MIGRATING.md @@ -1,5 +1,55 @@ # Migrating +## v9 to v10 + +Version 10 changes the error handling to make it easier to diagnose network +issues and distinguish between different error conditions. 
+ +If you previously inspected errors other than `ArangoError` and `HttpError` +directly, you should now expect to see `NetworkError` or a subclass thereof +instead. The originating error can be found using the `cause` property of the +`NetworkError` error: + +```js +try { + await db.collection("my-collection").get(); +} catch (err) { + if (err instanceof NetworkError) console.log(err.cause); +} +``` + +### Module name changes + +Module names referring to resource types such as analyzers, collections, +databases, or views have been changed to use the plural form: + +```diff +-import { Database } from "arangojs/database"; ++import { Database } from "arangojs/databases"; +``` + +Note that the `aql` and `foxx-manifest` modules have not been renamed +as these are utility modules. + +### Type imports + +Types that were previously exported by the `database` module but are not +related to managing databases have been moved to separate modules: + +```diff +-import type { +- ParseResult, +- TransactionOptions, +- VersionInfo +-} from "arangojs/database"; ++import type { VersionInfo } from "arangojs/administration"; ++import type { TransactionOptions } from "arangojs/transactions"; ++import type { ParseResult } from "arangojs/queries"; +``` + +Additionally, some types were renamed. For a full list of changes, see the +[changelog](./CHANGELOG.md). + ## v8 to v9 Version 9 reverts the automatic NFC normalization introduced in v7.7.0. This diff --git a/README.md b/README.md index 8f81115a1..2f72ea4fc 100644 --- a/README.md +++ b/README.md @@ -139,21 +139,42 @@ and [the `db` object](https://www.arangodb.com/docs/stable/appendix-references-d ## Error responses -If arangojs encounters an API error, it will throw an `ArangoError` with -an `errorNum` property indicating the ArangoDB error code and the `code` -property indicating the HTTP status code from the response body. 
+If the server returns an ArangoDB error response, arangojs will throw an +`ArangoError` with an `errorNum` property indicating the ArangoDB error code +and expose the response body as the `response` property of the error object. -For any other non-ArangoDB error responses (4xx/5xx status code), it will throw -an `HttpError` error with the status code indicated by the `code` property. +For all other errors during the request/response cycle, arangojs will throw a +`NetworkError` or a more specific subclass thereof and expose the originating +request object as the `request` property of the error object. -If the server response did not indicate an error but the response body could -not be parsed, a regular `SyntaxError` may be thrown instead. +If the server responded with a non-2xx status code, this `NetworkError` will +be an `HttpError` with a `code` property indicating the HTTP status code of the +response and a `response` property containing the response object itself. -In all of these cases the server response object will be exposed as the -`response` property on the error object. +If the error is caused by an exception, the originating exception will be +available as the `cause` property of the error object thrown by arangojs. For +network errors, this will often be a `TypeError`. -If the request failed at a network level or the connection was closed without -receiving a response, the underlying system error will be thrown instead. +### Node.js network errors + +In Node.js, network errors caused by a `TypeError` will often have a `cause` +property containing a more detailed exception. + +Specifically, these are often either system errors (represented by regular +`Error` objects with additional properties) or errors from the `undici` module +Node.js uses internally for its native `fetch` implementation. 
+ +Node.js system error objects provide a `code` property containing the specific +string error code, a `syscall` property identifying the underlying system call +that triggered the error (e.g. `connect`), as well as other helpful properties. + +For more details on Node.js system errors, see the Node.js documentation of the +[`SystemError` interface](https://nodejs.org/api/errors.html#class-systemerror) +as well as the section on +[Node.js error codes](https://nodejs.org/api/errors.html#nodejs-error-codes). + +For more details on the errors thrown by `undici`, see the +[undici errors documentation](https://undici.nodejs.org/#/docs/api/Errors.md). ## Common issues @@ -170,6 +191,15 @@ Additionally please ensure that your version of Node.js (or browser) and ArangoDB are supported by the version of arangojs you are trying to use. See the [compatibility section](#compatibility) for additional information. +You can install an older version of arangojs using `npm` or `yarn`: + +```sh +# for version 8.x.x +yarn add arangojs@8 +# - or - +npm install --save arangojs@8 +``` + ### No code intelligence when using require instead of import If you are using `require` to import the `arangojs` module in JavaScript, the @@ -225,7 +255,7 @@ allowing arangojs to provide more meaningful stack traces at the cost of an impact to performance even when no error occurs. ```diff - const { Database } = require("arangojs"); + import { Database } from "arangojs"; const db = new Database({ url: ARANGODB_SERVER, @@ -239,15 +269,48 @@ that do not support the `stack` property on error objects, this option will still impact performance but not result in any additional information becoming available. +### Unix domain sockets + +If you want to use Unix domain sockets, you need to install the `undici` module, +which is an optional peer dependency of arangojs. 
+ +```sh +npm install --save undici +``` + +If the `undici` module is not installed and arangojs attempts to make a request +over a Unix domain socket, the request will fail with a plain `Error` with a +message indicating that the `undici` module is unavailable. + ### Node.js with self-signed HTTPS certificates -If you need to support self-signed HTTPS certificates in Node.js, you may have -to override the global fetch agent. At the time of this writing, there is no -official way to do this for the native `fetch` implementation in Node.js. +If you need to support self-signed HTTPS certificates in Node.js, you will need +to install the `undici` module, which is an optional peer dependency of +arangojs. -However as Node.js uses the `undici` module for its `fetch` implementation -internally, you can override the global agent by adding `undici` as a -dependency to your project and using its `setGlobalDispatcher` as follows: +```sh +npm install --save undici +``` + +You can instruct arangojs to use the `undici` module by setting the +`config.agentOptions` option: + +```diff + import { Database } from "arangojs"; + + const db = new Database({ + url: ARANGODB_SERVER, ++ agentOptions: { ++ ca: [ ++ fs.readFileSync(".ssl/sub.class1.server.ca.pem"), ++ fs.readFileSync(".ssl/ca.pem"), ++ ], ++ }, + }); +``` + +To override the global fetch agent instead, you can use the `undici` module's +`setGlobalDispatcher` method as follows: ```js import { Agent, setGlobalDispatcher } from "undici"; @@ -263,20 +326,22 @@ setGlobalDispatcher( ``` Although this is **strongly discouraged**, it's also possible to disable -HTTPS certificate validation entirely, but note this has +HTTPS certificate validation entirely this way, but note this has **extremely dangerous** security implications: -```js -import { Agent, setGlobalDispatcher } from "undici"; +```diff + import { Database } from "arangojs"; -setGlobalDispatcher( - new Agent({ - rejectUnauthorized: false, - }) -); + const db = new 
Database({ + url: ARANGODB_SERVER, ++ agentOptions: { ++ rejectUnauthorized: false, ++ }, + }); ``` -This is a [known limitation](https://github.com/orgs/nodejs/discussions/44038#discussioncomment-5701073) +The requirement to use the `undici` module to override these settings is a +[known limitation](https://github.com/orgs/nodejs/discussions/44038#discussioncomment-5701073) of Node.js at the time of this writing. When using arangojs in the browser, self-signed HTTPS certificates need to diff --git a/package.json b/package.json index 133c340de..25fb2d52d 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "private": true, "type": "module", "name": "arangojs", - "version": "9.3.0", + "version": "10.0.0", "engines": { "node": ">=18" }, @@ -14,7 +14,7 @@ }, "repository": { "type": "git", - "url": "https://github.com/arangodb/arangojs.git" + "url": "git+https://github.com/arangodb/arangojs.git" }, "author": "ArangoDB GmbH", "contributors": [ @@ -102,5 +102,13 @@ "source-map-support": "^0.5.21", "typedoc": "^0.25.12", "typescript": "^5.4.2" + }, + "peerDependencies": { + "undici": ">=5.21.0" + }, + "peerDependenciesMeta": { + "undici": { + "optional": true + } } } diff --git a/src/administration.ts b/src/administration.ts new file mode 100644 index 000000000..9d148f228 --- /dev/null +++ b/src/administration.ts @@ -0,0 +1,427 @@ +/** + * ```ts + * import type { VersionInfo } from "arangojs/administration"; + * ``` + * + * The "administration" module provides types for database administration. + * + * @packageDocumentation + */ + +//#region Administrative operation options +/** + * Options for compacting all databases on the server. + */ +export type CompactOptions = { + /** + * Whether compacted data should be moved to the minimum possible level. + * + * Default: `false`. + */ + changeLevel?: boolean; + /** + * Whether to compact the bottom-most level of data. + * + * Default: `false`. 
+ */ + compactBottomMostLevel?: boolean; +}; +//#endregion + +//#region Administrative operation results +/** + * Result of retrieving database version information. + */ +export type VersionInfo = { + /** + * Value identifying the server type, i.e. `"arango"`. + */ + server: string; + /** + * ArangoDB license type or "edition". + */ + license: "community" | "enterprise"; + /** + * ArangoDB server version. + */ + version: string; + /** + * Additional information about the ArangoDB server. + */ + details?: { [key: string]: string }; +}; + +/** + * Information about the storage engine. + */ +export type EngineInfo = { + /** + * Endianness of the storage engine. + */ + endianness?: "little" | "big"; + /** + * Name of the storage engine. + */ + name: string; + /** + * Features supported by the storage engine. + */ + supports?: { + /** + * Index types supported by the storage engine. + */ + indexes?: string[]; + /** + * Aliases supported by the storage engine. + */ + aliases?: { + /** + * Index type aliases supported by the storage engine. + */ + indexes?: Record; + }; + }; +}; + +/** + * Performance and resource usage information about the storage engine. + */ +export type EngineStatsInfo = Record< + string, + string | number | Record +>; + +/** + * Information about the server license. + */ +export type LicenseInfo = { + /** + * Properties of the license. + */ + features: { + /** + * The timestamp of the expiration date of the license in seconds since the + * Unix epoch. + */ + expires?: number; + }; + /** + * The hash value of the license. + */ + hash: string; + /** + * The encrypted license key in base 64 encoding, or `"none"` when running + * in the Community Edition. + */ + license?: string; + /** + * The status of the installed license. + * + * - `"good"`: The license is valid for more than 2 weeks. + * + * - `"expiring"`: The license is valid for less than 2 weeks. + * + * - `"expired"`: The license has expired. 
+ * + * - `"read-only"`: The license has been expired for more than 2 weeks. + */ + status: "good" | "expiring" | "expired" | "read-only"; + /** + * Whether the server is performing a database upgrade. + */ + upgrading: boolean; + /** + * The license version number. + */ + version: number; +}; + +/** + * Information about the server status. + */ +export type ServerStatusInformation = { + /** + * (Cluster Coordinators and DB-Servers only.) The address of the server. + */ + address?: string; + /** + * (Cluster Coordinators and DB-Servers only.) Information about the Agency. + */ + agency?: { + /** + * Information about the communication with the Agency. + */ + agencyComm: { + /** + * A list of possible Agency endpoints. + */ + endpoints: string[]; + }; + }; + /** + * (Cluster Agents only.) Information about the Agents. + */ + agent?: { + /** + * The endpoint of the queried Agent. + */ + endpoint: string; + /** + * Server ID of the queried Agent. + */ + id: string; + /** + * Server ID of the leading Agent. + */ + leaderId: string; + /** + * Whether the queried Agent is the leader. + */ + leading: boolean; + /** + * The current term number. + */ + term: number; + }; + /** + * (Cluster Coordinators only.) Information about the Coordinators. + */ + coordinator?: { + /** + * The server ID of the Coordinator that is the Foxx master. + */ + foxxmaster: string[]; + /** + * Whether the queried Coordinator is the Foxx master. + */ + isFoxxmaster: boolean[]; + }; + /** + * Whether the Foxx API is enabled. + */ + foxxApi: boolean; + /** + * A host identifier defined by the HOST or NODE_NAME environment variable, + * or a fallback value using a machine identifier or the cluster/Agency address. + */ + host: string; + /** + * A hostname defined by the HOSTNAME environment variable. + */ + hostname?: string; + /** + * ArangoDB Edition. + */ + license: "community" | "enterprise"; + /** + * Server operation mode. 
+ * + * @deprecated Use `operationMode` instead + */ + mode: "server" | "console"; + /** + * Server operation mode. + */ + operationMode: "server" | "console"; + /** + * The process ID of arangod. + */ + pid: number; + /** + * Server type. + */ + server: "arango"; + /** + * Information about the server status. + */ + serverInfo: { + /** + * Whether the maintenance mode is enabled. + */ + maintenance: boolean; + /** + * (Cluster only.) The persisted ID. + */ + persistedId?: string; + /** + * Startup and recovery information. + */ + progress: { + /** + * Internal name of the feature that is currently being prepared, started, stopped or unprepared. + */ + feature: string; + /** + * Name of the lifecycle phase the instance is currently in. + */ + phase: string; + /** + * Current recovery sequence number value. + */ + recoveryTick: number; + }; + /** + * Whether writes are disabled. + */ + readOnly: boolean; + /** + * (Cluster only.) The reboot ID. Changes on every restart. + */ + rebootId?: number; + /** + * Either "SINGLE", "COORDINATOR", "PRIMARY" (DB-Server), or "AGENT" + */ + role: "SINGLE" | "COORDINATOR" | "PRIMARY" | "AGENT"; + /** + * (Cluster Coordinators and DB-Servers only.) The server ID. + */ + serverId?: string; + /** + * (Cluster Coordinators and DB-Servers only.) Either "STARTUP", "SERVING", + * or "SHUTDOWN". + */ + state?: "STARTUP" | "SERVING" | "SHUTDOWN"; + /** + * The server version string. + */ + version: string; + /** + * Whether writes are enabled. + * + * @deprecated Use `readOnly` instead. + */ + writeOpsEnabled: boolean; + }; +}; + +/** + * Server availability. + * + * - `"default"`: The server is operational. + * + * - `"readonly"`: The server is in read-only mode. + * + * - `false`: The server is not available. + */ +export type ServerAvailability = "default" | "readonly" | false; + +/** + * Single server deployment information for support purposes. 
+ */
+export type SingleServerSupportInfo = {
+  /**
+   * ISO 8601 datetime string of when the information was requested.
+   */
+  date: string;
+  /**
+   * Information about the deployment.
+   */
+  deployment: {
+    /**
+     * Deployment mode:
+     *
+     * - `"single"`: A single server deployment.
+     *
+     * - `"cluster"`: A cluster deployment.
+     */
+    type: "single";
+  };
+};
+
+/**
+ * Cluster deployment information for support purposes.
+ */
+export type ClusterSupportInfo = {
+  /**
+   * ISO 8601 datetime string of when the information was requested.
+   */
+  date: string;
+  /**
+   * Information about the deployment.
+   */
+  deployment: {
+    /**
+     * Deployment mode:
+     *
+     * - `"single"`: A single server deployment.
+     *
+     * - `"cluster"`: A cluster deployment.
+     */
+    type: "cluster";
+    /**
+     * Information about the servers in the cluster.
+     */
+    servers: Record<string, Record<string, any>>;
+    /**
+     * Number of agents in the cluster.
+     */
+    agents: number;
+    /**
+     * Number of coordinators in the cluster.
+     */
+    coordinators: number;
+    /**
+     * Number of DB-Servers in the cluster.
+     */
+    dbServers: number;
+    /**
+     * Information about the shards in the cluster.
+     */
+    shards: {
+      /**
+       * Number of collections in the cluster.
+       */
+      collections: number;
+      /**
+       * Number of shards in the cluster.
+       */
+      shards: number;
+      /**
+       * Number of leaders in the cluster.
+       */
+      leaders: number;
+      /**
+       * Number of real leaders in the cluster.
+       */
+      realLeaders: number;
+      /**
+       * Number of followers in the cluster.
+       */
+      followers: number;
+      /**
+       * Number of servers in the cluster.
+       */
+      servers: number;
+    };
+  };
+  /**
+   * (Cluster only.) Information about the ArangoDB instance as well as the
+   * host machine.
+   */
+  host: Record<string, any>;
+};
+//#endregion
+
+//#region Queue time metrics
+/**
+ * An object providing methods for accessing queue time metrics of the most
+ * recently received server responses if the server supports this feature.
+ */ +export interface QueueTimeMetrics { + /** + * Returns the queue time of the most recently received response in seconds. + */ + getLatest(): number | undefined; + /** + * Returns a list of the most recently received queue time values as tuples + * of the timestamp of the response being processed in milliseconds and the + * queue time in seconds. + */ + getValues(): [number, number][]; + /** + * Returns the average queue time of the most recently received responses + * in seconds. + */ + getAvg(): number; +} +//#endregion diff --git a/src/analyzer.ts b/src/analyzers.ts similarity index 54% rename from src/analyzer.ts rename to src/analyzers.ts index b005a4cbb..05a5f31a9 100644 --- a/src/analyzer.ts +++ b/src/analyzers.ts @@ -1,32 +1,56 @@ /** * ```ts - * import type { Analyzer } from "arangojs/analyzer.js"; + * import type { Analyzer } from "arangojs/analyzers"; * ``` * - * The "analyzer" module provides analyzer related types and interfaces + * The "analyzers" module provides Analyzer related types and interfaces * for TypeScript. * * @packageDocumentation */ -import { ArangoApiResponse } from "./connection.js"; -import { Database } from "./database.js"; -import { isArangoError } from "./error.js"; +import * as databases from "./databases.js"; +import * as connection from "./connection.js"; +import * as errors from "./errors.js"; import { ANALYZER_NOT_FOUND } from "./lib/codes.js"; +//#region Shared types /** - * Indicates whether the given value represents an {@link Analyzer}. - * - * @param analyzer - A value that might be an Analyzer. + * Name of a feature enabled for an Analyzer. */ -export function isArangoAnalyzer(analyzer: any): analyzer is Analyzer { - return Boolean(analyzer && analyzer.isArangoAnalyzer); -} +export type AnalyzerFeature = "frequency" | "norm" | "position" | "offset"; /** - * Name of a feature enabled for an Analyzer. + * Text case conversion type. 
*/ -export type AnalyzerFeature = "frequency" | "norm" | "position" | "offset"; +export type CaseConversion = "lower" | "upper" | "none"; + +/** + * Token type for a Segmentation Analyzer. + */ +export type SegmentationTokenType = "all" | "alpha" | "graphic"; + +/** + * Token data type for an AQL Analyzer. + */ +export type AqlReturnTokenType = "string" | "number" | "bool"; + +/** + * GeoJSON type. + */ +export type GeoType = "shape" | "centroid" | "point"; + +/** + * Storage format of a Geo S2 Analyzer. + */ +export type GeoS2Format = "latLngDouble" | "latLngInt" | "s2Point"; +/** + * Type of an Analyzer. + */ +export type AnalyzerType = AnalyzerDescription["type"]; +//#endregion + +//#region CreateAnalyzerOptions /** * Analyzer type and its type-specific properties. */ @@ -51,106 +75,95 @@ export type CreateAnalyzerOptions = | CreateGeoPointAnalyzerOptions | CreateGeoS2AnalyzerOptions; +type CreateAnalyzerOptionsType< + Type extends AnalyzerType, + Properties = void, +> = Properties extends void + ? { + /** + * Type of the Analyzer. + */ + type: Type; + /** + * Features to enable for this Analyzer. + */ + features?: AnalyzerFeature[]; + /** + * This Analyzer does not take additional properties. + */ + properties?: Record; + } + : { + /** + * Type of the Analyzer. + */ + type: Type; + /** + * Features to enable for this Analyzer. + */ + features?: AnalyzerFeature[]; + /** + * Additional properties for the Analyzer. + */ + properties: Properties; + }; + /** * Options for creating an Identity Analyzer. */ -export type CreateIdentityAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "identity"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - * - * The `identity` Analyzer does not take additional properties. 
- */ - properties?: Record; -}; +export type CreateIdentityAnalyzerOptions = + CreateAnalyzerOptionsType<"identity">; /** * Options for creating a Delimiter Analyzer. */ -export type CreateDelimiterAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "delimiter"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - * - * The value will be used as delimiter to split text into tokens as specified - * in RFC 4180, without starting new records on newlines. - */ - properties: string | { delimiter: string }; -}; +export type CreateDelimiterAnalyzerOptions = CreateAnalyzerOptionsType< + "delimiter", + | string + | { + /** + * This value will be used as delimiter to split text into tokens as + * specified in RFC 4180, without starting new records on newlines. + */ + delimiter: string; + } +>; /** * Options for creating a Multi-Delimiter Analyzer. */ -export type CreateMultiDelimiterAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "multi_delimiter"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - * - * The value will be used as delimiter to split text into tokens as specified - * in RFC 4180, without starting new records on newlines. - */ - properties: { delimiters: string[] }; -}; +export type CreateMultiDelimiterAnalyzerOptions = CreateAnalyzerOptionsType< + "multi_delimiter", + { + /** + * This value will be used as delimiter to split text into tokens as + * specified in RFC 4180, without starting new records on newlines. + */ + delimiters: string[]; + } +>; /** * Options for creating a Stem Analyzer. */ -export type CreateStemAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "stem"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - * - * The value defines the text locale. 
- * - * Format: `language[_COUNTRY][.encoding][@variant]` - */ - properties: { locale: string }; -}; +export type CreateStemAnalyzerOptions = CreateAnalyzerOptionsType< + "stem", + { + /** + * Text locale. + * + * Format: `language[_COUNTRY][.encoding][@variant]` + */ + locale: string; + } +>; /** * Options for creating a Norm Analyzer. */ -export type CreateNormAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "norm"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateNormAnalyzerOptions = CreateAnalyzerOptionsType< + "norm", + { /** * Text locale. * @@ -162,32 +175,22 @@ export type CreateNormAnalyzerOptions = { * * Default: `"lower"` */ - case?: "lower" | "none" | "upper"; + case?: CaseConversion; /** * Preserve accents in returned words. * * Default: `false` */ accent?: boolean; - }; -}; + } +>; /** * Options for creating an Ngram Analyzer. */ -export type CreateNgramAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "ngram"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateNgramAnalyzerOptions = CreateAnalyzerOptionsType< + "ngram", + { /** * Maximum n-gram length. */ @@ -200,25 +203,15 @@ export type CreateNgramAnalyzerOptions = { * Output the original value as well. */ preserveOriginal: boolean; - }; -}; + } +>; /** * Options for creating a Text Analyzer. */ -export type CreateTextAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "text"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateTextAnalyzerOptions = CreateAnalyzerOptionsType< + "text", + { /** * Text locale. 
* @@ -230,7 +223,7 @@ export type CreateTextAnalyzerOptions = { * * Default: `"lower"` */ - case?: "lower" | "none" | "upper"; + case?: CaseConversion; /** * Words to omit from result. * @@ -260,57 +253,41 @@ export type CreateTextAnalyzerOptions = { /** * If present, then edge n-grams are generated for each token (word). */ - edgeNgram?: { min?: number; max?: number; preserveOriginal?: boolean }; - }; -}; + edgeNgram?: { + min?: number; + max?: number; + preserveOriginal?: boolean; + }; + } +>; /** * Options for creating a Segmentation Analyzer */ -export type CreateSegmentationAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "segmentation"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateSegmentationAnalyzerOptions = CreateAnalyzerOptionsType< + "segmentation", + { /** * Which tokens should be returned. * * Default: `"alpha"` */ - break?: "all" | "alpha" | "graphic"; + break?: SegmentationTokenType; /** * What case all returned tokens should be converted to if applicable. * * Default: `"none"` */ - case?: "lower" | "upper" | "none"; - }; -}; + case?: CaseConversion; + } +>; /** * Options for creating an AQL Analyzer */ -export type CreateAqlAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "aql"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateAqlAnalyzerOptions = CreateAnalyzerOptionsType< + "aql", + { /** * AQL query to be executed. */ @@ -345,49 +322,29 @@ export type CreateAqlAnalyzerOptions = { * * Default: `"string"` */ - returnType?: "string" | "number" | "bool"; - }; -}; + returnType?: AqlReturnTokenType; + } +>; /** * Options for creating a Pipeline Analyzer */ -export type CreatePipelineAnalyzerOptions = { - /** - * Type of the Analyzer. 
- */ - type: "pipeline"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreatePipelineAnalyzerOptions = CreateAnalyzerOptionsType< + "pipeline", + { /** * Definitions for Analyzers to chain in this Pipeline Analyzer. */ pipeline: Omit[]; - }; -}; + } +>; /** * Options for creating a Stopwords Analyzer */ -export type CreateStopwordsAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "stopwords"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateStopwordsAnalyzerOptions = CreateAnalyzerOptionsType< + "stopwords", + { /** * Array of strings that describe the tokens to be discarded. */ @@ -398,50 +355,30 @@ export type CreateStopwordsAnalyzerOptions = { * Default: `false` */ hex?: boolean; - }; -}; + } +>; /** * Options for creating a Collation Analyzer */ -export type CreateCollationAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "collation"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateCollationAnalyzerOptions = CreateAnalyzerOptionsType< + "collation", + { /** * Text locale. * * Format: `language[_COUNTRY][.encoding][@variant]` */ locale: string; - }; -}; + } +>; /** * (Enterprise Edition only.) Options for creating a MinHash Analyzer */ -export type CreateMinHashAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "minhash"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateMinHashAnalyzerOptions = CreateAnalyzerOptionsType< + "minhash", + { /** * An Analyzer definition-like object with `type` and `properties` attributes. 
*/ @@ -450,25 +387,15 @@ export type CreateMinHashAnalyzerOptions = { * Size of the MinHash signature. */ numHashes: number; - }; -}; + } +>; /** * (Enterprise Edition only.) Options for creating a Classification Analyzer */ -export type CreateClassificationAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "classification"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateClassificationAnalyzerOptions = CreateAnalyzerOptionsType< + "classification", + { /** * On-disk path to the trained fastText supervised model. */ @@ -485,25 +412,15 @@ export type CreateClassificationAnalyzerOptions = { * Default: `0.99` */ threshold?: number; - }; -}; + } +>; /** * (Enterprise Edition only.) Options for creating a NearestNeighbors Analyzer. */ -export type CreateNearestNeighborsAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "nearest_neighbors"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateNearestNeighborsAnalyzerOptions = CreateAnalyzerOptionsType< + "nearest_neighbors", + { /** * On-disk path to the trained fastText supervised model. */ @@ -514,25 +431,15 @@ export type CreateNearestNeighborsAnalyzerOptions = { * Default: `1` */ top_k?: number; - }; -}; + } +>; /** * Options for creating a Wildcard Analyzer. */ -export type CreateWildcardAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "wildcard"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateWildcardAnalyzerOptions = CreateAnalyzerOptionsType< + "wildcard", + { /** * N-gram length. Must be a positive integer greater than or equal to 2. 
*/ @@ -541,25 +448,15 @@ export type CreateWildcardAnalyzerOptions = { * An Analyzer definition-like object with `type` and `properties` attributes. */ analyzer?: Omit; - }; -}; + } +>; /** * Options for creating a GeoJSON Analyzer */ -export type CreateGeoJsonAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "geojson"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateGeoJsonAnalyzerOptions = CreateAnalyzerOptionsType< + "geojson", + { /** * If set to `"centroid"`, only the centroid of the input geometry will be * computed and indexed. @@ -569,32 +466,26 @@ export type CreateGeoJsonAnalyzerOptions = { * * Default: `"shape"` */ - type?: "shape" | "centroid" | "point"; + type?: GeoType; /** * Options for fine-tuning geo queries. * * Default: `{ maxCells: 20, minLevel: 4, maxLevel: 23 }` */ - options?: { maxCells?: number; minLevel?: number; maxLevel?: number }; - }; -}; + options?: { + maxCells?: number; + minLevel?: number; + maxLevel?: number; + }; + } +>; /** * Options for creating a GeoPoint Analyzer */ -export type CreateGeoPointAnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "geopoint"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateGeoPointAnalyzerOptions = CreateAnalyzerOptionsType< + "geopoint", + { /** * Attribute paths of the latitude value relative to the field for which the * Analyzer is defined in the View. @@ -610,26 +501,20 @@ export type CreateGeoPointAnalyzerOptions = { * * Default: `{ maxCells: 20, minLevel: 4, maxLevel: 23 }` */ - options?: { minCells?: number; minLevel?: number; maxLevel?: number }; - }; -}; + options?: { + minCells?: number; + minLevel?: number; + maxLevel?: number; + }; + } +>; /** * (Enterprise Edition only.) 
Options for creating a Geo S2 Analyzer */ -export type CreateGeoS2AnalyzerOptions = { - /** - * Type of the Analyzer. - */ - type: "geo_s2"; - /** - * Features to enable for this Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * Additional properties for the Analyzer. - */ - properties: { +export type CreateGeoS2AnalyzerOptions = CreateAnalyzerOptionsType< + "geo_s2", + { /** * If set to `"centroid"`, only the centroid of the input geometry will be * computed and indexed. @@ -639,13 +524,17 @@ export type CreateGeoS2AnalyzerOptions = { * * Default: `"shape"` */ - type?: "shape" | "centroid" | "point"; + type?: GeoType; /** * Options for fine-tuning geo queries. * * Default: `{ maxCells: 20, minLevel: 4, maxLevel: 23 }` */ - options?: { maxCells?: number; minLevel?: number; maxLevel?: number }; + options?: { + maxCells?: number; + minLevel?: number; + maxLevel?: number; + }; /** * If set to `"latLngDouble"`, each latitude and longitude value is stored * as an 8-byte floating-point value (16 bytes per coordinate pair). @@ -658,24 +547,12 @@ export type CreateGeoS2AnalyzerOptions = { * * Default: `"latLngDouble"` */ - format?: "latLngDouble" | "latLngInt" | "s2Point"; - }; -}; - -/** - * Shared attributes of all Analyzer descriptions. - */ -export type GenericAnalyzerDescription = { - /** - * A unique name for this Analyzer. - */ - name: string; - /** - * Features enabled for this Analyzer. - */ - features: AnalyzerFeature[]; -}; + format?: GeoS2Format; + } +>; +//#endregion +//#region AnalyzerDescription /** * An object describing an Analyzer. */ @@ -701,225 +578,274 @@ export type AnalyzerDescription = | GeoS2AnalyzerDescription; /** - * An object describing an Identity Analyzer. + * Shared attributes of all Analyzer descriptions. 
  */
-export type IdentityAnalyzerDescription = GenericAnalyzerDescription & {
-  type: "identity";
-  properties: Record<string, never>;
+type AnalyzerDescriptionType<
+  Type extends string,
+  Properties = Record<string, never>,
+> = {
+  /**
+   * A unique name for this Analyzer.
+   */
+  name: string;
+  /**
+   * Type of the Analyzer.
+   */
+  type: Type;
+  /**
+   * Features to enable for this Analyzer.
+   */
+  features?: AnalyzerFeature[];
+  /**
+   * Additional properties for the Analyzer.
+   */
+  properties: Properties;
 };
 
+/**
+ * An object describing an Identity Analyzer.
+ */
+export type IdentityAnalyzerDescription = AnalyzerDescriptionType<"identity">;
+
 /**
  * An object describing a Delimiter Analyzer.
  */
-export type DelimiterAnalyzerDescription = GenericAnalyzerDescription & {
-  type: "delimiter";
-  properties: { delimiter: string };
-};
+export type DelimiterAnalyzerDescription = AnalyzerDescriptionType<
+  "delimiter",
+  { delimiter: string }
+>;
 
 /**
  * An object describing a Multi Delimiter Analyzer.
  */
-export type MultiDelimiterAnalyzerDescription = GenericAnalyzerDescription & {
-  type: "multi_delimiter";
-  properties: { delimiters: string[] };
-};
+export type MultiDelimiterAnalyzerDescription = AnalyzerDescriptionType<
+  "multi_delimiter",
+  { delimiters: string[] }
+>;
 
 /**
  * An object describing a Stem Analyzer.
  */
-export type StemAnalyzerDescription = GenericAnalyzerDescription & {
-  type: "stem";
-  properties: { locale: string };
-};
+export type StemAnalyzerDescription = AnalyzerDescriptionType<
+  "stem",
+  { locale: string }
+>;
 
 /**
  * An object describing a Norm Analyzer.
  */
-export type NormAnalyzerDescription = GenericAnalyzerDescription & {
-  type: "norm";
-  properties: {
+export type NormAnalyzerDescription = AnalyzerDescriptionType<
+  "norm",
+  {
     locale: string;
-    case: "lower" | "none" | "upper";
+    case: CaseConversion;
     accent: boolean;
-  };
-};
+  }
+>;
 
 /**
  * An object describing an Ngram Analyzer.
*/ -export type NgramAnalyzerDescription = GenericAnalyzerDescription & { - type: "ngram"; - properties: { - max: number; +export type NgramAnalyzerDescription = AnalyzerDescriptionType< + "ngram", + { min: number; + max: number; preserveOriginal: boolean; - }; -}; + } +>; /** * An object describing a Text Analyzer. */ -export type TextAnalyzerDescription = GenericAnalyzerDescription & { - type: "text"; - properties: { +export type TextAnalyzerDescription = AnalyzerDescriptionType< + "text", + { locale: string; - case: "lower" | "none" | "upper"; + case: CaseConversion; stopwords: string[]; stopwordsPath: string; accent: boolean; stemming: boolean; - edgeNgram: { min: number; max: number; preserveOriginal: boolean }; - }; -}; + edgeNgram: { + min: number; + max: number; + preserveOriginal: boolean; + }; + } +>; /** * An object describing a Segmentation Analyzer */ -export type SegmentationAnalyzerDescription = GenericAnalyzerDescription & { - type: "segmentation"; - properties: { - break: "all" | "alpha" | "graphic"; - case: "lower" | "upper" | "none"; - }; -}; +export type SegmentationAnalyzerDescription = AnalyzerDescriptionType< + "segmentation", + { + break: SegmentationTokenType; + case: CaseConversion; + } +>; /** * An object describing an AQL Analyzer */ -export type AqlAnalyzerDescription = GenericAnalyzerDescription & { - type: "aql"; - properties: { +export type AqlAnalyzerDescription = AnalyzerDescriptionType< + "aql", + { queryString: string; collapsePositions: boolean; keepNull: boolean; batchSize: number; memoryLimit: number; - returnType: "string" | "number" | "bool"; - }; -}; + returnType: AqlReturnTokenType; + } +>; /** * An object describing a Pipeline Analyzer */ -export type PipelineAnalyzerDescription = GenericAnalyzerDescription & { - type: "pipeline"; - properties: { +export type PipelineAnalyzerDescription = AnalyzerDescriptionType< + "pipeline", + { pipeline: Omit[]; - }; -}; + } +>; /** * An object describing a Stopwords Analyzer */ 
-export type StopwordsAnalyzerDescription = GenericAnalyzerDescription & { - type: "stopwords"; - properties: { +export type StopwordsAnalyzerDescription = AnalyzerDescriptionType< + "stopwords", + { stopwords: string[]; hex: boolean; - }; -}; + } +>; /** * An object describing a Collation Analyzer */ -export type CollationAnalyzerDescription = GenericAnalyzerDescription & { - type: "collation"; - properties: { +export type CollationAnalyzerDescription = AnalyzerDescriptionType< + "collation", + { locale: string; - }; -}; + } +>; /** * (Enterprise Edition only.) An object describing a MinHash Analyzer */ -export type MinHashAnalyzerDescription = GenericAnalyzerDescription & { - type: "minhash"; - properties: { +export type MinHashAnalyzerDescription = AnalyzerDescriptionType< + "minhash", + { analyzer: Omit; numHashes: number; - }; -}; + } +>; /** * (Enterprise Edition only.) An object describing a Classification Analyzer */ -export type ClassificationAnalyzerDescription = GenericAnalyzerDescription & { - type: "classification"; - properties: { +export type ClassificationAnalyzerDescription = AnalyzerDescriptionType< + "classification", + { model_location: string; top_k: number; threshold: number; - }; -}; + } +>; /** * (Enterprise Edition only.) 
An object describing a NearestNeighbors Analyzer */ -export type NearestNeighborsAnalyzerDescription = GenericAnalyzerDescription & { - type: "nearest_neighbors"; - properties: { +export type NearestNeighborsAnalyzerDescription = AnalyzerDescriptionType< + "nearest_neighbors", + { model_location: string; top_k: number; - }; -}; + } +>; /** * An object describing a Wildcard Analyzer */ -export type WildcardAnalyzerDescription = GenericAnalyzerDescription & { - type: "wildcard"; - properties: { +export type WildcardAnalyzerDescription = AnalyzerDescriptionType< + "wildcard", + { ngramSize: number; analyzer?: Omit; - }; -}; + } +>; /** * An object describing a GeoJSON Analyzer */ -export type GeoJsonAnalyzerDescription = GenericAnalyzerDescription & { - type: "geojson"; - properties: { - type: "shape" | "centroid" | "point"; - description: { maxCells: number; minLevel: number; maxLevel: number }; - }; -}; +export type GeoJsonAnalyzerDescription = AnalyzerDescriptionType< + "geojson", + { + type: GeoType; + description: { + maxCells: number; + minLevel: number; + maxLevel: number; + }; + } +>; /** * An object describing a GeoPoint Analyzer */ -export type GeoPointAnalyzerDescription = GenericAnalyzerDescription & { - type: "geopoint"; - properties: { +export type GeoPointAnalyzerDescription = AnalyzerDescriptionType< + "geopoint", + { latitude: string[]; longitude: string[]; - description: { minCells: number; minLevel: number; maxLevel: number }; - }; -}; + description: { + minCells: number; + minLevel: number; + maxLevel: number; + }; + } +>; /** * (Enterprise Edition only.) 
An object describing a GeoS2 Analyzer */ -export type GeoS2AnalyzerDescription = GenericAnalyzerDescription & { - type: "geo_s2"; - properties: { - type: "shape" | "centroid" | "point"; - description: { maxCells: number; minLevel: number; maxLevel: number }; - format: "latLngDouble" | "latLngInt" | "s2Point"; - }; -}; +export type GeoS2AnalyzerDescription = AnalyzerDescriptionType< + "geo_s2", + { + type: GeoType; + description: { + maxCells: number; + minLevel: number; + maxLevel: number; + }; + format: GeoS2Format; + } +>; +//#endregion + +//#region Analyzer class +/** + * Indicates whether the given value represents an {@link Analyzer}. + * + * @param analyzer - A value that might be an Analyzer. + */ +export function isArangoAnalyzer(analyzer: any): analyzer is Analyzer { + return Boolean(analyzer && analyzer.isArangoAnalyzer); +} /** - * Represents an Analyzer in a {@link database.Database}. + * Represents an Analyzer in a {@link databases.Database}. */ export class Analyzer { protected _name: string; - protected _db: Database; + protected _db: databases.Database; /** * @internal */ - constructor(db: Database, name: string) { + constructor(db: databases.Database, name: string) { this._db = db; this._name = name; } @@ -943,7 +869,7 @@ export class Analyzer { /** * Name of this Analyzer. * - * See also {@link database.Database}. + * See also {@link databases.Database}. 
*/ get name() { return this._name; @@ -965,7 +891,7 @@ export class Analyzer { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === ANALYZER_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === ANALYZER_NOT_FOUND) { return false; } throw err; @@ -983,16 +909,16 @@ export class Analyzer { * // definition contains the Analyzer definition * ``` */ - get(): Promise> { + get(): Promise> { return this._db.request({ - path: `/_api/analyzer/${encodeURIComponent(this._name)}`, + pathname: `/_api/analyzer/${encodeURIComponent(this._name)}`, }); } /** * Creates a new Analyzer with the given `options` and the instance's name. * - * See also {@link database.Database#createAnalyzer}. + * See also {@link databases.Database#createAnalyzer}. * * @param options - Options for creating the Analyzer. * @@ -1005,47 +931,47 @@ export class Analyzer { * ``` */ create( - options: Options + options: Options, ): Promise< Options extends CreateIdentityAnalyzerOptions - ? IdentityAnalyzerDescription - : Options extends CreateDelimiterAnalyzerOptions - ? DelimiterAnalyzerDescription - : Options extends CreateStemAnalyzerOptions - ? StemAnalyzerDescription - : Options extends CreateNormAnalyzerOptions - ? NormAnalyzerDescription - : Options extends CreateNgramAnalyzerOptions - ? NgramAnalyzerDescription - : Options extends CreateTextAnalyzerOptions - ? TextAnalyzerDescription - : Options extends CreateSegmentationAnalyzerOptions - ? SegmentationAnalyzerDescription - : Options extends CreateAqlAnalyzerOptions - ? AqlAnalyzerDescription - : Options extends CreatePipelineAnalyzerOptions - ? PipelineAnalyzerDescription - : Options extends CreateStopwordsAnalyzerOptions - ? StopwordsAnalyzerDescription - : Options extends CreateCollationAnalyzerOptions - ? CollationAnalyzerDescription - : Options extends CreateMinHashAnalyzerOptions - ? MinHashAnalyzerDescription - : Options extends CreateClassificationAnalyzerOptions - ? 
ClassificationAnalyzerDescription - : Options extends CreateNearestNeighborsAnalyzerOptions - ? NearestNeighborsAnalyzerDescription - : Options extends CreateGeoJsonAnalyzerOptions - ? GeoJsonAnalyzerDescription - : Options extends CreateGeoPointAnalyzerOptions - ? GeoPointAnalyzerDescription - : Options extends CreateGeoS2AnalyzerOptions - ? GeoS2AnalyzerDescription - : AnalyzerDescription + ? IdentityAnalyzerDescription + : Options extends CreateDelimiterAnalyzerOptions + ? DelimiterAnalyzerDescription + : Options extends CreateStemAnalyzerOptions + ? StemAnalyzerDescription + : Options extends CreateNormAnalyzerOptions + ? NormAnalyzerDescription + : Options extends CreateNgramAnalyzerOptions + ? NgramAnalyzerDescription + : Options extends CreateTextAnalyzerOptions + ? TextAnalyzerDescription + : Options extends CreateSegmentationAnalyzerOptions + ? SegmentationAnalyzerDescription + : Options extends CreateAqlAnalyzerOptions + ? AqlAnalyzerDescription + : Options extends CreatePipelineAnalyzerOptions + ? PipelineAnalyzerDescription + : Options extends CreateStopwordsAnalyzerOptions + ? StopwordsAnalyzerDescription + : Options extends CreateCollationAnalyzerOptions + ? CollationAnalyzerDescription + : Options extends CreateMinHashAnalyzerOptions + ? MinHashAnalyzerDescription + : Options extends CreateClassificationAnalyzerOptions + ? ClassificationAnalyzerDescription + : Options extends CreateNearestNeighborsAnalyzerOptions + ? NearestNeighborsAnalyzerDescription + : Options extends CreateGeoJsonAnalyzerOptions + ? GeoJsonAnalyzerDescription + : Options extends CreateGeoPointAnalyzerOptions + ? GeoPointAnalyzerDescription + : Options extends CreateGeoS2AnalyzerOptions + ? 
GeoS2AnalyzerDescription + : AnalyzerDescription > { return this._db.request({ method: "POST", - path: "/_api/analyzer", + pathname: "/_api/analyzer", body: { name: this._name, ...options }, }); } @@ -1064,11 +990,14 @@ export class Analyzer { * // the Analyzer "some-analyzer" no longer exists * ``` */ - drop(force: boolean = false): Promise> { + drop( + force: boolean = false, + ): Promise> { return this._db.request({ method: "DELETE", - path: `/_api/analyzer/${encodeURIComponent(this._name)}`, + pathname: `/_api/analyzer/${encodeURIComponent(this._name)}`, search: { force }, }); } } +//#endregion diff --git a/src/aql.ts b/src/aql.ts index 36286a2b0..5229ce3e4 100644 --- a/src/aql.ts +++ b/src/aql.ts @@ -1,6 +1,6 @@ /** * ```js - * import { aql } from "arangojs/aql.js"; + * import { aql } from "arangojs/aql"; * ``` * * The "aql" module provides the {@link aql} template string handler and @@ -10,10 +10,10 @@ * * @packageDocumentation */ -import { isArangoAnalyzer } from "./analyzer.js"; -import { ArangoCollection, isArangoCollection } from "./collection.js"; -import { Graph, isArangoGraph } from "./graph.js"; -import { isArangoView, View } from "./view.js"; +import * as analyzers from "./analyzers.js"; +import * as collections from "./collections.js"; +import * as graphs from "./graphs.js"; +import * as views from "./views.js"; declare const type: unique symbol; @@ -72,9 +72,9 @@ export interface AqlLiteral { * helper function. */ export type AqlValue = - | ArangoCollection - | View - | Graph + | collections.ArangoCollection + | views.View + | graphs.Graph | GeneratedAqlQuery | AqlLiteral | string @@ -124,7 +124,7 @@ export function isAqlLiteral(literal: any): literal is AqlLiteral { * Tagged template strings will return an {@link AqlQuery} object with * `query` and `bindVars` attributes reflecting any interpolated values. 
* - * Any {@link collection.ArangoCollection} instance used in a query string will + * Any {@link collections.ArangoCollection} instance used in a query string will * be recognized as a collection reference and generate an AQL collection bind * parameter instead of a regular AQL value bind parameter. * @@ -224,7 +224,7 @@ export function aql( 2, strings[i] + src.strings[0], ...src.strings.slice(1, src.args.length), - src.strings[src.args.length] + strings[i + 1] + src.strings[src.args.length] + strings[i + 1], ); } else { query += rawValue.query + strings[i + 1]; @@ -246,10 +246,10 @@ export function aql( const isKnown = index !== -1; let name = `value${isKnown ? index : bindValues.length}`; if ( - isArangoCollection(rawValue) || - isArangoGraph(rawValue) || - isArangoView(rawValue) || - isArangoAnalyzer(rawValue) + collections.isArangoCollection(rawValue) || + graphs.isArangoGraph(rawValue) || + views.isArangoView(rawValue) || + analyzers.isArangoAnalyzer(rawValue) ) { name = `@${name}`; value = rawValue.name; @@ -319,7 +319,7 @@ export function aql( * ``` */ export function literal( - value: string | number | boolean | AqlLiteral | null | undefined + value: string | number | boolean | AqlLiteral | null | undefined, ): AqlLiteral { if (isAqlLiteral(value)) { return value; diff --git a/src/cluster.ts b/src/cluster.ts new file mode 100644 index 000000000..4aa0169df --- /dev/null +++ b/src/cluster.ts @@ -0,0 +1,186 @@ +/** + * ```ts + * import type { ClusterImbalanceInfo } from "arangojs/cluster"; + * ``` + * + * The "cluster" module provides types for cluster management. + * + * @packageDocumentation + */ + +//#region Cluster operation options +/** + * Options for rebalancing the cluster. + */ +export type ClusterRebalanceOptions = { + /** + * Maximum number of moves to be computed. + * + * Default: `1000` + */ + maximumNumberOfMoves?: number; + /** + * Allow leader changes without moving data. 
+ * + * Default: `true` + */ + leaderChanges?: boolean; + /** + * Allow moving leaders. + * + * Default: `false` + */ + moveLeaders?: boolean; + /** + * Allow moving followers. + * + * Default: `false` + */ + moveFollowers?: boolean; + /** + * Ignore system collections in the rebalance plan. + * + * Default: `false` + */ + excludeSystemCollections?: boolean; + /** + * Default: `256**6` + */ + piFactor?: number; + /** + * A list of database names to exclude from the analysis. + * + * Default: `[]` + */ + databasesExcluded?: string[]; +}; +//#endregion + +//#region Cluster operation results +/** + * The result of a cluster rebalance. + */ +export type ClusterRebalanceResult = { + /** + * Imbalance before the suggested move shard operations are applied. + */ + imbalanceBefore: ClusterImbalanceInfo; + /** + * Expected imbalance after the suggested move shard operations are applied. + */ + imbalanceAfter: ClusterImbalanceInfo; + /** + * Suggested move shard operations. + */ + moves: ClusterRebalanceMove[]; +}; + +/** + * Information about the current state of the cluster imbalance. + */ +export type ClusterRebalanceState = ClusterImbalanceInfo & { + /** + * The number of pending move shard operations. + */ + pendingMoveShards: number; + /** + * The number of planned move shard operations. + */ + todoMoveShards: number; +}; + +/** + * Information about a cluster imbalance. + */ +export type ClusterImbalanceInfo = { + /** + * Information about the leader imbalance. + */ + leader: { + /** + * The weight of leader shards per DB-Server. A leader has a weight of 1 by default but it is higher if collections can only be moved together because of `distributeShardsLike`. + */ + weightUsed: number[]; + /** + * The ideal weight of leader shards per DB-Server. + */ + targetWeight: number[]; + /** + * The number of leader shards per DB-Server. + */ + numberShards: number[]; + /** + * The measure of the leader shard distribution. The higher the number, the worse the distribution. 
+ */ + leaderDupl: number[]; + /** + * The sum of all weights. + */ + totalWeight: number; + /** + * The measure of the total imbalance. A high value indicates a high imbalance. + */ + imbalance: number; + /** + * The sum of shards, counting leader shards only. + */ + totalShards: number; + }; + /** + * Information about the shard imbalance. + */ + shards: { + /** + * The size of shards per DB-Server. + */ + sizeUsed: number[]; + /** + * The ideal size of shards per DB-Server. + */ + targetSize: number[]; + /** + * The number of leader and follower shards per DB-Server. + */ + numberShards: number[]; + /** + * The sum of the sizes. + */ + totalUsed: number; + /** + * The sum of shards, counting leader and follower shards. + */ + totalShards: number; + /** + * The sum of system collection shards, counting leader shards only. + */ + totalShardsFromSystemCollections: number; + /** + * The measure of the total imbalance. A high value indicates a high imbalance. + */ + imbalance: number; + }; +}; + +export type ClusterRebalanceMove = { + /** + * The server name from which to move. + */ + from: string; + /** + * The ID of the destination server. + */ + to: string; + /** + * Shard ID of the shard to be moved. + */ + shard: string; + /** + * Collection ID of the collection the shard belongs to. + */ + collection: number; + /** + * True if this is a leader move shard operation. + */ + isLeader: boolean; +}; +//#endregion diff --git a/src/collection.ts b/src/collections.ts similarity index 69% rename from src/collection.ts rename to src/collections.ts index 2ce5a09da..9c2c60be8 100644 --- a/src/collection.ts +++ b/src/collections.ts @@ -3,50 +3,23 @@ * import type { * DocumentCollection, * EdgeCollection, - * } from "arangojs/collection.js"; + * } from "arangojs/collections"; * ``` * - * The "collection" module provides collection related types and interfaces + * The "collections" module provides collection related types and interfaces * for TypeScript. 
* * @packageDocumentation */ -import { AqlLiteral, AqlQuery, isAqlLiteral, isAqlQuery } from "./aql.js"; -import { ArangoApiResponse } from "./connection.js"; -import { Database } from "./database.js"; -import { - Document, - DocumentData, - DocumentMetadata, - DocumentSelector, - Edge, - EdgeData, - ObjectWithKey, - Patch, - _documentHandle, -} from "./documents.js"; -import { HttpError, isArangoError } from "./error.js"; -import { - EnsureGeoIndexOptions, - EnsureIndexOptions, - EnsureInvertedIndexOptions, - EnsureMdiIndexOptions, - EnsureMdiPrefixedIndexOptions, - EnsurePersistentIndexOptions, - EnsureTtlIndexOptions, - GeoIndex, - HiddenIndex, - Index, - IndexSelector, - InvertedIndex, - MdiIndex, - MdiPrefixedIndex, - PersistentIndex, - TtlIndex, - _indexHandle, -} from "./indexes.js"; +import * as aql from "./aql.js"; +import * as connection from "./connection.js"; +import * as databases from "./databases.js"; +import * as documents from "./documents.js"; +import * as errors from "./errors.js"; +import * as indexes from "./indexes.js"; import { COLLECTION_NOT_FOUND, DOCUMENT_NOT_FOUND } from "./lib/codes.js"; +//#region ArangoCollection interface /** * Indicates whether the given value represents an {@link ArangoCollection}. * @@ -76,7 +49,7 @@ export function collectionToString( * A marker interface identifying objects that can be used in AQL template * strings to create references to ArangoDB collections. * - * See {@link aql!aql}. + * See {@link aql.aql}. */ export interface ArangoCollection { /** @@ -90,7 +63,9 @@ export interface ArangoCollection { */ readonly name: string; } +//#endregion +//#region Shared types /** * Integer values indicating the collection type. */ @@ -142,417 +117,16 @@ export type ValidationLevel = "none" | "new" | "moderate" | "strict"; * Write operation that can result in a computed value being computed. 
*/ export type WriteOperation = "insert" | "update" | "replace"; +//#endregion -/** - * Represents a bulk operation failure for an individual document. - */ -export type DocumentOperationFailure = { - /** - * Indicates that the operation failed. - */ - error: true; - /** - * Human-readable description of the failure. - */ - errorMessage: string; - /** - * Numeric representation of the failure. - */ - errorNum: number; -}; - -/** - * Metadata returned by a document operation. - */ -export type DocumentOperationMetadata = DocumentMetadata & { - /** - * Revision of the document that was updated or replaced by this operation. - */ - _oldRev?: string; -}; - -/** - * Properties defining a computed value. - */ -export type ComputedValueProperties = { - /** - * Name of the target attribute of the computed value. - */ - name: string; - /** - * AQL `RETURN` expression that computes the value. - */ - expression: string; - /** - * If set to `false`, the computed value will not be applied if the - * expression evaluates to `null`. - */ - overwrite: boolean; - /** - * Which operations should result in the value being computed. - */ - computeOn: WriteOperation[]; - /** - * If set to `false`, the field will be unset if the expression evaluates to - * `null`. Otherwise the field will be set to the value `null`. Has no effect - * if `overwrite` is set to `false`. - */ - keepNull: boolean; - /** - * Whether the write operation should fail if the expression produces a - * warning. - */ - failOnWarning: boolean; -}; - -/** - * General information about a collection. - */ -export type CollectionMetadata = { - /** - * Collection name. - */ - name: string; - /** - * A globally unique identifier for this collection. - */ - globallyUniqueId: string; - /** - * An integer indicating the collection loading status. - */ - status: CollectionStatus; - /** - * An integer indicating the collection type. 
- */ - type: CollectionType; - /** - * @internal - * - * Whether the collection is a system collection. - */ - isSystem: boolean; -}; - -/** - * An object defining the collection's key generation. - */ -export type CollectionKeyProperties = { - /** - * Type of key generator to use. - */ - type: KeyGenerator; - /** - * Whether documents can be created with a user-specified `_key` attribute. - */ - allowUserKeys: boolean; - /** - * (Autoincrement only.) How many steps to increment the key each time. - */ - increment?: number; - /** - * (Autoincrement only.) Initial offset for the key. - */ - offset?: number; - /** - * Most recent key that has been generated. - */ - lastValue: number; -}; - -/** - * Properties for validating documents in a collection. - */ -export type SchemaProperties = { - /** - * Type of document validation. - */ - type: "json"; - /** - * JSON Schema description of the validation schema for documents. - */ - rule: any; - /** - * When validation should be applied. - */ - level: ValidationLevel; - /** - * Message to be used if validation fails. - */ - message: string; -}; - -/** - * An object defining the properties of a collection. - */ -export type CollectionProperties = { - /** - * A human-readable representation of the collection loading status. - */ - statusString: string; - /** - * Whether data should be synchronized to disk before returning from - * a document create, update, replace or removal operation. - */ - waitForSync: boolean; - /** - * An object defining the collection's key generation. - */ - keyOptions: CollectionKeyProperties; - /** - * Properties for validating documents in the collection. - */ - schema: SchemaProperties | null; - /** - * (Cluster only.) Write concern for this collection. - */ - writeConcern: number; - /** - * (Cluster only.) Number of shards of this collection. - */ - numberOfShards?: number; - /** - * (Cluster only.) Keys of this collection that will be used for - * sharding. 
- */ - shardKeys?: string[]; - /** - * (Cluster only.) Replication factor of the collection. - */ - replicationFactor?: number | "satellite"; - /** - * (Cluster only.) Sharding strategy of the collection. - */ - shardingStrategy?: ShardingStrategy; - /** - * (Enterprise Edition cluster only.) If set to a collection name, sharding - * of the new collection will follow the rules for that collection. As long - * as the new collection exists, the indicated collection can not be dropped. - */ - distributeShardsLike?: string; - /** - * (Enterprise Edition cluster only.) Attribute containing the shard key - * value of the referred-to smart join collection. - */ - smartJoinAttribute?: string; - /** - * (Enterprise Edition cluster only.) Attribute used for sharding. - */ - smartGraphAttribute?: string; - /** - * Computed values applied to documents in this collection. - */ - computedValues: ComputedValueProperties[]; - /** - * Whether the in-memory hash cache is enabled for this collection. - */ - cacheEnabled: boolean; - /** - * Whether the newer revision-based replication protocol is enabled for - * this collection. - */ - syncByRevision: boolean; - /** - * (Enterprise Edition only.) Whether the collection is used in a SmartGraph or EnterpriseGraph. - */ - isSmart?: boolean; - /** - * (Enterprise Edition only.) Whether the SmartGraph this collection belongs to is disjoint. - */ - isDisjoint?: string; -}; - -// Options - -/** - * Options for creating a computed value. - */ -export type ComputedValueOptions = { - /** - * Name of the target attribute of the computed value. - */ - name: string; - /** - * AQL `RETURN` expression that computes the value. - * - * Note that when passing an AQL query object, the `bindVars` will be ignored. - */ - expression: string | AqlLiteral | AqlQuery; - /** - * If set to `false`, the computed value will not be applied if the - * expression evaluates to `null`. 
- * - * Default: `true` - */ - overwrite?: boolean; - /** - * Which operations should result in the value being computed. - * - * Default: `["insert", "update", "replace"]` - */ - computeOn?: WriteOperation[]; - /** - * If set to `false`, the field will be unset if the expression evaluates to - * `null`. Otherwise the field will be set to the value `null`. Has no effect - * if `overwrite` is set to `false`. - * - * Default: `true` - */ - keepNull?: boolean; - /** - * Whether the write operation should fail if the expression produces a - * warning. - * - * Default: `false` - */ - failOnWarning?: boolean; -}; - -/** - * Options for validating collection documents. - */ -export type SchemaOptions = { - /** - * JSON Schema description of the validation schema for documents. - */ - rule: any; - /** - * When validation should be applied. - * - * Default: `"strict"` - */ - level?: ValidationLevel; - /** - * Message to be used if validation fails. - */ - message?: string; -}; - -/** - * Options for setting a collection's properties. - * - * See {@link DocumentCollection#properties} and {@link EdgeCollection#properties}. - */ -export type CollectionPropertiesOptions = { - /** - * Whether data should be synchronized to disk before returning from - * a document create, update, replace or removal operation. - */ - waitForSync?: boolean; - /** - * (Cluster only.) How many copies of each document should be kept in the - * cluster. - * - * Default: `1` - */ - replicationFactor?: number | "satellite"; - /** - * (Cluster only.) Write concern for this collection. - */ - writeConcern?: number; - /** - * Options for validating documents in this collection. - */ - schema?: SchemaOptions; - /** - * Computed values to apply to documents in this collection. - */ - computedValues?: ComputedValueOptions[]; - /** - * Whether the in-memory hash cache is enabled for this collection. 
- * - * Default: `false` - */ - cacheEnabled?: boolean; -}; - -/** - * Options for retrieving a collection checksum. - */ -export type CollectionChecksumOptions = { - /** - * If set to `true`, revision IDs will be included in the calculation - * of the checksum. - * - * Default: `false` - */ - withRevisions?: boolean; - /** - * If set to `true`, document data will be included in the calculation - * of the checksum. - * - * Default: `false` - */ - withData?: boolean; -}; - -/** - * Options for truncating collections. - */ -export type CollectionTruncateOptions = { - /** - * Whether the collection should be compacted after truncation. - */ - compact?: boolean; - /** - * Whether data should be synchronized to disk before returning from this - * operation. - */ - waitForSync?: boolean; -}; - -/** - * Options for dropping collections. - */ -export type CollectionDropOptions = { - /** - * Whether the collection is a system collection. If the collection is a - * system collection, this option must be set to `true` or ArangoDB will - * refuse to drop the collection. - * - * Default: `false` - */ - isSystem?: boolean; -}; - -/** - * An object defining the collection's key generation. - */ -export type CollectionKeyOptions = { - /** - * Type of key generator to use. - */ - type?: KeyGenerator; - /** - * Unless set to `false`, documents can be created with a user-specified - * `_key` attribute. - * - * Default: `true` - */ - allowUserKeys?: boolean; - /** - * (Autoincrement only.) How many steps to increment the key each time. - */ - increment?: number; - /** - * (Autoincrement only.) Initial offset for the key. - */ - offset?: number; -}; - +//#region Collection operation options /** * Options for creating a collection. 
* - * See {@link database.Database#createCollection}, {@link database.Database#createEdgeCollection} + * See {@link databases.Database#createCollection}, {@link databases.Database#createEdgeCollection} * and {@link DocumentCollection#create} or {@link EdgeCollection#create}. */ -export type CreateCollectionOptions = { - /** - * If set to `true`, data will be synchronized to disk before returning from - * a document create, update, replace or removal operation. - * - * Default: `false` - */ - waitForSync?: boolean; +export type CreateCollectionOptions = CollectionPropertiesOptions & { /** * @internal * @@ -565,10 +139,6 @@ export type CreateCollectionOptions = { * An object defining the collection's key generation. */ keyOptions?: CollectionKeyOptions; - /** - * Options for validating documents in the collection. - */ - schema?: SchemaOptions; /** * (Cluster only.) Unless set to `false`, the server will wait for all * replicas to create the collection before returning. @@ -596,17 +166,6 @@ export type CreateCollectionOptions = { * Default: `["_key"]` */ shardKeys?: string[]; - /** - * (Cluster only.) How many copies of each document should be kept in the - * cluster. - * - * Default: `1` - */ - replicationFactor?: number; - /** - * (Cluster only.) Write concern for this collection. - */ - writeConcern?: number; /** * (Cluster only.) Sharding strategy to use. */ @@ -626,458 +185,384 @@ export type CreateCollectionOptions = { * (Enterprise Edition cluster only.) Attribute used for sharding. */ smartGraphAttribute?: string; - /** - * Computed values to apply to documents in this collection. - */ - computedValues?: ComputedValueOptions[]; - /** - * Whether the in-memory hash cache is enabled for this collection. - */ - cacheEnabled?: boolean; }; /** - * Options for checking whether a document exists in a collection. + * An object defining the collection's key generation. 
*/ -export type DocumentExistsOptions = { +export type CollectionKeyOptions = { + /** + * Type of key generator to use. + */ + type?: KeyGenerator; /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. + * Unless set to `false`, documents can be created with a user-specified + * `_key` attribute. + * + * Default: `true` */ - allowDirtyRead?: boolean; + allowUserKeys?: boolean; /** - * If set to a document revision, the document will only match if its `_rev` - * matches the given revision. + * (Autoincrement only.) How many steps to increment the key each time. */ - ifMatch?: string; + increment?: number; /** - * If set to a document revision, the document will only match if its `_rev` - * does not match the given revision. + * (Autoincrement only.) Initial offset for the key. */ - ifNoneMatch?: string; + offset?: number; }; /** - * Options for retrieving a document from a collection. + * Options for setting a collection's properties. + * + * See {@link DocumentCollection#properties} and {@link EdgeCollection#properties}. */ -export type CollectionReadOptions = { +export type CollectionPropertiesOptions = { + /** + * If set to `true`, data will be synchronized to disk before returning from + * a document create, update, replace or removal operation. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * (Cluster only.) How many copies of each document should be kept in the + * cluster. + * + * Default: `1` + */ + replicationFactor?: number | "satellite"; /** - * If set to `true`, `null` is returned instead of an exception being thrown - * if the document does not exist. + * (Cluster only.) Write concern for this collection. 
*/ - graceful?: boolean; + writeConcern?: number; /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. + * Options for validating documents in this collection. */ - allowDirtyRead?: boolean; + schema?: SchemaOptions; /** - * If set to a document revision, the request will fail with an error if the - * document exists but its `_rev` does not match the given revision. + * Computed values to apply to documents in this collection. */ - ifMatch?: string; + computedValues?: ComputedValueOptions[]; /** - * If set to a document revision, the request will fail with an error if the - * document exists and its `_rev` matches the given revision. Note that an - * `HttpError` with code 304 will be thrown instead of an `ArangoError`. + * Whether the in-memory hash cache is enabled for this collection. + * + * Default: `false` */ - ifNoneMatch?: string; + cacheEnabled?: boolean; }; /** - * Options for retrieving multiple documents from a collection. + * Options for validating collection documents. */ -export type CollectionBatchReadOptions = { +export type SchemaOptions = { /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. + * JSON Schema description of the validation schema for documents. */ - allowDirtyRead?: boolean; + rule: any; /** - * If set to `false`, the existing document will only be modified if its - * `_rev` property matches the same property on the new data. + * When validation should be applied. * - * Default: `true` + * Default: `"strict"` + */ + level?: ValidationLevel; + /** + * Message to be used if validation fails. */ - ignoreRevs?: boolean; + message?: string; }; /** - * Options for inserting a new document into a collection. 
+ * Options for creating a computed value. */ -export type CollectionInsertOptions = { - /** - * If set to `true`, data will be synchronized to disk before returning. - * - * Default: `false` - */ - waitForSync?: boolean; +export type ComputedValueOptions = { /** - * If set to `true`, no data will be returned by the server. This option can - * be used to reduce network traffic. - * - * Default: `false` + * Name of the target attribute of the computed value. */ - silent?: boolean; + name: string; /** - * If set to `true`, the complete new document will be returned as the `new` - * property on the result object. Has no effect if `silent` is set to `true`. + * AQL `RETURN` expression that computes the value. * - * Default: `false` + * Note that when passing an AQL query object, the `bindVars` will be ignored. */ - returnNew?: boolean; + expression: string | aql.AqlLiteral | aql.AqlQuery; /** - * If set to `true`, the complete old document will be returned as the `old` - * property on the result object. Has no effect if `silent` is set to `true`. - * This option is only available when `overwriteMode` is set to `"update"` or - * `"replace"`. + * If set to `false`, the computed value will not be applied if the + * expression evaluates to `null`. * - * Default: `false` + * Default: `true` */ - returnOld?: boolean; + overwrite?: boolean; /** - * Defines what should happen if a document with the same `_key` or `_id` - * already exists, instead of throwing an exception. + * Which operations should result in the value being computed. * - * Default: `"conflict" + * Default: `["insert", "update", "replace"]` */ - overwriteMode?: "ignore" | "update" | "replace" | "conflict"; + computeOn?: WriteOperation[]; /** - * If set to `false`, properties with a value of `null` will be removed from - * the new document. + * If set to `false`, the field will be unset if the expression evaluates to + * `null`. Otherwise the field will be set to the value `null`. 
Has no effect + * if `overwrite` is set to `false`. * * Default: `true` */ keepNull?: boolean; /** - * If set to `false`, object properties that already exist in the old - * document will be overwritten rather than merged when an existing document - * with the same `_key` or `_id` is updated. This does not affect arrays. + * Whether the write operation should fail if the expression produces a + * warning. * - * Default: `true` + * Default: `false` */ - mergeObjects?: boolean; + failOnWarning?: boolean; +}; + +/** + * Options for retrieving a collection checksum. + */ +export type CollectionChecksumOptions = { /** - * If set to `true`, new entries will be added to in-memory index caches if - * document insertions affect the edge index or cache-enabled persistent - * indexes. + * If set to `true`, revision IDs will be included in the calculation + * of the checksum. * * Default: `false` */ - refillIndexCaches?: boolean; + withRevisions?: boolean; /** - * If set, the attribute with the name specified by the option is looked up - * in the stored document and the attribute value is compared numerically to - * the value of the versioning attribute in the supplied document that is - * supposed to update/replace it. + * If set to `true`, document data will be included in the calculation + * of the checksum. + * + * Default: `false` */ - versionAttribute?: string; + withData?: boolean; }; /** - * Options for replacing an existing document in a collection. + * Options for truncating collections. */ -export type CollectionReplaceOptions = { +export type TruncateCollectionOptions = { /** - * If set to `true`, data will be synchronized to disk before returning. - * - * Default: `false` + * Whether the collection should be compacted after truncation. */ - waitForSync?: boolean; + compact?: boolean; /** - * If set to `true`, no data will be returned by the server. This option can - * be used to reduce network traffic. 
- * - * Default: `false` + * Whether data should be synchronized to disk before returning from this + * operation. */ - silent?: boolean; + waitForSync?: boolean; +}; + +/** + * Options for dropping collections. + */ +export type DropCollectionOptions = { /** - * If set to `true`, the complete new document will be returned as the `new` - * property on the result object. Has no effect if `silent` is set to `true`. + * Whether the collection is a system collection. If the collection is a + * system collection, this option must be set to `true` or ArangoDB will + * refuse to drop the collection. * * Default: `false` */ - returnNew?: boolean; + isSystem?: boolean; +}; +//#endregion + +//#region CollectionDescription +/** + * General information about a collection. + */ +export type CollectionDescription = { /** - * If set to `false`, the existing document will only be modified if its - * `_rev` property matches the same property on the new data. - * - * Default: `true` + * Collection name. */ - ignoreRevs?: boolean; + name: string; /** - * If set to `true`, the complete old document will be returned as the `old` - * property on the result object. Has no effect if `silent` is set to `true`. - * - * Default: `false` + * A globally unique identifier for this collection. */ - returnOld?: boolean; + globallyUniqueId: string; /** - * If set to a document revision, the document will only be replaced if its - * `_rev` matches the given revision. + * An integer indicating the collection loading status. */ - ifMatch?: string; + status: CollectionStatus; /** - * If set to `true`, existing entries in in-memory index caches will be - * updated if document replacements affect the edge index or cache-enabled - * persistent indexes. - * - * Default: `false` + * An integer indicating the collection type. 
*/ - refillIndexCaches?: boolean; + type: CollectionType; /** - * If set, the attribute with the name specified by the option is looked up - * in the stored document and the attribute value is compared numerically to - * the value of the versioning attribute in the supplied document that is - * supposed to update/replace it. + * @internal + * + * Whether the collection is a system collection. */ - versionAttribute?: string; + isSystem: boolean; }; +//#endregion +//#region CollectionProperties /** - * Options for updating a document in a collection. + * An object defining the properties of a collection. */ -export type CollectionUpdateOptions = { +export type CollectionProperties = { /** - * If set to `true`, data will be synchronized to disk before returning. - * - * Default: `false` + * A human-readable representation of the collection loading status. */ - waitForSync?: boolean; + statusString: string; /** - * If set to `true`, no data will be returned by the server. This option can - * be used to reduce network traffic. - * - * Default: `false` + * Whether data should be synchronized to disk before returning from + * a document create, update, replace or removal operation. */ - silent?: boolean; + waitForSync: boolean; /** - * If set to `true`, the complete new document will be returned as the `new` - * property on the result object. Has no effect if `silent` is set to `true`. - * - * Default: `false` + * An object defining the collection's key generation. */ - returnNew?: boolean; + keyOptions: CollectionKeyProperties; /** - * If set to `false`, the existing document will only be modified if its - * `_rev` property matches the same property on the new data. - * - * Default: `true` + * Properties for validating documents in the collection. */ - ignoreRevs?: boolean; + schema: SchemaProperties | null; /** - * If set to `true`, the complete old document will be returned as the `old` - * property on the result object. Has no effect if `silent` is set to `true`. 
- * - * Default: `false` + * (Cluster only.) Write concern for this collection. */ - returnOld?: boolean; + writeConcern: number; /** - * If set to `false`, properties with a value of `null` will be removed from - * the new document. - * - * Default: `true` + * (Cluster only.) Number of shards of this collection. */ - keepNull?: boolean; + numberOfShards?: number; /** - * If set to `false`, object properties that already exist in the old - * document will be overwritten rather than merged. This does not affect - * arrays. - * - * Default: `true` + * (Cluster only.) Keys of this collection that will be used for + * sharding. */ - mergeObjects?: boolean; + shardKeys?: string[]; /** - * If set to a document revision, the document will only be updated if its - * `_rev` matches the given revision. + * (Cluster only.) Replication factor of the collection. */ - ifMatch?: string; + replicationFactor?: number | "satellite"; /** - * If set to `true`, existing entries in in-memory index caches will be - * updated if document updates affect the edge index or cache-enabled - * persistent indexes. - * - * Default: `false` + * (Cluster only.) Sharding strategy of the collection. */ - refillIndexCaches?: boolean; + shardingStrategy?: ShardingStrategy; /** - * If set, the attribute with the name specified by the option is looked up - * in the stored document and the attribute value is compared numerically to - * the value of the versioning attribute in the supplied document that is - * supposed to update/replace it. + * (Enterprise Edition cluster only.) If set to a collection name, sharding + * of the new collection will follow the rules for that collection. As long + * as the new collection exists, the indicated collection can not be dropped. */ - versionAttribute?: string; -}; - -/** - * Options for removing a document from a collection. 
- */ -export type CollectionRemoveOptions = { + distributeShardsLike?: string; /** - * If set to `true`, changes will be synchronized to disk before returning. - * - * Default: `false` + * (Enterprise Edition cluster only.) Attribute containing the shard key + * value of the referred-to smart join collection. */ - waitForSync?: boolean; + smartJoinAttribute?: string; /** - * If set to `true`, the complete old document will be returned as the `old` - * property on the result object. Has no effect if `silent` is set to `true`. - * - * Default: `false` + * (Enterprise Edition cluster only.) Attribute used for sharding. + */ + smartGraphAttribute?: string; + /** + * Computed values applied to documents in this collection. + */ + computedValues: ComputedValueProperties[]; + /** + * Whether the in-memory hash cache is enabled for this collection. */ - returnOld?: boolean; + cacheEnabled: boolean; /** - * If set to `true`, no data will be returned by the server. This option can - * be used to reduce network traffic. - * - * Default: `false` + * Whether the newer revision-based replication protocol is enabled for + * this collection. */ - silent?: boolean; + syncByRevision: boolean; /** - * If set to a document revision, the document will only be removed if its - * `_rev` matches the given revision. + * (Enterprise Edition only.) Whether the collection is used in a SmartGraph or EnterpriseGraph. */ - ifMatch?: string; + isSmart?: boolean; /** - * If set to `true`, existing entries in in-memory index caches will be - * deleted if document removals affect the edge index or cache-enabled - * persistent indexes. - * - * Default: `false` + * (Enterprise Edition only.) Whether the SmartGraph this collection belongs to is disjoint. */ - refillIndexCaches?: boolean; + isDisjoint?: string; }; /** - * Options for bulk importing documents into a collection. + * An object defining the collection's key generation. 
*/ -export type CollectionImportOptions = { - /** - * (Edge collections only.) Prefix to prepend to `_from` attribute values. - */ - fromPrefix?: string; - /** - * (Edge collections only.) Prefix to prepend to `_to` attribute values. - */ - toPrefix?: string; +export type CollectionKeyProperties = { /** - * If set to `true`, the collection is truncated before the data is imported. - * - * Default: `false` + * Type of key generator to use. */ - overwrite?: boolean; + type: KeyGenerator; /** - * Whether to wait for the documents to have been synced to disk. + * Whether documents can be created with a user-specified `_key` attribute. */ - waitForSync?: boolean; + allowUserKeys: boolean; /** - * Controls behavior when a unique constraint is violated on the document key. - * - * * `"error"`: the document will not be imported. - * * `"update`: the document will be merged into the existing document. - * * `"replace"`: the document will replace the existing document. - * * `"ignore"`: the document will not be imported and the unique constraint - * error will be ignored. - * - * Default: `"error"` + * (Autoincrement only.) How many steps to increment the key each time. */ - onDuplicate?: "error" | "update" | "replace" | "ignore"; + increment?: number; /** - * If set to `true`, the import will abort if any error occurs. + * (Autoincrement only.) Initial offset for the key. */ - complete?: boolean; + offset?: number; /** - * Whether the response should contain additional details about documents - * that could not be imported. + * Most recent key that has been generated. */ - details?: boolean; + lastValue: number; }; /** - * Options for retrieving a document's edges from a collection. + * Properties for validating documents in a collection. 
*/ -export type CollectionEdgesOptions = { +export type SchemaProperties = { /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. + * Type of document validation. */ - allowDirtyRead?: boolean; -}; - -export type IndexListOptions = { + type: "json"; /** - * If set to `true`, includes additional information about each index. - * - * Default: `false` + * JSON Schema description of the validation schema for documents. */ - withStats?: boolean; + rule: any; /** - * If set to `true`, includes internal indexes as well as indexes that are - * not yet fully built but are in the building phase. - * - * You should cast the resulting indexes to `HiddenIndex` to ensure internal - * and incomplete indexes are accurately represented. - * - * Default: `false`. + * When validation should be applied. + */ + level: ValidationLevel; + /** + * Message to be used if validation fails. */ - withHidden?: boolean; + message: string; }; -// Results - /** - * Result of a collection bulk import. + * Properties defining a computed value. */ -export type CollectionImportResult = { - /** - * Whether the import failed. - */ - error: false; +export type ComputedValueProperties = { /** - * Number of new documents imported. + * Name of the target attribute of the computed value. */ - created: number; + name: string; /** - * Number of documents that failed with an error. + * AQL `RETURN` expression that computes the value. */ - errors: number; + expression: string; /** - * Number of empty documents. + * If set to `false`, the computed value will not be applied if the + * expression evaluates to `null`. */ - empty: number; + overwrite: boolean; /** - * Number of documents updated. + * Which operations should result in the value being computed. 
*/ - updated: number; + computeOn: WriteOperation[]; /** - * Number of documents that failed with an error that is ignored. + * If set to `false`, the field will be unset if the expression evaluates to + * `null`. Otherwise the field will be set to the value `null`. Has no effect + * if `overwrite` is set to `false`. */ - ignored: number; + keepNull: boolean; /** - * Additional details about any errors encountered during the import. + * Whether the write operation should fail if the expression produces a + * warning. */ - details?: string[]; -}; - -/** - * Result of retrieving edges in a collection. - */ -export type CollectionEdgesResult = any> = { - edges: Edge[]; - stats: { - scannedIndex: number; - filtered: number; - }; + failOnWarning: boolean; }; +//#endregion -// Collections - +//#region DocumentCollection interface /** - * Represents an document collection in a {@link database.Database}. + * Represents an document collection in a {@link databases.Database}. * * See {@link EdgeCollection} for a variant of this interface more suited for * edge collections. @@ -1085,7 +570,10 @@ export type CollectionEdgesResult = any> = { * When using TypeScript, collections can be cast to a specific document data * type to increase type safety. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent document contents returned by the + * server (including computed properties). + * @param EntryInputType - Type to represent document contents passed when + * inserting or replacing documents (without computed properties). * * @example * ```ts @@ -1103,7 +591,8 @@ export interface DocumentCollection< /** * Database this collection belongs to. */ - readonly database: Database; + readonly database: databases.Database; + //#region Collection operations /** * Checks whether the collection exists. 
* @@ -1127,12 +616,12 @@ export interface DocumentCollection< * // data contains general information about the collection * ``` */ - get(): Promise>; + get(): Promise>; /** * Creates a collection with the given `options` and the instance's name. * - * See also {@link database.Database#createCollection} and - * {@link database.Database#createEdgeCollection}. + * See also {@link databases.Database#createCollection} and + * {@link databases.Database#createEdgeCollection}. * * **Note**: When called on an {@link EdgeCollection} instance in TypeScript, * the `type` option must still be set to the correct {@link CollectionType}. @@ -1175,7 +664,9 @@ export interface DocumentCollection< options?: CreateCollectionOptions & { type?: CollectionType; } - ): Promise>; + ): Promise< + connection.ArangoApiResponse + >; /** * Retrieves the collection's properties. * @@ -1188,7 +679,7 @@ export interface DocumentCollection< * ``` */ properties(): Promise< - ArangoApiResponse + connection.ArangoApiResponse >; /** * Replaces the properties of the collection. @@ -1204,7 +695,9 @@ export interface DocumentCollection< */ properties( properties: CollectionPropertiesOptions - ): Promise>; + ): Promise< + connection.ArangoApiResponse + >; /** * Retrieves information about the number of documents in a collection. 
* @@ -1217,8 +710,8 @@ export interface DocumentCollection< * ``` */ count(): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { count: number } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { count: number } > >; /** @@ -1254,8 +747,8 @@ export interface DocumentCollection< figures( details?: boolean ): Promise< - ArangoApiResponse< - CollectionMetadata & + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { count: number; figures: Record } > >; @@ -1271,8 +764,8 @@ export interface DocumentCollection< * ``` */ revision(): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { revision: string } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { revision: string } > >; /** @@ -1291,23 +784,38 @@ export interface DocumentCollection< checksum( options?: CollectionChecksumOptions ): Promise< - ArangoApiResponse< - CollectionMetadata & { revision: string; checksum: string } + connection.ArangoApiResponse< + CollectionDescription & { revision: string; checksum: string } > >; /** - * Instructs ArangoDB to load as many indexes of the collection into memory - * as permitted by the memory limit. + * Retrieves the collection's shard IDs. * - * @example - * ```js - * const db = new Database(); - * const collection = db.collection("indexed-collection"); - * await collection.loadIndexes(); - * // the indexes are now loaded into memory - * ``` + * @param details - If set to `true`, the response will include the responsible + * servers for each shard. */ - loadIndexes(): Promise; + shards( + details?: false + ): Promise< + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { shards: string[] } + > + >; + /** + * Retrieves the collection's shard IDs and the responsible servers for each + * shard. 
+ * + * @param details - If set to `false`, the response will only include the + * shard IDs without the responsible servers for each shard. + */ + shards( + details: true + ): Promise< + connection.ArangoApiResponse< + CollectionDescription & + CollectionProperties & { shards: Record } + > + >; /** * Retrieves the collection's shard IDs. * @@ -1317,8 +825,8 @@ export interface DocumentCollection< shards( details?: false ): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { shards: string[] } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { shards: string[] } > >; /** @@ -1331,15 +839,15 @@ export interface DocumentCollection< shards( details: true ): Promise< - ArangoApiResponse< - CollectionMetadata & + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { shards: Record } > >; /** * Renames the collection and updates the instance's `name` to `newName`. * - * Additionally removes the instance from the {@link database.Database}'s internal + * Additionally removes the instance from the {@link databases.Database}'s internal * cache. * * **Note**: Renaming collections may not be supported when ArangoDB is @@ -1358,7 +866,9 @@ export interface DocumentCollection< * // collection1 and collection3 represent the same ArangoDB collection! * ``` */ - rename(newName: string): Promise>; + rename( + newName: string + ): Promise>; /** * Deletes all documents in the collection. * @@ -1372,8 +882,8 @@ export interface DocumentCollection< * ``` */ truncate( - options?: CollectionTruncateOptions - ): Promise>; + options?: TruncateCollectionOptions + ): Promise>; /** * Deletes the collection from the database. * @@ -1388,10 +898,23 @@ export interface DocumentCollection< * ``` */ drop( - options?: CollectionDropOptions - ): Promise>>; + options?: DropCollectionOptions + ): Promise>; + /** + * Triggers compaction for a collection. 
+ * + * @example + * ```js + * const db = new Database(); + * const collection = db.collection("some-collection"); + * await collection.compact(); + * // Background compaction is triggered on the collection + * ``` + */ + compact(): Promise>; + //#endregion - //#region crud + //#region Document operations /** * Retrieves the `shardId` of the shard responsible for the given document. * @@ -1405,7 +928,7 @@ export interface DocumentCollection< * ``` */ getResponsibleShard( - document: Partial> + document: Partial> ): Promise; /** * Derives a document `_id` from the given selector for this collection. @@ -1440,7 +963,7 @@ export interface DocumentCollection< * console.log(collection2.documentId(meta._key)); // ok but wrong collection * ``` */ - documentId(selector: DocumentSelector): string; + documentId(selector: documents.DocumentSelector): string; /** * Checks whether a document matching the given key or id exists in this * collection. @@ -1462,8 +985,8 @@ export interface DocumentCollection< * ``` */ documentExists( - selector: DocumentSelector, - options?: DocumentExistsOptions + selector: documents.DocumentSelector, + options?: documents.DocumentExistsOptions ): Promise; /** * Retrieves the document matching the given key or id. @@ -1500,9 +1023,9 @@ export interface DocumentCollection< * ``` */ document( - selector: DocumentSelector, - options?: CollectionReadOptions - ): Promise>; + selector: documents.DocumentSelector, + options?: documents.ReadDocumentOptions + ): Promise>; /** * Retrieves the document matching the given key or id. * @@ -1539,9 +1062,9 @@ export interface DocumentCollection< * ``` */ document( - selector: DocumentSelector, + selector: documents.DocumentSelector, graceful: boolean - ): Promise>; + ): Promise>; /** * Retrieves the documents matching the given key or id values. 
* @@ -1565,9 +1088,9 @@ export interface DocumentCollection< * ``` */ documents( - selectors: (string | ObjectWithKey)[], - options?: CollectionBatchReadOptions - ): Promise[]>; + selectors: (string | documents.ObjectWithDocumentKey)[], + options?: documents.BulkReadDocumentsOptions + ): Promise[]>; /** * Inserts a new document with the given `data` into the collection. * @@ -1586,12 +1109,12 @@ export interface DocumentCollection< * ``` */ save( - data: DocumentData, - options?: CollectionInsertOptions + data: documents.DocumentData, + options?: documents.InsertDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Document; - old?: Document; + documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; } >; /** @@ -1616,15 +1139,15 @@ export interface DocumentCollection< * ``` */ saveAll( - data: Array>, - options?: CollectionInsertOptions + data: Array>, + options?: documents.InsertDocumentOptions ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Document; - old?: Document; + | (documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -1652,13 +1175,13 @@ export interface DocumentCollection< * ``` */ replace( - selector: DocumentSelector, - newData: DocumentData, - options?: CollectionReplaceOptions + selector: documents.DocumentSelector, + newData: documents.DocumentData, + options?: documents.ReplaceDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Document; - old?: Document; + documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; } >; /** @@ -1687,16 +1210,17 @@ export interface DocumentCollection< */ replaceAll( newData: Array< - DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & + ({ _key: string } | { _id: string }) >, - options?: Omit + options?: Omit ): Promise< Array< - | 
(DocumentOperationMetadata & { - new?: Document; - old?: Document; + | (documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -1724,13 +1248,13 @@ export interface DocumentCollection< * ``` */ update( - selector: DocumentSelector, - newData: Patch>, - options?: CollectionUpdateOptions + selector: documents.DocumentSelector, + newData: documents.Patch>, + options?: documents.UpdateDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Document; - old?: Document; + documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; } >; /** @@ -1759,16 +1283,17 @@ export interface DocumentCollection< */ updateAll( newData: Array< - Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & + ({ _key: string } | { _id: string }) >, - options?: Omit + options?: Omit ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Document; - old?: Document; + | (documents.DocumentOperationMetadata & { + new?: documents.Document; + old?: documents.Document; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -1799,9 +1324,11 @@ export interface DocumentCollection< * ``` */ remove( - selector: DocumentSelector, - options?: CollectionRemoveOptions - ): Promise }>; + selector: documents.DocumentSelector, + options?: documents.RemoveDocumentOptions + ): Promise< + documents.DocumentMetadata & { old?: documents.Document } + >; /** * Removes existing documents from the collection. 
* @@ -1821,12 +1348,14 @@ export interface DocumentCollection< * ``` */ removeAll( - selectors: (string | ObjectWithKey)[], - options?: Omit + selectors: (string | documents.ObjectWithDocumentKey)[], + options?: Omit ): Promise< Array< - | (DocumentMetadata & { old?: Document }) - | DocumentOperationFailure + | (documents.DocumentMetadata & { + old?: documents.Document; + }) + | documents.DocumentOperationFailure > >; /** @@ -1849,9 +1378,9 @@ export interface DocumentCollection< * ``` */ import( - data: DocumentData[], - options?: CollectionImportOptions - ): Promise; + data: documents.DocumentData[], + options?: documents.ImportDocumentsOptions + ): Promise; /** * Bulk imports the given `data` into the collection. * @@ -1876,8 +1405,8 @@ export interface DocumentCollection< */ import( data: any[][], - options?: CollectionImportOptions - ): Promise; + options?: documents.ImportDocumentsOptions + ): Promise; /** * Bulk imports the given `data` into the collection. * @@ -1936,13 +1465,26 @@ export interface DocumentCollection< */ import( data: Buffer | Blob | string, - options?: CollectionImportOptions & { + options?: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; } - ): Promise; + ): Promise; //#endregion - //#region indexes + //#region Index operations + /** + * Instructs ArangoDB to load as many indexes of the collection into memory + * as permitted by the memory limit. + * + * @example + * ```js + * const db = new Database(); + * const collection = db.collection("indexed-collection"); + * await collection.loadIndexes(); + * // the indexes are now loaded into memory + * ``` + */ + loadIndexes(): Promise; /** * Returns a list of all index descriptions for the collection. 
* @@ -1959,13 +1501,17 @@ export interface DocumentCollection< * ```js * const db = new Database(); * const collection = db.collection("some-collection"); - * const allIndexes = await collection.indexes({ + * const allIndexes = await collection.indexes({ * withHidden: true * }); * ``` */ - indexes( - options?: IndexListOptions + indexes< + IndexType extends + | indexes.IndexDescription + | indexes.HiddenIndexDescription = indexes.IndexDescription, + >( + options?: indexes.ListIndexesOptions ): Promise; /** * Returns an index description by name or `id` if it exists. @@ -1979,11 +1525,11 @@ export interface DocumentCollection< * const index = await collection.index("some-index"); * ``` */ - index(selector: IndexSelector): Promise; + index(selector: indexes.IndexSelector): Promise; /** * Creates a persistent index on the collection if it does not already exist. * - * @param details - Options for creating the persistent index. + * @param options - Options for creating the persistent index. * * @example * ```js @@ -1999,12 +1545,16 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsurePersistentIndexOptions - ): Promise>; + options: indexes.EnsurePersistentIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.PersistentIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a TTL index on the collection if it does not already exist. * - * @param details - Options for creating the TTL index. + * @param options - Options for creating the TTL index. * * @example * ```js @@ -2031,12 +1581,16 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureTtlIndexOptions - ): Promise>; + options: indexes.EnsureTtlIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.TtlIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a multi-dimensional index on the collection if it does not already exist. * - * @param details - Options for creating the multi-dimensional index. 
+ * @param options - Options for creating the multi-dimensional index. * * @example * ```js @@ -2052,8 +1606,39 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureMdiIndexOptions - ): Promise>; + options: indexes.EnsureMdiIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.MdiIndexDescription & { isNewlyCreated: boolean } + > + >; + /** + * Creates a prefixed multi-dimensional index on the collection if it does + * not already exist. + * + * @param options - Options for creating the prefixed multi-dimensional index. + * + * @example + * ```js + * const db = new Database(); + * const collection = db.collection("some-points"); + * // Create a multi-dimensional index for the attributes x, y and z + * await collection.ensureIndex({ + * type: "mdi-prefixed", + * fields: ["x", "y", "z"], + * prefixFields: ["x"], + * fieldValueTypes: "double" + * }); + * ``` + * ``` + */ + ensureIndex( + options: indexes.EnsureMdiPrefixedIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.MdiPrefixedIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a prefixed multi-dimensional index on the collection if it does not already exist. * @@ -2074,12 +1659,16 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureMdiPrefixedIndexOptions - ): Promise>; + details: indexes.EnsureMdiPrefixedIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.MdiPrefixedIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a geo index on the collection if it does not already exist. * - * @param details - Options for creating the geo index. + * @param options - Options for creating the geo index. 
* * @example * ```js @@ -2094,12 +1683,16 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureGeoIndexOptions - ): Promise>; + options: indexes.EnsureGeoIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.GeoIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates a inverted index on the collection if it does not already exist. * - * @param details - Options for creating the inverted index. + * @param options - Options for creating the inverted index. * * @example * ```js @@ -2113,12 +1706,16 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureInvertedIndexOptions - ): Promise>; + options: indexes.EnsureInvertedIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.InvertedIndexDescription & { isNewlyCreated: boolean } + > + >; /** * Creates an index on the collection if it does not already exist. * - * @param details - Options for creating the index. + * @param options - Options for creating the index. * * @example * ```js @@ -2134,8 +1731,12 @@ export interface DocumentCollection< * ``` */ ensureIndex( - details: EnsureIndexOptions - ): Promise>; + options: indexes.EnsureIndexOptions + ): Promise< + connection.ArangoApiResponse< + indexes.IndexDescription & { isNewlyCreated: boolean } + > + >; /** * Deletes the index with the given name or `id` from the database. * @@ -2150,36 +1751,29 @@ export interface DocumentCollection< * ``` */ dropIndex( - selector: IndexSelector - ): Promise>; - /** - * Triggers compaction for a collection. - * - * @example - * ```js - * const db = new Database(); - * const collection = db.collection("some-collection"); - * await collection.compact(); - * // Background compaction is triggered on the collection - * ``` - */ - compact(): Promise>>; + selector: indexes.IndexSelector + ): Promise>; //#endregion } +//#endregion +//#region EdgeCollection interface /** - * Represents an edge collection in a {@link database.Database}. 
+ * Represents an edge collection in a {@link databases.Database}. * * See {@link DocumentCollection} for a more generic variant of this interface * more suited for regular document collections. * - * See also {@link graph.GraphEdgeCollection} for the type representing an edge - * collection in a {@link graph.Graph}. + * See also {@link graphs.GraphEdgeCollection} for the type representing an edge + * collection in a {@link graphs.Graph}. * * When using TypeScript, collections can be cast to a specific edge document * data type to increase type safety. * - * @param T - Type to use for edge document data. Defaults to `any`. + * @param EntryResultType - Type to represent edge document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent edge document contents passed when + * inserting or replacing edge documents (without computed properties). * * @example * ```ts @@ -2195,6 +1789,7 @@ export interface EdgeCollection< EntryResultType extends Record = any, EntryInputType extends Record = EntryResultType, > extends DocumentCollection { + //#region Document operations /** * Retrieves the document matching the given key or id. * @@ -2230,9 +1825,9 @@ export interface EdgeCollection< * ``` */ document( - selector: DocumentSelector, - options?: CollectionReadOptions - ): Promise>; + selector: documents.DocumentSelector, + options?: documents.ReadDocumentOptions + ): Promise>; /** * Retrieves the document matching the given key or id. * @@ -2269,9 +1864,9 @@ export interface EdgeCollection< * ``` */ document( - selector: DocumentSelector, + selector: documents.DocumentSelector, graceful: boolean - ): Promise>; + ): Promise>; /** * Retrieves the documents matching the given key or id values. 
* @@ -2295,9 +1890,9 @@ export interface EdgeCollection< * ``` */ documents( - selectors: (string | ObjectWithKey)[], - options?: CollectionBatchReadOptions - ): Promise[]>; + selectors: (string | documents.ObjectWithDocumentKey)[], + options?: documents.BulkReadDocumentsOptions + ): Promise[]>; /** * Inserts a new document with the given `data` into the collection. * @@ -2315,12 +1910,12 @@ export interface EdgeCollection< * ``` */ save( - data: EdgeData, - options?: CollectionInsertOptions + data: documents.EdgeData, + options?: documents.InsertDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; } >; /** @@ -2343,15 +1938,15 @@ export interface EdgeCollection< * ``` */ saveAll( - data: Array>, - options?: CollectionInsertOptions + data: Array>, + options?: documents.InsertDocumentOptions ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + | (documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -2387,13 +1982,13 @@ export interface EdgeCollection< * ``` */ replace( - selector: DocumentSelector, - newData: DocumentData, - options?: CollectionReplaceOptions + selector: documents.DocumentSelector, + newData: documents.DocumentData, + options?: documents.ReplaceDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; } >; /** @@ -2438,16 +2033,17 @@ export interface EdgeCollection< */ replaceAll( newData: Array< - DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & + ({ _key: string } | { _id: string }) >, - options?: CollectionReplaceOptions + options?: documents.ReplaceDocumentOptions ): Promise< Array< - | (DocumentOperationMetadata & { - new?: 
Edge; - old?: Edge; + | (documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -2483,13 +2079,13 @@ export interface EdgeCollection< * ``` */ update( - selector: DocumentSelector, - newData: Patch>, - options?: CollectionUpdateOptions + selector: documents.DocumentSelector, + newData: documents.Patch>, + options?: documents.UpdateDocumentOptions ): Promise< - DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; } >; /** @@ -2532,16 +2128,17 @@ export interface EdgeCollection< */ updateAll( newData: Array< - Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & + ({ _key: string } | { _id: string }) >, - options?: CollectionUpdateOptions + options?: documents.UpdateDocumentOptions ): Promise< Array< - | (DocumentOperationMetadata & { - new?: Edge; - old?: Edge; + | (documents.DocumentOperationMetadata & { + new?: documents.Edge; + old?: documents.Edge; }) - | DocumentOperationFailure + | documents.DocumentOperationFailure > >; /** @@ -2564,9 +2161,11 @@ export interface EdgeCollection< * ``` */ remove( - selector: DocumentSelector, - options?: CollectionRemoveOptions - ): Promise }>; + selector: documents.DocumentSelector, + options?: documents.RemoveDocumentOptions + ): Promise< + documents.DocumentMetadata & { old?: documents.Edge } + >; /** * Removes existing documents from the collection. 
* @@ -2586,12 +2185,12 @@ export interface EdgeCollection< * ``` */ removeAll( - selectors: DocumentSelector[], - options?: CollectionRemoveOptions + selectors: documents.DocumentSelector[], + options?: documents.RemoveDocumentOptions ): Promise< Array< - | (DocumentMetadata & { old?: Edge }) - | DocumentOperationFailure + | (documents.DocumentMetadata & { old?: documents.Edge }) + | documents.DocumentOperationFailure > >; /** @@ -2613,9 +2212,9 @@ export interface EdgeCollection< * ``` */ import( - data: EdgeData[], - options?: CollectionImportOptions - ): Promise; + data: documents.EdgeData[], + options?: documents.ImportDocumentsOptions + ): Promise; /** * Bulk imports the given `data` into the collection. * @@ -2639,8 +2238,8 @@ export interface EdgeCollection< */ import( data: any[][], - options?: CollectionImportOptions - ): Promise; + options?: documents.ImportDocumentsOptions + ): Promise; /** * Bulk imports the given `data` into the collection. * @@ -2696,16 +2295,16 @@ export interface EdgeCollection< */ import( data: Buffer | Blob | string, - options?: CollectionImportOptions & { + options?: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; } - ): Promise; + ): Promise; //#endregion - //#region edges + //#region Edge operations /** - * Retrieves a list of all edges of the document matching the given - * `selector`. + * Retrieves a list of all edges in this collection of the document matching + * the given `selector`. * * Throws an exception when passed a document or `_id` from a different * collection. @@ -2729,9 +2328,11 @@ export interface EdgeCollection< * ``` */ edges( - selector: DocumentSelector, - options?: CollectionEdgesOptions - ): Promise>>; + selector: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ): Promise< + connection.ArangoApiResponse> + >; /** * Retrieves a list of all incoming edges of the document matching the given * `selector`. 
@@ -2758,9 +2359,11 @@ export interface EdgeCollection< * ``` */ inEdges( - selector: DocumentSelector, - options?: CollectionEdgesOptions - ): Promise>>; + selector: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ): Promise< + connection.ArangoApiResponse> + >; /** * Retrieves a list of all outgoing edges of the document matching the given * `selector`. @@ -2787,13 +2390,16 @@ export interface EdgeCollection< * ``` */ outEdges( - selector: DocumentSelector, - options?: CollectionEdgesOptions - ): Promise>>; - + selector: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ): Promise< + connection.ArangoApiResponse> + >; //#endregion } +//#endregion +//#region Collection class /** * @internal */ @@ -2805,20 +2411,17 @@ export class Collection< EdgeCollection, DocumentCollection { - //#region attributes protected _name: string; - protected _db: Database; - //#endregion + protected _db: databases.Database; /** * @internal */ - constructor(db: Database, name: string) { + constructor(db: databases.Database, name: string) { this._name = name; this._db = db; } - //#region metadata get isArangoCollection(): true { return true; } @@ -2831,9 +2434,10 @@ export class Collection< return this._name; } + //#region Collection operations get() { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}`, }); } @@ -2842,7 +2446,7 @@ export class Collection< await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === COLLECTION_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === COLLECTION_NOT_FOUND) { return false; } throw err; @@ -2861,13 +2465,13 @@ export class Collection< } = options; if (opts.computedValues) { opts.computedValues = opts.computedValues.map((computedValue) => { - if (isAqlLiteral(computedValue.expression)) { + if (aql.isAqlLiteral(computedValue.expression)) { return { 
...computedValue, expression: computedValue.expression.toAQL(), }; } - if (isAqlQuery(computedValue.expression)) { + if (aql.isAqlQuery(computedValue.expression)) { return { ...computedValue, expression: computedValue.expression.query, @@ -2885,7 +2489,7 @@ export class Collection< } return this._db.request({ method: "POST", - path: "/_api/collection", + pathname: "/_api/collection", search, body: { ...opts, @@ -2896,26 +2500,28 @@ export class Collection< properties( properties?: CollectionPropertiesOptions - ): Promise> { + ): Promise< + connection.ArangoApiResponse + > { if (!properties) { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/properties`, }); } return this._db.request({ method: "PUT", - path: `/_api/collection/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/properties`, body: properties, }); } count(): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { count: number } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { count: number } > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/count`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/count`, }); } @@ -2923,7 +2529,7 @@ export class Collection< return this._db.request( { method: "PUT", - path: `/_api/collection/${encodeURIComponent( + pathname: `/_api/collection/${encodeURIComponent( this._name )}/recalculateCount`, }, @@ -2934,61 +2540,49 @@ export class Collection< figures( details = false ): Promise< - CollectionMetadata & - ArangoApiResponse< + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { count: number; figures: Record } - > + > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/figures`, + pathname: 
`/_api/collection/${encodeURIComponent(this._name)}/figures`, search: { details }, }); } revision(): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { revision: string } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { revision: string } > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/revision`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/revision`, }); } checksum( options?: CollectionChecksumOptions ): Promise< - ArangoApiResponse< - CollectionMetadata & { revision: string; checksum: string } + connection.ArangoApiResponse< + CollectionDescription & { revision: string; checksum: string } > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/checksum`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/checksum`, search: options, }); } - async loadIndexes(): Promise { - return this._db.request( - { - method: "PUT", - path: `/_api/collection/${encodeURIComponent( - this._name - )}/loadIndexesIntoMemory`, - }, - (res) => res.parsedBody.result - ); - } - shards( details?: boolean ): Promise< - ArangoApiResponse< - CollectionMetadata & CollectionProperties & { shards: any } + connection.ArangoApiResponse< + CollectionDescription & CollectionProperties & { shards: any } > > { return this._db.request({ - path: `/_api/collection/${encodeURIComponent(this._name)}/shards`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}/shards`, search: { details }, }); } @@ -3000,32 +2594,39 @@ export class Collection< } truncate( - options?: CollectionTruncateOptions - ): Promise> { + options?: TruncateCollectionOptions + ): Promise> { return this._db.request({ method: "PUT", - path: `/_api/collection/${this._name}/truncate`, + pathname: `/_api/collection/${this._name}/truncate`, search: options, }); } - drop(options?: CollectionDropOptions) { + drop(options?: DropCollectionOptions) { return 
this._db.request({ method: "DELETE", - path: `/_api/collection/${encodeURIComponent(this._name)}`, + pathname: `/_api/collection/${encodeURIComponent(this._name)}`, search: options, }); } + + compact() { + return this._db.request({ + method: "PUT", + pathname: `/_api/collection/${this._name}/compact`, + }); + } //#endregion - //#region crud + //#region Document operations getResponsibleShard( - document: Partial> + document: Partial> ): Promise { return this._db.request( { method: "PUT", - path: `/_api/collection/${encodeURIComponent( + pathname: `/_api/collection/${encodeURIComponent( this._name )}/responsibleShard`, body: document, @@ -3034,13 +2635,13 @@ export class Collection< ); } - documentId(selector: DocumentSelector): string { - return _documentHandle(selector, this._name); + documentId(selector: documents.DocumentSelector): string { + return documents._documentHandle(selector, this._name); } async documentExists( - selector: DocumentSelector, - options: DocumentExistsOptions = {} + selector: documents.DocumentSelector, + options: documents.DocumentExistsOptions = {} ): Promise { const { ifMatch = undefined, ifNoneMatch = undefined } = options; const headers = {} as Record; @@ -3050,14 +2651,14 @@ export class Collection< return await this._db.request( { method: "HEAD", - path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + pathname: `/_api/document/${encodeURI( + documents._documentHandle(selector, this._name) )}`, headers, }, (res) => { if (ifNoneMatch && res.status === 304) { - throw new HttpError(res); + throw new errors.HttpError(res); } return true; } @@ -3071,13 +2672,13 @@ export class Collection< } documents( - selectors: (string | ObjectWithKey)[], - options: CollectionBatchReadOptions = {} + selectors: (string | documents.ObjectWithDocumentKey)[], + options: documents.BulkReadDocumentsOptions = {} ) { const { allowDirtyRead = undefined } = options; return this._db.request({ method: "PUT", - path: 
`/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, search: { onlyget: true }, allowDirtyRead, body: selectors, @@ -3085,8 +2686,8 @@ export class Collection< } async document( - selector: DocumentSelector, - options: boolean | CollectionReadOptions = {} + selector: documents.DocumentSelector, + options: boolean | documents.ReadDocumentOptions = {} ) { if (typeof options === "boolean") { options = { graceful: options }; @@ -3102,15 +2703,15 @@ export class Collection< if (ifNoneMatch) headers["if-none-match"] = ifNoneMatch; const result = this._db.request( { - path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + pathname: `/_api/document/${encodeURI( + documents._documentHandle(selector, this._name) )}`, headers, allowDirtyRead, }, (res) => { if (ifNoneMatch && res.status === 304) { - throw new HttpError(res); + throw new errors.HttpError(res); } return res.parsedBody; } @@ -3119,18 +2720,21 @@ export class Collection< try { return await result; } catch (err: any) { - if (isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { return null; } throw err; } } - save(data: DocumentData, options?: CollectionInsertOptions) { + save( + data: documents.DocumentData, + options?: documents.InsertDocumentOptions + ) { return this._db.request( { method: "POST", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: data, search: options, }, @@ -3139,13 +2743,13 @@ export class Collection< } saveAll( - data: Array>, - options?: CollectionInsertOptions + data: Array>, + options?: documents.InsertDocumentOptions ) { return this._db.request( { method: "POST", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: data, search: options, }, @@ -3154,9 +2758,9 @@ export 
class Collection< } replace( - selector: DocumentSelector, - newData: DocumentData, - options: CollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newData: documents.DocumentData, + options: documents.ReplaceDocumentOptions = {} ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; @@ -3164,8 +2768,8 @@ export class Collection< return this._db.request( { method: "PUT", - path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + pathname: `/_api/document/${encodeURI( + documents._documentHandle(selector, this._name) )}`, headers, body: newData, @@ -3177,14 +2781,15 @@ export class Collection< replaceAll( newData: Array< - DocumentData & ({ _key: string } | { _id: string }) + documents.DocumentData & + ({ _key: string } | { _id: string }) >, - options?: CollectionReplaceOptions + options?: documents.ReplaceDocumentOptions ) { return this._db.request( { method: "PUT", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: newData, search: options, }, @@ -3193,9 +2798,9 @@ export class Collection< } update( - selector: DocumentSelector, - newData: Patch>, - options: CollectionUpdateOptions = {} + selector: documents.DocumentSelector, + newData: documents.Patch>, + options: documents.UpdateDocumentOptions = {} ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; @@ -3203,8 +2808,8 @@ export class Collection< return this._db.request( { method: "PATCH", - path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + pathname: `/_api/document/${encodeURI( + documents._documentHandle(selector, this._name) )}`, headers, body: newData, @@ -3216,14 +2821,15 @@ export class Collection< updateAll( newData: Array< - Patch> & ({ _key: string } | { _id: string }) + documents.Patch> & + ({ _key: string } | { _id: string }) >, - options?: CollectionUpdateOptions + options?: 
documents.UpdateDocumentOptions ) { return this._db.request( { method: "PATCH", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: newData, search: options, }, @@ -3231,15 +2837,18 @@ export class Collection< ); } - remove(selector: DocumentSelector, options: CollectionRemoveOptions = {}) { + remove( + selector: documents.DocumentSelector, + options: documents.RemoveDocumentOptions = {} + ) { const { ifMatch = undefined, ...opts } = options; const headers = {} as Record; if (ifMatch) headers["if-match"] = ifMatch; return this._db.request( { method: "DELETE", - path: `/_api/document/${encodeURI( - _documentHandle(selector, this._name) + pathname: `/_api/document/${encodeURI( + documents._documentHandle(selector, this._name) )}`, headers, search: opts, @@ -3249,13 +2858,13 @@ export class Collection< } removeAll( - selectors: (string | ObjectWithKey)[], - options?: CollectionRemoveOptions + selectors: (string | documents.ObjectWithDocumentKey)[], + options?: documents.RemoveDocumentOptions ) { return this._db.request( { method: "DELETE", - path: `/_api/document/${encodeURIComponent(this._name)}`, + pathname: `/_api/document/${encodeURIComponent(this._name)}`, body: selectors, search: options, }, @@ -3265,10 +2874,10 @@ export class Collection< import( data: Buffer | Blob | string | any[], - options: CollectionImportOptions & { + options: documents.ImportDocumentsOptions & { type?: "documents" | "list" | "auto"; } = {} - ): Promise { + ): Promise { const search = { ...options, collection: this._name }; if (Array.isArray(data)) { search.type = Array.isArray(data[0]) ? 
undefined : "documents"; @@ -3277,7 +2886,7 @@ export class Collection< } return this._db.request({ method: "POST", - path: "/_api/import", + pathname: "/_api/import", body: data, isBinary: true, search, @@ -3285,85 +2894,89 @@ export class Collection< } //#endregion - //#region edges + //#region Edge operations protected _edges( - selector: DocumentSelector, - options: CollectionEdgesOptions = {}, + selector: documents.DocumentSelector, + options: documents.DocumentEdgesOptions = {}, direction?: "in" | "out" ) { const { allowDirtyRead = undefined } = options; return this._db.request({ - path: `/_api/edges/${encodeURIComponent(this._name)}`, + pathname: `/_api/edges/${encodeURIComponent(this._name)}`, allowDirtyRead, search: { direction, - vertex: _documentHandle(selector, this._name, false), + vertex: documents._documentHandle(selector, this._name, false), }, }); } - edges(vertex: DocumentSelector, options?: CollectionEdgesOptions) { + edges( + vertex: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ) { return this._edges(vertex, options); } - inEdges(vertex: DocumentSelector, options?: CollectionEdgesOptions) { + inEdges( + vertex: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ) { return this._edges(vertex, options, "in"); } - outEdges(vertex: DocumentSelector, options?: CollectionEdgesOptions) { + outEdges( + vertex: documents.DocumentSelector, + options?: documents.DocumentEdgesOptions + ) { return this._edges(vertex, options, "out"); } //#endregion - //#region indexes - indexes(options?: IndexListOptions) { + //#region Index operations + async loadIndexes(): Promise { + return this._db.request( + { + method: "PUT", + pathname: `/_api/collection/${encodeURIComponent( + this._name + )}/loadIndexesIntoMemory`, + }, + (res) => res.parsedBody.result + ); + } + + indexes(options?: indexes.ListIndexesOptions) { return this._db.request( { - path: "/_api/index", + pathname: "/_api/index", search: { collection: 
this._name, ...options }, }, (res) => res.parsedBody.indexes ); } - index(selector: IndexSelector) { + index(selector: indexes.IndexSelector) { return this._db.request({ - path: `/_api/index/${encodeURI(_indexHandle(selector, this._name))}`, + pathname: `/_api/index/${encodeURI(indexes._indexHandle(selector, this._name))}`, }); } - ensureIndex( - options: - | EnsurePersistentIndexOptions - | EnsureGeoIndexOptions - | EnsureTtlIndexOptions - | EnsureMdiIndexOptions - | EnsureMdiPrefixedIndexOptions - | EnsureInvertedIndexOptions - ) { + ensureIndex(options: indexes.EnsureIndexOptions) { return this._db.request({ method: "POST", - path: "/_api/index", + pathname: "/_api/index", body: options, search: { collection: this._name }, }); } - dropIndex(selector: IndexSelector) { + dropIndex(selector: indexes.IndexSelector) { return this._db.request({ method: "DELETE", - path: `/_api/index/${encodeURI(_indexHandle(selector, this._name))}`, + pathname: `/_api/index/${encodeURI(indexes._indexHandle(selector, this._name))}`, }); } - - compact() { - return this._db.request( - { - method: "PUT", - path: `/_api/collection/${this._name}/compact`, - }, - (res) => res.parsedBody - ); - } //#endregion } +//#endregion diff --git a/src/configuration.ts b/src/configuration.ts new file mode 100644 index 000000000..69ee050c5 --- /dev/null +++ b/src/configuration.ts @@ -0,0 +1,195 @@ +/** + * ```ts + * import type { ConfigOptions } from "arangojs/configuration"; + * ``` + * + * The "configuration" module provides configuration related types for + * TypeScript. + * + * @packageDocumentation + */ +import * as connection from "./connection.js"; + +//#region Shared types +/** + * Determines the behavior when multiple URLs are used: + * + * - `"NONE"`: No load balancing. All requests will be handled by the first + * URL in the list until a network error is encountered. On network error, + * arangojs will advance to using the next URL in the list. 
+ * + * - `"ONE_RANDOM"`: Randomly picks one URL from the list initially, then + * behaves like `"NONE"`. + * + * - `"ROUND_ROBIN"`: Every sequential request uses the next URL in the list. + */ +export type LoadBalancingStrategy = "NONE" | "ROUND_ROBIN" | "ONE_RANDOM"; +//#endregion + +//#region Credentials +/** + * Credentials for HTTP Basic authentication. + */ +export type BasicAuthCredentials = { + /** + * Username to use for authentication, e.g. `"root"`. + */ + username: string; + /** + * Password to use for authentication. Defaults to an empty string. + */ + password?: string; +}; + +/** + * Credentials for HTTP Bearer token authentication. + */ +export type BearerAuthCredentials = { + /** + * Bearer token to use for authentication. + */ + token: string; +}; + +/** + * Determines if the given credentials are for Bearer token authentication. + */ +export function isBearerAuth( + auth: BasicAuthCredentials | BearerAuthCredentials, +): auth is BearerAuthCredentials { + return auth.hasOwnProperty("token"); +} +//#endregion + +//#region Config +/** + * Options for configuring arangojs. + */ +export type ConfigOptions = connection.CommonRequestOptions & { + /** + * Name of the database to use. + * + * Default: `"_system"` + */ + databaseName?: string; + /** + * Base URL of the ArangoDB server or list of server URLs. + * + * When working with a cluster, the method {@link databases.Database#acquireHostList} + * can be used to automatically pick up additional coordinators/followers at + * any point. + * + * When running ArangoDB on a unix socket, e.g. 
`/tmp/arangodb.sock`, the + * following URL formats are supported for unix sockets: + * + * - `unix:///tmp/arangodb.sock` (no SSL) + * - `http+unix:///tmp/arangodb.sock` (or `https+unix://` for SSL) + * - `http://unix:/tmp/arangodb.sock` (or `https://unix:` for SSL) + * + * Additionally `ssl` and `tls` are treated as synonymous with `https` and + * `tcp` is treated as synonymous with `http`, so the following URLs are + * considered identical: + * + * - `tcp://127.0.0.1:8529` and `http://127.0.0.1:8529` + * - `ssl://127.0.0.1:8529` and `https://127.0.0.1:8529` + * - `tcp+unix:///tmp/arangodb.sock` and `http+unix:///tmp/arangodb.sock` + * - `ssl+unix:///tmp/arangodb.sock` and `https+unix:///tmp/arangodb.sock` + * - `tcp://unix:/tmp/arangodb.sock` and `http://unix:/tmp/arangodb.sock` + * - `ssl://unix:/tmp/arangodb.sock` and `https://unix:/tmp/arangodb.sock` + * + * See also `auth` for passing authentication credentials. + * + * Default: `"http://127.0.0.1:8529"` + */ + url?: string | string[]; + /** + * Credentials to use for authentication. + * + * See also {@link databases.Database#useBasicAuth} and + * {@link databases.Database#useBearerAuth}. + * + * Default: `{ username: "root", password: "" }` + */ + auth?: BasicAuthCredentials | BearerAuthCredentials; + /** + * Numeric representation of the ArangoDB version the driver should expect. + * The format is defined as `XYYZZ` where `X` is the major version, `Y` is + * the zero-filled two-digit minor version and `Z` is the zero-filled two-digit + * bugfix version, e.g. `30102` for 3.1.2, `20811` for 2.8.11. + * + * Depending on this value certain methods may become unavailable or change + * their behavior to remain compatible with different versions of ArangoDB. + * + * Default: `31100` + */ + arangoVersion?: number; + /** + * Determines the behavior when multiple URLs are provided: + * + * - `"NONE"`: No load balancing. 
All requests will be handled by the first + * URL in the list until a network error is encountered. On network error, + * arangojs will advance to using the next URL in the list. + * + * - `"ONE_RANDOM"`: Randomly picks one URL from the list initially, then + * behaves like `"NONE"`. + * + * - `"ROUND_ROBIN"`: Every sequential request uses the next URL in the list. + * + * Default: `"NONE"` + */ + loadBalancingStrategy?: LoadBalancingStrategy; + /** + * Maximum number of parallel requests arangojs will perform. If any + * additional requests are attempted, they will be enqueued until one of the + * active requests has completed. + * + * **Note:** when using `ROUND_ROBIN` load balancing and passing an array of + * URLs in the `url` option, the default value of this option will be set to + * `3 * url.length` instead of `3`. + * + * Default: `3` + */ + poolSize?: number; + /** + * Default options to pass to the `fetch` function when making requests. + * + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. + */ + fetchOptions?: connection.CommonFetchOptions; + /** + * If set, arangojs will use the [`undici`](https://www.npmjs.com/package/undici) + * package to make requests and the provided options will be used to create + * the `undici` agent. + * + * See [the `undici` documentation](https://undici.nodejs.org/#/docs/api/Agent?id=parameter-agentoptions) + * for more information on the available options. + */ + agentOptions?: any; + /** + * Callback that will be invoked when a request results in an error. + * + * @param err - Error encountered when handling this request. + */ + onError?: (err: Error) => void | Promise; + /** + * If set to `true`, arangojs will generate stack traces every time a request + * is initiated and augment the stack traces of any errors it generates. 
+ * + * **Warning**: This will cause arangojs to generate stack traces in advance + * even if the request does not result in an error. Generating stack traces + * may negatively impact performance. + */ + precaptureStackTraces?: boolean; + /** + * Limits the number of values of server-reported response queue times that + * will be stored and accessible using {@link databases.Database#queueTime}. If set to + * a finite value, older values will be discarded to make room for new values + * when that limit is reached. + * + * Default: `10` + */ + responseQueueTimeSamples?: number; +}; +//#endregion diff --git a/src/connection.ts b/src/connection.ts index 2565226e3..a73557f98 100644 --- a/src/connection.ts +++ b/src/connection.ts @@ -1,53 +1,321 @@ /** * ```ts - * import type { Config } from "arangojs/connection.js"; + * import type { ArangoApiResponse } from "arangojs/connection"; * ``` * - * The "connection" module provides connection and configuration related types - * for TypeScript. + * The "connection" module provides connection related types for TypeScript. 
* * @packageDocumentation */ -import { LinkedList } from "./lib/linkedList.js"; -import { Database } from "./database.js"; -import { - ArangoError, - HttpError, - isArangoError, - isArangoErrorResponse, - isSystemError, -} from "./error.js"; -import { - ERROR_ARANGO_CONFLICT, - ERROR_ARANGO_MAINTENANCE_MODE, -} from "./lib/codes.js"; -import { normalizeUrl } from "./lib/normalizeUrl.js"; -import { - ArangojsError, - ArangojsResponse, - createRequest, - RequestConfig, - RequestFunction, -} from "./lib/request.js"; -import { joinPath } from "./lib/joinPath.js"; -import { mergeHeaders } from "./lib/mergeHeaders.js"; +import * as administration from "./administration.js"; +import * as configuration from "./configuration.js"; +import * as databases from "./databases.js"; +import * as errors from "./errors.js"; +import { ERROR_ARANGO_CONFLICT } from "./lib/codes.js"; +import * as util from "./lib/util.js"; +import { LinkedList } from "./lib/x3-linkedlist.js"; const MIME_JSON = /\/(json|javascript)(\W|$)/; const LEADER_ENDPOINT_HEADER = "x-arango-endpoint"; +const REASON_TIMEOUT = "timeout"; +//#region Host /** - * Determines the behavior when multiple URLs are used: + * @internal + */ +type Host = { + /** + * @internal + * + * Perform a fetch request against this host. + * + * @param pathname - URL path, relative to the server URL. + * @param options - Options for this fetch request. + */ + fetch: ( + options: Omit< + RequestOptions, + | "maxRetries" + | "retryOnConflict" + | "allowDirtyRead" + | "hostUrl" + | "expectBinary" + | "isBinary" + > + ) => Promise; + /** + * @internal + * + * Close the pending request, if any. + */ + close: () => void; +}; + +/** + * @internal * - * - `"NONE"`: No load balancing. All requests will be handled by the first - * URL in the list until a network error is encountered. On network error, - * arangojs will advance to using the next URL in the list. + * Create a function for performing fetch requests against a given host. 
* - * - `"ONE_RANDOM"`: Randomly picks one URL from the list initially, then - * behaves like `"NONE"`. + * @param arangojsHostUrl - Base URL of the host, i.e. protocol, port and domain name. + * @param options - Options to use for all fetch requests. + */ +function createHost(arangojsHostUrl: string, agentOptions?: any): Host { + const baseUrl = new URL(arangojsHostUrl); + let fetch = globalThis.fetch; + let createDispatcher: (() => Promise) | undefined; + let dispatcher: any; + let socketPath: string | undefined; + if (arangojsHostUrl.match(/^\w+:\/\/unix:\//)) { + socketPath = baseUrl.pathname; + baseUrl.hostname = "localhost"; + baseUrl.pathname = "/"; + agentOptions = { + ...agentOptions, + connect: { + ...agentOptions?.connect, + socketPath, + }, + }; + } + if (agentOptions) { + createDispatcher = async () => { + let undici: any; + try { + // Prevent overzealous bundlers from attempting to bundle undici + const undiciName = "undici"; + undici = await import(undiciName); + } catch (cause) { + if (socketPath) { + throw new Error("Undici is required for Unix domain sockets", { + cause, + }); + } + throw new Error("Undici is required when using config.agentOptions", { + cause, + }); + } + fetch = undici.fetch; + return new undici.Agent(agentOptions); + }; + } + const pending = new Map(); + return { + async fetch({ + method, + pathname, + search, + headers: requestHeaders, + body, + timeout, + fetchOptions, + beforeRequest, + afterResponse, + }: Omit< + RequestOptions, + | "maxRetries" + | "retryOnConflict" + | "allowDirtyRead" + | "hostUrl" + | "expectBinary" + | "isBinary" + >) { + const url = new URL(pathname + baseUrl.search, baseUrl); + if (search) { + const searchParams = + search instanceof URLSearchParams + ? 
search + : new URLSearchParams(search); + for (const [key, value] of searchParams) { + url.searchParams.append(key, value); + } + } + const headers = new Headers(requestHeaders); + if (!headers.has("authorization")) { + headers.set( + "authorization", + `Basic ${btoa( + `${baseUrl.username || "root"}:${baseUrl.password || ""}` + )}` + ); + } + const abortController = new AbortController(); + const signal = abortController.signal; + if (createDispatcher) { + dispatcher = await createDispatcher(); + createDispatcher = undefined; + } + const request = new Request(url, { + ...fetchOptions, + dispatcher, + method, + headers, + body, + signal, + } as globalThis.RequestInit); + if (beforeRequest) { + const p = beforeRequest(request); + if (p instanceof Promise) await p; + } + const requestId = util.generateRequestId(); + pending.set(requestId, abortController); + let clearTimer: (() => void) | undefined; + if (timeout) { + clearTimer = util.createTimer(timeout, () => { + clearTimer = undefined; + abortController.abort(REASON_TIMEOUT); + }); + } + let response: globalThis.Response & { request: globalThis.Request }; + try { + response = Object.assign(await fetch(request), { + request, + arangojsHostUrl, + }); + if (fetchOptions?.redirect === "manual" && isRedirect(response)) { + throw new errors.HttpError(response); + } + } catch (e: unknown) { + const cause = e instanceof Error ? e : new Error(String(e)); + let error: errors.NetworkError; + if (cause instanceof errors.NetworkError) { + error = cause; + } else if (signal.aborted) { + const reason = + typeof signal.reason == "string" ? 
signal.reason : undefined; + if (reason === REASON_TIMEOUT) { + error = new errors.ResponseTimeoutError(undefined, request, { + cause, + }); + } else { + error = new errors.RequestAbortedError(reason, request, { cause }); + } + } else if (cause instanceof TypeError) { + error = new errors.FetchFailedError(undefined, request, { cause }); + } else { + error = new errors.NetworkError(cause.message, request, { cause }); + } + if (afterResponse) { + const p = afterResponse(error); + if (p instanceof Promise) await p; + } + throw error; + } finally { + clearTimer?.(); + pending.delete(requestId); + } + if (afterResponse) { + const p = afterResponse(null, response); + if (p instanceof Promise) await p; + } + return response; + }, + close() { + if (!pending.size) return; + const controllers = [...pending.values()]; + pending.clear(); + for (const controller of controllers) { + try { + controller.abort(); + } catch (e) { + // noop + } + } + }, + }; +} +//#endregion + +//#region Response types +const STATUS_CODE_DEFAULT_MESSAGES = { + 0: "Network Error", + 300: "Multiple Choices", + 301: "Moved Permanently", + 302: "Found", + 303: "See Other", + 304: "Not Modified", + 307: "Temporary Redirect", + 308: "Permanent Redirect", + 400: "Bad Request", + 401: "Unauthorized", + 402: "Payment Required", + 403: "Forbidden", + 404: "Not Found", + 405: "Method Not Allowed", + 406: "Not Acceptable", + 407: "Proxy Authentication Required", + 408: "Request Timeout", + 409: "Conflict", + 410: "Gone", + 411: "Length Required", + 412: "Precondition Failed", + 413: "Payload Too Large", + 414: "Request-URI Too Long", + 415: "Unsupported Media Type", + 416: "Requested Range Not Satisfiable", + 417: "Expectation Failed", + 418: "I'm a teapot", + 421: "Misdirected Request", + 422: "Unprocessable Entity", + 423: "Locked", + 424: "Failed Dependency", + 426: "Upgrade Required", + 428: "Precondition Required", + 429: "Too Many Requests", + 431: "Request Header Fields Too Large", + 444: "Connection 
Closed Without Response", + 451: "Unavailable For Legal Reasons", + 499: "Client Closed Request", + 500: "Internal Server Error", + 501: "Not Implemented", + 502: "Bad Gateway", + 503: "Service Unavailable", + 504: "Gateway Timeout", + 505: "HTTP Version Not Supported", + 506: "Variant Also Negotiates", + 507: "Insufficient Storage", + 508: "Loop Detected", + 510: "Not Extended", + 511: "Network Authentication Required", + 599: "Network Connect Timeout Error", +}; + +type KnownStatusCode = keyof typeof STATUS_CODE_DEFAULT_MESSAGES; +const KNOWN_STATUS_CODES = Object.keys(STATUS_CODE_DEFAULT_MESSAGES).map((k) => + Number(k) +) as KnownStatusCode[]; +const REDIRECT_CODES = [301, 302, 303, 307, 308] satisfies KnownStatusCode[]; +type RedirectStatusCode = (typeof REDIRECT_CODES)[number]; + +/** + * @internal * - * - `"ROUND_ROBIN"`: Every sequential request uses the next URL in the list. + * Indicates whether the given status code can be translated to a known status + * message. */ -export type LoadBalancingStrategy = "NONE" | "ROUND_ROBIN" | "ONE_RANDOM"; +function isKnownStatusCode(code: number): code is KnownStatusCode { + return KNOWN_STATUS_CODES.includes(code as KnownStatusCode); +} + +/** + * @internal + * + * Indicates whether the given status code represents a redirect. + */ +function isRedirect(response: ProcessedResponse): boolean { + return REDIRECT_CODES.includes(response.status as RedirectStatusCode); +} + +/** + * Returns the status message for the given response's status code or the + * status text of the response. + */ +export function getStatusMessage(response: ProcessedResponse): string { + if (isKnownStatusCode(response.status)) { + return STATUS_CODE_DEFAULT_MESSAGES[response.status]; + } + if (response.statusText) return response.statusText; + return "Unknown response status"; +} /** * Generic properties shared by all ArangoDB HTTP API responses. 
@@ -69,220 +337,188 @@ export type ArangoResponseMetadata = { export type ArangoApiResponse = T & ArangoResponseMetadata; /** - * Credentials for HTTP Basic authentication. + * Indicates whether the given value represents an ArangoDB error response. */ -export type BasicAuthCredentials = { +export function isArangoErrorResponse( + body: unknown +): body is ArangoErrorResponse { + if (!body || typeof body !== "object") return false; + const obj = body as Record; + return ( + obj.error === true && + typeof obj.errorMessage === "string" && + typeof obj.errorNum === "number" && + (obj.code === undefined || typeof obj.code === "number") + ); +} + +/** + * Interface representing an ArangoDB error response. + */ +export type ArangoErrorResponse = { /** - * Username to use for authentication, e.g. `"root"`. + * Indicates that the request resulted in an error. */ - username: string; + error: true; /** - * Password to use for authentication. Defaults to an empty string. + * Intended response status code as provided in the response body. */ - password?: string; -}; - -/** - * Credentials for HTTP Bearer token authentication. - */ -export type BearerAuthCredentials = { + code?: number; /** - * Bearer token to use for authentication. + * Error message as provided in the response body. */ - token: string; + errorMessage: string; + /** + * ArangoDB error code as provided in the response body. + * + * See the [ArangoDB error documentation](https://docs.arangodb.com/stable/develop/error-codes-and-meanings/) + * for more information. + */ + errorNum: number; }; -function isBearerAuth(auth: any): auth is BearerAuthCredentials { - return auth.hasOwnProperty("token"); -} - -/** - * @internal - */ -function generateStackTrace() { - let err = new Error(); - if (!err.stack) { - try { - throw err; - } catch (e: any) { - err = e; - } - } - return err; -} - /** - * Options for performing a request with arangojs. + * Processed response object. 
*/ -export type RequestOptions = { +export interface ProcessedResponse extends globalThis.Response { /** * @internal * - * Identifier of a specific ArangoDB host to use when more than one is known. + * Identifier of the ArangoDB host that served this request. */ - hostUrl?: string; + arangojsHostUrl?: string; /** - * HTTP method to use in order to perform the request. - * - * Default: `"GET"` + * Fetch request object. */ - method?: string; + request: globalThis.Request; /** - * Request body data. + * Parsed response body. */ - body?: any; - /** - * If set to `true`, the response body will not be interpreted as JSON and - * instead passed as-is. - */ - expectBinary?: boolean; - /** - * If set to `true`, the request body will not be converted to JSON and - * instead passed as-is. - */ - isBinary?: boolean; + parsedBody?: T; +} +//#endregion + +//#region Request options +/** + * Options available for requests made with the Fetch API. + */ +export type CommonFetchOptions = { /** - * Whether ArangoDB is allowed to perform a dirty read to respond to this - * request. If set to `true`, the response may reflect a dirty state from - * a non-authoritative server. + * Headers object containing any additional headers to send with the request. + * + * Note that the `Authorization` header will be overridden if the `auth` + * configuration option is set. */ - allowDirtyRead?: boolean; + headers?: + | string[][] + | Record> + | Headers; /** - * If set to a positive number, the request will automatically be retried at - * most this many times if it results in a write-write conflict. + * Controls whether the socket should be reused for subsequent requests. * - * Default: `config.retryOnConflict` + * Default: `false` */ - retryOnConflict?: number; + keepalive?: boolean; /** - * HTTP headers to pass along with this request in addition to the default - * headers generated by arangojs. + * Controls what to do when the response status code is a redirect. 
+ * + * - `"error"`: Abort with a network error. + * - `"follow"`: Automatically follow redirects. + * - `"manual"`: Abort with an `HttpError`. + * + * Default: `"follow"` */ - headers?: Headers | Record; + redirect?: "error" | "follow" | "manual"; /** - * Time in milliseconds after which arangojs will abort the request if the - * socket has not already timed out. + * Value to use for the `Referer` header. + * + * If set to `"about:client"`, the default value for the context in which the + * request is made will be used. * - * See also `agentOptions.timeout` in {@link Config}. + * Default: `"about:client"` */ - timeout?: number; + referrer?: string; /** - * Optional prefix path to prepend to the `path`. + * (Browser only.) Controls the Attribution Reporting API specific behavior. + * + * See the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. */ - basePath?: string; + attributionReporting?: any; /** - * URL path, relative to the `basePath` and server domain. + * (Browser only.) Cache mode to use for the request. + * + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. */ - path?: string; + cache?: string; /** - * URL parameters to pass as part of the query string. + * (Browser only.) Controls sending of credentials and cookies. + * + * - `"omit"`: Never send cookies. + * - `"include"`: Always send cookies. + * - `"same-origin"`: Only send cookies if the request is to the same origin. 
+ * + * Default: `"same-origin"` */ - search?: URLSearchParams | Record; -}; - -/** - * @internal - */ -type Task = { - hostUrl?: string; - stack?: () => string; - allowDirtyRead: boolean; - retryOnConflict: number; - resolve: (result: any) => void; - reject: (error: Error) => void; - transform?: (res: ArangojsResponse) => any; - retries: number; - options: { - method: string; - expectBinary: boolean; - timeout?: number; - pathname: string; - search?: URLSearchParams; - headers: Headers; - body: any; - }; -}; - -/** - * Options for configuring arangojs. - */ -export type Config = { + credentials?: "omit" | "include" | "same-origin"; /** - * Name of the database to use. + * (Node.js only.) Undici `Dispatcher` instance to use for the request. * - * Default: `"_system"` + * Defaults to the global dispatcher. */ - databaseName?: string; + dispatcher?: any; /** - * Base URL of the ArangoDB server or list of server URLs. - * - * When working with a cluster, the method {@link database.Database#acquireHostList} - * can be used to automatically pick up additional coordinators/followers at - * any point. - * - * When running ArangoDB on a unix socket, e.g. `/tmp/arangodb.sock`, the - * following URL formats are supported for unix sockets: + * (Browser only.) Sets cross-origin behavior for the request. * - * - `unix:///tmp/arangodb.sock` (no SSL) - * - `http+unix:///tmp/arangodb.sock` (or `https+unix://` for SSL) - * - `http://unix:/tmp/arangodb.sock` (or `https://unix:` for SSL) + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. 
* - * Additionally `ssl` and `tls` are treated as synonymous with `https` and - * `tcp` is treated as synonymous with `http`, so the following URLs are - * considered identical: - * - * - `tcp://127.0.0.1:8529` and `http://127.0.0.1:8529` - * - `ssl://127.0.0.1:8529` and `https://127.0.0.1:8529` - * - `tcp+unix:///tmp/arangodb.sock` and `http+unix:///tmp/arangodb.sock` - * - `ssl+unix:///tmp/arangodb.sock` and `https+unix:///tmp/arangodb.sock` - * - `tcp://unix:/tmp/arangodb.sock` and `http://unix:/tmp/arangodb.sock` - * - `ssl://unix:/tmp/arangodb.sock` and `https://unix:/tmp/arangodb.sock` - * - * See also `auth` for passing authentication credentials. - * - * Default: `"http://127.0.0.1:8529"` + * Default: `"cors"` */ - url?: string | string[]; + mode?: string; /** - * Credentials to use for authentication. + * (Browser only.) Request priority relative to other requests of the same type. * - * See also {@link database.Database#useBasicAuth} and - * {@link database.Database#useBearerAuth}. + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. * - * Default: `{ username: "root", password: "" }` + * Default: `"auto"` */ - auth?: BasicAuthCredentials | BearerAuthCredentials; + priority?: "low" | "high" | "auto"; /** - * Numeric representation of the ArangoDB version the driver should expect. - * The format is defined as `XYYZZ` where `X` is the major version, `Y` is - * the zero-filled two-digit minor version and `Z` is the zero-filled two-digit - * bugfix version, e.g. `30102` for 3.1.2, `20811` for 2.8.11. + * (Browser only.) Policy to use for the `Referer` header, equivalent to the + * semantics of the `Referrer-Policy` header. * - * Depending on this value certain methods may become unavailable or change - * their behavior to remain compatible with different versions of ArangoDB. 
- * - * Default: `31100` + * See [the Fetch API specification](https://fetch.spec.whatwg.org/#request-class) + * or the [MDN Web Docs](https://developer.mozilla.org/en-US/docs/Web/API/RequestInit) + * for more information on the available options. */ - arangoVersion?: number; + referrerPolicy?: string; +}; + +/** + * Fetch-specific options for performing a request with arangojs. + */ +export type FetchOptions = CommonFetchOptions & { /** - * Determines the behavior when multiple URLs are provided: - * - * - `"NONE"`: No load balancing. All requests will be handled by the first - * URL in the list until a network error is encountered. On network error, - * arangojs will advance to using the next URL in the list. - * - * - `"ONE_RANDOM"`: Randomly picks one URL from the list initially, then - * behaves like `"NONE"`. - * - * - `"ROUND_ROBIN"`: Every sequential request uses the next URL in the list. - * - * Default: `"NONE"` + * Subresource integrity value to use for the request, formatted as + * `-`. */ - loadBalancingStrategy?: LoadBalancingStrategy; + integrity?: `${string}-${string}`; +}; + +/** + * Options that can be shared globally for all requests made with arangojs. + */ +export type CommonRequestOptions = { /** * Determines the behavior when a request fails because the underlying * connection to the server could not be opened - * (i.e. [`ECONNREFUSED` in Node.js](https://nodejs.org/api/errors.html#errors_common_system_errors)): + * (e.g. [`ECONNREFUSED` in Node.js](https://nodejs.org/api/errors.html#errors_common_system_errors)): * * - `false`: the request fails immediately. * @@ -309,44 +545,32 @@ export type Config = { */ maxRetries?: false | number; /** - * Maximum number of parallel requests arangojs will perform. If any - * additional requests are attempted, they will be enqueued until one of the - * active requests has completed. 
- * - * **Note:** when using `ROUND_ROBIN` load balancing and passing an array of - * URLs in the `url` option, the default value of this option will be set to - * `3 * url.length` instead of `3`. + * If set to a positive number, requests will automatically be retried at + * most this many times if they result in a write-write conflict. * - * Default: `3` + * Default: `0` */ - poolSize?: number; + retryOnConflict?: number; /** - * (Browser only.) Determines whether credentials (e.g. cookies) will be sent - * with requests to the ArangoDB server. - * - * If set to `same-origin`, credentials will only be included with requests - * on the same URL origin as the invoking script. If set to `include`, - * credentials will always be sent. If set to `omit`, credentials will be - * excluded from all requests. - * - * Default: `same-origin` + * Time in milliseconds after which arangojs will abort the request if the + * socket has not already timed out. */ - credentials?: "omit" | "include" | "same-origin"; + timeout?: number; /** - * If set to `true`, requests will keep the underlying connection open until - * it times out or is closed. In browsers this prevents requests from being - * cancelled when the user navigates away from the page. + * Whether ArangoDB is allowed to perform a dirty read to respond to the + * request. If set to `true`, the response may reflect a dirty state from + * a non-authoritative server. * - * Default: `true` + * Default: `false` */ - keepalive?: boolean; + allowDirtyRead?: boolean; /** * Callback that will be invoked with the finished request object before it * is finalized. In the browser the request may already have been sent. * * @param req - Request object or XHR instance used for this request. 
*/ - beforeRequest?: (req: globalThis.Request) => void; + beforeRequest?: (req: globalThis.Request) => void | Promise; /** * Callback that will be invoked when the server response has been received * and processed or when the request has been failed without a response. @@ -357,40 +581,79 @@ export type Config = { * @param err - Error encountered when handling this request or `null`. * @param res - Response object for this request, if no error occurred. */ - afterResponse?: (err: ArangojsError | null, res?: ArangojsResponse) => void; + afterResponse?: ( + err: errors.NetworkError | null, + res?: globalThis.Response & { request: globalThis.Request } + ) => void | Promise; +}; + +/** + * Options for performing a request with arangojs. + */ +export type RequestOptions = CommonRequestOptions & { /** - * If set to a positive number, requests will automatically be retried at - * most this many times if they result in a write-write conflict. + * @internal * - * Default: `0` + * Identifier of a specific ArangoDB host to use when more than one is known. */ - retryOnConflict?: number; + hostUrl?: string; /** - * An object with additional headers to send with every request. + * HTTP method to use in order to perform the request. * - * If an `"authorization"` header is provided, it will be overridden when - * using {@link database.Database#useBasicAuth}, {@link database.Database#useBearerAuth} or - * the `auth` configuration option. + * Default: `"GET"` */ - headers?: Headers | Record; + method?: string; /** - * If set to `true`, arangojs will generate stack traces every time a request - * is initiated and augment the stack traces of any errors it generates. - * - * **Warning**: This will cause arangojs to generate stack traces in advance - * even if the request does not result in an error. Generating stack traces - * may negatively impact performance. + * URL path, relative to the server domain. 
*/ - precaptureStackTraces?: boolean; + pathname?: string; /** - * Limits the number of values of server-reported response queue times that - * will be stored and accessible using {@link database.Database#queueTime}. If set to - * a finite value, older values will be discarded to make room for new values - * when that limit is reached. + * URL parameters to pass as part of the query string. + */ + search?: URLSearchParams | Record; + /** + * Headers object containing any additional headers to send with the request. * - * Default: `10` + * Note that the `Authorization` header will be overridden if the `auth` + * configuration option is set. */ - responseQueueTimeSamples?: number; + headers?: + | string[][] + | Record> + | Headers; + /** + * Request body data. + */ + body?: any; + /** + * Additional options to pass to the `fetch` function. + */ + fetchOptions?: Omit; + /** + * If set to `true`, the request body will not be converted to JSON and + * instead passed as-is. + */ + isBinary?: boolean; + /** + * If set to `true`, the response body will not be interpreted as JSON and + * instead passed as-is. 
+ */ + expectBinary?: boolean; +}; +//#endregion + +//#region Connection class +/** + * @internal + */ +type Task = { + stack?: () => string; + resolve: (result: T) => void; + reject: (error: unknown) => void; + transform?: (res: ProcessedResponse) => T; + retries: number; + conflicts: number; + options: RequestOptions; }; /** @@ -411,20 +674,19 @@ export function isArangoConnection(connection: any): connection is Connection { */ export class Connection { protected _activeTasks: number = 0; - protected _arangoVersion: number = 31100; - protected _headers: Headers; - protected _loadBalancingStrategy: LoadBalancingStrategy; - protected _maxRetries: number | false; + protected _arangoVersion: number; + protected _loadBalancingStrategy: configuration.LoadBalancingStrategy; protected _taskPoolSize: number; - protected _requestConfig: RequestConfig; - protected _retryOnConflict: number; + protected _commonRequestOptions: CommonRequestOptions; + protected _commonFetchOptions: CommonFetchOptions & { headers: Headers }; protected _queue = new LinkedList(); - protected _databases = new Map(); - protected _hosts: RequestFunction[] = []; + protected _databases = new Map(); + protected _hosts: Host[] = []; protected _hostUrls: string[] = []; protected _activeHostUrl: string; protected _activeDirtyHostUrl: string; protected _transactionId: string | null = null; + protected _onError?: (err: Error) => void | Promise; protected _precaptureStackTraces: boolean; protected _queueTimes = new LinkedList<[number, number]>(); protected _responseQueueTimeSamples: number; @@ -437,51 +699,54 @@ export class Connection { * @param config - An object with configuration options. * */ - constructor(config: Omit = {}) { - const URLS = config.url - ? Array.isArray(config.url) - ? config.url - : [config.url] - : ["http://127.0.0.1:8529"]; - const DEFAULT_POOL_SIZE = - 3 * (config.loadBalancingStrategy === "ROUND_ROBIN" ? 
URLS.length : 1); + constructor(config: Omit = {}) { + const { + url = "http://127.0.0.1:8529", + auth, + arangoVersion = 31100, + loadBalancingStrategy = "NONE", + maxRetries = 0, + poolSize = 3 * + (loadBalancingStrategy === "ROUND_ROBIN" && Array.isArray(url) + ? url.length + : 1), + fetchOptions: { headers, ...commonFetchOptions } = {}, + onError, + precaptureStackTraces = false, + responseQueueTimeSamples = 10, + ...commonRequestOptions + } = config; + const URLS = Array.isArray(url) ? url : [url]; + this._loadBalancingStrategy = loadBalancingStrategy; + this._precaptureStackTraces = precaptureStackTraces; + this._responseQueueTimeSamples = + responseQueueTimeSamples < 0 ? Infinity : responseQueueTimeSamples; + this._arangoVersion = arangoVersion; + this._taskPoolSize = poolSize; + this._onError = onError; - if (config.arangoVersion !== undefined) { - this._arangoVersion = config.arangoVersion; - } - this._taskPoolSize = config.poolSize ?? DEFAULT_POOL_SIZE; - this._requestConfig = { - credentials: config.credentials ?? "same-origin", - keepalive: config.keepalive ?? true, - beforeRequest: config.beforeRequest, - afterResponse: config.afterResponse, + this._commonRequestOptions = commonRequestOptions; + this._commonFetchOptions = { + headers: new Headers(headers), + ...commonFetchOptions, }; - this._headers = new Headers(config.headers); - this._headers.set("x-arango-version", String(this._arangoVersion)); - this._headers.set( + + this._commonFetchOptions.headers.set( + "x-arango-version", + String(arangoVersion) + ); + this._commonFetchOptions.headers.set( "x-arango-driver", `arangojs/${process.env.ARANGOJS_VERSION} (cloud)` ); - this._loadBalancingStrategy = config.loadBalancingStrategy ?? "NONE"; - this._precaptureStackTraces = Boolean(config.precaptureStackTraces); - this._responseQueueTimeSamples = config.responseQueueTimeSamples ?? 10; - this._retryOnConflict = config.retryOnConflict ?? 
0; - if (this._responseQueueTimeSamples < 0) { - this._responseQueueTimeSamples = Infinity; - } - if (config.maxRetries === false) { - this._maxRetries = false; - } else { - this._maxRetries = Number(config.maxRetries ?? 0); - } this.addToHostList(URLS); - if (config.auth) { - if (isBearerAuth(config.auth)) { - this.setBearerAuth(config.auth); + if (auth) { + if (configuration.isBearerAuth(auth)) { + this.setBearerAuth(auth); } else { - this.setBasicAuth(config.auth); + this.setBasicAuth(auth); } } @@ -505,7 +770,7 @@ export class Connection { return true; } - get queueTime() { + get queueTime(): administration.QueueTimeMetrics { return { getLatest: () => this._queueTimes.last?.value[1], getValues: () => Array.from(this._queueTimes.values()), @@ -520,132 +785,134 @@ export class Connection { } protected async _runQueue() { - if (!this._queue.length || this._activeTasks >= this._taskPoolSize) return; - const task = this._queue.shift()!; + if (this._activeTasks >= this._taskPoolSize) return; + const task = this._queue.shift(); + if (!task) return; let hostUrl = this._activeHostUrl; - if (task.hostUrl !== undefined) { - hostUrl = task.hostUrl; - } else if (task.allowDirtyRead) { - hostUrl = this._activeDirtyHostUrl; - this._activeDirtyHostUrl = - this._hostUrls[ - (this._hostUrls.indexOf(this._activeDirtyHostUrl) + 1) % - this._hostUrls.length - ]; - task.options.headers.set("x-arango-allow-dirty-read", "true"); - } else if (this._loadBalancingStrategy === "ROUND_ROBIN") { - this._activeHostUrl = - this._hostUrls[ - (this._hostUrls.indexOf(this._activeHostUrl) + 1) % - this._hostUrls.length - ]; - } - this._activeTasks += 1; try { - const res = await this._hosts[this._hostUrls.indexOf(hostUrl)]( - task.options - ); + this._activeTasks += 1; + if (task.options.hostUrl !== undefined) { + hostUrl = task.options.hostUrl; + } else if (task.options.allowDirtyRead) { + hostUrl = this._activeDirtyHostUrl; + const i = this._hostUrls.indexOf(this._activeDirtyHostUrl) + 1; + 
this._activeDirtyHostUrl = this._hostUrls[i % this._hostUrls.length]; + } else if (this._loadBalancingStrategy === "ROUND_ROBIN") { + const i = this._hostUrls.indexOf(this._activeHostUrl) + 1; + this._activeHostUrl = this._hostUrls[i % this._hostUrls.length]; + } + const host = this._hosts[this._hostUrls.indexOf(hostUrl)]; + const res: globalThis.Response & { + request: globalThis.Request; + arangojsHostUrl: string; + parsedBody?: any; + } = Object.assign(await host.fetch(task.options), { + arangojsHostUrl: hostUrl, + }); const leaderEndpoint = res.headers.get(LEADER_ENDPOINT_HEADER); if (res.status === 503 && leaderEndpoint) { const [cleanUrl] = this.addToHostList(leaderEndpoint); - task.hostUrl = cleanUrl; + task.options.hostUrl = cleanUrl; if (this._activeHostUrl === hostUrl) { this._activeHostUrl = cleanUrl; } this._queue.push(task); - } else { - res.arangojsHostUrl = hostUrl; - const contentType = res.headers.get("content-type"); - const queueTime = res.headers.get("x-arango-queue-time-seconds"); - if (queueTime) { - this._queueTimes.push([Date.now(), Number(queueTime)]); - while (this._responseQueueTimeSamples < this._queueTimes.length) { - this._queueTimes.shift(); - } + return; + } + const queueTime = res.headers.get("x-arango-queue-time-seconds"); + if (queueTime) { + this._queueTimes.push([Date.now(), Number(queueTime)]); + while (this._responseQueueTimeSamples < this._queueTimes.length) { + this._queueTimes.shift(); } - if (res.status >= 400) { + } + const contentType = res.headers.get("content-type"); + if (res.status >= 400) { + if (contentType?.match(MIME_JSON)) { + const errorResponse = res.clone(); + let errorBody: any; try { - if (contentType?.match(MIME_JSON)) { - const errorResponse = res.clone(); - let errorBody: any; - try { - errorBody = await errorResponse.json(); - } catch { - // noop - } - if (isArangoErrorResponse(errorBody)) { - res.parsedBody = errorBody; - throw new ArangoError(res); - } - } - throw new HttpError(res); - } catch (err: 
any) { - if (task.stack) { - err.stack += task.stack(); - } - throw err; + errorBody = await errorResponse.json(); + } catch { + // noop } - } - if (res.body) { - if (task.options.expectBinary) { - res.parsedBody = await res.blob(); - } else if (contentType?.match(MIME_JSON)) { - res.parsedBody = await res.json(); - } else { - res.parsedBody = await res.text(); + if (isArangoErrorResponse(errorBody)) { + res.parsedBody = errorBody; + throw errors.ArangoError.from(res); } } - task.resolve(task.transform ? task.transform(res) : res); + throw new errors.HttpError(res); } - } catch (err: any) { + if (res.body) { + if (task.options.expectBinary) { + res.parsedBody = await res.blob(); + } else if (contentType?.match(MIME_JSON)) { + res.parsedBody = await res.json(); + } else { + res.parsedBody = await res.text(); + } + } + let result: any = res; + if (task.transform) result = task.transform(res); + task.resolve(result); + } catch (e: unknown) { + const err = e as Error; if ( - !task.allowDirtyRead && + !task.options.allowDirtyRead && this._hosts.length > 1 && this._activeHostUrl === hostUrl && this._loadBalancingStrategy !== "ROUND_ROBIN" ) { - this._activeHostUrl = - this._hostUrls[ - (this._hostUrls.indexOf(this._activeHostUrl) + 1) % - this._hostUrls.length - ]; + const i = this._hostUrls.indexOf(this._activeHostUrl) + 1; + this._activeHostUrl = this._hostUrls[i % this._hostUrls.length]; } if ( - isArangoError(err) && + errors.isArangoError(err) && err.errorNum === ERROR_ARANGO_CONFLICT && - task.retryOnConflict > 0 + task.options.retryOnConflict && + task.conflicts < task.options.retryOnConflict ) { - task.retryOnConflict -= 1; + task.conflicts += 1; this._queue.push(task); - } else if ( - ((isSystemError(err) && - err.syscall === "connect" && - err.code === "ECONNREFUSED") || - (isArangoError(err) && - err.errorNum === ERROR_ARANGO_MAINTENANCE_MODE)) && - task.hostUrl === undefined && - this._maxRetries !== false && - task.retries < (this._maxRetries || 
this._hosts.length - 1) + return; + } + if ( + (errors.isNetworkError(err) || errors.isArangoError(err)) && + err.isSafeToRetry && + task.options.hostUrl === undefined && + this._commonRequestOptions.maxRetries !== false && + task.retries < + (this._commonRequestOptions.maxRetries || this._hosts.length - 1) ) { task.retries += 1; this._queue.push(task); - } else { - if (task.stack) { - err.stack += task.stack(); + return; + } + if (task.stack) { + err.stack += task.stack(); + } + if (this._onError) { + try { + const p = this._onError(err); + if (p instanceof Promise) await p; + } catch (e) { + (e as Error).cause = err; + task.reject(e); + return; } - task.reject(err); } + task.reject(err); } finally { this._activeTasks -= 1; + setTimeout(() => this._runQueue(), 0); } - this._runQueue(); } - setBearerAuth(auth: BearerAuthCredentials) { + setBearerAuth(auth: configuration.BearerAuthCredentials) { this.setHeader("authorization", `Bearer ${auth.token}`); } - setBasicAuth(auth: BasicAuthCredentials) { + setBasicAuth(auth: configuration.BasicAuthCredentials) { this.setHeader( "authorization", `Basic ${btoa(`${auth.username}:${auth.password}`)}` @@ -665,26 +932,29 @@ export class Connection { /** * @internal * - * Fetches a {@link database.Database} instance for the given database name from the + * Fetches a {@link databases.Database} instance for the given database name from the * internal cache, if available. * * @param databaseName - Name of the database. */ - database(databaseName: string): Database | undefined; + database(databaseName: string): databases.Database | undefined; /** * @internal * - * Adds a {@link database.Database} instance for the given database name to the + * Adds a {@link databases.Database} instance for the given database name to the * internal cache. * * @param databaseName - Name of the database. * @param database - Database instance to add to the cache. 
*/ - database(databaseName: string, database: Database): Database; + database( + databaseName: string, + database: databases.Database + ): databases.Database; /** * @internal * - * Clears any {@link database.Database} instance stored for the given database name + * Clears any {@link databases.Database} instance stored for the given database name * from the internal cache, if present. * * @param databaseName - Name of the database. @@ -693,8 +963,8 @@ export class Connection { database(databaseName: string, database: null): undefined; database( databaseName: string, - database?: Database | null - ): Database | undefined { + database?: databases.Database | null + ): databases.Database | undefined { if (database === null) { this._databases.delete(databaseName); return undefined; @@ -716,18 +986,14 @@ export class Connection { * @param urls - URLs to use as host list. */ setHostList(urls: string[]): void { - const cleanUrls = urls.map((url) => normalizeUrl(url)); + const cleanUrls = urls.map((url) => util.normalizeUrl(url)); this._hosts.splice( 0, this._hosts.length, ...cleanUrls.map((url) => { const i = this._hostUrls.indexOf(url); if (i !== -1) return this._hosts[i]; - const parsedUrl = new URL(url); - if (!parsedUrl.pathname.endsWith("/")) { - parsedUrl.pathname += "/"; - } - return createRequest(parsedUrl, this._requestConfig); + return createHost(url); }) ); this._hostUrls.splice(0, this._hostUrls.length, ...cleanUrls); @@ -744,21 +1010,13 @@ export class Connection { */ addToHostList(urls: string | string[]): string[] { const cleanUrls = (Array.isArray(urls) ? 
urls : [urls]).map((url) => - normalizeUrl(url) + util.normalizeUrl(url) ); const newUrls = cleanUrls.filter( (url) => this._hostUrls.indexOf(url) === -1 ); this._hostUrls.push(...newUrls); - this._hosts.push( - ...newUrls.map((url: string) => { - const parsedUrl = new URL(url); - if (!parsedUrl.pathname.endsWith("/")) { - parsedUrl.pathname += "/"; - } - return createRequest(parsedUrl, this._requestConfig); - }) - ); + this._hosts.push(...newUrls.map((url) => createHost(url))); return cleanUrls; } @@ -799,9 +1057,9 @@ export class Connection { */ setHeader(headerName: string, value: string | null) { if (value === null) { - this._headers.delete(headerName); + this._commonFetchOptions.headers.delete(headerName); } else { - this._headers.set(headerName, value); + this._commonFetchOptions.headers.set(headerName, value); } } @@ -810,7 +1068,7 @@ export class Connection { * * Closes all open connections. * - * See {@link database.Database#close}. + * See {@link databases.Database#close}. */ close() { for (const host of this._hosts) { @@ -823,7 +1081,7 @@ export class Connection { * * Waits for propagation. * - * See {@link database.Database#waitForPropagation}. + * See {@link databases.Database#waitForPropagation}. * * @param request - Request to perform against each coordinator. * @param timeout - Maximum number of milliseconds to wait for propagation. 
@@ -832,6 +1090,7 @@ export class Connection { const numHosts = this._hosts.length; const propagated = [] as string[]; const started = Date.now(); + const endOfTime = started + timeout; let index = 0; while (true) { if (propagated.length === numHosts) { @@ -842,10 +1101,16 @@ export class Connection { } const hostUrl = this._hostUrls[index]; try { - await this.request({ ...request, hostUrl }); - } catch (e: any) { - if (started + timeout < Date.now()) { - throw e; + await this.request({ + ...request, + hostUrl, + timeout: endOfTime - Date.now(), + }); + } catch (e) { + if (endOfTime < Date.now()) { + throw new errors.PropagationTimeoutError(undefined, { + cause: e as Error, + }); } await new Promise((resolve) => setTimeout(resolve, 1000)); continue; @@ -861,67 +1126,86 @@ export class Connection { * * Performs a request using the arangojs connection pool. */ - request( - { + async request< + T = globalThis.Response & { request: globalThis.Request; parsedBody?: any }, + >( + requestOptions: RequestOptions & { isBinary?: boolean }, + transform?: ( + res: globalThis.Response & { + request: globalThis.Request; + parsedBody?: any; + } + ) => T + ): Promise { + const { hostUrl, - method = "GET", - body, - expectBinary = false, - isBinary = false, allowDirtyRead = false, - retryOnConflict = this._retryOnConflict, + isBinary = false, + maxRetries = 0, + method = "GET", + retryOnConflict = 0, timeout = 0, headers: requestHeaders, - basePath, - path, - search: params, - }: RequestOptions, - transform?: (res: ArangojsResponse) => T - ): Promise { - return new Promise((resolve, reject) => { - const headers = mergeHeaders(this._headers, requestHeaders ?? 
{}); - - if (body && !(body instanceof FormData)) { - let contentType; - if (isBinary) { - contentType = "application/octet-stream"; - } else if (typeof body === "object") { - body = JSON.stringify(body); - contentType = "application/json"; - } else { - body = String(body); - contentType = "text/plain"; - } - if (!headers.has("content-type")) { - headers.set("content-type", contentType); - } - } + body: requestBody, + fetchOptions, + ...taskOptions + } = { ...this._commonRequestOptions, ...requestOptions }; + + const headers = util.mergeHeaders( + this._commonFetchOptions.headers, + requestHeaders + ); - if (this._transactionId) { - headers.set("x-arango-trx-id", this._transactionId); + let body = requestBody; + if (body instanceof FormData) { + const res = new Response(body); + const blob = await res.blob(); + // Workaround for ArangoDB 3.12.0-rc1 and earlier: + // Omitting the final CRLF results in "bad request body" fatal error + body = new Blob([blob, "\r\n"], { type: blob.type }); + } else if (body) { + let contentType; + if (isBinary) { + contentType = "application/octet-stream"; + } else if (typeof body === "object") { + body = JSON.stringify(body); + contentType = "application/json"; + } else { + body = String(body); + contentType = "text/plain"; } + if (!headers.has("content-type")) { + headers.set("content-type", contentType); + } + } + if (this._transactionId) { + headers.set("x-arango-trx-id", this._transactionId); + } + + if (allowDirtyRead) { + headers.set("x-arango-allow-dirty-read", "true"); + } + + return new Promise((resolve, reject) => { const task: Task = { + resolve, + reject, + transform, retries: 0, - hostUrl, - allowDirtyRead, - retryOnConflict, + conflicts: 0, options: { - pathname: joinPath(basePath, path) ?? "", - search: - params && - (params instanceof URLSearchParams - ? 
params - : new URLSearchParams(params)), - headers, - timeout, + ...taskOptions, + hostUrl, method, - expectBinary, + headers, body, + allowDirtyRead, + retryOnConflict, + maxRetries, + fetchOptions, + timeout, }, - reject, - resolve, - transform, }; if (this._precaptureStackTraces) { @@ -931,7 +1215,9 @@ export class Connection { task.stack = () => `\n${capture.stack.split("\n").slice(3).join("\n")}`; } else { - const capture = generateStackTrace() as { readonly stack: string }; + const capture = util.generateStackTrace() as { + readonly stack: string; + }; if (Object.prototype.hasOwnProperty.call(capture, "stack")) { task.stack = () => `\n${capture.stack.split("\n").slice(4).join("\n")}`; @@ -944,3 +1230,4 @@ export class Connection { }); } } +//#endregion diff --git a/src/cursor.ts b/src/cursors.ts similarity index 88% rename from src/cursor.ts rename to src/cursors.ts index 808bb8237..1e9c1adc7 100644 --- a/src/cursor.ts +++ b/src/cursors.ts @@ -1,19 +1,21 @@ /** * ```ts - * import type { ArrayCursor, BatchedArrayCursor } from "arangojs/cursor.js"; + * import type { Cursor, BatchCursor } from "arangojs/cursors"; * ``` * - * The "cursor" module provides cursor-related interfaces for TypeScript. + * The "cursors" module provides cursor-related types and interfaces for + * TypeScript. * * @packageDocumentation */ -import { LinkedList } from "./lib/linkedList.js"; -import { Database } from "./database.js"; +import { LinkedList } from "./lib/x3-linkedlist.js"; +import * as databases from "./databases.js"; +//#region Cursor properties /** * Additional information about the cursor. */ -export interface CursorExtras { +export type CursorExtras = { /** * Warnings encountered while executing the query. */ @@ -33,12 +35,12 @@ export interface CursorExtras { * Additional statistics about the query execution. */ stats?: CursorStats; -} +}; /** * Additional statics about the query execution of the cursor. 
*/ -export interface CursorStats { +export type CursorStats = { /** * Total number of index entries read from in-memory caches for indexes of * type edge or persistent. @@ -115,27 +117,31 @@ export interface CursorStats { */ runtime: number; }[]; -} +}; -interface BatchView { - isEmpty: boolean; +/** + * A low-level interface for consuming the items of a {@link BatchCursor}. + */ +export interface BatchCursorItemsView { + readonly isEmpty: boolean; more(): Promise; - shift(): T | undefined; + shift(): ItemType | undefined; } +//#endregion /** - * The `BatchedArrayCursor` provides a batch-wise API to an {@link ArrayCursor}. + * The `BatchCursor` provides a batch-wise API to an {@link Cursor}. * * When using TypeScript, cursors can be cast to a specific item type in order * to increase type safety. * - * @param T - Type to use for each item. Defaults to `any`. + * @param ItemType - Type to use for each item. Defaults to `any`. * * @example * ```ts * const db = new Database(); * const query = aql`FOR x IN 1..5 RETURN x`; - * const cursor = await db.query(query) as ArrayCursor; + * const cursor = await db.query(query) as Cursor; * const batches = cursor.batches; * ``` * @@ -152,9 +158,9 @@ interface BatchView { * } * ``` */ -export class BatchedArrayCursor { - protected _db: Database; - protected _batches: LinkedList>; +export class BatchCursor { + protected _db: databases.Database; + protected _batches: LinkedList>; protected _count?: number; protected _extra: CursorExtras; protected _hasMore: boolean; @@ -162,26 +168,26 @@ export class BatchedArrayCursor { protected _id: string | undefined; protected _hostUrl?: string; protected _allowDirtyRead?: boolean; - protected _itemsCursor: ArrayCursor; + protected _itemsCursor: Cursor; /** * @internal */ constructor( - db: Database, + db: databases.Database, body: { - extra: any; - result: T[]; + extra: CursorExtras; + result: ItemType[]; hasMore: boolean; nextBatchId?: string; id: string; count: number; }, hostUrl?: string, 
- allowDirtyRead?: boolean + allowDirtyRead?: boolean, ) { const batches = new LinkedList( - body.result.length ? [new LinkedList(body.result)] : [] + body.result.length ? [new LinkedList(body.result)] : [], ); this._db = db; this._batches = batches; @@ -192,30 +198,14 @@ export class BatchedArrayCursor { this._count = body.count; this._extra = body.extra; this._allowDirtyRead = allowDirtyRead; - this._itemsCursor = new ArrayCursor(this, { - get isEmpty() { - return !batches.length; - }, - more: () => this._more(), - shift: () => { - let batch = batches.first?.value; - while (batch && !batch.length) { - batches.shift(); - batch = batches.first?.value; - } - if (!batch) return undefined; - const value = batch.shift(); - if (!batch.length) batches.shift(); - return value; - }, - }); + this._itemsCursor = new Cursor(this, this.itemsView); } protected async _more(): Promise { if (!this._id || !this.hasMore) return; const body = await this._db.request({ method: "POST", - path: this._nextBatchId + pathname: this._nextBatchId ? `/_api/cursor/${encodeURIComponent(this._id)}/${this._nextBatchId}` : `/_api/cursor/${encodeURIComponent(this._id)}`, hostUrl: this._hostUrl, @@ -241,14 +231,38 @@ export class BatchedArrayCursor { } /** - * An {@link ArrayCursor} providing item-wise access to the cursor result set. + * An {@link Cursor} providing item-wise access to the cursor result set. * - * See also {@link ArrayCursor#batches}. + * See also {@link Cursor#batches}. */ get items() { return this._itemsCursor; } + /** + * A low-level interface for consuming the items of this {@link BatchCursor}. 
+ */ + get itemsView(): BatchCursorItemsView { + const batches = this._batches; + return { + get isEmpty() { + return !batches.length; + }, + more: () => this._more(), + shift: () => { + let batch = batches.first?.value; + while (batch && !batch.length) { + batches.shift(); + batch = batches.first?.value; + } + if (!batch) return undefined; + const value = batch.shift(); + if (!batch.length) batches.shift(); + return value; + }, + }; + } + /** * Additional information about the cursor. */ @@ -305,9 +319,13 @@ export class BatchedArrayCursor { * } * ``` */ - async *[Symbol.asyncIterator](): AsyncGenerator { + async *[Symbol.asyncIterator](): AsyncGenerator< + ItemType[], + undefined, + undefined + > { while (this.hasNext) { - yield this.next() as Promise; + yield this.next() as Promise; } return undefined; } @@ -354,7 +372,7 @@ export class BatchedArrayCursor { * console.log(cursor.hasNext); // false * ``` */ - async all(): Promise { + async all(): Promise { return this.map((batch) => batch); } @@ -381,7 +399,7 @@ export class BatchedArrayCursor { * console.log(cursor.hasNext); // false * ``` */ - async next(): Promise { + async next(): Promise { while (!this._batches.length && this.hasNext) { await this._more(); } @@ -443,7 +461,11 @@ export class BatchedArrayCursor { * ``` */ async forEach( - callback: (currentBatch: T[], index: number, self: this) => false | void + callback: ( + currentBatch: ItemType[], + index: number, + self: this, + ) => false | void, ): Promise { let index = 0; while (this.hasNext) { @@ -463,8 +485,8 @@ export class BatchedArrayCursor { * * **Note**: This creates an array of all return values, which may impact * memory use when working with very large query result sets. Consider using - * {@link BatchedArrayCursor#forEach}, {@link BatchedArrayCursor#reduce} or - * {@link BatchedArrayCursor#flatMap} instead. + * {@link BatchCursor#forEach}, {@link BatchCursor#reduce} or + * {@link BatchCursor#flatMap} instead. 
* * See also: * [`Array.prototype.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map). @@ -486,7 +508,7 @@ export class BatchedArrayCursor { * ``` */ async map( - callback: (currentBatch: T[], index: number, self: this) => R + callback: (currentBatch: ItemType[], index: number, self: this) => R, ): Promise { let index = 0; const result: any[] = []; @@ -543,7 +565,7 @@ export class BatchedArrayCursor { * ``` */ async flatMap( - callback: (currentBatch: T[], index: number, self: this) => R | R[] + callback: (currentBatch: ItemType[], index: number, self: this) => R | R[], ): Promise { let index = 0; const result: any[] = []; @@ -566,7 +588,7 @@ export class BatchedArrayCursor { * for the last batch. * * **Note**: Most complex uses of the `reduce` method can be replaced with - * simpler code using {@link BatchedArrayCursor#forEach} or the `for await` + * simpler code using {@link BatchCursor#forEach} or the `for await` * syntax. * * **Note**: If the result set spans multiple batches, any remaining batches @@ -632,11 +654,11 @@ export class BatchedArrayCursor { async reduce( reducer: ( accumulator: R, - currentBatch: T[], + currentBatch: ItemType[], index: number, - self: this + self: this, ) => R, - initialValue: R + initialValue: R, ): Promise; /** @@ -672,20 +694,20 @@ export class BatchedArrayCursor { */ async reduce( reducer: ( - accumulator: T[] | R, - currentBatch: T[], + accumulator: ItemType[] | R, + currentBatch: ItemType[], index: number, - self: this - ) => R + self: this, + ) => R, ): Promise; async reduce( reducer: ( accumulator: R, - currentBatch: T[], + currentBatch: ItemType[], index: number, - self: this + self: this, ) => R, - initialValue?: R + initialValue?: R, ): Promise { let index = 0; if (!this.hasNext) return initialValue; @@ -732,32 +754,32 @@ export class BatchedArrayCursor { return this._db.request( { method: "DELETE", - path: `/_api/cursor/${encodeURIComponent(this._id!)}`, + pathname: 
`/_api/cursor/${encodeURIComponent(this._id!)}`, }, () => { this._hasMore = false; return undefined; - } + }, ); } } /** - * The `ArrayCursor` type represents a cursor returned from a - * {@link database.Database#query}. + * The `Cursor` type represents a cursor returned from a + * {@link databases.Database#query}. * * When using TypeScript, cursors can be cast to a specific item type in order * to increase type safety. * - * See also {@link BatchedArrayCursor}. + * See also {@link BatchCursor}. * - * @param T - Type to use for each item. Defaults to `any`. + * @param ItemType - Type to use for each item. Defaults to `any`. * * @example * ```ts * const db = new Database(); * const query = aql`FOR x IN 1..5 RETURN x`; - * const result = await db.query(query) as ArrayCursor; + * const result = await db.query(query) as Cursor; * ``` * * @example @@ -771,14 +793,17 @@ export class BatchedArrayCursor { * } * ``` */ -export class ArrayCursor { - protected _batches: BatchedArrayCursor; - protected _view: BatchView; +export class Cursor { + protected _batches: BatchCursor; + protected _view: BatchCursorItemsView; /** * @internal */ - constructor(batchedCursor: BatchedArrayCursor, view: BatchView) { + constructor( + batchedCursor: BatchCursor, + view: BatchCursorItemsView, + ) { this._batches = batchedCursor; this._view = view; } @@ -798,10 +823,10 @@ export class ArrayCursor { } /** - * A {@link BatchedArrayCursor} providing batch-wise access to the cursor + * A {@link BatchCursor} providing batch-wise access to the cursor * result set. * - * See also {@link BatchedArrayCursor#items}. + * See also {@link BatchCursor#items}. 
*/ get batches() { return this._batches; @@ -851,9 +876,13 @@ export class ArrayCursor { * } * ``` */ - async *[Symbol.asyncIterator](): AsyncGenerator { + async *[Symbol.asyncIterator](): AsyncGenerator< + ItemType, + undefined, + undefined + > { while (this.hasNext) { - yield this.next() as Promise; + yield this.next() as Promise; } return undefined; } @@ -869,7 +898,7 @@ export class ArrayCursor { * console.log(cursor.hasNext); // false * ``` */ - async all(): Promise { + async all(): Promise { return this.batches.flatMap((v) => v); } @@ -891,7 +920,7 @@ export class ArrayCursor { * const empty = await cursor.next(); // undefined * ``` */ - async next(): Promise { + async next(): Promise { while (this._view.isEmpty && this.batches.hasMore) { await this._view.more(); } @@ -939,7 +968,11 @@ export class ArrayCursor { * ``` */ async forEach( - callback: (currentValue: T, index: number, self: this) => false | void + callback: ( + currentValue: ItemType, + index: number, + self: this, + ) => false | void, ): Promise { let index = 0; while (this.hasNext) { @@ -958,8 +991,8 @@ export class ArrayCursor { * * **Note**: This creates an array of all return values, which may impact * memory use when working with very large query result sets. Consider using - * {@link ArrayCursor#forEach}, {@link ArrayCursor#reduce} or - * {@link ArrayCursor#flatMap} instead. + * {@link Cursor#forEach}, {@link Cursor#reduce} or + * {@link Cursor#flatMap} instead. * * See also: * [`Array.prototype.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map). 
@@ -978,7 +1011,7 @@ export class ArrayCursor { * ``` */ async map( - callback: (currentValue: T, index: number, self: this) => R + callback: (currentValue: ItemType, index: number, self: this) => R, ): Promise { let index = 0; const result: any[] = []; @@ -1029,7 +1062,7 @@ export class ArrayCursor { * ``` */ async flatMap( - callback: (currentValue: T, index: number, self: this) => R | R[] + callback: (currentValue: ItemType, index: number, self: this) => R | R[], ): Promise { let index = 0; const result: any[] = []; @@ -1052,7 +1085,7 @@ export class ArrayCursor { * for the last item. * * **Note**: Most complex uses of the `reduce` method can be replaced with - * simpler code using {@link ArrayCursor#forEach} or the `for await` syntax. + * simpler code using {@link Cursor#forEach} or the `for await` syntax. * * **Note**: If the result set spans multiple batches, any remaining batches * will only be fetched on demand. Depending on the cursor's TTL and the @@ -1109,8 +1142,13 @@ export class ArrayCursor { * ``` */ async reduce( - reducer: (accumulator: R, currentValue: T, index: number, self: this) => R, - initialValue: R + reducer: ( + accumulator: R, + currentValue: ItemType, + index: number, + self: this, + ) => R, + initialValue: R, ): Promise; /** * Depletes the cursor by applying the `reducer` function to each item in @@ -1143,15 +1181,20 @@ export class ArrayCursor { */ async reduce( reducer: ( - accumulator: T | R, - currentValue: T, + accumulator: ItemType | R, + currentValue: ItemType, index: number, - self: this - ) => R + self: this, + ) => R, ): Promise; async reduce( - reducer: (accumulator: R, currentValue: T, index: number, self: this) => R, - initialValue?: R + reducer: ( + accumulator: R, + currentValue: ItemType, + index: number, + self: this, + ) => R, + initialValue?: R, ): Promise { let index = 0; if (!this.hasNext) return initialValue; diff --git a/src/database.ts b/src/databases.ts similarity index 61% rename from src/database.ts rename to 
src/databases.ts index 388cd1758..b4b8e7d12 100644 --- a/src/database.ts +++ b/src/databases.ts @@ -1,2233 +1,109 @@ /** * ```js - * import { Database } from "arangojs/database.js"; + * import { Database } from "arangojs/databases"; * ``` * - * The "database" module provides the {@link Database} class and associated + * The "databases" module provides the {@link Database} class and associated * types and interfaces for TypeScript. * * The Database class is also re-exported by the "index" module. * * @packageDocumentation */ -import { - Analyzer, - AnalyzerDescription, - CreateAnalyzerOptions, -} from "./analyzer.js"; -import { AqlLiteral, AqlQuery, isAqlLiteral, isAqlQuery } from "./aql.js"; -import { - ArangoCollection, - Collection, - CollectionMetadata, - CollectionType, - CreateCollectionOptions, - DocumentCollection, - EdgeCollection, - collectionToString, - isArangoCollection, -} from "./collection.js"; -import { - ArangoApiResponse, - Config, - Connection, - RequestOptions, -} from "./connection.js"; -import { ArrayCursor, BatchedArrayCursor } from "./cursor.js"; -import { HttpError, isArangoError } from "./error.js"; -import { FoxxManifest } from "./foxx-manifest.js"; -import { - CreateGraphOptions, - EdgeDefinitionOptions, - Graph, - GraphInfo, -} from "./graph.js"; -import { Job } from "./job.js"; +import * as administration from "./administration.js"; +import * as analyzers from "./analyzers.js"; +import * as aql from "./aql.js"; +import * as cluster from "./cluster.js"; +import * as collections from "./collections.js"; +import * as configuration from "./configuration.js"; +import * as connection from "./connection.js"; +import * as cursors from "./cursors.js"; +import * as errors from "./errors.js"; +import * as graphs from "./graphs.js"; +import * as hotBackups from "./hot-backups.js"; +import * as jobs from "./jobs.js"; import { DATABASE_NOT_FOUND } from "./lib/codes.js"; -import { ArangojsResponse } from "./lib/request.js"; -import { Route } from 
"./route.js"; -import { Transaction } from "./transaction.js"; -import { CreateViewOptions, View, ViewDescription } from "./view.js"; - -/** - * Indicates whether the given value represents a {@link Database}. - * - * @param database - A value that might be a database. - */ -export function isArangoDatabase(database: any): database is Database { - return Boolean(database && database.isArangoDatabase); -} - -/** - * @internal - */ -function coerceTransactionCollections( - collections: - | (TransactionCollections & { allowImplicit?: boolean }) - | (string | ArangoCollection)[] - | string - | ArangoCollection -): CoercedTransactionCollections { - if (typeof collections === "string") { - return { write: [collections] }; - } - if (Array.isArray(collections)) { - return { write: collections.map(collectionToString) }; - } - if (isArangoCollection(collections)) { - return { write: collectionToString(collections) }; - } - const cols: CoercedTransactionCollections = {}; - if (collections) { - if (collections.allowImplicit !== undefined) { - cols.allowImplicit = collections.allowImplicit; - } - if (collections.read) { - cols.read = Array.isArray(collections.read) - ? collections.read.map(collectionToString) - : collectionToString(collections.read); - } - if (collections.write) { - cols.write = Array.isArray(collections.write) - ? collections.write.map(collectionToString) - : collectionToString(collections.write); - } - if (collections.exclusive) { - cols.exclusive = Array.isArray(collections.exclusive) - ? collections.exclusive.map(collectionToString) - : collectionToString(collections.exclusive); - } - } - return cols; -} - -/** - * @internal - */ -type CoercedTransactionCollections = { - allowImplicit?: boolean; - exclusive?: string | string[]; - write?: string | string[]; - read?: string | string[]; -}; - -/** - * Collections involved in a transaction. 
- */ -export type TransactionCollections = { - /** - * An array of collections or a single collection that will be read from or - * written to during the transaction with no other writes being able to run - * in parallel. - */ - exclusive?: (string | ArangoCollection)[] | string | ArangoCollection; - /** - * An array of collections or a single collection that will be read from or - * written to during the transaction. - */ - write?: (string | ArangoCollection)[] | string | ArangoCollection; - /** - * An array of collections or a single collection that will be read from - * during the transaction. - */ - read?: (string | ArangoCollection)[] | string | ArangoCollection; -}; - -/** - * Options for how the transaction should be performed. - */ -export type TransactionOptions = { - /** - * Whether the transaction may read from collections not specified for this - * transaction. If set to `false`, accessing any collections not specified - * will result in the transaction being aborted to avoid potential deadlocks. - * - * Default: `true`. - */ - allowImplicit?: boolean; - /** - * If set to `true`, the request will explicitly permit ArangoDB to return a - * potentially dirty or stale result and arangojs will load balance the - * request without distinguishing between leaders and followers. - */ - allowDirtyRead?: boolean; - /** - * Determines whether to force the transaction to write all data to disk - * before returning. - */ - waitForSync?: boolean; - /** - * Determines how long the database will wait while attempting to gain locks - * on collections used by the transaction before timing out. - */ - lockTimeout?: number; - /** - * Determines the transaction size limit in bytes. 
- */ - maxTransactionSize?: number; - /** - * If set to `true`, the fast lock round will be skipped, which makes each - * locking operation take longer but guarantees deterministic locking order - * and may avoid deadlocks when many concurrent transactions are queued and - * try to access the same collection with an exclusive lock. - */ - skipFastLockRound?: boolean; -}; - -/** - * Options for executing a query. - * - * See {@link Database#query}. - */ -export type QueryOptions = { - /** - * If set to `true`, the query will be executed with support for dirty reads - * enabled, permitting ArangoDB to return a potentially dirty or stale result - * and arangojs will load balance the request without distinguishing between - * leaders and followers. - * - * Note that dirty reads are only supported for read-only queries, not data - * modification queries (e.g. using `INSERT`, `UPDATE`, `REPLACE` or - * `REMOVE`) and only when using ArangoDB 3.4 or later. - * - * Default: `false` - */ - allowDirtyRead?: boolean; - /** - * If set to `true`, cursor results will be stored by ArangoDB in such a way - * that batch reads can be retried in the case of a communication error. - * - * Default: `false` - */ - allowRetry?: boolean; - /** - * Maximum time in milliseconds arangojs will wait for a server response. - * Exceeding this value will result in the request being cancelled. - * - * **Note**: Setting a timeout for the client does not guarantee the query - * will be killed by ArangoDB if it is already being executed. See the - * `maxRuntime` option for limiting the execution time within ArangoDB. - */ - timeout?: number; - /** - * If set to a positive number, the query will automatically be retried at - * most this many times if it results in a write-write conflict. - * - * Default: `0` - */ - retryOnConflict?: number; - /** - * Unless set to `false`, the number of result values in the result set will - * be returned in the `count` attribute. 
This may be disabled by default in - * a future version of ArangoDB if calculating this value has a performance - * impact for some queries. - * - * Default: `true`. - */ - count?: boolean; - /** - * Number of result values to be transferred by the server in each - * network roundtrip (or "batch"). - * - * Must be greater than zero. - */ - batchSize?: number; - /** - * If set to `false`, the AQL query results cache lookup will be skipped for - * this query. - * - * Default: `true` - */ - cache?: boolean; - /** - * Maximum memory size in bytes that the query is allowed to use. - * Exceeding this value will result in the query failing with an error. - * - * If set to `0`, the memory limit is disabled. - * - * Default: `0` - */ - memoryLimit?: number; - /** - * Maximum allowed execution time before the query will be killed in seconds. - * - * If set to `0`, the query will be allowed to run indefinitely. - * - * Default: `0` - */ - maxRuntime?: number; - /** - * Time-to-live for the cursor in seconds. The cursor results may be - * garbage collected by ArangoDB after this much time has passed. - * - * Default: `30` - */ - ttl?: number; - /** - * If set to `true`, the query will throw an exception and abort if it would - otherwise produce a warning. - */ - failOnWarning?: boolean; - /** - * If set to `1` or `true`, additional query profiling information will be - * returned in the `extra.profile` attribute if the query is not served from - * the result cache. - * - * If set to `2`, the query will return execution stats per query plan node - * in the `extra.stats.nodes` attribute. Additionally the query plan is - * returned in `extra.plan`. - */ - profile?: boolean | number; - /** - * If set to `true`, the query will be executed as a streaming query. - */ - stream?: boolean; - /** - * Limits the maximum number of warnings a query will return. 
- */ - maxWarningsCount?: number; - /** - * If set to `true` and the query has a `LIMIT` clause, the total number of - * values matched before the last top-level `LIMIT` in the query was applied - * will be returned in the `extra.stats.fullCount` attribute. - */ - fullCount?: boolean; - /** - * If set to `false`, the query data will not be stored in the RocksDB block - * cache. This can be used to avoid thrashing he block cache when reading a - * lot of data. - */ - fillBlockCache?: boolean; - /** - * An object with a `rules` property specifying a list of optimizer rules to - * be included or excluded by the optimizer for this query. Prefix a rule - * name with `+` to include it, or `-` to exclude it. The name `all` acts as - * an alias matching all optimizer rules. - */ - optimizer?: { rules: string[] }; - /** - * Limits the maximum number of plans that will be created by the AQL query - * optimizer. - */ - maxPlans?: number; - /** - * Controls after how many execution nodes in a query a stack split should be - * performed. - * - * Default: `250` (`200` on macOS) - */ - maxNodesPerCallstack?: number; - /** - * Maximum size of transactions in bytes. - */ - maxTransactionSize?: number; - /** - * Maximum number of operations after which an intermediate commit is - * automatically performed. - */ - intermediateCommitCount?: number; - /** - * Maximum total size of operations in bytes after which an intermediate - * commit is automatically performed. - */ - intermediateCommitSize?: number; - /** - * (Enterprise Edition cluster only.) If set to `true`, collections - * inaccessible to current user will result in an access error instead - * of being treated as empty. - */ - skipInaccessibleCollections?: boolean; - /** - * (Enterprise Edition cluster only.) Limits the maximum time in seconds a - * DBServer will wait to bring satellite collections involved in the query - * into sync. Exceeding this value will result in the query being stopped. 
- * - * Default: `60` - */ - satelliteSyncWait?: number; -}; - -/** - * Options for explaining a query. - * - * See {@link Database#explain}. - */ -export type ExplainOptions = { - /** - * An object with a `rules` property specifying a list of optimizer rules to - * be included or excluded by the optimizer for this query. Prefix a rule - * name with `+` to include it, or `-` to exclude it. The name `all` acts as - * an alias matching all optimizer rules. - */ - optimizer?: { rules: string[] }; - /** - * Maximum number of plans that the optimizer is allowed to generate. - * Setting this to a low value limits the amount of work the optimizer does. - */ - maxNumberOfPlans?: number; - /** - * If set to true, all possible execution plans will be returned as the - * `plans` property. Otherwise only the optimal execution plan will be - * returned as the `plan` property. - * - * Default: `false` - */ - allPlans?: boolean; -}; - -/** - * Details for a transaction. - * - * See also {@link transaction.TransactionStatus}. - */ -export type TransactionDetails = { - /** - * Unique identifier of the transaction. - */ - id: string; - /** - * Status (or "state") of the transaction. - */ - state: "running" | "committed" | "aborted"; -}; - -/** - * Plan explaining query execution. - */ -export type ExplainPlan = { - /** - * Execution nodes in this plan. - */ - nodes: { - [key: string]: any; - type: string; - id: number; - dependencies: number[]; - estimatedCost: number; - estimatedNrItems: number; - }[]; - /** - * Rules applied by the optimizer. - */ - rules: string[]; - /** - * Information about collections involved in the query. - */ - collections: { - name: string; - type: "read" | "write"; - }[]; - /** - * Variables used in the query. - */ - variables: { - id: number; - name: string; - }[]; - /** - * Total estimated cost of the plan. - */ - estimatedCost: number; - /** - * Estimated number of items returned by the query. 
- */ - estimatedNrItems: number; - /** - * Whether the query is a data modification query. - */ - isModificationQuery: boolean; -}; - -/** - * Optimizer statistics for an explained query. - */ -export type ExplainStats = { - /** - * Total number of rules executed for this query. - */ - rulesExecuted: number; - /** - * Number of rules skipped for this query. - */ - rulesSkipped: number; - /** - * Total number of plans created. - */ - plansCreated: number; - /** - * Maximum memory usage in bytes of the query during explain. - */ - peakMemoryUsage: number; - /** - * Time in seconds needed to explain the query. - */ - executionTime: number; -}; - -/** - * Result of explaining a query with a single plan. - */ -export type SingleExplainResult = { - /** - * Query plan. - */ - plan: ExplainPlan; - /** - * Whether it would be possible to cache the query. - */ - cacheable: boolean; - /** - * Warnings encountered while planning the query execution. - */ - warnings: { code: number; message: string }[]; - /** - * Optimizer statistics for the explained query. - */ - stats: ExplainStats; -}; - -/** - * Result of explaining a query with multiple plans. - */ -export type MultiExplainResult = { - /** - * Query plans. - */ - plans: ExplainPlan[]; - /** - * Whether it would be possible to cache the query. - */ - cacheable: boolean; - /** - * Warnings encountered while planning the query execution. - */ - warnings: { code: number; message: string }[]; - /** - * Optimizer statistics for the explained query. - */ - stats: ExplainStats; -}; - -/** - * Node in an AQL abstract syntax tree (AST). - */ -export type AstNode = { - [key: string]: any; - type: string; - subNodes: AstNode[]; -}; - -/** - * Result of parsing a query. - */ -export type ParseResult = { - /** - * Whether the query was parsed. - */ - parsed: boolean; - /** - * Names of all collections involved in the query. - */ - collections: string[]; - /** - * Names of all bind parameters used in the query. 
- */ - bindVars: string[]; - /** - * Abstract syntax tree (AST) of the query. - */ - ast: AstNode[]; -}; - -/** - * Optimizer rule for AQL queries. - */ -export type QueryOptimizerRule = { - name: string; - flags: { - hidden: boolean; - clusterOnly: boolean; - canBeDisabled: boolean; - canCreateAdditionalPlans: boolean; - disabledByDefault: boolean; - enterpriseOnly: boolean; - }; -}; - -/** - * Information about query tracking. - */ -export type QueryTracking = { - /** - * Whether query tracking is enabled. - */ - enabled: boolean; - /** - * Maximum query string length in bytes that is kept in the list. - */ - maxQueryStringLength: number; - /** - * Maximum number of slow queries that is kept in the list. - */ - maxSlowQueries: number; - /** - * Threshold execution time in seconds for when a query is - * considered slow. - */ - slowQueryThreshold: number; - /** - * Whether bind parameters are being tracked along with queries. - */ - trackBindVars: boolean; - /** - * Whether slow queries are being tracked. - */ - trackSlowQueries: boolean; -}; - -/** - * Options for query tracking. - * - * See {@link Database#queryTracking}. - */ -export type QueryTrackingOptions = { - /** - * If set to `false`, neither queries nor slow queries will be tracked. - */ - enabled?: boolean; - /** - * Maximum query string length in bytes that will be kept in the list. - */ - maxQueryStringLength?: number; - /** - * Maximum number of slow queries to be kept in the list. - */ - maxSlowQueries?: number; - /** - * Threshold execution time in seconds for when a query will be - * considered slow. - */ - slowQueryThreshold?: number; - /** - * If set to `true`, bind parameters will be tracked along with queries. - */ - trackBindVars?: boolean; - /** - * If set to `true` and `enabled` is also set to `true`, slow queries will be - * tracked if their execution time exceeds `slowQueryThreshold`. - */ - trackSlowQueries?: boolean; -}; - -/** - * Entry in the AQL query results cache. 
- */ -export type QueryCacheEntry = { - /** - * Hash of the query results. - */ - hash: string; - /** - * Query string. - */ - query: string; - /** - * Bind parameters used in the query. Only shown if tracking for bind - * variables was enabled at server start. - */ - bindVars: Record; - /** - * Size of the query results and bind parameters in bytes. - */ - size: number; - /** - * Number of documents/rows in the query results. - */ - results: number; - /** - * Date and time the query was started as an ISO 8601 timestamp. - */ - started: string; - /** - * Number of times the result was served from the cache. - */ - hits: number; - /** - * Running time of the query in seconds. - */ - runTime: number; - /** - * Collections and views involved in the query. - */ - dataSources: string[]; -}; - -/** - * Properties of the global AQL query results cache configuration. - */ -export type QueryCacheProperties = { - /** - * If set to `true`, the query cache will include queries that involve - * system collections. - */ - includeSystem: boolean; - /** - * Maximum individual size of query results that will be stored per - * database-specific cache. - */ - maxEntrySize: number; - /** - * Maximum number of query results that will be stored per database-specific - * cache. - */ - maxResults: number; - /** - * Maximum cumulated size of query results that will be stored per - * database-specific cache. - */ - maxResultsSize: number; - /** - * Mode the AQL query cache should operate in. - */ - mode: "off" | "on" | "demand"; -}; - -/** - * Options for adjusting the global properties for the AQL query results cache. - */ -export type QueryCachePropertiesOptions = { - /** - * If set to `true`, the query cache will include queries that involve - * system collections. - */ - includeSystem?: boolean; - /** - * Maximum individual size of query results that will be stored per - * database-specific cache. 
- */ - maxEntrySize?: number; - /** - * Maximum number of query results that will be stored per database-specific - * cache. - */ - maxResults?: number; - /** - * Maximum cumulated size of query results that will be stored per - * database-specific cache. - */ - maxResultsSize?: number; - /** - * Mode the AQL query cache should operate in. - */ - mode?: "off" | "on" | "demand"; -}; - -/** - * Object describing a query. - */ -export type QueryInfo = { - /** - * Unique identifier for this query. - */ - id: string; - /** - * Name of the database the query runs in. - */ - database: string; - /** - * Name of the user that started the query. - */ - user: string; - /** - * Query string (potentially truncated). - */ - query: string; - /** - * Bind parameters used in the query. - */ - bindVars: Record; - /** - * Date and time the query was started. - */ - started: string; - /** - * Query's running time in seconds. - */ - runTime: number; - /** - * Maximum memory usage in bytes of the query. - */ - peakMemoryUsage: number; - /** - * Query's current execution state. - */ - state: "executing" | "finished" | "killed"; - /** - * Whether the query uses a streaming cursor. - */ - stream: boolean; -}; - -/** - * Information about a cluster imbalance. - */ -export type ClusterImbalanceInfo = { - /** - * Information about the leader imbalance. - */ - leader: { - /** - * The weight of leader shards per DB-Server. A leader has a weight of 1 by default but it is higher if collections can only be moved together because of `distributeShardsLike`. - */ - weightUsed: number[]; - /** - * The ideal weight of leader shards per DB-Server. - */ - targetWeight: number[]; - /** - * The number of leader shards per DB-Server. - */ - numberShards: number[]; - /** - * The measure of the leader shard distribution. The higher the number, the worse the distribution. - */ - leaderDupl: number[]; - /** - * The sum of all weights. - */ - totalWeight: number; - /** - * The measure of the total imbalance. 
A high value indicates a high imbalance. - */ - imbalance: number; - /** - * The sum of shards, counting leader shards only. - */ - totalShards: number; - }; - /** - * Information about the shard imbalance. - */ - shards: { - /** - * The size of shards per DB-Server. - */ - sizeUsed: number[]; - /** - * The ideal size of shards per DB-Server. - */ - targetSize: number[]; - /** - * The number of leader and follower shards per DB-Server. - */ - numberShards: number[]; - /** - * The sum of the sizes. - */ - totalUsed: number; - /** - * The sum of shards, counting leader and follower shards. - */ - totalShards: number; - /** - * The sum of system collection shards, counting leader shards only. - */ - totalShardsFromSystemCollections: number; - /** - * The measure of the total imbalance. A high value indicates a high imbalance. - */ - imbalance: number; - }; -}; - -/** - * Information about the current state of the cluster imbalance. - */ -export type ClusterRebalanceState = ClusterImbalanceInfo & { - /** - * The number of pending move shard operations. - */ - pendingMoveShards: number; - /** - * The number of planned move shard operations. - */ - todoMoveShards: number; -}; - -/** - * Options for rebalancing the cluster. - */ -export type ClusterRebalanceOptions = { - /** - * Maximum number of moves to be computed. - * - * Default: `1000` - */ - maximumNumberOfMoves?: number; - /** - * Allow leader changes without moving data. - * - * Default: `true` - */ - leaderChanges?: boolean; - /** - * Allow moving leaders. - * - * Default: `false` - */ - moveLeaders?: boolean; - /** - * Allow moving followers. - * - * Default: `false` - */ - moveFollowers?: boolean; - /** - * Ignore system collections in the rebalance plan. - * - * Default: `false` - */ - excludeSystemCollections?: boolean; - /** - * Default: `256**6` - */ - piFactor?: number; - /** - * A list of database names to exclude from the analysis. 
- * - * Default: `[]` - */ - databasesExcluded?: string[]; -}; - -export type ClusterRebalanceMove = { - /** - * The server name from which to move. - */ - from: string; - /** - * The ID of the destination server. - */ - to: string; - /** - * Shard ID of the shard to be moved. - */ - shard: string; - /** - * Collection ID of the collection the shard belongs to. - */ - collection: number; - /** - * True if this is a leader move shard operation. - */ - isLeader: boolean; -}; - -export type ClusterRebalanceResult = { - /** - * Imbalance before the suggested move shard operations are applied. - */ - imbalanceBefore: ClusterImbalanceInfo; - /** - * Expected imbalance after the suggested move shard operations are applied. - */ - imbalanceAfter: ClusterImbalanceInfo; - /** - * Suggested move shard operations. - */ - moves: ClusterRebalanceMove[]; -}; - -/** - * Database user to create with a database. - */ -export type CreateDatabaseUser = { - /** - * Username of the user to create. - */ - username: string; - /** - * Password of the user to create. - * - * Default: `""` - */ - passwd?: string; - /** - * Whether the user is active. - * - * Default: `true` - */ - active?: boolean; - /** - * Additional data to store with the user object. - */ - extra?: Record; -}; - -/** - * Options for creating a database. - * - * See {@link Database#createDatabase}. - */ -export type CreateDatabaseOptions = { - /** - * Database users to create with the database. - */ - users?: CreateDatabaseUser[]; - /** - * (Cluster only.) The sharding method to use for new collections in the - * database. - */ - sharding?: "" | "flexible" | "single"; - /** - * (Cluster only.) Default replication factor for new collections in this - * database. - * - * Setting this to `1` disables replication. Setting this to `"satellite"` - * will replicate to every DBServer. - */ - replicationFactor?: "satellite" | number; - /** - * (Cluster only.) Default write concern for new collections created in this - * database. 
- */ - writeConcern?: number; -}; - -/** - * Object describing a database. - * - * See {@link Database#get}. - */ -export type DatabaseInfo = { - /** - * Name of the database. - */ - name: string; - /** - * Unique identifier of the database. - */ - id: string; - /** - * File system path of the database. - */ - path: string; - /** - * Whether the database is the system database. - */ - isSystem: boolean; - /** - * (Cluster only.) The sharding method to use for new collections in the - * database. - */ - sharding?: "" | "flexible" | "single"; - /** - * (Cluster only.) Default replication factor for new collections in this - * database. - */ - replicationFactor?: "satellite" | number; - /** - * (Cluster only.) Default write concern for new collections created in this - * database. - */ - writeConcern?: number; -}; - -/** - * Result of retrieving database version information. - */ -export type VersionInfo = { - /** - * Value identifying the server type, i.e. `"arango"`. - */ - server: string; - /** - * ArangoDB license type or "edition". - */ - license: "community" | "enterprise"; - /** - * ArangoDB server version. - */ - version: string; - /** - * Additional information about the ArangoDB server. - */ - details?: { [key: string]: string }; -}; - -/** - * Information about the storage engine. - */ -export type EngineInfo = { - /** - * Endianness of the storage engine. - */ - endianness?: "little" | "big"; - /** - * Name of the storage engine. - */ - name: string; - /** - * Features supported by the storage engine. - */ - supports?: { - /** - * Index types supported by the storage engine. - */ - indexes?: string[]; - /** - * Aliases supported by the storage engine. - */ - aliases?: { - /** - * Index type aliases supported by the storage engine. - */ - indexes?: Record; - }; - }; -}; - -/** - * Performance and resource usage information about the storage engine. 
- */ -export type EngineStatsInfo = Record< - string, - string | number | Record ->; - -/** - * Information about the server status. - */ -export type ServerStatusInformation = { - /** - * (Cluster Coordinators and DB-Servers only.) The address of the server. - */ - address?: string; - /** - * (Cluster Coordinators and DB-Servers only.) Information about the Agency. - */ - agency?: { - /** - * Information about the communication with the Agency. - */ - agencyComm: { - /** - * A list of possible Agency endpoints. - */ - endpoints: string[]; - }; - }; - /** - * (Cluster Agents only.) Information about the Agents. - */ - agent?: { - /** - * The endpoint of the queried Agent. - */ - endpoint: string; - /** - * Server ID of the queried Agent. - */ - id: string; - /** - * Server ID of the leading Agent. - */ - leaderId: string; - /** - * Whether the queried Agent is the leader. - */ - leading: boolean; - /** - * The current term number. - */ - term: number; - }; - /** - * (Cluster Coordinators only.) Information about the Coordinators. - */ - coordinator?: { - /** - * The server ID of the Coordinator that is the Foxx master. - */ - foxxmaster: string[]; - /** - * Whether the queried Coordinator is the Foxx master. - */ - isFoxxmaster: boolean[]; - }; - /** - * Whether the Foxx API is enabled. - */ - foxxApi: boolean; - /** - * A host identifier defined by the HOST or NODE_NAME environment variable, - * or a fallback value using a machine identifier or the cluster/Agency address. - */ - host: string; - /** - * A hostname defined by the HOSTNAME environment variable. - */ - hostname?: string; - /** - * ArangoDB Edition. - */ - license: "community" | "enterprise"; - /** - * Server operation mode. - * - * @deprecated use `operationMode` instead - */ - mode: "server" | "console"; - /** - * Server operation mode. - */ - operationMode: "server" | "console"; - /** - * The process ID of arangod. - */ - pid: number; - /** - * Server type. 
- */ - server: "arango"; - /** - * Information about the server status. - */ - serverInfo: { - /** - * Whether the maintenance mode is enabled. - */ - maintenance: boolean; - /** - * (Cluster only.) The persisted ID. - */ - persistedId?: string; - /** - * Startup and recovery information. - */ - progress: { - /** - * Internal name of the feature that is currently being prepared, started, stopped or unprepared. - */ - feature: string; - /** - * Name of the lifecycle phase the instance is currently in. - */ - phase: string; - /** - * Current recovery sequence number value. - */ - recoveryTick: number; - }; - /** - * Whether writes are disabled. - */ - readOnly: boolean; - /** - * (Cluster only.) The reboot ID. Changes on every restart. - */ - rebootId?: number; - /** - * Either "SINGLE", "COORDINATOR", "PRIMARY" (DB-Server), or "AGENT" - */ - role: "SINGLE" | "COORDINATOR" | "PRIMARY" | "AGENT"; - /** - * (Cluster Coordinators and DB-Servers only.) The server ID. - */ - serverId?: string; - /** - * (Cluster Coordinators and DB-Servers only.) Either "STARTUP", "SERVING", - * or "SHUTDOWN". - */ - state?: "STARTUP" | "SERVING" | "SHUTDOWN"; - /** - * The server version string. - */ - version: string; - /** - * Whether writes are enabled. - * - * @deprecated Use `readOnly` instead. - */ - writeOpsEnabled: boolean; - }; -}; - -/** - * Server availability. - * - * - `"default"`: The server is operational. - * - * - `"readonly"`: The server is in read-only mode. - * - * - `false`: The server is not available. - */ -export type ServerAvailability = "default" | "readonly" | false; - -/** - * Single server deployment information for support purposes. - */ -export type SingleServerSupportInfo = { - /** - * ISO 8601 datetime string of when the information was requested. - */ - date: string; - /** - * Information about the deployment. - */ - deployment: { - /** - * Deployment mode: - * - * - `"single"`: A single server deployment. - * - * - `"cluster"`: A cluster deployment. 
- */ - type: "single"; - }; -}; - -/** - * Cluster deployment information for support purposes. - */ -export type ClusterSupportInfo = { - /** - * ISO 8601 datetime string of when the information was requested. - */ - date: string; - /** - * Information about the deployment. - */ - deployment: { - /** - * Deployment mode: - * - * - `"single"`: A single server deployment. - * - * - `"cluster"`: A cluster deployment. - */ - type: "cluster"; - /** - * Information about the servers in the cluster. - */ - servers: Record>; - /** - * Number of agents in the cluster. - */ - agents: number; - /** - * Number of coordinators in the cluster. - */ - coordinators: number; - /** - * Number of DB-Servers in the cluster. - */ - dbServers: number; - /** - * Information about the shards in the cluster. - */ - shards: { - /** - * Number of collections in the cluster. - */ - collections: number; - /** - * Number of shards in the cluster. - */ - shards: number; - /** - * Number of leaders in the cluster. - */ - leaders: number; - /** - * Number of real leaders in the cluster. - */ - realLeaders: number; - /** - * Number of followers in the cluster. - */ - followers: number; - /** - * Number of servers in the cluster. - */ - servers: number; - }; - }; - /** - * (Cluster only.) Information about the ArangoDB instance as well as the - * host machine. - */ - host: Record; -}; - -/** - * Information about the server license. - */ -export type LicenseInfo = { - /** - * Properties of the license. - */ - features: { - /** - * The timestamp of the expiration date of the license in seconds since the - * Unix epoch. - */ - expires?: number; - }; - /** - * The hash value of the license. - */ - hash: string; - /** - * The encrypted license key in base 64 encoding, or `"none"` when running - * in the Community Edition. - */ - license?: string; - /** - * The status of the installed license. - * - * - `"good"`: The license is valid for more than 2 weeks. 
- * - * - `"expiring"`: The license is valid for less than 2 weeks. - * - * - `"expired"`: The license has expired. - * - * - `"read-only"`: The license has been expired for more than 2 weeks. - */ - status: "good" | "expiring" | "expired" | "read-only"; - /** - * Whether the server is performing a database upgrade. - */ - upgrading: boolean; - /** - * The license version number. - */ - version: number; -}; - -/** - * Options for compacting all databases on the server. - */ -export type CompactOptions = { - /** - * Whether compacted data should be moved to the minimum possible level. - * - * Default: `false`. - */ - changeLevel?: boolean; - /** - * Whether to compact the bottom-most level of data. - * - * Default: `false`. - */ - compactBottomMostLevel?: boolean; -}; - -/** - * Definition of an AQL User Function. - */ -export type AqlUserFunction = { - /** - * Name of the AQL User Function. - */ - name: string; - /** - * Implementation of the AQL User Function. - */ - code: string; - /** - * Whether the function is deterministic. - * - * See {@link Database#createFunction}. - */ - isDeterministic: boolean; -}; - -/** - * Options for installing the service. - * - * See {@link Database#installService}. - */ -export type InstallServiceOptions = { - /** - * An object mapping configuration option names to values. - * - * See also {@link Database#getServiceConfiguration}. - */ - configuration?: Record; - /** - * An object mapping dependency aliases to mount points. - * - * See also {@link Database#getServiceDependencies}. - */ - dependencies?: Record; - /** - * Whether the service should be installed in development mode. - * - * See also {@link Database#setServiceDevelopmentMode}. - * - * Default: `false` - */ - development?: boolean; - /** - * Whether the service should be installed in legacy compatibility mode - * - * This overrides the `engines` option in the service manifest (if any). 
- * - * Default: `false` - */ - legacy?: boolean; - /** - * Whether the "setup" script should be executed. - * - * Default: `true` - */ - setup?: boolean; -}; - -/** - * Options for replacing a service. - * - * See {@link Database#replaceService}. - */ -export type ReplaceServiceOptions = { - /** - * An object mapping configuration option names to values. - * - * See also {@link Database#getServiceConfiguration}. - */ - configuration?: Record; - /** - * An object mapping dependency aliases to mount points. - * - * See also {@link Database#getServiceDependencies}. - */ - dependencies?: Record; - /** - * Whether the service should be installed in development mode. - * - * See also {@link Database#setServiceDevelopmentMode}. - * - * Default: `false` - */ - development?: boolean; - /** - * Whether the service should be installed in legacy compatibility mode - * - * This overrides the `engines` option in the service manifest (if any). - * - * Default: `false` - */ - legacy?: boolean; - /** - * Whether the "setup" script should be executed. - * - * Default: `true` - */ - setup?: boolean; - /** - * Whether the existing service's "teardown" script should be executed - * prior to removing that service. - * - * Default: `true` - */ - teardown?: boolean; - /** - * If set to `true`, replacing a service that does not already exist will - * fall back to installing the new service. - * - * Default: `false` - */ - force?: boolean; -}; - -/** - * Options for upgrading a service. - * - * See {@link Database#upgradeService}. - */ -export type UpgradeServiceOptions = { - /** - * An object mapping configuration option names to values. - * - * See also {@link Database#getServiceConfiguration}. - */ - configuration?: Record; - /** - * An object mapping dependency aliases to mount points. - * - * See also {@link Database#getServiceDependencies}. - */ - dependencies?: Record; - /** - * Whether the service should be installed in development mode. 
- * - * See also {@link Database#setServiceDevelopmentMode}. - * - * Default: `false` - */ - development?: boolean; - /** - * Whether the service should be installed in legacy compatibility mode - * - * This overrides the `engines` option in the service manifest (if any). - * - * Default: `false` - */ - legacy?: boolean; - /** - * Whether the "setup" script should be executed. - * - * Default: `true` - */ - setup?: boolean; - /** - * Whether the existing service's "teardown" script should be executed - * prior to upgrading that service. - * - * Default: `false` - */ - teardown?: boolean; - /** - * Unless set to `true`, upgrading a service that does not already exist will - * fall back to installing the new service. - * - * Default: `false` - */ - force?: boolean; -}; - -/** - * Options for uninstalling a service. - * - * See {@link Database#uninstallService}. - */ -export type UninstallServiceOptions = { - /** - * Whether the service's "teardown" script should be executed - * prior to removing that service. - * - * Default: `true` - */ - teardown?: boolean; - /** - * If set to `true`, uninstalling a service that does not already exist - * will be considered successful. - * - * Default: `false` - */ - force?: boolean; -}; - -/** - * Object briefly describing a Foxx service. - */ -export type ServiceSummary = { - /** - * Service mount point, relative to the database. - */ - mount: string; - /** - * Name defined in the service manifest. - */ - name?: string; - /** - * Version defined in the service manifest. - */ - version?: string; - /** - * Service dependencies the service expects to be able to match as a mapping - * from dependency names to versions the service is compatible with. - */ - provides: Record; - /** - * Whether development mode is enabled for this service. - */ - development: boolean; - /** - * Whether the service is running in legacy compatibility mode. - */ - legacy: boolean; -}; - -/** - * Object describing a Foxx service in detail. 
- */ -export type ServiceInfo = { - /** - * Service mount point, relative to the database. - */ - mount: string; - /** - * File system path of the service. - */ - path: string; - /** - * Name defined in the service manifest. - */ - name?: string; - /** - * Version defined in the service manifest. - */ - version?: string; - /** - * Whether development mode is enabled for this service. - */ - development: boolean; - /** - * Whether the service is running in legacy compatibility mode. - */ - legacy: boolean; - /** - * Content of the service manifest of this service. - */ - manifest: FoxxManifest; - /** - * Internal checksum of the service's initial source bundle. - */ - checksum: string; - /** - * Options for this service. - */ - options: { - /** - * Configuration values set for this service. - */ - configuration: Record; - /** - * Service dependency configuration of this service. - */ - dependencies: Record; - }; -}; - -/** - * Object describing a configuration option of a Foxx service. - */ -export type ServiceConfiguration = { - /** - * Data type of the configuration value. - * - * **Note**: `"int"` and `"bool"` are historical synonyms for `"integer"` and - * `"boolean"`. The `"password"` type is synonymous with `"string"` but can - * be used to distinguish values which should not be displayed in plain text - * by software when managing the service. - */ - type: - | "integer" - | "boolean" - | "string" - | "number" - | "json" - | "password" - | "int" - | "bool"; - /** - * Current value of the configuration option as stored internally. - */ - currentRaw: any; - /** - * Processed current value of the configuration option as exposed in the - * service code. - */ - current: any; - /** - * Formatted name of the configuration option. - */ - title: string; - /** - * Human-readable description of the configuration option. - */ - description?: string; - /** - * Whether the configuration option must be set in order for the service - * to be operational. 
- */ - required: boolean; - /** - * Default value of the configuration option. - */ - default?: any; -}; - -/** - * Object describing a single-service dependency defined by a Foxx service. - */ -export type SingleServiceDependency = { - /** - * Whether this is a multi-service dependency. - */ - multiple: false; - /** - * Current mount point the dependency is resolved to. - */ - current?: string; - /** - * Formatted name of the dependency. - */ - title: string; - /** - * Name of the service the dependency expects to match. - */ - name: string; - /** - * Version of the service the dependency expects to match. - */ - version: string; - /** - * Human-readable description of the dependency. - */ - description?: string; - /** - * Whether the dependency must be matched in order for the service - * to be operational. - */ - required: boolean; -}; - -/** - * Object describing a multi-service dependency defined by a Foxx service. - */ -export type MultiServiceDependency = { - /** - * Whether this is a multi-service dependency. - */ - multiple: true; - /** - * Current mount points the dependency is resolved to. - */ - current?: string[]; - /** - * Formatted name of the dependency. - */ - title: string; - /** - * Name of the service the dependency expects to match. - */ - name: string; - /** - * Version of the service the dependency expects to match. - */ - version: string; - /** - * Human-readable description of the dependency. - */ - description?: string; - /** - * Whether the dependency must be matched in order for the service - * to be operational. - */ - required: boolean; -}; - -/** - * Test stats for a Foxx service's tests. - */ -export type ServiceTestStats = { - /** - * Total number of tests found. - */ - tests: number; - /** - * Number of tests that ran successfully. - */ - passes: number; - /** - * Number of tests that failed. - */ - failures: number; - /** - * Number of tests skipped or not executed. 
- */ - pending: number; - /** - * Total test duration in milliseconds. - */ - duration: number; -}; - -/** - * Test results for a single test case using the stream reporter. - */ -export type ServiceTestStreamTest = { - title: string; - fullTitle: string; - duration: number; - err?: string; -}; - -/** - * Test results for a Foxx service's tests using the stream reporter. - */ -export type ServiceTestStreamReport = ( - | ["start", { total: number }] - | ["pass", ServiceTestStreamTest] - | ["fail", ServiceTestStreamTest] - | ["end", ServiceTestStats] -)[]; - -/** - * Test results for a single test case using the suite reporter. - */ -export type ServiceTestSuiteTest = { - result: "pending" | "pass" | "fail"; - title: string; - duration: number; - err?: any; -}; - -/** - * Test results for a single test suite using the suite reporter. - */ -export type ServiceTestSuite = { - title: string; - suites: ServiceTestSuite[]; - tests: ServiceTestSuiteTest[]; -}; - -/** - * Test results for a Foxx service's tests using the suite reporter. - */ -export type ServiceTestSuiteReport = { - stats: ServiceTestStats; - suites: ServiceTestSuite[]; - tests: ServiceTestSuiteTest[]; -}; - -/** - * Test results for a single test case in XUnit format using the JSONML - * representation. 
- */ -export type ServiceTestXunitTest = - | ["testcase", { classname: string; name: string; time: number }] - | [ - "testcase", - { classname: string; name: string; time: number }, - ["failure", { message: string; type: string }, string], - ]; - +import * as util from "./lib/util.js"; +import * as logs from "./logs.js"; +import * as queries from "./queries.js"; +import * as routes from "./routes.js"; +import * as services from "./services.js"; +import * as transactions from "./transactions.js"; +import * as users from "./users.js"; +import * as views from "./views.js"; + +//#region Database operation options /** - * Test results for a Foxx service's tests in XUnit format using the JSONML - * representation. - */ -export type ServiceTestXunitReport = [ - "testsuite", - { - timestamp: number; - tests: number; - errors: number; - failures: number; - skip: number; - time: number; - }, - ...ServiceTestXunitTest[], -]; - -/** - * Test results for a Foxx service's tests in TAP format. - */ -export type ServiceTestTapReport = string[]; - -/** - * Test results for a single test case using the default reporter. - */ -export type ServiceTestDefaultTest = { - title: string; - fullTitle: string; - duration: number; - err?: string; -}; - -/** - * Test results for a Foxx service's tests using the default reporter. - */ -export type ServiceTestDefaultReport = { - stats: ServiceTestStats; - tests: ServiceTestDefaultTest[]; - pending: ServiceTestDefaultTest[]; - failures: ServiceTestDefaultTest[]; - passes: ServiceTestDefaultTest[]; -}; - -/** - * OpenAPI 2.0 description of a Foxx service. - */ -export type SwaggerJson = { - [key: string]: any; - info: { - title: string; - description: string; - version: string; - license: string; - }; - path: { - [key: string]: any; - }; -}; - -/** - * Access level for an ArangoDB user's access to a collection or database. - */ -export type AccessLevel = "rw" | "ro" | "none"; - -/** - * Properties of an ArangoDB user object. 
- */ -export type ArangoUser = { - /** - * ArangoDB username of the user. - */ - user: string; - /** - * Whether the ArangoDB user account is enabled and can authenticate. - */ - active: boolean; - /** - * Additional information to store about this user. - */ - extra: Record; -}; - -/** - * Options for creating an ArangoDB user. - */ -export type CreateUserOptions = { - /** - * ArangoDB username of the user. - */ - user: string; - /** - * Password the ArangoDB user will use for authentication. - */ - passwd: string; - /** - * Whether the ArangoDB user account is enabled and can authenticate. - * - * Default: `true` - */ - active?: boolean; - /** - * Additional information to store about this user. - * - * Default: `{}` - */ - extra?: Record; -}; - -/** - * Options for modifying an ArangoDB user. + * Options for creating a database. + * + * See {@link Database#createDatabase}. */ -export type UserOptions = { - /** - * Password the ArangoDB user will use for authentication. - */ - passwd: string; +export type CreateDatabaseOptions = { /** - * Whether the ArangoDB user account is enabled and can authenticate. - * - * Default: `true` + * Database users to create with the database. */ - active?: boolean; + users?: users.CreateDatabaseUserOptions[]; /** - * Additional information to store about this user. - * - * Default: `{}` + * (Cluster only.) The sharding method to use for new collections in the + * database. */ - extra?: Record; -}; - -/** - * Options for accessing or manipulating access levels. - */ -export type UserAccessLevelOptions = { + sharding?: "" | "flexible" | "single"; /** - * The database to access or manipulate the access level of. + * (Cluster only.) Default replication factor for new collections in this + * database. * - * If `collection` is an `ArangoCollection`, this option defaults to the - * database the collection is contained in. Otherwise this option defaults to - * the current database. 
- */ - database?: Database | string; - /** - * The collection to access or manipulate the access level of. - */ - collection?: ArangoCollection | string; -}; - -/** - * An object providing methods for accessing queue time metrics of the most - * recently received server responses if the server supports this feature. - */ -export type QueueTimeMetrics = { - /** - * Returns the queue time of the most recently received response in seconds. - */ - getLatest: () => number | undefined; - /** - * Returns a list of the most recently received queue time values as tuples - * of the timestamp of the response being processed in milliseconds and the - * queue time in seconds. + * Setting this to `1` disables replication. Setting this to `"satellite"` + * will replicate to every DBServer. */ - getValues: () => [number, number][]; + replicationFactor?: "satellite" | number; /** - * Returns the average queue time of the most recently received responses - * in seconds. + * (Cluster only.) Default write concern for new collections created in this + * database. */ - getAvg: () => number; + writeConcern?: number; }; +//#endregion +//#region DatabaseDescription /** - * (Enterprise Edition only.) Options for creating a hot backup. + * Object describing a database. + * + * See {@link Database#get}. */ -export type HotBackupOptions = { - /** - * If set to `true` and no global transaction lock can be acquired within the - * given timeout, a possibly inconsistent backup is taken. - * - * Default: `false` - */ - allowInconsistent?: boolean; - /** - * (Enterprise Edition cluster only.) If set to `true` and no global - * transaction lock can be acquired within the given timeout, all running - * transactions are forcefully aborted to ensure that a consistent backup - * can be created. - * - * Default: `false`. - */ - force?: boolean; +export type DatabaseDescription = { /** - * Label to appended to the backup's identifier. - * - * Default: If omitted or empty, a UUID will be generated. 
+ * Name of the database. */ - label?: string; + name: string; /** - * Time in seconds that the operation will attempt to get a consistent - * snapshot. - * - * Default: `120`. + * Unique identifier of the database. */ - timeout?: number; -}; - -/** - * (Enterprise Edition only.) Result of a hot backup. - */ -export type HotBackupResult = { id: string; - potentiallyInconsistent: boolean; - sizeInBytes: number; - datetime: string; - nrDBServers: number; - nrFiles: number; -}; - -/** - * (Enterprise Edition only.) List of known hot backups. - */ -export type HotBackupList = { - server: string; - list: Record< - string, - HotBackupResult & { - version: string; - keys: any[]; - available: boolean; - nrPiecesPresent: number; - countIncludesFilesOnly: boolean; - } - >; -}; - -/** - * Numeric representation of the logging level of a log entry. - */ -export enum LogLevel { - FATAL, - ERROR, - WARNING, - INFO, - DEBUG, -} - -/** - * String representation of the logging level of a log entry. - */ -export type LogLevelLabel = "FATAL" | "ERROR" | "WARNING" | "INFO" | "DEBUG"; - -/** - * Logging level setting. - */ -export type LogLevelSetting = LogLevelLabel | "DEFAULT"; - -/** - * Log sorting direction, ascending or descending. - */ -export type LogSortDirection = "asc" | "desc"; - -/** - * Options for retrieving log entries. - */ -export type LogEntriesOptions = { /** - * Maximum log level of the entries to retrieve. - * - * Default: `INFO`. - */ - upto?: LogLevel | LogLevelLabel | Lowercase; - /** - * If set, only log entries with this log level will be returned. - */ - level?: LogLevel | LogLevelLabel | Lowercase; - /** - * If set, only log entries with an `lid` greater than or equal to this value - * will be returned. + * File system path of the database. */ - start?: number; + path: string; /** - * If set, only this many entries will be returned. + * Whether the database is the system database. 
*/ - size?: number; + isSystem: boolean; /** - * If set, this many log entries will be skipped. + * (Cluster only.) The sharding method to use for new collections in the + * database. */ - offset?: number; + sharding?: "" | "flexible" | "single"; /** - * If set, only log entries containing the specified text will be returned. + * (Cluster only.) Default replication factor for new collections in this + * database. */ - search?: string; + replicationFactor?: "satellite" | number; /** - * If set to `"desc"`, log entries will be returned in reverse chronological - * order. - * - * Default: `"asc"`. + * (Cluster only.) Default write concern for new collections created in this + * database. */ - sort?: LogSortDirection; -}; - -/** - * An object representing a single log entry. - */ -export type LogMessage = { - id: number; - topic: string; - level: LogLevelLabel; - date: string; - message: string; -}; - -/** - * An object representing a list of log entries. - */ -export type LogEntries = { - totalAmount: number; - lid: number[]; - topic: string[]; - level: LogLevel[]; - timestamp: number[]; - text: string[]; + writeConcern?: number; }; +//#endregion /** * @internal @@ -2239,25 +115,37 @@ type TrappedError = { /** * @internal */ -type TrappedRequest = { +type TrappedRequest = { error?: false; jobId: string; - onResolve: (res: ArangojsResponse) => void; + onResolve: (res: connection.ProcessedResponse) => void; onReject: (error: any) => void; }; +//#region Database class +/** + * Indicates whether the given value represents a {@link Database}. + * + * @param database - A value that might be a database. + */ +export function isArangoDatabase(database: any): database is Database { + return Boolean(database && database.isArangoDatabase); +} + /** * An object representing a single ArangoDB database. All arangojs collections, * cursors, analyzers and so on are linked to a `Database` object. 
*/ export class Database { - protected _connection: Connection; + protected _connection: connection.Connection; protected _name: string; - protected _analyzers = new Map(); - protected _collections = new Map(); - protected _graphs = new Map(); - protected _views = new Map(); - protected _trapRequest?: (trapped: TrappedError | TrappedRequest) => void; + protected _analyzers = new Map(); + protected _collections = new Map(); + protected _graphs = new Map(); + protected _views = new Map(); + protected _trapRequest?: ( + trapped: TrappedError | TrappedRequest + ) => void; /** * Creates a new `Database` instance with its own connection pool. @@ -2275,14 +163,14 @@ export class Database { * }); * ``` */ - constructor(config?: Config); + constructor(config?: configuration.ConfigOptions); /** * Creates a new `Database` instance with its own connection pool. * * See also {@link Database#database}. * * @param url - Base URL of the ArangoDB server or list of server URLs. - * Equivalent to the `url` option in {@link connection.Config}. + * Equivalent to the `url` option in {@link configuration.ConfigOptions}. * * @example * ```js @@ -2296,7 +184,11 @@ export class Database { */ constructor(database: Database, name?: string); constructor( - configOrDatabase: string | string[] | Config | Database = {}, + configOrDatabase: + | string + | string[] + | configuration.ConfigOptions + | Database = {}, name?: string ) { if (isArangoDatabase(configOrDatabase)) { @@ -2312,7 +204,7 @@ export class Database { typeof config === "string" || Array.isArray(config) ? 
{ databaseName: name, url: config } : config; - this._connection = new Connection(options); + this._connection = new connection.Connection(options); this._name = databaseName || "_system"; } } @@ -2335,7 +227,7 @@ export class Database { } /** - * Returns a new {@link route.Route} instance for the given path (relative to the + * Returns a new {@link routes.Route} instance for the given path (relative to the * database) that can be used to perform arbitrary HTTP requests. * * @param path - The database-relative URL of the route. Defaults to the @@ -2356,8 +248,11 @@ export class Database { * // with JSON request body '{"username": "admin", "password": "hunter2"}' * ``` */ - route(path?: string, headers?: Headers | Record): Route { - return new Route(this, path, headers); + route( + path?: string, + headers?: Headers | Record + ): routes.Route { + return new routes.Route(this, path, headers); } /** @@ -2365,56 +260,47 @@ export class Database { * * Performs an arbitrary HTTP request against the database. * - * If `absolutePath` is set to `true`, the database path will not be - * automatically prepended to the `basePath`. - * - * @param ReturnType - Return type to use. Defaults to the response object type. + * @param BodyType - Type of the expected response body. + * @param ReturnType - Type the response body will be transformed to. * @param options - Options for this request. * @param transform - An optional function to transform the low-level * response object to a more useful return value. */ - async request( - options: RequestOptions & { absolutePath?: boolean }, - transform?: (res: ArangojsResponse) => ReturnType + async request( + options: connection.RequestOptions, + transform?: (res: connection.ProcessedResponse) => ReturnType ): Promise; /** * @internal * * Performs an arbitrary HTTP request against the database. * - * If `absolutePath` is set to `true`, the database path will not be - * automatically prepended to the `basePath`. 
- * + * @param BodyType - Type of the expected response body. * @param options - Options for this request. * @param transform - If set to `false`, the raw response object will be * returned. */ - async request( - options: RequestOptions & { absolutePath?: boolean }, + async request( + options: connection.RequestOptions, transform: false - ): Promise; - async request( - { - absolutePath = false, - basePath, - ...opts - }: RequestOptions & { absolutePath?: boolean }, - transform: false | ((res: ArangojsResponse) => ReturnType) = (res) => - res.parsedBody + ): Promise>; + async request( + { pathname, ...opts }: connection.RequestOptions, + transform: + | false + | ((res: connection.ProcessedResponse) => ReturnType) = (res) => + res.parsedBody as ReturnType ): Promise { - if (!absolutePath) { - basePath = `/_db/${encodeURIComponent(this._name)}${basePath || ""}`; - } + pathname = util.joinPath("_db", encodeURIComponent(this._name), pathname); if (this._trapRequest) { const trap = this._trapRequest; this._trapRequest = undefined; return new Promise(async (resolveRequest, rejectRequest) => { - const options = { ...opts }; - options.headers = new Headers(options.headers); - options.headers.set("x-arango-async", "store"); - let jobRes: ArangojsResponse; + opts.headers = new Headers(opts.headers); + opts.headers.set("x-arango-async", "store"); + let jobRes: connection.ProcessedResponse; try { - jobRes = await this._connection.request({ basePath, ...options }); + jobRes = await this._connection.request({ pathname, ...opts }); } catch (e) { trap({ error: true }); rejectRequest(e); @@ -2436,7 +322,7 @@ export class Database { }); } return this._connection.request( - { basePath, ...opts }, + { pathname, ...opts }, transform || undefined ); } @@ -2444,7 +330,7 @@ export class Database { /** * Updates the URL list by requesting a list of all coordinators in the * cluster and adding any endpoints not initially specified in the - * {@link connection.Config}. 
+ * {@link configuration.ConfigOptions}. * * For long-running processes communicating with an ArangoDB cluster it is * recommended to run this method periodically (e.g. once per hour) to make @@ -2469,7 +355,7 @@ export class Database { */ async acquireHostList(overwrite = false): Promise { const urls: string[] = await this.request( - { path: "/_api/cluster/endpoints" }, + { pathname: "/_api/cluster/endpoints" }, (res) => res.parsedBody.endpoints.map((endpoint: any) => endpoint.endpoint) ); @@ -2524,7 +410,7 @@ export class Database { * const analyzer = db.analyzer("my-analyzer"); * await analyzer.create(); * await db.waitForPropagation( - * { path: `/_api/analyzer/${encodeURIComponent(analyzer.name)}` }, + * { pathname: `/_api/analyzer/${encodeURIComponent(analyzer.name)}` }, * 30000 * ); * // Analyzer has been propagated to all coordinators and can safely be used @@ -2534,17 +420,21 @@ export class Database { * @param timeout - Maximum number of milliseconds to wait for propagation. */ async waitForPropagation( - request: RequestOptions, + request: connection.RequestOptions, timeout?: number ): Promise; async waitForPropagation( - { basePath, ...request }: RequestOptions, + { pathname, ...request }: connection.RequestOptions, timeout?: number ): Promise { await this._connection.waitForPropagation( { ...request, - basePath: `/_db/${encodeURIComponent(this._name)}${basePath || ""}`, + pathname: util.joinPath( + "_db", + encodeURIComponent(this._name), + pathname + ), }, timeout ); @@ -2554,7 +444,7 @@ export class Database { * Methods for accessing the server-reported queue times of the mostly * recently received responses. 
*/ - get queueTime(): QueueTimeMetrics { + get queueTime(): administration.QueueTimeMetrics { return this._connection.queueTime; } @@ -2628,7 +518,7 @@ export class Database { return this.request( { method: "POST", - path: "/_open/auth", + pathname: "/_open/auth", body: { username, password }, }, (res) => { @@ -2656,7 +546,7 @@ export class Database { return this.request( { method: "POST", - path: "/_open/auth/renew", + pathname: "/_open/auth/renew", }, (res) => { if (!res.parsedBody.jwt) return null; @@ -2684,10 +574,10 @@ export class Database { * // server: description of the server * ``` */ - version(details?: boolean): Promise { + version(details?: boolean): Promise { return this.request({ method: "GET", - path: "/_api/version", + pathname: "/_api/version", search: { details }, }); } @@ -2703,10 +593,10 @@ export class Database { * // name: name of the storage engine * ``` */ - engine(): Promise { + engine(): Promise { return this.request({ method: "GET", - path: "/_api/engine", + pathname: "/_api/engine", }); } @@ -2721,10 +611,10 @@ export class Database { * // the stats object contains the storage engine stats * ``` */ - engineStats(): Promise { + engineStats(): Promise { return this.request({ method: "GET", - path: "/_api/engine/stats", + pathname: "/_api/engine/stats", }); } @@ -2736,7 +626,7 @@ export class Database { return this.request( { method: "GET", - path: "/_admin/time", + pathname: "/_admin/time", }, (res) => res.parsedBody.time * 1000 ); @@ -2754,10 +644,10 @@ export class Database { * // serverInfo: detailed information about the server * ``` */ - status(): Promise { + status(): Promise { return this.request({ method: "GET", - path: "/_admin/status", + pathname: "/_admin/status", }); } @@ -2775,18 +665,23 @@ export class Database { * // availability is either "default", "readonly", or false * ``` */ - async availability(graceful = false): Promise { + async availability( + graceful = false + ): Promise { try { return this.request( { method: 
"GET", - path: "/_admin/server/availability", + pathname: "/_admin/server/availability", }, (res) => res.parsedBody.mode ); } catch (e) { if (graceful) return false; - if ((isArangoError(e) || e instanceof HttpError) && e.code === 503) { + if ( + (errors.isArangoError(e) || e instanceof errors.HttpError) && + e.code === 503 + ) { return false; } throw e; @@ -2798,20 +693,22 @@ export class Database { * * Note that this API may reveal sensitive data about the deployment. */ - supportInfo(): Promise { + supportInfo(): Promise< + administration.SingleServerSupportInfo | administration.ClusterSupportInfo + > { return this.request({ method: "GET", - path: "/_admin/support-info", + pathname: "/_admin/support-info", }); } /** * Fetches the license information and status of an Enterprise Edition server. */ - getLicense(): Promise { + getLicense(): Promise { return this.request({ method: "GET", - path: "/_admin/license", + pathname: "/_admin/license", }); } @@ -2826,7 +723,7 @@ export class Database { return this.request( { method: "PUT", - path: "/_admin/license", + pathname: "/_admin/license", body: license, search: { force }, }, @@ -2839,11 +736,11 @@ export class Database { * * @param options - Options for compacting the databases. 
*/ - compact(options: CompactOptions = {}): Promise { + compact(options: administration.CompactOptions = {}): Promise { return this.request( { method: "PUT", - path: "/_admin/compact", + pathname: "/_admin/compact", body: options, }, () => undefined @@ -2857,7 +754,7 @@ export class Database { return this.request( { method: "DELETE", - path: "/_admin/shutdown", + pathname: "/_admin/shutdown", }, () => undefined ); @@ -2874,9 +771,9 @@ export class Database { * const imbalance = await db.getClusterImbalance(); * ``` */ - getClusterImbalance(): Promise { + getClusterImbalance(): Promise { return this.request( - { path: "/_admin/cluster/rebalance" }, + { pathname: "/_admin/cluster/rebalance" }, (res) => res.parsedBody.result ); } @@ -2897,15 +794,15 @@ export class Database { * ``` */ computeClusterRebalance( - opts: ClusterRebalanceOptions - ): Promise { + options: cluster.ClusterRebalanceOptions + ): Promise { return this.request( { method: "POST", - path: "/_admin/cluster/rebalance", + pathname: "/_admin/cluster/rebalance", body: { version: 1, - ...opts, + ...options, }, }, (res) => res.parsedBody.result @@ -2927,10 +824,12 @@ export class Database { * } * ``` */ - executeClusterRebalance(moves: ClusterRebalanceMove[]): Promise { + executeClusterRebalance( + moves: cluster.ClusterRebalanceMove[] + ): Promise { return this.request({ method: "POST", - path: "/_admin/cluster/rebalance/execute", + pathname: "/_admin/cluster/rebalance/execute", body: { version: 1, moves, @@ -2955,11 +854,11 @@ export class Database { * ``` */ rebalanceCluster( - options: ClusterRebalanceOptions - ): Promise { + options: cluster.ClusterRebalanceOptions + ): Promise { return this.request({ method: "PUT", - path: "/_admin/cluster/rebalance", + pathname: "/_admin/cluster/rebalance", body: { version: 1, ...options, @@ -2997,9 +896,9 @@ export class Database { * // the database exists * ``` */ - get(): Promise { + get(): Promise { return this.request( - { path: "/_api/database/current" }, + { 
pathname: "/_api/database/current" }, (res) => res.parsedBody.result ); } @@ -3019,7 +918,7 @@ export class Database { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === DATABASE_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === DATABASE_NOT_FOUND) { return false; } throw err; @@ -3062,11 +961,13 @@ export class Database { */ createDatabase( databaseName: string, - users: CreateDatabaseUser[] + users: users.CreateDatabaseUserOptions[] ): Promise; createDatabase( databaseName: string, - usersOrOptions: CreateDatabaseUser[] | CreateDatabaseOptions = {} + usersOrOptions: + | users.CreateDatabaseUserOptions[] + | CreateDatabaseOptions = {} ): Promise { const { users, ...options } = Array.isArray(usersOrOptions) ? { users: usersOrOptions } @@ -3074,7 +975,7 @@ export class Database { return this.request( { method: "POST", - path: "/_api/database", + pathname: "/_api/database", body: { name: databaseName, users, options }, }, () => this.database(databaseName) @@ -3096,7 +997,7 @@ export class Database { */ listDatabases(): Promise { return this.request( - { path: "/_api/database" }, + { pathname: "/_api/database" }, (res) => res.parsedBody.result ); } @@ -3117,7 +1018,7 @@ export class Database { */ listUserDatabases(): Promise { return this.request( - { path: "/_api/database/user" }, + { pathname: "/_api/database/user" }, (res) => res.parsedBody.result ); } @@ -3137,7 +1038,7 @@ export class Database { * ``` */ databases(): Promise { - return this.request({ path: "/_api/database" }, (res) => + return this.request({ pathname: "/_api/database" }, (res) => (res.parsedBody.result as string[]).map((databaseName) => this.database(databaseName) ) @@ -3159,7 +1060,7 @@ export class Database { * ``` */ userDatabases(): Promise { - return this.request({ path: "/_api/database/user" }, (res) => + return this.request({ pathname: "/_api/database/user" }, (res) => (res.parsedBody.result as string[]).map((databaseName) => 
this.database(databaseName) ) @@ -3182,7 +1083,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/database/${encodeURIComponent(databaseName)}`, + pathname: `/_api/database/${encodeURIComponent(databaseName)}`, }, (res) => res.parsedBody.result ); @@ -3194,10 +1095,13 @@ export class Database { * Returns a `Collection` instance for the given collection name. * * In TypeScript the collection implements both the - * {@link collection.DocumentCollection} and {@link collection.EdgeCollection} + * {@link collections.DocumentCollection} and {@link collections.EdgeCollection} * interfaces and can be cast to either type to enforce a stricter API. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent document contents returned by + * the server (including computed properties). + * @param EntryInputType - Type to represent document contents passed when + * inserting or replacing documents (without computed properties). * @param collectionName - Name of the edge collection. * * @example @@ -3229,14 +1133,18 @@ export class Database { * const edges = db.collection("friends") as EdgeCollection; * ``` */ - collection = any>( + collection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string - ): DocumentCollection & EdgeCollection { + ): collections.DocumentCollection & + collections.EdgeCollection { collectionName = collectionName; if (!this._collections.has(collectionName)) { this._collections.set( collectionName, - new Collection(this, collectionName) + new collections.Collection(this, collectionName) ); } return this._collections.get(collectionName)!; @@ -3244,9 +1152,12 @@ export class Database { /** * Creates a new collection with the given `collectionName` and `options`, - * then returns a {@link collection.DocumentCollection} instance for the new collection. 
+ * then returns a {@link collections.DocumentCollection} instance for the new collection. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent document contents returned by + * the server (including computed properties). + * @param EntryInputType - Type to represent document contents passed when + * inserting or replacing documents (without computed properties). * @param collectionName - Name of the new collection. * @param options - Options for creating the collection. * @@ -3265,18 +1176,24 @@ export class Database { * const documents = db.createCollection("persons"); * ``` */ - async createCollection = any>( + async createCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string, - options?: CreateCollectionOptions & { - type?: CollectionType.DOCUMENT_COLLECTION; + options?: collections.CreateCollectionOptions & { + type?: collections.CollectionType.DOCUMENT_COLLECTION; } - ): Promise>; + ): Promise>; /** * Creates a new edge collection with the given `collectionName` and - * `options`, then returns an {@link collection.EdgeCollection} instance for the new + * `options`, then returns an {@link collections.EdgeCollection} instance for the new * edge collection. * - * @param T - Type to use for edge document data. Defaults to `any`. + * @param EntryResultType - Type to represent edge document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent edge document contents passed + * when inserting or replacing documents (without computed properties). * @param collectionName - Name of the new collection. * @param options - Options for creating the collection. 
* @@ -3300,16 +1217,27 @@ export class Database { * }); * ``` */ - async createCollection = any>( + async createCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string, - options: CreateCollectionOptions & { - type: CollectionType.EDGE_COLLECTION; + options: collections.CreateCollectionOptions & { + type: collections.CollectionType.EDGE_COLLECTION; } - ): Promise>; - async createCollection = any>( + ): Promise>; + async createCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string, - options?: CreateCollectionOptions & { type?: CollectionType } - ): Promise & EdgeCollection> { + options?: collections.CreateCollectionOptions & { + type?: collections.CollectionType; + } + ): Promise< + collections.DocumentCollection & + collections.EdgeCollection + > { const collection = this.collection(collectionName); await collection.create(options); return collection; @@ -3317,13 +1245,16 @@ export class Database { /** * Creates a new edge collection with the given `collectionName` and - * `options`, then returns an {@link collection.EdgeCollection} instance for the new + * `options`, then returns an {@link collections.EdgeCollection} instance for the new * edge collection. * * This is a convenience method for calling {@link Database#createCollection} * with `options.type` set to `EDGE_COLLECTION`. * - * @param T - Type to use for edge document data. Defaults to `any`. + * @param EntryResultType - Type to represent edge document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent edge document contents passed + * when inserting or replacing documents (without computed properties). * @param collectionName - Name of the new collection. * @param options - Options for creating the collection. 
* @@ -3343,13 +1274,16 @@ export class Database { * const edges = db.createEdgeCollection("friends"); * ``` */ - async createEdgeCollection = any>( + async createEdgeCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, + >( collectionName: string, - options?: CreateCollectionOptions - ): Promise> { + options?: collections.CreateCollectionOptions + ): Promise> { return this.createCollection(collectionName, { ...options, - type: CollectionType.EDGE_COLLECTION, + type: collections.CollectionType.EDGE_COLLECTION, }); } @@ -3368,10 +1302,10 @@ export class Database { async renameCollection( collectionName: string, newName: string - ): Promise> { + ): Promise> { const result = await this.request({ method: "PUT", - path: `/_api/collection/${encodeURIComponent(collectionName)}/rename`, + pathname: `/_api/collection/${encodeURIComponent(collectionName)}/rename`, body: { name: newName }, }); this._collections.delete(collectionName); @@ -3404,10 +1338,10 @@ export class Database { */ listCollections( excludeSystem: boolean = true - ): Promise { + ): Promise { return this.request( { - path: "/_api/collection", + pathname: "/_api/collection", search: { excludeSystem }, }, (res) => res.parsedBody.result @@ -3419,7 +1353,7 @@ export class Database { * `Collection` instances. * * In TypeScript these instances implement both the - * {@link collection.DocumentCollection} and {@link collection.EdgeCollection} + * {@link collections.DocumentCollection} and {@link collections.EdgeCollection} * interfaces and can be cast to either type to enforce a stricter API. * * See also {@link Database#listCollections}. 
@@ -3444,7 +1378,9 @@ export class Database { */ async collections( excludeSystem: boolean = true - ): Promise> { + ): Promise< + Array + > { const collections = await this.listCollections(excludeSystem); return collections.map((data) => this.collection(data.name)); } @@ -3452,7 +1388,7 @@ export class Database { //#region graphs /** - * Returns a {@link graph.Graph} instance representing the graph with the given + * Returns a {@link graphs.Graph} instance representing the graph with the given * `graphName`. * * @param graphName - Name of the graph. @@ -3463,16 +1399,16 @@ export class Database { * const graph = db.graph("some-graph"); * ``` */ - graph(graphName: string): Graph { + graph(graphName: string): graphs.Graph { if (!this._graphs.has(graphName)) { - this._graphs.set(graphName, new Graph(this, graphName)); + this._graphs.set(graphName, new graphs.Graph(this, graphName)); } return this._graphs.get(graphName)!; } /** * Creates a graph with the given `graphName` and `edgeDefinitions`, then - * returns a {@link graph.Graph} instance for the new graph. + * returns a {@link graphs.Graph} instance for the new graph. * * @param graphName - Name of the graph to be created. * @param edgeDefinitions - An array of edge definitions. 
@@ -3480,9 +1416,9 @@ export class Database { */ async createGraph( graphName: string, - edgeDefinitions: EdgeDefinitionOptions[], - options?: CreateGraphOptions - ): Promise { + edgeDefinitions: graphs.EdgeDefinitionOptions[], + options?: graphs.CreateGraphOptions + ): Promise { const graph = this.graph(graphName); await graph.create(edgeDefinitions, options); return graph; @@ -3501,15 +1437,15 @@ export class Database { * // graphs is an array of graph descriptions * ``` */ - listGraphs(): Promise { + listGraphs(): Promise { return this.request( - { path: "/_api/gharial" }, + { pathname: "/_api/gharial" }, (res) => res.parsedBody.graphs ); } /** - * Fetches all graphs from the database and returns an array of {@link graph.Graph} + * Fetches all graphs from the database and returns an array of {@link graphs.Graph} * instances for those graphs. * * See also {@link Database#listGraphs}. @@ -3521,7 +1457,7 @@ export class Database { * // graphs is an array of Graph instances * ``` */ - async graphs(): Promise { + async graphs(): Promise { const graphs = await this.listGraphs(); return graphs.map((data: any) => this.graph(data._key)); } @@ -3529,7 +1465,7 @@ export class Database { //#region views /** - * Returns a {@link view.View} instance for the given `viewName`. + * Returns a {@link views.View} instance for the given `viewName`. * * @param viewName - Name of the ArangoSearch or SearchAlias View. * @@ -3539,16 +1475,16 @@ export class Database { * const view = db.view("potatoes"); * ``` */ - view(viewName: string): View { + view(viewName: string): views.View { if (!this._views.has(viewName)) { - this._views.set(viewName, new View(this, viewName)); + this._views.set(viewName, new views.View(this, viewName)); } return this._views.get(viewName)!; } /** * Creates a new View with the given `viewName` and `options`, then returns a - * {@link view.View} instance for the new View. + * {@link views.View} instance for the new View. * * @param viewName - Name of the View. 
* @param options - An object defining the properties of the View. @@ -3562,8 +1498,8 @@ export class Database { */ async createView( viewName: string, - options: CreateViewOptions - ): Promise { + options: views.CreateViewOptions + ): Promise { const view = this.view(viewName); await view.create(options); return view; @@ -3572,7 +1508,7 @@ export class Database { /** * Renames the view `viewName` to `newName`. * - * Additionally removes any stored {@link view.View} instance for `viewName` from + * Additionally removes any stored {@link views.View} instance for `viewName` from * the `Database` instance's internal cache. * * **Note**: Renaming views may not be supported when ArangoDB is running in @@ -3584,10 +1520,10 @@ export class Database { async renameView( viewName: string, newName: string - ): Promise> { + ): Promise> { const result = await this.request({ method: "PUT", - path: `/_api/view/${encodeURIComponent(viewName)}/rename`, + pathname: `/_api/view/${encodeURIComponent(viewName)}/rename`, body: { name: newName }, }); this._views.delete(viewName); @@ -3608,13 +1544,16 @@ export class Database { * // views is an array of View descriptions * ``` */ - listViews(): Promise { - return this.request({ path: "/_api/view" }, (res) => res.parsedBody.result); + listViews(): Promise { + return this.request( + { pathname: "/_api/view" }, + (res) => res.parsedBody.result + ); } /** * Fetches all Views from the database and returns an array of - * {@link view.View} instances + * {@link views.View} instances * for the Views. * * See also {@link Database#listViews}. 
@@ -3626,7 +1565,7 @@ export class Database { * // views is an array of ArangoSearch View instances * ``` */ - async views(): Promise { + async views(): Promise { const views = await this.listViews(); return views.map((data) => this.view(data.name)); } @@ -3634,7 +1573,7 @@ export class Database { //#region analyzers /** - * Returns an {@link analyzer.Analyzer} instance representing the Analyzer with the + * Returns an {@link analyzers.Analyzer} instance representing the Analyzer with the * given `analyzerName`. * * @example @@ -3644,16 +1583,19 @@ export class Database { * const info = await analyzer.get(); * ``` */ - analyzer(analyzerName: string): Analyzer { + analyzer(analyzerName: string): analyzers.Analyzer { if (!this._analyzers.has(analyzerName)) { - this._analyzers.set(analyzerName, new Analyzer(this, analyzerName)); + this._analyzers.set( + analyzerName, + new analyzers.Analyzer(this, analyzerName) + ); } return this._analyzers.get(analyzerName)!; } /** * Creates a new Analyzer with the given `analyzerName` and `options`, then - * returns an {@link analyzer.Analyzer} instance for the new Analyzer. + * returns an {@link analyzers.Analyzer} instance for the new Analyzer. * * @param analyzerName - Name of the Analyzer. * @param options - An object defining the properties of the Analyzer. 
@@ -3667,8 +1609,8 @@ export class Database { */ async createAnalyzer( analyzerName: string, - options: CreateAnalyzerOptions - ): Promise { + options: analyzers.CreateAnalyzerOptions + ): Promise { const analyzer = this.analyzer(analyzerName); await analyzer.create(options); return analyzer; @@ -3687,16 +1629,16 @@ export class Database { * // analyzers is an array of Analyzer descriptions * ``` */ - listAnalyzers(): Promise { + listAnalyzers(): Promise { return this.request( - { path: "/_api/analyzer" }, + { pathname: "/_api/analyzer" }, (res) => res.parsedBody.result ); } /** * Fetches all Analyzers visible in the database and returns an array of - * {@link analyzer.Analyzer} instances for those Analyzers. + * {@link analyzers.Analyzer} instances for those Analyzers. * * See also {@link Database#listAnalyzers}. * @@ -3707,7 +1649,7 @@ export class Database { * // analyzers is an array of Analyzer instances * ``` */ - async analyzers(): Promise { + async analyzers(): Promise { const analyzers = await this.listAnalyzers(); return analyzers.map((data) => this.analyzer(data.name)); } @@ -3725,10 +1667,10 @@ export class Database { * // users is an array of user objects * ``` */ - listUsers(): Promise { + listUsers(): Promise { return this.request( { - path: "/_api/user", + pathname: "/_api/user", }, (res) => res.parsedBody.result ); @@ -3746,9 +1688,11 @@ export class Database { * // user is the user object for the user named "steve" * ``` */ - getUser(username: string): Promise> { + getUser( + username: string + ): Promise> { return this.request({ - path: `/_api/user/${encodeURIComponent(username)}`, + pathname: `/_api/user/${encodeURIComponent(username)}`, }); } @@ -3768,7 +1712,7 @@ export class Database { createUser( username: string, passwd: string - ): Promise>; + ): Promise>; /** * Creates a new ArangoDB user with the given options. 
* @@ -3784,19 +1728,19 @@ export class Database { */ createUser( username: string, - options: UserOptions - ): Promise>; + options: users.UserOptions + ): Promise>; createUser( username: string, - options: string | UserOptions - ): Promise> { + options: string | users.UserOptions + ): Promise> { if (typeof options === "string") { options = { passwd: options }; } return this.request( { method: "POST", - path: "/_api/user", + pathname: "/_api/user", body: { user: username, ...options }, }, (res) => res.parsedBody @@ -3819,7 +1763,7 @@ export class Database { updateUser( username: string, passwd: string - ): Promise>; + ): Promise>; /** * Updates the ArangoDB user with the new options. * @@ -3835,19 +1779,19 @@ export class Database { */ updateUser( username: string, - options: Partial - ): Promise>; + options: Partial + ): Promise>; updateUser( username: string, - options: string | Partial - ): Promise> { + options: string | Partial + ): Promise> { if (typeof options === "string") { options = { passwd: options }; } return this.request( { method: "PATCH", - path: `/_api/user/${encodeURIComponent(username)}`, + pathname: `/_api/user/${encodeURIComponent(username)}`, body: options, }, (res) => res.parsedBody @@ -3869,15 +1813,15 @@ export class Database { */ replaceUser( username: string, - options: UserOptions - ): Promise> { + options: users.UserOptions + ): Promise> { if (typeof options === "string") { options = { passwd: options }; } return this.request( { method: "PUT", - path: `/_api/user/${encodeURIComponent(username)}`, + pathname: `/_api/user/${encodeURIComponent(username)}`, body: options, }, (res) => res.parsedBody @@ -3896,15 +1840,13 @@ export class Database { * // The user "steve" has been removed * ``` */ - removeUser( - username: string - ): Promise>> { + removeUser(username: string): Promise { return this.request( { method: "DELETE", - path: `/_api/user/${encodeURIComponent(username)}`, + pathname: `/_api/user/${encodeURIComponent(username)}`, }, - 
(res) => res.parsedBody + () => undefined ); } @@ -3979,22 +1921,24 @@ export class Database { */ getUserAccessLevel( username: string, - { database, collection }: UserAccessLevelOptions - ): Promise { + { database, collection }: users.UserAccessLevelOptions + ): Promise { const databaseName = isArangoDatabase(database) ? database.name : (database ?? - (isArangoCollection(collection) - ? ((collection as any)._db as Database).name + (collection instanceof collections.Collection + ? collection.database.name : this._name)); const suffix = collection ? `/${encodeURIComponent( - isArangoCollection(collection) ? collection.name : collection + collections.isArangoCollection(collection) + ? collection.name + : collection )}` : ""; return this.request( { - path: `/_api/user/${encodeURIComponent( + pathname: `/_api/user/${encodeURIComponent( username )}/database/${encodeURIComponent(databaseName)}${suffix}`, }, @@ -4080,23 +2024,25 @@ export class Database { database, collection, grant, - }: UserAccessLevelOptions & { grant: AccessLevel } - ): Promise>> { + }: users.UserAccessLevelOptions & { grant: users.AccessLevel } + ): Promise>> { const databaseName = isArangoDatabase(database) ? database.name : (database ?? - (isArangoCollection(collection) - ? ((collection as any)._db as Database).name + (collection instanceof collections.Collection + ? collection.database.name : this._name)); const suffix = collection ? `/${encodeURIComponent( - isArangoCollection(collection) ? collection.name : collection + collections.isArangoCollection(collection) + ? 
collection.name + : collection )}` : ""; return this.request( { method: "PUT", - path: `/_api/user/${encodeURIComponent( + pathname: `/_api/user/${encodeURIComponent( username )}/database/${encodeURIComponent(databaseName)}${suffix}`, body: { grant }, @@ -4170,23 +2116,25 @@ export class Database { */ clearUserAccessLevel( username: string, - { database, collection }: UserAccessLevelOptions - ): Promise>> { + { database, collection }: users.UserAccessLevelOptions + ): Promise>> { const databaseName = isArangoDatabase(database) ? database.name : (database ?? - (isArangoCollection(collection) - ? ((collection as any)._db as Database).name + (collection instanceof collections.Collection + ? collection.database.name : this._name)); const suffix = collection ? `/${encodeURIComponent( - isArangoCollection(collection) ? collection.name : collection + collections.isArangoCollection(collection) + ? collection.name + : collection )}` : ""; return this.request( { method: "DELETE", - path: `/_api/user/${encodeURIComponent( + pathname: `/_api/user/${encodeURIComponent( username )}/database/${encodeURIComponent(databaseName)}${suffix}`, }, @@ -4213,7 +2161,7 @@ export class Database { getUserDatabases( username: string, full?: false - ): Promise>; + ): Promise>; /** * Fetches an object mapping names of databases to the access level of the * given ArangoDB user for those databases and the collections within each @@ -4241,15 +2189,15 @@ export class Database { Record< string, { - permission: AccessLevel; - collections: Record; + permission: users.AccessLevel; + collections: Record; } > >; getUserDatabases(username: string, full?: boolean) { return this.request( { - path: `/_api/user/${encodeURIComponent(username)}/database`, + pathname: `/_api/user/${encodeURIComponent(username)}/database`, search: { full }, }, (res) => res.parsedBody.result @@ -4263,9 +2211,9 @@ export class Database { * value. 
* * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * **Note**: The `action` function will be evaluated and executed on the * server inside ArangoDB's embedded JavaScript environment and can not @@ -4308,17 +2256,19 @@ export class Database { * ``` */ executeTransaction( - collections: TransactionCollections & { allowImplicit?: boolean }, + collections: transactions.TransactionCollectionOptions & { + allowImplicit?: boolean; + }, action: string, - options?: TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any } ): Promise; /** * Performs a server-side transaction and returns its return value. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. 
* * **Note**: The `action` function will be evaluated and executed on the * server inside ArangoDB's embedded JavaScript environment and can not @@ -4357,17 +2307,17 @@ export class Database { * ``` */ executeTransaction( - collections: (string | ArangoCollection)[], + collections: (string | collections.ArangoCollection)[], action: string, - options?: TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any } ): Promise; /** * Performs a server-side transaction and returns its return value. * * The Collection can be specified as a collection name (string) or an object - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. 
* * **Note**: The `action` function will be evaluated and executed on the * server inside ArangoDB's embedded JavaScript environment and can not @@ -4406,27 +2356,29 @@ export class Database { * ``` */ executeTransaction( - collection: string | ArangoCollection, + collection: string | collections.ArangoCollection, action: string, - options?: TransactionOptions & { params?: any } + options?: transactions.TransactionOptions & { params?: any } ): Promise; executeTransaction( collections: - | (TransactionCollections & { allowImplicit?: boolean }) - | (string | ArangoCollection)[] + | (transactions.TransactionCollectionOptions & { + allowImplicit?: boolean; + }) + | (string | collections.ArangoCollection)[] | string - | ArangoCollection, + | collections.ArangoCollection, action: string, - options: TransactionOptions & { params?: any } = {} + options: transactions.TransactionOptions & { params?: any } = {} ): Promise { const { allowDirtyRead = undefined, ...opts } = options; return this.request( { method: "POST", - path: "/_api/transaction", + pathname: "/_api/transaction", allowDirtyRead, body: { - collections: coerceTransactionCollections(collections), + collections: transactions.coerceTransactionCollections(collections), action, ...opts, }, @@ -4436,7 +2388,7 @@ export class Database { } /** - * Returns a {@link transaction.Transaction} instance for an existing streaming + * Returns a {@link transactions.Transaction} instance for an existing streaming * transaction with the given `id`. * * See also {@link Database#beginTransaction}. 
@@ -4452,19 +2404,19 @@ export class Database { * await trx2.commit(); * ``` */ - transaction(transactionId: string): Transaction { - return new Transaction(this, transactionId); + transaction(transactionId: string): transactions.Transaction { + return new transactions.Transaction(this, transactionId); } /** * Begins a new streaming transaction for the given collections, then returns - * a {@link transaction.Transaction} instance for the transaction. + * a {@link transactions.Transaction} instance for the transaction. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as - * well as (in TypeScript) {@link collection.DocumentCollection} and - * {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as + * well as (in TypeScript) {@link collections.DocumentCollection} and + * {@link collections.EdgeCollection}. * * @param collections - Collections involved in the transaction. * @param options - Options for the transaction. @@ -4484,17 +2436,17 @@ export class Database { * ``` */ beginTransaction( - collections: TransactionCollections, - options?: TransactionOptions - ): Promise; + collections: transactions.TransactionCollectionOptions, + options?: transactions.TransactionOptions + ): Promise; /** * Begins a new streaming transaction for the given collections, then returns - * a {@link transaction.Transaction} instance for the transaction. + * a {@link transactions.Transaction} instance for the transaction. 
* * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * @param collections - Collections that can be read from and written to * during the transaction. @@ -4515,17 +2467,17 @@ export class Database { * ``` */ beginTransaction( - collections: (string | ArangoCollection)[], - options?: TransactionOptions - ): Promise; + collections: (string | collections.ArangoCollection)[], + options?: transactions.TransactionOptions + ): Promise; /** * Begins a new streaming transaction for the given collections, then returns - * a {@link transaction.Transaction} instance for the transaction. + * a {@link transactions.Transaction} instance for the transaction. * * The Collection can be specified as a collection name (string) or an object - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * @param collection - A collection that can be read from and written to * during the transaction. 
@@ -4545,29 +2497,29 @@ export class Database { * ``` */ beginTransaction( - collection: string | ArangoCollection, - options?: TransactionOptions - ): Promise; + collection: string | collections.ArangoCollection, + options?: transactions.TransactionOptions + ): Promise; beginTransaction( collections: - | TransactionCollections - | (string | ArangoCollection)[] + | transactions.TransactionCollectionOptions + | (string | collections.ArangoCollection)[] | string - | ArangoCollection, - options: TransactionOptions = {} - ): Promise { + | collections.ArangoCollection, + options: transactions.TransactionOptions = {} + ): Promise { const { allowDirtyRead = undefined, ...opts } = options; return this.request( { method: "POST", - path: "/_api/transaction/begin", + pathname: "/_api/transaction/begin", allowDirtyRead, body: { - collections: coerceTransactionCollections(collections), + collections: transactions.coerceTransactionCollections(collections), ...opts, }, }, - (res) => new Transaction(this, res.parsedBody.result.id) + (res) => new transactions.Transaction(this, res.parsedBody.result.id) ); } @@ -4578,10 +2530,10 @@ export class Database { * is rejected, the transaction will be aborted. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as - * well as (in TypeScript) {@link collection.DocumentCollection} and - * {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as + * well as (in TypeScript) {@link collections.DocumentCollection} and + * {@link collections.EdgeCollection}. * * @param collections - Collections involved in the transaction. * @param callback - Callback function executing the transaction steps. 
@@ -4605,9 +2557,9 @@ export class Database { * ``` */ withTransaction( - collections: TransactionCollections, - callback: (step: Transaction["step"]) => Promise, - options?: TransactionOptions + collections: transactions.TransactionCollectionOptions, + callback: (step: transactions.Transaction["step"]) => Promise, + options?: transactions.TransactionOptions ): Promise; /** * Begins and commits a transaction using the given callback. Individual @@ -4616,9 +2568,9 @@ export class Database { * is rejected, the transaction will be aborted. * * Collections can be specified as collection names (strings) or objects - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * @param collections - Collections that can be read from and written to * during the transaction. @@ -4643,9 +2595,9 @@ export class Database { * ``` */ withTransaction( - collections: (string | ArangoCollection)[], - callback: (step: Transaction["step"]) => Promise, - options?: TransactionOptions + collections: (string | collections.ArangoCollection)[], + callback: (step: transactions.Transaction["step"]) => Promise, + options?: transactions.TransactionOptions ): Promise; /** * Begins and commits a transaction using the given callback. Individual @@ -4654,9 +2606,9 @@ export class Database { * is rejected, the transaction will be aborted. 
* * The Collection can be specified as a collection name (string) or an object - * implementing the {@link collection.ArangoCollection} interface: `Collection`, - * {@link graph.GraphVertexCollection}, {@link graph.GraphEdgeCollection} as well as - * (in TypeScript) {@link collection.DocumentCollection} and {@link collection.EdgeCollection}. + * implementing the {@link collections.ArangoCollection} interface: `Collection`, + * {@link graphs.GraphVertexCollection}, {@link graphs.GraphEdgeCollection} as well as + * (in TypeScript) {@link collections.DocumentCollection} and {@link collections.EdgeCollection}. * * @param collection - A collection that can be read from and written to * during the transaction. @@ -4678,21 +2630,21 @@ export class Database { * ``` */ withTransaction( - collection: string | ArangoCollection, - callback: (step: Transaction["step"]) => Promise, - options?: TransactionOptions + collection: string | collections.ArangoCollection, + callback: (step: transactions.Transaction["step"]) => Promise, + options?: transactions.TransactionOptions ): Promise; async withTransaction( collections: - | TransactionCollections - | (string | ArangoCollection)[] + | transactions.TransactionCollectionOptions + | (string | collections.ArangoCollection)[] | string - | ArangoCollection, - callback: (step: Transaction["step"]) => Promise, - options: TransactionOptions = {} + | collections.ArangoCollection, + callback: (step: transactions.Transaction["step"]) => Promise, + options: transactions.TransactionOptions = {} ): Promise { const trx = await this.beginTransaction( - collections as TransactionCollections, + collections as transactions.TransactionCollectionOptions, options ); try { @@ -4720,16 +2672,16 @@ export class Database { * // transactions is an array of transaction descriptions * ``` */ - listTransactions(): Promise { + listTransactions(): Promise { return this._connection.request( - { path: "/_api/transaction" }, + { pathname: "/_api/transaction" }, (res) 
=> res.parsedBody.transactions ); } /** * Fetches all active transactions from the database and returns an array of - * {@link transaction.Transaction} instances for those transactions. + * {@link transactions.Transaction} instances for those transactions. * * See also {@link Database#listTransactions}. * @@ -4740,7 +2692,7 @@ export class Database { * // transactions is an array of transactions * ``` */ - async transactions(): Promise { + async transactions(): Promise { const transactions = await this.listTransactions(); return transactions.map((data) => this.transaction(data.id)); } @@ -4749,9 +2701,9 @@ export class Database { //#region queries /** * Performs a database query using the given `query`, then returns a new - * {@link cursor.ArrayCursor} instance for the result set. + * {@link cursors.Cursor} instance for the result set. * - * See the {@link aql!aql} template string handler for information about how + * See the {@link aql.aql} template string handler for information about how * to create a query string without manually defining bind parameters nor * having to worry about escaping variables. * @@ -4760,7 +2712,7 @@ export class Database { * you do not need to use the `step` method to consume it. * * @param query - An object containing an AQL query string and bind - * parameters, e.g. the object returned from an {@link aql!aql} template string. + * parameters, e.g. the object returned from an {@link aql.aql} template string. * @param options - Options for the query execution. * * @example @@ -4798,14 +2750,14 @@ export class Database { * ``` */ query( - query: AqlQuery, - options?: QueryOptions - ): Promise>; + query: aql.AqlQuery, + options?: queries.QueryOptions + ): Promise>; /** * Performs a database query using the given `query` and `bindVars`, then - * returns a new {@link cursor.ArrayCursor} instance for the result set. + * returns a new {@link cursors.Cursor} instance for the result set. 
* - * See the {@link aql!aql} template string handler for a safer and easier + * See the {@link aql.aql} template string handler for a safer and easier * alternative to passing strings directly. * * **Note**: When executing a query in a streaming transaction using the @@ -4851,20 +2803,20 @@ export class Database { * ``` */ query( - query: string | AqlLiteral, + query: string | aql.AqlLiteral, bindVars?: Record, - options?: QueryOptions - ): Promise>; + options?: queries.QueryOptions + ): Promise>; query( - query: string | AqlQuery | AqlLiteral, + query: string | aql.AqlQuery | aql.AqlLiteral, bindVars?: Record, - options: QueryOptions = {} - ): Promise> { - if (isAqlQuery(query)) { + options: queries.QueryOptions = {} + ): Promise> { + if (aql.isAqlQuery(query)) { options = bindVars ?? {}; bindVars = query.bindVars; query = query.query; - } else if (isAqlLiteral(query)) { + } else if (aql.isAqlLiteral(query)) { query = query.toAQL(); } const { @@ -4881,7 +2833,7 @@ export class Database { return this.request( { method: "POST", - path: "/_api/cursor", + pathname: "/_api/cursor", body: { query, bindVars, @@ -4897,7 +2849,7 @@ export class Database { timeout, }, (res) => - new BatchedArrayCursor( + new cursors.BatchCursor( this, res.parsedBody, res.arangojsHostUrl, @@ -4909,12 +2861,12 @@ export class Database { /** * Explains a database query using the given `query`. * - * See the {@link aql!aql} template string handler for information about how + * See the {@link aql.aql} template string handler for information about how * to create a query string without manually defining bind parameters nor * having to worry about escaping variables. * * @param query - An object containing an AQL query string and bind - * parameters, e.g. the object returned from an {@link aql!aql} template string. + * parameters, e.g. the object returned from an {@link aql.aql} template string. * @param options - Options for explaining the query. 
* * @example @@ -4929,18 +2881,18 @@ export class Database { * ``` */ explain( - query: AqlQuery, - options?: ExplainOptions & { allPlans?: false } - ): Promise>; + query: aql.AqlQuery, + options?: queries.ExplainOptions & { allPlans?: false } + ): Promise>; /** * Explains a database query using the given `query`. * - * See the {@link aql!aql} template string handler for information about how + * See the {@link aql.aql} template string handler for information about how * to create a query string without manually defining bind parameters nor * having to worry about escaping variables. * * @param query - An object containing an AQL query string and bind - * parameters, e.g. the object returned from an {@link aql!aql} template string. + * parameters, e.g. the object returned from an {@link aql.aql} template string. * @param options - Options for explaining the query. * * @example @@ -4958,13 +2910,13 @@ export class Database { * ``` */ explain( - query: AqlQuery, - options?: ExplainOptions & { allPlans: true } - ): Promise>; + query: aql.AqlQuery, + options?: queries.ExplainOptions & { allPlans: true } + ): Promise>; /** * Explains a database query using the given `query` and `bindVars`. * - * See the {@link aql!aql} template string handler for a safer and easier + * See the {@link aql.aql} template string handler for a safer and easier * alternative to passing strings directly. * * @param query - An AQL query string. @@ -4986,14 +2938,14 @@ export class Database { * ``` */ explain( - query: string | AqlLiteral, + query: string | aql.AqlLiteral, bindVars?: Record, - options?: ExplainOptions & { allPlans?: false } - ): Promise>; + options?: queries.ExplainOptions & { allPlans?: false } + ): Promise>; /** * Explains a database query using the given `query` and `bindVars`. * - * See the {@link aql!aql} template string handler for a safer and easier + * See the {@link aql.aql} template string handler for a safer and easier * alternative to passing strings directly. 
* * @param query - An AQL query string. @@ -5016,25 +2968,29 @@ export class Database { * ``` */ explain( - query: string | AqlLiteral, + query: string | aql.AqlLiteral, bindVars?: Record, - options?: ExplainOptions & { allPlans: true } - ): Promise>; + options?: queries.ExplainOptions & { allPlans: true } + ): Promise>; explain( - query: string | AqlQuery | AqlLiteral, + query: string | aql.AqlQuery | aql.AqlLiteral, bindVars?: Record, - options?: ExplainOptions - ): Promise> { - if (isAqlQuery(query)) { + options?: queries.ExplainOptions + ): Promise< + connection.ArangoApiResponse< + queries.SingleExplainResult | queries.MultiExplainResult + > + > { + if (aql.isAqlQuery(query)) { options = bindVars; bindVars = query.bindVars; query = query.query; - } else if (isAqlLiteral(query)) { + } else if (aql.isAqlLiteral(query)) { query = query.toAQL(); } return this.request({ method: "POST", - path: "/_api/explain", + pathname: "/_api/explain", body: { query, bindVars, options }, }); } @@ -5042,12 +2998,12 @@ export class Database { /** * Parses the given query and returns the result. * - * See the {@link aql!aql} template string handler for information about how + * See the {@link aql.aql} template string handler for information about how * to create a query string without manually defining bind parameters nor * having to worry about escaping variables. * * @param query - An AQL query string or an object containing an AQL query - * string and bind parameters, e.g. the object returned from an {@link aql!aql} + * string and bind parameters, e.g. the object returned from an {@link aql.aql} * template string. 
* * @example @@ -5060,16 +3016,18 @@ export class Database { * RETURN doc._key * `); * ``` - */ - parse(query: string | AqlQuery | AqlLiteral): Promise { - if (isAqlQuery(query)) { + aql.*/ + parse( + query: string | aql.AqlQuery | aql.AqlLiteral + ): Promise { + if (aql.isAqlQuery(query)) { query = query.query; - } else if (isAqlLiteral(query)) { + } else if (aql.isAqlLiteral(query)) { query = query.toAQL(); } return this.request({ method: "POST", - path: "/_api/query", + pathname: "/_api/query", body: { query }, }); } @@ -5086,9 +3044,9 @@ export class Database { * } * ``` */ - queryRules(): Promise { + queryRules(): Promise { return this.request({ - path: "/_api/query/rules", + pathname: "/_api/query/rules", }); } @@ -5102,7 +3060,7 @@ export class Database { * console.log(tracking.enabled); * ``` */ - queryTracking(): Promise; + queryTracking(): Promise; /** * Modifies the query tracking properties. * @@ -5120,18 +3078,22 @@ export class Database { * }); * ``` */ - queryTracking(options: QueryTrackingOptions): Promise; - queryTracking(options?: QueryTrackingOptions): Promise { + queryTracking( + options: queries.QueryTrackingOptions + ): Promise; + queryTracking( + options?: queries.QueryTrackingOptions + ): Promise { return this.request( options ? 
{ method: "PUT", - path: "/_api/query/properties", + pathname: "/_api/query/properties", body: options, } : { method: "GET", - path: "/_api/query/properties", + pathname: "/_api/query/properties", } ); } @@ -5147,10 +3109,10 @@ export class Database { * const queries = await db.listRunningQueries(); * ``` */ - listRunningQueries(): Promise { + listRunningQueries(): Promise { return this.request({ method: "GET", - path: "/_api/query/current", + pathname: "/_api/query/current", }); } @@ -5167,10 +3129,10 @@ export class Database { * // Only works if slow query tracking is enabled * ``` */ - listSlowQueries(): Promise { + listSlowQueries(): Promise { return this.request({ method: "GET", - path: "/_api/query/slow", + pathname: "/_api/query/slow", }); } @@ -5190,7 +3152,7 @@ export class Database { return this.request( { method: "DELETE", - path: "/_api/query/slow", + pathname: "/_api/query/slow", }, () => undefined ); @@ -5220,7 +3182,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/query/${encodeURIComponent(queryId)}`, + pathname: `/_api/query/${encodeURIComponent(queryId)}`, }, () => undefined ); @@ -5237,9 +3199,9 @@ export class Database { * console.log(entries); * ``` */ - listQueryCacheEntries(): Promise { + listQueryCacheEntries(): Promise { return this.request({ - path: "/_api/query-cache/entries", + pathname: "/_api/query-cache/entries", }); } @@ -5257,7 +3219,7 @@ export class Database { return this.request( { method: "DELETE", - path: "/_api/query-cache", + pathname: "/_api/query-cache", }, () => undefined ); @@ -5273,9 +3235,9 @@ export class Database { * console.log(properties); * ``` */ - getQueryCacheProperties(): Promise { + getQueryCacheProperties(): Promise { return this.request({ - path: "/_api/query-cache/properties", + pathname: "/_api/query-cache/properties", }); } @@ -5291,30 +3253,30 @@ export class Database { * ``` */ setQueryCacheProperties( - properties: QueryCachePropertiesOptions - ): Promise { + 
properties: queries.QueryCachePropertiesOptions + ): Promise { return this.request({ method: "PUT", - path: "/_api/query-cache/properties", + pathname: "/_api/query-cache/properties", body: properties, }); } //#endregion - //#region functions + //#region user functions /** * Fetches a list of all AQL user functions registered with the database. * * @example * ```js * const db = new Database(); - * const functions = await db.listFunctions(); + * const functions = await db.listUserFunctions(); * const names = functions.map(fn => fn.name); * ``` */ - listFunctions(): Promise { + listUserFunctions(): Promise { return this.request( - { path: "/_api/aqlfunction" }, + { pathname: "/_api/aqlfunction" }, (res) => res.parsedBody.result ); } @@ -5335,7 +3297,7 @@ export class Database { * @example * ```js * const db = new Database(); - * await db.createFunction( + * await db.createUserFunction( * "ACME::ACCOUNTING::CALCULATE_VAT", * "(price) => price * 0.19" * ); @@ -5350,14 +3312,14 @@ export class Database { * // cursor is a cursor for the query result * ``` */ - createFunction( + createUserFunction( name: string, code: string, isDeterministic: boolean = false - ): Promise> { + ): Promise> { return this.request({ method: "POST", - path: "/_api/aqlfunction", + pathname: "/_api/aqlfunction", body: { name, code, isDeterministic }, }); } @@ -5373,17 +3335,17 @@ export class Database { * @example * ```js * const db = new Database(); - * await db.dropFunction("ACME::ACCOUNTING::CALCULATE_VAT"); + * await db.dropUserFunction("ACME::ACCOUNTING::CALCULATE_VAT"); * // the function no longer exists * ``` */ - dropFunction( + dropUserFunction( name: string, group: boolean = false - ): Promise> { + ): Promise> { return this.request({ method: "DELETE", - path: `/_api/aqlfunction/${encodeURIComponent(name)}`, + pathname: `/_api/aqlfunction/${encodeURIComponent(name)}`, search: { group }, }); } @@ -5407,9 +3369,11 @@ export class Database { * const services = await db.listServices(false); 
// all services * ``` */ - listServices(excludeSystem: boolean = true): Promise { + listServices( + excludeSystem: boolean = true + ): Promise { return this.request({ - path: "/_api/foxx", + pathname: "/_api/foxx", search: { excludeSystem }, }); } @@ -5449,8 +3413,8 @@ export class Database { async installService( mount: string, source: File | Blob | string, - options: InstallServiceOptions = {} - ): Promise { + options: services.InstallServiceOptions = {} + ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); if (configuration) { @@ -5466,7 +3430,7 @@ export class Database { return await this.request({ body: form, method: "POST", - path: "/_api/foxx", + pathname: "/_api/foxx", search: { ...search, mount }, }); } @@ -5507,8 +3471,8 @@ export class Database { async replaceService( mount: string, source: File | Blob | string, - options: ReplaceServiceOptions = {} - ): Promise { + options: services.ReplaceServiceOptions = {} + ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); if (configuration) { @@ -5524,7 +3488,7 @@ export class Database { return await this.request({ body: form, method: "PUT", - path: "/_api/foxx/service", + pathname: "/_api/foxx/service", search: { ...search, mount }, }); } @@ -5565,8 +3529,8 @@ export class Database { async upgradeService( mount: string, source: File | Blob | string, - options: UpgradeServiceOptions = {} - ): Promise { + options: services.UpgradeServiceOptions = {} + ): Promise { const { configuration, dependencies, ...search } = options; const form = new FormData(); if (configuration) { @@ -5582,7 +3546,7 @@ export class Database { return await this.request({ body: form, method: "PATCH", - path: "/_api/foxx/service", + pathname: "/_api/foxx/service", search: { ...search, mount }, }); } @@ -5601,12 +3565,12 @@ export class Database { */ uninstallService( mount: string, - options?: UninstallServiceOptions + options?: 
services.UninstallServiceOptions ): Promise { return this.request( { method: "DELETE", - path: "/_api/foxx/service", + pathname: "/_api/foxx/service", search: { ...options, mount }, }, () => undefined @@ -5625,9 +3589,9 @@ export class Database { * // info contains detailed information about the service * ``` */ - getService(mount: string): Promise { + getService(mount: string): Promise { return this.request({ - path: "/_api/foxx/service", + pathname: "/_api/foxx/service", search: { mount }, }); } @@ -5656,7 +3620,7 @@ export class Database { getServiceConfiguration( mount: string, minimal?: false - ): Promise>; + ): Promise>; /** * Retrieves information about the service's configuration options and their * current values. @@ -5684,7 +3648,7 @@ export class Database { ): Promise>; getServiceConfiguration(mount: string, minimal: boolean = false) { return this.request({ - path: "/_api/foxx/configuration", + pathname: "/_api/foxx/configuration", search: { mount, minimal }, }); } @@ -5717,7 +3681,9 @@ export class Database { mount: string, cfg: Record, minimal?: false - ): Promise>; + ): Promise< + Record + >; /** * Replaces the configuration of the given service, discarding any existing * values for options not specified. @@ -5757,7 +3723,7 @@ export class Database { ) { return this.request({ method: "PUT", - path: "/_api/foxx/configuration", + pathname: "/_api/foxx/configuration", body: cfg, search: { mount, minimal }, }); @@ -5791,7 +3757,9 @@ export class Database { mount: string, cfg: Record, minimal?: false - ): Promise>; + ): Promise< + Record + >; /** * Updates the configuration of the given service while maintaining any * existing values for options not specified. 
@@ -5831,7 +3799,7 @@ export class Database { ) { return this.request({ method: "PATCH", - path: "/_api/foxx/configuration", + pathname: "/_api/foxx/configuration", body: cfg, search: { mount, minimal }, }); @@ -5861,7 +3829,12 @@ export class Database { getServiceDependencies( mount: string, minimal?: false - ): Promise>; + ): Promise< + Record< + string, + services.SingleServiceDependency | services.MultiServiceDependency + > + >; /** * Retrieves information about the service's dependencies and their current * mount points. @@ -5889,7 +3862,7 @@ export class Database { ): Promise>; getServiceDependencies(mount: string, minimal: boolean = false) { return this.request({ - path: "/_api/foxx/dependencies", + pathname: "/_api/foxx/dependencies", search: { mount, minimal }, }); } @@ -5925,7 +3898,9 @@ export class Database { ): Promise< Record< string, - (SingleServiceDependency | MultiServiceDependency) & { warning?: string } + (services.SingleServiceDependency | services.MultiServiceDependency) & { + warning?: string; + } > >; /** @@ -5971,7 +3946,7 @@ export class Database { ) { return this.request({ method: "PUT", - path: "/_api/foxx/dependencies", + pathname: "/_api/foxx/dependencies", body: deps, search: { mount, minimal }, }); @@ -6008,7 +3983,9 @@ export class Database { ): Promise< Record< string, - (SingleServiceDependency | MultiServiceDependency) & { warning?: string } + (services.SingleServiceDependency | services.MultiServiceDependency) & { + warning?: string; + } > >; /** @@ -6054,7 +4031,7 @@ export class Database { ) { return this.request({ method: "PATCH", - path: "/_api/foxx/dependencies", + pathname: "/_api/foxx/dependencies", body: deps, search: { mount, minimal }, }); @@ -6078,32 +4055,32 @@ export class Database { setServiceDevelopmentMode( mount: string, enabled: boolean = true - ): Promise { + ): Promise { return this.request({ method: enabled ? 
"POST" : "DELETE", - path: "/_api/foxx/development", + pathname: "/_api/foxx/development", search: { mount }, }); } /** - * Retrieves a list of scripts defined in the service manifest's "scripts" - * section mapped to their human readable representations. + * Retrieves an object mapping script names to their human readable + * representations, as defined in the service manifest's "scripts" section. * * @param mount - The service's mount point, relative to the database. * * @example * ```js * const db = new Database(); - * const scripts = await db.listServiceScripts("/my-service"); + * const scripts = await db.getServiceScripts("/my-service"); * for (const [name, title] of Object.entries(scripts)) { * console.log(`${name}: ${title}`); * } * ``` */ - listServiceScripts(mount: string): Promise> { + getServiceScripts(mount: string): Promise> { return this.request({ - path: "/_api/foxx/scripts", + pathname: "/_api/foxx/scripts", search: { mount }, }); } @@ -6135,7 +4112,7 @@ export class Database { runServiceScript(mount: string, name: string, params?: any): Promise { return this.request({ method: "POST", - path: `/_api/foxx/scripts/${encodeURIComponent(name)}`, + pathname: `/_api/foxx/scripts/${encodeURIComponent(name)}`, body: params, search: { mount }, }); @@ -6169,7 +4146,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results using the * "suite" reporter, which groups the test result by test suite. 
@@ -6201,7 +4178,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results using the * "stream" reporter, which represents the results as a sequence of tuples @@ -6234,7 +4211,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results using the * "tap" reporter, which represents the results as an array of strings using @@ -6267,7 +4244,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results using the * "xunit" reporter, which represents the results as an XML document using @@ -6300,7 +4277,7 @@ export class Database { */ filter?: string; } - ): Promise; + ): Promise; /** * Runs the tests of a given service and returns the results as a string * using the "stream" reporter in "idiomatic" mode, which represents the @@ -6413,7 +4390,7 @@ export class Database { ) { return this.request({ method: "POST", - path: "/_api/foxx/tests", + pathname: "/_api/foxx/tests", search: { ...options, mount, @@ -6438,7 +4415,7 @@ export class Database { */ getServiceReadme(mount: string): Promise { return this.request({ - path: "/_api/foxx/readme", + pathname: "/_api/foxx/readme", search: { mount }, }); } @@ -6456,9 +4433,9 @@ export class Database { * // spec is a Swagger API description of the service * ``` */ - getServiceDocumentation(mount: string): Promise { + getServiceDocumentation(mount: string): Promise { return this.request({ - path: "/_api/foxx/swagger", + pathname: "/_api/foxx/swagger", search: { mount }, }); } @@ -6479,7 +4456,7 @@ export class Database { downloadService(mount: string): Promise { return this.request({ method: "POST", - path: "/_api/foxx/download", + pathname: "/_api/foxx/download", search: { mount }, expectBinary: true, }); @@ -6509,7 +4486,7 @@ export class Database { return this.request( { method: "POST", 
- path: "/_api/foxx/commit", + pathname: "/_api/foxx/commit", search: { replace }, }, () => undefined @@ -6531,11 +4508,13 @@ export class Database { * // a hot backup has been created * ``` */ - createHotBackup(options: HotBackupOptions = {}): Promise { + createHotBackup( + options: hotBackups.HotBackupOptions = {} + ): Promise { return this.request( { method: "POST", - path: "/_admin/backup/create", + pathname: "/_admin/backup/create", body: options, }, (res) => res.parsedBody.result @@ -6551,17 +4530,17 @@ export class Database { * * @example * ```js - * const backups = await db.listHotBackups(); - * for (const backup of backups) { + * const backups = await db.getHotBackups(); + * for (const backup of backups.list) { * console.log(backup.id); * } * ``` */ - listHotBackups(id?: string | string[]): Promise { + getHotBackups(id?: string | string[]): Promise { return this.request( { method: "POST", - path: "/_admin/backup/list", + pathname: "/_admin/backup/list", body: id ? { id } : undefined, }, (res) => res.parsedBody.result @@ -6585,7 +4564,7 @@ export class Database { return this.request( { method: "POST", - path: "/_admin/backup/restore", + pathname: "/_admin/backup/restore", body: { id }, }, (res) => res.parsedBody.result.previous @@ -6607,7 +4586,7 @@ export class Database { return this.request( { method: "POST", - path: "/_admin/backup/delete", + pathname: "/_admin/backup/delete", body: { id }, }, () => undefined @@ -6630,10 +4609,10 @@ export class Database { * } * ``` */ - getLogEntries(options?: LogEntriesOptions): Promise { + getLogEntries(options?: logs.LogEntriesOptions): Promise { return this.request( { - path: "/_admin/log/entries", + pathname: "/_admin/log/entries", search: options, }, (res) => res.parsedBody @@ -6650,16 +4629,18 @@ export class Database { * * @example * ```js - * const messages = await db.getLogMessages(); + * const messages = await db.listLogMessages(); * for (const m of messages) { * console.log(`${m.date} - [${m.level}] 
${m.message} (#${m.id})`); * } * ``` */ - getLogMessages(options?: LogEntriesOptions): Promise { + listLogMessages( + options?: logs.LogEntriesOptions + ): Promise { return this.request( { - path: "/_admin/log", + pathname: "/_admin/log", search: options, }, (res) => res.parsedBody.messages @@ -6675,9 +4656,9 @@ export class Database { * console.log(levels.request); // log level for incoming requests * ``` */ - getLogLevel(): Promise> { + getLogLevel(): Promise> { return this.request({ - path: "/_admin/log/level", + pathname: "/_admin/log/level", }); } @@ -6695,11 +4676,11 @@ export class Database { * ``` */ setLogLevel( - levels: Record - ): Promise> { + levels: Record + ): Promise> { return this.request({ method: "PUT", - path: "/_admin/log/level", + pathname: "/_admin/log/level", body: levels, }); } @@ -6711,7 +4692,7 @@ export class Database { * database request performed by the callback will be marked for asynchronous * execution and its result will be made available as an async job. * - * Returns a {@link Job} instance that can be used to retrieve the result + * Returns a {@link jobs.Job} instance that can be used to retrieve the result * of the callback function once the request has been executed. * * @param callback - Callback function to execute as an async job. 
@@ -6727,15 +4708,17 @@ export class Database { * // job.result is a list of Collection instances * ``` */ - async createJob(callback: () => Promise): Promise> { - const trap = new Promise((resolveTrap) => { - this._trapRequest = (trapped) => resolveTrap(trapped); - }); + async createJob(callback: () => Promise): Promise> { + const trap = new Promise>( + (resolveTrap) => { + this._trapRequest = (trapped) => resolveTrap(trapped); + } + ); const eventualResult = callback(); const trapped = await trap; if (trapped.error) return eventualResult as Promise; const { jobId, onResolve, onReject } = trapped; - return new Job( + return new jobs.Job( this, jobId, (res) => { @@ -6750,7 +4733,7 @@ export class Database { } /** - * Returns a {@link job.Job} instance for the given `jobId`. + * Returns a {@link jobs.Job} instance for the given `jobId`. * * @param jobId - ID of the async job. * @@ -6760,8 +4743,8 @@ export class Database { * const job = db.job("12345"); * ``` */ - job(jobId: string): Job { - return new Job(this, jobId); + job(jobId: string): jobs.Job { + return new jobs.Job(this, jobId); } /** @@ -6777,7 +4760,7 @@ export class Database { listPendingJobs(): Promise { return this.request( { - path: "/_api/job/pending", + pathname: "/_api/job/pending", }, (res) => res.parsedBody ); @@ -6796,7 +4779,7 @@ export class Database { listCompletedJobs(): Promise { return this.request( { - path: "/_api/job/done", + pathname: "/_api/job/done", }, (res) => res.parsedBody ); @@ -6820,7 +4803,7 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/job/expired`, + pathname: `/_api/job/expired`, search: { stamp: threshold / 1000 }, }, () => undefined @@ -6834,10 +4817,11 @@ export class Database { return this.request( { method: "DELETE", - path: `/_api/job/all`, + pathname: `/_api/job/all`, }, () => undefined ); } //#endregion } +//#endregion diff --git a/src/documents.ts b/src/documents.ts index 1154068a8..4f396e8d3 100644 --- a/src/documents.ts +++ 
b/src/documents.ts @@ -1,6 +1,6 @@ /** * ```ts - * import type { Document, Edge } from "arangojs/documents.js"; + * import type { Document, Edge } from "arangojs/documents"; * ``` * * The "documents" module provides document/edge related types for TypeScript. @@ -8,6 +8,7 @@ * @packageDocumentation */ +//#region Shared types /** * Common ArangoDB metadata properties of a document. */ @@ -41,7 +42,9 @@ export type EdgeMetadata = { */ _to: string; }; +//#endregion +//#region Document types /** * Type representing an object that can be stored in a collection. */ @@ -80,13 +83,476 @@ export type Edge = any> = T & export type Patch> = { [K in keyof T]?: T[K] | Patch; }; +//#endregion + +//#region Document operation options +/** + * Options for checking whether a document exists in a collection. + */ +export type DocumentExistsOptions = { + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; + /** + * If set to a document revision, the document will only match if its `_rev` + * matches the given revision. + */ + ifMatch?: string; + /** + * If set to a document revision, the document will only match if its `_rev` + * does not match the given revision. + */ + ifNoneMatch?: string; +}; + +/** + * Options for retrieving a document from a collection. + */ +export type ReadDocumentOptions = { + /** + * If set to `true`, `null` is returned instead of an exception being thrown + * if the document does not exist. + */ + graceful?: boolean; + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. 
+ */ + allowDirtyRead?: boolean; + /** + * If set to a document revision, the request will fail with an error if the + * document exists but its `_rev` does not match the given revision. + */ + ifMatch?: string; + /** + * If set to a document revision, the request will fail with an error if the + * document exists and its `_rev` matches the given revision. Note that an + * `HttpError` with code 304 will be thrown instead of an `ArangoError`. + */ + ifNoneMatch?: string; +}; + +/** + * Options for retrieving multiple documents from a collection. + */ +export type BulkReadDocumentsOptions = { + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; + /** + * If set to `false`, the existing document will only be modified if its + * `_rev` property matches the same property on the new data. + * + * Default: `true` + */ + ignoreRevs?: boolean; +}; + +/** + * Options for inserting a new document into a collection. + */ +export type InsertDocumentOptions = { + /** + * If set to `true`, data will be synchronized to disk before returning. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * If set to `true`, no data will be returned by the server. This option can + * be used to reduce network traffic. + * + * Default: `false` + */ + silent?: boolean; + /** + * If set to `true`, the complete new document will be returned as the `new` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnNew?: boolean; + /** + * If set to `true`, the complete old document will be returned as the `old` + * property on the result object. Has no effect if `silent` is set to `true`. + * This option is only available when `overwriteMode` is set to `"update"` or + * `"replace"`. 
+ * + * Default: `false` + */ + returnOld?: boolean; + /** + * Defines what should happen if a document with the same `_key` or `_id` + * already exists, instead of throwing an exception. + * + * Default: `"conflict" + */ + overwriteMode?: "ignore" | "update" | "replace" | "conflict"; + /** + * If set to `false`, properties with a value of `null` will be removed from + * the new document. + * + * Default: `true` + */ + keepNull?: boolean; + /** + * If set to `false`, object properties that already exist in the old + * document will be overwritten rather than merged when an existing document + * with the same `_key` or `_id` is updated. This does not affect arrays. + * + * Default: `true` + */ + mergeObjects?: boolean; + /** + * If set to `true`, new entries will be added to in-memory index caches if + * document insertions affect the edge index or cache-enabled persistent + * indexes. + * + * Default: `false` + */ + refillIndexCaches?: boolean; + /** + * If set, the attribute with the name specified by the option is looked up + * in the stored document and the attribute value is compared numerically to + * the value of the versioning attribute in the supplied document that is + * supposed to update/replace it. + */ + versionAttribute?: string; +}; + +/** + * Options for replacing an existing document in a collection. + */ +export type ReplaceDocumentOptions = { + /** + * If set to `true`, data will be synchronized to disk before returning. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * If set to `true`, no data will be returned by the server. This option can + * be used to reduce network traffic. + * + * Default: `false` + */ + silent?: boolean; + /** + * If set to `true`, the complete new document will be returned as the `new` + * property on the result object. Has no effect if `silent` is set to `true`. 
+ * + * Default: `false` + */ + returnNew?: boolean; + /** + * If set to `false`, the existing document will only be modified if its + * `_rev` property matches the same property on the new data. + * + * Default: `true` + */ + ignoreRevs?: boolean; + /** + * If set to `true`, the complete old document will be returned as the `old` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnOld?: boolean; + /** + * If set to a document revision, the document will only be replaced if its + * `_rev` matches the given revision. + */ + ifMatch?: string; + /** + * If set to `true`, existing entries in in-memory index caches will be + * updated if document replacements affect the edge index or cache-enabled + * persistent indexes. + * + * Default: `false` + */ + refillIndexCaches?: boolean; + /** + * If set, the attribute with the name specified by the option is looked up + * in the stored document and the attribute value is compared numerically to + * the value of the versioning attribute in the supplied document that is + * supposed to update/replace it. + */ + versionAttribute?: string; +}; + +/** + * Options for updating a document in a collection. + */ +export type UpdateDocumentOptions = { + /** + * If set to `true`, data will be synchronized to disk before returning. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * If set to `true`, no data will be returned by the server. This option can + * be used to reduce network traffic. + * + * Default: `false` + */ + silent?: boolean; + /** + * If set to `true`, the complete new document will be returned as the `new` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnNew?: boolean; + /** + * If set to `false`, the existing document will only be modified if its + * `_rev` property matches the same property on the new data. 
+ * + * Default: `true` + */ + ignoreRevs?: boolean; + /** + * If set to `true`, the complete old document will be returned as the `old` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnOld?: boolean; + /** + * If set to `false`, properties with a value of `null` will be removed from + * the new document. + * + * Default: `true` + */ + keepNull?: boolean; + /** + * If set to `false`, object properties that already exist in the old + * document will be overwritten rather than merged. This does not affect + * arrays. + * + * Default: `true` + */ + mergeObjects?: boolean; + /** + * If set to a document revision, the document will only be updated if its + * `_rev` matches the given revision. + */ + ifMatch?: string; + /** + * If set to `true`, existing entries in in-memory index caches will be + * updated if document updates affect the edge index or cache-enabled + * persistent indexes. + * + * Default: `false` + */ + refillIndexCaches?: boolean; + /** + * If set, the attribute with the name specified by the option is looked up + * in the stored document and the attribute value is compared numerically to + * the value of the versioning attribute in the supplied document that is + * supposed to update/replace it. + */ + versionAttribute?: string; +}; + +/** + * Options for removing a document from a collection. + */ +export type RemoveDocumentOptions = { + /** + * If set to `true`, changes will be synchronized to disk before returning. + * + * Default: `false` + */ + waitForSync?: boolean; + /** + * If set to `true`, the complete old document will be returned as the `old` + * property on the result object. Has no effect if `silent` is set to `true`. + * + * Default: `false` + */ + returnOld?: boolean; + /** + * If set to `true`, no data will be returned by the server. This option can + * be used to reduce network traffic. 
+ * + * Default: `false` + */ + silent?: boolean; + /** + * If set to a document revision, the document will only be removed if its + * `_rev` matches the given revision. + */ + ifMatch?: string; + /** + * If set to `true`, existing entries in in-memory index caches will be + * deleted if document removals affect the edge index or cache-enabled + * persistent indexes. + * + * Default: `false` + */ + refillIndexCaches?: boolean; +}; + +/** + * Options for bulk importing documents into a collection. + */ +export type ImportDocumentsOptions = { + /** + * (Edge collections only.) Prefix to prepend to `_from` attribute values. + */ + fromPrefix?: string; + /** + * (Edge collections only.) Prefix to prepend to `_to` attribute values. + */ + toPrefix?: string; + /** + * If set to `true`, the collection is truncated before the data is imported. + * + * Default: `false` + */ + overwrite?: boolean; + /** + * Whether to wait for the documents to have been synced to disk. + */ + waitForSync?: boolean; + /** + * Controls behavior when a unique constraint is violated on the document key. + * + * * `"error"`: the document will not be imported. + * * `"update`: the document will be merged into the existing document. + * * `"replace"`: the document will replace the existing document. + * * `"ignore"`: the document will not be imported and the unique constraint + * error will be ignored. + * + * Default: `"error"` + */ + onDuplicate?: "error" | "update" | "replace" | "ignore"; + /** + * If set to `true`, the import will abort if any error occurs. + */ + complete?: boolean; + /** + * Whether the response should contain additional details about documents + * that could not be imported. + */ + details?: boolean; +}; + +/** + * Options for retrieving a document's edges from a collection. 
+ */ +export type DocumentEdgesOptions = { + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; +}; +//#endregion + +//#region Document operation results +/** + * Represents a bulk operation failure for an individual document. + */ +export type DocumentOperationFailure = { + /** + * Indicates that the operation failed. + */ + error: true; + /** + * Human-readable description of the failure. + */ + errorMessage: string; + /** + * Numeric representation of the failure. + */ + errorNum: number; +}; + +/** + * Metadata returned by a document operation. + */ +export type DocumentOperationMetadata = DocumentMetadata & { + /** + * Revision of the document that was updated or replaced by this operation. + */ + _oldRev?: string; +}; + +/** + * Result of a collection bulk import. + */ +export type ImportDocumentsResult = { + /** + * Whether the import failed. + */ + error: false; + /** + * Number of new documents imported. + */ + created: number; + /** + * Number of documents that failed with an error. + */ + errors: number; + /** + * Number of empty documents. + */ + empty: number; + /** + * Number of documents updated. + */ + updated: number; + /** + * Number of documents that failed with an error that is ignored. + */ + ignored: number; + /** + * Additional details about any errors encountered during the import. + */ + details?: string[]; +}; + +/** + * Result of retrieving edges in a collection. + */ +export type DocumentEdgesResult = any> = { + edges: Edge[]; + stats: { + scannedIndex: number; + filtered: number; + }; +}; +//#endregion + +//#region Document selectors +/** + * A value that can be used to identify a document within a collection in + * arangojs methods, i.e. a partial ArangoDB document or the value of a + * document's `_key` or `_id`. 
+ * + * See {@link DocumentMetadata}. + */ +export type DocumentSelector = + | ObjectWithDocumentId + | ObjectWithDocumentKey + | string; /** * An object with an ArangoDB document `_id` property. * - * See {@link documents.DocumentMetadata}. + * See {@link DocumentMetadata}. */ -export type ObjectWithId = { +export type ObjectWithDocumentId = { [key: string]: any; _id: string; }; @@ -94,29 +560,20 @@ export type ObjectWithId = { /** * An object with an ArangoDB document `_key` property. * - * See {@link documents.DocumentMetadata}. + * See {@link DocumentMetadata}. */ -export type ObjectWithKey = { +export type ObjectWithDocumentKey = { [key: string]: any; _key: string; }; -/** - * A value that can be used to identify a document within a collection in - * arangojs methods, i.e. a partial ArangoDB document or the value of a - * document's `_key` or `_id`. - * - * See {@link documents.DocumentMetadata}. - */ -export type DocumentSelector = ObjectWithId | ObjectWithKey | string; - /** * @internal */ export function _documentHandle( selector: DocumentSelector, collectionName: string, - strict: boolean = true + strict: boolean = true, ): string { if (typeof selector !== "string") { if (selector._id) { @@ -126,17 +583,18 @@ export function _documentHandle( return _documentHandle(selector._key, collectionName); } throw new Error( - "Document handle must be a string or an object with a _key or _id attribute" + "Document handle must be a string or an object with a _key or _id attribute", ); } if (selector.includes("/")) { const [head] = selector.split("/"); if (strict && head !== collectionName) { throw new Error( - `Document ID "${selector}" does not match collection name "${collectionName}"` + `Document ID "${selector}" does not match collection name "${collectionName}"`, ); } return selector; } return `${collectionName}/${selector}`; } +//#endregion diff --git a/src/error.ts b/src/error.ts deleted file mode 100644 index 02e5f5397..000000000 --- a/src/error.ts +++ 
/dev/null @@ -1,205 +0,0 @@ -/** - * ```ts - * import type { ArangoError, HttpError } from "arangojs/error.js"; - * ``` - * - * The "error" module provides types and interfaces for TypeScript related - * to arangojs error handling. - * - * @packageDocumentation - */ - -import { ArangojsResponse } from "./lib/request.js"; - -const messages: { [key: number]: string } = { - 0: "Network Error", - 304: "Not Modified", - 400: "Bad Request", - 401: "Unauthorized", - 402: "Payment Required", - 403: "Forbidden", - 404: "Not Found", - 405: "Method Not Allowed", - 406: "Not Acceptable", - 407: "Proxy Authentication Required", - 408: "Request Timeout", - 409: "Conflict", - 410: "Gone", - 411: "Length Required", - 412: "Precondition Failed", - 413: "Payload Too Large", - 414: "Request-URI Too Long", - 415: "Unsupported Media Type", - 416: "Requested Range Not Satisfiable", - 417: "Expectation Failed", - 418: "I'm a teapot", - 421: "Misdirected Request", - 422: "Unprocessable Entity", - 423: "Locked", - 424: "Failed Dependency", - 426: "Upgrade Required", - 428: "Precondition Required", - 429: "Too Many Requests", - 431: "Request Header Fields Too Large", - 444: "Connection Closed Without Response", - 451: "Unavailable For Legal Reasons", - 499: "Client Closed Request", - 500: "Internal Server Error", - 501: "Not Implemented", - 502: "Bad Gateway", - 503: "Service Unavailable", - 504: "Gateway Timeout", - 505: "HTTP Version Not Supported", - 506: "Variant Also Negotiates", - 507: "Insufficient Storage", - 508: "Loop Detected", - 510: "Not Extended", - 511: "Network Authentication Required", - 599: "Network Connect Timeout Error", -}; - -const nativeErrorKeys = [ - "fileName", - "lineNumber", - "columnNumber", - "stack", - "description", - "number", -] as (keyof Error)[]; - -/** - * Indicates whether the given value represents an {@link ArangoError}. - * - * @param error - A value that might be an `ArangoError`. 
- */ -export function isArangoError(error: any): error is ArangoError { - return Boolean(error && error.isArangoError); -} - -/** - * Indicates whether the given value represents an ArangoDB error response. - * - * @internal - */ -export function isArangoErrorResponse(body: any): boolean { - return ( - body && - body.hasOwnProperty("error") && - body.hasOwnProperty("code") && - body.hasOwnProperty("errorMessage") && - body.hasOwnProperty("errorNum") - ); -} - -/** - * Indicates whether the given value represents a Node.js `SystemError`. - */ -export function isSystemError(err: any): err is SystemError { - return ( - Object.getPrototypeOf(err) === Error.prototype && - err.hasOwnProperty("code") && - err.hasOwnProperty("errno") && - err.hasOwnProperty("syscall") - ); -} - -/** - * Interface representing a Node.js `SystemError`. - */ -export interface SystemError extends Error { - code: string; - errno: number | string; - syscall: string; -} - -/** - * Represents an error returned by ArangoDB. - */ -export class ArangoError extends Error { - name = "ArangoError"; - /** - * ArangoDB error code. - * - * See [ArangoDB error documentation](https://www.arangodb.com/docs/stable/appendix-error-codes.html). - */ - errorNum: number; - /** - * HTTP status code included in the server error response object. - */ - code: number; - /** - * Server response object. - */ - response: any; - - /** - * @internal - */ - constructor(response: ArangojsResponse) { - super(); - this.response = response; - this.message = response.parsedBody.errorMessage; - this.errorNum = response.parsedBody.errorNum; - this.code = response.parsedBody.code; - const err = new Error(this.message); - err.name = this.name; - for (const key of nativeErrorKeys) { - if (err[key]) this[key] = err[key] as string; - } - } - - /** - * @internal - * - * Indicates that this object represents an ArangoDB error. 
- */ - get isArangoError(): true { - return true; - } - - toJSON() { - return { - error: true, - errorMessage: this.message, - errorNum: this.errorNum, - code: this.code, - }; - } -} - -/** - * Represents a plain HTTP error response. - */ -export class HttpError extends Error { - name = "HttpError"; - /** - * Server response object. - */ - response: any; - /** - * HTTP status code of the server response. - */ - code: number; - - /** - * @internal - */ - constructor(response: ArangojsResponse) { - super(); - this.response = response; - this.code = response.status || 500; - this.message = messages[this.code] || messages[500]; - const err = new Error(this.message); - err.name = this.name; - for (const key of nativeErrorKeys) { - if (err[key]) this[key] = err[key] as string; - } - } - - toJSON() { - return { - error: true, - code: this.code, - }; - } -} diff --git a/src/errors.ts b/src/errors.ts new file mode 100644 index 000000000..eae50925c --- /dev/null +++ b/src/errors.ts @@ -0,0 +1,366 @@ +/** + * ```ts + * import type { ArangoError, HttpError } from "arangojs/errors"; + * ``` + * + * The "errors" module provides types and interfaces for TypeScript related + * to arangojs error handling. + * + * @packageDocumentation + */ + +import * as connection from "./connection.js"; +import { ERROR_ARANGO_MAINTENANCE_MODE } from "./lib/codes.js"; + +/** + * Indicates whether the given value represents an {@link ArangoError}. + * + * @param error - A value that might be an `ArangoError`. + */ +export function isArangoError(error: any): error is ArangoError { + return Boolean(error && error.isArangoError); +} + +/** + * Indicates whether the given value represents a {@link NetworkError}. + * + * @param error - A value that might be a `NetworkError`. + */ +export function isNetworkError(error: any): error is NetworkError { + return error instanceof NetworkError; +} + +/** + * @internal + * + * Indicates whether the given value represents a Node.js `SystemError`. 
+ */ +export function isSystemError(err: any): err is SystemError { + if (!err || !(err instanceof Error)) return false; + if (Object.getPrototypeOf(err) !== Error.prototype) return false; + const error = err as SystemError; + if (typeof error.code !== "string") return false; + if (typeof error.syscall !== "string") return false; + return typeof error.errno === "number" || typeof error.errno === "string"; +} + +/** + * @internal + * + * Indicates whether the given value represents a Node.js `UndiciError`. + */ +export function isUndiciError(err: any): err is UndiciError { + if (!err || !(err instanceof Error)) return false; + const error = err as UndiciError; + if (typeof error.code !== "string") return false; + return error.code.startsWith("UND_"); +} + +/** + * @internal + * + * Determines whether the given failed fetch error cause is safe to retry. + */ +function isSafeToRetryFailedFetch(error?: Error): boolean | null { + if (!error || !error.cause) return null; + let cause = error.cause as Error; + if (isArangoError(cause) || isNetworkError(cause)) { + return cause.isSafeToRetry; + } + if ( + isSystemError(cause) && + cause.syscall === "connect" && + cause.code === "ECONNREFUSED" + ) { + return true; + } + if (isUndiciError(cause) && cause.code === "UND_ERR_CONNECT_TIMEOUT") { + return true; + } + return isSafeToRetryFailedFetch(cause); +} + +/** + * Interface representing a Node.js `UndiciError`. + * + * @internal + */ +export interface UndiciError extends Error { + code: `UND_${string}`; +} + +/** + * Interface representing a Node.js `SystemError`. + * + * @internal + */ +export interface SystemError extends Error { + code: string; + errno: number | string; + syscall: string; +} + +/** + * Represents an error from a deliberate timeout encountered while waiting + * for propagation. 
+ */ +export class PropagationTimeoutError extends Error { + name = "PropagationTimeoutError"; + + constructor(message?: string, options: { cause?: Error } = {}) { + super(message ?? "Timed out while waiting for propagation", options); + } +} + +/** + * Represents a network error or an error encountered while performing a network request. + */ +export class NetworkError extends Error { + name = "NetworkError"; + + /** + * Indicates whether the request that caused this error can be safely retried. + */ + isSafeToRetry: boolean | null; + + /** + * Fetch request object. + */ + request: globalThis.Request; + + constructor( + message: string, + request: globalThis.Request, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + const { isSafeToRetry = null, ...opts } = options; + super(message, opts); + this.request = request; + this.isSafeToRetry = isSafeToRetry; + } + + toJSON() { + return { + error: true, + errorMessage: this.message, + code: 0, + }; + } +} + +/** + * Represents an error from a deliberate timeout encountered while waiting + * for a server response. + */ +export class ResponseTimeoutError extends NetworkError { + name = "ResponseTimeoutError"; + + constructor( + message: string | undefined, + request: globalThis.Request, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + super( + message ?? "Timed out while waiting for server response", + request, + options, + ); + } +} + +/** + * Represents an error from a request that was aborted. + */ +export class RequestAbortedError extends NetworkError { + name = "RequestAbortedError"; + + constructor( + message: string | undefined, + request: globalThis.Request, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + super(message ?? "Request aborted", request, options); + } +} + +/** + * Represents an error from a failed fetch request. + * + * The root cause is often extremely difficult to determine. 
+ */ +export class FetchFailedError extends NetworkError { + name = "FetchFailedError"; + + constructor( + message: string | undefined, + request: globalThis.Request, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + let isSafeToRetry = + options.isSafeToRetry ?? isSafeToRetryFailedFetch(options.cause); + if (options.cause?.cause instanceof Error && options.cause.cause.message) { + message = `Fetch failed: ${options.cause.cause.message}`; + } + super(message ?? "Fetch failed", request, { ...options, isSafeToRetry }); + } +} + +/** + * Represents a plain HTTP error response. + */ +export class HttpError extends NetworkError { + name = "HttpError"; + + /** + * HTTP status code of the server response. + */ + code: number; + + /** + * Server response object. + */ + response: connection.ProcessedResponse; + + /** + * @internal + */ + constructor( + response: connection.ProcessedResponse, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + super(connection.getStatusMessage(response), response.request, options); + this.response = response; + this.code = response.status; + } + + toJSON() { + return { + error: true, + errorMessage: this.message, + code: this.code, + }; + } + + toString() { + return `${this.name} ${this.code}: ${this.message}`; + } +} + +/** + * Represents an error returned by ArangoDB. + */ +export class ArangoError extends Error { + name = "ArangoError"; + + /** + * Indicates whether the request that caused this error can be safely retried. + * + * @internal + */ + isSafeToRetry: boolean | null = null; + + /** + * @internal + */ + get error(): true { + return true; + } + + /** + * ArangoDB error code. + * + * See [ArangoDB error documentation](https://www.arangodb.com/docs/stable/appendix-error-codes.html). + */ + errorNum: number; + + /** + * Error message accompanying the error code. 
+ */ + get errorMessage(): string { + return this.message; + } + + /** + * HTTP status code included in the server error response object. + */ + code?: number; + + /** + * @internal + * + * Creates a new `ArangoError` from a response object. + */ + static from( + response: connection.ProcessedResponse, + ): ArangoError { + return new ArangoError(response.parsedBody!, { + cause: new HttpError(response), + }); + } + + /** + * Creates a new `ArangoError` from an ArangoDB error response. + */ + constructor( + data: Omit, + options: { cause?: Error; isSafeToRetry?: boolean | null } = {}, + ) { + const { isSafeToRetry, ...opts } = options; + super(data.errorMessage, opts); + this.errorNum = data.errorNum; + this.code = data.code; + if (isSafeToRetry !== undefined) { + this.isSafeToRetry = isSafeToRetry; + } else if (this.errorNum === ERROR_ARANGO_MAINTENANCE_MODE) { + this.isSafeToRetry = true; + } else if (this.cause instanceof NetworkError) { + this.isSafeToRetry = this.cause.isSafeToRetry; + } + } + + /** + * Server response object. + */ + get response(): + | connection.ProcessedResponse + | undefined { + const cause = this.cause; + if (cause instanceof HttpError) { + return cause.response; + } + return undefined; + } + + /** + * Fetch request object. + */ + get request(): globalThis.Request | undefined { + const cause = this.cause; + if (cause instanceof NetworkError) { + return cause.request; + } + return undefined; + } + + /** + * @internal + * + * Indicates that this object represents an ArangoDB error. 
+ */ + get isArangoError(): true { + return true; + } + + toJSON(): connection.ArangoErrorResponse { + return { + error: true, + errorMessage: this.errorMessage, + errorNum: this.errorNum, + code: this.code, + }; + } + + toString() { + return `${this.name} ${this.errorNum}: ${this.message}`; + } +} diff --git a/src/foxx-manifest.ts b/src/foxx-manifest.ts index ad4cd1d4b..76a91afb4 100644 --- a/src/foxx-manifest.ts +++ b/src/foxx-manifest.ts @@ -1,6 +1,6 @@ /** * ```ts - * import type { FoxxManifest } from "arangojs/foxx-manifest.js"; + * import type { FoxxManifest } from "arangojs/foxx-manifest"; * ``` * * The "foxx-manifest" module provides the Foxx manifest type for TypeScript. diff --git a/src/graph.ts b/src/graphs.ts similarity index 72% rename from src/graph.ts rename to src/graphs.ts index e90284d80..f82eb9373 100644 --- a/src/graph.ts +++ b/src/graphs.ts @@ -4,52 +4,26 @@ * Graph, * GraphVertexCollection, * GraphEdgeCollection, - * } from "arangojs/graph.js"; + * } from "arangojs/graphs"; * ``` * - * The "graph" module provides graph related types and interfaces + * The "graphs" module provides graph related types and interfaces * for TypeScript. * * @packageDocumentation */ -import { - ArangoCollection, - collectionToString, - DocumentCollection, - EdgeCollection, -} from "./collection.js"; -import { Database } from "./database.js"; -import { - Document, - DocumentData, - DocumentMetadata, - DocumentSelector, - Edge, - EdgeData, - Patch, - _documentHandle, -} from "./documents.js"; -import { isArangoError } from "./error.js"; +import * as collections from "./collections.js"; +import * as databases from "./databases.js"; +import * as documents from "./documents.js"; +import * as errors from "./errors.js"; import { DOCUMENT_NOT_FOUND, GRAPH_NOT_FOUND } from "./lib/codes.js"; -/** - * Indicates whether the given value represents a {@link graph.Graph}. - * - * @param graph - A value that might be a Graph. 
- */ -export function isArangoGraph(graph: any): graph is Graph { - return Boolean(graph && graph.isArangoGraph); -} - /** * @internal */ function mungeGharialResponse(body: any, prop: "vertex" | "edge" | "removed") { - const { new: newDoc, old: oldDoc, [prop]: doc, ...meta } = body; - const result = { ...meta, ...doc }; - if (typeof newDoc !== "undefined") result.new = newDoc; - if (typeof oldDoc !== "undefined") result.old = oldDoc; - return result; + const { [prop]: doc, ...meta } = body; + return { ...meta, ...doc }; } /** @@ -57,20 +31,23 @@ function mungeGharialResponse(body: any, prop: "vertex" | "edge" | "removed") { */ function coerceEdgeDefinition(options: EdgeDefinitionOptions): EdgeDefinition { const edgeDefinition = {} as EdgeDefinition; - edgeDefinition.collection = collectionToString(options.collection); + edgeDefinition.collection = collections.collectionToString( + options.collection, + ); edgeDefinition.from = Array.isArray(options.from) - ? options.from.map(collectionToString) - : [collectionToString(options.from)]; + ? options.from.map(collections.collectionToString) + : [collections.collectionToString(options.from)]; edgeDefinition.to = Array.isArray(options.to) - ? options.to.map(collectionToString) - : [collectionToString(options.to)]; + ? options.to.map(collections.collectionToString) + : [collections.collectionToString(options.to)]; return edgeDefinition; } +//#region Graph document operation options /** * Options for retrieving a document from a graph collection. */ -export type GraphCollectionReadOptions = { +export type ReadGraphDocumentOptions = { /** * If set to a document revision, the document will only be returned if its * `_rev` property matches this value. @@ -98,7 +75,7 @@ export type GraphCollectionReadOptions = { /** * Options for inserting a document into a graph collection. 
*/ -export type GraphCollectionInsertOptions = { +export type InsertGraphDocumentOptions = { /** * If set to `true`, data will be synchronized to disk before returning. * @@ -117,7 +94,7 @@ export type GraphCollectionInsertOptions = { /** * Options for replacing a document in a graph collection. */ -export type GraphCollectionReplaceOptions = { +export type ReplaceGraphDocumentOptions = { /** * If set to a document revision, the document will only be modified if its * `_rev` property matches this value. @@ -157,7 +134,7 @@ export type GraphCollectionReplaceOptions = { /** * Options for removing a document from a graph collection. */ -export type GraphCollectionRemoveOptions = { +export type RemoveGraphDocumentOptions = { /** * If set to a document revision, the document will only be removed if its * `_rev` property matches this value. @@ -179,47 +156,39 @@ export type GraphCollectionRemoveOptions = { */ returnOld?: boolean; }; +//#endregion +//#region Edge definition operation options /** - * Definition of a relation in a {@link graph.Graph}. - */ -export type EdgeDefinition = { - /** - * Name of the collection containing the edges. - */ - collection: string; - /** - * Array of names of collections containing the start vertices. - */ - from: string[]; - /** - * Array of names of collections containing the end vertices. - */ - to: string[]; -}; - -/** - * An edge definition used to define a collection of edges in a {@link graph.Graph}. + * An edge definition used to define a collection of edges in a {@link Graph}. */ export type EdgeDefinitionOptions = { /** * Collection containing the edges. */ - collection: string | ArangoCollection; + collection: string | collections.ArangoCollection; /** * Collection or collections containing the start vertices. 
*/ - from: (string | ArangoCollection)[] | string | ArangoCollection; + from: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; /** * Collection or collections containing the end vertices. */ - to: (string | ArangoCollection)[] | string | ArangoCollection; + to: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; }; +//#endregion +//#region GraphDescription /** * General information about a graph. */ -export type GraphInfo = { +export type GraphDescription = { /** * Key of the document internally representing this graph. * @@ -294,6 +263,26 @@ export type GraphInfo = { isDisjoint?: boolean; }; +/** + * Definition of a relation in a {@link Graph}. + */ +export type EdgeDefinition = { + /** + * Name of the collection containing the edges. + */ + collection: string; + /** + * Array of names of collections containing the start vertices. + */ + from: string[]; + /** + * Array of names of collections containing the end vertices. + */ + to: string[]; +}; +//#endregion + +//#region Graph operation options /** * Option for creating a graph. */ @@ -309,7 +298,10 @@ export type CreateGraphOptions = { * Additional vertex collections. Documents within these collections do not * have edges within this graph. */ - orphanCollections?: (string | ArangoCollection)[] | string | ArangoCollection; + orphanCollections?: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; /** * (Cluster only.) Number of shards that is used for every collection @@ -356,25 +348,34 @@ export type CreateGraphOptions = { * (Enterprise Edition cluster only.) Collections to be included in a Hybrid * SmartGraph. */ - satellites?: (string | ArangoCollection)[]; + satellites?: (string | collections.ArangoCollection)[]; }; +/** + * Options for adding a vertex collection to a graph. + */ export type AddVertexCollectionOptions = { /** * (Enterprise Edition cluster only.) 
Collections to be included in a Hybrid * SmartGraph. */ - satellites?: (string | ArangoCollection)[]; + satellites?: (string | collections.ArangoCollection)[]; }; +/** + * Options for adding an edge definition to a graph. + */ export type AddEdgeDefinitionOptions = { /** * (Enterprise Edition cluster only.) Collections to be included in a Hybrid * SmartGraph. */ - satellites?: (string | ArangoCollection)[]; + satellites?: (string | collections.ArangoCollection)[]; }; +/** + * Options for replacing an edge definition in a graph. + */ export type ReplaceEdgeDefinitionOptions = { /** * (Enterprise Edition cluster only.) Collections to be included in a Hybrid @@ -382,23 +383,34 @@ export type ReplaceEdgeDefinitionOptions = { */ satellites?: string[]; }; +//#endregion +//#region GraphVertexCollection class /** - * Represents a {@link collection.DocumentCollection} of vertices in a {@link graph.Graph}. + * Represents a {@link collections.DocumentCollection} of vertices in a {@link Graph}. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent vertex document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent vertex document contents passed + * when inserting or replacing vertex documents (without computed properties). 
*/ -export class GraphVertexCollection = any> - implements ArangoCollection { - protected _db: Database; +export class GraphVertexCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, +> implements collections.ArangoCollection +{ + protected _db: databases.Database; protected _name: string; protected _graph: Graph; - protected _collection: DocumentCollection; + protected _collection: collections.DocumentCollection< + EntryResultType, + EntryInputType + >; /** * @internal */ - constructor(db: Database, name: string, graph: Graph) { + constructor(db: databases.Database, name: string, graph: Graph) { this._db = db; this._collection = db.collection(name); this._name = this._collection.name; @@ -429,14 +441,14 @@ export class GraphVertexCollection = any> } /** - * A {@link collection.DocumentCollection} instance for this vertex collection. + * A {@link collections.DocumentCollection} instance for this vertex collection. */ get collection() { return this._collection; } /** - * The {@link graph.Graph} instance this vertex collection is bound to. + * The {@link Graph} instance this vertex collection is bound to. 
*/ get graph() { return this._graph; @@ -462,16 +474,16 @@ export class GraphVertexCollection = any> * } * ``` */ - async vertexExists(selector: DocumentSelector): Promise { + async vertexExists(selector: documents.DocumentSelector): Promise { try { return await this._db.request( { method: "HEAD", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, }, - () => true + () => true, ); } catch (err: any) { if (err.code === 404) { @@ -516,9 +528,9 @@ export class GraphVertexCollection = any> * ``` */ async vertex( - selector: DocumentSelector, - options?: GraphCollectionReadOptions - ): Promise>; + selector: documents.DocumentSelector, + options?: ReadGraphDocumentOptions, + ): Promise>; /** * Retrieves the vertex matching the given key or id. * @@ -555,13 +567,13 @@ export class GraphVertexCollection = any> * ``` */ async vertex( - selector: DocumentSelector, - graceful: boolean - ): Promise>; + selector: documents.DocumentSelector, + graceful: boolean, + ): Promise>; async vertex( - selector: DocumentSelector, - options: boolean | GraphCollectionReadOptions = {} - ): Promise | null> { + selector: documents.DocumentSelector, + options: boolean | ReadGraphDocumentOptions = {}, + ): Promise | null> { if (typeof options === "boolean") { options = { graceful: options }; } @@ -575,20 +587,20 @@ export class GraphVertexCollection = any> if (rev) headers["if-match"] = rev; const result = this._db.request( { - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, headers, search, allowDirtyRead, }, - (res) => res.parsedBody.vertex + (res) => 
res.parsedBody.vertex, ); if (!graceful) return result; try { return await result; } catch (err: any) { - if (isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { return null; } throw err; @@ -613,20 +625,25 @@ export class GraphVertexCollection = any> * ``` */ save( - data: DocumentData, - options?: GraphCollectionInsertOptions - ): Promise }>; - save(data: DocumentData, options?: GraphCollectionInsertOptions) { + data: documents.DocumentData, + options?: InsertGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { new?: documents.Document } + >; + save( + data: documents.DocumentData, + options?: InsertGraphDocumentOptions, + ) { return this._db.request( { method: "POST", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, )}/vertex/${encodeURIComponent(this._name)}`, body: data, search: options, }, - (res) => mungeGharialResponse(res.parsedBody, "vertex") + (res) => mungeGharialResponse(res.parsedBody, "vertex"), ); } @@ -655,14 +672,19 @@ export class GraphVertexCollection = any> * ``` */ replace( - selector: DocumentSelector, - newValue: DocumentData, - options?: GraphCollectionReplaceOptions - ): Promise; old?: Document }>; + selector: documents.DocumentSelector, + newValue: documents.DocumentData, + options?: ReplaceGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { + new?: documents.Document; + old?: documents.Document; + } + >; replace( - selector: DocumentSelector, - newValue: DocumentData, - options: GraphCollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newValue: documents.DocumentData, + options: ReplaceGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -673,14 +695,14 @@ export class GraphVertexCollection = any> return this._db.request( { method: "PUT", - path: 
`/_api/gharial/${encodeURIComponent( - this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "vertex") + (res) => mungeGharialResponse(res.parsedBody, "vertex"), ); } @@ -709,14 +731,19 @@ export class GraphVertexCollection = any> * ``` */ update( - selector: DocumentSelector, - newValue: Patch>, - options?: GraphCollectionReplaceOptions - ): Promise; old?: Document }>; + selector: documents.DocumentSelector, + newValue: documents.Patch>, + options?: ReplaceGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { + new?: documents.Document; + old?: documents.Document; + } + >; update( - selector: DocumentSelector, - newValue: Patch>, - options: GraphCollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newValue: documents.Patch>, + options: ReplaceGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -727,14 +754,14 @@ export class GraphVertexCollection = any> return this._db.request( { method: "PATCH", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "vertex") + (res) => mungeGharialResponse(res.parsedBody, "vertex"), ); } @@ -766,12 +793,14 @@ export class GraphVertexCollection = any> * ``` */ remove( - selector: DocumentSelector, - options?: GraphCollectionRemoveOptions - ): Promise }>; + selector: documents.DocumentSelector, + options?: RemoveGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { old?: documents.Document } 
+ >; remove( - selector: DocumentSelector, - options: GraphCollectionRemoveOptions = {} + selector: documents.DocumentSelector, + options: RemoveGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -782,33 +811,44 @@ export class GraphVertexCollection = any> return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/vertex/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/vertex/${encodeURI(documents._documentHandle(selector, this._name))}`, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "removed") + (res) => mungeGharialResponse(res.parsedBody, "removed"), ); } } +//#endregion +//#region GraphEdgeCollection class /** - * Represents a {@link collection.EdgeCollection} of edges in a {@link graph.Graph}. + * Represents a {@link collections.EdgeCollection} of edges in a {@link Graph}. * - * @param T - Type to use for document data. Defaults to `any`. + * @param EntryResultType - Type to represent edge document contents returned + * by the server (including computed properties). + * @param EntryInputType - Type to represent edge document contents passed + * when inserting or replacing edge documents (without computed properties). 
*/ -export class GraphEdgeCollection = any> - implements ArangoCollection { - protected _db: Database; +export class GraphEdgeCollection< + EntryResultType extends Record = any, + EntryInputType extends Record = EntryResultType, +> implements collections.ArangoCollection +{ + protected _db: databases.Database; protected _name: string; protected _graph: Graph; - protected _collection: EdgeCollection; + protected _collection: collections.EdgeCollection< + EntryResultType, + EntryInputType + >; /** * @internal */ - constructor(db: Database, name: string, graph: Graph) { + constructor(db: databases.Database, name: string, graph: Graph) { this._db = db; this._collection = db.collection(name); this._name = this._collection.name; @@ -839,14 +879,14 @@ export class GraphEdgeCollection = any> } /** - * A {@link collection.EdgeCollection} instance for this edge collection. + * A {@link collections.EdgeCollection} instance for this edge collection. */ get collection() { return this._collection; } /** - * The {@link graph.Graph} instance this edge collection is bound to. + * The {@link Graph} instance this edge collection is bound to. 
*/ get graph() { return this._graph; @@ -872,16 +912,16 @@ export class GraphEdgeCollection = any> * } * ``` */ - async edgeExists(selector: DocumentSelector): Promise { + async edgeExists(selector: documents.DocumentSelector): Promise { try { return await this._db.request( { method: "HEAD", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, }, - () => true + () => true, ); } catch (err: any) { if (err.code === 404) { @@ -926,9 +966,9 @@ export class GraphEdgeCollection = any> * ``` */ async edge( - selector: DocumentSelector, - options?: GraphCollectionReadOptions - ): Promise>; + selector: documents.DocumentSelector, + options?: ReadGraphDocumentOptions, + ): Promise>; /** * Retrieves the edge matching the given key or id. * @@ -964,11 +1004,14 @@ export class GraphEdgeCollection = any> * } * ``` */ - async edge(selector: DocumentSelector, graceful: boolean): Promise>; async edge( - selector: DocumentSelector, - options: boolean | GraphCollectionReadOptions = {} - ): Promise | null> { + selector: documents.DocumentSelector, + graceful: boolean, + ): Promise>; + async edge( + selector: documents.DocumentSelector, + options: boolean | ReadGraphDocumentOptions = {}, + ): Promise | null> { if (typeof options === "boolean") { options = { graceful: options }; } @@ -982,19 +1025,19 @@ export class GraphEdgeCollection = any> if (rev) headers["if-match"] = rev; const result = this._db.request( { - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, search, allowDirtyRead, }, - (res) => res.parsedBody.edge + (res) => res.parsedBody.edge, ); if 
(!graceful) return result; try { return await result; } catch (err: any) { - if (isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === DOCUMENT_NOT_FOUND) { return null; } throw err; @@ -1018,20 +1061,25 @@ export class GraphEdgeCollection = any> * ``` */ save( - data: EdgeData, - options?: GraphCollectionInsertOptions - ): Promise }>; - save(data: EdgeData, options?: GraphCollectionInsertOptions) { + data: documents.EdgeData, + options?: InsertGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { new?: documents.Edge } + >; + save( + data: documents.EdgeData, + options?: InsertGraphDocumentOptions, + ) { return this._db.request( { method: "POST", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, )}/edge/${encodeURIComponent(this._name)}`, body: data, search: options, }, - (res) => mungeGharialResponse(res.parsedBody, "edge") + (res) => mungeGharialResponse(res.parsedBody, "edge"), ); } @@ -1068,14 +1116,19 @@ export class GraphEdgeCollection = any> * ``` */ replace( - selector: DocumentSelector, - newValue: EdgeData, - options?: GraphCollectionReplaceOptions - ): Promise; old?: Edge }>; + selector: documents.DocumentSelector, + newValue: documents.EdgeData, + options?: ReplaceGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { + new?: documents.Edge; + old?: documents.Edge; + } + >; replace( - selector: DocumentSelector, - newValue: EdgeData, - options: GraphCollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newValue: documents.EdgeData, + options: ReplaceGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -1086,14 +1139,14 @@ export class GraphEdgeCollection = any> return this._db.request( { method: "PUT", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, 
this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "edge") + (res) => mungeGharialResponse(res.parsedBody, "edge"), ); } @@ -1130,14 +1183,19 @@ export class GraphEdgeCollection = any> * ``` */ update( - selector: DocumentSelector, - newValue: Patch>, - options?: GraphCollectionReplaceOptions - ): Promise; old?: Edge }>; + selector: documents.DocumentSelector, + newValue: documents.Patch>, + options?: ReplaceGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { + new?: documents.Edge; + old?: documents.Edge; + } + >; update( - selector: DocumentSelector, - newValue: Patch>, - options: GraphCollectionReplaceOptions = {} + selector: documents.DocumentSelector, + newValue: documents.Patch>, + options: ReplaceGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -1148,14 +1206,14 @@ export class GraphEdgeCollection = any> return this._db.request( { method: "PATCH", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, body: newValue, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "edge") + (res) => mungeGharialResponse(res.parsedBody, "edge"), ); } @@ -1179,12 +1237,14 @@ export class GraphEdgeCollection = any> * ``` */ remove( - selector: DocumentSelector, - options?: GraphCollectionRemoveOptions - ): Promise }>; + selector: documents.DocumentSelector, + options?: RemoveGraphDocumentOptions, + ): Promise< + documents.DocumentMetadata & { old?: documents.Edge } + >; remove( - selector: DocumentSelector, - options: GraphCollectionRemoveOptions = {} + selector: documents.DocumentSelector, + 
options: RemoveGraphDocumentOptions = {}, ) { if (typeof options === "string") { options = { rev: options }; @@ -1195,36 +1255,45 @@ export class GraphEdgeCollection = any> return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent( - this.graph.name - )}/edge/${encodeURI(_documentHandle(selector, this._name))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this.graph.name, + )}/edge/${encodeURI(documents._documentHandle(selector, this._name))}`, search, headers, }, - (res) => mungeGharialResponse(res.parsedBody, "removed") + (res) => mungeGharialResponse(res.parsedBody, "removed"), ); } } +//#endregion + +//#region Graph class +/** + * Indicates whether the given value represents a {@link Graph}. + * + * @param graph - A value that might be a Graph. + */ +export function isArangoGraph(graph: any): graph is Graph { + return Boolean(graph && graph.isArangoGraph); +} /** - * Represents a graph in a {@link database.Database}. + * Represents a graph in a {@link databases.Database}. */ export class Graph { protected _name: string; - protected _db: Database; + protected _db: databases.Database; /** * @internal */ - constructor(db: Database, name: string) { + constructor(db: databases.Database, name: string) { this._db = db; this._name = name; } /** - * @internal - * * Indicates that this object represents an ArangoDB Graph. 
*/ get isArangoGraph(): true { @@ -1261,7 +1330,7 @@ export class Graph { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === GRAPH_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === GRAPH_NOT_FOUND) { return false; } throw err; @@ -1279,10 +1348,10 @@ export class Graph { * // data contains general information about the graph * ``` */ - get(): Promise { + get(): Promise { return this._db.request( - { path: `/_api/gharial/${encodeURIComponent(this._name)}` }, - (res) => res.parsedBody.graph + { pathname: `/_api/gharial/${encodeURIComponent(this._name)}` }, + (res) => res.parsedBody.graph, ); } @@ -1309,28 +1378,31 @@ export class Graph { */ create( edgeDefinitions: EdgeDefinitionOptions[], - options: CreateGraphOptions = {} - ): Promise { + options: CreateGraphOptions = {}, + ): Promise { const { orphanCollections, satellites, waitForSync, isSmart, ...opts } = options; return this._db.request( { method: "POST", - path: "/_api/gharial", + pathname: "/_api/gharial", body: { orphanCollections: orphanCollections && (Array.isArray(orphanCollections) - ? orphanCollections.map(collectionToString) - : [collectionToString(orphanCollections)]), + ? 
orphanCollections.map(collections.collectionToString) + : [collections.collectionToString(orphanCollections)]), edgeDefinitions: edgeDefinitions.map(coerceEdgeDefinition), isSmart, name: this._name, - options: { ...opts, satellites: satellites?.map(collectionToString) }, + options: { + ...opts, + satellites: satellites?.map(collections.collectionToString), + }, }, search: { waitForSync }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1352,27 +1424,27 @@ export class Graph { return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent(this._name)}`, + pathname: `/_api/gharial/${encodeURIComponent(this._name)}`, search: { dropCollections }, }, - (res) => res.parsedBody.removed + (res) => res.parsedBody.removed, ); } /** - * Returns a {@link graph.GraphVertexCollection} instance for the given collection + * Returns a {@link GraphVertexCollection} instance for the given collection * name representing the collection in this graph. * * @param T - Type to use for document data. Defaults to `any`. * @param collection - Name of the vertex collection. */ vertexCollection = any>( - collection: string | ArangoCollection + collection: string | collections.ArangoCollection, ): GraphVertexCollection { return new GraphVertexCollection( this._db, - collectionToString(collection), - this + collections.collectionToString(collection), + this, ); } @@ -1380,7 +1452,7 @@ export class Graph { * Fetches all vertex collections of this graph from the database and returns * an array of their names. * - * See also {@link graph.Graph#vertexCollections}. + * See also {@link Graph#vertexCollections}. 
* * @example * ```js @@ -1399,16 +1471,16 @@ export class Graph { */ listVertexCollections(): Promise { return this._db.request( - { path: `/_api/gharial/${encodeURIComponent(this._name)}/vertex` }, - (res) => res.parsedBody.collections + { pathname: `/_api/gharial/${encodeURIComponent(this._name)}/vertex` }, + (res) => res.parsedBody.collections, ); } /** * Fetches all vertex collections of this graph from the database and returns - * an array of {@link graph.GraphVertexCollection} instances. + * an array of {@link GraphVertexCollection} instances. * - * See also {@link graph.Graph#listVertexCollections}. + * See also {@link Graph#listVertexCollections}. * * @example * ```js @@ -1451,20 +1523,23 @@ export class Graph { * ``` */ addVertexCollection( - collection: string | ArangoCollection, - options: AddVertexCollectionOptions = {} - ): Promise { + collection: string | collections.ArangoCollection, + options: AddVertexCollectionOptions = {}, + ): Promise { const { satellites, ...opts } = options; return this._db.request( { method: "POST", - path: `/_api/gharial/${encodeURIComponent(this._name)}/vertex`, + pathname: `/_api/gharial/${encodeURIComponent(this._name)}/vertex`, body: { - collection: collectionToString(collection), - options: { ...opts, satellites: satellites?.map(collectionToString) }, + collection: collections.collectionToString(collection), + options: { + ...opts, + satellites: satellites?.map(collections.collectionToString), + }, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1491,25 +1566,25 @@ export class Graph { * ``` */ removeVertexCollection( - collection: string | ArangoCollection, - dropCollection: boolean = false - ): Promise { + collection: string | collections.ArangoCollection, + dropCollection: boolean = false, + ): Promise { return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent( - this._name - )}/vertex/${encodeURIComponent(collectionToString(collection))}`, + pathname: 
`/_api/gharial/${encodeURIComponent( + this._name, + )}/vertex/${encodeURIComponent(collections.collectionToString(collection))}`, search: { dropCollection, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } /** - * Returns a {@link graph.GraphEdgeCollection} instance for the given collection + * Returns a {@link GraphEdgeCollection} instance for the given collection * name representing the collection in this graph. * * @param T - Type to use for document data. Defaults to `any`. @@ -1532,12 +1607,12 @@ export class Graph { * ``` */ edgeCollection = any>( - collection: string | ArangoCollection + collection: string | collections.ArangoCollection, ): GraphEdgeCollection { return new GraphEdgeCollection( this._db, - collectionToString(collection), - this + collections.collectionToString(collection), + this, ); } @@ -1545,7 +1620,7 @@ export class Graph { * Fetches all edge collections of this graph from the database and returns * an array of their names. * - * See also {@link graph.Graph#edgeCollections}. + * See also {@link Graph#edgeCollections}. * * @example * ```js @@ -1564,16 +1639,16 @@ export class Graph { */ listEdgeCollections(): Promise { return this._db.request( - { path: `/_api/gharial/${encodeURIComponent(this._name)}/edge` }, - (res) => res.parsedBody.collections + { pathname: `/_api/gharial/${encodeURIComponent(this._name)}/edge` }, + (res) => res.parsedBody.collections, ); } /** * Fetches all edge collections of this graph from the database and returns - * an array of {@link graph.GraphEdgeCollection} instances. + * an array of {@link GraphEdgeCollection} instances. * - * See also {@link graph.Graph#listEdgeCollections}. + * See also {@link Graph#listEdgeCollections}. 
* * @example * ```js @@ -1617,19 +1692,22 @@ export class Graph { */ addEdgeDefinition( edgeDefinition: EdgeDefinitionOptions, - options: AddEdgeDefinitionOptions = {} - ): Promise { + options: AddEdgeDefinitionOptions = {}, + ): Promise { const { satellites, ...opts } = options; return this._db.request( { method: "POST", - path: `/_api/gharial/${encodeURIComponent(this._name)}/edge`, + pathname: `/_api/gharial/${encodeURIComponent(this._name)}/edge`, body: { ...coerceEdgeDefinition(edgeDefinition), - options: { ...opts, satellites: satellites?.map(collectionToString) }, + options: { + ...opts, + satellites: satellites?.map(collections.collectionToString), + }, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1660,8 +1738,8 @@ export class Graph { */ replaceEdgeDefinition( edgeDefinition: EdgeDefinitionOptions, - options?: ReplaceEdgeDefinitionOptions - ): Promise; + options?: ReplaceEdgeDefinitionOptions, + ): Promise; /** * Replaces an edge definition in this graph. The existing edge definition * for the given edge collection will be overwritten. 
@@ -1689,23 +1767,23 @@ export class Graph { * ``` */ replaceEdgeDefinition( - collection: string | ArangoCollection, + collection: string | collections.ArangoCollection, edgeDefinition: EdgeDefinitionOptions, - options?: ReplaceEdgeDefinitionOptions - ): Promise; + options?: ReplaceEdgeDefinitionOptions, + ): Promise; replaceEdgeDefinition( collectionOrEdgeDefinitionOptions: | string - | ArangoCollection + | collections.ArangoCollection | EdgeDefinitionOptions, edgeDefinitionOrOptions?: | EdgeDefinitionOptions | ReplaceEdgeDefinitionOptions, - options: ReplaceEdgeDefinitionOptions = {} + options: ReplaceEdgeDefinitionOptions = {}, ) { let collection = collectionOrEdgeDefinitionOptions as | string - | ArangoCollection; + | collections.ArangoCollection; let edgeDefinition = edgeDefinitionOrOptions as EdgeDefinitionOptions; if ( edgeDefinitionOrOptions && @@ -1723,15 +1801,18 @@ export class Graph { return this._db.request( { method: "PUT", - path: `/_api/gharial/${encodeURIComponent( - this._name - )}/edge/${encodeURIComponent(collectionToString(collection))}`, + pathname: `/_api/gharial/${encodeURIComponent( + this._name, + )}/edge/${encodeURIComponent(collections.collectionToString(collection))}`, body: { ...coerceEdgeDefinition(edgeDefinition), - options: { ...opts, satellites: satellites?.map(collectionToString) }, + options: { + ...opts, + satellites: satellites?.map(collections.collectionToString), + }, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } @@ -1758,20 +1839,21 @@ export class Graph { * ``` */ removeEdgeDefinition( - collection: string | ArangoCollection, - dropCollection: boolean = false - ): Promise { + collection: string | collections.ArangoCollection, + dropCollection: boolean = false, + ): Promise { return this._db.request( { method: "DELETE", - path: `/_api/gharial/${encodeURIComponent( - this._name - )}/edge/${encodeURIComponent(collectionToString(collection))}`, + pathname: `/_api/gharial/${encodeURIComponent( + 
this._name, + )}/edge/${encodeURIComponent(collections.collectionToString(collection))}`, search: { dropCollection, }, }, - (res) => res.parsedBody.graph + (res) => res.parsedBody.graph, ); } } +//#endregion diff --git a/src/hot-backups.ts b/src/hot-backups.ts new file mode 100644 index 000000000..5b587b170 --- /dev/null +++ b/src/hot-backups.ts @@ -0,0 +1,73 @@ +/** + * ```ts + * import type { HotBackupOptions } from "arangojs/hot-backups"; + * ``` + * + * The "hot-backups" module provides types for managing hot backups. + * + * @packageDocumentation + */ + +/** + * (Enterprise Edition only.) Options for creating a hot backup. + */ +export type HotBackupOptions = { + /** + * If set to `true` and no global transaction lock can be acquired within the + * given timeout, a possibly inconsistent backup is taken. + * + * Default: `false` + */ + allowInconsistent?: boolean; + /** + * (Enterprise Edition cluster only.) If set to `true` and no global + * transaction lock can be acquired within the given timeout, all running + * transactions are forcefully aborted to ensure that a consistent backup + * can be created. + * + * Default: `false`. + */ + force?: boolean; + /** + * Label to appended to the backup's identifier. + * + * Default: If omitted or empty, a UUID will be generated. + */ + label?: string; + /** + * Time in seconds that the operation will attempt to get a consistent + * snapshot. + * + * Default: `120`. + */ + timeout?: number; +}; + +/** + * (Enterprise Edition only.) Result of a hot backup. + */ +export type HotBackupResult = { + id: string; + potentiallyInconsistent: boolean; + sizeInBytes: number; + datetime: string; + nrDBServers: number; + nrFiles: number; +}; + +/** + * (Enterprise Edition only.) List of known hot backups. 
+ */ +export type HotBackupList = { + server: string; + list: Record< + string, + HotBackupResult & { + version: string; + keys: any[]; + available: boolean; + nrPiecesPresent: number; + countIncludesFilesOnly: boolean; + } + >; +}; diff --git a/src/index.ts b/src/index.ts index 8972b0434..af5e3c1af 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,12 +8,12 @@ * * If you are just getting started, you probably want to use the * {@link arangojs} function, which is also the default export of this module, - * or the {@link database.Database} class for which it is a wrapper. + * or the {@link databases.Database} class for which it is a wrapper. * * @packageDocumentation */ -import { Config } from "./connection.js"; -import { Database } from "./database.js"; +import * as configuration from "./configuration.js"; +import * as databases from "./databases.js"; if (typeof module !== "undefined" && typeof exports !== "undefined") { module.exports = exports = arangojs; @@ -21,7 +21,7 @@ if (typeof module !== "undefined" && typeof exports !== "undefined") { /** * Creates a new `Database` instance with its own connection pool. * - * This is a wrapper function for the {@link database.Database:constructor}. + * This is a wrapper function for the {@link databases.Database:constructor}. * * @param config - An object with configuration options. * @@ -34,14 +34,16 @@ if (typeof module !== "undefined" && typeof exports !== "undefined") { * }); * ``` */ -export function arangojs(config?: Config): Database; +export function arangojs( + config?: configuration.ConfigOptions, +): databases.Database; /** * Creates a new `Database` instance with its own connection pool. * - * This is a wrapper function for the {@link database.Database:constructor}. + * This is a wrapper function for the {@link databases.Database:constructor}. * * @param url - Base URL of the ArangoDB server or list of server URLs. - * Equivalent to the `url` option in {@link connection.Config}. 
+ * Equivalent to the `url` option in {@link configuration.ConfigOptions}. * * @example * ```js @@ -49,14 +51,20 @@ export function arangojs(config?: Config): Database; * db.useBasicAuth("admin", "hunter2"); * ``` */ -export function arangojs(url: string | string[], name?: string): Database; -export function arangojs(config?: string | string[] | Config, name?: string) { +export function arangojs( + url: string | string[], + name?: string, +): databases.Database; +export function arangojs( + config?: string | string[] | configuration.ConfigOptions, + name?: string, +) { if (typeof config === "string" || Array.isArray(config)) { const url = config; - return new Database(url, name); + return new databases.Database(url, name); } - return new Database(config); + return new databases.Database(config); } export default arangojs; export { aql } from "./aql.js"; -export { Database } from "./database.js"; +export { Database } from "./databases.js"; diff --git a/src/indexes.ts b/src/indexes.ts index 9176cfb86..3143594ee 100644 --- a/src/indexes.ts +++ b/src/indexes.ts @@ -7,7 +7,7 @@ * PersistentIndex, * PrimaryIndex, * TtlIndex, - * } from "arangojs/indexes.js"; + * } from "arangojs/indexes"; * ``` * * The "indexes" module provides index-related types for TypeScript. @@ -15,53 +15,72 @@ * @packageDocumentation */ -import { AnalyzerFeature } from "./analyzer.js"; -import { Compression, Direction, TierConsolidationPolicy } from "./view.js"; +import * as analyzers from "./analyzers.js"; +import * as views from "./views.js"; +//#region Shared types /** - * Options for creating a persistent index. + * Type of an index. + */ +export type IndexType = IndexDescription["type"]; + +/** + * Type of an internal index. */ -export type EnsurePersistentIndexOptions = { +export type InternalIndexType = InternalIndexDescription["type"]; +//#endregion + +//#region Index operation options +/** + * Options for listing indexes. + */ +export type ListIndexesOptions = { /** - * Type of this index. 
+ * If set to `true`, includes additional information about each index. + * + * Default: `false` */ - type: "persistent"; + withStats?: boolean; /** - * An array of attribute paths. + * If set to `true`, includes internal indexes as well as indexes that are + * not yet fully built but are in the building phase. + * + * You should cast the resulting indexes to `HiddenIndex` to ensure internal + * and incomplete indexes are accurately represented. + * + * Default: `false`. */ - fields: string[]; + withHidden?: boolean; +}; + +/** + * Options for creating an index. + */ +export type EnsureIndexOptions = + | EnsurePersistentIndexOptions + | EnsureGeoIndexOptions + | EnsureTtlIndexOptions + | EnsureMdiIndexOptions + | EnsureMdiPrefixedIndexOptions + | EnsureInvertedIndexOptions; + +type EnsureIndexOptionsType< + Type extends IndexType, + Fields extends any[], + Extra extends {} = {}, +> = { /** * A unique name for this index. */ name?: string; /** - * If set to `true`, a unique index will be created. - * - * Default: `false` - */ - unique?: boolean; - /** - * If set to `true`, the index will omit documents that do not contain at - * least one of the attribute paths in `fields` and these documents will be - * ignored for uniqueness checks. - * - * Default: `false` - */ - sparse?: boolean; - /** - * If set to `false`, inserting duplicate index values from the same - * document will lead to a unique constraint error if this is a unique index. - * - * Default: `true` + * Type of this index. */ - deduplicate?: boolean; + type: Type; /** - * If set to `false`, index selectivity estimates will be disabled for this - * index. - * - * Default: `true` + * An array of attribute paths. */ - estimates?: boolean; + fields: Fields; /** * If set to `true`, the index will be created in the background to reduce * the write-lock duration for the collection during index creation. 
@@ -69,232 +88,317 @@ export type EnsurePersistentIndexOptions = { * Default: `false` */ inBackground?: boolean; - /** - * If set to `true`, an in-memory hash cache will be put in front of the - * persistent index. - * - * Default: `false` - */ - cacheEnabled?: boolean; - /** - * An array of attribute paths that will be stored in the index but can not - * be used for index lookups or sorting but can avoid full document lookups. - */ - storedValues?: string[]; -}; +} & Extra; + +/** + * Options for creating a persistent index. + */ +export type EnsurePersistentIndexOptions = EnsureIndexOptionsType< + "persistent", + string[], + { + /** + * If set to `true`, a unique index will be created. + * + * Default: `false` + */ + unique?: boolean; + /** + * If set to `true`, the index will omit documents that do not contain at + * least one of the attribute paths in `fields` and these documents will be + * ignored for uniqueness checks. + * + * Default: `false` + */ + sparse?: boolean; + /** + * If set to `false`, inserting duplicate index values from the same + * document will lead to a unique constraint error if this is a unique index. + * + * Default: `true` + */ + deduplicate?: boolean; + /** + * If set to `false`, index selectivity estimates will be disabled for this + * index. + * + * Default: `true` + */ + estimates?: boolean; + /** + * If set to `true`, an in-memory hash cache will be put in front of the + * persistent index. + * + * Default: `false` + */ + cacheEnabled?: boolean; + /** + * An array of attribute paths that will be stored in the index but can not + * be used for index lookups or sorting but can avoid full document lookups. + */ + storedValues?: string[]; + } +>; /** * Options for creating a geo index. 
*/ -export type EnsureGeoIndexOptions = - | { - type: "geo"; - /** - * If set to `true`, `fields` must be an array containing a single attribute - * path and the attribute value must be an array with two values, the first - * of which will be interpreted as the longitude and the second of which will - * be interpreted as the latitude of the document. - * - * Default: `false` - */ - geoJson?: false; - /** - * If set to `true`, the index will use pre-3.10 rules for parsing - * GeoJSON polygons. This option is always implicitly `true` when using - * ArangoDB 3.9 or lower. - */ - legacyPolygons?: boolean; - /** - * Attribute paths for the document's latitude and longitude values. - */ - fields: [string, string]; - /** - * A unique name for this index. - */ - name?: string; - /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. - * - * Default: `false` - */ - inBackground?: boolean; - } - | { - type: "geo"; - /** - * If set to `true`, `fields` must be an array containing a single attribute - * path and the attribute value must be an array with two values, the first - * of which will be interpreted as the longitude and the second of which will - * be interpreted as the latitude of the document. - * - * Default: `false` - */ - geoJson?: boolean; - /** - * If set to `true`, the index will use pre-3.10 rules for parsing - * GeoJSON polygons. This option is always implicitly `true` when using - * ArangoDB 3.9 or lower. - */ - legacyPolygons?: boolean; - /** - * An array containing the attribute path for an array containing two values, - * the first of which will be interpreted as the latitude, the second as the - * longitude. If `geoJson` is set to `true`, the order is reversed to match - * the GeoJSON format. - */ - fields: [string]; - /** - * A unique name for this index. 
- */ - name?: string; - /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. - * - * Default: `false` - */ - inBackground?: boolean; - }; +export type EnsureGeoIndexOptions = EnsureIndexOptionsType< + "geo", + [string, string] | [string], + { + /** + * If set to `true`, `fields` must be an array containing a single attribute + * path and the attribute value must be an array with two values, the first + * of which will be interpreted as the longitude and the second of which + * will be interpreted as the latitude of the document. + * + * If set to `false`, `fields` can be either an array containing two + * attribute paths, the first of which will be interpreted as the latitude + * and the second as the longitude, or a single attribute path for an array + * containing two values, the first of which will be interpreted as the + * latitude, the second as the longitude. + * + * Default: `false` + */ + geoJson?: boolean; + /** + * If set to `true`, the index will use pre-3.10 rules for parsing + * GeoJSON polygons. This option is always implicitly `true` when using + * ArangoDB 3.9 or lower. + */ + legacyPolygons?: boolean; + } +>; /** * Options for creating a TTL index. */ -export type EnsureTtlIndexOptions = { - /** - * Type of this index. - */ - type: "ttl"; - /** - * An array containing exactly one attribute path. - */ - fields: [string]; - /** - * A unique name for this index. - */ - name?: string; - /** - * Duration in seconds after the attribute value at which the document will - * be considered as expired. - */ - expireAfter: number; - /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. 
- * - * Default: `false` - */ - inBackground?: boolean; -}; +export type EnsureTtlIndexOptions = EnsureIndexOptionsType< + "ttl", + [string], + { + /** + * Duration in seconds after the attribute value at which the document will + * be considered as expired. + */ + expireAfter: number; + } +>; /** * Options for creating a MDI index. */ -export type EnsureMdiIndexOptions = { - /** - * Type of this index. - */ - type: "mdi"; - /** - * An array containing attribute paths for the dimensions. - */ - fields: string[]; - /** - * Data type of the dimension attributes. - */ - fieldValueTypes: "double"; - /** - * A unique name for this index. - */ - name?: string; - /** - * If set to `true`, a unique index will be created. - * - * Default: `false` - */ - unique?: boolean; - /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. - * - * Default: `false` - */ - inBackground?: boolean; -}; +export type EnsureMdiIndexOptions = EnsureIndexOptionsType< + "mdi", + string[], + { + /** + * Data type of the dimension attributes. + */ + fieldValueTypes: "double"; + /** + * If set to `true`, a unique index will be created. + * + * Default: `false` + */ + unique?: boolean; + /** + * If set to `true`, the index will omit documents that do not contain at + * least one of the attribute paths in `fields` and these documents will be + * ignored for uniqueness checks. + * + * Default: `false` + */ + sparse?: boolean; + /** + * If set to `false`, index selectivity estimates will be disabled for this + * index. + * + * Default: `true` + */ + estimates?: boolean; + /** + * An array of attribute paths that will be stored in the index but can not + * be used for index lookups or sorting but can avoid full document lookups. + */ + storedValues?: string[]; + } +>; /** * Options for creating a prefixed MDI index. */ -export type EnsureMdiPrefixedIndexOptions = { - /** - * Type of this index. 
- */ - type: "mdi-prefixed"; - /** - * An array containing attribute paths for the dimensions. - */ - fields: string[]; - /** - * Data type of the dimension attributes. - */ - fieldValueTypes: "double"; - /** - * An array of attribute names used as a search prefix. - */ - prefixFields: string[]; - /** - * A unique name for this index. - */ - name?: string; - /** - * If set to `true`, a unique index will be created. - * - * Default: `false` - */ - unique?: boolean; - /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. - * - * Default: `false` - */ - inBackground?: boolean; -}; +export type EnsureMdiPrefixedIndexOptions = EnsureIndexOptionsType< + "mdi-prefixed", + string[], + { + /** + * An array of attribute names used as a search prefix. + */ + prefixFields: string[]; + /** + * Data type of the dimension attributes. + */ + fieldValueTypes: "double"; + /** + * If set to `true`, a unique index will be created. + * + * Default: `false` + */ + unique?: boolean; + /** + * If set to `true`, the index will omit documents that do not contain at + * least one of the attribute paths in `fields` and these documents will be + * ignored for uniqueness checks. + * + * Default: `false` + */ + sparse?: boolean; + /** + * If set to `false`, index selectivity estimates will be disabled for this + * index. + * + * Default: `true` + */ + estimates?: boolean; + /** + * An array of attribute paths that will be stored in the index but can not + * be used for index lookups or sorting but can avoid full document lookups. + */ + storedValues?: string[]; + } +>; /** - * (Enterprise Edition only.) Options for a nested field in an inverted index. + * Options for creating an inverted index. */ -export type InvertedIndexNestedFieldOptions = { - /** - * An attribute path. - */ - name: string; - /** - * Name of the Analyzer to apply to the values of this field. 
- * - * Defaults to the `analyzer` specified on the parent options or on the index - * itself. - */ - analyzer?: string; - /** - * List of Analyzer features to enable for this field's Analyzer. - * - * Defaults to the features of the Analyzer. - */ - features?: AnalyzerFeature[]; - /** - * If set to `true` array values will be indexed using the same behavior as - * ArangoSearch Views. This option only applies when using the index in a - * SearchAlias View. - * - * Defaults to the value of `searchField` specified on the index itself. - */ - searchField?: boolean; - /** - * Sub-objects to index to allow querying for co-occurring values. - */ - nested?: (string | InvertedIndexNestedFieldOptions)[]; -}; +export type EnsureInvertedIndexOptions = EnsureIndexOptionsType< + "inverted", + (string | InvertedIndexFieldOptions)[], + { + /** + * If set to `true` array values will by default be indexed using the same + * behavior as ArangoSearch Views. This option only applies when using the + * index in a SearchAlias View. + * + * Default: `false` + */ + searchField?: boolean; + /** + * An array of attribute paths that will be stored in the index but can not + * be used for index lookups or sorting but can avoid full document lookups. + */ + storedValues?: InvertedIndexStoredValueOptions[]; + /** + * Primary sort order to optimize AQL queries using a matching sort order. + */ + primarySort?: InvertedIndexPrimarySortOptions; + /** + * (Enterprise Edition only.) If set to `true`, then the primary key column + * will always be cached in memory. + * + * Default: `false` + */ + primaryKeyCache?: boolean; + /** + * Name of the default Analyzer to apply to the values of indexed fields. + * + * Default: `"identity"` + */ + analyzer?: string; + /** + * List of Analyzer features to enable for the default Analyzer. + * + * Defaults to the Analyzer's features. 
+ */ + features?: analyzers.AnalyzerFeature[]; + /** + * If set to `true`, all document attributes are indexed, excluding any + * sub-attributes configured in the `fields` array. The `analyzer` and + * `features` properties apply to the sub-attributes. This option only + * applies when using the index in a SearchAlias View. + * + * Default: `false` + */ + includeAllFields?: boolean; + /** + * If set to `true`, the position of values in array values are tracked and + * need to be specified in queries. Otherwise all values in an array are + * treated as equivalent. This option only applies when using the index in a + * SearchAlias View. + * + * Default: `false` + */ + trackListPositions?: boolean; + /** + * The number of threads to use for indexing the fields. + * + * Default: `2` + */ + parallelism?: number; + /** + * Wait at least this many commits between removing unused files in the + * ArangoSearch data directory. + * + * Default: `2` + */ + cleanupIntervalStep?: number; + /** + * Wait at least this many milliseconds between committing View data store + * changes and making documents visible to queries. + * + * Default: `1000` + */ + commitIntervalMsec?: number; + /** + * Wait at least this many milliseconds between applying + * `consolidationPolicy` to consolidate View data store and possibly release + * space on the filesystem. + * + * Default: `1000` + */ + consolidationIntervalMsec?: number; + /** + * The consolidation policy to apply for selecting which segments should be + * merged. + * + * Default: `{ type: "tier" }` + */ + consolidationPolicy?: views.TierConsolidationPolicy; + /** + * Maximum number of writers (segments) cached in the pool. + * + * Default: `64` + */ + writeBufferIdle?: number; + /** + * Maximum number of concurrent active writers (segments) that perform a + * transaction. 
+ * + * Default: `0` (disabled) + */ + writeBufferActive?: number; + /** + * Maximum memory byte size per writer (segment) before a writer (segment) + * flush is triggered. + * + * Default: `33554432` (32 MiB) + */ + writeBufferSizeMax?: number; + /** + * (Enterprise Edition only.) If set to `true`, then field normalization + * values will always be cached in memory. + * + * Default: `false` + */ + cache?: boolean; + /** + * An array of strings defining sort expressions to optimize. + */ + optimizeTopK?: string[]; + } +>; /** * Options for an attribute path in an inverted index. @@ -315,7 +419,7 @@ export type InvertedIndexFieldOptions = { * * Defaults to the features of the Analyzer. */ - features?: AnalyzerFeature[]; + features?: analyzers.AnalyzerFeature[]; /** * If set to `true`, all document attributes are indexed, excluding any * sub-attributes configured in the `fields` array. The `analyzer` and @@ -358,22 +462,22 @@ export type InvertedIndexFieldOptions = { }; /** - * Options for defining a stored value on an inverted index. + * Options for defining a primary sort field on an inverted index. */ -export type InvertedIndexStoredValueOptions = { +export type InvertedIndexPrimarySortOptions = { /** - * The attribute paths to store. + * An array of fields to sort the index by. */ - fields: string[]; + fields: InvertedIndexPrimarySortFieldOptions[]; /** - * How the attribute values should be compressed. + * How the primary sort data should be compressed. * * Default: `"lz4"` */ - compression?: Compression; + compression?: views.Compression; /** - * (Enterprise Edition only.) If set to `true`, then stored values will - * always be cached in memory. + * (Enterprise Edition only.) If set to `true`, then primary sort columns + * will always be cached in memory. * * Default: `false` */ @@ -391,187 +495,97 @@ export type InvertedIndexPrimarySortFieldOptions = { /** * The sorting direction. 
*/ - direction: Direction; + direction: views.Direction; }; /** - * Options for creating an inverted index. + * (Enterprise Edition only.) Options for a nested field in an inverted index. */ -export type EnsureInvertedIndexOptions = { - /** - * Type of this index. - */ - type: "inverted"; - /** - * An array of attribute paths or objects specifying options for the fields. - */ - fields: (string | InvertedIndexFieldOptions)[]; - /** - * A unique name for this index. - */ - name?: string; - /** - * If set to `true` array values will by default be indexed using the same - * behavior as ArangoSearch Views. This option only applies when using the - * index in a SearchAlias View. - * - * Default: `false` - */ - searchField?: boolean; - /** - * An array of attribute paths that will be stored in the index but can not - * be used for index lookups or sorting but can avoid full document lookups. - */ - storedValues?: InvertedIndexStoredValueOptions[]; - /** - * Primary sort order to optimize AQL queries using a matching sort order. - */ - primarySort?: { - /** - * An array of fields to sort the index by. - */ - fields: InvertedIndexPrimarySortFieldOptions[]; - /** - * How the primary sort data should be compressed. - * - * Default: `"lz4"` - */ - compression?: Compression; - /** - * (Enterprise Edition only.) If set to `true`, then primary sort columns - * will always be cached in memory. - * - * Default: `false` - */ - cache?: boolean; - }; +export type InvertedIndexNestedFieldOptions = { /** - * (Enterprise Edition only.) If set to `true`, then the primary key column - * will always be cached in memory. - * - * Default: `false` + * An attribute path. */ - primaryKeyCache?: boolean; + name: string; /** - * Name of the default Analyzer to apply to the values of indexed fields. + * Name of the Analyzer to apply to the values of this field. * - * Default: `"identity"` + * Defaults to the `analyzer` specified on the parent options or on the index + * itself. 
*/ analyzer?: string; /** - * List of Analyzer features to enable for the default Analyzer. - * - * Defaults to the Analyzer's features. - */ - features?: AnalyzerFeature[]; - /** - * If set to `true`, all document attributes are indexed, excluding any - * sub-attributes configured in the `fields` array. The `analyzer` and - * `features` properties apply to the sub-attributes. This option only - * applies when using the index in a SearchAlias View. + * List of Analyzer features to enable for this field's Analyzer. * - * Default: `false` + * Defaults to the features of the Analyzer. */ - includeAllFields?: boolean; + features?: analyzers.AnalyzerFeature[]; /** - * If set to `true`, the position of values in array values are tracked and - * need to be specified in queries. Otherwise all values in an array are - * treated as equivalent. This option only applies when using the index in a + * If set to `true` array values will be indexed using the same behavior as + * ArangoSearch Views. This option only applies when using the index in a * SearchAlias View. * - * Default: `false` - */ - trackListPositions?: boolean; - /** - * The number of threads to use for indexing the fields. - * - * Default: `2` - */ - parallelism?: number; - /** - * Wait at least this many commits between removing unused files in the - * ArangoSearch data directory. - * - * Default: `2` - */ - cleanupIntervalStep?: number; - /** - * Wait at least this many milliseconds between committing View data store - * changes and making documents visible to queries. - * - * Default: `1000` - */ - commitIntervalMsec?: number; - /** - * Wait at least this many milliseconds between applying - * `consolidationPolicy` to consolidate View data store and possibly release - * space on the filesystem. - * - * Default: `1000` - */ - consolidationIntervalMsec?: number; - /** - * The consolidation policy to apply for selecting which segments should be - * merged. 
- * - * Default: `{ type: "tier" }` - */ - consolidationPolicy?: TierConsolidationPolicy; - /** - * Maximum number of writers (segments) cached in the pool. - * - * Default: `64` + * Defaults to the value of `searchField` specified on the index itself. */ - writeBufferIdle?: number; + searchField?: boolean; /** - * Maximum number of concurrent active writers (segments) that perform a - * transaction. - * - * Default: `0` (disabled) + * Sub-objects to index to allow querying for co-occurring values. */ - writeBufferActive?: number; + nested?: (string | InvertedIndexNestedFieldOptions)[]; +}; + +/** + * Options for defining a stored value on an inverted index. + */ +export type InvertedIndexStoredValueOptions = { /** - * Maximum memory byte size per writer (segment) before a writer (segment) - * flush is triggered. - * - * Default: `33554432` (32 MiB) + * The attribute paths to store. */ - writeBufferSizeMax?: number; + fields: string[]; /** - * If set to `true`, the index will be created in the background to reduce - * the write-lock duration for the collection during index creation. + * How the attribute values should be compressed. * - * Default: `false` + * Default: `"lz4"` */ - inBackground?: boolean; + compression?: views.Compression; /** - * (Enterprise Edition only.) If set to `true`, then field normalization - * values will always be cached in memory. + * (Enterprise Edition only.) If set to `true`, then stored values will + * always be cached in memory. * * Default: `false` */ cache?: boolean; - /** - * An array of strings defining sort expressions to optimize. - */ - optimizeTopK?: string[]; }; +//#endregion +//#region IndexDescription /** - * Options for creating an index. + * An object representing an index. 
*/ -export type EnsureIndexOptions = - | EnsurePersistentIndexOptions - | EnsureGeoIndexOptions - | EnsureTtlIndexOptions - | EnsureMdiIndexOptions - | EnsureMdiPrefixedIndexOptions - | EnsureInvertedIndexOptions; +export type IndexDescription = + | FulltextIndexDescription + | GeoIndexDescription + | PersistentIndexDescription + | TtlIndexDescription + | MdiIndexDescription + | MdiPrefixedIndexDescription + | InvertedIndexDescription + | SystemIndexDescription; + +/** + * An object representing a system index. + */ +export type SystemIndexDescription = + | PrimaryIndexDescription + | EdgeIndexDescription; /** * Shared attributes of all index types. */ -export type GenericIndex = { +export type IndexDescriptionType< + Type extends string, + Fields extends any[], + Extra extends {} = {}, +> = { /** * A unique name for this index. */ @@ -580,6 +594,14 @@ export type GenericIndex = { * A unique identifier for this index. */ id: string; + /** + * Type of this index. + */ + type: Type; + /** + * An array of attribute paths. + */ + fields: Fields; /** * Whether documents not containing at least one of the attribute paths * are omitted by this index. @@ -593,38 +615,44 @@ export type GenericIndex = { * Additional stats about this index. */ figures?: Record; -}; +} & Extra; /** * An object representing a persistent index. */ -export type PersistentIndex = GenericIndex & { - type: "persistent"; - fields: string[]; - cacheEnabled: boolean; - deduplicate: boolean; - estimates: boolean; - selectivityEstimate: number; - storedValues?: string[]; -}; +export type PersistentIndexDescription = IndexDescriptionType< + "persistent", + string[], + { + cacheEnabled: boolean; + deduplicate: boolean; + estimates: boolean; + selectivityEstimate: number; + storedValues?: string[]; + } +>; /** * An object representing a primary index. 
*/ -export type PrimaryIndex = GenericIndex & { - type: "primary"; - fields: string[]; - selectivityEstimate: number; -}; +export type PrimaryIndexDescription = IndexDescriptionType< + "primary", + ["_key"], + { + selectivityEstimate: number; + } +>; /** * An object representing an edge index. */ -export type EdgeIndex = GenericIndex & { - type: "edge"; - fields: ["_from", "_to"]; - selectivityEstimate: number; -}; +export type EdgeIndexDescription = IndexDescriptionType< + "edge", + ["_from", "_to"], + { + selectivityEstimate: number; + } +>; /** * An object representing a fulltext index. @@ -632,121 +660,165 @@ export type EdgeIndex = GenericIndex & { * @deprecated The `fulltext` index type was deprecated in ArangoDB 3.10. Use * {@link views.ArangoSearchView} instead. */ -export type FulltextIndex = GenericIndex & { - type: "fulltext"; - fields: [string]; - minLength: number; -}; +export type FulltextIndexDescription = IndexDescriptionType< + "fulltext", + [string], + { + minLength: number; + } +>; + +/** + * An object representing an edge index. + */ +export type EdgeIndex = IndexDescriptionType< + "edge", + ["_from", "_to"], + { + selectivityEstimate: number; + } +>; + +/** + * An object representing a fulltext index. + * + * @deprecated The `fulltext` index type was deprecated in ArangoDB 3.10. Use + * {@link views.ArangoSearchView} instead. + */ +export type FulltextIndex = IndexDescriptionType< + "fulltext", + [string], + { + minLength: number; + } +>; /** * An object representing a geo index. 
*/ -export type GeoIndex = GenericIndex & { - type: "geo"; - fields: [string] | [string, string]; - geoJson: boolean; - legacyPolygons: boolean; - bestIndexedLevel: number; - worstIndexedLevel: number; - maxNumCoverCells: number; -}; +export type GeoIndexDescription = IndexDescriptionType< + "geo", + [string] | [string, string], + { + geoJson: boolean; + legacyPolygons: boolean; + bestIndexedLevel: number; + worstIndexedLevel: number; + maxNumCoverCells: number; + } +>; /** * An object representing a TTL index. */ -export type TtlIndex = GenericIndex & { - type: "ttl"; - fields: [string]; - expireAfter: number; - estimates: boolean; - selectivityEstimate: number; -}; +export type TtlIndexDescription = IndexDescriptionType< + "ttl", + [string], + { + expireAfter: number; + estimates: boolean; + selectivityEstimate: number; + } +>; /** * An object representing a MDI index. */ -export type MdiIndex = GenericIndex & { - type: "mdi"; - fields: string[]; - fieldValueTypes: "double"; - estimates: boolean; - selectivityEstimate: number; -}; +export type MdiIndexDescription = IndexDescriptionType< + "mdi", + string[], + { + fieldValueTypes: "double"; + estimates: boolean; + selectivityEstimate: number; + storedValues?: string[]; + } +>; /** * An object representing a prefixed MDI index. */ -export type MdiPrefixedIndex = GenericIndex & { - type: "mdi-prefixed"; - fields: string[]; - prefixFields: string[]; - fieldValueTypes: "double"; - estimates: boolean; - selectivityEstimate: number; -}; +export type MdiPrefixedIndexDescription = IndexDescriptionType< + "mdi-prefixed", + string[], + { + fieldValueTypes: "double"; + estimates: boolean; + selectivityEstimate: number; + storedValues?: string[]; + prefixFields: string[]; + } +>; /** - * (Enterprise Edition only.) An object representing a nested field in an - * inverted index. + * An object representing an inverted index. 
*/ -export type InvertedIndexNestedField = { +export type InvertedIndexDescription = IndexDescriptionType< + "inverted", + InvertedIndexField[], + { + searchField: boolean; + cache?: boolean; + storedValues: { + fields: string[]; + compression: views.Compression; + cache?: boolean; + }[]; + primarySort: { + fields: { + field: string; + direction: views.Direction; + }[]; + compression: views.Compression; + cache?: boolean; + }; + primaryKeyCache?: boolean; + analyzer: string; + features: analyzers.AnalyzerFeature[]; + includeAllFields: boolean; + trackListPositions: boolean; + parallelism: number; + cleanupIntervalStep: number; + commitIntervalMsec: number; + consolidationIntervalMsec: number; + consolidationPolicy: Required<views.TierConsolidationPolicy>; + writeBufferIdle: number; + writeBufferActive: number; + writeBufferSizeMax: number; + optimizeTopK: string[]; + } +>; + +/** + * An object representing a field in an inverted index. + */ +export type InvertedIndexField = { name: string; analyzer?: string; - features?: AnalyzerFeature[]; + features?: analyzers.AnalyzerFeature[]; + includeAllFields?: boolean; searchField?: boolean; + trackListPositions?: boolean; nested?: InvertedIndexNestedField[]; + cache?: boolean; }; /** - * An object representing an inverted index. + * (Enterprise Edition only.) An object representing a nested field in an + * inverted index. 
*/ -export type InvertedIndex = GenericIndex & { - type: "inverted"; - fields: { - name: string; - analyzer?: string; - features?: AnalyzerFeature[]; - includeAllFields?: boolean; - searchField?: boolean; - trackListPositions?: boolean; - nested?: InvertedIndexNestedField[]; - cache?: boolean; - }[]; - searchField: boolean; - cache?: boolean; - storedValues: { - fields: string[]; - compression: Compression; - cache?: boolean; - }[]; - primarySort: { - fields: { - field: string; - direction: Direction; - }[]; - compression: Compression; - cache?: boolean; - }; - primaryKeyCache?: boolean; - analyzer: string; - features: AnalyzerFeature[]; - includeAllFields: boolean; - trackListPositions: boolean; - parallelism: number; - cleanupIntervalStep: number; - commitIntervalMsec: number; - consolidationIntervalMsec: number; - consolidationPolicy: Required; - writeBufferIdle: number; - writeBufferActive: number; - writeBufferSizeMax: number; - optimizeTopK: string[]; +export type InvertedIndexNestedField = { + name: string; + analyzer?: string; + features?: analyzers.AnalyzerFeature[]; + searchField?: boolean; + nested?: InvertedIndexNestedField[]; }; /** * An object representing an arangosearch index. */ -export type InternalArangosearchIndex = { +export type ArangosearchIndexDescription = { id: string; type: "arangosearch"; view: string; @@ -758,24 +830,10 @@ export type InternalArangosearchIndex = { storeValues: "none" | "id"; }; -/** - * An object representing an index. - */ -export type Index = - | GeoIndex - | PersistentIndex - | PrimaryIndex - | EdgeIndex - | FulltextIndex - | TtlIndex - | MdiIndex - | MdiPrefixedIndex - | InvertedIndex; - /** * An object representing an internal index. */ -export type InternalIndex = InternalArangosearchIndex; +export type InternalIndexDescription = ArangosearchIndexDescription; /** * An object representing a potentially hidden index. 
@@ -792,33 +850,39 @@ export type InternalIndex = InternalArangosearchIndex; * // property * ``` */ -export type HiddenIndex = (Index | InternalArangosearchIndex) & { +export type HiddenIndexDescription = ( + | IndexDescription + | InternalIndexDescription +) & { /** * Progress of this index if it is still being created. */ progress?: number; }; +//#endregion -export type IndexDetails = Index & { - figures?: Record; - progress?: number; -}; +//#region Index selectors +/** + * Index name, id or object with a `name` or `id` property. + */ +export type IndexSelector = ObjectWithIndexId | ObjectWithName | string; -export type ObjectWithId = { +/** + * An object with an `id` property. + */ +export type ObjectWithIndexId = { [key: string]: any; id: string; }; +/** + * An object with a `name` property. + */ export type ObjectWithName = { [key: string]: any; name: string; }; -/** - * Index name, id or object with a `name` or `id` property. - */ -export type IndexSelector = ObjectWithId | ObjectWithName | string; - /** * @internal */ @@ -845,3 +909,4 @@ export function _indexHandle( } return `${collectionName}/${String(selector)}`; } +//#endregion diff --git a/src/job.ts b/src/jobs.ts similarity index 67% rename from src/job.ts rename to src/jobs.ts index 86e53ab67..9bcd782be 100644 --- a/src/job.ts +++ b/src/jobs.ts @@ -1,25 +1,40 @@ -import { Database } from "./database.js"; -import { ArangojsResponse } from "./lib/request.js"; +/** + * ```ts + * import type { Job } from "arangojs/jobs"; + * ``` + * + * The "jobs" module provides job-related types for TypeScript. + * + * @packageDocumentation + */ +import * as connection from "./connection.js"; +import * as databases from "./databases.js"; /** - * Represents an async job in a {@link database.Database}. + * Represents an async job in a {@link databases.Database}. + * + * @param ResultType - The type of the job's result. 
*/ -export class Job { +export class Job { protected _id: string; - protected _db: Database; - protected _transformResponse?: (res: ArangojsResponse) => Promise; - protected _transformError?: (error: any) => Promise; + protected _db: databases.Database; + protected _transformResponse?: ( + res: connection.ProcessedResponse, + ) => Promise; + protected _transformError?: (error: any) => Promise; protected _loaded: boolean = false; - protected _result: T | undefined; + protected _result: ResultType | undefined; /** * @internal */ constructor( - db: Database, + db: databases.Database, id: string, - transformResponse?: (res: ArangojsResponse) => Promise, - transformError?: (error: any) => Promise + transformResponse?: ( + res: connection.ProcessedResponse, + ) => Promise, + transformError?: (error: any) => Promise, ) { this._db = db; this._id = id; @@ -52,7 +67,7 @@ export class Job { /** * The job's result if it has been loaded or `undefined` otherwise. */ - get result(): T | undefined { + get result(): ResultType | undefined { return this._result; } @@ -71,16 +86,16 @@ export class Job { * console.log(job.result); * ``` */ - async load(): Promise { + async load(): Promise { if (!this.isLoaded) { - let res: ArangojsResponse; + let res: connection.ProcessedResponse; try { res = await this._db.request( { method: "PUT", - path: `/_api/job/${this._id}`, + pathname: `/_api/job/${this._id}`, }, - false + false, ); } catch (e) { if (this._transformError) { @@ -108,9 +123,9 @@ export class Job { return this._db.request( { method: "PUT", - path: `/_api/job/${this._id}/cancel`, + pathname: `/_api/job/${this._id}/cancel`, }, - () => undefined + () => undefined, ); } @@ -121,9 +136,9 @@ export class Job { return this._db.request( { method: "DELETE", - path: `/_api/job/${this._id}`, + pathname: `/_api/job/${this._id}`, }, - () => undefined + () => undefined, ); } @@ -146,9 +161,9 @@ export class Job { getCompleted(): Promise { return this._db.request( { - path: 
`/_api/job/${this._id}`, + pathname: `/_api/job/${this._id}`, }, - (res) => res.status !== 204 + (res) => res.status !== 204, ); } } diff --git a/src/lib/joinPath.ts b/src/lib/joinPath.ts deleted file mode 100644 index e43f268cd..000000000 --- a/src/lib/joinPath.ts +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Helper to merge two path segments. - * - * @packageDocumentation - * @internal - */ - -/** - * @internal - */ -export function joinPath( - basePath: string | undefined, - path: string | undefined -): string | undefined { - if (!basePath) return path; - if (!path) return basePath; - if (!basePath.endsWith("/")) basePath += "/"; - return basePath + path.replace(/^\//g, ""); -} diff --git a/src/lib/mergeHeaders.ts b/src/lib/mergeHeaders.ts deleted file mode 100644 index 9f9f20953..000000000 --- a/src/lib/mergeHeaders.ts +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Utility function for merging headers. - * - * @packageDocumentation - * @internal - */ - -/** - * @internal - */ -export function mergeHeaders( - base: Headers, - extra: Headers | Record | undefined -) { - if (!extra) return base; - return new Headers([ - ...base, - ...(extra instanceof Headers ? extra : Object.entries(extra)), - ]); -} diff --git a/src/lib/normalizeUrl.ts b/src/lib/normalizeUrl.ts deleted file mode 100644 index c70b257b0..000000000 --- a/src/lib/normalizeUrl.ts +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Utility function for normalizing URLs. - * - * @packageDocumentation - * @internal - */ - -/** - * @internal - */ -export function normalizeUrl(url: string): string { - const raw = url.match(/^(tcp|ssl|tls)((?::|\+).+)/); - if (raw) url = (raw[1] === "tcp" ? 
"http" : "https") + raw[2]; - const unix = url.match(/^(?:(http|https)\+)?unix:\/\/(\/.+)/); - if (unix) url = `${unix[1] || "http"}://unix:${unix[2]}`; - return url; -} diff --git a/src/lib/request.ts b/src/lib/request.ts deleted file mode 100644 index 0b421ac80..000000000 --- a/src/lib/request.ts +++ /dev/null @@ -1,166 +0,0 @@ -/** - * Request handling internals. - * - * @packageDocumentation - * @internal - */ - -import { SystemError } from "../error.js"; - -/** - * @internal - */ -function systemErrorToJSON(this: SystemError) { - return { - error: true, - errno: this.errno, - code: this.code, - syscall: this.syscall, - }; -} - -/** - * @internal - */ -export interface ArangojsResponse extends globalThis.Response { - request: globalThis.Request; - parsedBody?: any; - arangojsHostUrl?: string; -} - -/** - * @internal - */ -export interface ArangojsError extends Error { - request: globalThis.Request; - toJSON: () => Record; -} - -/** - * @internal - */ -export type RequestOptions = { - method: string; - pathname: string; - search?: URLSearchParams; - headers: Headers; - body: any; - expectBinary: boolean; - timeout?: number; -}; - -/** - * @internal - */ -export type RequestConfig = { - credentials: "omit" | "include" | "same-origin"; - keepalive: boolean; - beforeRequest?: (req: globalThis.Request) => void; - afterResponse?: (err: ArangojsError | null, res?: ArangojsResponse) => void; -}; - -/** - * @internal - */ -export type RequestFunction = { - (options: RequestOptions): Promise; - close?: () => void; -}; - -/** - * @internal - */ -export const isBrowser = false; - -/** - * Create a function for performing requests against a given host. - * - * @param baseUrl - Base URL of the host, i.e. protocol, port and domain name. - * @param config - Options to use for creating the agent. - * @param agent - Agent to use for performing requests. 
- * - * @internal - */ -export function createRequest( - baseUrl: URL, - config: RequestConfig -): RequestFunction { - let abort: AbortController | undefined; - return Object.assign( - async function request({ - method, - search: searchParams, - pathname: requestPath, - headers: requestHeaders, - body, - timeout, - }: RequestOptions) { - const headers = new Headers(requestHeaders); - const url = new URL( - baseUrl.search ? requestPath + baseUrl.search : requestPath, - baseUrl - ); - if (searchParams) { - for (const [key, value] of searchParams) { - url.searchParams.append(key, value); - } - } - if (body instanceof FormData) { - const res = new Response(body); - const blob = await res.blob(); - // Workaround for ArangoDB 3.12.0-rc1 and earlier: - // Omitting the final CRLF results in "bad request body" fatal error - body = new Blob([blob, "\r\n"], { type: blob.type }); - } - if (!headers.has("authorization")) { - headers.set( - "authorization", - `Basic ${btoa( - `${baseUrl.username || "root"}:${baseUrl.password || ""}` - )}` - ); - } - const request = new Request(url, { - method, - headers, - body, - credentials: config.credentials, - keepalive: config.keepalive, - }); - if (config.beforeRequest) { - config.beforeRequest(request); - } - abort = new AbortController(); - let t: ReturnType | undefined; - if (timeout) { - t = setTimeout(() => { - abort?.abort(); - }, timeout); - } - try { - const res = await fetch(request, { signal: abort.signal }); - if (t) clearTimeout(t); - const response = res as ArangojsResponse; - response.request = request; - if (config.afterResponse) { - config.afterResponse(null, response); - } - return response; - } catch (err) { - if (t) clearTimeout(t); - const error = err as ArangojsError; - error.request = request; - error.toJSON = systemErrorToJSON; - if (config.afterResponse) { - config.afterResponse(error); - } - throw error; - } - }, - { - close() { - abort?.abort(); - }, - } - ); -} diff --git a/src/lib/util.ts b/src/lib/util.ts new 
file mode 100644 index 000000000..3f6fa71a5 --- /dev/null +++ b/src/lib/util.ts @@ -0,0 +1,111 @@ +/** + * Utility functions for arangojs. + * + * @packageDocumentation + * @internal + */ + +const THIRTY_MINUTES = 30 * 60_000; + +/** + * @internal + * + * Helper to merge path segments. + */ +export function joinPath(...pathList: (string | undefined)[]): string { + if (!pathList.length) return ""; + return pathList + .flatMap((path, i) => { + if (!path) return []; + if (i === pathList.length - 1) { + if (i === 0) return [path]; + return [path.replace(/^\/+/, "")]; + } + if (i === 0) return [path.replace(/\/+$/, "")]; + return [path.replace(/^\/+|\/+$/g, "")]; + }) + .join("/"); +} + +/** + * @internal + * + * Utility function for merging headers. + */ +export function mergeHeaders( + ...headersList: ( + | Headers + | string[][] + | Record<string, string | ReadonlyArray<string>> + | undefined + )[] +) { + if (!headersList.length) return new Headers(); + return new Headers([ + ...headersList.flatMap((headers) => + headers + ? [ + ...(headers instanceof Headers || Array.isArray(headers) + ? headers + : new Headers(headers)), + ] + : [], + ), + ]); +} + +/** + * @internal + * + * Utility function for normalizing URLs. + */ +export function normalizeUrl(url: string): string { + const raw = url.match(/^(tcp|ssl|tls)((?::|\+).+)/); + if (raw) url = (raw[1] === "tcp" ? "http" : "https") + raw[2]; + const unix = url.match(/^(?:(http|https)\+)?unix:\/\/(\/.+)/); + if (unix) url = `${unix[1] || "http"}://unix:${unix[2]}`; + else if (!url.endsWith("/")) url += "/"; + return url; +} + +/** + * @internal + * + * Generate a unique request ID. + */ +export function generateRequestId() { + return `${Date.now() % THIRTY_MINUTES}_${Math.random().toString(36).substring(2, 15)}`; +} + +/** + * @internal + * + * Creates a timer that will call the given callback after the specified + * timeout. + * + * @param timeout - Number of milliseconds after which the callback will be + * called. 
+ * @param callback - Callback to call after the timeout. + * @returns A function that clears the timer. + */ +export function createTimer(timeout: number, callback: () => void) { + const t = setTimeout(callback, timeout); + return () => clearTimeout(t); +} + +/** + * @internal + * + * Generates a stack trace. + */ +export function generateStackTrace() { + let err = new Error(); + if (!err.stack) { + try { + throw err; + } catch (e: any) { + err = e; + } + } + return err; +} diff --git a/src/lib/linkedList.ts b/src/lib/x3-linkedlist.ts similarity index 96% rename from src/lib/linkedList.ts rename to src/lib/x3-linkedlist.ts index 7678bdb0e..66a1cdd7f 100644 --- a/src/lib/linkedList.ts +++ b/src/lib/x3-linkedlist.ts @@ -51,7 +51,7 @@ export class LinkedListItem { /** *Function to run on unlink() call. Usually used by LinkedList to fix first and last pointers and reduce length. */ - protected unlinkCleanup?: (item: LinkedListItem) => void + protected unlinkCleanup?: (item: LinkedListItem) => void, ) {} /** @@ -60,7 +60,7 @@ export class LinkedListItem { */ public insertBehind( /** LinkListItem to be inserted behind this one */ - item: LinkedListItem + item: LinkedListItem, ): void { item.insertBefore(this); @@ -80,7 +80,7 @@ export class LinkedListItem { */ public unlink( /** If true, additionally removes the reference to the item before and behind */ - unchain = false + unchain = false, ): void { if (this.before) this.before.behind = this.behind; @@ -105,7 +105,7 @@ export class LinkedListItem { */ protected insertBefore( /** LinkListItem to be inserted before this one */ - before: LinkedListItem + before: LinkedListItem, ): void { this.before = before; if (!this.unlinkCleanup) { @@ -137,7 +137,7 @@ export class LinkedList { constructor( /** Values to be added initially into list */ - values?: Iterable | LinkedList + values?: Iterable | LinkedList, ) { if (values) { if (values instanceof LinkedList) values = values.values(); @@ -160,7 +160,7 @@ export class 
LinkedList { */ public clear( /** If `true`, remove link info from every item. Changes complexity to O(n)! */ - unchain = false + unchain = false, ): void { if (unchain) { while (this.first) { @@ -181,7 +181,7 @@ export class LinkedList { /** Runs for every item in the LinkedList */ callback: (value: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): boolean { if (thisArg) { callback = callback.bind(thisArg); @@ -203,7 +203,7 @@ export class LinkedList { /** decides wether given element should be part of new LinkedList */ callback: (value: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): LinkedList { if (thisArg) { callback = callback.bind(thisArg); @@ -226,7 +226,7 @@ export class LinkedList { /** runs for every value in LinkedList. If it returns truthy, current value is returned. */ callback: (value: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): T | undefined { if (thisArg) { callback = callback.bind(thisArg); @@ -249,7 +249,7 @@ export class LinkedList { /** runs for every LinkedListItem in LinkedList. If it returns truthy, current LinkedListItem is returned. 
*/ callback: (value: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): LinkedListItem | undefined { if (thisArg) { callback = callback.bind(thisArg); @@ -272,7 +272,7 @@ export class LinkedList { /** Gets every value in LinkedList once with corresponding LinkedListItem and LinkedList */ callback: (value: T, item: LinkedListItem, list: this) => void, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): void { if (thisArg) { callback = callback.bind(thisArg); @@ -291,7 +291,7 @@ export class LinkedList { /** value to be found in this */ value: T, /** Starting index. Supports negative values for which `this.size - 1 + fromIndex` will be used as starting point. */ - fromIndex = 0 + fromIndex = 0, ): boolean { let current = this.getItemByIndex(fromIndex); while (current) { @@ -311,7 +311,7 @@ export class LinkedList { /** Value to be found */ searchedValue: T, /** Index to start from */ - fromIndex = 0 + fromIndex = 0, ): LinkedListItem | undefined { let current = this.getItemByIndex(fromIndex); while (current) { @@ -331,7 +331,7 @@ export class LinkedList { /** Value to be found */ searchedValue: T, /** Index to start from */ - fromIndex = -1 + fromIndex = -1, ): LinkedListItem | undefined { let current = this.getItemByIndex(fromIndex); while (current) { @@ -351,7 +351,7 @@ export class LinkedList { /** Gets value, LinkedListeItem and LinkedList. 
The response will be used as value in the new LinkedList */ callback: (value: T, item: LinkedListItem, list: this) => V, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): LinkedList { if (thisArg) { callback = callback.bind(thisArg); @@ -378,8 +378,8 @@ export class LinkedList { accumulator: T, currentValue: T, currentItem: LinkedListItem, - list: this - ) => V + list: this, + ) => V, ): V; public reduce( /** @@ -390,25 +390,25 @@ export class LinkedList { accumulator: V, currentValue: T, currentItem: LinkedListItem, - list: this + list: this, ) => V, /** Value for the first call of callback */ - initialValue: V + initialValue: V, ): V; public reduce( callback: ( accumulator: V | T, currentValue: T, currentItem: LinkedListItem, - list: this + list: this, ) => V, - initialValue?: V | T + initialValue?: V | T, ): V | T { let current = this.first; if (!current) { if (!initialValue) { throw new TypeError( - "Empty accumulator on empty LinkedList is not allowed." + "Empty accumulator on empty LinkedList is not allowed.", ); } return initialValue; @@ -445,8 +445,8 @@ export class LinkedList { accumulator: T, currentValue: T, currentItem: LinkedListItem, - list: this - ) => V + list: this, + ) => V, ): V; public reduceRight( /** @@ -457,25 +457,25 @@ export class LinkedList { accumulator: V, currentValue: T, currentItem: LinkedListItem, - list: this + list: this, ) => V, /** Value for the first call of callback */ - initialValue: V + initialValue: V, ): V; public reduceRight( callback: ( accumulator: V | T, currentValue: T, currentItem: LinkedListItem, - list: this + list: this, ) => V, - initialValue?: V | T + initialValue?: V | T, ): V | T { let current = this.last; if (!current) { if (!initialValue) { throw new TypeError( - "Empty accumulator on empty LinkedList is not allowed." 
+ "Empty accumulator on empty LinkedList is not allowed.", ); } return initialValue; @@ -503,7 +503,7 @@ export class LinkedList { /** called for every element. If response is truthy, this currentvalue will be returned by `.some()`. */ callback: (currentValue: T, item: LinkedListItem, list: this) => boolean, /** If given, callback function will be bound to thisArg */ - thisArg?: C + thisArg?: C, ): boolean { if (thisArg) { callback = callback.bind(thisArg); @@ -522,7 +522,7 @@ export class LinkedList { */ public join( /** separator between items in the resulting string */ - separator?: string + separator?: string, ): string { return [...this.values()].join(separator); } @@ -604,7 +604,7 @@ export class LinkedList { */ public remove( /** value to remove once */ - value: T + value: T, ): boolean { for (const item of this.keys()) { if (item.value === value) { @@ -621,7 +621,7 @@ export class LinkedList { */ public removeAllOccurrences( /** value to remove completely */ - value: T + value: T, ): boolean { let foundSomethingToDelete = false; @@ -704,7 +704,7 @@ export class LinkedList { */ private getItemByIndex( /** Index of item to get from list */ - index: number + index: number, ): LinkedListItem | undefined { if (index === undefined) { throw new Error("index must be a number!"); @@ -738,7 +738,7 @@ export class LinkedList { */ private unlinkCleanup = ( /** Item that has been unlinked */ - item: LinkedListItem + item: LinkedListItem, ): void => { if (this.first === item) { this.first = this.first.behind; diff --git a/src/logs.ts b/src/logs.ts new file mode 100644 index 000000000..f4f654196 --- /dev/null +++ b/src/logs.ts @@ -0,0 +1,104 @@ +/** + * ```ts + * import type { LogLevel } from "arangojs/logs"; + * ``` + * + * The "logs" module provides types for ArangoDB logs. + * + * @packageDocumentation + */ + +//#region Shared types +/** + * Numeric representation of the logging level of a log entry. 
+ */ +export enum LogLevel { + FATAL, + ERROR, + WARNING, + INFO, + DEBUG, +} + +/** + * String representation of the logging level of a log entry. + */ +export type LogLevelLabel = keyof typeof LogLevel; + +/** + * Logging level setting. + */ +export type LogLevelSetting = LogLevelLabel | "DEFAULT"; + +/** + * Log sorting direction, ascending or descending. + */ +export type LogSortDirection = "asc" | "desc"; +//#endregion + +//#region Log operation options +/** + * Options for retrieving log entries. + */ +export type LogEntriesOptions = { + /** + * Maximum log level of the entries to retrieve. + * + * Default: `INFO`. + */ + upto?: LogLevel | LogLevelLabel | Lowercase; + /** + * If set, only log entries with this log level will be returned. + */ + level?: LogLevel | LogLevelLabel | Lowercase; + /** + * If set, only log entries with an `lid` greater than or equal to this value + * will be returned. + */ + start?: number; + /** + * If set, only this many entries will be returned. + */ + size?: number; + /** + * If set, this many log entries will be skipped. + */ + offset?: number; + /** + * If set, only log entries containing the specified text will be returned. + */ + search?: string; + /** + * If set to `"desc"`, log entries will be returned in reverse chronological + * order. + * + * Default: `"asc"`. + */ + sort?: LogSortDirection; +}; +//#endregion + +//#region Log operation results +/** + * An object representing a single log entry. + */ +export type LogMessage = { + id: number; + topic: string; + level: LogLevelLabel; + date: string; + message: string; +}; + +/** + * An object representing a list of log entries. 
+ */ +export type LogEntries = { + totalAmount: number; + lid: number[]; + topic: string[]; + level: LogLevel[]; + timestamp: number[]; + text: string[]; +}; +//#endregion diff --git a/src/queries.ts b/src/queries.ts new file mode 100644 index 000000000..8f9901787 --- /dev/null +++ b/src/queries.ts @@ -0,0 +1,615 @@ +/** + * ```ts + * import type { QueryOptions } from "arangojs/queries"; + * ``` + * + * The "query" module provides query related types for TypeScript. + * + * @packageDocumentation + */ + +//#region Query operation options +/** + * Options for executing a query. + * + * See {@link databases.Database#query}. + */ +export type QueryOptions = { + /** + * If set to `true`, the query will be executed with support for dirty reads + * enabled, permitting ArangoDB to return a potentially dirty or stale result + * and arangojs will load balance the request without distinguishing between + * leaders and followers. + * + * Note that dirty reads are only supported for read-only queries, not data + * modification queries (e.g. using `INSERT`, `UPDATE`, `REPLACE` or + * `REMOVE`) and only when using ArangoDB 3.4 or later. + * + * Default: `false` + */ + allowDirtyRead?: boolean; + /** + * If set to `true`, cursor results will be stored by ArangoDB in such a way + * that batch reads can be retried in the case of a communication error. + * + * Default: `false` + */ + allowRetry?: boolean; + /** + * Maximum time in milliseconds arangojs will wait for a server response. + * Exceeding this value will result in the request being cancelled. + * + * **Note**: Setting a timeout for the client does not guarantee the query + * will be killed by ArangoDB if it is already being executed. See the + * `maxRuntime` option for limiting the execution time within ArangoDB. + */ + timeout?: number; + /** + * If set to a positive number, the query will automatically be retried at + * most this many times if it results in a write-write conflict. 
+ * + * Default: `0` + */ + retryOnConflict?: number; + /** + * Unless set to `false`, the number of result values in the result set will + * be returned in the `count` attribute. This may be disabled by default in + * a future version of ArangoDB if calculating this value has a performance + * impact for some queries. + * + * Default: `true`. + */ + count?: boolean; + /** + * Number of result values to be transferred by the server in each + * network roundtrip (or "batch"). + * + * Must be greater than zero. + */ + batchSize?: number; + /** + * If set to `false`, the AQL query results cache lookup will be skipped for + * this query. + * + * Default: `true` + */ + cache?: boolean; + /** + * Maximum memory size in bytes that the query is allowed to use. + * Exceeding this value will result in the query failing with an error. + * + * If set to `0`, the memory limit is disabled. + * + * Default: `0` + */ + memoryLimit?: number; + /** + * Maximum allowed execution time before the query will be killed in seconds. + * + * If set to `0`, the query will be allowed to run indefinitely. + * + * Default: `0` + */ + maxRuntime?: number; + /** + * Time-to-live for the cursor in seconds. The cursor results may be + * garbage collected by ArangoDB after this much time has passed. + * + * Default: `30` + */ + ttl?: number; + /** + * If set to `true`, the query will throw an exception and abort if it would + otherwise produce a warning. + */ + failOnWarning?: boolean; + /** + * If set to `1` or `true`, additional query profiling information will be + * returned in the `extra.profile` attribute if the query is not served from + * the result cache. + * + * If set to `2`, the query will return execution stats per query plan node + * in the `extra.stats.nodes` attribute. Additionally the query plan is + * returned in `extra.plan`. + */ + profile?: boolean | number; + /** + * If set to `true`, the query will be executed as a streaming query. 
+ */ + stream?: boolean; + /** + * Limits the maximum number of warnings a query will return. + */ + maxWarningsCount?: number; + /** + * If set to `true` and the query has a `LIMIT` clause, the total number of + * values matched before the last top-level `LIMIT` in the query was applied + * will be returned in the `extra.stats.fullCount` attribute. + */ + fullCount?: boolean; + /** + * If set to `false`, the query data will not be stored in the RocksDB block + * cache. This can be used to avoid thrashing he block cache when reading a + * lot of data. + */ + fillBlockCache?: boolean; + /** + * An object with a `rules` property specifying a list of optimizer rules to + * be included or excluded by the optimizer for this query. Prefix a rule + * name with `+` to include it, or `-` to exclude it. The name `all` acts as + * an alias matching all optimizer rules. + */ + optimizer?: { rules: string[] }; + /** + * Limits the maximum number of plans that will be created by the AQL query + * optimizer. + */ + maxPlans?: number; + /** + * Controls after how many execution nodes in a query a stack split should be + * performed. + * + * Default: `250` (`200` on macOS) + */ + maxNodesPerCallstack?: number; + /** + * Maximum size of transactions in bytes. + */ + maxTransactionSize?: number; + /** + * Maximum number of operations after which an intermediate commit is + * automatically performed. + */ + intermediateCommitCount?: number; + /** + * Maximum total size of operations in bytes after which an intermediate + * commit is automatically performed. + */ + intermediateCommitSize?: number; + /** + * (Enterprise Edition cluster only.) If set to `true`, collections + * inaccessible to current user will result in an access error instead + * of being treated as empty. + */ + skipInaccessibleCollections?: boolean; + /** + * (Enterprise Edition cluster only.) 
Limits the maximum time in seconds a + * DBServer will wait to bring satellite collections involved in the query + * into sync. Exceeding this value will result in the query being stopped. + * + * Default: `60` + */ + satelliteSyncWait?: number; +}; + +/** + * Options for explaining a query. + * + * See {@link Database#explain}. + */ +export type ExplainOptions = { + /** + * An object with a `rules` property specifying a list of optimizer rules to + * be included or excluded by the optimizer for this query. Prefix a rule + * name with `+` to include it, or `-` to exclude it. The name `all` acts as + * an alias matching all optimizer rules. + */ + optimizer?: { rules: string[] }; + /** + * Maximum number of plans that the optimizer is allowed to generate. + * Setting this to a low value limits the amount of work the optimizer does. + */ + maxNumberOfPlans?: number; + /** + * If set to true, all possible execution plans will be returned as the + * `plans` property. Otherwise only the optimal execution plan will be + * returned as the `plan` property. + * + * Default: `false` + */ + allPlans?: boolean; +}; + +/** + * Options for query tracking. + * + * See {@link Database#queryTracking}. + */ +export type QueryTrackingOptions = { + /** + * If set to `false`, neither queries nor slow queries will be tracked. + */ + enabled?: boolean; + /** + * Maximum query string length in bytes that will be kept in the list. + */ + maxQueryStringLength?: number; + /** + * Maximum number of slow queries to be kept in the list. + */ + maxSlowQueries?: number; + /** + * Threshold execution time in seconds for when a query will be + * considered slow. + */ + slowQueryThreshold?: number; + /** + * If set to `true`, bind parameters will be tracked along with queries. + */ + trackBindVars?: boolean; + /** + * If set to `true` and `enabled` is also set to `true`, slow queries will be + * tracked if their execution time exceeds `slowQueryThreshold`. 
+ */ + trackSlowQueries?: boolean; +}; + +/** + * Options for adjusting the global properties for the AQL query results cache. + */ +export type QueryCachePropertiesOptions = { + /** + * If set to `true`, the query cache will include queries that involve + * system collections. + */ + includeSystem?: boolean; + /** + * Maximum individual size of query results that will be stored per + * database-specific cache. + */ + maxEntrySize?: number; + /** + * Maximum number of query results that will be stored per database-specific + * cache. + */ + maxResults?: number; + /** + * Maximum cumulated size of query results that will be stored per + * database-specific cache. + */ + maxResultsSize?: number; + /** + * Mode the AQL query cache should operate in. + */ + mode?: "off" | "on" | "demand"; +}; +//#endregion + +//#region Query operation results +/** + * Result of explaining a query with a single plan. + */ +export type SingleExplainResult = { + /** + * Query plan. + */ + plan: ExplainPlan; + /** + * Whether it would be possible to cache the query. + */ + cacheable: boolean; + /** + * Warnings encountered while planning the query execution. + */ + warnings: { code: number; message: string }[]; + /** + * Optimizer statistics for the explained query. + */ + stats: ExplainStats; +}; + +/** + * Result of explaining a query with multiple plans. + */ +export type MultiExplainResult = { + /** + * Query plans. + */ + plans: ExplainPlan[]; + /** + * Whether it would be possible to cache the query. + */ + cacheable: boolean; + /** + * Warnings encountered while planning the query execution. + */ + warnings: { code: number; message: string }[]; + /** + * Optimizer statistics for the explained query. + */ + stats: ExplainStats; +}; + +/** + * Plan explaining query execution. + */ +export type ExplainPlan = { + /** + * Execution nodes in this plan. 
+ */ + nodes: { + [key: string]: any; + type: string; + id: number; + dependencies: number[]; + estimatedCost: number; + estimatedNrItems: number; + }[]; + /** + * Rules applied by the optimizer. + */ + rules: string[]; + /** + * Information about collections involved in the query. + */ + collections: { + name: string; + type: "read" | "write"; + }[]; + /** + * Variables used in the query. + */ + variables: { + id: number; + name: string; + }[]; + /** + * Total estimated cost of the plan. + */ + estimatedCost: number; + /** + * Estimated number of items returned by the query. + */ + estimatedNrItems: number; + /** + * Whether the query is a data modification query. + */ + isModificationQuery: boolean; +}; + +/** + * Optimizer statistics for an explained query. + */ +export type ExplainStats = { + /** + * Total number of rules executed for this query. + */ + rulesExecuted: number; + /** + * Number of rules skipped for this query. + */ + rulesSkipped: number; + /** + * Total number of plans created. + */ + plansCreated: number; + /** + * Maximum memory usage in bytes of the query during explain. + */ + peakMemoryUsage: number; + /** + * Time in seconds needed to explain the query. + */ + executionTime: number; +}; + +/** + * Result of parsing a query. + */ +export type ParseResult = { + /** + * Whether the query was parsed. + */ + parsed: boolean; + /** + * Names of all collections involved in the query. + */ + collections: string[]; + /** + * Names of all bind parameters used in the query. + */ + bindVars: string[]; + /** + * Abstract syntax tree (AST) of the query. + */ + ast: AstNode[]; +}; + +/** + * Node in an AQL abstract syntax tree (AST). + */ +export type AstNode = { + [key: string]: any; + type: string; + subNodes: AstNode[]; +}; + +/** + * Optimizer rule for AQL queries. 
+ */ +export type QueryOptimizerRule = { + name: string; + flags: { + hidden: boolean; + clusterOnly: boolean; + canBeDisabled: boolean; + canCreateAdditionalPlans: boolean; + disabledByDefault: boolean; + enterpriseOnly: boolean; + }; +}; + +/** + * Information about query tracking. + */ +export type QueryTrackingInfo = { + /** + * Whether query tracking is enabled. + */ + enabled: boolean; + /** + * Maximum query string length in bytes that is kept in the list. + */ + maxQueryStringLength: number; + /** + * Maximum number of slow queries that is kept in the list. + */ + maxSlowQueries: number; + /** + * Threshold execution time in seconds for when a query is + * considered slow. + */ + slowQueryThreshold: number; + /** + * Whether bind parameters are being tracked along with queries. + */ + trackBindVars: boolean; + /** + * Whether slow queries are being tracked. + */ + trackSlowQueries: boolean; +}; + +/** + * Entry in the AQL query results cache. + */ +export type QueryCacheEntry = { + /** + * Hash of the query results. + */ + hash: string; + /** + * Query string. + */ + query: string; + /** + * Bind parameters used in the query. Only shown if tracking for bind + * variables was enabled at server start. + */ + bindVars: Record; + /** + * Size of the query results and bind parameters in bytes. + */ + size: number; + /** + * Number of documents/rows in the query results. + */ + results: number; + /** + * Date and time the query was started as an ISO 8601 timestamp. + */ + started: string; + /** + * Number of times the result was served from the cache. + */ + hits: number; + /** + * Running time of the query in seconds. + */ + runTime: number; + /** + * Collections and views involved in the query. + */ + dataSources: string[]; +}; + +/** + * Properties of the global AQL query results cache configuration. + */ +export type QueryCacheProperties = { + /** + * If set to `true`, the query cache will include queries that involve + * system collections. 
+ */ + includeSystem: boolean; + /** + * Maximum individual size of query results that will be stored per + * database-specific cache. + */ + maxEntrySize: number; + /** + * Maximum number of query results that will be stored per database-specific + * cache. + */ + maxResults: number; + /** + * Maximum cumulated size of query results that will be stored per + * database-specific cache. + */ + maxResultsSize: number; + /** + * Mode the AQL query cache should operate in. + */ + mode: "off" | "on" | "demand"; +}; +//#endregion + +//#region QueryDescription +/** + * Object describing a query. + */ +export type QueryDescription = { + /** + * Unique identifier for this query. + */ + id: string; + /** + * Name of the database the query runs in. + */ + database: string; + /** + * Name of the user that started the query. + */ + user: string; + /** + * Query string (potentially truncated). + */ + query: string; + /** + * Bind parameters used in the query. + */ + bindVars: Record; + /** + * Date and time the query was started. + */ + started: string; + /** + * Query's running time in seconds. + */ + runTime: number; + /** + * Maximum memory usage in bytes of the query. + */ + peakMemoryUsage: number; + /** + * Query's current execution state. + */ + state: "executing" | "finished" | "killed"; + /** + * Whether the query uses a streaming cursor. + */ + stream: boolean; +}; +//#endregion + +//#region UserFunctionDescription +/** + * Definition of an AQL User Function. + */ +export type UserFunctionDescription = { + /** + * Name of the AQL User Function. + */ + name: string; + /** + * Implementation of the AQL User Function. + */ + code: string; + /** + * Whether the function is deterministic. + * + * See {@link Database#createFunction}. 
+ */ + isDeterministic: boolean; +}; +//#endregion diff --git a/src/route.ts b/src/routes.ts similarity index 67% rename from src/route.ts rename to src/routes.ts index fef1954a7..c63f62c47 100644 --- a/src/route.ts +++ b/src/routes.ts @@ -1,37 +1,37 @@ /** * ```ts - * import type { Route } from "arangojs/route.js"; + * import type { Route } from "arangojs/routes"; * ``` * - * The "route" module provides route related types and interfaces for TypeScript. + * The "routes" module provides route related types and interfaces for + * TypeScript. * * @packageDocumentation */ -import { RequestOptions } from "./connection.js"; -import { Database } from "./database.js"; -import { ArangojsResponse } from "./lib/request.js"; -import { mergeHeaders } from "./lib/mergeHeaders.js"; +import * as connections from "./connection.js"; +import * as databases from "./databases.js"; +import * as util from "./lib/util.js"; /** * Represents an arbitrary route relative to an ArangoDB database. */ export class Route { - protected _db: Database; - protected _path: string; + protected _db: databases.Database; + protected _pathname: string; protected _headers: Headers; /** * @internal */ constructor( - db: Database, - path: string = "", - headers: Headers | Record = {} + db: databases.Database, + pathname: string = "", + headers: Headers | Record = {}, ) { - if (!path) path = ""; - else if (path.charAt(0) !== "/") path = `/${path}`; + if (!pathname) pathname = ""; + else if (pathname.charAt(0) !== "/") pathname = `/${pathname}`; this._db = db; - this._path = path; + this._pathname = pathname; this._headers = headers instanceof Headers ? headers : new Headers(headers); } @@ -45,8 +45,8 @@ export class Route { /** * Path of this route. */ - get path() { - return this._path; + get pathname() { + return this._pathname; } /** @@ -60,7 +60,7 @@ export class Route { * Creates a new route relative to this route that inherits any of its default * HTTP headers. 
* - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param headers - Additional headers that will be sent with each request. * * @example @@ -70,13 +70,11 @@ export class Route { * const users = foxx.route("/users"); * ``` */ - route(path: string, headers?: Headers | Record) { - if (!path) path = ""; - else if (path.charAt(0) !== "/") path = `/${path}`; + route(pathname: string, headers?: Headers | Record) { return new Route( this._db, - this._path + path, - mergeHeaders(this._headers, headers) + util.joinPath(this._pathname, pathname), + util.mergeHeaders(this._headers, headers), ); } @@ -92,7 +90,7 @@ export class Route { * const foxx = db.route("/my-foxx-service"); * const res = await foxx.request({ * method: "POST", - * path: "/users", + * pathname: "/users", * body: { * username: "admin", * password: "hunter2" @@ -100,22 +98,24 @@ export class Route { * }); * ``` */ - request(options?: RequestOptions) { - const opts = { ...options }; - if (!opts.path || opts.path === "/") opts.path = ""; - else if (!this._path || opts.path.charAt(0) === "/") opts.path = opts.path; - else opts.path = `/${opts.path}`; - opts.basePath = this._path; - opts.headers = mergeHeaders(this._headers, opts.headers); - opts.method = opts.method ? opts.method.toUpperCase() : "GET"; - return this._db.request(opts, false); + request(options: connections.RequestOptions = {}) { + const { method = "GET", pathname, headers, ...opts } = options; + return this._db.request( + { + ...opts, + method: method.toUpperCase(), + pathname: util.joinPath(this._pathname, pathname), + headers: util.mergeHeaders(this._headers, headers), + }, + false, + ); } /** * Performs a DELETE request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param search - Query string parameters for this request. 
* @param headers - Additional headers to send with this request. * @@ -127,10 +127,10 @@ export class Route { * ``` */ delete( - path: string, + pathname: string, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; + headers?: Headers | Record, + ): Promise; /** * Performs a DELETE request against the given path relative to this route * and returns the server response. @@ -148,19 +148,19 @@ export class Route { */ delete( search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; - delete(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + headers?: Headers | Record, + ): Promise; + delete(...args: any[]): Promise { + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; - return this.request({ method: "DELETE", path, search, headers }); + return this.request({ method: "DELETE", pathname, search, headers }); } /** * Performs a GET request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. * @@ -172,10 +172,10 @@ export class Route { * ``` */ get( - path: string, + pathname: string, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; + headers?: Headers | Record, + ): Promise; /** * Performs a GET request against the given path relative to this route * and returns the server response. @@ -193,19 +193,19 @@ export class Route { */ get( search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; - get(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + headers?: Headers | Record, + ): Promise; + get(...args: any[]): Promise { + const pathname = typeof args[0] === "string" ? 
args.shift() : undefined; const [search, headers] = args; - return this.request({ method: "GET", path, search, headers }); + return this.request({ method: "GET", pathname, search, headers }); } /** * Performs a HEAD request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. * @@ -217,10 +217,10 @@ export class Route { * ``` */ head( - path: string, + pathname: string, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; + headers?: Headers | Record, + ): Promise; /** * Performs a HEAD request against the given path relative to this route * and returns the server response. @@ -238,19 +238,19 @@ export class Route { */ head( search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; - head(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + headers?: Headers | Record, + ): Promise; + head(...args: any[]): Promise { + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [search, headers] = args; - return this.request({ method: "HEAD", path, search, headers }); + return this.request({ method: "HEAD", pathname, search, headers }); } /** * Performs a PATCH request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param body - Body of the request object. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. 
@@ -263,11 +263,11 @@ export class Route { * ``` */ patch( - path: string, + pathname: string, body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; + headers?: Headers | Record, + ): Promise; /** * Performs a PATCH request against the given path relative to this route * and returns the server response. @@ -289,19 +289,19 @@ export class Route { patch( body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; - patch(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + headers?: Headers | Record, + ): Promise; + patch(...args: any[]): Promise { + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; - return this.request({ method: "PATCH", path, body, search, headers }); + return this.request({ method: "PATCH", pathname, body, search, headers }); } /** * Performs a POST request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param body - Body of the request object. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. @@ -317,11 +317,11 @@ export class Route { * ``` */ post( - path: string, + pathname: string, body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; + headers?: Headers | Record, + ): Promise; /** * Performs a POST request against the given path relative to this route * and returns the server response. @@ -346,19 +346,19 @@ export class Route { post( body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; - post(...args: any[]): Promise { - const path = typeof args[0] === "string" ? 
args.shift() : undefined; + headers?: Headers | Record, + ): Promise; + post(...args: any[]): Promise { + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; - return this.request({ method: "POST", path, body, search, headers }); + return this.request({ method: "POST", pathname, body, search, headers }); } /** * Performs a PUT request against the given path relative to this route * and returns the server response. * - * @param path - Path relative to this route. + * @param pathname - Path relative to this route. * @param body - Body of the request object. * @param search - Query string parameters for this request. * @param headers - Additional headers to send with this request. @@ -371,11 +371,11 @@ export class Route { * ``` */ put( - path: string, + pathname: string, body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; + headers?: Headers | Record, + ): Promise; /** * Performs a PUT request against the given path relative to this route * and returns the server response. @@ -397,11 +397,11 @@ export class Route { put( body?: any, search?: URLSearchParams | Record, - headers?: Headers | Record - ): Promise; - put(...args: any[]): Promise { - const path = typeof args[0] === "string" ? args.shift() : undefined; + headers?: Headers | Record, + ): Promise; + put(...args: any[]): Promise { + const pathname = typeof args[0] === "string" ? args.shift() : undefined; const [body, search, headers] = args; - return this.request({ method: "PUT", path, body, search, headers }); + return this.request({ method: "PUT", pathname, body, search, headers }); } } diff --git a/src/services.ts b/src/services.ts new file mode 100644 index 000000000..0a646b332 --- /dev/null +++ b/src/services.ts @@ -0,0 +1,538 @@ +/** + * ```ts + * import type { ServiceInfo } from "arangojs/services"; + * ``` + * + * The "services" module provides types for Foxx services. 
+ * + * @packageDocumentation + */ +import { FoxxManifest } from "./foxx-manifest.js"; + +//#region Service operation options +/** + * Options for installing the service. + * + * See {@link Database#installService}. + */ +export type InstallServiceOptions = { + /** + * An object mapping configuration option names to values. + * + * See also {@link Database#getServiceConfiguration}. + */ + configuration?: Record; + /** + * An object mapping dependency aliases to mount points. + * + * See also {@link Database#getServiceDependencies}. + */ + dependencies?: Record; + /** + * Whether the service should be installed in development mode. + * + * See also {@link Database#setServiceDevelopmentMode}. + * + * Default: `false` + */ + development?: boolean; + /** + * Whether the service should be installed in legacy compatibility mode + * + * This overrides the `engines` option in the service manifest (if any). + * + * Default: `false` + */ + legacy?: boolean; + /** + * Whether the "setup" script should be executed. + * + * Default: `true` + */ + setup?: boolean; +}; + +/** + * Options for replacing a service. + * + * See {@link Database#replaceService}. + */ +export type ReplaceServiceOptions = { + /** + * An object mapping configuration option names to values. + * + * See also {@link Database#getServiceConfiguration}. + */ + configuration?: Record; + /** + * An object mapping dependency aliases to mount points. + * + * See also {@link Database#getServiceDependencies}. + */ + dependencies?: Record; + /** + * Whether the service should be installed in development mode. + * + * See also {@link Database#setServiceDevelopmentMode}. + * + * Default: `false` + */ + development?: boolean; + /** + * Whether the service should be installed in legacy compatibility mode + * + * This overrides the `engines` option in the service manifest (if any). + * + * Default: `false` + */ + legacy?: boolean; + /** + * Whether the "setup" script should be executed. 
+ * + * Default: `true` + */ + setup?: boolean; + /** + * Whether the existing service's "teardown" script should be executed + * prior to removing that service. + * + * Default: `true` + */ + teardown?: boolean; + /** + * If set to `true`, replacing a service that does not already exist will + * fall back to installing the new service. + * + * Default: `false` + */ + force?: boolean; +}; + +/** + * Options for upgrading a service. + * + * See {@link Database#upgradeService}. + */ +export type UpgradeServiceOptions = { + /** + * An object mapping configuration option names to values. + * + * See also {@link Database#getServiceConfiguration}. + */ + configuration?: Record; + /** + * An object mapping dependency aliases to mount points. + * + * See also {@link Database#getServiceDependencies}. + */ + dependencies?: Record; + /** + * Whether the service should be installed in development mode. + * + * See also {@link Database#setServiceDevelopmentMode}. + * + * Default: `false` + */ + development?: boolean; + /** + * Whether the service should be installed in legacy compatibility mode + * + * This overrides the `engines` option in the service manifest (if any). + * + * Default: `false` + */ + legacy?: boolean; + /** + * Whether the "setup" script should be executed. + * + * Default: `true` + */ + setup?: boolean; + /** + * Whether the existing service's "teardown" script should be executed + * prior to upgrading that service. + * + * Default: `false` + */ + teardown?: boolean; + /** + * Unless set to `true`, upgrading a service that does not already exist will + * fall back to installing the new service. + * + * Default: `false` + */ + force?: boolean; +}; + +/** + * Options for uninstalling a service. + * + * See {@link Database#uninstallService}. + */ +export type UninstallServiceOptions = { + /** + * Whether the service's "teardown" script should be executed + * prior to removing that service. 
+ * + * Default: `true` + */ + teardown?: boolean; + /** + * If set to `true`, uninstalling a service that does not already exist + * will be considered successful. + * + * Default: `false` + */ + force?: boolean; +}; +//#endregion + +//#region Service operation results +/** + * Object briefly describing a Foxx service. + */ +export type ServiceSummary = { + /** + * Service mount point, relative to the database. + */ + mount: string; + /** + * Name defined in the service manifest. + */ + name?: string; + /** + * Version defined in the service manifest. + */ + version?: string; + /** + * Service dependencies the service expects to be able to match as a mapping + * from dependency names to versions the service is compatible with. + */ + provides: Record; + /** + * Whether development mode is enabled for this service. + */ + development: boolean; + /** + * Whether the service is running in legacy compatibility mode. + */ + legacy: boolean; +}; + +/** + * Object describing a configuration option of a Foxx service. + */ +export type ServiceConfiguration = { + /** + * Data type of the configuration value. + * + * **Note**: `"int"` and `"bool"` are historical synonyms for `"integer"` and + * `"boolean"`. The `"password"` type is synonymous with `"string"` but can + * be used to distinguish values which should not be displayed in plain text + * by software when managing the service. + */ + type: + | "integer" + | "boolean" + | "string" + | "number" + | "json" + | "password" + | "int" + | "bool"; + /** + * Current value of the configuration option as stored internally. + */ + currentRaw: any; + /** + * Processed current value of the configuration option as exposed in the + * service code. + */ + current: any; + /** + * Formatted name of the configuration option. + */ + title: string; + /** + * Human-readable description of the configuration option. 
+ */ + description?: string; + /** + * Whether the configuration option must be set in order for the service + * to be operational. + */ + required: boolean; + /** + * Default value of the configuration option. + */ + default?: any; +}; + +/** + * Object describing a single-service dependency defined by a Foxx service. + */ +export type SingleServiceDependency = { + /** + * Whether this is a multi-service dependency. + */ + multiple: false; + /** + * Current mount point the dependency is resolved to. + */ + current?: string; + /** + * Formatted name of the dependency. + */ + title: string; + /** + * Name of the service the dependency expects to match. + */ + name: string; + /** + * Version of the service the dependency expects to match. + */ + version: string; + /** + * Human-readable description of the dependency. + */ + description?: string; + /** + * Whether the dependency must be matched in order for the service + * to be operational. + */ + required: boolean; +}; + +/** + * Object describing a multi-service dependency defined by a Foxx service. + */ +export type MultiServiceDependency = { + /** + * Whether this is a multi-service dependency. + */ + multiple: true; + /** + * Current mount points the dependency is resolved to. + */ + current?: string[]; + /** + * Formatted name of the dependency. + */ + title: string; + /** + * Name of the service the dependency expects to match. + */ + name: string; + /** + * Version of the service the dependency expects to match. + */ + version: string; + /** + * Human-readable description of the dependency. + */ + description?: string; + /** + * Whether the dependency must be matched in order for the service + * to be operational. + */ + required: boolean; +}; + +/** + * Test stats for a Foxx service's tests. + */ +export type ServiceTestStats = { + /** + * Total number of tests found. + */ + tests: number; + /** + * Number of tests that ran successfully. + */ + passes: number; + /** + * Number of tests that failed. 
+ */ + failures: number; + /** + * Number of tests skipped or not executed. + */ + pending: number; + /** + * Total test duration in milliseconds. + */ + duration: number; +}; + +/** + * Test results for a Foxx service's tests using the stream reporter. + */ +export type ServiceTestStreamReport = ( + | ["start", { total: number }] + | ["pass", ServiceTestStreamTest] + | ["fail", ServiceTestStreamTest] + | ["end", ServiceTestStats] +)[]; + +/** + * Test results for a single test case using the stream reporter. + */ +export type ServiceTestStreamTest = { + title: string; + fullTitle: string; + duration: number; + err?: string; +}; + +/** + * Test results for a Foxx service's tests using the suite reporter. + */ +export type ServiceTestSuiteReport = { + stats: ServiceTestStats; + suites: ServiceTestSuite[]; + tests: ServiceTestSuiteTest[]; +}; + +/** + * Test results for a single test suite using the suite reporter. + */ +export type ServiceTestSuite = { + title: string; + suites: ServiceTestSuite[]; + tests: ServiceTestSuiteTest[]; +}; + +/** + * Test results for a single test case using the suite reporter. + */ +export type ServiceTestSuiteTest = { + result: "pending" | "pass" | "fail"; + title: string; + duration: number; + err?: any; +}; + +/** + * Test results for a Foxx service's tests in XUnit format using the JSONML + * representation. + */ +export type ServiceTestXunitReport = [ + "testsuite", + { + timestamp: number; + tests: number; + errors: number; + failures: number; + skip: number; + time: number; + }, + ...ServiceTestXunitTest[], +]; + +/** + * Test results for a single test case in XUnit format using the JSONML + * representation. + */ +export type ServiceTestXunitTest = + | ["testcase", { classname: string; name: string; time: number }] + | [ + "testcase", + { classname: string; name: string; time: number }, + ["failure", { message: string; type: string }, string], + ]; + +/** + * Test results for a Foxx service's tests in TAP format. 
+ */ +export type ServiceTestTapReport = string[]; + +/** + * Test results for a Foxx service's tests using the default reporter. + */ +export type ServiceTestDefaultReport = { + stats: ServiceTestStats; + tests: ServiceTestDefaultTest[]; + pending: ServiceTestDefaultTest[]; + failures: ServiceTestDefaultTest[]; + passes: ServiceTestDefaultTest[]; +}; + +/** + * Test results for a single test case using the default reporter. + */ +export type ServiceTestDefaultTest = { + title: string; + fullTitle: string; + duration: number; + err?: string; +}; + +/** + * OpenAPI 2.0 description of a Foxx service. + */ +export type SwaggerJson = { + [key: string]: any; + info: { + title: string; + description: string; + version: string; + license: string; + }; + path: { + [key: string]: any; + }; +}; +//#endregion + +//#region ServiceDescription +/** + * Object describing a Foxx service in detail. + */ +export type ServiceDescription = { + /** + * Service mount point, relative to the database. + */ + mount: string; + /** + * File system path of the service. + */ + path: string; + /** + * Name defined in the service manifest. + */ + name?: string; + /** + * Version defined in the service manifest. + */ + version?: string; + /** + * Whether development mode is enabled for this service. + */ + development: boolean; + /** + * Whether the service is running in legacy compatibility mode. + */ + legacy: boolean; + /** + * Content of the service manifest of this service. + */ + manifest: FoxxManifest; + /** + * Internal checksum of the service's initial source bundle. + */ + checksum: string; + /** + * Options for this service. + */ + options: { + /** + * Configuration values set for this service. + */ + configuration: Record; + /** + * Service dependency configuration of this service. 
+ */ + dependencies: Record; + }; +}; +//#endregion diff --git a/src/test/00-basics.ts b/src/test/00-basics.ts index 8a0c24adb..cf7d4d695 100644 --- a/src/test/00-basics.ts +++ b/src/test/00-basics.ts @@ -34,16 +34,21 @@ describe("Configuring the driver", () => { describe("with headers", () => { it("applies the headers", (done) => { const db = new Database({ - headers: { - "x-one": "1", - "x-two": "2", + fetchOptions: { + headers: { + "x-one": "1", + "x-two": "2", + }, }, }); (db as any)._connection._hosts = [ - ({ headers }: any) => { - expect(headers.get("x-one")).to.equal("1"); - expect(headers.get("x-two")).to.equal("2"); - done(); + { + fetch: ({ headers }: any) => { + expect(headers.get("x-one")).to.equal("1"); + expect(headers.get("x-two")).to.equal("2"); + done(); + }, + close: () => {}, }, ]; db.request({ headers: {} }, () => {}); @@ -53,9 +58,12 @@ describe("Configuring the driver", () => { it("sets the x-arango-version header", (done) => { const db = new Database({ arangoVersion: 99999 }); (db as any)._connection._hosts = [ - ({ headers }: any) => { - expect(headers.get("x-arango-version")).to.equal("99999"); - done(); + { + fetch: ({ headers }: any) => { + expect(headers.get("x-arango-version")).to.equal("99999"); + done(); + }, + close: () => {}, }, ]; db.request({ headers: {} }, () => {}); diff --git a/src/test/01-manipulating-databases.ts b/src/test/01-manipulating-databases.ts index bedd2541c..fb3e84645 100644 --- a/src/test/01-manipulating-databases.ts +++ b/src/test/01-manipulating-databases.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { ArangoError } from "../error.js"; +import { Database } from "../databases.js"; +import { ArangoError } from "../errors.js"; import { config } from "./_config.js"; describe("Manipulating databases", function () { diff --git a/src/test/02-accessing-collections.ts b/src/test/02-accessing-collections.ts index 811e6ca6e..b3b119ca0 100644 --- 
a/src/test/02-accessing-collections.ts +++ b/src/test/02-accessing-collections.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { isArangoCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { isArangoCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -34,7 +34,7 @@ describe("Accessing collections", function () { }); describe("database.listCollections", () => { const nonSystemCollectionNames = range(4).map( - (i) => `c_${Date.now()}_${i}` + (i) => `c_${Date.now()}_${i}`, ); const systemCollectionNames = range(4).map((i) => `_c_${Date.now()}_${i}`); before(async () => { @@ -42,16 +42,16 @@ describe("Accessing collections", function () { ...nonSystemCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ...systemCollectionNames.map(async (name) => { const collection = db.collection(name); await collection.create({ isSystem: true }); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ] as Promise[]); @@ -60,7 +60,7 @@ describe("Accessing collections", function () { await Promise.all([ ...nonSystemCollectionNames.map((name) => db.collection(name).drop()), ...systemCollectionNames.map((name) => - db.collection(name).drop({ isSystem: true }) + db.collection(name).drop({ isSystem: true }), ), ]); }); @@ -68,7 +68,7 @@ describe("Accessing collections", function () { const collections = await db.listCollections(); expect(collections.length).to.equal(nonSystemCollectionNames.length); expect(collections.map((c: any) => c.name).sort()).to.eql( - nonSystemCollectionNames + 
nonSystemCollectionNames, ); }); it("includes system collections if explicitly passed false", async () => { @@ -79,13 +79,13 @@ describe("Accessing collections", function () { .sort(); expect(collections.length).to.be.at.least(allCollectionNames.length); expect(collections.map((c: any) => c.name).sort()).to.eql( - allCollectionNames + allCollectionNames, ); }); }); describe("database.collections", () => { const documentCollectionNames = range(4).map( - (i) => `dc_${Date.now()}_${i}` + (i) => `dc_${Date.now()}_${i}`, ); const edgeCollectionNames = range(4).map((i) => `ec_${Date.now()}_${i}`); const systemCollectionNames = range(4).map((i) => `_c_${Date.now()}_${i}`); @@ -94,23 +94,23 @@ describe("Accessing collections", function () { ...documentCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ...systemCollectionNames.map(async (name) => { const collection = db.collection(name); await collection.create({ isSystem: true }); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ] as Promise[]); @@ -120,17 +120,17 @@ describe("Accessing collections", function () { ...documentCollectionNames.map((name) => db.collection(name).drop()), ...edgeCollectionNames.map((name) => db.collection(name).drop()), ...systemCollectionNames.map((name) => - db.collection(name).drop({ isSystem: true }) + db.collection(name).drop({ isSystem: true }), ), ]); }); it("creates Collection instances", async () => { const collections = await 
db.collections(); expect(collections.length).to.equal( - documentCollectionNames.length + edgeCollectionNames.length + documentCollectionNames.length + edgeCollectionNames.length, ); expect(collections.map((c) => c.name).sort()).to.eql( - [...documentCollectionNames, ...edgeCollectionNames].sort() + [...documentCollectionNames, ...edgeCollectionNames].sort(), ); }); it("includes system collections if explicitly passed false", async () => { @@ -142,7 +142,7 @@ describe("Accessing collections", function () { ...builtinSystemCollections, ].sort(); expect(collections.map((c: any) => c.name).sort()).to.eql( - allCollectionNames + allCollectionNames, ); }); }); diff --git a/src/test/03-accessing-graphs.ts b/src/test/03-accessing-graphs.ts index e65e50600..849016241 100644 --- a/src/test/03-accessing-graphs.ts +++ b/src/test/03-accessing-graphs.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { Graph } from "../graph.js"; +import { Database } from "../databases.js"; +import { Graph } from "../graphs.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -38,15 +38,15 @@ describe("Accessing graphs", function () { ...vertexCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ] as Promise[]); @@ -58,11 +58,11 @@ describe("Accessing graphs", function () { collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); await db.waitForPropagation( - { path: `/_api/gharial/${graph.name}` }, - 
10000 + { pathname: `/_api/gharial/${graph.name}` }, + 10000, ); }), ]); @@ -72,7 +72,7 @@ describe("Accessing graphs", function () { await Promise.all( vertexCollectionNames .concat(edgeCollectionNames) - .map((name) => db.collection(name).drop()) + .map((name) => db.collection(name).drop()), ); }); it("fetches information about all graphs", async () => { @@ -90,15 +90,15 @@ describe("Accessing graphs", function () { ...vertexCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ] as Promise[]); @@ -110,11 +110,11 @@ describe("Accessing graphs", function () { collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); await db.waitForPropagation( - { path: `/_api/gharial/${graph.name}` }, - 10000 + { pathname: `/_api/gharial/${graph.name}` }, + 10000, ); }), ]); @@ -124,7 +124,7 @@ describe("Accessing graphs", function () { await Promise.all( vertexCollectionNames .concat(edgeCollectionNames) - .map((name) => db.collection(name).drop()) + .map((name) => db.collection(name).drop()), ); }); it("creates Graph instances", async () => { diff --git a/src/test/04-transactions.ts b/src/test/04-transactions.ts index 5b741d73d..f97bf3132 100644 --- a/src/test/04-transactions.ts +++ b/src/test/04-transactions.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; -import { Transaction } from "../transaction.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from 
"../databases.js"; +import { Transaction } from "../transactions.js"; import { config } from "./_config.js"; describe("Transactions", () => { @@ -27,7 +27,7 @@ describe("Transactions", () => { const result = await db.executeTransaction( [], "function (params) {return params;}", - { params: "test" } + { params: "test" }, ); expect(result).to.equal("test"); }); @@ -44,16 +44,16 @@ describe("Transactions", () => { after(async () => { await Promise.all( allTransactions.map((transaction) => - transaction.abort().catch(() => undefined) - ) + transaction.abort().catch(() => undefined), + ), ); await system.dropDatabase(name); }); beforeEach(async () => { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); afterEach(async () => { diff --git a/src/test/05-aql-helpers.ts b/src/test/05-aql-helpers.ts index 9bc70c6e6..8c2edc306 100644 --- a/src/test/05-aql-helpers.ts +++ b/src/test/05-aql-helpers.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; import { aql, join, literal } from "../aql.js"; -import { Database } from "../database.js"; +import { Database } from "../databases.js"; describe("AQL helpers", function () { describe("aql", () => { @@ -20,10 +20,10 @@ describe("AQL helpers", function () { ]; const query = aql`A ${values[0]} B ${values[1]} C ${values[2]} D ${values[3]} E ${values[4]} F ${values[5]} G ${values[6]} H ${values[7]} I ${values[8]} J ${values[9]} K EOF`; expect(query.query).to.equal( - `A @value0 B @value1 C @value2 D @value3 E @value4 F @value5 G @value6 H @value7 I @value8 J @value9 K EOF` + `A @value0 B @value1 C @value2 D @value3 E @value4 F @value5 G @value6 H @value7 I @value8 J @value9 K EOF`, ); const bindVarNames = Object.keys(query.bindVars).sort((a, b) => - +a.substr(5) > +b.substr(5) ? 1 : -1 + +a.substr(5) > +b.substr(5) ? 
1 : -1, ); expect(bindVarNames).to.eql([ "value0", @@ -41,7 +41,7 @@ describe("AQL helpers", function () { }); it("omits undefined bindvars and empty queries", () => { const query = aql`A ${undefined} B ${aql``} C ${join([])} D ${literal( - "" + "", )} E`; expect(query.query).to.equal("A B C D E"); expect(query.bindVars).to.eql({}); @@ -111,20 +111,20 @@ describe("AQL helpers", function () { it("supports nesting simple queries", () => { const query = aql`FOR x IN (${aql`FOR a IN 1..3 RETURN a`}) RETURN x`; expect(query.query).to.equal( - "FOR x IN (FOR a IN 1..3 RETURN a) RETURN x" + "FOR x IN (FOR a IN 1..3 RETURN a) RETURN x", ); }); it("supports deeply nesting simple queries", () => { const query = aql`FOR x IN (${aql`FOR a IN (${aql`FOR b IN 1..3 RETURN b`}) RETURN a`}) RETURN x`; expect(query.query).to.equal( - "FOR x IN (FOR a IN (FOR b IN 1..3 RETURN b) RETURN a) RETURN x" + "FOR x IN (FOR a IN (FOR b IN 1..3 RETURN b) RETURN a) RETURN x", ); }); it("supports nesting with bindVars", () => { const collection = db.collection("paprika"); const query = aql`A ${collection} B ${aql`X ${collection} Y ${aql`J ${collection} K ${9} L`} Z`} C ${4}`; expect(query.query).to.equal( - "A @@value0 B X @@value0 Y J @@value0 K @value1 L Z C @value2" + "A @@value0 B X @@value0 Y J @@value0 K @value1 L Z C @value2", ); expect(query.bindVars).to.eql({ "@value0": "paprika", @@ -138,7 +138,7 @@ describe("AQL helpers", function () { const filter = aql`FILTER u.role == ${role}`; const query = aql`FOR u IN ${users} ${filter} RETURN u`; expect(query.query).to.equal( - "FOR u IN @@value0 FILTER u.role == @value1 RETURN u" + "FOR u IN @@value0 FILTER u.role == @value1 RETURN u", ); expect(query.bindVars).to.eql({ "@value0": users.name, @@ -153,7 +153,7 @@ describe("AQL helpers", function () { it("supports deep nesting", () => { const query = aql`A ${1} ${aql`a ${2} ${aql`X ${3} ${aql`x ${4} y`} ${5} Y`} ${6} b`} ${7} B`; expect(query.query).to.equal( - "A @value0 a @value1 X @value2 x 
@value3 y @value4 Y @value5 b @value6 B" + "A @value0 a @value1 X @value2 x @value3 y @value4 Y @value5 b @value6 B", ); expect(query.bindVars).to.eql({ value0: 1, @@ -185,7 +185,7 @@ describe("AQL helpers", function () { ]; for (const [value, result] of pairs) { it(`returns an AQL literal of "${result}" for ${String( - JSON.stringify(value) + JSON.stringify(value), )}`, () => { expect(literal(value).toAQL()).to.equal(result); }); diff --git a/src/test/06-managing-functions.ts b/src/test/06-managing-functions.ts index 790e83642..4e8f30dcb 100644 --- a/src/test/06-managing-functions.ts +++ b/src/test/06-managing-functions.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { Database } from "../database.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; describe("Managing functions", function () { @@ -20,15 +20,15 @@ describe("Managing functions", function () { }); describe("database.listFunctions", () => { it("should be empty per default", async () => { - const result = await db.listFunctions(); + const result = await db.listUserFunctions(); expect(result).to.be.instanceof(Array); expect(result).to.be.empty; }); it("should include before created function", async () => { const name = "myfunctions::temperature::celsiustofahrenheit"; const code = "function (celsius) { return celsius * 1.8 + 32; }"; - await db.createFunction(name, code); - const result = await db.listFunctions(); + await db.createUserFunction(name, code); + const result = await db.listUserFunctions(); expect(result).to.be.instanceof(Array); expect(result.length).to.equal(1); expect(result[0]).to.eql({ @@ -39,9 +39,9 @@ describe("Managing functions", function () { }); describe("database.createFunction", () => { it("should create a function", async () => { - const info = await db.createFunction( + const info = await db.createUserFunction( "myfunctions::temperature::celsiustofahrenheit2", - "function (celsius) { return celsius * 1.8 + 32; }" + "function 
(celsius) { return celsius * 1.8 + 32; }", ); expect(info).to.have.property("code", 201); expect(info).to.have.property("error", false); @@ -50,11 +50,11 @@ describe("Managing functions", function () { describe("database.dropFunction", () => { it("should drop a existing function", async () => { const name = "myfunctions::temperature::celsiustofahrenheit"; - await db.createFunction( + await db.createUserFunction( name, - "function (celsius) { return celsius * 1.8 + 32; }" + "function (celsius) { return celsius * 1.8 + 32; }", ); - const info = await db.dropFunction(name); + const info = await db.dropUserFunction(name); expect(info).to.have.property("deletedCount", 1); }); }); diff --git a/src/test/07-routes.ts b/src/test/07-routes.ts index 5e9b8d000..12853c770 100644 --- a/src/test/07-routes.ts +++ b/src/test/07-routes.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; -import { Route } from "../route.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; +import { Route } from "../routes.js"; import { config } from "./_config.js"; describe("Arbitrary HTTP routes", () => { @@ -17,7 +17,7 @@ describe("Arbitrary HTTP routes", () => { it("creates a route for the given path", () => { const path = "/hi"; const route = db.route(path); - expect((route as any)._path).to.equal(path); + expect((route as any)._pathname).to.equal(path); }); it("passes the given headers to the new route", () => { const route = db.route("/hello", { "x-magic": "awesome" }); @@ -38,8 +38,8 @@ describe("Route API", function () { db = await system.createDatabase(name); collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); after(async () => { @@ -55,7 +55,7 @@ describe("Route API", function 
() { describe("route.route", () => { it("should concat path", () => { const route = db.route("/_api").route("/version"); - expect(route).to.have.property("_path", "/_api/version"); + expect(route).to.have.property("_pathname", "/_api/version"); }); }); describe("route.get", () => { diff --git a/src/test/08-cursors.ts b/src/test/08-cursors.ts index 304bdd6b7..6d1ccba36 100644 --- a/src/test/08-cursors.ts +++ b/src/test/08-cursors.ts @@ -1,8 +1,8 @@ import { expect } from "chai"; -import { LinkedList } from "../lib/linkedList.js"; +import { LinkedList } from "../lib/x3-linkedlist.js"; import { aql } from "../aql.js"; -import { ArrayCursor, BatchedArrayCursor } from "../cursor.js"; -import { Database } from "../database.js"; +import { Cursor, BatchCursor } from "../cursors.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const aqlQuery = aql`FOR i IN 0..10 RETURN i`; @@ -17,8 +17,8 @@ async function sleep(ms: number) { describe("Item-wise Cursor API", () => { const name = `testdb_${Date.now()}`; let system: Database, db: Database; - let cursor: ArrayCursor; - let allCursors: (ArrayCursor | BatchedArrayCursor)[]; + let cursor: Cursor; + let allCursors: (Cursor | BatchCursor)[]; before(async () => { allCursors = []; system = new Database(config); @@ -28,7 +28,7 @@ describe("Item-wise Cursor API", () => { }); after(async () => { await Promise.all( - allCursors.map((cursor) => cursor.kill().catch(() => undefined)) + allCursors.map((cursor) => cursor.kill().catch(() => undefined)), ); try { await system.dropDatabase(name); @@ -138,7 +138,7 @@ describe("Item-wise Cursor API", () => { }); it("returns false after last result is consumed (with large amount of results)", async () => { const EXPECTED_LENGTH = 10000; - async function loadMore(cursor: ArrayCursor, totalLength: number) { + async function loadMore(cursor: Cursor, totalLength: number) { await cursor.next(); totalLength++; expect(cursor.hasNext).to.equal(totalLength !== 
EXPECTED_LENGTH); @@ -199,7 +199,7 @@ describe("Item-wise Cursor API", () => { .reduce((acc, next) => { acc.push(...next); return acc; - }, [] as number[]) + }, [] as number[]), ); }); it("doesn't choke on non-arrays", async () => { @@ -226,7 +226,7 @@ describe("Item-wise Cursor API", () => { try { await db.request({ method: "PUT", - path: `/_api/cursor/${id}`, + pathname: `/_api/cursor/${id}`, hostUrl: hostUrl, }); } catch (e: any) { @@ -241,8 +241,8 @@ describe("Item-wise Cursor API", () => { describe("Batch-wise Cursor API", () => { const name = `testdb_${Date.now()}`; let system: Database, db: Database; - let cursor: BatchedArrayCursor; - let allCursors: (ArrayCursor | BatchedArrayCursor)[]; + let cursor: BatchCursor; + let allCursors: (Cursor | BatchCursor)[]; before(async () => { allCursors = []; system = new Database(config); @@ -252,7 +252,7 @@ describe("Batch-wise Cursor API", () => { }); after(async () => { await Promise.all( - allCursors.map((cursor) => cursor.kill().catch(() => undefined)) + allCursors.map((cursor) => cursor.kill().catch(() => undefined)), ); try { await system.dropDatabase(name); @@ -349,7 +349,7 @@ describe("Batch-wise Cursor API", () => { }); it("returns false after last result is consumed (with large amount of results)", async () => { const EXPECTED_LENGTH = 10000; - async function loadMore(cursor: ArrayCursor, totalLength: number) { + async function loadMore(cursor: Cursor, totalLength: number) { await cursor.next(); totalLength++; expect(cursor.hasNext).to.equal(totalLength !== EXPECTED_LENGTH); @@ -395,7 +395,7 @@ describe("Batch-wise Cursor API", () => { .reduce((acc, next) => { acc.push(...next); return acc; - }, [] as number[]) + }, [] as number[]), ); }); it("doesn't choke on non-arrays", async () => { @@ -442,7 +442,7 @@ describe("Batch-wise Cursor API", () => { try { await db.request({ method: "PUT", - path: `/_api/cursor/${id}`, + pathname: `/_api/cursor/${id}`, hostUrl: hostUrl, }); } catch (e: any) { diff --git 
a/src/test/09-collection-metadata.ts b/src/test/09-collection-metadata.ts index e7e2359c2..bbbe7fe64 100644 --- a/src/test/09-collection-metadata.ts +++ b/src/test/09-collection-metadata.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { COLLECTION_NOT_FOUND } from "../lib/codes.js"; import { config } from "./_config.js"; @@ -17,8 +17,8 @@ describe("Collection metadata", function () { db = system.database(dbName); collection = await db.createCollection(collectionName); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); after(async () => { diff --git a/src/test/10-manipulating-collections.ts b/src/test/10-manipulating-collections.ts index af90b8915..f53908c86 100644 --- a/src/test/10-manipulating-collections.ts +++ b/src/test/10-manipulating-collections.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; describe("Manipulating collections", function () { @@ -23,8 +23,8 @@ describe("Manipulating collections", function () { beforeEach(async () => { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); afterEach(async () => { @@ -38,11 +38,11 @@ describe("Manipulating collections", function () { describe("collection.create", () => { it("creates a new document collection", async () => { const collection = await db.createCollection( - 
`document-collection-${Date.now()}` + `document-collection-${Date.now()}`, ); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); const info = await db.collection(collection.name).get(); expect(info).to.have.property("name", collection.name); @@ -52,11 +52,11 @@ describe("Manipulating collections", function () { }); it("creates a new edge collection", async () => { const collection = await db.createEdgeCollection( - `edge-collection-${Date.now()}` + `edge-collection-${Date.now()}`, ); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); const info = await db.collection(collection.name).get(); expect(info).to.have.property("name", collection.name); diff --git a/src/test/11-managing-indexes.ts b/src/test/11-managing-indexes.ts index 0176e998c..f3ca332d8 100644 --- a/src/test/11-managing-indexes.ts +++ b/src/test/11-managing-indexes.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const it312 = config.arangoVersion! >= 31200 ? 
it : it.skip; @@ -18,8 +18,8 @@ describe("Managing indexes", function () { db = system.database(dbName); collection = await db.createCollection(collectionName); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); after(async () => { diff --git a/src/test/13-bulk-imports.ts b/src/test/13-bulk-imports.ts index 6e25712d5..31494d2b5 100644 --- a/src/test/13-bulk-imports.ts +++ b/src/test/13-bulk-imports.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; describe("Bulk imports", function () { @@ -16,8 +16,8 @@ describe("Bulk imports", function () { db = system.database(dbName); collection = await db.createCollection(collectionName); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); after(async () => { @@ -77,7 +77,7 @@ describe("Bulk imports", function () { }); it("should accept buffer of LDJSON arrays", async () => { const data = Buffer.from( - '["_key", "data"]\r\n["tb1", "banana"]\r\n["tb2", "peach"]\r\n["tb3", "apricot"]\r\n' + '["_key", "data"]\r\n["tb1", "banana"]\r\n["tb2", "peach"]\r\n["tb3", "apricot"]\r\n', ); const info = await collection.import(data); expect(info).to.eql({ @@ -106,7 +106,7 @@ describe("Bulk imports", function () { }); it("should accept buffer of LDJSON documents", async () => { const data = Buffer.from( - `{"_key": "db1-${type}", "data": "banana"}\r\n{"_key": "db2-${type}", "data": "peach"}\r\n{"_key": "db3-${type}", "data": "apricot"}\r\n` + `{"_key": "db1-${type}", "data": "banana"}\r\n{"_key": "db2-${type}", "data": "peach"}\r\n{"_key": "db3-${type}", "data": "apricot"}\r\n`, ); const info = 
await collection.import(data, { type }); expect(info).to.eql({ @@ -144,7 +144,7 @@ describe("Bulk imports", function () { { _key: `jb1-${String(type)}`, data: "banana" }, { _key: `jb2-${String(type)}`, data: "peach" }, { _key: `jb3-${String(type)}`, data: "apricot" }, - ]) + ]), ); const info = await collection.import(data, { type }); expect(info).to.eql({ diff --git a/src/test/14-document-collections.ts b/src/test/14-document-collections.ts index ea934edd6..9ebede187 100644 --- a/src/test/14-document-collections.ts +++ b/src/test/14-document-collections.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { DocumentMetadata } from "../documents.js"; import { config } from "./_config.js"; @@ -24,8 +24,8 @@ describe("DocumentCollection API", function () { beforeEach(async () => { collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); afterEach(async () => { @@ -177,7 +177,7 @@ describe("DocumentCollection API", function () { await collection.update( doc, { sup: "dawg", empty: null }, - { keepNull: false } + { keepNull: false }, ); const newData = await collection.document(doc._key); expect(newData).to.have.property("potato").that.equals(doc.potato); diff --git a/src/test/15-edge-collections.ts b/src/test/15-edge-collections.ts index c4f30a623..5a8811ed0 100644 --- a/src/test/15-edge-collections.ts +++ b/src/test/15-edge-collections.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { EdgeCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { EdgeCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { DocumentMetadata } from 
"../documents.js"; import { config } from "./_config.js"; @@ -28,8 +28,8 @@ describe("EdgeCollection API", function () { beforeEach(async () => { collection = await db.createEdgeCollection(`c_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); afterEach(async () => { @@ -110,7 +110,7 @@ describe("EdgeCollection API", function () { "_id", "_rev", "_from", - "_to" + "_to", ); expect(doc._id).to.equal(meta._id); expect(doc._key).to.equal(meta._key); @@ -138,7 +138,7 @@ describe("EdgeCollection API", function () { "_id", "_rev", "_from", - "_to" + "_to", ); expect(doc._id).to.equal(meta._id); expect(doc._rev).to.equal(meta._rev); @@ -163,7 +163,7 @@ describe("EdgeCollection API", function () { "_id", "_rev", "_from", - "_to" + "_to", ); expect(doc.something).to.equal(data.something); expect(doc._id).to.equal(meta._id); @@ -216,7 +216,7 @@ describe("EdgeCollection API", function () { await collection.update( doc, { more: "peanuts", empty: null }, - { keepNull: false } + { keepNull: false }, ); const newData = await collection.document(doc._key); expect(newData).to.have.property("something", doc.something); diff --git a/src/test/16-graphs.ts b/src/test/16-graphs.ts index c95d059a2..8cbba1800 100644 --- a/src/test/16-graphs.ts +++ b/src/test/16-graphs.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { Graph } from "../graph.js"; +import { Database } from "../databases.js"; +import { Graph } from "../graphs.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -12,15 +12,15 @@ async function createCollections(db: Database) { ...vertexCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: 
`/_api/collection/${collection.name}` }, + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ] as Promise[]); @@ -30,14 +30,14 @@ async function createCollections(db: Database) { async function createGraph( graph: Graph, vertexCollectionNames: string[], - edgeCollectionNames: string[] + edgeCollectionNames: string[], ) { return await graph.create( edgeCollectionNames.map((name) => ({ collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); } @@ -69,7 +69,7 @@ describe("Graph API", function () { after(async () => { await graph.drop(); await Promise.all( - collectionNames.map((name) => db.collection(name).drop()) + collectionNames.map((name) => db.collection(name).drop()), ); }); it("fetches information about the graph", async () => { @@ -106,11 +106,11 @@ describe("Graph API", function () { collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); await db.waitForPropagation( - { path: `/_api/gharial/${graph.name}` }, - 10000 + { pathname: `/_api/gharial/${graph.name}` }, + 10000, ); const data = await graph.get(); expect(data).to.have.property("name", graph.name); @@ -132,8 +132,8 @@ describe("Graph API", function () { db .collection(name) .drop() - .catch(() => null) - ) + .catch(() => null), + ), ); }); it("destroys the graph if not passed true", async () => { diff --git a/src/test/17-graph-vertices.ts b/src/test/17-graph-vertices.ts index f46bff192..f83c786a9 100644 --- a/src/test/17-graph-vertices.ts +++ b/src/test/17-graph-vertices.ts @@ -1,8 +1,8 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; -import { ArangoError } from "../error.js"; -import { Graph, GraphVertexCollection } from 
"../graph.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; +import { ArangoError } from "../errors.js"; +import { Graph, GraphVertexCollection } from "../graphs.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -14,15 +14,15 @@ async function createCollections(db: Database) { ...vertexCollectionNames.map(async (name) => { const collection = await db.createCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ...edgeCollectionNames.map(async (name) => { const collection = await db.createEdgeCollection(name); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }), ] as Promise[]); @@ -32,18 +32,18 @@ async function createCollections(db: Database) { async function createGraph( graph: Graph, vertexCollectionNames: string[], - edgeCollectionNames: string[] + edgeCollectionNames: string[], ) { const result = await graph.create( edgeCollectionNames.map((name) => ({ collection: name, from: vertexCollectionNames, to: vertexCollectionNames, - })) + })), ); - await (graph as any)._db.waitForPropagation( - { path: `/_api/gharial/${graph.name}` }, - 10000 + await graph.database.waitForPropagation( + { pathname: `/_api/gharial/${graph.name}` }, + 10000, ); return result; } @@ -75,7 +75,7 @@ describe("Manipulating graph vertices", function () { afterEach(async () => { await graph.drop(); await Promise.all( - collectionNames.map((name) => db.collection(name).drop()) + collectionNames.map((name) => db.collection(name).drop()), ); }); describe("graph.vertexCollection", () => { @@ -91,8 +91,8 @@ describe("Manipulating graph vertices", function () { beforeEach(async () => { vertexCollection = await db.createCollection(`xc_${Date.now()}`); 
await db.waitForPropagation( - { path: `/_api/collection/${vertexCollection.name}` }, - 10000 + { pathname: `/_api/collection/${vertexCollection.name}` }, + 10000, ); }); afterEach(async () => { @@ -108,8 +108,8 @@ describe("Manipulating graph vertices", function () { beforeEach(async () => { vertexCollection = await db.createCollection(`xc_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${vertexCollection.name}` }, - 10000 + { pathname: `/_api/collection/${vertexCollection.name}` }, + 10000, ); await graph.addVertexCollection(vertexCollection.name); }); @@ -121,7 +121,7 @@ describe("Manipulating graph vertices", function () { it("destroys the collection if explicitly passed true", async () => { const data = await graph.removeVertexCollection( vertexCollection.name, - true + true, ); expect(data.orphanCollections).not.to.contain(vertexCollection.name); try { diff --git a/src/test/18-graph-edges.ts b/src/test/18-graph-edges.ts index 2aab1197c..de99a5356 100644 --- a/src/test/18-graph-edges.ts +++ b/src/test/18-graph-edges.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { Graph } from "../graph.js"; +import { Database } from "../databases.js"; +import { Graph } from "../graphs.js"; import { config } from "./_config.js"; describe("Manipulating graph edges", function () { @@ -31,7 +31,10 @@ describe("Manipulating graph edges", function () { to: ["person"], }, ]); - await db.waitForPropagation({ path: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation( + { pathname: `/_api/gharial/${graph.name}` }, + 10000, + ); }); afterEach(async () => { await graph.drop(); @@ -45,13 +48,13 @@ describe("Manipulating graph edges", function () { expect(info.edgeDefinitions.map((e) => e.collection)).to.contain("knows"); expect(info.edgeDefinitions.length).to.equal(1); const edgeDefinition = info.edgeDefinitions.filter( - (e) => e.collection === "knows" + (e) => e.collection === "knows", ); 
expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]), ).to.contain("person"); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]), ).to.contain("person"); }); }); @@ -90,17 +93,17 @@ describe("Manipulating graph edges", function () { expect(info).to.have.property("edgeDefinitions"); expect(info.edgeDefinitions).to.be.instanceOf(Array); expect(info.edgeDefinitions.map((e) => e.collection)).to.contain( - "works_in" + "works_in", ); expect(info.edgeDefinitions.length).to.equal(2); const edgeDefinition = info.edgeDefinitions.filter( - (e) => e.collection === "works_in" + (e) => e.collection === "works_in", ); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]), ).to.contain("person"); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]), ).to.contain("city"); }); }); @@ -117,13 +120,13 @@ describe("Manipulating graph edges", function () { expect(info.edgeDefinitions.map((e) => e.collection)).to.contain("knows"); expect(info.edgeDefinitions.length).to.equal(1); const edgeDefinition = info.edgeDefinitions.filter( - (e) => e.collection === "knows" + (e) => e.collection === "knows", ); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.from], [] as string[]), ).to.contain("person"); expect( - edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]) + edgeDefinition.reduce((arr, e) => [...arr, ...e.to], [] as string[]), ).to.contain("city"); }); }); diff --git a/src/test/19-graph-vertex-collections.ts b/src/test/19-graph-vertex-collections.ts index 252dcf06a..1340d50ed 100644 --- 
a/src/test/19-graph-vertex-collections.ts +++ b/src/test/19-graph-vertex-collections.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; -import { Database } from "../database.js"; +import { Database } from "../databases.js"; import { DocumentMetadata } from "../documents.js"; -import { GraphVertexCollection } from "../graph.js"; +import { GraphVertexCollection } from "../graphs.js"; import { config } from "./_config.js"; describe("GraphVertexCollection API", function () { @@ -22,7 +22,10 @@ describe("GraphVertexCollection API", function () { to: ["person"], }, ]); - await db.waitForPropagation({ path: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation( + { pathname: `/_api/gharial/${graph.name}` }, + 10000, + ); collection = graph.vertexCollection("person"); }); after(async () => { @@ -132,7 +135,7 @@ describe("GraphVertexCollection API", function () { await collection.update( doc, { sup: "dawg", empty: null }, - { keepNull: false } + { keepNull: false }, ); const newData = await collection.vertex(doc._key); expect(newData).to.have.property("potato").that.equals(doc.potato); diff --git a/src/test/20-graph-edge-collections.ts b/src/test/20-graph-edge-collections.ts index 3de62de8a..71310f218 100644 --- a/src/test/20-graph-edge-collections.ts +++ b/src/test/20-graph-edge-collections.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; -import { Database } from "../database.js"; +import { Database } from "../databases.js"; import { DocumentMetadata } from "../documents.js"; -import { GraphEdgeCollection } from "../graph.js"; +import { GraphEdgeCollection } from "../graphs.js"; import { config } from "./_config.js"; describe("GraphEdgeCollection API", function () { @@ -22,7 +22,10 @@ describe("GraphEdgeCollection API", function () { to: ["person"], }, ]); - await db.waitForPropagation({ path: `/_api/gharial/${graph.name}` }, 10000); + await db.waitForPropagation( + { pathname: `/_api/gharial/${graph.name}` }, + 10000, + ); collection = 
graph.edgeCollection("knows"); await graph .vertexCollection("person") @@ -163,7 +166,7 @@ describe("GraphEdgeCollection API", function () { await collection.update( doc, { sup: "dawg", empty: null }, - { keepNull: false } + { keepNull: false }, ); const newData = await collection.edge(doc._key); expect(newData).to.have.property("potato", doc.potato); diff --git a/src/test/22-foxx-api.ts b/src/test/22-foxx-api.ts index 9f636127a..51e9c0606 100644 --- a/src/test/22-foxx-api.ts +++ b/src/test/22-foxx-api.ts @@ -2,8 +2,8 @@ import { expect } from "chai"; import * as fs from "fs"; import * as path from "path"; -import { Database } from "../database.js"; -import { ArangoError } from "../error.js"; +import { Database } from "../databases.js"; +import { ArangoError } from "../errors.js"; import { config } from "./_config.js"; const localAppsPath = path.resolve(".", "fixtures"); @@ -23,9 +23,9 @@ describe("Foxx service", () => { serviceServiceMount, new Blob([ fs.readFileSync( - path.resolve("fixtures", "service-service-service.zip") + path.resolve("fixtures", "service-service-service.zip"), ), - ]) + ]), ); arangoPaths = (await db.route(serviceServiceMount).get()).parsedBody; }); @@ -65,7 +65,7 @@ describe("Foxx service", () => { source: () => new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.js") + path.resolve(localAppsPath, "minimal-working-service.js"), ), ]), }, @@ -74,7 +74,7 @@ describe("Foxx service", () => { source: () => new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), ]), }, @@ -92,7 +92,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "itzpapalotl.zip")), - ]) + ]), ); await db.replaceService(mount, c.source(arangoPaths)); const resp = await db.route(mount).get(); @@ -104,7 +104,7 @@ describe("Foxx service", () => { mount, new Blob([ 
fs.readFileSync(path.resolve(localAppsPath, "itzpapalotl.zip")), - ]) + ]), ); await db.upgradeService(mount, c.source(arangoPaths)); const resp = await db.route(mount).get(); @@ -117,9 +117,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); await db.uninstallService(mount); try { @@ -133,9 +133,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceConfiguration(mount); expect(resp).to.eql({}); @@ -146,9 +146,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceConfiguration(mount, true); expect(resp).to.eql({}); @@ -159,7 +159,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const resp = await db.getServiceConfiguration(mount); expect(resp).to.have.property("test1"); @@ -173,7 +173,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const resp = await db.getServiceConfiguration(mount, true); expect(resp).to.have.eql({}); @@ -184,7 +184,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const updateResp = await db.updateServiceConfiguration(mount, { test1: "test", @@ -207,14 +207,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const updateResp = await 
db.updateServiceConfiguration( mount, { test1: "test", }, - true + true, ); expect(updateResp).to.have.property("values"); expect(updateResp.values).to.have.property("test1", "test"); @@ -230,7 +230,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceConfiguration(mount, { test1: "test", @@ -253,14 +253,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceConfiguration( mount, { test1: "test", }, - true + true, ); expect(replaceResp).to.have.property("values"); expect(replaceResp.values).to.have.property("test1", "test"); @@ -277,7 +277,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); await db.replaceServiceConfiguration(mount, { test2: "test2" }); await db.updateServiceConfiguration(mount, { test1: "test1" }); @@ -293,7 +293,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); await db.replaceServiceConfiguration(mount, { test2: "test2" }, true); await db.updateServiceConfiguration(mount, { test1: "test1" }, true); @@ -307,7 +307,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); await db.updateServiceConfiguration(mount, { test2: "test2" }); await db.replaceServiceConfiguration(mount, { test1: "test" }); @@ -323,7 +323,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-configuration.zip")), - ]) + ]), ); await db.updateServiceConfiguration(mount, { test2: "test2" }, true); await db.replaceServiceConfiguration(mount, { test1: "test" }, true); @@ -337,9 +337,9 @@ describe("Foxx 
service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceDependencies(mount); expect(resp).to.eql({}); @@ -350,9 +350,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceDependencies(mount, true); expect(resp).to.eql({}); @@ -363,7 +363,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const resp = await db.getServiceDependencies(mount); expect(resp).to.have.property("test1"); @@ -377,7 +377,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const resp = await db.getServiceDependencies(mount, true); expect(resp).to.eql({}); @@ -388,7 +388,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const updateResp = await db.updateServiceDependencies(mount, { test1: "/test", @@ -411,14 +411,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const updateResp = await db.updateServiceDependencies( mount, { test1: "/test", }, - true + true, ); expect(updateResp).to.have.property("values"); expect(updateResp.values).to.have.property("test1", "/test"); @@ -434,7 +434,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceDependencies(mount, { test1: "/test", @@ -457,14 +457,14 @@ describe("Foxx service", () => { mount, new Blob([ 
fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceDependencies( mount, { test1: "/test", }, - true + true, ); expect(replaceResp).to.have.property("values"); expect(replaceResp.values).to.have.property("test1", "/test"); @@ -481,7 +481,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceDependencies(mount, { test2: "/test2", @@ -511,12 +511,12 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const replaceResp = await db.replaceServiceDependencies( mount, { test2: "/test2" }, - true + true, ); expect(replaceResp).to.have.property("values"); expect(replaceResp.values).to.have.property("test2", "/test2"); @@ -528,7 +528,7 @@ describe("Foxx service", () => { { test1: "/test1", }, - true + true, ); expect(updateResp).to.have.property("values"); expect(updateResp.values).to.have.property("test1", "/test1"); @@ -543,7 +543,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const updateResp = await db.updateServiceDependencies(mount, { test2: "/test2", @@ -575,14 +575,14 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-dependencies.zip")), - ]) + ]), ); const updateResp = await db.updateServiceDependencies( mount, { test2: "/test2", }, - true + true, ); expect(updateResp).to.have.property("values"); expect(updateResp.values).to.not.have.property("test1"); @@ -593,7 +593,7 @@ describe("Foxx service", () => { { test1: "/test1", }, - true + true, ); expect(replaceResp).to.have.property("values"); expect(replaceResp.values).to.have.property("test1", "/test1"); @@ -610,9 +610,9 @@ describe("Foxx service", () => { mount, new Blob([ 
fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.downloadService(mount); expect(resp).to.be.instanceof(Blob); @@ -623,9 +623,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const services = await db.listServices(); expect(services).to.be.instanceOf(Array); @@ -637,9 +637,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const services = await db.listServices(); const service = services.find((service) => service.mount === mount)!; @@ -656,9 +656,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const service = await db.getService(mount); expect(service).to.have.property("mount", mount); @@ -679,11 +679,11 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-setup-teardown.zip") + path.resolve(localAppsPath, "minimal-working-setup-teardown.zip"), ), - ]) + ]), ); - const scripts = await db.listServiceScripts(mount); + const scripts = await db.getServiceScripts(mount); expect(scripts).to.have.property("setup", "Setup"); expect(scripts).to.have.property("teardown", "Teardown"); }); @@ -693,9 +693,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-setup-teardown.zip") + path.resolve(localAppsPath, "minimal-working-setup-teardown.zip"), ), - ]) + ]), ); const col = `${mount}_setup_teardown`.replace(/\//, "").replace(/-/g, "_"); 
expect(await db.collection(col).get()).to.be.instanceOf(Object); @@ -714,7 +714,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "echo-script.zip")), - ]) + ]), ); try { await db.runServiceScript(mount, "no", {}); @@ -730,7 +730,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "echo-script.zip")), - ]) + ]), ); const argv = { hello: "world" }; const resp = await db.runServiceScript(mount, "echo", argv); @@ -742,7 +742,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "echo-script.zip")), - ]) + ]), ); const argv = ["yes", "please"]; const resp = await db.runServiceScript(mount, "echo", argv); @@ -754,9 +754,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getService(mount); expect(resp.development).to.equal(false); @@ -771,10 +771,10 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), ]), - { development: true } + { development: true }, ); const resp = await db.getService(mount); expect(resp.development).to.equal(true); @@ -787,7 +787,9 @@ describe("Foxx service", () => { it("tests should run", async () => { await db.installService( mount, - new Blob([fs.readFileSync(path.resolve(localAppsPath, "with-tests.zip"))]) + new Blob([ + fs.readFileSync(path.resolve(localAppsPath, "with-tests.zip")), + ]), ); const resp = await db.runServiceTests(mount, {}); expect(resp).to.have.property("stats"); @@ -802,7 +804,7 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync(path.resolve(localAppsPath, "with-readme.zip")), - ]) + ]), ); const resp = await db.getServiceReadme(mount); 
expect(resp).to.equal("Please read this."); @@ -813,9 +815,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceReadme(mount); expect(resp).to.equal(undefined); @@ -826,9 +828,9 @@ describe("Foxx service", () => { mount, new Blob([ fs.readFileSync( - path.resolve(localAppsPath, "minimal-working-service.zip") + path.resolve(localAppsPath, "minimal-working-service.zip"), ), - ]) + ]), ); const resp = await db.getServiceDocumentation(mount); expect(resp).to.have.property("swagger", "2.0"); @@ -853,7 +855,7 @@ describe("Foxx service", () => { "getServiceDependencies", (mount: string) => db.getServiceDependencies(mount), ], - ["listServiceScripts", (mount: string) => db.listServiceScripts(mount)], + ["listServiceScripts", (mount: string) => db.getServiceScripts(mount)], ["upgradeService", (mount: string) => db.upgradeService(mount, {} as any)], [ "updateServiceConfiguration", diff --git a/src/test/23-aql-queries-stream.ts b/src/test/23-aql-queries-stream.ts index cced8f0d9..55a39e5e9 100644 --- a/src/test/23-aql-queries-stream.ts +++ b/src/test/23-aql-queries-stream.ts @@ -1,13 +1,14 @@ import { expect } from "chai"; import { aql } from "../aql.js"; -import { ArrayCursor } from "../cursor.js"; -import { Database, QueryOptions } from "../database.js"; +import { Cursor } from "../cursors.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; +import { QueryOptions } from "../queries.js"; describe("AQL Stream queries", function () { const name = `testdb_${Date.now()}`; let system: Database, db: Database; - let allCursors: ArrayCursor[]; + let allCursors: Cursor[]; before(async () => { allCursors = []; system = new Database(config); @@ -17,7 +18,7 @@ describe("AQL Stream queries", function () { }); after(async () => { await Promise.all( - 
allCursors.map((cursor) => cursor.kill().catch(() => undefined)) + allCursors.map((cursor) => cursor.kill().catch(() => undefined)), ); try { await system.dropDatabase(name); @@ -29,7 +30,7 @@ describe("AQL Stream queries", function () { it("returns a cursor for the query result", async () => { const cursor = await db.query("RETURN 23", {}, { stream: true }); allCursors.push(cursor); - expect(cursor).to.be.an.instanceof(ArrayCursor); + expect(cursor).to.be.an.instanceof(Cursor); }); it("supports bindVars", async () => { const cursor = await db.query("RETURN @x", { x: 5 }, { stream: true }); @@ -45,7 +46,7 @@ describe("AQL Stream queries", function () { }); allCursors.push(cursor); expect(cursor.count).to.equal(undefined); - expect((cursor as any).batches.hasMore).to.equal(true); + expect(cursor.batches.hasMore).to.equal(true); }); it("supports compact queries with options", async () => { const query: any = { @@ -59,7 +60,7 @@ describe("AQL Stream queries", function () { }); allCursors.push(cursor); expect(cursor.count).to.equal(undefined); // count will be ignored - expect((cursor as any).batches.hasMore).to.equal(true); + expect(cursor.batches.hasMore).to.equal(true); }); }); describe("with some data", () => { @@ -67,13 +68,13 @@ describe("AQL Stream queries", function () { before(async () => { const collection = await db.createCollection(cname); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); await Promise.all( Array.from(Array(1000).keys()).map((i: number) => - collection.save({ hallo: i }) - ) + collection.save({ hallo: i }), + ), ); }); /*after(async () => { @@ -86,15 +87,15 @@ describe("AQL Stream queries", function () { let count = 0; const cursors = await Promise.all( - Array.from(Array(25)).map(() => db.query(query, options)) + Array.from(Array(25)).map(() => db.query(query, options)), ); allCursors.push(...cursors); await Promise.all( cursors.map((c) 
=> - (c as ArrayCursor).forEach(() => { + (c as Cursor).forEach(() => { count++; - }) - ) + }), + ), ); expect(count).to.equal(25 * 1000); }); diff --git a/src/test/24-accessing-views.ts b/src/test/24-accessing-views.ts index 7adfe43f9..0cdc063bd 100644 --- a/src/test/24-accessing-views.ts +++ b/src/test/24-accessing-views.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { View } from "../view.js"; +import { Database } from "../databases.js"; +import { View } from "../views.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -37,10 +37,10 @@ describe("Accessing views", function () { const view = db.view(name); await view.create({ type: "arangosearch" }); await db.waitForPropagation( - { path: `/_api/view/${view.name}` }, - 10000 + { pathname: `/_api/view/${view.name}` }, + 10000, ); - }) + }), ); }); after(async () => { @@ -60,15 +60,15 @@ describe("Accessing views", function () { const view = db.view(name); await view.create({ type: "arangosearch" }); await db.waitForPropagation( - { path: `/_api/view/${view.name}` }, - 10000 + { pathname: `/_api/view/${view.name}` }, + 10000, ); - }) + }), ); }); after(async () => { await Promise.all( - arangoSearchViewNames.map((name) => db.view(name).drop()) + arangoSearchViewNames.map((name) => db.view(name).drop()), ); }); it("creates View instances", async () => { @@ -76,7 +76,7 @@ describe("Accessing views", function () { const arangoSearchViews = views.filter((v) => v instanceof View).sort(); expect(arangoSearchViews.length).to.equal(arangoSearchViewNames.length); expect(arangoSearchViews.map((v) => v.name).sort()).to.eql( - arangoSearchViewNames + arangoSearchViewNames, ); }); }); diff --git a/src/test/25-view-metadata.ts b/src/test/25-view-metadata.ts index 2a946a12e..1c1eda411 100644 --- a/src/test/25-view-metadata.ts +++ b/src/test/25-view-metadata.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import 
{ Database } from "../database.js"; -import { ArangoSearchViewProperties, View } from "../view.js"; +import { Database } from "../databases.js"; +import { ArangoSearchViewProperties, View } from "../views.js"; import { config } from "./_config.js"; describe("View metadata", function () { @@ -16,7 +16,7 @@ describe("View metadata", function () { db = system.database(dbName); view = db.view(viewName); await view.create({ type: "arangosearch" }); - await db.waitForPropagation({ path: `/_api/view/${view.name}` }, 10000); + await db.waitForPropagation({ pathname: `/_api/view/${view.name}` }, 10000); }); after(async () => { await system.dropDatabase(dbName); diff --git a/src/test/26-manipulating-views.ts b/src/test/26-manipulating-views.ts index 1e0942560..512344267 100644 --- a/src/test/26-manipulating-views.ts +++ b/src/test/26-manipulating-views.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { Database } from "../database.js"; -import { ArangoSearchViewProperties, View } from "../view.js"; +import { Database } from "../databases.js"; +import { ArangoSearchViewProperties, View } from "../views.js"; import { config } from "./_config.js"; // NOTE These tests will not reliably work in a cluster. 
@@ -27,7 +27,7 @@ describe("Manipulating views", function () { beforeEach(async () => { view = db.view(`v-${Date.now()}`); await view.create({ type: "arangosearch" }); - await db.waitForPropagation({ path: `/_api/view/${view.name}` }, 10000); + await db.waitForPropagation({ pathname: `/_api/view/${view.name}` }, 10000); }); afterEach(async () => { try { @@ -41,7 +41,10 @@ describe("Manipulating views", function () { it("creates a new arangosearch view", async () => { const view = db.view(`asv-${Date.now()}`); await view.create({ type: "arangosearch" }); - await db.waitForPropagation({ path: `/_api/view/${view.name}` }, 10000); + await db.waitForPropagation( + { pathname: `/_api/view/${view.name}` }, + 10000, + ); const info = await view.get(); expect(info).to.have.property("name", view.name); expect(info).to.have.property("type", "arangosearch"); @@ -76,7 +79,7 @@ describe("Manipulating views", function () { commitIntervalMsec: 30000, }); expect(properties.consolidationIntervalMsec).to.equal( - initial.consolidationIntervalMsec + initial.consolidationIntervalMsec, ); expect(properties.commitIntervalMsec).to.equal(30000); }); diff --git a/src/test/27-query-management.ts b/src/test/27-query-management.ts index b1e6bda27..3e25cd5bc 100644 --- a/src/test/27-query-management.ts +++ b/src/test/27-query-management.ts @@ -1,8 +1,8 @@ import { expect } from "chai"; import { aql } from "../aql.js"; -import { ArrayCursor } from "../cursor.js"; -import { Database } from "../database.js"; -import { ArangoError } from "../error.js"; +import { Cursor } from "../cursors.js"; +import { Database } from "../databases.js"; +import { ArangoError, ResponseTimeoutError } from "../errors.js"; import { config } from "./_config.js"; // NOTE These tests will not reliably work with load balancing. 
@@ -18,7 +18,7 @@ async function sleep(ms: number) { describe("Query Management API", function () { const dbName = `testdb_${Date.now()}`; let system: Database, db: Database; - let allCursors: ArrayCursor[]; + let allCursors: Cursor[]; before(async () => { allCursors = []; system = new Database(config); @@ -28,12 +28,12 @@ describe("Query Management API", function () { // the following makes calls to /_db/${name} on all coordinators, thus waiting // long enough for the database to become available on all instances if (Array.isArray(config.url)) { - await db.waitForPropagation({ path: `/_api/version` }, 10000); + await db.waitForPropagation({ pathname: `/_api/version` }, 10000); } }); after(async () => { await Promise.all( - allCursors.map((cursor) => cursor.kill().catch(() => undefined)) + allCursors.map((cursor) => cursor.kill().catch(() => undefined)), ); try { await system.dropDatabase(dbName); @@ -46,7 +46,7 @@ describe("Query Management API", function () { it("returns a cursor for the query result", async () => { const cursor = await db.query("RETURN 23"); allCursors.push(cursor); - expect(cursor).to.be.an.instanceof(ArrayCursor); + expect(cursor).to.be.an.instanceof(Cursor); }); it("throws an exception on error", async () => { try { @@ -67,7 +67,7 @@ describe("Query Management API", function () { } catch (err: any) { expect(err).is.instanceof(Error); expect(err).is.not.instanceof(ArangoError); - expect(err.name).to.equal("AbortError"); + expect(err).is.instanceof(ResponseTimeoutError); return; } expect.fail(); @@ -95,7 +95,7 @@ describe("Query Management API", function () { }); allCursors.push(cursor); expect(cursor.count).to.equal(10); - expect((cursor as any).batches.hasMore).to.equal(true); + expect(cursor.batches.hasMore).to.equal(true); }); it("supports AQB queries", async () => { const cursor = await db.query({ toAQL: () => "RETURN 42" }); @@ -126,7 +126,7 @@ describe("Query Management API", function () { const cursor = await db.query(query, { 
batchSize: 2, count: true }); allCursors.push(cursor); expect(cursor.count).to.equal(10); - expect((cursor as any).batches.hasMore).to.equal(true); + expect(cursor.batches.hasMore).to.equal(true); }); }); @@ -200,7 +200,7 @@ describe("Query Management API", function () { // must filter the list here, as there could be other (system) queries // ongoing at the same time queries = (await db.listRunningQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); if (queries.length > 0) { break; @@ -237,7 +237,7 @@ describe("Query Management API", function () { allCursors.push(cursor); // must filter the list here, as there could have been other (system) queries const queries = (await db.listSlowQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); expect(queries).to.have.lengthOf(1); expect(queries[0]).to.have.property("query", query); @@ -266,12 +266,12 @@ describe("Query Management API", function () { allCursors.push(cursor); // must filter the list here, as there could have been other (system) queries const queries1 = (await db.listSlowQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); expect(queries1).to.have.lengthOf(1); await db.clearSlowQueries(); const queries2 = (await db.listSlowQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); expect(queries2).to.have.lengthOf(0); }); @@ -285,7 +285,7 @@ describe("Query Management API", function () { const p1 = db.query(query); p1.then((cursor) => allCursors.push(cursor)); const queries = (await db.listSlowQueries()).filter( - (i: any) => i.query === query + (i: any) => i.query === query, ); expect(queries).to.have.lengthOf(1); expect(queries[0]).to.have.property("bindVars"); diff --git a/src/test/28-accessing-analyzers.ts b/src/test/28-accessing-analyzers.ts index 06512c91c..f524cebfb 100644 --- a/src/test/28-accessing-analyzers.ts +++ b/src/test/28-accessing-analyzers.ts @@ -1,6 +1,6 @@ import 
{ expect } from "chai"; -import { Analyzer } from "../analyzer.js"; -import { Database } from "../database.js"; +import { Analyzer } from "../analyzers.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -42,17 +42,17 @@ describe("Accessing analyzers", function () { const analyzer = db.analyzer(name.replace(/^[^:]+::/, "")); await analyzer.create({ type: "identity" }); await db.waitForPropagation( - { path: `/_api/analyzer/${analyzer.name}` }, - 65000 + { pathname: `/_api/analyzer/${analyzer.name}` }, + 65000, ); - }) + }), ); }); after(async () => { await Promise.all( analyzerNames.map((name) => - db.analyzer(name.replace(/^[^:]+::/, "")).drop() - ) + db.analyzer(name.replace(/^[^:]+::/, "")).drop(), + ), ); }); it("fetches information about all analyzers", async () => { @@ -70,17 +70,17 @@ describe("Accessing analyzers", function () { const analyzer = db.analyzer(name.replace(/^[^:]+::/, "")); await analyzer.create({ type: "identity" }); await db.waitForPropagation( - { path: `/_api/analyzer/${analyzer.name}` }, - 65000 + { pathname: `/_api/analyzer/${analyzer.name}` }, + 65000, ); - }) + }), ); }); after(async () => { await Promise.all( analyzerNames.map((name) => - db.analyzer(name.replace(/^[^:]+::/, "")).drop() - ) + db.analyzer(name.replace(/^[^:]+::/, "")).drop(), + ), ); }); it("creates Analyzer instances", async () => { diff --git a/src/test/29-manipulating-analyzers.ts b/src/test/29-manipulating-analyzers.ts index 0cf02e879..f5726c145 100644 --- a/src/test/29-manipulating-analyzers.ts +++ b/src/test/29-manipulating-analyzers.ts @@ -1,10 +1,10 @@ import { expect } from "chai"; -import { Analyzer } from "../analyzer.js"; -import { Database } from "../database.js"; +import { Analyzer } from "../analyzers.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; function waitForAnalyzer(db: Database, name: string) { - 
return db.waitForPropagation({ path: `/_api/analyzer/${name}` }, 30000); + return db.waitForPropagation({ pathname: `/_api/analyzer/${name}` }, 30000); } describe("Manipulating analyzers", function () { diff --git a/src/test/29-queue-time.ts b/src/test/29-queue-time.ts index ba2686e05..06cc6d5f5 100644 --- a/src/test/29-queue-time.ts +++ b/src/test/29-queue-time.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -18,8 +18,8 @@ describe("Queue time metrics", function () { db = system.database(dbName); collection = await db.createCollection(`c_${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); after(async () => { @@ -34,7 +34,7 @@ describe("Queue time metrics", function () { }); it("should trim existing queue times when set to a lower value", async () => { await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(10); db.setResponseQueueTimeSamples(5); @@ -42,23 +42,23 @@ describe("Queue time metrics", function () { }); it("should allow more values when set to a higher value", async () => { await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(10); db.setResponseQueueTimeSamples(20); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); 
expect(db.queueTime.getValues().length).to.equal(20); }); it("should allow fewer values when set to a lower value", async () => { await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(10); db.setResponseQueueTimeSamples(5); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); expect(db.queueTime.getValues().length).to.equal(5); }); @@ -67,7 +67,7 @@ describe("Queue time metrics", function () { it("should return the latest value", async () => { expect(db.queueTime.getLatest()).to.equal(undefined); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); const values = db.queueTime.getValues(); expect(values.length).to.be.greaterThan(0); @@ -79,7 +79,7 @@ describe("Queue time metrics", function () { const min = Date.now(); expect(db.queueTime.getValues()).to.eql([]); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); const max = Date.now(); const values = db.queueTime.getValues(); @@ -96,7 +96,7 @@ describe("Queue time metrics", function () { it("should return the arithmetic average of all current values", async () => { expect(db.queueTime.getAvg()).to.equal(0); await Promise.all( - range(10).map(() => collection.save({ value: Math.random() })) + range(10).map(() => collection.save({ value: Math.random() })), ); const values = db.queueTime.getValues(); expect(values.length).to.be.greaterThan(0); diff --git a/src/test/30-concurrent-transactions.ts b/src/test/30-concurrent-transactions.ts index 9c8124a46..0ded03b89 100644 --- a/src/test/30-concurrent-transactions.ts +++ b/src/test/30-concurrent-transactions.ts @@ -1,9 +1,9 @@ /* 
eslint-disable no-console */ import { expect } from "chai"; -import { DocumentCollection } from "../collection.js"; +import { DocumentCollection } from "../collections.js"; import { Connection } from "../connection.js"; -import { Database } from "../database.js"; -import { Transaction } from "../transaction.js"; +import { Database } from "../databases.js"; +import { Transaction } from "../transactions.js"; import { config } from "./_config.js"; const delay = (ms: number) => @@ -32,8 +32,8 @@ describe("Transactions", function () { after(async () => { await Promise.all( allTransactions.map((transaction) => - transaction.abort().catch(() => undefined) - ) + transaction.abort().catch(() => undefined), + ), ); try { await system.dropDatabase(name); @@ -42,8 +42,8 @@ describe("Transactions", function () { beforeEach(async () => { collection = await db.createCollection(`collection-${Date.now()}`); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); afterEach(async () => { @@ -57,8 +57,8 @@ describe("Transactions", function () { it("can run concurrent transactions in parallel", async () => { const conn = (db as any)._connection as Connection; - const range = Array.from(Array((conn as any)._maxTasks).keys()).map( - (i) => i + 1 + const range = Array.from(Array((conn as any)._taskPoolSize).keys()).map( + (i) => i + 1, ); let failed = 0; await Promise.all( @@ -73,7 +73,7 @@ describe("Transactions", function () { trx.id, "completed begin after", Date.now() - started, - "ms elapsed" + "ms elapsed", ); await trx.step(() => collection.save({ enabled: true })); console.log( @@ -82,7 +82,7 @@ describe("Transactions", function () { trx.id, "completed save after", Date.now() - started, - "ms elapsed" + "ms elapsed", ); await delay(Math.random() * 10); await trx.commit(); @@ -92,7 +92,7 @@ describe("Transactions", function () { trx.id, "completed commit after", Date.now() - 
started, - "ms elapsed" + "ms elapsed", ); } catch (e: any) { console.error( @@ -102,18 +102,18 @@ describe("Transactions", function () { "failed after", Date.now() - started, "ms elapsed:", - String(e) + String(e), ); failed++; } - }) + }), ); expect(failed).to.equal(0); }); it("respects transactional guarantees", async () => { const conn = (db as any)._connection as Connection; - const range = Array.from(Array((conn as any)._maxTasks).keys()).map( - (i) => i + 1 + const range = Array.from(Array((conn as any)._taskPoolSize).keys()).map( + (i) => i + 1, ); const key = "test"; await collection.save({ _key: key, i: 0 }); @@ -134,7 +134,7 @@ describe("Transactions", function () { "adding", value, "=", - doc.i + value + doc.i + value, ); await trx.step(() => collection.update(key, { i: doc.i + value })); console.log(value, "committing"); @@ -144,7 +144,7 @@ describe("Transactions", function () { console.error(value, "failed:", String(e)); failed++; } - }) + }), ); const doc = await collection.document(key); expect(doc.i).to.equal(range.reduce((a, b) => a + b)); diff --git a/src/test/31-conflicts.ts b/src/test/31-conflicts.ts index 824fc08b7..6bd73a07f 100644 --- a/src/test/31-conflicts.ts +++ b/src/test/31-conflicts.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; import { aql } from "../aql.js"; -import { DocumentCollection } from "../collection.js"; -import { Database } from "../database.js"; +import { DocumentCollection } from "../collections.js"; +import { Database } from "../databases.js"; import { config } from "./_config.js"; const range = (n: number): number[] => Array.from(Array(n).keys()); @@ -19,8 +19,8 @@ describe("config.maxRetries", () => { db = await system.createDatabase(dbName); collection = await db.createCollection(collectionName); await db.waitForPropagation( - { path: `/_api/collection/${collection.name}` }, - 10000 + { pathname: `/_api/collection/${collection.name}` }, + 10000, ); }); after(async () => { @@ -45,12 +45,12 @@ 
describe("config.maxRetries", () => { LET doc = DOCUMENT(${collection}, ${docKey}) UPDATE doc WITH { data: doc.data + 1 } IN ${collection} `, - { retryOnConflict: 0 } - ) - ) + { retryOnConflict: 0 }, + ), + ), ); expect( - result.filter(({ status }) => status === "rejected") + result.filter(({ status }) => status === "rejected"), ).not.to.have.lengthOf(0); const { data } = await collection.document(docKey); expect(data).not.to.equal(1_000); @@ -65,9 +65,9 @@ describe("config.maxRetries", () => { LET doc = DOCUMENT(${collection}, ${docKey}) UPDATE doc WITH { data: doc.data + 1 } IN ${collection} `, - { retryOnConflict: 100 } - ) - ) + { retryOnConflict: 100 }, + ), + ), ); const { data } = await collection.document(docKey); expect(data).to.equal(1_000); diff --git a/src/test/_config.ts b/src/test/_config.ts index 472d940eb..53419dcbb 100644 --- a/src/test/_config.ts +++ b/src/test/_config.ts @@ -1,8 +1,8 @@ -import { Config, LoadBalancingStrategy } from "../connection.js"; +import { ConfigOptions, LoadBalancingStrategy } from "../configuration.js"; const ARANGO_URL = process.env.TEST_ARANGODB_URL || "http://127.0.0.1:8529"; const ARANGO_VERSION = Number( - process.env.ARANGO_VERSION || process.env.ARANGOJS_DEVEL_VERSION || 0 + process.env.ARANGO_VERSION || process.env.ARANGOJS_DEVEL_VERSION || 0, ); const ARANGO_RELEASE = process.env.ARANGO_RELEASE || ""; let arangoVersion: number = 39999; @@ -17,8 +17,8 @@ else if (ARANGO_RELEASE.includes(".")) { const ARANGO_LOAD_BALANCING_STRATEGY = process.env .TEST_ARANGO_LOAD_BALANCING_STRATEGY as LoadBalancingStrategy | undefined; -export const config: Config & { - arangoVersion: NonNullable; +export const config: ConfigOptions & { + arangoVersion: NonNullable; } = ARANGO_URL.includes(",") ? 
{ url: ARANGO_URL.split(",").filter((s) => Boolean(s)), diff --git a/src/transaction.ts b/src/transactions.ts similarity index 68% rename from src/transaction.ts rename to src/transactions.ts index 29a3d0b8a..7f222187f 100644 --- a/src/transaction.ts +++ b/src/transactions.ts @@ -1,29 +1,145 @@ /** * ```ts - * import type { Transaction } from "arangojs/transaction.js"; + * import type { Transaction } from "arangojs/transactions"; * ``` * - * The "transaction" module provides transaction related types and interfaces + * The "transactions" module provides transaction related types and interfaces * for TypeScript. * * @packageDocumentation */ -import { Connection } from "./connection.js"; -import { Database } from "./database.js"; -import { isArangoError } from "./error.js"; +import * as collections from "./collections.js"; +import * as connection from "./connection.js"; +import * as databases from "./databases.js"; +import * as errors from "./errors.js"; import { TRANSACTION_NOT_FOUND } from "./lib/codes.js"; +//#region Transaction operation options /** - * Indicates whether the given value represents a {@link Transaction}. - * - * @param transaction - A value that might be a transaction. + * Collections involved in a transaction. */ -export function isArangoTransaction( - transaction: any -): transaction is Transaction { - return Boolean(transaction && transaction.isArangoTransaction); +export type TransactionCollectionOptions = { + /** + * An array of collections or a single collection that will be read from or + * written to during the transaction with no other writes being able to run + * in parallel. + */ + exclusive?: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; + /** + * An array of collections or a single collection that will be read from or + * written to during the transaction. 
+ */ + write?: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; + /** + * An array of collections or a single collection that will be read from + * during the transaction. + */ + read?: + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection; +}; + +/** + * @internal + */ +export function coerceTransactionCollections( + options: + | (TransactionCollectionOptions & { allowImplicit?: boolean }) + | (string | collections.ArangoCollection)[] + | string + | collections.ArangoCollection +): CoercedTransactionCollections { + if (typeof options === "string") { + return { write: [options] }; + } + if (Array.isArray(options)) { + return { write: options.map(collections.collectionToString) }; + } + if (collections.isArangoCollection(options)) { + return { write: collections.collectionToString(options) }; + } + const opts: CoercedTransactionCollections = {}; + if (options) { + if (options.allowImplicit !== undefined) { + opts.allowImplicit = options.allowImplicit; + } + if (options.read) { + opts.read = Array.isArray(options.read) + ? options.read.map(collections.collectionToString) + : collections.collectionToString(options.read); + } + if (options.write) { + opts.write = Array.isArray(options.write) + ? options.write.map(collections.collectionToString) + : collections.collectionToString(options.write); + } + if (options.exclusive) { + opts.exclusive = Array.isArray(options.exclusive) + ? options.exclusive.map(collections.collectionToString) + : collections.collectionToString(options.exclusive); + } + } + return opts; } +/** + * @internal + */ +type CoercedTransactionCollections = { + allowImplicit?: boolean; + exclusive?: string | string[]; + write?: string | string[]; + read?: string | string[]; +}; + +/** + * Options for how the transaction should be performed. 
+ */ +export type TransactionOptions = { + /** + * Whether the transaction may read from collections not specified for this + * transaction. If set to `false`, accessing any collections not specified + * will result in the transaction being aborted to avoid potential deadlocks. + * + * Default: `true`. + */ + allowImplicit?: boolean; + /** + * If set to `true`, the request will explicitly permit ArangoDB to return a + * potentially dirty or stale result and arangojs will load balance the + * request without distinguishing between leaders and followers. + */ + allowDirtyRead?: boolean; + /** + * Determines whether to force the transaction to write all data to disk + * before returning. + */ + waitForSync?: boolean; + /** + * Determines how long the database will wait while attempting to gain locks + * on collections used by the transaction before timing out. + */ + lockTimeout?: number; + /** + * Determines the transaction size limit in bytes. + */ + maxTransactionSize?: number; + /** + * If set to `true`, the fast lock round will be skipped, which makes each + * locking operation take longer but guarantees deterministic locking order + * and may avoid deadlocks when many concurrent transactions are queued and + * try to access the same collection with an exclusive lock. + */ + skipFastLockRound?: boolean; +}; + /** * Options for how the transaction should be committed. */ @@ -47,13 +163,31 @@ export type TransactionAbortOptions = { */ allowDirtyRead?: boolean; }; +//#endregion + +//#region Transaction operation results +/** + * Description of a transaction in a list of transactions. + * + * See also {@link TransactionInfo}. + */ +export type TransactionDescription = { + /** + * Unique identifier of the transaction. + */ + id: string; + /** + * Status (or "state") of the transaction. + */ + state: "running" | "committed" | "aborted"; +}; /** * Status of a given transaction. * - * See also {@link database.TransactionDetails}. 
+ * See also {@link TransactionDescription}. */ -export type TransactionStatus = { +export type TransactionInfo = { /** * Unique identifier of the transaction. */ @@ -63,18 +197,30 @@ export type TransactionStatus = { */ status: "running" | "committed" | "aborted"; }; +//#endregion +//#region Transaction class +/** + * Indicates whether the given value represents a {@link Transaction}. + * + * @param transaction - A value that might be a transaction. + */ +export function isArangoTransaction( + transaction: any +): transaction is Transaction { + return Boolean(transaction && transaction.isArangoTransaction); +} /** - * Represents a streaming transaction in a {@link database.Database}. + * Represents a streaming transaction in a {@link databases.Database}. */ export class Transaction { - protected _db: Database; + protected _db: databases.Database; protected _id: string; /** * @internal */ - constructor(db: Database, id: string) { + constructor(db: databases.Database, id: string) { this._db = db; this._id = id; } @@ -98,7 +244,7 @@ export class Transaction { /** * Unique identifier of this transaction. * - * See {@link database.Database#transaction}. + * See {@link databases.Database#transaction}. 
*/ get id() { return this._id; @@ -120,7 +266,7 @@ export class Transaction { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === TRANSACTION_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === TRANSACTION_NOT_FOUND) { return false; } throw err; @@ -140,10 +286,10 @@ export class Transaction { * // the transaction exists * ``` */ - get(): Promise { + get(): Promise { return this._db.request( { - path: `/_api/transaction/${encodeURIComponent(this.id)}`, + pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, }, (res) => res.parsedBody.result ); @@ -164,12 +310,12 @@ export class Transaction { * // result indicates the updated transaction status * ``` */ - commit(options: TransactionCommitOptions = {}): Promise { + commit(options: TransactionCommitOptions = {}): Promise { const { allowDirtyRead = undefined } = options; return this._db.request( { method: "PUT", - path: `/_api/transaction/${encodeURIComponent(this.id)}`, + pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, allowDirtyRead, }, (res) => res.parsedBody.result @@ -191,12 +337,12 @@ export class Transaction { * // result indicates the updated transaction status * ``` */ - abort(options: TransactionAbortOptions = {}): Promise { + abort(options: TransactionAbortOptions = {}): Promise { const { allowDirtyRead = undefined } = options; return this._db.request( { method: "DELETE", - path: `/_api/transaction/${encodeURIComponent(this.id)}`, + pathname: `/_api/transaction/${encodeURIComponent(this.id)}`, allowDirtyRead, }, (res) => res.parsedBody.result @@ -391,7 +537,7 @@ export class Transaction { * ``` */ step(callback: () => Promise): Promise { - const conn = (this._db as any)._connection as Connection; + const conn = (this._db as any)._connection as connection.Connection; conn.setTransactionId(this.id); try { const promise = callback(); @@ -406,3 +552,4 @@ export class Transaction { } } } +//#endregion diff --git a/src/users.ts 
b/src/users.ts new file mode 100644 index 000000000..1baebea1e --- /dev/null +++ b/src/users.ts @@ -0,0 +1,106 @@ +/** + * ```ts + * import type { ArangoUser } from "arangojs/users"; + * ``` + * + * The "users" module provides types for ArangoDB users. + * + * @packageDocumentation + */ +import * as collections from "./collections.js"; +import * as databases from "./databases.js"; + +//#region Shared types +/** + * Access level for an ArangoDB user's access to a collection or database. + */ +export type AccessLevel = "rw" | "ro" | "none"; +//#endregion + +//#region User operation options +/** + * Options for modifying an ArangoDB user. + */ +export type UserOptions = { + /** + * Password the ArangoDB user will use for authentication. + */ + passwd: string; + /** + * Whether the ArangoDB user account is enabled and can authenticate. + * + * Default: `true` + */ + active?: boolean; + /** + * Additional information to store about this user. + * + * Default: `{}` + */ + extra?: Record; +}; + +/** + * Options for accessing or manipulating access levels. + */ +export type UserAccessLevelOptions = { + /** + * The database to access or manipulate the access level of. + * + * If `collection` is an `ArangoCollection`, this option defaults to the + * database the collection is contained in. Otherwise this option defaults to + * the current database. + */ + database?: databases.Database | string; + /** + * The collection to access or manipulate the access level of. + */ + collection?: collections.ArangoCollection | string; +}; + +/** + * Database user to create with a database. + */ +export type CreateDatabaseUserOptions = { + /** + * Username of the user to create. + */ + username: string; + /** + * Password of the user to create. + * + * Default: `""` + */ + passwd?: string; + /** + * Whether the user is active. + * + * Default: `true` + */ + active?: boolean; + /** + * Additional data to store with the user object. 
+ */ + extra?: Record; +}; +//#endregion + +//#region User operation results +/** + * Properties of an ArangoDB user object. + */ +export type ArangoUser = { + /** + * ArangoDB username of the user. + */ + user: string; + /** + * Whether the ArangoDB user account is enabled and can authenticate. + */ + active: boolean; + /** + * Additional information to store about this user. + */ + extra: Record; +}; +//#endregion diff --git a/src/view.ts b/src/views.ts similarity index 75% rename from src/view.ts rename to src/views.ts index 05c6bfaaf..9b7f5fa67 100644 --- a/src/view.ts +++ b/src/views.ts @@ -1,30 +1,28 @@ /** * ```ts - * import type { ArangoSearchView } from "arangojs/view.js"; + * import type { ArangoSearchView } from "arangojs/views"; * ``` * - * The "view" module provides View related types and interfaces for TypeScript. + * The "views" module provides View related types and interfaces for + * TypeScript. * * @packageDocumentation */ -import { ArangoApiResponse } from "./connection.js"; -import { Database } from "./database.js"; -import { isArangoError } from "./error.js"; +import * as connections from "./connection.js"; +import * as databases from "./databases.js"; +import * as errors from "./errors.js"; import { VIEW_NOT_FOUND } from "./lib/codes.js"; +//#region Shared types /** - * Indicates whether the given value represents a {@link View}. - * - * @param view - A value that might be a View. + * Sorting direction. Descending or ascending. */ -export function isArangoView(view: any): view is View { - return Boolean(view && view.isArangoView); -} +export type Direction = "desc" | "asc"; /** - * Sorting direction. Descending or ascending. + * Compression for storing data. */ -export type Direction = "desc" | "asc"; +export type Compression = "lz4" | "none"; /** * Policy to consolidate based on segment byte size and live document count as @@ -89,10 +87,12 @@ export type TierConsolidationPolicy = { }; /** - * Compression for storing data. + * Type of a View. 
*/ -export type Compression = "lz4" | "none"; +export type ViewType = ViewDescription["type"]; +//#endregion +//#region CreateViewOptions /** * Options for creating a View. */ @@ -100,152 +100,19 @@ export type CreateViewOptions = | CreateArangoSearchViewOptions | CreateSearchAliasViewOptions; -/** - * Options for replacing a View's properties. - */ -export type ViewPropertiesOptions = - | ArangoSearchViewPropertiesOptions - | SearchAliasViewPropertiesOptions; - -/** - * Options for partially modifying a View's properties. - */ -export type ViewPatchPropertiesOptions = - | ArangoSearchViewPropertiesOptions - | SearchAliasViewPatchPropertiesOptions; - -/** - * A link definition for an ArangoSearch View. - */ -export type ArangoSearchViewLinkOptions = { - /** - * A list of names of Analyzers to apply to values of processed document - * attributes. - * - * Default: `["identity"]` - */ - analyzers?: string[]; - /** - * An object mapping names of attributes to process for each document to - * {@link ArangoSearchViewLinkOptions} definitions. - */ - fields?: Record; - /** - * If set to `true`, all document attributes will be processed, otherwise - * only the attributes in `fields` will be processed. - * - * Default: `false` - */ - includeAllFields?: boolean; - /** - * (Enterprise Edition only.) An object mapping attribute names to - * {@link ArangoSearchViewLinkOptions} definitions to index sub-objects - * stored in an array. - */ - nested?: Record; - /** - * If set to `true`, the position of values in array values will be tracked, - * otherwise all values in an array will be treated as equal alternatives. - */ - trackListPositions?: boolean; - /** - * Controls how the view should keep track of the attribute values. - * - * Default: `"none"` - */ - storeValues?: "none" | "id"; - /** - * If set to `true`, then no exclusive lock is used on the source collection - * during View index creation, so that it remains basically available. 
- * - * Default: `false` - */ - inBackground?: boolean; - /** - * (Enterprise Edition only.) If set to `true`, then field normalization - * values will always be cached in memory. - * - * Default: `false` - */ - cache?: boolean; -}; - -/** - * Options for modifying the properties of an ArangoSearch View. - */ -export type ArangoSearchViewPropertiesOptions = { +type CreateViewOptionsType = { /** - * How many commits to wait between removing unused files. - * - * Default: `2` - */ - cleanupIntervalStep?: number; - /** - * How long to wait between applying the `consolidationPolicy`. - * - * Default: `10000` - */ - consolidationIntervalMsec?: number; - /** - * How long to wait between commiting View data store changes and making - * documents visible to queries. - * - * Default: `1000` - */ - commitIntervalMsec?: number; - /** - * Consolidation policy to apply for selecting which segments should be - * merged. - * - * Default: `{ type: "tier" }` - */ - consolidationPolicy?: TierConsolidationPolicy; - /** - * An object mapping names of linked collections to - * {@link ArangoSearchViewLinkOptions} definitions. - */ - links?: Record>; -}; - -/** - * Options for partially modifying the properties of an ArangoSearch View. - */ -export type ArangoSearchViewPatchPropertiesOptions = - ArangoSearchViewPropertiesOptions; - -/** - * Options for creating a stored value in an ArangoSearch View. - */ -export interface ArangoSearchViewStoredValueOptions { - /** - * Attribute paths for which values should be stored in the view index - * in addition to those used for sorting via `primarySort`. - */ - fields: string[]; - /** - * How the attribute values should be compressed. - * - * Default: `"lz4"` - */ - compression?: Compression; - /** - * (Enterprise Edition only.) If set to `true`, then stored values will - * always be cached in memory. - * - * Default: `false` + * Type of the View. */ - cache?: boolean; -} + type: Type; +} & extra; /** * Options for creating an ArangoSearch View. 
*/ -export type CreateArangoSearchViewOptions = +export type CreateArangoSearchViewOptions = CreateViewOptionsType< + "arangosearch", ArangoSearchViewPropertiesOptions & { - /** - * Type of the View. - */ - type: "arangosearch"; /** * Maximum number of writers cached in the pool. * @@ -274,32 +141,7 @@ export type CreateArangoSearchViewOptions = * If `direction` is set to `"desc"` or `asc` is set to `false`, * the primary sorting order will be descending. */ - primarySort?: ( - | { - /** - * Attribute path for the value of each document to use for - * sorting. - */ - field: string; - /** - * If set to `"asc"`, the primary sorting order will be ascending. - * If set to `"desc"`, the primary sorting order will be descending. - */ - direction: Direction; - } - | { - /** - * Attribute path for the value of each document to use for - * sorting. - */ - field: string; - /** - * If set to `true`, the primary sorting order will be ascending. - * If set to `false`, the primary sorting order will be descending. - */ - asc: boolean; - } - )[]; + primarySort?: CreateArangoSearchViewPrimarySortOptions[]; /** * Compression to use for the primary sort data. * @@ -324,41 +166,107 @@ export type CreateArangoSearchViewOptions = * Attribute paths for which values should be stored in the view index * in addition to those used for sorting via `primarySort`. */ - storedValues?: ArangoSearchViewStoredValueOptions[] | string[] | string[][]; + storedValues?: + | CreateArangoSearchViewStoredValueOptions[] + | string[] + | string[][]; /** * An array of strings defining sort expressions to optimize. */ optimizeTopK?: string[]; - }; + } +>; /** - * Options defining an index used in a SearchAlias View. + * Options for creating a primary sort in an ArangoSearch View. */ -export type SearchAliasViewIndexOptions = { +export type CreateArangoSearchViewPrimarySortOptions = + | { + /** + * Attribute path for the value of each document to use for + * sorting. 
+ */ + field: string; + /** + * If set to `"asc"`, the primary sorting order will be ascending. + * If set to `"desc"`, the primary sorting order will be descending. + */ + direction: Direction; + } + | { + /** + * Attribute path for the value of each document to use for + * sorting. + */ + field: string; + /** + * If set to `true`, the primary sorting order will be ascending. + * If set to `false`, the primary sorting order will be descending. + */ + asc: boolean; + }; + +/** + * Options for creating a stored value in an ArangoSearch View. + */ +export interface CreateArangoSearchViewStoredValueOptions { /** - * Name of a collection. + * Attribute paths for which values should be stored in the view index + * in addition to those used for sorting via `primarySort`. */ - collection: string; + fields: string[]; /** - * Name of an inverted index in the collection. + * How the attribute values should be compressed. + * + * Default: `"lz4"` */ - index: string; -}; + compression?: Compression; + /** + * (Enterprise Edition only.) If set to `true`, then stored values will + * always be cached in memory. + * + * Default: `false` + */ + cache?: boolean; +} /** - * Options for modifying the properties of a SearchAlias View. + * Options for creating a SearchAlias View. */ -export type SearchAliasViewPropertiesOptions = { +export type CreateSearchAliasViewOptions = CreateViewOptionsType< + "search-alias", + SearchAliasViewPropertiesOptions +>; +//#endregion + +//#region UpdateViewPropertiesOptions +/** + * Options for partially modifying a View's properties. + */ +export type UpdateViewPropertiesOptions = + | UpdateArangoSearchViewPropertiesOptions + | UpdateSearchAliasViewPropertiesOptions; + +/** + * Options for partially modifying the properties of an ArangoSearch View. + */ +export type UpdateArangoSearchViewPropertiesOptions = + ArangoSearchViewPropertiesOptions; + +/** + * Options for partially modifying the properties of a SearchAlias View. 
+ */ +export type UpdateSearchAliasViewPropertiesOptions = { /** * An array of inverted indexes to add to the View. */ - indexes: SearchAliasViewIndexOptions[]; + indexes: UpdateSearchAliasViewIndexOptions[]; }; /** * Options defining an index to be modified in a SearchAlias View. */ -export type SearchAliasViewPatchIndexOptions = SearchAliasViewIndexOptions & { +export type UpdateSearchAliasViewIndexOptions = SearchAliasViewIndexOptions & { /** * Whether to add or remove the index. * @@ -367,30 +275,147 @@ export type SearchAliasViewPatchIndexOptions = SearchAliasViewIndexOptions & { operation?: "add" | "del"; }; +//#endregion + +//#region ViewPropertiesOptions /** - * Options for partially modifying the properties of a SearchAlias View. + * Options for replacing a View's properties. + */ +export type ViewPropertiesOptions = + | ArangoSearchViewPropertiesOptions + | SearchAliasViewPropertiesOptions; + +/** + * Options for modifying the properties of an ArangoSearch View. + */ +export type ArangoSearchViewPropertiesOptions = { + /** + * How many commits to wait between removing unused files. + * + * Default: `2` + */ + cleanupIntervalStep?: number; + /** + * How long to wait between applying the `consolidationPolicy`. + * + * Default: `10000` + */ + consolidationIntervalMsec?: number; + /** + * How long to wait between commiting View data store changes and making + * documents visible to queries. + * + * Default: `1000` + */ + commitIntervalMsec?: number; + /** + * Consolidation policy to apply for selecting which segments should be + * merged. + * + * Default: `{ type: "tier" }` + */ + consolidationPolicy?: TierConsolidationPolicy; + /** + * An object mapping names of linked collections to + * {@link ArangoSearchViewLinkOptions} definitions. + */ + links?: Record>; +}; + +/** + * A link definition for an ArangoSearch View. 
*/ -export type SearchAliasViewPatchPropertiesOptions = { +export type ArangoSearchViewLinkOptions = { + /** + * A list of names of Analyzers to apply to values of processed document + * attributes. + * + * Default: `["identity"]` + */ + analyzers?: string[]; + /** + * An object mapping names of attributes to process for each document to + * {@link ArangoSearchViewLinkOptions} definitions. + */ + fields?: Record; + /** + * If set to `true`, all document attributes will be processed, otherwise + * only the attributes in `fields` will be processed. + * + * Default: `false` + */ + includeAllFields?: boolean; + /** + * (Enterprise Edition only.) An object mapping attribute names to + * {@link ArangoSearchViewLinkOptions} definitions to index sub-objects + * stored in an array. + */ + nested?: Record; + /** + * If set to `true`, the position of values in array values will be tracked, + * otherwise all values in an array will be treated as equal alternatives. + */ + trackListPositions?: boolean; + /** + * Controls how the view should keep track of the attribute values. + * + * Default: `"none"` + */ + storeValues?: "none" | "id"; + /** + * If set to `true`, then no exclusive lock is used on the source collection + * during View index creation, so that it remains basically available. + * + * Default: `false` + */ + inBackground?: boolean; + /** + * (Enterprise Edition only.) If set to `true`, then field normalization + * values will always be cached in memory. + * + * Default: `false` + */ + cache?: boolean; +}; + +/** + * Options for modifying the properties of a SearchAlias View. + */ +export type SearchAliasViewPropertiesOptions = { /** * An array of inverted indexes to add to the View. */ - indexes: SearchAliasViewPatchIndexOptions[]; + indexes: SearchAliasViewIndexOptions[]; }; /** - * Options for creating a SearchAlias View. + * Options defining an index used in a SearchAlias View. 
*/ -export type CreateSearchAliasViewOptions = SearchAliasViewPropertiesOptions & { +export type SearchAliasViewIndexOptions = { /** - * Type of the View. + * Name of a collection. + */ + collection: string; + /** + * Name of an inverted index in the collection. */ - type: "search-alias"; + index: string; }; +//#endregion + +//#region ViewDescription +export type ViewDescription = + | ArangoSearchViewDescription + | SearchAliasViewDescription; /** * Generic description of a View. */ -export type GenericViewDescription = { +export type ViewDescriptionType = { + /** + * Type of the View. + */ + type: Type; /** * A globally unique identifier for this View. */ @@ -405,35 +430,16 @@ export type GenericViewDescription = { name: string; }; -export type ViewDescription = - | ArangoSearchViewDescription - | SearchAliasViewDescription; +export type ArangoSearchViewDescription = ViewDescriptionType<"arangosearch">; -export type ArangoSearchViewDescription = GenericViewDescription & { - type: "arangosearch"; -}; - -export type SearchAliasViewDescription = GenericViewDescription & { - type: "search-alias"; -}; +export type SearchAliasViewDescription = ViewDescriptionType<"search-alias">; +//#endregion +//#region ViewProperties export type ViewProperties = | ArangoSearchViewProperties | SearchAliasViewProperties; -/** - * A link definition for an ArangoSearch View. - */ -export type ArangoSearchViewLink = { - analyzers: string[]; - fields: Record; - includeAllFields: boolean; - nested?: Record; - trackListPositions: boolean; - storeValues: "none" | "id"; - cache: boolean; -}; - /** * Properties of an ArangoSearch View. */ @@ -461,24 +467,54 @@ export type ArangoSearchViewProperties = ArangoSearchViewDescription & { optimizeTopK: string[]; }; +/** + * A link definition for an ArangoSearch View. 
+ */ +export type ArangoSearchViewLink = { + analyzers: string[]; + fields: Record; + includeAllFields: boolean; + nested?: Record; + trackListPositions: boolean; + storeValues: "none" | "id"; + cache: boolean; +}; + /** * Properties of a SearchAlias View. */ export type SearchAliasViewProperties = SearchAliasViewDescription & { indexes: { collection: string; index: string }[]; }; +//#endregion + +//#region View class +/** + * Indicates whether the given value represents a {@link View}. + * + * @param view - A value that might be a View. + */ +export function isArangoView(view: any): view is View { + return Boolean(view && view.isArangoView); +} + +// Note: Resist the urge to attempt to create separate ArangoSearchView +// and SearchAliasView classes or interfaces. The requirements for producing +// a meaningful typedoc documentation, providing a nice API in the `Database` +// class and having these as separate interfaces seem to be mutually +// incompatible. /** - * Represents a View in a {@link database.Database}. + * Represents a View in a {@link databases.Database}. 
*/ export class View { protected _name: string; - protected _db: Database; + protected _db: databases.Database; /** * @internal */ - constructor(db: Database, name: string) { + constructor(db: databases.Database, name: string) { this._db = db; this._name = name; } @@ -517,9 +553,9 @@ export class View { * // data contains general information about the View * ``` */ - get(): Promise> { + get(): Promise> { return this._db.request({ - path: `/_api/view/${encodeURIComponent(this._name)}`, + pathname: `/_api/view/${encodeURIComponent(this._name)}`, }); } @@ -539,7 +575,7 @@ export class View { await this.get(); return true; } catch (err: any) { - if (isArangoError(err) && err.errorNum === VIEW_NOT_FOUND) { + if (errors.isArangoError(err) && err.errorNum === VIEW_NOT_FOUND) { return false; } throw err; @@ -549,7 +585,7 @@ export class View { /** * Creates a View with the given `options` and the instance's name. * - * See also {@link database.Database#createView}. + * See also {@link databases.Database#createView}. * * @example * ```js @@ -560,17 +596,17 @@ export class View { * ``` */ create( - options: CreateViewOptions + options: CreateViewOptions, ): Promise< typeof options extends CreateArangoSearchViewOptions - ? ArangoSearchViewDescription - : Options extends CreateSearchAliasViewOptions - ? SearchAliasViewDescription - : ViewDescription + ? ArangoSearchViewDescription + : Options extends CreateSearchAliasViewOptions + ? SearchAliasViewDescription + : ViewDescription > { return this._db.request({ method: "POST", - path: "/_api/view", + pathname: "/_api/view", body: { ...options, name: this._name, @@ -581,8 +617,8 @@ export class View { /** * Renames the View and updates the instance's `name` to `newName`. * - * Additionally removes the instance from the {@link database.Database}'s internal - * cache. + * Additionally removes the instance from the {@link databases.Database}'s + * internal cache. 
* * **Note**: Renaming Views may not be supported when ArangoDB is * running in a cluster configuration. @@ -600,7 +636,9 @@ export class View { * // view1 and view3 represent the same ArangoDB view! * ``` */ - async rename(newName: string): Promise> { + async rename( + newName: string, + ): Promise> { const result = this._db.renameView(this._name, newName); this._name = newName; return result; @@ -617,9 +655,9 @@ export class View { * // data contains the View's properties * ``` */ - properties(): Promise> { + properties(): Promise> { return this._db.request({ - path: `/_api/view/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/view/${encodeURIComponent(this._name)}/properties`, }); } @@ -638,18 +676,18 @@ export class View { * console.log(result.consolidationIntervalMsec); // 234 * ``` */ - updateProperties( - properties?: Properties + updateProperties( + properties?: Properties, ): Promise< - Properties extends ArangoSearchViewPatchPropertiesOptions - ? ArangoSearchViewProperties - : Properties extends SearchAliasViewPatchPropertiesOptions - ? SearchAliasViewProperties - : ViewProperties + Properties extends UpdateArangoSearchViewPropertiesOptions + ? ArangoSearchViewProperties + : Properties extends UpdateSearchAliasViewPropertiesOptions + ? SearchAliasViewProperties + : ViewProperties > { return this._db.request({ method: "PATCH", - path: `/_api/view/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/view/${encodeURIComponent(this._name)}/properties`, body: properties ?? {}, }); } @@ -670,17 +708,17 @@ export class View { * ``` */ replaceProperties( - properties?: Properties + properties?: Properties, ): Promise< Properties extends ArangoSearchViewPropertiesOptions - ? ArangoSearchViewProperties - : Properties extends SearchAliasViewPropertiesOptions - ? SearchAliasViewProperties - : ViewProperties + ? ArangoSearchViewProperties + : Properties extends SearchAliasViewPropertiesOptions + ? 
SearchAliasViewProperties + : ViewProperties > { return this._db.request({ method: "PUT", - path: `/_api/view/${encodeURIComponent(this._name)}/properties`, + pathname: `/_api/view/${encodeURIComponent(this._name)}/properties`, body: properties ?? {}, }); } @@ -701,9 +739,10 @@ export class View { return this._db.request( { method: "DELETE", - path: `/_api/view/${encodeURIComponent(this._name)}`, + pathname: `/_api/view/${encodeURIComponent(this._name)}`, }, - (res) => res.parsedBody.result + (res) => res.parsedBody.result, ); } } +//#endregion diff --git a/src/web.js b/src/web.js deleted file mode 100644 index e7c6d2aaf..000000000 --- a/src/web.js +++ /dev/null @@ -1,25 +0,0 @@ -/* eslint-disable @typescript-eslint/no-var-requires */ -"use strict"; -const { aql } = require("./aql"); -const { CollectionStatus, CollectionType } = require("./collection"); -const { ViewType } = require("./view"); -const { Database } = require("./database"); - -module.exports = exports = arangojs; - -function arangojs(config) { - if (typeof config === "string" || Array.isArray(config)) { - const url = config; - return new Database(url); - } - return new Database(config); -} - -Object.assign(arangojs, { - aql, - arangojs, - CollectionStatus, - CollectionType, - Database, - ViewType, -});