This commit is contained in:
2025-01-04 00:34:03 +01:00
parent 41829408dc
commit 0ca14bbc19
18111 changed files with 1871397 additions and 0 deletions

View File

@@ -0,0 +1,78 @@
/**
* @typedef {Object} DatabaseGetOperation
* @property {Record<string, any>} query A query object which identifies the set of Documents retrieved
* @property {false} [broadcast] Get requests are never broadcast
* @property {boolean} [index] Return indices only instead of full Document records
* @property {string[]} [indexFields] An array of field identifiers which should be indexed
* @property {string|null} [pack=null] A compendium collection ID which contains the Documents
* @property {foundry.abstract.Document|null} [parent=null] A parent Document within which Documents are embedded
* @property {string} [parentUuid] A parent Document UUID provided when the parent instance is unavailable
*/
/**
* @typedef {Object} DatabaseCreateOperation
* @property {boolean} broadcast Whether the database operation is broadcast to other connected clients
* @property {object[]} data An array of data objects from which to create Documents
* @property {boolean} [keepId=false] Retain the _id values of provided data instead of generating new ids
* @property {boolean} [keepEmbeddedIds=true] Retain the _id values of embedded document data instead of generating
* new ids for each embedded document
* @property {number} [modifiedTime] The timestamp when the operation was performed
* @property {boolean} [noHook=false] Block the dispatch of hooks related to this operation
* @property {boolean} [render=true] Re-render Applications whose display depends on the created Documents
* @property {boolean} [renderSheet=false] Render the sheet Application for any created Documents
* @property {foundry.abstract.Document|null} [parent=null] A parent Document within which Documents are embedded
* @property {string|null} pack A compendium collection ID which contains the Documents
* @property {string|null} [parentUuid] A parent Document UUID provided when the parent instance is unavailable
* @property {(string|object)[]} [_result] An alias for 'data' used internally by the server-side backend
*/
/**
* @typedef {Object} DatabaseUpdateOperation
* @property {boolean} broadcast Whether the database operation is broadcast to other connected clients
* @property {object[]} updates An array of data objects used to update existing Documents.
* Each update object must contain the _id of the target Document
* @property {boolean} [diff=true] Difference each update object against current Document data and only use
* differential data for the update operation
* @property {number} [modifiedTime] The timestamp when the operation was performed
* @property {boolean} [recursive=true] Merge objects recursively. If false, inner objects will be replaced
* explicitly. Use with caution!
* @property {boolean} [render=true] Re-render Applications whose display depends on the created Documents
* @property {boolean} [noHook=false] Block the dispatch of hooks related to this operation
* @property {foundry.abstract.Document|null} [parent=null] A parent Document within which Documents are embedded
* @property {string|null} pack A compendium collection ID which contains the Documents
* @property {string|null} [parentUuid] A parent Document UUID provided when the parent instance is unavailable
* @property {(string|object)[]} [_result] An alias for 'updates' used internally by the server-side backend
*
*/
/**
* @typedef {Object} DatabaseDeleteOperation
* @property {boolean} broadcast Whether the database operation is broadcast to other connected clients
* @property {string[]} ids An array of Document ids which should be deleted
* @property {boolean} [deleteAll=false] Delete all documents in the Collection, regardless of _id
* @property {number} [modifiedTime] The timestamp when the operation was performed
* @property {boolean} [noHook=false] Block the dispatch of hooks related to this operation
* @property {boolean} [render=true] Re-render Applications whose display depends on the deleted Documents
* @property {foundry.abstract.Document|null} [parent=null] A parent Document within which Documents are embedded
* @property {string|null} pack A compendium collection ID which contains the Documents
* @property {string|null} [parentUuid] A parent Document UUID provided when the parent instance is unavailable
* @property {(string|object)[]} [_result] An alias for 'ids' used internally by the server-side backend
*/
/**
* @typedef {"get"|"create"|"update"|"delete"} DatabaseAction
*/
/**
* @typedef {DatabaseGetOperation|DatabaseCreateOperation|DatabaseUpdateOperation|DatabaseDeleteOperation} DatabaseOperation
*/
/**
* @typedef {Object} DocumentSocketRequest
* @property {string} type The type of Document being transacted
* @property {DatabaseAction} action The action of the request
* @property {DatabaseOperation} operation Operation parameters for the request
* @property {string} userId The id of the requesting User
* @property {boolean} broadcast Should the response be broadcast to other connected clients?
*/

View File

@@ -0,0 +1,318 @@
import Document from "./document.mjs";
/**
* @typedef {import("./_types.mjs").DatabaseGetOperation} DatabaseGetOperation
* @typedef {import("./_types.mjs").DatabaseCreateOperation} DatabaseCreateOperation
* @typedef {import("./_types.mjs").DatabaseUpdateOperation} DatabaseUpdateOperation
* @typedef {import("./_types.mjs").DatabaseDeleteOperation} DatabaseDeleteOperation
*/
/**
* An abstract base class extended on both the client and server which defines how Documents are retrieved, created,
* updated, and deleted.
* @alias foundry.abstract.DatabaseBackend
* @abstract
*/
export default class DatabaseBackend {
/* -------------------------------------------- */
/* Get Operations */
/* -------------------------------------------- */
/**
* Retrieve Documents based on provided query parameters.
* It is recommended to use CompendiumCollection#getDocuments or CompendiumCollection#getIndex rather
* than calling this method directly.
* @param {typeof Document} documentClass The Document class definition
* @param {DatabaseGetOperation} operation Parameters of the get operation
* @param {BaseUser} [user] The requesting User
* @returns {Promise<Document[]|object[]>} An array of retrieved Document instances or index objects
*/
async get(documentClass, operation, user) {
operation = await this.#configureGet(operation);
return this._getDocuments(documentClass, operation, user);
}
/* -------------------------------------------- */
/**
* Validate and configure the parameters of the get operation.
* @param {DatabaseGetOperation} operation The requested operation
* @returns {Promise<DatabaseGetOperation>} The configured operation
*/
async #configureGet(operation) {
await this.#configureOperation(operation);
operation.broadcast = false; // Get requests are never broadcast
return operation;
}
/* -------------------------------------------- */
/**
* Retrieve Document instances using the specified operation parameters.
* Implemented separately on the client and server; this base implementation is a no-op.
* @param {typeof Document} documentClass The Document class definition
* @param {DatabaseGetOperation} operation Parameters of the get operation
* @param {BaseUser} [user] The requesting User
* @returns {Promise<Document[]|object[]>} An array of retrieved Document instances or index objects
* @abstract
* @internal
* @ignore
*/
async _getDocuments(documentClass, operation, user) {}
/* -------------------------------------------- */
/* Create Operations */
/* -------------------------------------------- */
/**
* Create new Documents using provided data and context.
* It is recommended to use {@link Document.createDocuments} or {@link Document.create} rather than calling this
* method directly.
* @param {typeof Document} documentClass The Document class definition
* @param {DatabaseCreateOperation} operation Parameters of the create operation
* @param {BaseUser} [user] The requesting User
* @returns {Promise<Document[]>} An array of created Document instances
*/
async create(documentClass, operation, user) {
operation = await this.#configureCreate(operation);
return this._createDocuments(documentClass, operation, user);
}
/* -------------------------------------------- */
/**
* Validate and configure the parameters of the create operation.
* @param {DatabaseCreateOperation} operation The requested operation
* @returns {Promise<DatabaseCreateOperation>} The configured operation
* @throws {Error} If operation.data is not an array
*/
async #configureCreate(operation) {
if ( !Array.isArray(operation.data) ) {
throw new Error("The data provided to the DatabaseBackend#create operation must be an array of data objects");
}
await this.#configureOperation(operation);
operation.render ??= true;
operation.renderSheet ??= false;
return operation;
}
/* -------------------------------------------- */
/**
* Create Document instances using provided data and operation parameters.
* Implemented separately on the client and server; this base implementation is a no-op.
* @param {typeof Document} documentClass The Document class definition
* @param {DatabaseCreateOperation} operation Parameters of the create operation
* @param {BaseUser} [user] The requesting User
* @returns {Promise<Document[]>} An array of created Document instances
* @abstract
* @internal
* @ignore
*/
async _createDocuments(documentClass, operation, user) {}
/* -------------------------------------------- */
/* Update Operations */
/* -------------------------------------------- */
/**
* Update Documents using provided data and context.
* It is recommended to use {@link Document.updateDocuments} or {@link Document#update} rather than calling this
* method directly.
* @param {typeof Document} documentClass The Document class definition
* @param {DatabaseUpdateOperation} operation Parameters of the update operation
* @param {BaseUser} [user] The requesting User
* @returns {Promise<Document[]>} An array of updated Document instances
*/
async update(documentClass, operation, user) {
operation = await this.#configureUpdate(operation);
return this._updateDocuments(documentClass, operation, user);
}
/* -------------------------------------------- */
/**
* Validate and configure the parameters of the update operation.
* @param {DatabaseUpdateOperation} operation The requested operation
* @returns {Promise<DatabaseUpdateOperation>} The configured operation
* @throws {Error} If operation.updates is not an array
*/
async #configureUpdate(operation) {
if ( !Array.isArray(operation.updates) ) {
throw new Error("The updates provided to the DatabaseBackend#update operation must be an array of data objects");
}
await this.#configureOperation(operation);
operation.diff ??= true;
operation.recursive ??= true;
operation.render ??= true;
return operation;
}
/* -------------------------------------------- */
/**
* Update Document instances using provided data and operation parameters.
* Implemented separately on the client and server; this base implementation is a no-op.
* @param {typeof Document} documentClass The Document class definition
* @param {DatabaseUpdateOperation} operation Parameters of the update operation
* @param {BaseUser} [user] The requesting User
* @returns {Promise<Document[]>} An array of updated Document instances
* @abstract
* @internal
* @ignore
*/
async _updateDocuments(documentClass, operation, user) {}
/* -------------------------------------------- */
/* Delete Operations */
/* -------------------------------------------- */
/**
* Delete Documents using provided ids and context.
* It is recommended to use {@link foundry.abstract.Document.deleteDocuments} or
* {@link foundry.abstract.Document#delete} rather than calling this method directly.
* @param {typeof Document} documentClass The Document class definition
* @param {DatabaseDeleteOperation} operation Parameters of the delete operation
* @param {BaseUser} [user] The requesting User
* @returns {Promise<Document[]>} An array of deleted Document instances
*/
async delete(documentClass, operation, user) {
operation = await this.#configureDelete(operation);
return this._deleteDocuments(documentClass, operation, user);
}
/* -------------------------------------------- */
/**
* Validate and configure the parameters of the delete operation.
* @param {DatabaseDeleteOperation} operation The requested operation
* @returns {Promise<DatabaseDeleteOperation>} The configured operation
* @throws {Error} If operation.ids is not an array
*/
async #configureDelete(operation) {
if ( !Array.isArray(operation.ids) ) {
throw new Error("The document ids provided to the DatabaseBackend#delete operation must be an array of strings");
}
await this.#configureOperation(operation);
operation.deleteAll ??= false;
operation.render ??= true;
return operation;
}
/* -------------------------------------------- */
/**
* Delete Document instances using provided ids and operation parameters.
* Implemented separately on the client and server; this base implementation is a no-op.
* @param {typeof Document} documentClass The Document class definition
* @param {DatabaseDeleteOperation} operation Parameters of the delete operation
* @param {BaseUser} [user] The requesting User
* @returns {Promise<Document[]>} An array of deleted Document instances
* @abstract
* @internal
* @ignore
*/
async _deleteDocuments(documentClass, operation, user) {}
/* -------------------------------------------- */
/* Helper Methods */
/* -------------------------------------------- */
/**
* Common database operation configuration steps.
* Validates the compendium pack, resolves the parent Document, and stamps the operation time.
* @param {DatabaseOperation} operation The requested operation
* @returns {Promise<void>}
* @throws {Error} If operation.pack is not a known Compendium identifier
*/
async #configureOperation(operation) {
if ( operation.pack && !this.getCompendiumScopes().includes(operation.pack) ) {
throw new Error(`Compendium pack "${operation.pack}" is not a valid Compendium identifier`);
}
operation.parent = await this._getParent(operation);
operation.modifiedTime = Date.now();
}
/* -------------------------------------------- */
/**
* Get the parent Document (if any) associated with a request context.
* A provided parent instance takes precedence over a parentUuid; the latter is resolved via fromUuid.
* @param {DatabaseOperation} operation The requested database operation
* @returns {Promise<Document|null>} The parent Document, or null
* @throws {Error} If operation.parent is provided but is not a Document instance
* @internal
* @ignore
*/
async _getParent(operation) {
if ( operation.parent && !(operation.parent instanceof Document) ) {
throw new Error("A parent Document provided to the database operation must be a Document instance");
}
else if ( operation.parent ) return operation.parent;
if ( operation.parentUuid ) return globalThis.fromUuid(operation.parentUuid, {invalid: true});
return null;
}
/* -------------------------------------------- */
/**
* Describe the scopes which are suitable as the namespace for a flag key
* @returns {string[]}
* @abstract
*/
getFlagScopes() {}
/* -------------------------------------------- */
/**
* Describe the scopes which are suitable as Compendium pack identifiers.
* NOTE(review): the original docstring duplicated the flag-scope description above; corrected here.
* Used by #configureOperation to validate operation.pack.
* @returns {string[]}
* @abstract
*/
getCompendiumScopes() {}
/* -------------------------------------------- */
/**
* Log a database operations message.
* @param {string} level The logging level
* @param {string} message The message
* @abstract
* @protected
*/
_log(level, message) {}
/* -------------------------------------------- */
/**
* Log a database operation for an embedded document, capturing the action taken and relevant IDs
* @param {string} action The action performed
* @param {string} type The document type
* @param {abstract.Document[]} documents The documents modified
* @param {string} [level=info] The logging level
* @param {abstract.Document} [parent] A parent document
* @param {string} [pack] A compendium pack within which the operation occurred
* @protected
*/
_logOperation(action, type, documents, {parent, pack, level="info"}={}) {
let msg = (documents.length === 1) ? `${action} ${type}` : `${action} ${documents.length} ${type} documents`;
// Enumerate ids only for small batches to keep log lines readable
if (documents.length === 1) msg += ` with id [${documents[0].id}]`;
else if (documents.length <= 5) msg += ` with ids: [${documents.map(d => d.id)}]`;
msg += this.#logContext(parent, pack);
this._log(level, msg);
}
/* -------------------------------------------- */
/**
* Construct a standardized error message given the context of an attempted operation
* @param {BaseUser} user The user who attempted the operation
* @param {string} action The action attempted, e.g. "create"
* @param {Document|string} subject The subject of the attempt, a Document or descriptive string
* @param {object} [context] Additional context
* @param {Document} [context.parent] A parent document involved in the operation
* @param {string} [context.pack] A compendium pack involved in the operation
* @returns {string}
* @protected
*/
_logError(user, action, subject, {parent, pack}={}) {
if ( subject instanceof Document ) {
subject = subject.id ? `${subject.documentName} [${subject.id}]` : `a new ${subject.documentName}`;
}
let msg = `User ${user.name} lacks permission to ${action} ${subject}`;
return msg + this.#logContext(parent, pack);
}
/* -------------------------------------------- */
/**
* Determine a string suffix for a log message based on the parent and/or compendium context.
* @param {Document|null} parent
* @param {string|null} pack
* @returns {string}
*/
#logContext(parent, pack) {
let context = "";
if ( parent ) context += ` in parent ${parent.constructor.metadata.name} [${parent.id}]`;
if ( pack ) context += ` in Compendium ${pack}`;
return context;
}
}

View File

@@ -0,0 +1,614 @@
import {deepClone, diffObject, expandObject, flattenObject, getType, isEmpty, mergeObject} from "../utils/helpers.mjs";
import {
DataField,
SchemaField,
EmbeddedDataField,
EmbeddedCollectionField,
ObjectField,
TypeDataField, EmbeddedDocumentField
} from "../data/fields.mjs";
import {DataModelValidationFailure} from "../data/validation-failure.mjs";
/**
* @typedef {Record<string, DataField>} DataSchema
*/
/**
* @typedef {Object} DataValidationOptions
* @property {boolean} [strict=true] Throw an error if validation fails.
* @property {boolean} [fallback=false] Attempt to replace invalid values with valid defaults?
* @property {boolean} [partial=false] Allow partial source data, ignoring absent fields?
* @property {boolean} [dropInvalidEmbedded=false] If true, invalid embedded documents will emit a warning and be
* placed in the invalidDocuments collection rather than causing the
* parent to be considered invalid.
*/
/**
* The abstract base class which defines the data schema contained within a Document.
* @param {object} [data={}] Initial data used to construct the data object. The provided object
* will be owned by the constructed model instance and may be mutated.
* @param {DataValidationOptions} [options={}] Options which affect DataModel construction
* @param {Document} [options.parent] A parent DataModel instance to which this DataModel belongs
* @abstract
*/
export default class DataModel {
constructor(data={}, {parent=null, strict=true, ...options}={}) {
// Parent model: a non-enumerable, non-writable reference, validated via an IIFE so the
// property can be defined in a single expression
Object.defineProperty(this, "parent", {
value: (() => {
if ( parent === null ) return null;
if ( parent instanceof DataModel ) return parent;
throw new Error("The provided parent must be a DataModel instance");
})(),
writable: false,
enumerable: false
});
// Source data: migrated/cleaned/shimmed via _initializeSource, then sealed so that no
// keys may be added or removed afterwards (values remain assignable)
Object.defineProperty(this, "_source", {
value: this._initializeSource(data, {strict, ...options}),
writable: false,
enumerable: false
});
Object.seal(this._source);
// Additional subclass configurations
this._configure(options);
// Data validation and initialization: in non-strict mode, fallback and dropInvalidEmbedded
// default to true so construction degrades gracefully instead of throwing
const fallback = options.fallback ?? !strict;
const dropInvalidEmbedded = options.dropInvalidEmbedded ?? !strict;
this.validate({strict, fallback, dropInvalidEmbedded, fields: true, joint: true});
this._initialize({strict, ...options});
}
/**
* Configure the data model instance before validation and initialization workflows are performed.
* The base implementation is a no-op; subclasses may override it.
* @param {object} [options={}] Options provided to the model constructor
* @protected
*/
_configure(options={}) {}
/* -------------------------------------------- */
/**
* The source data object for this DataModel instance.
* Once constructed, the source object is sealed such that no keys may be added nor removed.
* Defined non-enumerably in the constructor; this declaration documents the member.
* @type {object}
*/
_source;
/**
* The defined and cached Data Schema for all instances of this DataModel.
* Populated lazily by the static schema getter on first access.
* @type {SchemaField}
* @private
*/
static _schema;
/**
* An immutable reverse-reference to a parent DataModel to which this model belongs.
* @type {DataModel|null}
*/
parent;
/* ---------------------------------------- */
/* Data Schema */
/* ---------------------------------------- */
/**
* Define the data schema for documents of this type.
* The schema is populated the first time it is accessed and cached for future reuse.
* @virtual
* @returns {DataSchema}
*/
static defineSchema() {
throw new Error(`The ${this["name"]} subclass of DataModel must define its Document schema`);
}
/* ---------------------------------------- */
/**
* The Data Schema for all instances of this DataModel.
* @type {SchemaField}
*/
static get schema() {
if ( this.hasOwnProperty("_schema") ) return this._schema;
const schema = new SchemaField(Object.freeze(this.defineSchema()));
Object.defineProperty(this, "_schema", {value: schema, writable: false});
return schema;
}
/* ---------------------------------------- */
/**
* Define the data schema for this document instance.
* @type {SchemaField}
*/
get schema() {
return this.constructor.schema;
}
/* ---------------------------------------- */
/**
* Is the current state of this DataModel invalid?
* The model is invalid if there is any unresolved failure.
* @type {boolean}
*/
get invalid() {
return Object.values(this.#validationFailures).some(f => f?.unresolved);
}
/**
* An array of validation failure instances which may have occurred when this instance was last validated.
* @type {{fields: DataModelValidationFailure|null, joint: DataModelValidationFailure|null}}
*/
get validationFailures() {
return this.#validationFailures;
}
// Sealed record: only the "fields" and "joint" slots may ever be reassigned
#validationFailures = Object.seal({fields: null, joint: null });
/**
* A set of localization prefix paths which are used by this DataModel.
* NOTE(review): not referenced within this file; presumably consumed by localization tooling elsewhere.
* @type {string[]}
*/
static LOCALIZATION_PREFIXES = [];
/* ---------------------------------------- */
/* Data Cleaning Methods */
/* ---------------------------------------- */
/**
* Initialize the source data for a new DataModel instance.
* One-time migrations and initial cleaning operations are applied to the source data.
* @param {object|DataModel} data The candidate source data from which the model will be constructed
* @param {object} [options] Options provided to the model constructor
* @returns {object} Migrated and cleaned source data which will be stored to the model instance
* @protected
*/
_initializeSource(data, options={}) {
if ( data instanceof DataModel ) data = data.toObject();
const dt = getType(data);
if ( dt !== "Object" ) {
logger.error(`${this.constructor.name} was incorrectly constructed with a ${dt} instead of an object.
Attempting to fall back to default values.`)
data = {};
}
data = this.constructor.migrateDataSafe(data); // Migrate old data to the new format
data = this.constructor.cleanData(data); // Clean the data in the new format
return this.constructor.shimData(data); // Apply shims which preserve backwards compatibility
}
/* ---------------------------------------- */
/**
* Clean a data source object to conform to a specific provided schema.
* @param {object} [source] The source data object
* @param {object} [options={}] Additional options which are passed to field cleaning methods
* @returns {object} The cleaned source data
*/
static cleanData(source={}, options={}) {
return this.schema.clean(source, options);
}
/* ---------------------------------------- */
/* Data Initialization */
/* ---------------------------------------- */
/**
* A generator that orders the DataFields in the DataSchema into an expected initialization order.
* @returns {Generator<[string,DataField]>}
* @protected
*/
static *_initializationOrder() {
for ( const entry of this.schema.entries() ) yield entry;
}
/* ---------------------------------------- */
/**
* Initialize the instance by copying data from the source object to instance attributes.
* This mirrors the workflow of SchemaField#initialize but with some added functionality.
* Depending on the field, properties are defined as locked ids, read-only values, lazy
* getters, or plain writable attributes.
* @param {object} [options] Options provided to the model constructor
* @protected
*/
_initialize(options={}) {
for ( let [name, field] of this.constructor._initializationOrder() ) {
const sourceValue = this._source[name];
// Field initialization
const value = field.initialize(sourceValue, this, options);
// Special handling for Document IDs: only (re)define _id if it is absent or still null,
// keeping it non-writable but configurable so a real id can replace a null one later
if ( (name === "_id") && (!Object.getOwnPropertyDescriptor(this, "_id") || (this._id === null)) ) {
Object.defineProperty(this, name, {value, writable: false, configurable: true});
}
// Readonly fields: skip if already assigned, otherwise lock the value in place
else if ( field.readonly ) {
if ( this[name] !== undefined ) continue;
Object.defineProperty(this, name, {value, writable: false});
}
// Getter fields: a function-valued initialization becomes a lazy accessor; the empty
// setter silently ignores assignment attempts
else if ( value instanceof Function ) {
Object.defineProperty(this, name, {get: value, set() {}, configurable: true});
}
// Writable fields
else this[name] = value;
}
}
/* ---------------------------------------- */
/**
* Reset the state of this data instance back to mirror the contained source data, erasing any changes.
*/
reset() {
this._initialize();
}
/* ---------------------------------------- */
/**
* Clone a model, creating a new data model by combining current data with provided overrides.
* @param {object} [data={}] Additional data which overrides current model data at the time of creation
* @param {object} [context={}] Context options passed to the data model constructor
* @returns {DataModel|Promise<DataModel>} The cloned model instance (a subclass may return a Promise)
*/
clone(data={}, context={}) {
// inplace merge mutates the fresh toObject() extract, never this model's own source data
data = mergeObject(this.toObject(), data, {insertKeys: false, performDeletions: true, inplace: true});
return new this.constructor(data, {parent: this.parent, ...context});
}
/* ---------------------------------------- */
/* Data Validation Methods */
/* ---------------------------------------- */
/**
* Validate the data contained in the document to check for type and content
* This function throws an error if data within the document is not valid
*
* @param {object} options Optional parameters which customize how validation occurs.
* @param {object} [options.changes] A specific set of proposed changes to validate, rather than the full
* source data of the model.
* @param {boolean} [options.clean=false] If changes are provided, attempt to clean the changes before validating
* them?
* @param {boolean} [options.fallback=false] Allow replacement of invalid values with valid defaults?
* @param {boolean} [options.dropInvalidEmbedded=false] If true, invalid embedded documents will emit a warning and
* be placed in the invalidDocuments collection rather than
* causing the parent to be considered invalid.
* @param {boolean} [options.strict=true] Throw if an invalid value is encountered, otherwise log a warning?
* @param {boolean} [options.fields=true] Perform validation on individual fields?
* @param {boolean} [options.joint] Perform joint validation on the full data model?
* Joint validation will be performed by default if no changes are passed.
* Joint validation will be disabled by default if changes are passed.
* Joint validation can be performed on a complete set of changes (for
* example testing a complete data model) by explicitly passing true.
* @return {boolean} An indicator for whether the document contains valid data
*/
validate({changes, clean=false, fallback=false, dropInvalidEmbedded=false, strict=true, fields=true, joint}={}) {
const source = changes ?? this._source;
this.#validationFailures.fields = this.#validationFailures.joint = null; // Remove any prior failures
// Determine whether we are performing partial or joint validation
const partial = !!changes;
joint = joint ?? !changes;
if ( partial && joint ) {
throw new Error("It is not supported to perform joint data model validation with only a subset of changes");
}
// Optionally clean the data before validating
if ( partial && clean ) this.constructor.cleanData(source, {partial});
// Validate individual fields in the data or in a specific change-set, throwing errors if validation fails
if ( fields ) {
const failure = this.schema.validate(source, {partial, fallback, dropInvalidEmbedded});
if ( failure ) {
const id = this._source._id ? `[${this._source._id}] ` : "";
failure.message = `${this.constructor.name} ${id}validation errors:`;
this.#validationFailures.fields = failure;
// A failure with only resolved (fallback-repaired) issues is recorded but not thrown
if ( strict && failure.unresolved ) throw failure.asError();
else logger.warn(failure.asError());
}
}
// Perform joint document-level validations which consider all fields together
if ( joint ) {
try {
this.schema._validateModel(source); // Validate inner models
this.constructor.validateJoint(source); // Validate this model
} catch (err) {
const id = this._source._id ? `[${this._source._id}] ` : "";
const message = [this.constructor.name, id, `Joint Validation Error:\n${err.message}`].filterJoin(" ");
const failure = new DataModelValidationFailure({message, unresolved: true});
this.#validationFailures.joint = failure;
if ( strict ) throw failure.asError();
else logger.warn(failure.asError());
}
}
return !this.invalid;
}
/* ---------------------------------------- */
/**
* Evaluate joint validation rules which apply validation conditions across multiple fields of the model.
* Field-specific validation rules should be defined as part of the DataSchema for the model.
* This method allows for testing aggregate rules which impose requirements on the overall model.
* The base implementation performs no checks beyond the deprecation shim below.
* @param {object} data Candidate data for the model
* @throws An error if a validation failure is detected
*/
static validateJoint(data) {
/**
* Backwards-compatibility shim: if a subclass still declares the legacy instance method
* _validateModel, warn and delegate to it.
* @deprecated since v11
* @ignore
*/
if ( this.prototype._validateModel instanceof Function ) {
const msg = `${this.name} defines ${this.name}.prototype._validateModel instance method which should now be`
+ ` declared as ${this.name}.validateJoint static method.`
foundry.utils.logCompatibilityWarning(msg, {from: 11, until: 13});
return this.prototype._validateModel.call(this, data);
}
}
/* ---------------------------------------- */
/* Data Management */
/* ---------------------------------------- */
/**
* Update the DataModel locally by applying an object of changes to its source data.
* The provided changes are cleaned, validated, and stored to the source data object for this model.
* The source data is then re-initialized to apply those changes to the prepared data.
* The method returns an object of differential changes which modified the original data.
*
* @param {object} changes New values which should be applied to the data model
* @param {object} [options={}] Options which determine how the new data is merged
* @returns {object} An object containing the changed keys and values
*/
updateSource(changes={}, options={}) {
const schema = this.schema;
const source = this._source;
const _diff = {};
const _backup = {}; // Pre-change values, used to roll back on failure or dryRun
const _collections = this.collections;
const _singletons = this.singletons;
// Expand the object, if dot-notation keys are provided
if ( Object.keys(changes).some(k => /\./.test(k)) ) changes = expandObject(changes);
// Clean and validate the provided changes, throwing an error if any change is invalid
this.validate({changes, clean: true, fallback: options.fallback, strict: true, fields: true, joint: false});
// Update the source data for all fields and validate the final combined model
let error;
try {
DataModel.#updateData(schema, source, changes, {_backup, _collections, _singletons, _diff, ...options});
// Re-validate fields only if a prior failure left the model invalid; always re-check joint rules
this.validate({fields: this.invalid, joint: true, strict: true});
} catch(err) {
error = err;
}
// Restore the backup data so a failed or dry-run update leaves _source untouched
if ( error || options.dryRun ) {
mergeObject(this._source, _backup, { recursive: false });
if ( error ) throw error;
}
// Initialize the updated data
if ( !options.dryRun ) this._initialize();
return _diff;
}
/* ---------------------------------------- */
/**
* Update the source data for a specific DataSchema.
* This method assumes that both source and changes are valid objects.
* Unknown keys and unchanged values are skipped; changed values are backed up and recorded
* in the diff before being delegated to field-specific update logic.
* @param {SchemaField} schema The data schema to update
* @param {object} source Source data to be updated
* @param {object} changes Changes to apply to the source data
* @param {object} [options={}] Options which modify the update workflow
* @returns {object} The updated source data
* @throws An error if the update operation was unsuccessful
* @private
*/
static #updateData(schema, source, changes, options) {
const {_backup, _diff} = options;
for ( let [name, value] of Object.entries(changes) ) {
const field = schema.get(name);
if ( !field ) continue;
// Skip updates where the data is unchanged
const prior = source[name];
if ( (value?.equals instanceof Function) && value.equals(prior) ) continue; // Arrays, Sets, etc...
if ( (prior === value) ) continue; // Direct comparison
_backup[name] = deepClone(prior);
_diff[name] = value;
// Field-specific updating logic
this.#updateField(name, field, source, value, options);
}
return source;
}
/* ---------------------------------------- */
/**
* Update the source data for a specific DataField.
* Dispatches on the field type: embedded collections, embedded documents, inner schemas,
* plain object fields, and finally direct assignment for everything else.
* @param {string} name The field name being updated
* @param {DataField} field The field definition being updated
* @param {object} source The source object being updated
* @param {*} value The new value for the field
* @param {object} options Options which modify the update workflow
* @throws An error if the new candidate value is invalid
* @private
*/
static #updateField(name, field, source, value, options) {
const {dryRun, fallback, recursive, restoreDelta, _collections, _singletons, _diff, _backup} = options;
let current = source?.[name]; // The current value may be null or undefined
// Special Case: Update Embedded Collection
if ( field instanceof EmbeddedCollectionField ) {
_backup[name] = current;
if ( !dryRun ) _collections[name].update(value, {fallback, recursive, restoreDelta});
return;
}
// Special Case: Update Embedded Document — delegate to the singleton's own updateSource
// and discard empty diffs
if ( (field instanceof EmbeddedDocumentField) && _singletons[name] ) {
_diff[name] = _singletons[name].updateSource(value ?? {}, {dryRun, fallback, recursive, restoreDelta});
if ( isEmpty(_diff[name]) ) delete _diff[name];
return;
}
// Special Case: Inner Data Schema
let innerSchema;
if ( (field instanceof SchemaField) || (field instanceof EmbeddedDataField) ) innerSchema = field;
else if ( field instanceof TypeDataField ) {
const cls = field.getModelForType(source.type);
if ( cls ) {
innerSchema = cls.schema;
// In a dry run the inner data is cloned so recursion cannot mutate the real source
if ( dryRun ) {
_backup[name] = current;
current = deepClone(current);
}
}
}
if ( innerSchema && current && value ) {
_diff[name] = {};
const recursiveOptions = {fallback, recursive, _backup: current, _collections, _diff: _diff[name]};
this.#updateData(innerSchema, current, value, recursiveOptions);
if ( isEmpty(_diff[name]) ) delete _diff[name];
}
// Special Case: Object Field — merged in place unless recursion is explicitly disabled
else if ( (field instanceof ObjectField) && current && value && (recursive !== false) ) {
_diff[name] = diffObject(current, value);
mergeObject(current, value, {insertKeys: true, insertValues: true, performDeletions: true});
if ( isEmpty(_diff[name]) ) delete _diff[name];
}
// Standard Case: Update Directly
else source[name] = value;
}
/* ---------------------------------------- */
/* Serialization and Storage */
/* ---------------------------------------- */
/**
* Copy and transform the DataModel into a plain object.
* Draw the values of the extracted object from the data source (by default) otherwise from its transformed values.
* @param {boolean} [source=true] Draw values from the underlying data source rather than transformed values
* @returns {object} The extracted primitive object
*/
toObject(source=true) {
if ( source ) return deepClone(this._source);
// We have use the schema of the class instead of the schema of the instance to prevent an infinite recursion:
// the EmbeddedDataField replaces the schema of its model instance with itself
// and EmbeddedDataField#toObject calls DataModel#toObject.
return this.constructor.schema.toObject(this);
}
/* ---------------------------------------- */
/**
* Extract the source data for the DataModel into a simple object format that can be serialized.
* @returns {object} The document source data expressed as a plain object
*/
toJSON() {
return this.toObject(true);
}
/* -------------------------------------------- */
/**
* Create a new instance of this DataModel from a source record.
* The source is presumed to be trustworthy and is not strictly validated.
* @param {object} source Initial document data which comes from a trusted source.
* @param {DocumentConstructionContext & DataValidationOptions} [context] Model construction context
* @param {boolean} [context.strict=false] Models created from trusted source data are validated non-strictly
* @returns {DataModel}
*/
static fromSource(source, {strict=false, ...context}={}) {
return new this(source, {strict, ...context});
}
/* ---------------------------------------- */
/**
* Create a DataModel instance using a provided serialized JSON string.
* @param {string} json Serialized document data in string format
* @returns {DataModel} A constructed data model instance
*/
static fromJSON(json) {
return this.fromSource(JSON.parse(json))
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* Migrate candidate source data for this DataModel which may require initial cleaning or transformations.
* @param {object} source The candidate source data from which the model will be constructed
* @returns {object} Migrated source data, if necessary
*/
static migrateData(source) {
if ( !source ) return source;
this.schema.migrateSource(source, source);
return source;
}
/* ---------------------------------------- */
/**
* Wrap data migration in a try/catch which attempts it safely
* @param {object} source The candidate source data from which the model will be constructed
* @returns {object} Migrated source data, if necessary
*/
static migrateDataSafe(source) {
try {
this.migrateData(source);
} catch(err) {
err.message = `Failed data migration for ${this.name}: ${err.message}`;
logger.warn(err);
}
return source;
}
/* ---------------------------------------- */
/**
* Take data which conforms to the current data schema and add backwards-compatible accessors to it in order to
* support older code which uses this data.
* @param {object} data Data which matches the current schema
* @param {object} [options={}] Additional shimming options
* @param {boolean} [options.embedded=true] Apply shims to embedded models?
* @returns {object} Data with added backwards-compatible properties
*/
static shimData(data, {embedded=true}={}) {
if ( Object.isSealed(data) ) return data;
const schema = this.schema;
if ( embedded ) {
for ( const [name, value] of Object.entries(data) ) {
const field = schema.get(name);
if ( (field instanceof EmbeddedDataField) && !Object.isSealed(value) ) {
data[name] = field.model.shimData(value || {});
}
else if ( field instanceof EmbeddedCollectionField ) {
for ( const d of (value || []) ) {
if ( !Object.isSealed(d) ) field.model.shimData(d)
}
}
}
}
return data;
}
}
export {DataModel};

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,247 @@
import EmbeddedCollection from "./embedded-collection.mjs";
import {deepClone, randomID} from "../utils/helpers.mjs";
/**
* An embedded collection delta contains delta source objects that can be compared against other objects inside a base
* embedded collection, and generate new embedded Documents by combining them.
*/
export default class EmbeddedCollectionDelta extends EmbeddedCollection {

  /**
   * Maintain a list of IDs that are managed by this collection delta to distinguish from those IDs that are inherited
   * from the base collection.
   * @type {Set<string>}
   */
  #managedIds = new Set();

  /* -------------------------------------------- */

  /**
   * Maintain a list of IDs that are tombstone Documents.
   * @type {Set<string>}
   */
  #tombstones = new Set();

  /* -------------------------------------------- */

  /**
   * A convenience getter to return the corresponding base collection.
   * @type {EmbeddedCollection}
   */
  get baseCollection() {
    return this.model.getBaseCollection?.(this.name);
  }

  /* -------------------------------------------- */

  /**
   * A convenience getter to return the corresponding synthetic collection.
   * @type {EmbeddedCollection}
   */
  get syntheticCollection() {
    return this.model.syntheticActor?.getEmbeddedCollection(this.name);
  }

  /* -------------------------------------------- */

  /** @override */
  createDocument(data, context={}) {
    // Prefer the synthetic Actor as parent when one exists, otherwise the delta model itself
    return new this.documentClass(data, {
      ...context,
      parent: this.model.syntheticActor ?? this.model,
      parentCollection: this.name,
      pack: this.model.pack
    });
  }

  /* -------------------------------------------- */

  /** @override */
  initialize({full=false, ...options} = {}) {
    // Repeat initialization.
    if ( this._initialized && !full ) return;

    // First-time initialization.
    this.clear();
    // Without a base collection there is nothing to merge against yet
    if ( !this.baseCollection ) return;

    // Initialize the deltas: tombstones are only recorded, live deltas become Documents.
    for ( const d of this._source ) {
      if ( d._tombstone ) this.#tombstones.add(d._id);
      else this._initializeDocument(d, options);
      this.#managedIds.add(d._id);
    }

    // Include the Documents from the base collection which are neither overridden nor tombstoned here.
    for ( const d of this.baseCollection._source ) {
      if ( this.has(d._id) || this.isTombstone(d._id) ) continue;
      this._initializeDocument(deepClone(d), options);
    }
    this._initialized = true;
  }

  /* -------------------------------------------- */

  /** @override */
  _initializeDocument(data, context) {
    if ( !data._id ) data._id = randomID(16);
    let doc;
    // Reuse an already-constructed Document from the synthetic collection when available
    if ( this.syntheticCollection ) doc = this.syntheticCollection.get(data._id);
    else {
      try {
        doc = this.createDocument(data, context);
      } catch(err) {
        this._handleInvalidDocument(data._id, err, context);
      }
    }
    // modifySource: false — the source entry already exists; only populate the Map
    if ( doc ) super.set(doc.id, doc, {modifySource: false});
  }

  /* -------------------------------------------- */

  /** @override */
  _createOrUpdate(data, options) {
    if ( options.recursive === false ) {
      // Non-recursive updates may create or resurrect tombstones explicitly
      if ( data._tombstone ) return this.delete(data._id);
      else if ( this.isTombstone(data._id) ) return this.set(data._id, this.createDocument(data));
    }
    // Recursive updates never touch tombstoned entries
    else if ( this.isTombstone(data._id) || data._tombstone ) return;

    let doc = this.get(data._id);
    if ( doc ) doc.updateSource(data, options);
    else doc = this.createDocument(data);
    this.set(doc.id, doc);
  }

  /* -------------------------------------------- */

  /**
   * Determine whether a given ID is managed directly by this collection delta or inherited from the base collection.
   * @param {string} key  The Document ID.
   * @returns {boolean}
   */
  manages(key) {
    return this.#managedIds.has(key);
  }

  /* -------------------------------------------- */

  /**
   * Determine whether a given ID exists as a tombstone Document in the collection delta.
   * @param {string} key  The Document ID.
   * @returns {boolean}
   */
  isTombstone(key) {
    return this.#tombstones.has(key);
  }

  /* -------------------------------------------- */

  /**
   * Restore a Document so that it is no longer managed by the collection delta and instead inherits from the base
   * Document.
   * @param {string} id            The Document ID.
   * @returns {Promise<Document>}  The restored Document.
   */
  async restoreDocument(id) {
    const docs = await this.restoreDocuments([id]);
    return docs.shift();
  }

  /* -------------------------------------------- */

  /**
   * Restore the given Documents so that they are no longer managed by the collection delta and instead inherit directly
   * from their counterparts in the base Actor.
   * @param {string[]} ids           The IDs of the Documents to restore.
   * @returns {Promise<Document[]>}  An array of updated Document instances.
   */
  async restoreDocuments(ids) {
    if ( !this.model.syntheticActor ) return [];
    const baseActor = this.model.parent.baseActor;
    const embeddedName = this.documentClass.documentName;

    // Partition the managed IDs into live deltas and tombstones, capturing the base Actor's version of each
    const {deltas, tombstones} = ids.reduce((obj, id) => {
      if ( !this.manages(id) ) return obj;
      const doc = baseActor.getEmbeddedCollection(this.name).get(id);
      if ( this.isTombstone(id) ) obj.tombstones.push(doc.toObject());
      else obj.deltas.push(doc.toObject());
      return obj;
    }, {deltas: [], tombstones: []});

    // For the benefit of downstream CRUD workflows, we emulate events from the perspective of the synthetic Actor.
    // Restoring an Item to the version on the base Actor is equivalent to updating that Item on the synthetic Actor
    // with the version of the Item on the base Actor.
    // Restoring an Item that has been deleted on the synthetic Actor is equivalent to creating a new Item on the
    // synthetic Actor with the contents of the version on the base Actor.
    // On the ActorDelta, those Items are removed from this collection delta so that they are once again 'linked' to the
    // base Actor's Item, as though they had never been modified from the original in the first place.
    let updated = [];
    if ( deltas.length ) {
      updated = await this.model.syntheticActor.updateEmbeddedDocuments(embeddedName, deltas, {
        diff: false, recursive: false, restoreDelta: true
      });
    }
    let created = [];
    if ( tombstones.length ) {
      created = await this.model.syntheticActor.createEmbeddedDocuments(embeddedName, tombstones, {
        keepId: true, restoreDelta: true
      });
    }
    return updated.concat(created);
  }

  /* -------------------------------------------- */

  /** @inheritdoc */
  set(key, value, options={}) {
    // Mirror every write into the synthetic collection so the synthetic Actor stays in sync
    super.set(key, value, options);
    this.syntheticCollection?.set(key, value, options);
  }

  /* -------------------------------------------- */

  /** @override */
  _set(key, value, {restoreDelta=false}={}) {
    // A restore removes the delta entry entirely so the base collection's version shows through again
    if ( restoreDelta ) {
      this._source.findSplice(entry => entry._id === key);
      this.#managedIds.delete(key);
      this.#tombstones.delete(key);
      return;
    }
    if ( this.manages(key) ) this._source.findSplice(d => d._id === key, value._source);
    else this._source.push(value._source);
    this.#managedIds.add(key);
  }

  /* -------------------------------------------- */

  /** @inheritdoc */
  delete(key, options={}) {
    // Mirror every deletion into the synthetic collection so the synthetic Actor stays in sync
    super.delete(key, options);
    this.syntheticCollection?.delete(key, options);
  }

  /* -------------------------------------------- */

  /** @override */
  _delete(key, {restoreDelta=false}={}) {
    if ( !this.baseCollection ) return;

    // Remove the document from this collection, if it exists.
    if ( this.manages(key) ) {
      this._source.findSplice(entry => entry._id === key);
      this.#managedIds.delete(key);
      this.#tombstones.delete(key);
    }

    // If the document exists in the base collection, push a tombstone in its place.
    if ( !restoreDelta && this.baseCollection.has(key) ) {
      this._source.push({_id: key, _tombstone: true});
      this.#managedIds.add(key);
      this.#tombstones.add(key);
    }
  }
}

View File

@@ -0,0 +1,305 @@
import Collection from "../utils/collection.mjs";
import {randomID} from "../utils/helpers.mjs";
/**
* An extension of the Collection.
* Used for the specific task of containing embedded Document instances within a parent Document.
*/
export default class EmbeddedCollection extends Collection {
  /**
   * @param {string} name           The name of this collection in the parent Document.
   * @param {DataModel} parent      The parent DataModel instance to which this collection belongs.
   * @param {object[]} sourceArray  The source data array for the collection in the parent Document data.
   */
  constructor(name, parent, sourceArray) {
    // Guard against the pre-v11 (sourceArray, documentClass) call signature
    if ( typeof name !== "string" ) throw new Error("The signature of EmbeddedCollection has changed in v11.");
    super();
    // These structural properties are fixed for the lifetime of the collection
    Object.defineProperties(this, {
      _source: {value: sourceArray, writable: false},
      documentClass: {value: parent.constructor.hierarchy[name].model, writable: false},
      name: {value: name, writable: false},
      model: {value: parent, writable: false}
    });
  }

  /**
   * The Document implementation used to construct instances within this collection.
   * @type {typeof foundry.abstract.Document}
   */
  documentClass;

  /**
   * The name of this collection in the parent Document.
   * @type {string}
   */
  name;

  /**
   * The parent DataModel to which this EmbeddedCollection instance belongs.
   * @type {DataModel}
   */
  model;

  /**
   * Has this embedded collection been initialized as a one-time workflow?
   * @type {boolean}
   * @protected
   */
  _initialized = false;

  /**
   * The source data array from which the embedded collection is created
   * @type {object[]}
   * @private
   */
  _source;

  /**
   * Record the set of document ids where the Document was not initialized because of invalid source data
   * @type {Set<string>}
   */
  invalidDocumentIds = new Set();

  /* -------------------------------------------- */

  /**
   * Instantiate a Document for inclusion in the Collection.
   * @param {object} data                         The Document data.
   * @param {DocumentConstructionContext} [context]  Document creation context.
   * @returns {Document}
   */
  createDocument(data, context={}) {
    return new this.documentClass(data, {
      ...context,
      parent: this.model,
      parentCollection: this.name,
      pack: this.model.pack
    });
  }

  /* -------------------------------------------- */

  /**
   * Initialize the EmbeddedCollection object by constructing its contained Document instances
   * @param {DocumentConstructionContext} [options]  Initialization options.
   */
  initialize(options={}) {
    // Repeat initialization: re-initialize the existing Documents rather than rebuilding the collection
    if ( this._initialized ) {
      for ( const doc of this ) doc._initialize(options);
      return;
    }

    // First-time initialization
    this.clear();
    for ( const d of this._source ) this._initializeDocument(d, options);
    this._initialized = true;
  }

  /* -------------------------------------------- */

  /**
   * Initialize an embedded document and store it in the collection.
   * @param {object} data                            The Document data.
   * @param {DocumentConstructionContext} [context]  Context to configure Document initialization.
   * @protected
   */
  _initializeDocument(data, context) {
    // Source entries without an _id are assigned one in place
    if ( !data._id ) data._id = randomID(16);
    let doc;
    try {
      doc = this.createDocument(data, context);
      super.set(doc.id, doc);
    } catch(err) {
      this._handleInvalidDocument(data._id, err, context);
    }
  }

  /* -------------------------------------------- */

  /**
   * Log warnings or errors when a Document is found to be invalid.
   * @param {string} id                      The invalid Document's ID.
   * @param {Error} err                      The validation error.
   * @param {object} [options]               Options to configure invalid Document handling.
   * @param {boolean} [options.strict=true]  Whether to throw an error or only log a warning.
   * @protected
   */
  _handleInvalidDocument(id, err, {strict=true}={}) {
    const docName = this.documentClass.documentName;
    const parent = this.model;
    this.invalidDocumentIds.add(id);

    // Wrap the error with more information
    const uuid = `${parent.uuid}.${docName}.${id}`;
    const msg = `Failed to initialize ${docName} [${uuid}]:\n${err.message}`;
    const error = new Error(msg, {cause: err});

    if ( strict ) globalThis.logger.error(error);
    else globalThis.logger.warn(error);
    if ( globalThis.Hooks && strict ) {
      Hooks.onError(`${this.constructor.name}#_initializeDocument`, error, {id, documentName: docName});
    }
  }

  /* -------------------------------------------- */

  /**
   * Get an element from the EmbeddedCollection by its ID.
   * @param {string} id                        The ID of the Embedded Document to retrieve.
   * @param {object} [options]                 Additional options to configure retrieval.
   * @param {boolean} [options.strict=false]   Throw an Error if the requested Embedded Document does not exist.
   * @param {boolean} [options.invalid=false]  Allow retrieving an invalid Embedded Document.
   * @returns {Document}
   * @throws If strict is true and the Embedded Document cannot be found.
   */
  get(id, {invalid=false, strict=false}={}) {
    let result = super.get(id);
    if ( !result && invalid ) result = this.getInvalid(id, { strict: false });
    // NOTE(review): this.constructor.documentName appears undefined for this class —
    // this.documentClass.documentName seems intended; confirm before relying on this message text.
    if ( !result && strict ) throw new Error(`${this.constructor.documentName} id [${id}] does not exist in the `
      + `${this.constructor.name} collection.`);
    return result;
  }

  /* ---------------------------------------- */

  /**
   * Add an item to the collection.
   * @param {string} key                           The embedded Document ID.
   * @param {Document} value                       The embedded Document instance.
   * @param {object} [options]                     Additional options to the set operation.
   * @param {boolean} [options.modifySource=true]  Whether to modify the collection's source as part of the operation.
   * */
  set(key, value, {modifySource=true, ...options}={}) {
    if ( modifySource ) this._set(key, value, options);
    return super.set(key, value);
  }

  /* -------------------------------------------- */

  /**
   * Modify the underlying source array to include the Document.
   * @param {string} key      The Document ID key.
   * @param {Document} value  The Document.
   * @protected
   */
  _set(key, value) {
    // Replace an existing (or previously-invalid) entry in place, otherwise append
    if ( this.has(key) || this.invalidDocumentIds.has(key) ) this._source.findSplice(d => d._id === key, value._source);
    else this._source.push(value._source);
  }

  /* ---------------------------------------- */

  /**
   * @param {string} key                           The embedded Document ID.
   * @param {object} [options]                     Additional options to the delete operation.
   * @param {boolean} [options.modifySource=true]  Whether to modify the collection's source as part of the operation.
   * */
  delete(key, {modifySource=true, ...options}={}) {
    if ( modifySource ) this._delete(key, options);
    return super.delete(key);
  }

  /* -------------------------------------------- */

  /**
   * Remove the value from the underlying source array.
   * @param {string} key        The Document ID key.
   * @param {object} [options]  Additional options to configure deletion behavior.
   * @protected
   */
  _delete(key, options={}) {
    if ( this.has(key) || this.invalidDocumentIds.has(key) ) this._source.findSplice(d => d._id === key);
  }

  /* ---------------------------------------- */

  /**
   * Update an EmbeddedCollection using an array of provided document data.
   * @param {DataModel[]} changes   An array of provided Document data
   * @param {object} [options={}]   Additional options which modify how the collection is updated
   */
  update(changes, options={}) {
    const updated = new Set();

    // Create or update documents within the collection
    for ( let data of changes ) {
      if ( !data._id ) data._id = randomID(16);
      this._createOrUpdate(data, options);
      updated.add(data._id);
    }

    // If the update was not recursive, remove all non-updated documents
    if ( options.recursive === false ) {
      for ( const id of this._source.map(d => d._id) ) {
        if ( !updated.has(id) ) this.delete(id, options);
      }
    }
  }

  /* -------------------------------------------- */

  /**
   * Create or update an embedded Document in this collection.
   * @param {DataModel} data        The update delta.
   * @param {object} [options={}]   Additional options which modify how the collection is updated.
   * @protected
   */
  _createOrUpdate(data, options) {
    const current = this.get(data._id);
    if ( current ) current.updateSource(data, options);
    else {
      const doc = this.createDocument(data);
      this.set(doc.id, doc);
    }
  }

  /* ---------------------------------------- */

  /**
   * Obtain a temporary Document instance for a document id which currently has invalid source data.
   * @param {string} id                      A document ID with invalid source data.
   * @param {object} [options]               Additional options to configure retrieval.
   * @param {boolean} [options.strict=true]  Throw an Error if the requested ID is not in the set of invalid IDs for
   *                                         this collection.
   * @returns {Document}                     An in-memory instance for the invalid Document
   * @throws If strict is true and the requested ID is not in the set of invalid IDs for this collection.
   */
  getInvalid(id, {strict=true}={}) {
    if ( !this.invalidDocumentIds.has(id) ) {
      // NOTE(review): this.constructor.documentName may be undefined here — see note in #get; confirm intended.
      if ( strict ) throw new Error(`${this.constructor.documentName} id [${id}] is not in the set of invalid ids`);
      return;
    }
    const data = this._source.find(d => d._id === id);
    // NOTE(review): uses the global foundry.utils.deepClone rather than an imported helper — confirm intentional.
    return this.documentClass.fromSource(foundry.utils.deepClone(data), {parent: this.model});
  }

  /* ---------------------------------------- */

  /**
   * Convert the EmbeddedCollection to an array of simple objects.
   * @param {boolean} [source=true]  Draw data for contained Documents from the underlying data source?
   * @returns {object[]}             The extracted array of primitive objects
   */
  toObject(source=true) {
    const arr = [];
    for ( let doc of this.values() ) {
      arr.push(doc.toObject(source));
    }
    return arr;
  }

  /* -------------------------------------------- */

  /**
   * Follow-up actions to take when a database operation modifies Documents in this EmbeddedCollection.
   * No-op hook intended for subclasses; intentionally left empty here.
   * @param {DatabaseAction} action                   The database action performed
   * @param {foundry.abstract.Document[]} documents   The array of modified Documents
   * @param {any[]} result                            The result of the database operation
   * @param {DatabaseOperation} operation             Database operation details
   * @param {foundry.documents.BaseUser} user         The User who performed the operation
   * @internal
   */
  _onModifyContents(action, documents, result, operation, user) {}
}

View File

@@ -0,0 +1,9 @@
export * as types from "./_types.mjs";
export {default as DataModel} from "./data.mjs";
export {default as TypeDataModel} from "./type-data.mjs";
export {default as Document} from "./document.mjs";
export {default as DocumentSocketResponse} from "./socket.mjs";
export {default as DatabaseBackend} from "./backend.mjs";
export {default as EmbeddedCollection} from "./embedded-collection.mjs";
export {default as EmbeddedCollectionDelta} from "./embedded-collection-delta.mjs";
export {default as SingletonEmbeddedCollection} from "./singleton-collection.mjs";

View File

@@ -0,0 +1,32 @@
import EmbeddedCollection from "./embedded-collection.mjs";
/**
 * This class provides a {@link Collection} wrapper around a singleton embedded Document so that it can be interacted
 * with via a common interface.
 */
export default class SingletonEmbeddedCollection extends EmbeddedCollection {

  /** @inheritdoc */
  set(key, value) {
    // At most one embedded Document may exist: reject any second Document with a different id
    const wouldAddSecond = this.size && !this.has(key);
    if ( wouldAddSecond ) {
      const embeddedName = this.documentClass.documentName;
      const parentName = this.model.documentName;
      const msg = `Cannot create singleton embedded ${embeddedName} [${key}] in parent ${parentName} `
        + `[${this.model.id}] as it already has one assigned.`;
      throw new Error(msg);
    }
    return super.set(key, value);
  }

  /* -------------------------------------------- */

  /** @override */
  _set(key, value) {
    // The singleton's source lives directly on the parent model rather than in a source array
    this.model._source[this.name] = value?._source ?? null;
  }

  /* -------------------------------------------- */

  /** @override */
  _delete(key) {
    // Deleting the singleton nulls the parent model's source entry
    this.model._source[this.name] = null;
  }
}

View File

@@ -0,0 +1,64 @@
/**
* @typedef {import("./_types.mjs").DatabaseAction} DatabaseAction
* @typedef {import("./_types.mjs").DatabaseOperation} DatabaseOperation
* @typedef {import("./_types.mjs").DocumentSocketRequest} DocumentSocketRequest
*/
/**
 * The data structure of a modifyDocument socket response.
 * @alias foundry.abstract.DocumentSocketResponse
 */
export default class DocumentSocketResponse {
  /**
   * Prepare a response for an incoming request.
   * Only keys which correspond to declared response fields are copied from the request; other keys are ignored.
   * @param {DocumentSocketRequest} request   The incoming request that is being responded to
   */
  constructor(request) {
    for ( const [k, v] of Object.entries(request) ) {
      // Object.hasOwn avoids invoking a potentially shadowed hasOwnProperty method on the instance
      if ( Object.hasOwn(this, k) ) this[k] = v;
    }
  }

  /**
   * The type of Document being transacted.
   * @type {string}
   */
  type;

  /**
   * The database action that was performed.
   * @type {DatabaseAction}
   */
  action;

  /**
   * Was this response broadcast to other connected clients?
   * @type {boolean}
   */
  broadcast;

  /**
   * The database operation that was requested.
   * @type {DatabaseOperation}
   */
  operation;

  /**
   * The identifier of the requesting user.
   * @type {string}
   */
  userId;

  /**
   * The result of the request. Present if successful
   * @type {object[]|string[]}
   */
  result;

  /**
   * An error that occurred. Present if unsuccessful
   * @type {Error}
   */
  error;
}

View File

@@ -0,0 +1,204 @@
import DataModel from "./data.mjs";
import {TypeDataField} from "../data/fields.mjs";
/**
* A specialized subclass of DataModel, intended to represent a Document's type-specific data.
* Systems or Modules that provide DataModel implementations for sub-types of Documents (such as Actors or Items)
* should subclass this class instead of the base DataModel class.
*
* @see {@link Document}
* @extends {DataModel}
* @abstract
*
* @example Registering a custom sub-type for a Module.
*
* **module.json**
* ```json
* {
* "id": "my-module",
* "esmodules": ["main.mjs"],
* "documentTypes": {
* "Actor": {
* "sidekick": {},
* "villain": {}
* },
* "JournalEntryPage": {
* "dossier": {},
* "quest": {
* "htmlFields": ["description"]
* }
* }
* }
* }
* ```
*
* **main.mjs**
* ```js
* Hooks.on("init", () => {
* Object.assign(CONFIG.Actor.dataModels, {
* "my-module.sidekick": SidekickModel,
* "my-module.villain": VillainModel
* });
* Object.assign(CONFIG.JournalEntryPage.dataModels, {
* "my-module.dossier": DossierModel,
* "my-module.quest": QuestModel
* });
* });
*
* class QuestModel extends foundry.abstract.TypeDataModel {
* static defineSchema() {
* const fields = foundry.data.fields;
* return {
* description: new fields.HTMLField({required: false, blank: true, initial: ""}),
* steps: new fields.ArrayField(new fields.StringField())
* };
* }
*
* prepareDerivedData() {
* this.totalSteps = this.steps.length;
* }
* }
* ```
*/
export default class TypeDataModel extends DataModel {

  /** @inheritdoc */
  constructor(data={}, options={}) {
    super(data, options);

    /**
     * The package that is providing this DataModel for the given sub-type.
     * @type {System|Module|null}
     */
    Object.defineProperty(this, "modelProvider", {value: TypeDataField.getModelProvider(this), writable: false});
  }

  /**
   * A set of localization prefix paths which are used by this data model.
   * @type {string[]}
   */
  static LOCALIZATION_PREFIXES = [];

  /* ---------------------------------------- */

  /** @override */
  static get schema() {
    // Return the schema already cached on this exact class, if any
    if ( this.hasOwnProperty("_schema") ) return this._schema;
    const schema = super.schema;
    // Sub-type data is always namespaced under "system" on the parent Document
    schema.name = "system";
    return schema;
  }

  /* -------------------------------------------- */

  /**
   * Prepare data related to this DataModel itself, before any derived data is computed.
   * No-op hook intended for subclasses to override.
   *
   * Called before {@link ClientDocument#prepareBaseData} in {@link ClientDocument#prepareData}.
   */
  prepareBaseData() {}

  /* -------------------------------------------- */

  /**
   * Apply transformations of derivations to the values of the source data object.
   * Compute data fields whose values are not stored to the database.
   * No-op hook intended for subclasses to override.
   *
   * Called before {@link ClientDocument#prepareDerivedData} in {@link ClientDocument#prepareData}.
   */
  prepareDerivedData() {}

  /* -------------------------------------------- */

  /**
   * Convert this Document to some HTML display for embedding purposes.
   * The base implementation declines to embed; subclasses may return rendered content.
   * @param {DocumentHTMLEmbedConfig} config  Configuration for embedding behavior.
   * @param {EnrichmentOptions} [options]     The original enrichment options for cases where the Document embed content
   *                                          also contains text that must be enriched.
   * @returns {Promise<HTMLElement|HTMLCollection|null>}
   */
  async toEmbed(config, options={}) {
    return null;
  }

  /* -------------------------------------------- */
  /*  Database Operations                         */
  /* -------------------------------------------- */

  /**
   * Called by {@link ClientDocument#_preCreate}.
   *
   * @param {object} data                The initial data object provided to the document creation request
   * @param {object} options             Additional options which modify the creation request
   * @param {documents.BaseUser} user    The User requesting the document creation
   * @returns {Promise<boolean|void>}    Return false to exclude this Document from the creation operation
   * @internal
   */
  async _preCreate(data, options, user) {}

  /* -------------------------------------------- */

  /**
   * Called by {@link ClientDocument#_onCreate}.
   *
   * @param {object} data      The initial data object provided to the document creation request
   * @param {object} options   Additional options which modify the creation request
   * @param {string} userId    The id of the User requesting the document update
   * @protected
   * @internal
   */
  _onCreate(data, options, userId) {}

  /* -------------------------------------------- */

  /**
   * Called by {@link ClientDocument#_preUpdate}.
   *
   * @param {object} changes            The candidate changes to the Document
   * @param {object} options            Additional options which modify the update request
   * @param {documents.BaseUser} user   The User requesting the document update
   * @returns {Promise<boolean|void>}   A return value of false indicates the update operation should be cancelled.
   * @protected
   * @internal
   */
  async _preUpdate(changes, options, user) {}

  /* -------------------------------------------- */

  /**
   * Called by {@link ClientDocument#_onUpdate}.
   *
   * @param {object} changed   The differential data that was changed relative to the documents prior values
   * @param {object} options   Additional options which modify the update request
   * @param {string} userId    The id of the User requesting the document update
   * @protected
   * @internal
   */
  _onUpdate(changed, options, userId) {}

  /* -------------------------------------------- */

  /**
   * Called by {@link ClientDocument#_preDelete}.
   *
   * @param {object} options            Additional options which modify the deletion request
   * @param {documents.BaseUser} user   The User requesting the document deletion
   * @returns {Promise<boolean|void>}   A return value of false indicates the deletion operation should be cancelled.
   * @protected
   * @internal
   */
  async _preDelete(options, user) {}

  /* -------------------------------------------- */

  /**
   * Called by {@link ClientDocument#_onDelete}.
   *
   * @param {object} options   Additional options which modify the deletion request
   * @param {string} userId    The id of the User requesting the document update
   * @protected
   * @internal
   */
  _onDelete(options, userId) {}
}