Initial
resources/app/dist/database/backend/compendium-folder.mjs (vendored, new file, +1)
@@ -0,0 +1 @@
export default function ServerCompendiumFolderMixin(e){return class extends db.Folder{static _compendium=e;static{Object.defineProperty(this,"_db",{get:()=>e._db})}static get collectionName(){return"folders"}static get sublevel(){return this._db.sublevels.folders}get compendium(){return this.constructor._compendium}static metadata=(()=>foundry.utils.mergeObject(super.metadata,{permissions:{create:this.#e.bind(this),update:this.#e.bind(this),delete:this.#e.bind(this)}},{inplace:!1}))();static async getMany(e,t={}){const i=[];for(let o of e)i.push(await this._compendium.getFolder(o,t));return i}static fromSource(e,t={}){return t.pack=this._compendium.collectionName,super.fromSource(e,t)}static#e(e,t,i){if(((game.compendiumConfiguration||{})[t.pack]||{}).locked??"world"!==t.compendium.package.type)throw new Error(`You may not modify the ${t.pack} Compendium which is currently locked.`);return db.packs.get(t.pack).isOwner(e)}}}
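Note: the sketch below is not part of the commit; it illustrates how the folder mixin above is applied. The name "pack" is a placeholder for a connected server compendium class, since the mixin reads _db and collectionName from whatever class it is handed.

import ServerCompendiumFolderMixin from "./compendium-folder.mjs";

// "pack" is assumed to expose a _db LevelDatabase with a "folders" sublevel,
// which is what the mixin's _db getter and sublevel accessor forward to.
const PackFolder = ServerCompendiumFolderMixin(pack);
const folders = await PackFolder.getMany(["folderIdA", "folderIdB"]); // each id is resolved via pack.getFolder()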
resources/app/dist/database/backend/embedded-delta.mjs (vendored, new file, +1)
@@ -0,0 +1 @@
export default function EmbeddedDeltaMixin(e){return class extends e{async _preUpdate(e,t,s){if(!1===await super._preUpdate(e,t,s))return!1;const i=this.parent.getEmbeddedCollection(this.parentCollection);i.manages(this.id)||i.set(this.id,this)}batchWrite(e,{restoreDelta:t=!1,...s}={}){const i=this.parent.getEmbeddedCollection(this.parentCollection);t&&!i.manages(this.id)?super.batchDelete(e):super.batchWrite(e,s)}batchDelete(e,{restoreDelta:t=!1}={}){const s=this.parent.getEmbeddedCollection(this.parentCollection);if(!t&&s.isTombstone(this.id)){const t=new foundry.data.TombstoneData({_id:this.id}),{dbKey:s,sublevelName:i}=this;this.constructor.batchWrite(t.toObject(),e,{dbKey:s,sublevelName:i})}else super.batchDelete(e)}}}
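Note: an illustrative composition, not taken from the commit. ServerItem is a placeholder for the embedded server document class this mixin would wrap; the commit does not show the call site.

import EmbeddedDeltaMixin from "./embedded-delta.mjs";

// Batch operations are routed through the parent's embedded collection: batchWrite() with
// restoreDelta: true deletes entries the collection does not manage, and batchDelete()
// writes a foundry.data.TombstoneData record instead of removing the entry when the
// collection marks the id as a tombstone.
class DeltaItem extends EmbeddedDeltaMixin(ServerItem) {}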
resources/app/dist/database/backend/level-database.mjs (vendored, new file, +1)
@@ -0,0 +1 @@
import fs from"node:fs";import{ClassicLevel}from"classic-level";import SublevelDatabase from"./sublevel-database.mjs";import Semaphore from"../../../common/utils/semaphore.mjs";export default class LevelDatabase extends ClassicLevel{constructor(e,a,{sublevels:t=[],...s}={}){if(!e||!a)throw new Error("You must provide a unique database name and file path location");if(LevelDatabase.#e.has(e))throw new Error(`The database "${e}" is already open and cannot be re-created.`);s.keyEncoding="utf8",s.valueEncoding="json",super(a,s),this.#a=e,LevelDatabase.#e.set(e,this),this.setMaxListeners(Math.max(10,t.length+1));const n={keyEncoding:s.keyEncoding,valueEncoding:s.valueEncoding};for(const e of t)this.#t[e]=this.sublevel(e,n)}static async connect(e,a,{allowRepair:t=!0,...s}={}){const n=new this(e,a,{passive:!0,createIfMissing:!0,...s});try{await n.open(),await n.keys().all(),global.logger.info(`Connected to database "${e}"`)}catch(i){if(await n.close(),i.message=`Failed to connect to database "${e}": ${i.message}`,t)return logger.error(i),LevelDatabase.#s(e,a,s);throw i}return n}static async#s(e,a,t){return logger.warn(`FoundryVTT | Attempting database repair for ${a}`),await this.repair(a),logger.warn(`FoundryVTT | Repair of ${a} complete. Attempting re-connection`),LevelDatabase.connect(e,a,{allowRepair:!1,...t})}semaphore=new Semaphore(1);get name(){return this.#a}#a;get sublevels(){return this.#t}#t={};static get databases(){return LevelDatabase.#e}static#e=new Map;static formatKey(...e){return e.join(".")}async close(...e){if(LevelDatabase.#e.delete(this.#a),"open"===this.status)try{await this.compactFull()}catch(e){e.message=`Unable to compact database ${this.location}: ${e.message}`,logger.error(e)}return super.close(...e)}async clone(e,a){if(this.constructor.databases.has(e)||a===this.location)throw new Error("The cloned database name and location must be unique");const t=await this.constructor.connect(e,a,{sublevels:Object.keys(this.sublevels)}),s=t.batch(),n=this.iterator();for await(const[e,a]of n)s.put(e,a);return await n.close(),await s.write(),t}async destroy(){await this.close(),fs.rmSync(this.location,{recursive:!0})}async compactFull(){const e=this.keys({limit:1,fillCache:!1}),a=await e.next();await e.close();const t=this.keys({limit:1,reverse:!0,fillCache:!1}),s=await t.next();return await t.close(),this.compactRange(a,s,{keyEncoding:"utf8"})}async size(){const e=this.keys({limit:1,fillCache:!1}),a=await e.next();await e.close();const t=this.keys({limit:1,reverse:!0,fillCache:!1}),s=await t.next();return await t.close(),this.approximateSize(a,s,{keyEncoding:"utf8"})}_sublevel(e,a){return new SublevelDatabase(this,e,a)}}
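Note: a minimal connection sketch, not part of the vendored file; the database name, path, and sublevel names are examples.

import LevelDatabase from "./level-database.mjs";

// connect() opens the ClassicLevel store with JSON value encoding, verifies it by reading
// its keys, and may repair and reconnect once on failure (allowRepair defaults to true).
const db = await LevelDatabase.connect("world.example", "/data/Data/worlds/example/data", {
  sublevels: ["actors", "items", "folders"] // each name becomes a SublevelDatabase on db.sublevels
});
await db.close(); // compacts the full key range before closing and removes the name from the registry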
resources/app/dist/database/backend/server-backend.mjs (vendored, new file, +1)
File diff suppressed because one or more lines are too long
resources/app/dist/database/backend/server-compendium.mjs (vendored, new file, +1)
@@ -0,0 +1 @@
import fs from"node:fs";import ServerCompendiumFolderMixin from"./compendium-folder.mjs";import{tagModelStats}from"../../core/utils.mjs";import*as CONST from"../../../common/constants.mjs";import{PACKAGE_TYPE_MAPPING}from"../../packages/_module.mjs";export default function ServerCompendiumMixin(e,t){return class extends e{static{this._db=void 0,this._dbState=0,this._dbWait=void 0,this.sublevel=void 0}static packData=t;static folderClass=ServerCompendiumFolderMixin(this);static get package(){return packages[this.packData.packageType.titleCase()].get(this.packData.packageName)}static metadata=(()=>foundry.utils.mergeObject(super.metadata,{permissions:{create:this.#e.bind(this),update:this.#e.bind(this),delete:this.#e.bind(this)}},{inplace:!1}))();static get collectionName(){return this.packData.id}static get implementation(){return db.packs.get(this.collectionName)}static get filename(){return this.packData.absPath}static _getSublevelNames(){const e=super._getSublevelNames();return e.push("folders"),e}static async disconnect(){await super.disconnect(),db.packs.delete(this.collectionName)}static fromSource(e,t={}){return t.pack=this.collectionName,super.fromSource(e,t)}static isOwner(e){const t=CONST.DOCUMENT_OWNERSHIP_LEVELS;return(e.isGM?t.OWNER:this.getUserLevel(e))>=t.OWNER}static getUserLevel(e){const t=CONST.DOCUMENT_OWNERSHIP_LEVELS;let i=t.NONE;const a=(game.compendiumConfiguration||{})[this.collectionName]||{},s=a?.ownership??this.packData?.ownership??{...PACKAGE_TYPE_MAPPING.module.schema.getField("packs.ownership").initial};for(const[a,o]of Object.entries(s))e.hasRole(a)&&(i=Math.max(i,t[o]));return i}static#e(e,t,i){if(((game.compendiumConfiguration||{})[t.collectionName]||{}).locked??"world"!==t.constructor.package.type)throw new Error(`You may not modify the ${t.collectionName} Compendium which is currently locked.`);return db.packs.get(t.collectionName).isOwner(e)}static async deleteCompendium(){await this.disconnect(),await fs.promises.rm(this.filename,{force:!0,recursive:!0}),await fs.promises.rm(`${this.filename}.db`,{force:!0}),logger.info(`Deleted Compendium Pack ${this.collectionName}`)}static async getIndex(e){if(!e)throw new Error("You must provide an array of index fields to retrieve");return this.connected||await this.connect(),this.database.get(this,{query:{},index:!0,indexFields:e})}static async getFolders(){return this.db.sublevels.folders.find()}static async getFolder(e,t={}){const i=await this.db.sublevels.folders.get(e);if(void 0!==i)return this.folderClass.fromSource(i,t);if(!0===t.strict)throw new Error(`The Folder [${e}] does not exist in ${this.collectionName}.`)}static async migrate({user:e,...t}={}){logger.info(`Migrating ${this.collectionName} Compendium to updated system template version.`),this.connected||await this.connect();const i=await this.find(),a=this.db.batch();for(let t of i)this.hasTypeData&&t.updateSource({system:t.migrateSystemData()}),tagModelStats(t,{user:e}),t.batchWrite(a),logger.info(`Migrated ${this.documentName} ${t.name} in Compendium pack ${this.collectionName}`);await a.write(),logger.info(`Migrated all ${i.length} ${this.documentName} Documents in Compendium pack ${this.collectionName}`)}}}
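Note: an illustrative call only. ServerActor and packData are placeholders for the document class and pack definition the server supplies when registering a compendium pack.

import ServerCompendiumMixin from "./server-compendium.mjs";

const ActorPack = ServerCompendiumMixin(ServerActor, packData);
// getIndex() requires an explicit array of index fields, connects the pack on demand,
// and queries the database with { query: {}, index: true, indexFields }.
const index = await ActorPack.getIndex(["name", "img", "type"]);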
resources/app/dist/database/backend/server-document.mjs (vendored, new file, +1)
File diff suppressed because one or more lines are too long
resources/app/dist/database/backend/sublevel-database.mjs (vendored, new file, +1)
@@ -0,0 +1 @@
import{AbstractSublevel}from"abstract-level";import{filterObject,getType,mergeObject,randomID}from"../../../common/utils/helpers.mjs";export default class SublevelDatabase extends AbstractSublevel{async createNewId(){for(;;){const e=randomID(16);if(!await this.has(e))return e}}prefixKey(e,t="utf8"){return super.prefixKey(e,t)}async get(e,t={},r){try{return await super.get(e,t,r)}catch(e){return}}async put(e,t,r={},a){return await super.put(e,t,r,a),t}async has(e){const t=this.keys({gte:e,lte:e,limit:1,fillCache:!1}),r=await t.next();return await t.close(),!!r}async delMany(e=[]){const t=await this.getMany(e,{fillCache:!1}),r=this.batch();for(const[a,s]of t.entries()){const t=e[a];s&&r.del(t)}return await r.write(),t}async find(e,{project:t,map:r,sort:a}={}){e=SublevelDatabase.#e(e);const s=[],i=await this.values({fillCache:!1}).all();for(let a of i)SublevelDatabase.#t(a,e)&&(t&&(a=filterObject(a,t)),r&&(a=await r(a)),s.push(a));return a&&("string"==typeof a&&(a={[a]:1}),s.sort(((e,t)=>SublevelDatabase.#r(e,t,a)))),s}async findOne(e,t){const r=await this.find(e,t);if(r.length)return r.length>1&&global.logger.warn(`Multiple results found for query "${JSON.stringify(e)}"`),r[0]}async findUpdate(e,t){e=SublevelDatabase.#e(e);const r=this.batch(),a=[];for(const[s,i]of await this.iterator({fillCache:!1}).all())SublevelDatabase.#t(i,e)&&(mergeObject(i,t),r.put(s,i),a.push(i));return await r.write(),a}async findDelete(e){e=SublevelDatabase.#e(e);const t=this.batch(),r=[];for(const[a,s]of await this.iterator({fillCache:!1}).all())SublevelDatabase.#t(s,e)&&(t.del(a),r.push(s));return await t.write(),r}static#r(e,t,r={}){for(const[a,s]of Object.entries(r)){const r=e[a],i=t[a];let n=0;if("string"==typeof r?n=r.compare(i)*s:"number"==typeof i&&(n=(r-i)*s),0!==n)return n}return 0}static#e(e){if(!e)return;const t=/^([A-z]+)__([a-z]+)$/;for(const[r,a]of Object.entries(e)){const s=r.match(t);if(s){const[t,i,n]=s;if(delete e[r],"in"===n)e[i]=new QueryOperation(i,a,SublevelDatabase.#a)}else"Object"===getType(a)&&SublevelDatabase.#e(a)}return e}static#t(e,t){if(!t)return!0;for(const[r,a]of Object.entries(t)){const t=e[r];if(a instanceof QueryOperation){if(!a.test(t))return!1}else if("Object"===getType(a)){if(!SublevelDatabase.#t(t,a))return!1}else if(t!==a)return!1}return!0}static#a(e,t){if(!Array.isArray(t))throw new Error("You must provide an array of target values when querying field__in");return t.includes(e)}}class QueryOperation{constructor(e,t,r){Object.defineProperties(this,{key:{value:e,writable:!1},target:{value:t,writable:!1},comparator:{value:r,writable:!1}})}test(e){return this.comparator(e,this.target)}}
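Note: a query sketch, not part of the commit; the actors sublevel and the field names are examples, but the __in operator suffix and the project/sort options follow the class above.

// find() filters the sublevel's stored values with a mongo-style query object, then
// optionally projects fields and sorts (a string key sorts ascending; an object maps
// keys to 1 for ascending or -1 for descending).
const results = await db.sublevels.actors.find(
  { type__in: ["character", "npc"], folder: null },
  { project: { _id: 1, name: 1, type: 1 }, sort: "name" }
);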