2025-01-04 00:34:03 +01:00
parent 41829408dc
commit 0ca14bbc19
18111 changed files with 1871397 additions and 0 deletions

File diff suppressed because it is too large


@@ -0,0 +1,183 @@
/**
* A mixin which decorates any container with base canvas common properties.
* @category Mixins
* @param {typeof Container} ContainerClass The parent Container class being mixed.
* @returns {typeof CanvasGroupMixin} A ContainerClass subclass mixed with CanvasGroupMixin features.
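* @example Define a concrete canvas group (illustrative)
* A minimal sketch, assuming layers have been registered in CONFIG.Canvas.layers for the
* hypothetical group name "example"; neither this group nor its layers are part of the core API.
* ```js
* class ExampleCanvasGroup extends CanvasGroupMixin(PIXI.Container) {
*   static groupName = "example";
* }
* const group = new ExampleCanvasGroup(); // Layers configured with {group: "example"} are created automatically
* await group.draw();                     // Fires the `draw${group.hookName}` hook once drawing completes
* ```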
*/
const CanvasGroupMixin = ContainerClass => {
return class CanvasGroup extends ContainerClass {
constructor(...args) {
super(...args);
this.sortableChildren = true;
this.layers = this._createLayers();
}
/**
* The name of this canvas group.
* @type {string}
* @abstract
*/
static groupName;
/**
* If this canvas group should teardown non-layers children.
* @type {boolean}
*/
static tearDownChildren = true;
/**
* The canonical name of the canvas group is the name of the constructor that is the immediate child of the
* defined base class.
* @type {string}
*/
get name() {
let cls = Object.getPrototypeOf(this.constructor);
let name = this.constructor.name;
while ( cls ) {
if ( cls !== CanvasGroup ) {
name = cls.name;
cls = Object.getPrototypeOf(cls);
}
else break;
}
return name;
}
/**
* The name used by hooks to construct their hook string.
* Note: You should override this getter if hookName should not return the class constructor name.
* @type {string}
*/
get hookName() {
return this.name;
}
/**
* A mapping of CanvasLayer classes which belong to this group.
* @type {Record<string, CanvasLayer>}
*/
layers;
/* -------------------------------------------- */
/**
* Create CanvasLayer instances which belong to the canvas group.
* @protected
*/
_createLayers() {
const layers = {};
for ( let [name, config] of Object.entries(CONFIG.Canvas.layers) ) {
if ( config.group !== this.constructor.groupName ) continue;
const layer = layers[name] = new config.layerClass();
Object.defineProperty(this, name, {value: layer, writable: false});
if ( !(name in canvas) ) Object.defineProperty(canvas, name, {value: layer, writable: false});
}
return layers;
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/**
* An internal reference to an in-progress Promise which is drawing the canvas group.
* @type {Promise<this>}
*/
#drawing = Promise.resolve(this);
/* -------------------------------------------- */
/**
* Is the group drawn?
* @type {boolean}
*/
#drawn = false;
/* -------------------------------------------- */
/**
* Draw the canvas group and all its components.
* @param {object} [options={}]
* @returns {Promise<this>} A Promise which resolves once the group is fully drawn
*/
async draw(options={}) {
return this.#drawing = this.#drawing.finally(async () => {
console.log(`${vtt} | Drawing the ${this.hookName} canvas group`);
await this.tearDown();
await this._draw(options);
Hooks.callAll(`draw${this.hookName}`, this);
this.#drawn = true;
MouseInteractionManager.emulateMoveEvent();
});
}
/**
* Draw the canvas group and all its component layers.
* @param {object} options
* @protected
*/
async _draw(options) {
// Draw CanvasLayer instances
for ( const layer of Object.values(this.layers) ) {
this.addChild(layer);
await layer.draw();
}
}
/* -------------------------------------------- */
/* Tear-Down */
/* -------------------------------------------- */
/**
* Remove and destroy all layers from the base canvas.
* @param {object} [options={}]
* @returns {Promise<this>}
*/
async tearDown(options={}) {
if ( !this.#drawn ) return this;
this.#drawn = false;
await this._tearDown(options);
Hooks.callAll(`tearDown${this.hookName}`, this);
MouseInteractionManager.emulateMoveEvent();
return this;
}
/**
* Remove and destroy all layers from the base canvas.
* @param {object} options
* @protected
*/
async _tearDown(options) {
// Remove layers
for ( const layer of Object.values(this.layers).reverse() ) {
await layer.tearDown();
this.removeChild(layer);
}
// Stop here unless this group should also tear down its non-layer children
if ( !this.constructor.tearDownChildren ) return;
// Otherwise, clear or destroy the remaining children
for ( const child of this.removeChildren() ) {
if ( child instanceof CachedContainer ) child.clear();
else child.destroy({children: true});
}
}
};
};
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
Object.defineProperty(globalThis, "BaseCanvasMixin", {
get() {
const msg = "BaseCanvasMixin is deprecated in favor of CanvasGroupMixin";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return CanvasGroupMixin;
}
});


@@ -0,0 +1,333 @@
/**
* A special type of PIXI.Container which draws its contents to a cached RenderTexture.
* This is accomplished by overriding the Container#render method to draw to our own special RenderTexture.
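* @example Cache a container's contents to a texture (illustrative)
* A minimal sketch, assuming the canvas application has already been initialized.
* ```js
* const cached = new CachedContainer(new PIXI.Sprite());
* cached.addChild(new PIXI.Graphics().beginFill(0xFF0000).drawRect(0, 0, 200, 200).endFill());
* canvas.stage.addChild(cached); // Children render into the RenderTexture; the bound sprite displays it
* cached.autoRender = false;     // Optionally re-render only when renderDirty is set to true
* ```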
*/
class CachedContainer extends PIXI.Container {
/**
* Construct a CachedContainer.
* @param {PIXI.Sprite|SpriteMesh} [sprite] A specific sprite to bind to this CachedContainer and its renderTexture.
*/
constructor(sprite) {
super();
const renderer = canvas.app?.renderer;
/**
* The RenderTexture that is the render destination for the contents of this Container
* @type {PIXI.RenderTexture}
*/
this.#renderTexture = this.createRenderTexture();
// Bind a sprite to the container
if ( sprite ) this.sprite = sprite;
// Listen for resize events
this.#onResize = this.#resize.bind(this, renderer);
renderer.on("resize", this.#onResize);
}
/**
* The texture configuration to use for this cached container
* @type {{multisample: PIXI.MSAA_QUALITY, scaleMode: PIXI.SCALE_MODES, format: PIXI.FORMATS}}
* @abstract
*/
static textureConfiguration = {};
/**
* A bound resize function which fires on the renderer resize event.
* @type {function(PIXI.Renderer)}
* @private
*/
#onResize;
/**
* A map of render textures, linked to their render function and an optional RGBA clear color.
* @type {Map<PIXI.RenderTexture,{renderFunction: Function, clearColor: number[]}>}
* @protected
*/
_renderPaths = new Map();
/**
* An object which stores a reference to the normal renderer target and source frame.
* We track this so we can restore them after rendering our cached texture.
* @type {{sourceFrame: PIXI.Rectangle, renderTexture: PIXI.RenderTexture}}
* @private
*/
#backup = {
renderTexture: undefined,
sourceFrame: canvas.app.renderer.screen.clone()
};
/**
* An RGBA array used to define the clear color of the RenderTexture
* @type {number[]}
*/
clearColor = [0, 0, 0, 1];
/**
* Should our Container also be displayed on screen, in addition to being drawn to the cached RenderTexture?
* @type {boolean}
*/
displayed = false;
/**
* If true, the Container is rendered every frame.
* If false, the Container is rendered only if {@link CachedContainer#renderDirty} is true.
* @type {boolean}
*/
autoRender = true;
/**
* Does the Container need to be rendered?
* Set to false after the Container is rendered.
* @type {boolean}
*/
renderDirty = true;
/* ---------------------------------------- */
/**
* The primary render texture bound to this cached container.
* @type {PIXI.RenderTexture}
*/
get renderTexture() {
return this.#renderTexture;
}
/** @private */
#renderTexture;
/* ---------------------------------------- */
/**
* Set the alpha mode of the cached container render texture.
* @param {PIXI.ALPHA_MODES} mode
*/
set alphaMode(mode) {
this.#renderTexture.baseTexture.alphaMode = mode;
this.#renderTexture.baseTexture.update();
}
/* ---------------------------------------- */
/**
* A PIXI.Sprite or SpriteMesh which is bound to this CachedContainer.
* The RenderTexture from this Container is associated with the Sprite which is automatically rendered.
* @type {PIXI.Sprite|SpriteMesh}
*/
get sprite() {
return this.#sprite;
}
set sprite(sprite) {
if ( sprite instanceof PIXI.Sprite || sprite instanceof SpriteMesh ) {
sprite.texture = this.renderTexture;
this.#sprite = sprite;
}
else if ( sprite ) {
throw new Error("You may only bind a PIXI.Sprite or a SpriteMesh as the render target for a CachedContainer.");
}
}
/** @private */
#sprite;
/* ---------------------------------------- */
/**
* Create a render texture, provide a render method and an optional clear color.
* @param {object} [options={}] Optional parameters.
* @param {Function} [options.renderFunction] Render function that will be called to render into the RT.
* @param {number[]} [options.clearColor] An optional clear color to clear the RT before rendering into it.
* @returns {PIXI.RenderTexture} A reference to the created render texture.
*/
createRenderTexture({renderFunction, clearColor}={}) {
const renderOptions = {};
const renderer = canvas.app.renderer;
const conf = this.constructor.textureConfiguration;
const pm = canvas.performance.mode;
// Disabling linear filtering by default for low/medium performance mode
const defaultScaleMode = (pm > CONST.CANVAS_PERFORMANCE_MODES.MED)
? PIXI.SCALE_MODES.LINEAR
: PIXI.SCALE_MODES.NEAREST;
// Creating the render texture
const renderTexture = PIXI.RenderTexture.create({
width: renderer.screen.width,
height: renderer.screen.height,
resolution: renderer.resolution,
multisample: conf.multisample ?? renderer.multisample,
scaleMode: conf.scaleMode ?? defaultScaleMode,
format: conf.format ?? PIXI.FORMATS.RGBA
});
renderOptions.renderFunction = renderFunction; // Binding the render function
renderOptions.clearColor = clearColor; // Saving the optional clear color
this._renderPaths.set(renderTexture, renderOptions); // Push into the render paths
this.renderDirty = true;
// Return a reference to the render texture
return renderTexture;
}
/* ---------------------------------------- */
/**
* Remove a previously created render texture.
* @param {PIXI.RenderTexture} renderTexture The render texture to remove.
* @param {boolean} [destroy=true] Should the render texture be destroyed?
*/
removeRenderTexture(renderTexture, destroy=true) {
this._renderPaths.delete(renderTexture);
if ( destroy ) renderTexture?.destroy(true);
this.renderDirty = true;
}
/* ---------------------------------------- */
/**
* Clear the cached container, removing its current contents.
* @param {boolean} [destroy=true] Whether textures of the removed children should also be destroyed.
* @returns {CachedContainer} A reference to the cleared container for chaining.
*/
clear(destroy=true) {
Canvas.clearContainer(this, destroy);
return this;
}
/* ---------------------------------------- */
/** @inheritdoc */
destroy(options) {
if ( this.#onResize ) canvas.app.renderer.off("resize", this.#onResize);
for ( const [rt] of this._renderPaths ) rt?.destroy(true);
this._renderPaths.clear();
super.destroy(options);
}
/* ---------------------------------------- */
/** @inheritdoc */
render(renderer) {
if ( !this.renderable ) return; // Skip updating the cached texture
if ( this.autoRender || this.renderDirty ) {
this.renderDirty = false;
this.#bindPrimaryBuffer(renderer); // Bind the primary buffer (RT)
super.render(renderer); // Draw into the primary buffer
this.#renderSecondary(renderer); // Draw into the secondary buffer(s)
this.#bindOriginalBuffer(renderer); // Restore the original buffer
}
this.#sprite?.render(renderer); // Render the bound sprite
if ( this.displayed ) super.render(renderer); // Optionally draw to the screen
}
/* ---------------------------------------- */
/**
* Custom rendering for secondary render textures
* @param {PIXI.Renderer} renderer The active canvas renderer.
* @protected
*/
#renderSecondary(renderer) {
if ( this._renderPaths.size <= 1 ) return;
// Bind the render texture and call the custom render method for each render path
for ( const [rt, ro] of this._renderPaths ) {
if ( !ro.renderFunction ) continue;
this.#bind(renderer, rt, ro.clearColor);
ro.renderFunction.call(this, renderer);
}
}
/* ---------------------------------------- */
/**
* Bind the primary render texture to the renderer, replacing and saving the original buffer and source frame.
* @param {PIXI.Renderer} renderer The active canvas renderer.
* @private
*/
#bindPrimaryBuffer(renderer) {
// Get the RenderTexture to bind
const tex = this.renderTexture;
const rt = renderer.renderTexture;
// Backup the current render target
this.#backup.renderTexture = rt.current;
this.#backup.sourceFrame.copyFrom(rt.sourceFrame);
// Bind the render texture
this.#bind(renderer, tex);
}
/* ---------------------------------------- */
/**
* Bind a render texture to this renderer.
* Must be called after #bindPrimaryBuffer and before #bindOriginalBuffer.
* @param {PIXI.Renderer} renderer The active canvas renderer.
* @param {PIXI.RenderTexture} tex The texture to bind.
* @param {number[]} [clearColor] A custom clear color.
* @protected
*/
#bind(renderer, tex, clearColor) {
const rt = renderer.renderTexture;
// Bind our texture to the renderer
renderer.batch.flush();
rt.bind(tex, undefined, undefined);
rt.clear(clearColor ?? this.clearColor);
// Enable Filters which are applied to this Container to apply to our cached RenderTexture
const fs = renderer.filter.defaultFilterStack;
if ( fs.length > 1 ) {
fs[fs.length - 1].renderTexture = tex;
}
}
/* ---------------------------------------- */
/**
* Remove the render texture from the Renderer, re-binding the original buffer.
* @param {PIXI.Renderer} renderer The active canvas renderer.
* @private
*/
#bindOriginalBuffer(renderer) {
renderer.batch.flush();
// Restore Filters to apply to the original RenderTexture
const fs = renderer.filter.defaultFilterStack;
if ( fs.length > 1 ) {
fs[fs.length - 1].renderTexture = this.#backup.renderTexture;
}
// Re-bind the original RenderTexture to the renderer
renderer.renderTexture.bind(this.#backup.renderTexture, this.#backup.sourceFrame, undefined);
this.#backup.renderTexture = undefined;
}
/* ---------------------------------------- */
/**
* Resize bound render texture(s) when the dimensions or resolution of the Renderer have changed.
* @param {PIXI.Renderer} renderer The active canvas renderer.
* @private
*/
#resize(renderer) {
for ( const [rt] of this._renderPaths ) CachedContainer.resizeRenderTexture(renderer, rt);
if ( this.#sprite ) this.#sprite._boundsID++; // Inform PIXI that bounds need to be recomputed for this sprite mesh
this.renderDirty = true;
}
/* ---------------------------------------- */
/**
* Resize a render texture passed as a parameter with the renderer.
* @param {PIXI.Renderer} renderer The active canvas renderer.
* @param {PIXI.RenderTexture} rt The render texture to resize.
*/
static resizeRenderTexture(renderer, rt) {
const screen = renderer?.screen;
if ( !rt || !screen ) return;
if ( rt.baseTexture.resolution !== renderer.resolution ) rt.baseTexture.resolution = renderer.resolution;
if ( (rt.width !== screen.width) || (rt.height !== screen.height) ) rt.resize(screen.width, screen.height);
}
}


@@ -0,0 +1,31 @@
/**
* Augment any PIXI.DisplayObject to assume bounds that are always aligned with the full visible screen.
* The bounds of this container do not depend on its children but always fill the entire canvas.
* @param {typeof PIXI.DisplayObject} Base Any PIXI DisplayObject subclass
* @returns {typeof FullCanvasObject} The decorated subclass with full canvas bounds
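* @example Give a display object full-canvas bounds (illustrative)
* A minimal sketch; the subclass produced here is an assumption for illustration.
* ```js
* const FullCanvasGraphics = FullCanvasObjectMixin(PIXI.Graphics);
* const overlay = new FullCanvasGraphics();
* // overlay.calculateBounds() now always reports canvas.dimensions.rect, regardless of its contents
* ```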
*/
function FullCanvasObjectMixin(Base) {
return class FullCanvasObject extends Base {
/** @override */
calculateBounds() {
const bounds = this._bounds;
const { x, y, width, height } = canvas.dimensions.rect;
bounds.clear();
bounds.addFrame(this.transform, x, y, x + width, y + height);
bounds.updateID = this._boundsID;
}
};
}
/**
* @deprecated since v11
* @ignore
*/
class FullCanvasContainer extends FullCanvasObjectMixin(PIXI.Container) {
constructor(...args) {
super(...args);
const msg = "You are using the FullCanvasContainer class which has been deprecated in favor of a more flexible "
+ "FullCanvasObjectMixin which can augment any PIXI.DisplayObject subclass.";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
}
}


@@ -0,0 +1,118 @@
/**
* An extension of PIXI.Mesh with the capability to provide a snapshot of the framebuffer.
* @extends PIXI.Mesh
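* @example (illustrative)
* A minimal sketch; PointSourceMesh inherits the standard PIXI.Mesh constructor signature, and the
* geometry and shader shown here are assumptions for illustration.
* ```js
* const mesh = new PointSourceMesh(sourceGeometry, sourceShader, PIXI.State.for2d());
* // If the bound shader declares a framebufferTexture uniform, the mesh may substitute a snapshot
* // of the framebuffer when the blend mode changes between consecutive draws.
* ```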
*/
class PointSourceMesh extends PIXI.Mesh {
/**
* Stores the previous blend mode of the last rendered PointSourceMesh.
* @type {PIXI.BLEND_MODES}
* @protected
*/
static _priorBlendMode;
/**
* The current texture used by the mesh.
* @type {PIXI.Texture}
* @protected
*/
static _currentTexture;
/**
* The transform world ID of the bounds.
* @type {number}
*/
_worldID = -1;
/**
* The geometry update ID of the bounds.
* @type {number}
*/
_updateID = -1;
/* -------------------------------------------- */
/* PointSourceMesh Properties */
/* -------------------------------------------- */
/** @override */
get geometry() {
return super.geometry;
}
/** @override */
set geometry(value) {
if ( this._geometry !== value ) this._updateID = -1;
super.geometry = value;
}
/* -------------------------------------------- */
/* PointSourceMesh Methods */
/* -------------------------------------------- */
/** @override */
addChild() {
throw new Error("You can't add children to a PointSourceMesh.");
}
/* ---------------------------------------- */
/** @override */
addChildAt() {
throw new Error("You can't add children to a PointSourceMesh.");
}
/* ---------------------------------------- */
/** @override */
_render(renderer) {
if ( this.uniforms.framebufferTexture !== undefined ) {
if ( canvas.blur.enabled ) {
// We need to use the snapshot only if blend mode is changing
const requireUpdate = (this.state.blendMode !== PointSourceMesh._priorBlendMode)
&& (PointSourceMesh._priorBlendMode !== undefined);
if ( requireUpdate ) PointSourceMesh._currentTexture = canvas.snapshot.getFramebufferTexture(renderer);
PointSourceMesh._priorBlendMode = this.state.blendMode;
}
this.uniforms.framebufferTexture = PointSourceMesh._currentTexture;
}
super._render(renderer);
}
/* ---------------------------------------- */
/** @override */
calculateBounds() {
const {transform, geometry} = this;
// Checking bounds id to update only when it is necessary
if ( this._worldID !== transform._worldID
|| this._updateID !== geometry.buffers[0]._updateID ) {
this._worldID = transform._worldID;
this._updateID = geometry.buffers[0]._updateID;
const {x, y, width, height} = this.geometry.bounds;
this._bounds.clear();
this._bounds.addFrame(transform, x, y, x + width, y + height);
}
this._bounds.updateID = this._boundsID;
}
/* ---------------------------------------- */
/** @override */
_calculateBounds() {
this.calculateBounds();
}
/* ---------------------------------------- */
/**
* The local bounds need to be drawn from the underlying geometry.
* @override
*/
getLocalBounds(rect) {
rect ??= this._localBoundsRect ??= new PIXI.Rectangle();
return this.geometry.bounds.copyTo(rect);
}
}


@@ -0,0 +1,125 @@
/**
* A basic rectangular mesh with a shader only. Does not natively handle textures (but a bound shader can).
* Bounds calculations are simplified and the geometry does not need to handle texture coords.
* @param {typeof AbstractBaseShader} shaderClass The shader class to use.
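* @example Cover the canvas with a shader-only quad (illustrative)
* A minimal sketch; "MyShaderClass" is an assumed AbstractBaseShader subclass, not part of the core API.
* ```js
* const quad = new QuadMesh(MyShaderClass);
* quad.scale.set(canvas.dimensions.width, canvas.dimensions.height); // Stretch the unit quad
* quad.blendMode = PIXI.BLEND_MODES.SCREEN;
* canvas.stage.addChild(quad);
* ```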
*/
class QuadMesh extends PIXI.Container {
constructor(shaderClass) {
super();
// Assign shader, state and properties
if ( !AbstractBaseShader.isPrototypeOf(shaderClass) ) {
throw new Error("QuadMesh shader class must inherit from AbstractBaseShader.");
}
this.#shader = shaderClass.create();
}
/**
* Geometry bound to this QuadMesh.
* @type {PIXI.Geometry}
*/
#geometry = new PIXI.Geometry()
.addAttribute("aVertexPosition", [0, 0, 1, 0, 1, 1, 0, 1], 2)
.addIndex([0, 1, 2, 0, 2, 3]);
/* ---------------------------------------- */
/**
* The shader bound to this mesh.
* @type {AbstractBaseShader}
*/
get shader() {
return this.#shader;
}
/**
* @type {AbstractBaseShader}
*/
#shader;
/* ---------------------------------------- */
/**
* Assigned blend mode to this mesh.
* @type {PIXI.BLEND_MODES}
*/
get blendMode() {
return this.#state.blendMode;
}
set blendMode(value) {
this.#state.blendMode = value;
}
/**
* State bound to this QuadMesh.
* @type {PIXI.State}
*/
#state = PIXI.State.for2d();
/* ---------------------------------------- */
/**
* Initialize shader based on the shader class type.
* @param {typeof AbstractBaseShader} shaderClass The shader class to use. Must inherit from AbstractBaseShader.
*/
setShaderClass(shaderClass) {
// Escape conditions
if ( !AbstractBaseShader.isPrototypeOf(shaderClass) ) {
throw new Error("QuadMesh shader class must inherit from AbstractBaseShader.");
}
if ( this.#shader.constructor === shaderClass ) return;
// Create shader program
this.#shader = shaderClass.create();
}
/* ---------------------------------------- */
/** @override */
_render(renderer) {
this.#shader._preRender(this, renderer);
this.#shader.uniforms.translationMatrix = this.transform.worldTransform.toArray(true);
// Flush batch renderer
renderer.batch.flush();
// Set state
renderer.state.set(this.#state);
// Bind shader and geometry
renderer.shader.bind(this.#shader);
renderer.geometry.bind(this.#geometry, this.#shader);
// Draw the geometry
renderer.geometry.draw(PIXI.DRAW_MODES.TRIANGLES);
}
/* ---------------------------------------- */
/** @override */
_calculateBounds() {
this._bounds.addFrame(this.transform, 0, 0, 1, 1);
}
/* ---------------------------------------- */
/**
* Tests if a point is inside this QuadMesh.
* @param {PIXI.IPointData} point
* @returns {boolean}
*/
containsPoint(point) {
return this.getBounds().contains(point.x, point.y);
}
/* ---------------------------------------- */
/** @override */
destroy(options) {
super.destroy(options);
this.#geometry.dispose();
this.#geometry = null;
this.#shader = null;
this.#state = null;
}
}


@@ -0,0 +1,319 @@
/**
* @typedef {object} QuadtreeObject
* @property {Rectangle} r
* @property {*} t
* @property {Set<Quadtree>} [n]
*/
/**
* A Quadtree implementation that supports collision detection for rectangles.
*
* @param {Rectangle} bounds The outer bounds of the region
* @param {object} [options] Additional options which configure the Quadtree
* @param {number} [options.maxObjects=20] The maximum number of objects per node
* @param {number} [options.maxDepth=4] The maximum number of levels within the root Quadtree
* @param {number} [options._depth=0] The depth level of the sub-tree. For internal use
* @param {Quadtree} [options._root] The root of the quadtree. For internal use
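* @example Insert and query (illustrative)
* A minimal sketch using a plain object as a target; PIXI.Rectangle#overlaps is assumed to be
* available, as it is within the Foundry canvas environment.
* ```js
* const qt = new Quadtree(new PIXI.Rectangle(0, 0, 1000, 1000));
* const target = {id: "a"};
* qt.insert({r: new PIXI.Rectangle(100, 100, 50, 50), t: target});
* const hits = qt.getObjects(new PIXI.Rectangle(0, 0, 200, 200)); // Set containing `target`
* ```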
*/
class Quadtree {
constructor(bounds, {maxObjects=20, maxDepth=4, _depth=0, _root}={}) {
/**
* The bounding rectangle of the region
* @type {PIXI.Rectangle}
*/
this.bounds = new PIXI.Rectangle(bounds.x, bounds.y, bounds.width, bounds.height);
/**
* The maximum number of objects allowed within this node before it must split
* @type {number}
*/
this.maxObjects = maxObjects;
/**
* The maximum number of levels that the base quadtree is allowed
* @type {number}
*/
this.maxDepth = maxDepth;
/**
* The depth of this node within the root Quadtree
* @type {number}
*/
this.depth = _depth;
/**
* The objects contained at this level of the tree
* @type {QuadtreeObject[]}
*/
this.objects = [];
/**
* Children of this node
* @type {Quadtree[]}
*/
this.nodes = [];
/**
* The root Quadtree
* @type {Quadtree}
*/
this.root = _root || this;
}
/**
* A constant that enumerates the index order of the quadtree nodes from top-left to bottom-right.
* @enum {number}
*/
static INDICES = {tl: 0, tr: 1, bl: 2, br: 3};
/* -------------------------------------------- */
/**
* Return an array of all the objects in the Quadtree (recursive)
* @returns {QuadtreeObject[]}
*/
get all() {
if ( this.nodes.length ) {
return this.nodes.reduce((arr, n) => arr.concat(n.all), []);
}
return this.objects;
}
/* -------------------------------------------- */
/* Tree Management */
/* -------------------------------------------- */
/**
* Split this node into 4 sub-nodes.
* @returns {Quadtree} The split Quadtree
*/
split() {
const b = this.bounds;
const w = b.width / 2;
const h = b.height / 2;
const options = {
maxObjects: this.maxObjects,
maxDepth: this.maxDepth,
_depth: this.depth + 1,
_root: this.root
};
// Create child quadrants
this.nodes[Quadtree.INDICES.tl] = new Quadtree(new PIXI.Rectangle(b.x, b.y, w, h), options);
this.nodes[Quadtree.INDICES.tr] = new Quadtree(new PIXI.Rectangle(b.x+w, b.y, w, h), options);
this.nodes[Quadtree.INDICES.bl] = new Quadtree(new PIXI.Rectangle(b.x, b.y+h, w, h), options);
this.nodes[Quadtree.INDICES.br] = new Quadtree(new PIXI.Rectangle(b.x+w, b.y+h, w, h), options);
// Assign current objects to child nodes
for ( let o of this.objects ) {
o.n.delete(this);
this.insert(o);
}
this.objects = [];
return this;
}
/* -------------------------------------------- */
/* Object Management */
/* -------------------------------------------- */
/**
* Clear the quadtree of all existing contents
* @returns {Quadtree} The cleared Quadtree
*/
clear() {
this.objects = [];
for ( let n of this.nodes ) {
n.clear();
}
this.nodes = [];
return this;
}
/* -------------------------------------------- */
/**
* Add a rectangle object to the tree
* @param {QuadtreeObject} obj The object being inserted
* @returns {Quadtree[]} The Quadtree nodes the object was added to.
*/
insert(obj) {
obj.n = obj.n || new Set();
// If inserting this object would reach the maximum number of objects, split this node first
if ( (this.objects.length === this.maxObjects - 1) && (this.depth < this.maxDepth) ) {
if ( !this.nodes.length ) this.split();
}
// If this node has children, recursively insert
if ( this.nodes.length ) {
let nodes = this.getChildNodes(obj.r);
return nodes.reduce((arr, n) => arr.concat(n.insert(obj)), []);
}
// Otherwise store the object here
obj.n.add(this);
this.objects.push(obj);
return [this];
}
/* -------------------------------------------- */
/**
* Remove an object from the quadtree
* @param {*} target The quadtree target being removed
* @returns {Quadtree} The Quadtree for method chaining
*/
remove(target) {
this.objects.findSplice(o => o.t === target);
for ( let n of this.nodes ) {
n.remove(target);
}
return this;
}
/* -------------------------------------------- */
/**
* Remove an existing object from the quadtree and re-insert it with a new position
* @param {QuadtreeObject} obj The object being inserted
* @returns {Quadtree[]} The Quadtree nodes the object was added to
*/
update(obj) {
this.remove(obj.t);
return this.insert(obj);
}
/* -------------------------------------------- */
/* Target Identification */
/* -------------------------------------------- */
/**
* Get all the objects which could collide with the provided rectangle
* @param {Rectangle} rect The normalized target rectangle
* @param {object} [options] Options affecting the collision test.
* @param {Function} [options.collisionTest] Function to further refine objects to return
* after a potential collision is found. Parameters are the object and rect, and the
* function should return true if the object should be added to the result set.
* @param {Set} [options._s] The existing result set, for internal use.
* @returns {Set} The objects in the Quadtree which represent potential collisions
*/
getObjects(rect, { collisionTest, _s } = {}) {
const objects = _s || new Set();
// Recursively retrieve objects from child nodes
if ( this.nodes.length ) {
const nodes = this.getChildNodes(rect);
for ( let n of nodes ) {
n.getObjects(rect, {collisionTest, _s: objects});
}
}
// Otherwise, retrieve from this node
else {
for ( let o of this.objects) {
if ( rect.overlaps(o.r) && (!collisionTest || collisionTest(o, rect)) ) objects.add(o.t);
}
}
// Return the result set
return objects;
}
/* -------------------------------------------- */
/**
* Obtain the leaf nodes to which a target rectangle belongs.
* This traverses the quadtree recursively obtaining the final nodes which have no children.
* @param {Rectangle} rect The target rectangle.
* @returns {Quadtree[]} The Quadtree nodes to which the target rectangle belongs
*/
getLeafNodes(rect) {
if ( !this.nodes.length ) return [this];
const nodes = this.getChildNodes(rect);
return nodes.reduce((arr, n) => arr.concat(n.getLeafNodes(rect)), []);
}
/* -------------------------------------------- */
/**
* Obtain the child nodes within the current node which a rectangle belongs to.
* Note that this function is not recursive, it only returns nodes at the current or child level.
* @param {Rectangle} rect The target rectangle.
* @returns {Quadtree[]} The Quadtree nodes to which the target rectangle belongs
*/
getChildNodes(rect) {
// If this node has no children, use it
if ( !this.nodes.length ) return [this];
// Prepare data
const nodes = [];
const hx = this.bounds.x + (this.bounds.width / 2);
const hy = this.bounds.y + (this.bounds.height / 2);
// Determine orientation relative to the node
const startTop = rect.y <= hy;
const startLeft = rect.x <= hx;
const endBottom = (rect.y + rect.height) > hy;
const endRight = (rect.x + rect.width) > hx;
// Top-left
if ( startLeft && startTop ) nodes.push(this.nodes[Quadtree.INDICES.tl]);
// Top-right
if ( endRight && startTop ) nodes.push(this.nodes[Quadtree.INDICES.tr]);
// Bottom-left
if ( startLeft && endBottom ) nodes.push(this.nodes[Quadtree.INDICES.bl]);
// Bottom-right
if ( endRight && endBottom ) nodes.push(this.nodes[Quadtree.INDICES.br]);
return nodes;
}
/* -------------------------------------------- */
/**
* Identify all nodes which are adjacent to this one within the parent Quadtree.
* @returns {Quadtree[]}
*/
getAdjacentNodes() {
const bounds = this.bounds.clone().pad(1);
return this.root.getLeafNodes(bounds);
}
/* -------------------------------------------- */
/**
* Visualize the nodes and objects in the quadtree
* @param {object} [options] Options which configure visualization.
* @param {boolean} [options.objects=false] Visualize the rectangular bounds of objects in the Quadtree.
* @private
*/
visualize({objects=false}={}) {
const debug = canvas.controls.debug;
if ( this.depth === 0 ) debug.clear().endFill();
debug.lineStyle(2, 0x00FF00, 0.5).drawRect(this.bounds.x, this.bounds.y, this.bounds.width, this.bounds.height);
if ( objects ) {
for ( let o of this.objects ) {
debug.lineStyle(2, 0xFF0000, 0.5).drawRect(o.r.x, o.r.y, Math.max(o.r.width, 1), Math.max(o.r.height, 1));
}
}
for ( let n of this.nodes ) {
n.visualize({objects});
}
}
}
/* -------------------------------------------- */
/**
* A subclass of Quadtree specifically intended for classifying the location of objects on the game canvas.
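* @example (illustrative)
* A minimal sketch; the bounds of a CanvasQuadtree always track canvas.dimensions.rect. The
* "placeable", "region", and collisionTest refinement shown here are assumptions for illustration.
* ```js
* const qt = new CanvasQuadtree();
* qt.insert({r: placeable.bounds, t: placeable});
* const visible = qt.getObjects(region, {collisionTest: ({t}) => t.visible});
* ```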
*/
class CanvasQuadtree extends Quadtree {
constructor(options={}) {
super({}, options);
Object.defineProperty(this, "bounds", {get: () => canvas.dimensions.rect});
}
}


@@ -0,0 +1,820 @@
/**
* A PIXI.Container subclass which emulates a PIXI.Sprite with a specific shader.
* @param {PIXI.Texture} [texture=PIXI.Texture.EMPTY] Texture bound to this sprite mesh.
* @param {typeof BaseSamplerShader} [shaderClass=BaseSamplerShader] Shader class used by this sprite mesh.
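* @example Create a SpriteMesh from a texture path (illustrative)
* A minimal sketch; the texture path used here is an assumption for illustration.
* ```js
* const mesh = SpriteMesh.from("path/to/texture.webp");
* mesh.anchor.set(0.5, 0.5);
* mesh.tint = 0xFF9900;
* canvas.stage.addChild(mesh);
* ```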
*/
class SpriteMesh extends PIXI.Container {
constructor(texture, shaderClass=BaseSamplerShader) {
super();
// Create shader program
if ( !foundry.utils.isSubclass(shaderClass, BaseSamplerShader) ) {
throw new Error("SpriteMesh shader class must be a subclass of BaseSamplerShader.");
}
this._shader = shaderClass.create();
// Initialize other data to emulate sprite
this.vertexData = this.#geometry.buffers[0].data;
this.uvs = this.#geometry.buffers[1].data;
this.indices = this.#geometry.indexBuffer.data;
this._texture = null;
this._anchor = new PIXI.ObservablePoint(
this._onAnchorUpdate,
this,
(texture ? texture.defaultAnchor.x : 0),
(texture ? texture.defaultAnchor.y : 0)
);
this.texture = texture;
this.isSprite = true;
// Assigning some batch data that will not change during the life of this sprite mesh
this._batchData.vertexData = this.vertexData;
this._batchData.indices = this.indices;
this._batchData.uvs = this.uvs;
this._batchData.object = this;
}
/**
* A temporary reusable rect.
* @type {PIXI.Rectangle}
*/
static #TEMP_RECT = new PIXI.Rectangle();
/**
* A temporary reusable point.
* @type {PIXI.Point}
*/
static #TEMP_POINT = new PIXI.Point();
/**
* Geometry bound to this SpriteMesh.
* @type {PIXI.Geometry}
*/
#geometry = new PIXI.Geometry()
.addAttribute("aVertexPosition", new PIXI.Buffer(new Float32Array(8), false), 2)
.addAttribute("aTextureCoord", new PIXI.Buffer(new Float32Array(8), true), 2)
.addIndex([0, 1, 2, 0, 2, 3]);
/**
* Snapshot of some parameters of this display object to render in batched mode.
* @type {{_tintRGB: number, _texture: PIXI.Texture, indices: number[],
* uvs: number[], blendMode: PIXI.BLEND_MODES, vertexData: number[], worldAlpha: number}}
* @protected
*/
_batchData = {
_texture: undefined,
vertexData: undefined,
indices: undefined,
uvs: undefined,
worldAlpha: undefined,
_tintRGB: undefined,
blendMode: undefined,
object: undefined
};
/**
* The indices of the geometry.
* @type {Uint16Array}
*/
indices;
/**
* The width of the sprite (this is initially set by the texture).
* @type {number}
* @protected
*/
_width = 0;
/**
* The height of the sprite (this is initially set by the texture)
* @type {number}
* @protected
*/
_height = 0;
/**
* The texture that the sprite is using.
* @type {PIXI.Texture}
* @protected
*/
_texture;
/**
* The texture ID.
* @type {number}
* @protected
*/
_textureID = -1;
/**
* Cached tint value so we can tell when the tint is changed.
* @type {[red: number, green: number, blue: number, alpha: number]}
* @protected
* @internal
*/
_cachedTint = [1, 1, 1, 1];
/**
* The texture trimmed ID.
* @type {number}
* @protected
*/
_textureTrimmedID = -1;
/**
* This is used to store the uvs data of the sprite, assigned at the same time
* as the vertexData in calculateVertices().
* @type {Float32Array}
* @protected
*/
uvs;
/**
* The anchor point defines the normalized coordinates
* in the texture that map to the position of this
* sprite.
*
* By default, this is `(0,0)` (or `texture.defaultAnchor`
* if you have modified that), which means the position
* `(x,y)` of this `Sprite` will be the top-left corner.
*
* Note: Updating `texture.defaultAnchor` after
* constructing a `Sprite` does _not_ update its anchor.
*
* {@link https://docs.cocos2d-x.org/cocos2d-x/en/sprites/manipulation.html}
* @type {PIXI.ObservablePoint}
* @protected
*/
_anchor;
/**
* This is used to store the vertex data of the sprite (basically a quad).
* @type {Float32Array}
* @protected
*/
vertexData;
/**
* This is used to calculate the bounds of the object IF it is a trimmed sprite.
* @type {Float32Array|null}
* @protected
*/
vertexTrimmedData = null;
/**
* The transform ID.
* @type {number}
* @private
*/
_transformID = -1;
/**
* The transform ID.
* @type {number}
* @private
*/
_transformTrimmedID = -1;
/**
* The tint applied to the sprite. This is a hex value. A value of 0xFFFFFF will remove any tint effect.
* @type {PIXI.Color}
* @protected
*/
_tintColor = new PIXI.Color(0xFFFFFF);
/**
* The tint applied to the sprite. This is an RGB value. A value of 0xFFFFFF will remove any tint effect.
* @type {number}
* @protected
*/
_tintRGB = 0xFFFFFF;
/**
* An instance of a texture uvs used for padded SpriteMesh.
* Instanced only when padding becomes non-zero.
* @type {PIXI.TextureUvs|null}
* @protected
*/
_textureUvs = null;
/**
* Used to track a tint or alpha change to execute a recomputation of _cachedTint.
* @type {boolean}
* @protected
*/
_tintAlphaDirty = true;
/**
* The PIXI.State of this SpriteMesh.
* @type {PIXI.State}
*/
#state = PIXI.State.for2d();
/* ---------------------------------------- */
/**
* The shader bound to this mesh.
* @type {BaseSamplerShader}
*/
get shader() {
return this._shader;
}
/**
* The shader bound to this mesh.
* @type {BaseSamplerShader}
* @protected
*/
_shader;
/* ---------------------------------------- */
/**
* The x padding in pixels (must be a non-negative value.)
* @type {number}
*/
get paddingX() {
return this._paddingX;
}
set paddingX(value) {
if ( value < 0 ) throw new Error("The padding must be a non-negative value.");
if ( this._paddingX === value ) return;
this._paddingX = value;
this._textureID = -1;
this._textureTrimmedID = -1;
this._textureUvs ??= new PIXI.TextureUvs();
}
/**
* The y padding in pixels (must be a non-negative value.)
* @type {number}
*/
get paddingY() {
return this._paddingY;
}
set paddingY(value) {
if ( value < 0 ) throw new Error("The padding must be a non-negative value.");
if ( this._paddingY === value ) return;
this._paddingY = value;
this._textureID = -1;
this._textureTrimmedID = -1;
this._textureUvs ??= new PIXI.TextureUvs();
}
/**
* The maximum x/y padding in pixels (must be a non-negative value.)
* @type {number}
*/
get padding() {
return Math.max(this._paddingX, this._paddingY);
}
set padding(value) {
if ( value < 0 ) throw new Error("The padding must be a non-negative value.");
this.paddingX = this.paddingY = value;
}
/**
* @type {number}
* @protected
*/
_paddingX = 0;
/**
* @type {number}
* @protected
*/
_paddingY = 0;
/* ---------------------------------------- */
/**
* The blend mode applied to the SpriteMesh.
* @type {PIXI.BLEND_MODES}
* @defaultValue PIXI.BLEND_MODES.NORMAL
*/
set blendMode(value) {
this.#state.blendMode = value;
}
get blendMode() {
return this.#state.blendMode;
}
/* ---------------------------------------- */
/**
* If true PixiJS will Math.round() x/y values when rendering, stopping pixel interpolation.
* Advantages can include sharper image quality (like text) and faster rendering on canvas.
* The main disadvantage is movement of objects may appear less smooth.
* To set the global default, change PIXI.settings.ROUND_PIXELS
* @defaultValue PIXI.settings.ROUND_PIXELS
*/
set roundPixels(value) {
if ( this.#roundPixels !== value ) this._transformID = -1;
this.#roundPixels = value;
}
get roundPixels() {
return this.#roundPixels;
}
#roundPixels = PIXI.settings.ROUND_PIXELS;
/* ---------------------------------------- */
/**
* Used to force an alpha mode on this sprite mesh.
* If this property is non null, this value will replace the texture alphaMode when computing color channels.
* Affects how tint, worldAlpha, and alpha are combined with one another.
* @type {PIXI.ALPHA_MODES}
*/
get alphaMode() {
return this.#alphaMode ?? this._texture.baseTexture.alphaMode;
}
set alphaMode(mode) {
if ( this.#alphaMode === mode ) return;
this.#alphaMode = mode;
this._tintAlphaDirty = true;
}
#alphaMode = null;
/* ---------------------------------------- */
/**
* The batch plugin associated with this SpriteMesh. By default this is the plugin of the associated shader.
* If a plugin name has been forced, the forced plugin is returned instead.
* @type {string}
*/
get pluginName() {
return this.#pluginName ?? this._shader.pluginName;
}
set pluginName(name) {
this.#pluginName = name;
}
#pluginName = null;
/* ---------------------------------------- */
/** @override */
get width() {
return Math.abs(this.scale.x) * this._texture.orig.width;
}
set width(width) {
const s = Math.sign(this.scale.x) || 1;
this.scale.x = s * width / this._texture.orig.width;
this._width = width;
}
/* ---------------------------------------- */
/** @override */
get height() {
return Math.abs(this.scale.y) * this._texture.orig.height;
}
set height(height) {
const s = Math.sign(this.scale.y) || 1;
this.scale.y = s * height / this._texture.orig.height;
this._height = height;
}
/* ---------------------------------------- */
/**
* The texture that the sprite is using.
* @type {PIXI.Texture}
*/
get texture() {
return this._texture;
}
set texture(texture) {
texture = texture ?? null;
if ( this._texture === texture ) return;
if ( this._texture ) this._texture.off("update", this._onTextureUpdate, this);
this._texture = texture || PIXI.Texture.EMPTY;
this._textureID = this._textureTrimmedID = -1;
this._tintAlphaDirty = true;
if ( texture ) {
if ( this._texture.baseTexture.valid ) this._onTextureUpdate();
else this._texture.once("update", this._onTextureUpdate, this);
}
}
/* ---------------------------------------- */
/**
* The anchor sets the origin point of the sprite. The default value is taken from the {@link PIXI.Texture|Texture}
* and passed to the constructor.
*
* The default is `(0,0)`, this means the sprite's origin is the top left.
*
* Setting the anchor to `(0.5,0.5)` means the sprite's origin is centered.
*
* Setting the anchor to `(1,1)` would mean the sprite's origin point will be the bottom right corner.
*
* If you pass only a single parameter, it will set both x and y to the same value.
* @type {PIXI.ObservablePoint}
*/
get anchor() {
return this._anchor;
}
set anchor(anchor) {
this._anchor.copyFrom(anchor);
}
/* ---------------------------------------- */
/**
* The tint applied to the sprite. This is a hex value.
*
* A value of 0xFFFFFF will remove any tint effect.
* @type {number}
* @defaultValue 0xFFFFFF
*/
get tint() {
return this._tintColor.value;
}
set tint(tint) {
this._tintColor.setValue(tint);
const tintRGB = this._tintColor.toLittleEndianNumber();
if ( tintRGB === this._tintRGB ) return;
this._tintRGB = tintRGB;
this._tintAlphaDirty = true;
}
/* ---------------------------------------- */
/**
* The HTML source element for this SpriteMesh texture.
* @type {HTMLImageElement|HTMLVideoElement|null}
*/
get sourceElement() {
if ( !this.texture.valid ) return null;
return this.texture?.baseTexture.resource?.source || null;
}
/* ---------------------------------------- */
/**
* Is this SpriteMesh rendering a video texture?
* @type {boolean}
*/
get isVideo() {
const source = this.sourceElement;
return source?.tagName === "VIDEO";
}
/* ---------------------------------------- */
/**
* When the texture is updated, this event will fire to update the scale and frame.
* @protected
*/
_onTextureUpdate() {
this._textureID = this._textureTrimmedID = this._transformID = this._transformTrimmedID = -1;
if ( this._width ) this.scale.x = Math.sign(this.scale.x) * this._width / this._texture.orig.width;
if ( this._height ) this.scale.y = Math.sign(this.scale.y) * this._height / this._texture.orig.height;
// Alpha mode of the texture could have changed
this._tintAlphaDirty = true;
this.updateUvs();
}
/* ---------------------------------------- */
/**
* Called when the anchor position updates.
* @protected
*/
_onAnchorUpdate() {
this._textureID = this._textureTrimmedID = this._transformID = this._transformTrimmedID = -1;
}
/* ---------------------------------------- */
/**
* Update uvs and push vertices and uv buffers on GPU if necessary.
*/
updateUvs() {
if ( this._textureID !== this._texture._updateID ) {
let textureUvs;
if ( (this._paddingX !== 0) || (this._paddingY !== 0) ) {
const texture = this._texture;
const frame = SpriteMesh.#TEMP_RECT.copyFrom(texture.frame).pad(this._paddingX, this._paddingY);
textureUvs = this._textureUvs;
textureUvs.set(frame, texture.baseTexture, texture.rotate);
} else {
textureUvs = this._texture._uvs;
}
this.uvs.set(textureUvs.uvsFloat32);
this.#geometry.buffers[1].update();
}
}
/* ---------------------------------------- */
/**
* Initialize shader based on the shader class type.
* @param {typeof BaseSamplerShader} shaderClass The shader class
*/
setShaderClass(shaderClass) {
if ( !foundry.utils.isSubclass(shaderClass, BaseSamplerShader) ) {
throw new Error("SpriteMesh shader class must inherit from BaseSamplerShader.");
}
if ( this._shader.constructor === shaderClass ) return;
this._shader = shaderClass.create();
}
/* ---------------------------------------- */
/** @override */
updateTransform() {
super.updateTransform();
// We set tintAlphaDirty to true if the worldAlpha has changed
// It is needed to recompute the _cachedTint vec4 which is a combination of tint and alpha
if ( this.#worldAlpha !== this.worldAlpha ) {
this.#worldAlpha = this.worldAlpha;
this._tintAlphaDirty = true;
}
}
#worldAlpha;
/* ---------------------------------------- */
/**
* Calculates worldTransform * vertices, store it in vertexData.
*/
calculateVertices() {
if ( this._transformID === this.transform._worldID && this._textureID === this._texture._updateID ) return;
// Update uvs if necessary
this.updateUvs();
this._transformID = this.transform._worldID;
this._textureID = this._texture._updateID;
// Set the vertex data
const {a, b, c, d, tx, ty} = this.transform.worldTransform;
const orig = this._texture.orig;
const trim = this._texture.trim;
const padX = this._paddingX;
const padY = this._paddingY;
let w1; let w0; let h1; let h0;
if ( trim ) {
// If the sprite is trimmed and is not a tilingsprite then we need to add the extra
// space before transforming the sprite coords
w1 = trim.x - (this._anchor._x * orig.width) - padX;
w0 = w1 + trim.width + (2 * padX);
h1 = trim.y - (this._anchor._y * orig.height) - padY;
h0 = h1 + trim.height + (2 * padY);
}
else {
w1 = (-this._anchor._x * orig.width) - padX;
w0 = w1 + orig.width + (2 * padX);
h1 = (-this._anchor._y * orig.height) - padY;
h0 = h1 + orig.height + (2 * padY);
}
const vertexData = this.vertexData;
vertexData[0] = (a * w1) + (c * h1) + tx;
vertexData[1] = (d * h1) + (b * w1) + ty;
vertexData[2] = (a * w0) + (c * h1) + tx;
vertexData[3] = (d * h1) + (b * w0) + ty;
vertexData[4] = (a * w0) + (c * h0) + tx;
vertexData[5] = (d * h0) + (b * w0) + ty;
vertexData[6] = (a * w1) + (c * h0) + tx;
vertexData[7] = (d * h0) + (b * w1) + ty;
if ( this.roundPixels ) {
const r = PIXI.settings.RESOLUTION;
for ( let i = 0; i < vertexData.length; ++i ) vertexData[i] = Math.round(vertexData[i] * r) / r;
}
this.#geometry.buffers[0].update();
}
/* ---------------------------------------- */
/**
* Calculates worldTransform * vertices for a texture with a trim and stores the result in vertexTrimmedData.
*
* This is used to ensure that the true width and height of a trimmed texture is respected.
*/
calculateTrimmedVertices() {
if ( !this.vertexTrimmedData ) this.vertexTrimmedData = new Float32Array(8);
else if ( (this._transformTrimmedID === this.transform._worldID)
&& (this._textureTrimmedID === this._texture._updateID) ) return;
this._transformTrimmedID = this.transform._worldID;
this._textureTrimmedID = this._texture._updateID;
const texture = this._texture;
const vertexData = this.vertexTrimmedData;
const orig = texture.orig;
const anchor = this._anchor;
const padX = this._paddingX;
const padY = this._paddingY;
// Compute the new untrimmed bounds
const wt = this.transform.worldTransform;
const a = wt.a;
const b = wt.b;
const c = wt.c;
const d = wt.d;
const tx = wt.tx;
const ty = wt.ty;
const w1 = (-anchor._x * orig.width) - padX;
const w0 = w1 + orig.width + (2 * padX);
const h1 = (-anchor._y * orig.height) - padY;
const h0 = h1 + orig.height + (2 * padY);
vertexData[0] = (a * w1) + (c * h1) + tx;
vertexData[1] = (d * h1) + (b * w1) + ty;
vertexData[2] = (a * w0) + (c * h1) + tx;
vertexData[3] = (d * h1) + (b * w0) + ty;
vertexData[4] = (a * w0) + (c * h0) + tx;
vertexData[5] = (d * h0) + (b * w0) + ty;
vertexData[6] = (a * w1) + (c * h0) + tx;
vertexData[7] = (d * h0) + (b * w1) + ty;
if ( this.roundPixels ) {
const r = PIXI.settings.RESOLUTION;
for ( let i = 0; i < vertexData.length; ++i ) vertexData[i] = Math.round(vertexData[i] * r) / r;
}
}
/* ---------------------------------------- */
/** @override */
_render(renderer) {
const pluginName = this.pluginName;
if ( pluginName ) this.#renderBatched(renderer, pluginName);
else this.#renderDirect(renderer, this._shader);
}
/* ---------------------------------------- */
/**
* Render with batching.
* @param {PIXI.Renderer} renderer The renderer
* @param {string} pluginName The batch renderer
*/
#renderBatched(renderer, pluginName) {
this.calculateVertices();
this._updateBatchData();
const batchRenderer = renderer.plugins[pluginName];
renderer.batch.setObjectRenderer(batchRenderer);
batchRenderer.render(this._batchData);
}
/* ---------------------------------------- */
/**
* Render without batching.
* @param {PIXI.Renderer} renderer The renderer
* @param {BaseSamplerShader} shader The shader
*/
#renderDirect(renderer, shader) {
this.calculateVertices();
if ( this._tintAlphaDirty ) {
PIXI.Color.shared.setValue(this._tintColor)
.premultiply(this.worldAlpha, this.alphaMode > 0)
.toArray(this._cachedTint);
this._tintAlphaDirty = false;
}
shader._preRender(this, renderer);
renderer.batch.flush();
renderer.shader.bind(shader);
renderer.state.set(this.#state);
renderer.geometry.bind(this.#geometry, shader);
renderer.geometry.draw(PIXI.DRAW_MODES.TRIANGLES, 6, 0);
}
/* ---------------------------------------- */
/**
* Update the batch data object.
* @protected
*/
_updateBatchData() {
this._batchData._texture = this._texture;
this._batchData.worldAlpha = this.worldAlpha;
this._batchData._tintRGB = this._tintRGB;
this._batchData.blendMode = this.#state.blendMode;
}
/* ---------------------------------------- */
/** @override */
_calculateBounds() {
const trim = this._texture.trim;
const orig = this._texture.orig;
// First let's check to see if the current texture has a trim.
if ( !trim || ((trim.width === orig.width) && (trim.height === orig.height)) ) {
this.calculateVertices();
this._bounds.addQuad(this.vertexData);
}
else {
this.calculateTrimmedVertices();
this._bounds.addQuad(this.vertexTrimmedData);
}
}
/* ---------------------------------------- */
/** @override */
getLocalBounds(rect) {
// Fast local bounds computation if the sprite has no children!
if ( this.children.length === 0 ) {
if ( !this._localBounds ) this._localBounds = new PIXI.Bounds();
const padX = this._paddingX;
const padY = this._paddingY;
const orig = this._texture.orig;
this._localBounds.minX = (orig.width * -this._anchor._x) - padX;
this._localBounds.minY = (orig.height * -this._anchor._y) - padY;
this._localBounds.maxX = (orig.width * (1 - this._anchor._x)) + padX;
this._localBounds.maxY = (orig.height * (1 - this._anchor._y)) + padY;
if ( !rect ) {
if ( !this._localBoundsRect ) this._localBoundsRect = new PIXI.Rectangle();
rect = this._localBoundsRect;
}
return this._localBounds.getRectangle(rect);
}
return super.getLocalBounds(rect);
}
/* ---------------------------------------- */
/** @override */
containsPoint(point) {
const tempPoint = SpriteMesh.#TEMP_POINT;
this.worldTransform.applyInverse(point, tempPoint);
const width = this._texture.orig.width;
const height = this._texture.orig.height;
const x1 = -width * this.anchor.x;
let y1 = 0;
if ( (tempPoint.x >= x1) && (tempPoint.x < (x1 + width)) ) {
y1 = -height * this.anchor.y;
if ( (tempPoint.y >= y1) && (tempPoint.y < (y1 + height)) ) return true;
}
return false;
}
/* ---------------------------------------- */
/** @override */
destroy(options) {
super.destroy(options);
this.#geometry.dispose();
this.#geometry = null;
this._shader = null;
this.#state = null;
this.uvs = null;
this.indices = null;
this.vertexData = null;
this._texture.off("update", this._onTextureUpdate, this);
this._anchor = null;
const destroyTexture = (typeof options === "boolean" ? options : options?.texture);
if ( destroyTexture ) {
const destroyBaseTexture = (typeof options === "boolean" ? options : options?.baseTexture);
this._texture.destroy(!!destroyBaseTexture);
}
this._texture = null;
}
/* ---------------------------------------- */
/**
* Create a SpriteMesh from another source.
* You can specify texture options and a specific shader class derived from BaseSamplerShader.
* @param {string|PIXI.Texture|HTMLCanvasElement|HTMLVideoElement} source Source to create texture from.
* @param {object} [textureOptions] See {@link PIXI.BaseTexture}'s constructor for options.
* @param {typeof BaseSamplerShader} [shaderClass] The shader class to use. BaseSamplerShader by default.
* @returns {SpriteMesh}
*/
static from(source, textureOptions, shaderClass) {
const texture = source instanceof PIXI.Texture ? source : PIXI.Texture.from(source, textureOptions);
return new SpriteMesh(texture, shaderClass);
}
}


@@ -0,0 +1,50 @@
/**
* UnboundContainers behave like PIXI.Containers except that they are not bound to their parent's transforms.
* However, they normally propagate their own transformations to their children.
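* @example (illustrative)
* A minimal sketch; "transformedParent" and "marker" are assumptions for illustration.
* ```js
* const unbound = new UnboundContainer();
* transformedParent.addChild(unbound); // The parent's worldTransform is not propagated to this container
* unbound.addChild(marker);            // Children are still transformed by the container's own transform
* ```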
*/
class UnboundContainer extends PIXI.Container {
constructor(...args) {
super(...args);
// Replacing PIXI.Transform with an UnboundTransform
this.transform = new UnboundTransform();
}
}
/* -------------------------------------------- */
/**
* A custom Transform class which is not bound to the parent worldTransform.
* The localTransform works as usual.
*/
class UnboundTransform extends PIXI.Transform {
/** @override */
static IDENTITY = new UnboundTransform();
/* -------------------------------------------- */
/** @override */
updateTransform(parentTransform) {
const lt = this.localTransform;
if ( this._localID !== this._currentLocalID ) {
// Get the matrix values of the display object based on its transform properties
lt.a = this._cx * this.scale.x;
lt.b = this._sx * this.scale.x;
lt.c = this._cy * this.scale.y;
lt.d = this._sy * this.scale.y;
lt.tx = this.position.x - ((this.pivot.x * lt.a) + (this.pivot.y * lt.c));
lt.ty = this.position.y - ((this.pivot.x * lt.b) + (this.pivot.y * lt.d));
this._currentLocalID = this._localID;
// Force an update
this._parentID = -1;
}
if ( this._parentID !== parentTransform._worldID ) {
// We don't use the values from the parent transform. We're just updating IDs.
this._parentID = parentTransform._worldID;
this._worldID++;
}
}
}


@@ -0,0 +1,314 @@
/**
* @typedef {Object} CanvasAnimationAttribute
* @property {string} attribute The attribute name being animated
* @property {Object} parent The object within which the attribute is stored
* @property {number} to The destination value of the attribute
* @property {number} [from] An initial value of the attribute, otherwise parent[attribute] is used
* @property {number} [delta] The computed delta between to and from
* @property {number} [done] The amount of the total delta which has been animated
* @property {boolean} [color] Is this a color animation that applies to RGB channels
*/
/**
* @typedef {Object} CanvasAnimationOptions
* @property {PIXI.DisplayObject} [context] A DisplayObject which defines context to the PIXI.Ticker function
* @property {string|symbol} [name] A unique name which can be used to reference the in-progress animation
* @property {number} [duration] A duration in milliseconds over which the animation should occur
* @property {number} [priority] A priority in PIXI.UPDATE_PRIORITY which defines when the animation
* should be evaluated related to others
* @property {Function|string} [easing] An easing function used to translate animation time or the string name
* of a static member of the CanvasAnimation class
* @property {function(number, CanvasAnimationData)} [ontick] A callback function which fires after every frame
* @property {Promise} [wait] The animation isn't started until this promise resolves
*/
/**
* @typedef {Object} _CanvasAnimationData
* @property {Function} fn The animation function being executed each frame
* @property {number} time The current time of the animation, in milliseconds
* @property {CanvasAnimationAttribute[]} attributes The attributes being animated
* @property {number} state The current state of the animation (see {@link CanvasAnimation.STATES})
* @property {Promise} promise A Promise which resolves once the animation is complete
* @property {Function} resolve The resolution function, allowing animation to be ended early
* @property {Function} reject The rejection function, allowing animation to be ended early
*/
/**
* @typedef {_CanvasAnimationData & CanvasAnimationOptions} CanvasAnimationData
*/
/**
* A helper class providing utility methods for PIXI Canvas animation
*/
class CanvasAnimation {
/**
* The possible states of an animation.
* @enum {number}
*/
static get STATES() {
return this.#STATES;
}
static #STATES = Object.freeze({
/**
* An error occurred during waiting or running the animation.
*/
FAILED: -2,
/**
* The animation was terminated before it could complete.
*/
TERMINATED: -1,
/**
* Waiting for the wait promise before the animation is started.
*/
WAITING: 0,
/**
* The animation has been started and is running.
*/
RUNNING: 1,
/**
* The animation was completed without errors and without being terminated.
*/
COMPLETED: 2
});
/* -------------------------------------------- */
/**
* The ticker used for animations.
* @type {PIXI.Ticker}
*/
static get ticker() {
return canvas.app.ticker;
}
/* -------------------------------------------- */
/**
* Track an object of active animations by name, context, and function
* This allows a currently playing animation to be referenced and terminated
* @type {Record<string, CanvasAnimationData>}
*/
static animations = {};
/* -------------------------------------------- */
/**
* Apply an animation from the current value of some attribute to a new value
* Resolve a Promise once the animation has concluded and the attributes have reached their new target
*
* @param {CanvasAnimationAttribute[]} attributes An array of attributes to animate
* @param {CanvasAnimationOptions} options Additional options which customize the animation
*
* @returns {Promise<boolean>} A Promise which resolves to true once the animation has concluded
* or false if the animation was prematurely terminated
*
* @example Animate Token Position
* ```js
 * const attributes = [
* {
* parent: token,
* attribute: "x",
* to: 1000
* },
* {
* parent: token,
* attribute: "y",
* to: 2000
* }
* ];
* CanvasAnimation.animate(attributes, {duration:500});
* ```
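 *
 * @example Animate a named fade which can be referenced and terminated early (the `token` variable is assumed)
 * ```js
 * CanvasAnimation.animate([{parent: token, attribute: "alpha", to: 0}], {
 *   name: "my-fade",                 // A unique name which can be used to reference the animation later
 *   duration: 2000,
 *   easing: "easeInOutCosine"        // The name of a static easing method of CanvasAnimation
 * });
 * // Later, end the in-progress animation by its name
 * CanvasAnimation.terminateAnimation("my-fade");
 * ```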
*/
static async animate(attributes, {context=canvas.stage, name, duration=1000, easing, ontick, priority, wait}={}) {
priority ??= PIXI.UPDATE_PRIORITY.LOW + 1;
if ( typeof easing === "string" ) easing = this[easing];
// If an animation with this name already exists, terminate it
if ( name ) this.terminateAnimation(name);
// Define the animation and its animation function
attributes = attributes.map(a => {
a.from = a.from ?? a.parent[a.attribute];
a.delta = a.to - a.from;
a.done = 0;
// Special handling for color transitions
if ( a.to instanceof Color ) {
a.color = true;
a.from = Color.from(a.from);
}
return a;
});
if ( attributes.length && attributes.every(a => a.delta === 0) ) return;
const animation = {attributes, context, duration, easing, name, ontick, time: 0, wait,
state: CanvasAnimation.STATES.WAITING};
animation.fn = dt => CanvasAnimation.#animateFrame(dt, animation);
// Create a promise which manages the animation lifecycle
const promise = new Promise(async (resolve, reject) => {
animation.resolve = completed => {
if ( (animation.state === CanvasAnimation.STATES.WAITING)
|| (animation.state === CanvasAnimation.STATES.RUNNING) ) {
animation.state = completed ? CanvasAnimation.STATES.COMPLETED : CanvasAnimation.STATES.TERMINATED;
resolve(completed);
}
};
animation.reject = error => {
if ( (animation.state === CanvasAnimation.STATES.WAITING)
|| (animation.state === CanvasAnimation.STATES.RUNNING) ) {
animation.state = CanvasAnimation.STATES.FAILED;
reject(error);
}
};
try {
if ( wait instanceof Promise ) await wait;
if ( animation.state === CanvasAnimation.STATES.WAITING ) {
animation.state = CanvasAnimation.STATES.RUNNING;
this.ticker.add(animation.fn, context, priority);
}
} catch(err) {
animation.reject(err);
}
})
// Log any errors
.catch(err => console.error(err))
// Remove the animation once completed
.finally(() => {
this.ticker.remove(animation.fn, context);
if ( name && (this.animations[name] === animation) ) delete this.animations[name];
});
// Record the animation and return
if ( name ) {
animation.promise = promise;
this.animations[name] = animation;
}
return promise;
}
/* -------------------------------------------- */
/**
* Retrieve an animation currently in progress by its name
* @param {string} name The animation name to retrieve
 * @returns {CanvasAnimationData|undefined} The animation data, or undefined if no animation with that name is in progress
*/
static getAnimation(name) {
return this.animations[name];
}
/* -------------------------------------------- */
/**
* If an animation using a certain name already exists, terminate it
* @param {string} name The animation name to terminate
*/
static terminateAnimation(name) {
let animation = this.animations[name];
if (animation) animation.resolve(false);
}
/* -------------------------------------------- */
/**
* Cosine based easing with smooth in-out.
* @param {number} pt The proportional animation timing on [0,1]
* @returns {number} The eased animation progress on [0,1]
*/
static easeInOutCosine(pt) {
return (1 - Math.cos(Math.PI * pt)) * 0.5;
}
/* -------------------------------------------- */
/**
* Shallow ease out.
* @param {number} pt The proportional animation timing on [0,1]
* @returns {number} The eased animation progress on [0,1]
*/
static easeOutCircle(pt) {
return Math.sqrt(1 - Math.pow(pt - 1, 2));
}
/* -------------------------------------------- */
/**
* Shallow ease in.
* @param {number} pt The proportional animation timing on [0,1]
* @returns {number} The eased animation progress on [0,1]
*/
static easeInCircle(pt) {
return 1 - Math.sqrt(1 - Math.pow(pt, 2));
}
/* -------------------------------------------- */
/**
* Generic ticker function to implement the animation.
* This animation wrapper executes once per frame for the duration of the animation event.
* Once the animated attributes have converged to their targets, it resolves the original Promise.
* The user-provided ontick function runs each frame update to apply additional behaviors.
*
* @param {number} deltaTime The incremental time which has elapsed
* @param {CanvasAnimationData} animation The animation which is being performed
*/
static #animateFrame(deltaTime, animation) {
const {attributes, duration, ontick} = animation;
// Compute animation timing and progress
const dt = this.ticker.elapsedMS; // Delta time in MS
animation.time += dt; // Total time which has elapsed
const complete = animation.time >= duration;
const pt = complete ? 1 : animation.time / duration; // Proportion of total duration
const pa = animation.easing ? animation.easing(pt) : pt;
// Update each attribute
try {
for ( let a of attributes ) CanvasAnimation.#updateAttribute(a, pa);
if ( ontick ) ontick(dt, animation);
}
// Terminate the animation if any errors occur
catch(err) {
animation.reject(err);
}
// Resolve the original promise once the animation is complete
if ( complete ) animation.resolve(true);
}
/* -------------------------------------------- */
/**
* Update a single attribute according to its animation completion percentage
* @param {CanvasAnimationAttribute} attribute The attribute being animated
* @param {number} percentage The animation completion percentage
*/
static #updateAttribute(attribute, percentage) {
attribute.done = attribute.delta * percentage;
// Complete animation
if ( percentage === 1 ) {
attribute.parent[attribute.attribute] = attribute.to;
return;
}
// Color animation
if ( attribute.color ) {
attribute.parent[attribute.attribute] = attribute.from.mix(attribute.to, percentage);
return;
}
// Numeric attribute
attribute.parent[attribute.attribute] = attribute.from + attribute.done;
}
}


@@ -0,0 +1,105 @@
/**
* A generic helper for drawing a standard Control Icon
* @type {PIXI.Container}
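 *
 * A usage sketch; the texture path and the parent container are assumptions for illustration:
 * @example Draw a control icon with an elevation tooltip
 * ```js
 * const icon = new ControlIcon({texture: "icons/svg/light.svg", size: 40, elevation: 10});
 * someContainer.addChild(icon);           // Add to any PIXI container on the canvas
 * icon.refresh({borderVisible: true});    // Show the hover border
 * ```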
*/
class ControlIcon extends PIXI.Container {
constructor({texture, size=40, borderColor=0xFF5500, tint=null, elevation=0}={}, ...args) {
super(...args);
// Define arguments
this.iconSrc = texture;
this.size = size;
this.rect = [-2, -2, size+4, size+4];
this.borderColor = borderColor;
/**
* The color of the icon tint, if any
* @type {number|null}
*/
this.tintColor = tint;
// Define hit area
this.eventMode = "static";
this.interactiveChildren = false;
this.hitArea = new PIXI.Rectangle(...this.rect);
this.cursor = "pointer";
// Background
this.bg = this.addChild(new PIXI.Graphics());
this.bg.clear().beginFill(0x000000, 0.4).lineStyle(2, 0x000000, 1.0).drawRoundedRect(...this.rect, 5).endFill();
// Icon
this.icon = this.addChild(new PIXI.Sprite());
// Border
this.border = this.addChild(new PIXI.Graphics());
this.border.visible = false;
// Elevation
this.tooltip = this.addChild(new PreciseText());
this.tooltip.visible = false;
// Set the initial elevation
this.elevation = elevation;
// Draw asynchronously
this.draw();
}
/* -------------------------------------------- */
/**
* The elevation of the ControlIcon, which is displayed in its tooltip text.
* @type {number}
*/
get elevation() {
return this.#elevation;
}
set elevation(value) {
if ( (typeof value !== "number") || !Number.isFinite(value) ) {
throw new Error("ControlIcon#elevation must be a finite numeric value.");
}
if ( value === this.#elevation ) return;
this.#elevation = value;
this.tooltip.text = `${value > 0 ? "+" : ""}${value} ${canvas.grid.units}`.trim();
this.tooltip.visible = value !== 0;
}
#elevation = 0;
/* -------------------------------------------- */
/**
* Initial drawing of the ControlIcon
* @returns {Promise<ControlIcon>}
*/
async draw() {
if ( this.destroyed ) return this;
this.texture = this.texture ?? await loadTexture(this.iconSrc);
this.icon.texture = this.texture;
this.icon.width = this.icon.height = this.size;
this.tooltip.style = CONFIG.canvasTextStyle;
this.tooltip.anchor.set(0.5, 1);
this.tooltip.position.set(this.size / 2, -12);
return this.refresh();
}
/* -------------------------------------------- */
/**
* Incremental refresh for ControlIcon appearance.
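 * @param {object} [options] Options which modify the refresh
 * @param {boolean} [options.visible] Should the icon be visible?
 * @param {number} [options.iconColor] An optional tint color to apply to the icon
 * @param {number} [options.borderColor] An optional color to apply to the border
 * @param {boolean} [options.borderVisible] Should the border be visible?
 * @returns {ControlIcon} This ControlIcon instance, for chaining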
*/
refresh({visible, iconColor, borderColor, borderVisible}={}) {
if ( iconColor !== undefined ) this.tintColor = iconColor;
this.icon.tint = this.tintColor ?? 0xFFFFFF;
if ( borderColor !== undefined ) this.borderColor = borderColor;
this.border.clear().lineStyle(2, this.borderColor, 1.0).drawRoundedRect(...this.rect, 5).endFill();
if ( borderVisible !== undefined ) this.border.visible = borderVisible;
if ( visible !== undefined && (this.visible !== visible) ) {
this.visible = visible;
MouseInteractionManager.emulateMoveEvent();
}
return this;
}
}


@@ -0,0 +1,885 @@
/**
* Handle mouse interaction events for a Canvas object.
* There are three phases of events: hover, click, and drag
*
* Hover Events:
* _handlePointerOver
* action: hoverIn
* _handlePointerOut
* action: hoverOut
*
* Left Click and Double-Click
* _handlePointerDown
* action: clickLeft
* action: clickLeft2
* action: unclickLeft
*
* Right Click and Double-Click
* _handleRightDown
* action: clickRight
* action: clickRight2
* action: unclickRight
*
* Drag and Drop
* _handlePointerMove
* action: dragLeftStart
* action: dragRightStart
* action: dragLeftMove
* action: dragRightMove
* _handlePointerUp
* action: dragLeftDrop
* action: dragRightDrop
* _handleDragCancel
* action: dragLeftCancel
* action: dragRightCancel
*/
class MouseInteractionManager {
constructor(object, layer, permissions={}, callbacks={}, options={}) {
this.object = object;
this.layer = layer;
this.permissions = permissions;
this.callbacks = callbacks;
/**
* Interaction options which configure handling workflows
     * @type {{target: string, dragResistance: number}}
*/
this.options = options;
/**
* The current interaction state
* @type {number}
*/
this.state = this.states.NONE;
/**
* Bound interaction data object to populate with custom data.
* @type {Record<string, any>}
*/
this.interactionData = {};
/**
* The drag handling time
* @type {number}
*/
this.dragTime = 0;
/**
* The time of the last left-click event
* @type {number}
*/
this.lcTime = 0;
/**
* The time of the last right-click event
* @type {number}
*/
this.rcTime = 0;
/**
* A flag for whether we are right-click dragging
* @type {boolean}
*/
this._dragRight = false;
/**
* An optional ControlIcon instance for the object
* @type {ControlIcon|null}
*/
this.controlIcon = this.options.target ? this.object[this.options.target] : null;
/**
* The view id pertaining to the PIXI Application.
* If not provided, default to canvas.app.view.id
* @type {string}
*/
this.viewId = (this.options.application ?? canvas.app).view.id;
}
/**
* The client position of the last left/right-click.
* @type {PIXI.Point}
*/
lastClick = new PIXI.Point();
/**
* Bound handlers which can be added and removed
* @type {Record<string, Function>}
*/
#handlers = {};
/**
* Enumerate the states of a mouse interaction workflow.
* 0: NONE - the object is inactive
* 1: HOVER - the mouse is hovered over the object
* 2: CLICKED - the object is clicked
* 3: GRABBED - the object is grabbed
* 4: DRAG - the object is being dragged
* 5: DROP - the object is being dropped
* @enum {number}
*/
static INTERACTION_STATES = {
NONE: 0,
HOVER: 1,
CLICKED: 2,
GRABBED: 3,
DRAG: 4,
DROP: 5
};
/**
   * Enumerate the possible outcomes of an event handler.
   * -2: SKIPPED - the handler has been skipped by previous logic
   * -1: DISALLOWED - the handler has disallowed further processing
   * 1: REFUSED - the handler callback has been processed and refuses further processing
   * 2: ACCEPTED - the handler callback has been processed and accepts further processing
* @enum {number}
*/
static #HANDLER_OUTCOME = {
SKIPPED: -2,
DISALLOWED: -1,
REFUSED: 1,
ACCEPTED: 2
};
/**
* The maximum number of milliseconds between two clicks to be considered a double-click.
* @type {number}
*/
static DOUBLE_CLICK_TIME_MS = 250;
/**
* The maximum number of pixels between two clicks to be considered a double-click.
* @type {number}
*/
static DOUBLE_CLICK_DISTANCE_PX = 5;
/**
* The number of milliseconds of mouse click depression to consider it a long press.
* @type {number}
*/
static LONG_PRESS_DURATION_MS = 500;
/**
* Global timeout for the long-press event.
* @type {number|null}
*/
static longPressTimeout = null;
/* -------------------------------------------- */
/**
   * Emulate a pointermove event. This needs to be called whenever an object with static event mode, or any of
   * its parents, is transformed or has its visibility changed.
*/
static emulateMoveEvent() {
MouseInteractionManager.#emulateMoveEvent();
}
static #emulateMoveEvent = foundry.utils.throttle(() => {
const events = canvas.app.renderer.events;
const rootPointerEvent = events.rootPointerEvent;
if ( !events.supportsPointerEvents ) return;
if ( events.supportsTouchEvents && (rootPointerEvent.pointerType === "touch") ) return;
events.domElement.dispatchEvent(new PointerEvent("pointermove", {
pointerId: rootPointerEvent.pointerId,
pointerType: rootPointerEvent.pointerType,
isPrimary: rootPointerEvent.isPrimary,
clientX: rootPointerEvent.clientX,
clientY: rootPointerEvent.clientY,
pageX: rootPointerEvent.pageX,
pageY: rootPointerEvent.pageY,
altKey: rootPointerEvent.altKey,
ctrlKey: rootPointerEvent.ctrlKey,
metaKey: rootPointerEvent.metaKey,
shiftKey: rootPointerEvent.shiftKey
}));
}, 10);
/* -------------------------------------------- */
/**
* Get the target.
* @type {PIXI.DisplayObject}
*/
get target() {
return this.options.target ? this.object[this.options.target] : this.object;
}
/**
* Is this mouse manager in a dragging state?
* @type {boolean}
*/
get isDragging() {
return this.state >= this.states.DRAG;
}
/* -------------------------------------------- */
/**
* Activate interactivity for the handled object
*/
activate() {
// Remove existing listeners
this.state = this.states.NONE;
this.target.removeAllListeners();
// Create bindings for all handler functions
this.#handlers = {
pointerover: this.#handlePointerOver.bind(this),
pointerout: this.#handlePointerOut.bind(this),
pointerdown: this.#handlePointerDown.bind(this),
pointermove: this.#handlePointerMove.bind(this),
pointerup: this.#handlePointerUp.bind(this),
contextmenu: this.#handleDragCancel.bind(this)
};
// Activate hover events to start the workflow
this.#activateHoverEvents();
// Set the target as interactive
this.target.eventMode = "static";
return this;
}
/* -------------------------------------------- */
/**
* Test whether the current user has permission to perform a step of the workflow
* @param {string} action The action being attempted
* @param {Event|PIXI.FederatedEvent} event The event being handled
* @returns {boolean} Can the action be performed?
*/
can(action, event) {
const fn = this.permissions[action];
if ( typeof fn === "boolean" ) return fn;
if ( fn instanceof Function ) return fn.call(this.object, game.user, event);
return true;
}
/* -------------------------------------------- */
/**
* Execute a callback function associated with a certain action in the workflow
* @param {string} action The action being attempted
* @param {Event|PIXI.FederatedEvent} event The event being handled
* @param {...*} args Additional callback arguments.
* @returns {boolean} A boolean which may indicate that the event was handled by the callback.
* Events which do not specify a callback are assumed to have been handled as no-op.
*/
callback(action, event, ...args) {
const fn = this.callbacks[action];
if ( fn instanceof Function ) {
this.#assignInteractionData(event);
return fn.call(this.object, event, ...args) ?? true;
}
return true;
}
/* -------------------------------------------- */
/**
* A reference to the possible interaction states which can be observed
* @returns {Record<string, number>}
*/
get states() {
return this.constructor.INTERACTION_STATES;
}
/* -------------------------------------------- */
/**
* A reference to the possible interaction states which can be observed
* @returns {Record<string, number>}
*/
get handlerOutcomes() {
return MouseInteractionManager.#HANDLER_OUTCOME;
}
/* -------------------------------------------- */
/* Listener Activation and Deactivation */
/* -------------------------------------------- */
/**
* Activate a set of listeners which handle hover events on the target object
*/
#activateHoverEvents() {
// Disable and re-register mouseover and mouseout handlers
this.target.off("pointerover", this.#handlers.pointerover).on("pointerover", this.#handlers.pointerover);
this.target.off("pointerout", this.#handlers.pointerout).on("pointerout", this.#handlers.pointerout);
}
/* -------------------------------------------- */
/**
* Activate a new set of listeners for click events on the target object.
*/
#activateClickEvents() {
this.#deactivateClickEvents();
this.target.on("pointerdown", this.#handlers.pointerdown);
this.target.on("pointerup", this.#handlers.pointerup);
this.target.on("pointerupoutside", this.#handlers.pointerup);
}
/* -------------------------------------------- */
/**
* Deactivate event listeners for click events on the target object.
*/
#deactivateClickEvents() {
this.target.off("pointerdown", this.#handlers.pointerdown);
this.target.off("pointerup", this.#handlers.pointerup);
this.target.off("pointerupoutside", this.#handlers.pointerup);
}
/* -------------------------------------------- */
/**
* Activate events required for handling a drag-and-drop workflow
*/
#activateDragEvents() {
this.#deactivateDragEvents();
this.layer.on("pointermove", this.#handlers.pointermove);
if ( !this._dragRight ) {
canvas.app.view.addEventListener("contextmenu", this.#handlers.contextmenu, {capture: true});
}
}
/* -------------------------------------------- */
/**
* Deactivate events required for handling drag-and-drop workflow.
* @param {boolean} [silent] Set to true to activate the silent mode.
*/
#deactivateDragEvents(silent) {
this.layer.off("pointermove", this.#handlers.pointermove);
canvas.app.view.removeEventListener("contextmenu", this.#handlers.contextmenu, {capture: true});
}
/* -------------------------------------------- */
/* Hover In and Hover Out */
/* -------------------------------------------- */
/**
* Handle mouse-over events which activate downstream listeners and do not stop propagation.
* @param {PIXI.FederatedEvent} event
*/
#handlePointerOver(event) {
const action = "hoverIn";
if ( (this.state !== this.states.NONE) || (event.nativeEvent && (event.nativeEvent.target.id !== this.viewId)) ) {
return this.#debug(action, event, this.handlerOutcomes.SKIPPED);
}
if ( !this.can(action, event) ) return this.#debug(action, event, this.handlerOutcomes.DISALLOWED);
// Invoke the callback function
this.state = this.states.HOVER;
if ( this.callback(action, event) === false ) {
this.state = this.states.NONE;
return this.#debug(action, event, this.handlerOutcomes.REFUSED);
}
// Activate click events
this.#activateClickEvents();
return this.#debug(action, event);
}
/* -------------------------------------------- */
/**
* Handle mouse-out events which terminate hover workflows and do not stop propagation.
* @param {PIXI.FederatedEvent} event
*/
#handlePointerOut(event) {
if ( event.pointerType === "touch" ) return; // Ignore Touch events
const action = "hoverOut";
if ( !this.state.between(this.states.HOVER, this.states.CLICKED)
|| (event.nativeEvent && (event.nativeEvent.target.id !== this.viewId) ) ) {
return this.#debug(action, event, this.handlerOutcomes.SKIPPED);
}
if ( !this.can(action, event) ) return this.#debug(action, event, this.handlerOutcomes.DISALLOWED);
// Was the mouse-out event handled by the callback?
const priorState = this.state;
this.state = this.states.NONE;
if ( this.callback(action, event) === false ) {
this.state = priorState;
return this.#debug(action, event, this.handlerOutcomes.REFUSED);
}
// Deactivate click events
this.#deactivateClickEvents();
return this.#debug(action, event);
}
/* -------------------------------------------- */
/**
* Handle mouse-down events which activate downstream listeners.
* @param {PIXI.FederatedEvent} event
*/
#handlePointerDown(event) {
if ( event.button === 0 ) return this.#handleLeftDown(event);
if ( event.button === 2 ) return this.#handleRightDown(event);
}
/* -------------------------------------------- */
/* Left Click and Double Click */
/* -------------------------------------------- */
/**
* Handle left-click mouse-down events.
* Stop further propagation only if the event is allowed by either single or double-click.
* @param {PIXI.FederatedEvent} event
*/
#handleLeftDown(event) {
if ( !this.state.between(this.states.HOVER, this.states.DRAG) ) return;
// Determine double vs single click
const isDouble = ((event.timeStamp - this.lcTime) <= MouseInteractionManager.DOUBLE_CLICK_TIME_MS)
&& (Math.hypot(event.clientX - this.lastClick.x, event.clientY - this.lastClick.y)
<= MouseInteractionManager.DOUBLE_CLICK_DISTANCE_PX);
this.lcTime = isDouble ? 0 : event.timeStamp;
this.lastClick.set(event.clientX, event.clientY);
// Set the origin point from layer local position
this.interactionData.origin = event.getLocalPosition(this.layer);
// Activate a timeout to detect long presses
if ( !isDouble ) {
clearTimeout(this.constructor.longPressTimeout);
this.constructor.longPressTimeout = setTimeout(() => {
this.#handleLongPress(event, this.interactionData.origin);
}, MouseInteractionManager.LONG_PRESS_DURATION_MS);
}
// Dispatch to double and single-click handlers
if ( isDouble && this.can("clickLeft2", event) ) return this.#handleClickLeft2(event);
else return this.#handleClickLeft(event);
}
/* -------------------------------------------- */
/**
* Handle mouse-down which trigger a single left-click workflow.
* @param {PIXI.FederatedEvent} event
*/
#handleClickLeft(event) {
const action = "clickLeft";
if ( !this.can(action, event) ) return this.#debug(action, event, this.handlerOutcomes.DISALLOWED);
this._dragRight = false;
// Was the left-click event handled by the callback?
const priorState = this.state;
if ( this.state === this.states.HOVER ) this.state = this.states.CLICKED;
canvas.currentMouseManager = this;
if ( this.callback(action, event) === false ) {
this.state = priorState;
canvas.currentMouseManager = null;
return this.#debug(action, event, this.handlerOutcomes.REFUSED);
}
// Activate drag event handlers
if ( (this.state === this.states.CLICKED) && this.can("dragStart", event) ) {
this.state = this.states.GRABBED;
this.#activateDragEvents();
}
return this.#debug(action, event);
}
/* -------------------------------------------- */
/**
* Handle mouse-down which trigger a single left-click workflow.
* @param {PIXI.FederatedEvent} event
*/
#handleClickLeft2(event) {
const action = "clickLeft2";
if ( this.callback(action, event) === false ) return this.#debug(action, event, this.handlerOutcomes.REFUSED);
return this.#debug(action, event);
}
/* -------------------------------------------- */
/**
* Handle a long mouse depression to trigger a long-press workflow.
* @param {PIXI.FederatedEvent} event The mousedown event.
* @param {PIXI.Point} origin The original canvas coordinates of the mouse click
*/
#handleLongPress(event, origin) {
const action = "longPress";
if ( this.callback(action, event, origin) === false ) {
return this.#debug(action, event, this.handlerOutcomes.REFUSED);
}
return this.#debug(action, event);
}
/* -------------------------------------------- */
/* Right Click and Double Click */
/* -------------------------------------------- */
/**
* Handle right-click mouse-down events.
* Stop further propagation only if the event is allowed by either single or double-click.
* @param {PIXI.FederatedEvent} event
*/
#handleRightDown(event) {
if ( !this.state.between(this.states.HOVER, this.states.DRAG) ) return;
// Determine double vs single click
const isDouble = ((event.timeStamp - this.rcTime) <= MouseInteractionManager.DOUBLE_CLICK_TIME_MS)
&& (Math.hypot(event.clientX - this.lastClick.x, event.clientY - this.lastClick.y)
<= MouseInteractionManager.DOUBLE_CLICK_DISTANCE_PX);
this.rcTime = isDouble ? 0 : event.timeStamp;
this.lastClick.set(event.clientX, event.clientY);
// Update event data
this.interactionData.origin = event.getLocalPosition(this.layer);
// Dispatch to double and single-click handlers
if ( isDouble && this.can("clickRight2", event) ) return this.#handleClickRight2(event);
else return this.#handleClickRight(event);
}
/* -------------------------------------------- */
/**
* Handle single right-click actions.
* @param {PIXI.FederatedEvent} event
*/
#handleClickRight(event) {
const action = "clickRight";
if ( !this.can(action, event) ) return this.#debug(action, event, this.handlerOutcomes.DISALLOWED);
this._dragRight = true;
// Was the right-click event handled by the callback?
const priorState = this.state;
if ( this.state === this.states.HOVER ) this.state = this.states.CLICKED;
canvas.currentMouseManager = this;
if ( this.callback(action, event) === false ) {
this.state = priorState;
canvas.currentMouseManager = null;
return this.#debug(action, event, this.handlerOutcomes.REFUSED);
}
// Activate drag event handlers
if ( (this.state === this.states.CLICKED) && this.can("dragRight", event) ) {
this.state = this.states.GRABBED;
this.#activateDragEvents();
}
return this.#debug(action, event);
}
/* -------------------------------------------- */
/**
* Handle double right-click actions.
* @param {PIXI.FederatedEvent} event
*/
#handleClickRight2(event) {
const action = "clickRight2";
if ( this.callback(action, event) === false ) return this.#debug(action, event, this.handlerOutcomes.REFUSED);
return this.#debug(action, event);
}
/* -------------------------------------------- */
/* Drag and Drop */
/* -------------------------------------------- */
/**
* Handle mouse movement during a drag workflow
* @param {PIXI.FederatedEvent} event
*/
#handlePointerMove(event) {
if ( !this.state.between(this.states.GRABBED, this.states.DRAG) ) return;
// Limit dragging to 60 updates per second
const now = Date.now();
if ( (now - this.dragTime) < canvas.app.ticker.elapsedMS ) return;
this.dragTime = now;
// Update interaction data
const data = this.interactionData;
data.destination = event.getLocalPosition(this.layer, data.destination);
// Handling rare case when origin is not defined
// FIXME: The root cause should be identified and this code removed
if ( data.origin === undefined ) data.origin = new PIXI.Point().copyFrom(data.destination);
// Begin a new drag event
if ( this.state !== this.states.DRAG ) {
const dx = data.destination.x - data.origin.x;
const dy = data.destination.y - data.origin.y;
const dz = Math.hypot(dx, dy);
const r = this.options.dragResistance || (canvas.dimensions.size / 4);
if ( dz >= r ) this.#handleDragStart(event);
}
// Continue a drag event
if ( this.state === this.states.DRAG ) this.#handleDragMove(event);
}
/* -------------------------------------------- */
/**
* Handle the beginning of a new drag start workflow, moving all controlled objects on the layer
* @param {PIXI.FederatedEvent} event
*/
#handleDragStart(event) {
clearTimeout(this.constructor.longPressTimeout);
const action = this._dragRight ? "dragRightStart" : "dragLeftStart";
if ( !this.can(action, event) ) {
this.#debug(action, event, this.handlerOutcomes.DISALLOWED);
this.cancel(event);
return;
}
this.state = this.states.DRAG;
if ( this.callback(action, event) === false ) {
this.state = this.states.GRABBED;
return this.#debug(action, event, this.handlerOutcomes.REFUSED);
}
return this.#debug(action, event, this.handlerOutcomes.ACCEPTED);
}
/* -------------------------------------------- */
/**
* Handle the continuation of a drag workflow, moving all controlled objects on the layer
* @param {PIXI.FederatedEvent} event
*/
#handleDragMove(event) {
clearTimeout(this.constructor.longPressTimeout);
const action = this._dragRight ? "dragRightMove" : "dragLeftMove";
if ( !this.can(action, event) ) return this.#debug(action, event, this.handlerOutcomes.DISALLOWED);
const handled = this.callback(action, event);
return this.#debug(action, event, handled ? this.handlerOutcomes.ACCEPTED : this.handlerOutcomes.REFUSED);
}
/* -------------------------------------------- */
/**
* Handle mouse up events which may optionally conclude a drag workflow
* @param {PIXI.FederatedEvent} event
*/
#handlePointerUp(event) {
clearTimeout(this.constructor.longPressTimeout);
// If this is a touch hover event, treat it as a drag
if ( (this.state === this.states.HOVER) && (event.pointerType === "touch") ) {
this.state = this.states.DRAG;
}
// Save prior state
const priorState = this.state;
// Update event data
this.interactionData.destination = event.getLocalPosition(this.layer, this.interactionData.destination);
if ( this.state >= this.states.DRAG ) {
event.stopPropagation();
if ( event.type.startsWith("right") && !this._dragRight ) return;
if ( this.state === this.states.DRAG ) this.#handleDragDrop(event);
}
// Continue a multi-click drag workflow
if ( event.defaultPrevented ) {
this.state = priorState;
return this.#debug("mouseUp", event, this.handlerOutcomes.SKIPPED);
}
// Handle the unclick event
this.#handleUnclick(event);
// Cancel the drag workflow
this.#handleDragCancel(event);
}
/* -------------------------------------------- */
/**
* Handle the conclusion of a drag workflow, placing all dragged objects back on the layer
* @param {PIXI.FederatedEvent} event
*/
#handleDragDrop(event) {
const action = this._dragRight ? "dragRightDrop" : "dragLeftDrop";
if ( !this.can(action, event) ) return this.#debug(action, event, this.handlerOutcomes.DISALLOWED);
// Was the drag-drop event handled by the callback?
this.state = this.states.DROP;
if ( this.callback(action, event) === false ) {
this.state = this.states.DRAG;
return this.#debug(action, event, this.handlerOutcomes.REFUSED);
}
// Update the workflow state
return this.#debug(action, event);
}
/* -------------------------------------------- */
/**
* Handle the cancellation of a drag workflow, resetting back to the original state
* @param {PIXI.FederatedEvent} event
*/
#handleDragCancel(event) {
this.cancel(event);
}
/* -------------------------------------------- */
/**
* Handle the unclick event
* @param {PIXI.FederatedEvent} event
*/
#handleUnclick(event) {
const action = event.button === 0 ? "unclickLeft" : "unclickRight";
if ( !this.state.between(this.states.CLICKED, this.states.GRABBED) ) {
return this.#debug(action, event, this.handlerOutcomes.SKIPPED);
}
if ( this.callback(action, event) === false ) return this.#debug(action, event, this.handlerOutcomes.REFUSED);
return this.#debug(action, event);
}
/* -------------------------------------------- */
/**
   * A public method to directly handle an event with this manager, according to its type.
* Note: drag events are not handled.
* @param {PIXI.FederatedEvent} event
* @returns {boolean} Has the event been processed?
*/
handleEvent(event) {
switch ( event.type ) {
case "pointerover":
this.#handlePointerOver(event);
break;
case "pointerout":
this.#handlePointerOut(event);
break;
case "pointerup":
this.#handlePointerUp(event);
break;
case "pointerdown":
this.#handlePointerDown(event);
break;
default:
return false;
}
return true;
}
/* -------------------------------------------- */
/**
* A public method to cancel a current interaction workflow from this manager.
* @param {PIXI.FederatedEvent} [event] The event that initiates the cancellation
*/
cancel(event) {
const eventSystem = canvas.app.renderer.events;
const rootBoundary = eventSystem.rootBoundary;
const createEvent = !event;
if ( createEvent ) {
event = rootBoundary.createPointerEvent(eventSystem.pointer, "pointermove", this.target);
event.defaultPrevented = false;
event.path = null;
}
try {
const action = this._dragRight ? "dragRightCancel" : "dragLeftCancel";
const endState = this.state;
if ( endState <= this.states.HOVER ) return this.#debug(action, event, this.handlerOutcomes.SKIPPED);
// Dispatch a cancellation callback
if ( endState >= this.states.DRAG ) {
if ( this.callback(action, event) === false ) return this.#debug(action, event, this.handlerOutcomes.REFUSED);
}
// Continue a multi-click drag workflow if the default event was prevented in the callback
if ( event.defaultPrevented ) {
this.state = this.states.DRAG;
return this.#debug(action, event, this.handlerOutcomes.SKIPPED);
}
// Reset the interaction data and state and deactivate drag events
this.interactionData = {};
this.state = this.states.HOVER;
canvas.currentMouseManager = null;
clearTimeout(this.constructor.longPressTimeout);
this.#deactivateDragEvents();
this.#debug(action, event);
// Check hover state and hover out if necessary
if ( !rootBoundary.trackingData(event.pointerId).overTargets?.includes(this.target) ) {
this.#handlePointerOut(event);
}
} finally {
if ( createEvent ) rootBoundary.freeEvent(event);
}
}
/* -------------------------------------------- */
/**
* Display a debug message in the console (if mouse interaction debug is activated).
* @param {string} action Which action to display?
* @param {Event|PIXI.FederatedEvent} event Which event to display?
* @param {number} [outcome=this.handlerOutcomes.ACCEPTED] The handler outcome.
*/
#debug(action, event, outcome=this.handlerOutcomes.ACCEPTED) {
if ( CONFIG.debug.mouseInteraction ) {
const name = this.object.constructor.name;
const targetName = event.target?.constructor.name;
const {eventPhase, type, button} = event;
const state = Object.keys(this.states)[this.state.toString()];
let msg = `${name} | ${action} | state:${state} | target:${targetName} | phase:${eventPhase} | type:${type} | `
+ `btn:${button} | skipped:${outcome <= -2} | allowed:${outcome > -1} | handled:${outcome > 1}`;
console.debug(msg);
}
}
/* -------------------------------------------- */
/**
* Reset the mouse manager.
* @param {object} [options]
* @param {boolean} [options.interactionData=true] Reset the interaction data?
* @param {boolean} [options.state=true] Reset the state?
*/
reset({interactionData=true, state=true}={}) {
if ( CONFIG.debug.mouseInteraction ) {
console.debug(`${this.object.constructor.name} | Reset | interactionData:${interactionData} | state:${state}`);
}
if ( interactionData ) this.interactionData = {};
if ( state ) this.state = MouseInteractionManager.INTERACTION_STATES.NONE;
}
/* -------------------------------------------- */
/**
* Assign the interaction data to the event.
* @param {PIXI.FederatedEvent} event
*/
#assignInteractionData(event) {
this.interactionData.object = this.object;
event.interactionData = this.interactionData;
// Add deprecated event data references
for ( const k of Object.keys(this.interactionData) ) {
if ( event.hasOwnProperty(k) ) continue;
/**
* @deprecated since v11
* @ignore
*/
Object.defineProperty(event, k, {
get() {
const msg = `event.data.${k} is deprecated in favor of event.interactionData.${k}.`;
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return this.interactionData[k];
},
set(value) {
const msg = `event.data.${k} is deprecated in favor of event.interactionData.${k}.`;
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
this.interactionData[k] = value;
}
});
}
}
}


@@ -0,0 +1,59 @@
/**
* @typedef {object} PingOptions
* @property {number} [duration=900] The duration of the animation in milliseconds.
* @property {number} [size=128] The size of the ping graphic.
* @property {string} [color=#ff6400] The color of the ping graphic.
* @property {string} [name] The name for the ping animation to pass to {@link CanvasAnimation.animate}.
*/
/**
* A class to manage a user ping on the canvas.
* @param {Point} origin The canvas coordinates of the origin of the ping.
* @param {PingOptions} [options] Additional options to configure the ping animation.
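 *
 * A minimal illustrative subclass; the drawing logic and the parent container used here are assumptions for
 * illustration, not core behavior:
 * @example
 * ```js
 * class FadePing extends Ping {
 *   async animate() {
 *     this.removeChildren();
 *     this._ring = this.addChild(new PIXI.Graphics());
 *     return super.animate();
 *   }
 *   _animateFrame(dt, animation) {
 *     const alpha = Math.max(1 - (animation.time / this.options.duration), 0);   // Fade out over the duration
 *     this._ring.clear().lineStyle(6, this._color, alpha).drawCircle(0, 0, this.options.size / 2);
 *   }
 * }
 * const ping = canvas.stage.addChild(new FadePing({x: 1000, y: 1000}));
 * await ping.animate();   // Resolves true when the animation completes; the ping destroys itself afterwards
 * ```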
*/
class Ping extends PIXI.Container {
constructor(origin, options={}) {
super();
this.x = origin.x;
this.y = origin.y;
this.options = foundry.utils.mergeObject({duration: 900, size: 128, color: "#ff6400"}, options);
this._color = Color.from(this.options.color);
}
/* -------------------------------------------- */
/** @inheritdoc */
destroy(options={}) {
options.children = true;
super.destroy(options);
}
/* -------------------------------------------- */
/**
* Start the ping animation.
* @returns {Promise<boolean>} Returns true if the animation ran to completion, false otherwise.
*/
async animate() {
const completed = await CanvasAnimation.animate([], {
context: this,
name: this.options.name,
duration: this.options.duration,
ontick: this._animateFrame.bind(this)
});
this.destroy();
return completed;
}
/* -------------------------------------------- */
/**
* On each tick, advance the animation.
* @param {number} dt The number of ms that elapsed since the previous frame.
* @param {CanvasAnimationData} animation The animation state.
* @protected
*/
_animateFrame(dt, animation) {
throw new Error("Subclasses of Ping must implement the _animateFrame method.");
}
}


@@ -0,0 +1,122 @@
/**
* A type of ping that points to a specific location.
* @param {Point} origin The canvas coordinates of the origin of the ping.
* @param {PingOptions} [options] Additional options to configure the ping animation.
* @extends Ping
*/
class ChevronPing extends Ping {
constructor(origin, options={}) {
super(origin, options);
this._r = (this.options.size / 2) * .75;
// The inner ring is 3/4s the size of the outer.
this._rInner = this._r * .75;
// The animation is split into three stages. First, the chevron fades in and moves downwards, then the rings fade
// in, then everything fades out as the chevron moves back up.
// Store the 1/4 time slice.
this._t14 = this.options.duration * .25;
// Store the 1/2 time slice.
this._t12 = this.options.duration * .5;
// Store the 3/4s time slice.
this._t34 = this._t14 * 3;
}
/**
* The path to the chevron texture.
* @type {string}
* @private
*/
static _CHEVRON_PATH = "icons/pings/chevron.webp";
/* -------------------------------------------- */
/** @inheritdoc */
async animate() {
this.removeChildren();
this.addChild(...this._createRings());
this._chevron = await this._loadChevron();
this.addChild(this._chevron);
return super.animate();
}
/* -------------------------------------------- */
/** @inheritdoc */
_animateFrame(dt, animation) {
const { time } = animation;
if ( time < this._t14 ) {
// Normalise t between 0 and 1.
const t = time / this._t14;
// Apply easing function.
const dy = CanvasAnimation.easeOutCircle(t);
this._chevron.y = this._y + (this._h2 * dy);
this._chevron.alpha = time / this._t14;
} else if ( time < this._t34 ) {
const t = time - this._t14;
const a = t / this._t12;
this._drawRings(a);
} else {
const t = (time - this._t34) / this._t14;
const a = 1 - t;
const dy = CanvasAnimation.easeInCircle(t);
this._chevron.y = this._y + ((1 - dy) * this._h2);
this._chevron.alpha = a;
this._drawRings(a);
}
}
/* -------------------------------------------- */
/**
* Draw the outer and inner rings.
* @param {number} a The alpha.
* @private
*/
_drawRings(a) {
this._outer.clear();
this._inner.clear();
this._outer.lineStyle(6, this._color, a).drawCircle(0, 0, this._r);
this._inner.lineStyle(3, this._color, a).arc(0, 0, this._rInner, 0, Math.PI * 1.5);
}
/* -------------------------------------------- */
/**
* Load the chevron texture.
* @returns {Promise<PIXI.Sprite>}
* @private
*/
async _loadChevron() {
const texture = await TextureLoader.loader.loadTexture(ChevronPing._CHEVRON_PATH);
const chevron = PIXI.Sprite.from(texture);
chevron.tint = this._color;
const w = this.options.size;
const h = (texture.height / texture.width) * w;
chevron.width = w;
chevron.height = h;
// The chevron begins the animation slightly above the pinged point.
this._h2 = h / 2;
chevron.x = -(w / 2);
chevron.y = this._y = -h - this._h2;
return chevron;
}
/* -------------------------------------------- */
/**
* Draw the two rings that are used as part of the ping animation.
* @returns {PIXI.Graphics[]}
* @private
*/
_createRings() {
this._outer = new PIXI.Graphics();
this._inner = new PIXI.Graphics();
return [this._outer, this._inner];
}
}


@@ -0,0 +1,216 @@
/**
* @typedef {PingOptions} PulsePingOptions
* @property {number} [rings=3] The number of rings used in the animation.
* @property {string} [color2=#ffffff] The alternate color that the rings begin at. Use white for a 'flashing' effect.
*/
/**
* A type of ping that produces a pulsing animation.
* @param {Point} origin The canvas coordinates of the origin of the ping.
* @param {PulsePingOptions} [options] Additional options to configure the ping animation.
* @extends Ping
*/
class PulsePing extends Ping {
constructor(origin, {rings=3, color2="#ffffff", ...options}={}) {
super(origin, {rings, color2, ...options});
this._color2 = game.settings.get("core", "photosensitiveMode") ? this._color : Color.from(color2);
// The radius is half the diameter.
this._r = this.options.size / 2;
// This is the radius that the rings initially begin at. It's set to 1/5th of the maximum radius.
this._r0 = this._r / 5;
this._computeTimeSlices();
}
/* -------------------------------------------- */
/**
* Initialize some time slice variables that will be used to control the animation.
*
* The animation for each ring can be separated into two consecutive stages.
* Stage 1: Fade in a white ring with radius r0.
* Stage 2: Expand radius outward. While the radius is expanding outward, we have two additional, consecutive
* animations:
* Stage 2.1: Transition color from white to the configured color.
* Stage 2.2: Fade out.
* 1/5th of the animation time is allocated to Stage 1. 4/5ths are allocated to Stage 2. Of those 4/5ths, 2/5ths
* are allocated to Stage 2.1, and 2/5ths are allocated to Stage 2.2.
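 *
 * For example, with the default duration of 900ms and rings=3 (values chosen purely for illustration): each time
 * slice is 900 / (3 + 1) = 225ms, so every ring animates over 450ms and ring i begins at i * 225ms. Of those
 * 450ms, Stage 1 takes 450 / 5 = 90ms and Stage 2 takes the remaining 360ms, split into 180ms of color
 * transition (Stage 2.1) followed by 180ms of fade-out (Stage 2.2).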
* @private
*/
_computeTimeSlices() {
// We divide up the total duration of the animation into rings + 1 time slices. Ring animations are staggered by 1
// slice, and last for a total of 2 slices each. This uses up the full duration and creates the ripple effect.
this._timeSlice = this.options.duration / (this.options.rings + 1);
this._timeSlice2 = this._timeSlice * 2;
// Store the 1/5th time slice for Stage 1.
this._timeSlice15 = this._timeSlice2 / 5;
// Store the 2/5ths time slice for the subdivisions of Stage 2.
this._timeSlice25 = this._timeSlice15 * 2;
// Store the 4/5ths time slice for Stage 2.
this._timeSlice45 = this._timeSlice25 * 2;
}
/* -------------------------------------------- */
/** @inheritdoc */
async animate() {
// Draw rings.
this.removeChildren();
for ( let i = 0; i < this.options.rings; i++ ) {
this.addChild(new PIXI.Graphics());
}
// Add a blur filter to soften the sharp edges of the shape.
const f = new PIXI.BlurFilter(2);
f.padding = this.options.size;
this.filters = [f];
return super.animate();
}
/* -------------------------------------------- */
/** @inheritdoc */
_animateFrame(dt, animation) {
const { time } = animation;
for ( let i = 0; i < this.options.rings; i++ ) {
const ring = this.children[i];
// Offset each ring by 1 time slice.
const tMin = this._timeSlice * i;
// Each ring gets 2 time slices to complete its full animation.
const tMax = tMin + this._timeSlice2;
// If it's not time for this ring to animate, do nothing.
if ( (time < tMin) || (time >= tMax) ) continue;
// Normalise our t.
let t = time - tMin;
ring.clear();
if ( t < this._timeSlice15 ) {
// Stage 1. Fade in a white ring of radius r0.
const a = t / this._timeSlice15;
this._drawShape(ring, this._color2, a, this._r0);
} else {
// Stage 2. Expand radius, transition color, and fade out. Re-normalize t for Stage 2.
t -= this._timeSlice15;
const dr = this._r / this._timeSlice45;
const r = this._r0 + (t * dr);
const c0 = this._color;
const c1 = this._color2;
const c = t <= this._timeSlice25 ? this._colorTransition(c0, c1, this._timeSlice25, t) : c0;
const ta = Math.max(0, t - this._timeSlice25);
const a = 1 - (ta / this._timeSlice25);
this._drawShape(ring, c, a, r);
}
}
}
/* -------------------------------------------- */
/**
* Transition linearly from one color to another.
* @param {Color} from The color to transition from.
* @param {Color} to The color to transition to.
* @param {number} duration The length of the transition in milliseconds.
* @param {number} t The current time along the duration.
* @returns {number} The incremental color between from and to.
* @private
*/
_colorTransition(from, to, duration, t) {
const d = t / duration;
const rgbFrom = from.rgb;
const rgbTo = to.rgb;
return Color.fromRGB(rgbFrom.map((c, i) => {
const diff = rgbTo[i] - c;
return c + (d * diff);
}));
}
/* -------------------------------------------- */
/**
* Draw the shape for this ping.
* @param {PIXI.Graphics} g The graphics object to draw to.
* @param {number} color The color of the shape.
* @param {number} alpha The alpha of the shape.
* @param {number} size The size of the shape to draw.
* @protected
*/
_drawShape(g, color, alpha, size) {
g.lineStyle({color, alpha, width: 6, cap: PIXI.LINE_CAP.ROUND, join: PIXI.LINE_JOIN.BEVEL});
g.drawCircle(0, 0, size);
}
}
/**
* A type of ping that produces an arrow pointing in a given direction.
 * @param {PIXI.Point} origin The canvas coordinates of the origin of the ping. This becomes the arrow's tip.
 * @param {PulsePingOptions} [options] Additional options to configure the ping animation.
 * @param {number} [options.rotation=0] The angle of the arrow in radians.
* @extends PulsePing
*/
class ArrowPing extends PulsePing {
constructor(origin, {rotation=0, ...options}={}) {
super(origin, options);
this.rotation = Math.normalizeRadians(rotation + (Math.PI * 1.5));
}
/* -------------------------------------------- */
/** @inheritdoc */
_drawShape(g, color, alpha, size) {
g.lineStyle({color, alpha, width: 6, cap: PIXI.LINE_CAP.ROUND, join: PIXI.LINE_JOIN.BEVEL});
const half = size / 2;
const x = -half;
const y = -size;
g.moveTo(x, y)
.lineTo(0, 0)
.lineTo(half, y)
.lineTo(0, -half)
.lineTo(x, y);
}
}
/**
* A type of ping that produces a pulse warning sign animation.
* @param {PIXI.Point} origin The canvas coordinates of the origin of the ping.
* @param {PulsePingOptions} [options] Additional options to configure the ping animation.
* @extends PulsePing
*/
class AlertPing extends PulsePing {
constructor(origin, {color="#ff0000", ...options}={}) {
super(origin, {color, ...options});
this._r = this.options.size;
}
/* -------------------------------------------- */
/** @inheritdoc */
_drawShape(g, color, alpha, size) {
// Draw a chamfered triangle.
g.lineStyle({color, alpha, width: 6, cap: PIXI.LINE_CAP.ROUND, join: PIXI.LINE_JOIN.BEVEL});
const half = size / 2;
const chamfer = size / 10;
const chamfer2 = chamfer / 2;
const x = -half;
const y = -(size / 3);
g.moveTo(x+chamfer, y)
.lineTo(x+size-chamfer, y)
.lineTo(x+size, y+chamfer)
.lineTo(x+half+chamfer2, y+size-chamfer)
.lineTo(x+half-chamfer2, y+size-chamfer)
.lineTo(x, y+chamfer)
.lineTo(x+chamfer, y);
}
}


@@ -0,0 +1,189 @@
/**
* @typedef {Object} RenderFlag
* @property {string[]} propagate Activating this flag also sets these flags to true
* @property {string[]} reset Activating this flag resets these flags to false
* @property {object} [deprecated] Is this flag deprecated? The deprecation options are passed to
 * logCompatibilityWarning. The deprecation message is auto-generated
* unless message is passed with the options.
* By default the message is logged only once.
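 * @property {boolean} [alias] Is this flag an alias which only propagates to other flags and is never stored
 *                             in the set itself?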
*/
/**
* A data structure for tracking a set of boolean status flags.
* This is a restricted set which can only accept flag values which are pre-defined.
* @param {Record<string, RenderFlag>} flags An object which defines the flags which are supported for tracking
* @param {object} [config] Optional configuration
* @param {RenderFlagObject} [config.object] The object which owns this RenderFlags instance
* @param {number} [config.priority] The ticker priority at which these render flags are handled
*/
class RenderFlags extends Set {
constructor(flags={}, {object, priority=PIXI.UPDATE_PRIORITY.OBJECTS}={}) {
super([]);
for ( const cfg of Object.values(flags) ) {
cfg.propagate ||= [];
cfg.reset ||= [];
}
Object.defineProperties(this, {
/**
* The flags tracked by this data structure.
* @type {Record<string, RenderFlag>}
*/
flags: {value: Object.freeze(flags), enumerable: false, writable: false},
/**
* The RenderFlagObject instance which owns this set of RenderFlags
* @type {RenderFlagObject}
*/
object: {value: object, enumerable: false, writable: false},
/**
* The update priority when these render flags are applied.
* Valid options are OBJECTS or PERCEPTION.
* @type {string}
*/
priority: {value: priority, enumerable: false, writable: false}
});
}
/* -------------------------------------------- */
/**
* @inheritDoc
* @returns {Record<string, boolean>} The flags which were previously set that have been cleared.
*/
clear() {
// Record which flags were previously active
const flags = {};
for ( const flag of this ) {
flags[flag] = true;
}
// Empty the set
super.clear();
// Remove the object from the pending queue
if ( this.object ) canvas.pendingRenderFlags[this.priority].delete(this.object);
return flags;
}
/* -------------------------------------------- */
/**
* Allow for handling one single flag at a time.
* This function returns whether the flag needs to be handled and removes it from the pending set.
* @param {string} flag
* @returns {boolean}
*/
handle(flag) {
const active = this.has(flag);
this.delete(flag);
return active;
}
/* -------------------------------------------- */
/**
* Activate certain flags, also toggling propagation and reset behaviors
* @param {Record<string, boolean>} changes
*/
set(changes) {
const seen = new Set();
for ( const [flag, value] of Object.entries(changes) ) {
this.#set(flag, value, seen);
}
if ( this.object ) canvas.pendingRenderFlags[this.priority].add(this.object);
}
/* -------------------------------------------- */
/**
* Recursively set a flag.
* This method applies propagation or reset behaviors when flags are assigned.
* @param {string} flag
* @param {boolean} value
* @param {Set<string>} seen
*/
#set(flag, value, seen) {
if ( seen.has(flag) || !value ) return;
seen.add(flag);
const cfg = this.flags[flag];
if ( !cfg ) throw new Error(`"${flag}" is not defined as a supported RenderFlag option.`);
    if ( cfg.deprecated ) this.#logDeprecationWarning(flag);
if ( !cfg.alias ) this.add(flag);
for ( const r of cfg.reset ) this.delete(r);
for ( const p of cfg.propagate ) this.#set(p, true, seen);
}
/* -------------------------------------------- */
/**
* Log the deprecation warning of the flag.
* @param {string} flag
*/
  #logDeprecationWarning(flag) {
const cfg = this.flags[flag];
if ( !cfg.deprecated ) throw new Error(`The RenderFlag "${flag}" is not deprecated`);
let {message, ...options} = cfg.deprecated;
if ( !message ) {
message = `The RenderFlag "${flag}"`;
if ( this.object ) message += ` of ${this.object.constructor.name}`;
message += " is deprecated";
if ( cfg.propagate.length === 0 ) message += " without replacement.";
else if ( cfg.propagate.length === 1 ) message += ` in favor of ${cfg.propagate[0]}.`;
else message += `. Use ${cfg.propagate.slice(0, -1).join(", ")} and/or ${cfg.propagate.at(-1)} instead.`;
}
options.once ??= true;
foundry.utils.logCompatibilityWarning(message, options);
}
}
/* -------------------------------------------- */
/**
* Add RenderFlags functionality to some other object.
* This mixin standardizes the interface for such functionality.
* @param {typeof PIXI.DisplayObject|typeof Object} Base The base class being mixed. Normally a PIXI.DisplayObject
* @returns {typeof RenderFlagObject} The mixed class definition
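 *
 * A sketch of a mixed class; the flag names and refresh logic below are illustrative assumptions:
 * @example
 * ```js
 * class ExampleObject extends RenderFlagsMixin(PIXI.Container) {
 *   static RENDER_FLAGS = {
 *     redraw: {propagate: ["refresh"]},
 *     refresh: {propagate: ["refreshPosition"]},
 *     refreshPosition: {}
 *   };
 *   applyRenderFlags() {
 *     const flags = this.renderFlags.clear();                   // Returns the flags which were set, then empties the set
 *     if ( flags.refreshPosition ) this.position.set(100, 200); // Illustrative refresh behavior
 *   }
 * }
 * const obj = new ExampleObject();
 * obj.renderFlags.set({redraw: true});   // Queues the object; flags are applied later at the configured ticker priority
 * ```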
*/
function RenderFlagsMixin(Base) {
return class RenderFlagObject extends Base {
constructor(...args) {
super(...args);
this.renderFlags = new RenderFlags(this.constructor.RENDER_FLAGS, {
object: this,
priority: this.constructor.RENDER_FLAG_PRIORITY
});
}
/**
* Configure the render flags used for this class.
* @type {Record<string, RenderFlag>}
*/
static RENDER_FLAGS = {};
/**
* The ticker priority when RenderFlags of this class are handled.
* Valid values are OBJECTS or PERCEPTION.
* @type {string}
*/
static RENDER_FLAG_PRIORITY = "OBJECTS";
/**
* Status flags which are applied at render-time to update the PlaceableObject.
* If an object defines RenderFlags, it should at least include flags for "redraw" and "refresh".
* @type {RenderFlags}
*/
renderFlags;
/**
* Apply any current render flags, clearing the renderFlags set.
* Subclasses should override this method to define behavior.
*/
applyRenderFlags() {
this.renderFlags.clear();
}
};
}
/* -------------------------------------------- */


@@ -0,0 +1,97 @@
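/**
 * A draggable, circular handle rendered at a fractional offset of its parent bounds, used to resize that parent.
 * @param {number[]} offset The [x, y] fractional offset of the handle position relative to the parent bounds
 * @param {object} [handlers] Optional interaction handlers
 * @param {Function} [handlers.canDrag] A predicate which must return true for a drag workflow to begin
 */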
class ResizeHandle extends PIXI.Graphics {
constructor(offset, handlers={}) {
super();
this.offset = offset;
this.handlers = handlers;
this.lineStyle(4, 0x000000, 1.0).beginFill(0xFF9829, 1.0).drawCircle(0, 0, 10).endFill();
this.cursor = "pointer";
}
/**
* Track whether the handle is being actively used for a drag workflow
* @type {boolean}
*/
active = false;
/* -------------------------------------------- */
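/**
 * Refresh the position and hit area of the handle based on the current bounds of the parent.
 * @param {PIXI.Rectangle} bounds The bounding box within which the handle is positioned
 */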
refresh(bounds) {
this.position.set(bounds.x + (bounds.width * this.offset[0]), bounds.y + (bounds.height * this.offset[1]));
this.hitArea = new PIXI.Rectangle(-16, -16, 32, 32); // Make the handle easier to grab
}
/* -------------------------------------------- */
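/**
 * Compute updated dimensions for an object being resized through this handle.
 * Width and height are clamped to a minimum of 24 pixels and may optionally be constrained to an aspect ratio.
 * @param {object} current The current geometry of the object, whose width/height signs are preserved
 * @param {object} origin The original geometry at the start of the drag workflow
 * @param {Point} destination The current drag destination point
 * @param {object} [options] Additional options
 * @param {number|null} [options.aspectRatio=null] A fixed aspect ratio to enforce, if any
 * @returns {{x: number, y: number, width: number, height: number}} The adjusted position and dimensions
 */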
updateDimensions(current, origin, destination, {aspectRatio=null}={}) {
// Identify the change in dimensions
const dx = destination.x - origin.x;
const dy = destination.y - origin.y;
// Determine the new width and the new height
let width = Math.max(origin.width + dx, 24);
let height = Math.max(origin.height + dy, 24);
// Constrain the aspect ratio
if ( aspectRatio ) {
if ( width >= height ) width = height * aspectRatio;
else height = width / aspectRatio;
}
// Adjust the final points
return {
x: current.x,
y: current.y,
width: width * Math.sign(current.width),
height: height * Math.sign(current.height)
};
}
/* -------------------------------------------- */
/* Interactivity */
/* -------------------------------------------- */
activateListeners() {
this.off("pointerover").off("pointerout").off("pointerdown")
.on("pointerover", this._onHoverIn.bind(this))
.on("pointerout", this._onHoverOut.bind(this))
.on("pointerdown", this._onMouseDown.bind(this));
this.eventMode = "static";
}
/* -------------------------------------------- */
/**
* Handle mouse-over event on a control handle
* @param {PIXI.FederatedEvent} event The mouseover event
* @protected
*/
_onHoverIn(event) {
const handle = event.target;
handle.scale.set(1.5, 1.5);
}
/* -------------------------------------------- */
/**
* Handle mouse-out event on a control handle
* @param {PIXI.FederatedEvent} event The mouseout event
* @protected
*/
_onHoverOut(event) {
const handle = event.target;
handle.scale.set(1.0, 1.0);
}
/* -------------------------------------------- */
/**
* When we start a drag event - create a preview copy of the Tile for re-positioning
* @param {PIXI.FederatedEvent} event The mousedown event
* @protected
*/
_onMouseDown(event) {
if ( this.handlers.canDrag && !this.handlers.canDrag() ) return;
this.active = true;
}
}


@@ -0,0 +1,52 @@
/**
 * A subclass of Set which manages the Tokens which the User has targeted.
* @extends {Set}
* @see User#targets
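 *
 * A usage sketch, assuming `token` references a Token placed on the active scene:
 * @example
 * ```js
 * const targets = game.user.targets;   // The UserTargets set for the current User
 * targets.add(token);                  // Fires the "targetToken" hook with targeted=true
 * console.log(targets.ids);            // => [token.id]
 * targets.delete(token);               // Fires the "targetToken" hook with targeted=false
 * ```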
*/
class UserTargets extends Set {
constructor(user) {
super();
if ( user.targets ) throw new Error(`User ${user.id} already has a targets set defined`);
this.user = user;
}
/**
* Return the Token IDs which are user targets
* @type {string[]}
*/
get ids() {
return Array.from(this).map(t => t.id);
}
/** @override */
add(token) {
if ( this.has(token) ) return this;
super.add(token);
this.#hook(token, true);
return this;
}
/** @override */
clear() {
const tokens = Array.from(this);
super.clear();
tokens.forEach(t => this.#hook(t, false));
}
/** @override */
delete(token) {
if ( !this.has(token) ) return false;
super.delete(token);
this.#hook(token, false);
return true;
}
/**
* Dispatch the targetToken hook whenever the user's target set changes.
* @param {Token} token The targeted Token
* @param {boolean} targeted Whether the Token has been targeted or untargeted
*/
#hook(token, targeted) {
Hooks.callAll("targetToken", this.user, token, targeted);
}
}


@@ -0,0 +1,512 @@
/**
* A Loader class which helps with loading video and image textures.
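 *
 * @example Load a texture through the shared loader instance (the asset path is an illustrative assumption)
 * ```js
 * const texture = await TextureLoader.loader.loadTexture("icons/svg/light.svg");
 * if ( texture ) canvas.stage.addChild(new PIXI.Sprite(texture));
 * ```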
*/
class TextureLoader {
/**
* The duration in milliseconds for which a texture will remain cached
* @type {number}
*/
static CACHE_TTL = 1000 * 60 * 15;
/**
* Record the timestamps when each asset path is retrieved from cache.
* @type {Map<PIXI.BaseTexture|PIXI.Spritesheet,{src:string,time:number}>}
*/
static #cacheTime = new Map();
/**
* A mapping of cached texture data
* @type {WeakMap<PIXI.BaseTexture,Map<string, TextureAlphaData>>}
*/
static #textureDataMap = new WeakMap();
/**
* Create a fixed retry string to use for CORS retries.
* @type {string}
*/
static #retryString = Date.now().toString();
/**
* To know if the basis transcoder has been initialized
* @type {boolean}
*/
static #basisTranscoderInitialized = false;
/* -------------------------------------------- */
/**
* Initialize the basis transcoder for PIXI.Assets
* @returns {Promise<*>}
*/
static async initializeBasisTranscoder() {
if ( this.#basisTranscoderInitialized ) return;
this.#basisTranscoderInitialized = true;
return await PIXI.TranscoderWorker.loadTranscoder(
"scripts/basis_transcoder.js",
"scripts/basis_transcoder.wasm"
);
}
/* -------------------------------------------- */
/**
* Check if a source has a text file extension.
* @param {string} src The source.
* @returns {boolean} If the source has a text extension or not.
*/
static hasTextExtension(src) {
let rgx = new RegExp(`(\\.${Object.keys(CONST.TEXT_FILE_EXTENSIONS).join("|\\.")})(\\?.*)?`, "i");
return rgx.test(src);
}
/* -------------------------------------------- */
/**
* @typedef {Object} TextureAlphaData
* @property {number} width The width of the (downscaled) texture.
* @property {number} height The height of the (downscaled) texture.
* @property {number} minX The minimum x-coordinate with alpha > 0.
* @property {number} minY The minimum y-coordinate with alpha > 0.
* @property {number} maxX The maximum x-coordinate with alpha > 0 plus 1.
* @property {number} maxY The maximum y-coordinate with alpha > 0 plus 1.
* @property {Uint8Array} data The array containing the texture alpha values (0-255)
* with the dimensions (maxX-minX)×(maxY-minY).
*/
/**
* Use the texture to create a cached mapping of pixel alpha and cache it.
* Cache the bounding box of non-transparent pixels for the un-rotated shape.
* @param {PIXI.Texture} texture The provided texture.
* @param {number} [resolution=1] Resolution of the texture data output.
* @returns {TextureAlphaData|undefined} The texture data if the texture is valid, else undefined.
*/
static getTextureAlphaData(texture, resolution=1) {
// If texture is not present
if ( !texture?.valid ) return;
// Get the base tex and the stringified frame + width/height
const width = Math.ceil(Math.round(texture.width * texture.resolution) * resolution);
const height = Math.ceil(Math.round(texture.height * texture.resolution) * resolution);
const baseTex = texture.baseTexture;
const frame = texture.frame;
const sframe = `${frame.x},${frame.y},${frame.width},${frame.height},${width},${height}`;
// Get frameDataMap and textureData if they exist
let textureData;
let frameDataMap = this.#textureDataMap.get(baseTex);
if ( frameDataMap ) textureData = frameDataMap.get(sframe);
// If texture data exists for the baseTex/frame couple, we return it
if ( textureData ) return textureData;
else textureData = {};
// Create a temporary Sprite using the provided texture
const sprite = new PIXI.Sprite(texture);
sprite.width = textureData.width = width;
sprite.height = textureData.height = height;
sprite.anchor.set(0, 0);
// Create or update the alphaMap render texture
const tex = PIXI.RenderTexture.create({width: width, height: height});
canvas.app.renderer.render(sprite, {renderTexture: tex});
sprite.destroy(false);
const pixels = canvas.app.renderer.extract.pixels(tex);
tex.destroy(true);
// Trim pixels with zero alpha
let minX = width;
let minY = height;
let maxX = 0;
let maxY = 0;
for ( let i = 3, y = 0; y < height; y++ ) {
for ( let x = 0; x < width; x++, i += 4 ) {
const alpha = pixels[i];
if ( alpha === 0 ) continue;
if ( x < minX ) minX = x;
if ( x >= maxX ) maxX = x + 1;
if ( y < minY ) minY = y;
if ( y >= maxY ) maxY = y + 1;
}
}
// Special case when the whole texture is alpha 0
if ( minX > maxX ) minX = minY = maxX = maxY = 0;
// Set the bounds of the trimmed region
textureData.minX = minX;
textureData.minY = minY;
textureData.maxX = maxX;
textureData.maxY = maxY;
// Create new buffer for storing the alpha channel only
const data = textureData.data = new Uint8Array((maxX - minX) * (maxY - minY));
for ( let i = 0, y = minY; y < maxY; y++ ) {
for ( let x = minX; x < maxX; x++, i++ ) {
data[i] = pixels[(((width * y) + x) * 4) + 3];
}
}
// Saving the texture data
if ( !frameDataMap ) {
frameDataMap = new Map();
this.#textureDataMap.set(baseTex, frameDataMap);
}
frameDataMap.set(sframe, textureData);
return textureData;
}
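/* -------------------------------------------- */
/* Illustrative usage sketch (editor addition): extract a half-resolution alpha map for a
 * loaded texture and test whether a pixel of that map is non-transparent. Coordinates are
 * expressed in the (downscaled) alpha-map space. The helper name is hypothetical.
 *
 *   function exampleIsAlphaPixelOpaque(texture, x, y) {
 *     const alpha = TextureLoader.getTextureAlphaData(texture, 0.5);
 *     if ( !alpha ) return false;                        // Invalid or unloaded texture
 *     const w = alpha.maxX - alpha.minX;
 *     const h = alpha.maxY - alpha.minY;
 *     const px = x - alpha.minX;
 *     const py = y - alpha.minY;
 *     if ( (px < 0) || (py < 0) || (px >= w) || (py >= h) ) return false;  // Outside trimmed bounds
 *     return alpha.data[(py * w) + px] > 0;
 *   }
 */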
/* -------------------------------------------- */
/**
* Load all the textures which are required for a particular Scene
* @param {Scene} scene The Scene to load
* @param {object} [options={}] Additional options that configure texture loading
* @param {boolean} [options.expireCache=true] Destroy other expired textures
* @param {string[]} [options.additionalSources=[]] Additional sources to load during canvas initialization
* @param {number} [options.maxConcurrent] The maximum number of textures that can be loaded concurrently
* @returns {Promise<void>}
*/
static loadSceneTextures(scene, {expireCache=true, additionalSources=[], maxConcurrent}={}) {
let toLoad = [];
// Scene background and foreground textures
if ( scene.background.src ) toLoad.push(scene.background.src);
if ( scene.foreground ) toLoad.push(scene.foreground);
if ( scene.fog.overlay ) toLoad.push(scene.fog.overlay);
// Tiles
toLoad = toLoad.concat(scene.tiles.reduce((arr, t) => {
if ( t.texture.src ) arr.push(t.texture.src);
return arr;
}, []));
// Tokens
toLoad.push(CONFIG.Token.ring.spritesheet);
toLoad = toLoad.concat(scene.tokens.reduce((arr, t) => {
if ( t.texture.src ) arr.push(t.texture.src);
if ( t.ring.enabled ) arr.push(t.ring.subject.texture);
return arr;
}, []));
// Control Icons
toLoad = toLoad.concat(Object.values(CONFIG.controlIcons));
// Status Effect textures
toLoad = toLoad.concat(CONFIG.statusEffects.map(e => e.img ?? /** @deprecated since v12 */ e.icon));
// Configured scene textures
toLoad.push(...Object.values(canvas.sceneTextures));
// Additional requested sources
toLoad.push(...additionalSources);
// Load files
const showName = scene.active || scene.visible;
const loadName = showName ? (scene.navName || scene.name) : "...";
return this.loader.load(toLoad, {
message: game.i18n.format("SCENES.Loading", {name: loadName}),
expireCache,
maxConcurrent
});
}
/* -------------------------------------------- */
/**
* Load an Array of provided source URL paths
* @param {string[]} sources The source URLs to load
* @param {object} [options={}] Additional options which modify loading
* @param {string} [options.message] The status message to display in the load bar
* @param {boolean} [options.expireCache=false] Expire other cached textures?
* @param {number} [options.maxConcurrent] The maximum number of textures that can be loaded concurrently.
* @param {boolean} [options.displayProgress] Display loading progress bar
* @returns {Promise<void>} A Promise which resolves once all textures are loaded
*/
async load(sources, {message, expireCache=false, maxConcurrent, displayProgress=true}={}) {
sources = new Set(sources);
const progress = {message: message, loaded: 0, failed: 0, total: sources.size, pct: 0};
console.groupCollapsed(`${vtt} | Loading ${sources.size} Assets`);
const loadTexture = async src => {
try {
await this.loadTexture(src);
if ( displayProgress ) TextureLoader.#onProgress(src, progress);
} catch(err) {
TextureLoader.#onError(src, progress, err);
}
};
const promises = [];
if ( maxConcurrent ) {
const semaphore = new foundry.utils.Semaphore(maxConcurrent);
for ( const src of sources ) promises.push(semaphore.add(loadTexture, src));
} else {
for ( const src of sources ) promises.push(loadTexture(src));
}
await Promise.allSettled(promises);
console.groupEnd();
if ( expireCache ) await this.expireCache();
}
/* -------------------------------------------- */
/**
* Load a single texture or spritesheet on-demand from a given source URL path
* @param {string} src The source texture path to load
* @returns {Promise<PIXI.BaseTexture|PIXI.Spritesheet|null>} The loaded texture object
*/
async loadTexture(src) {
const loadAsset = async (src, bustCache=false) => {
if ( bustCache ) src = TextureLoader.getCacheBustURL(src);
if ( !src ) return null;
try {
return await PIXI.Assets.load(src);
} catch ( err ) {
if ( bustCache ) throw err;
return await loadAsset(src, true);
}
};
let asset = await loadAsset(src);
if ( !asset?.baseTexture?.valid ) return null;
if ( asset instanceof PIXI.Texture ) asset = asset.baseTexture;
this.setCache(src, asset);
return asset;
}
/* --------------------------------------------- */
/**
* Use the Fetch API to retrieve a resource and return a Blob instance for it.
* @param {string} src
* @param {object} [options] Options to configure the loading behaviour.
* @param {boolean} [options.bustCache=false] Append a cache-busting query parameter to the request.
* @returns {Promise<Blob>} A Blob containing the loaded data
*/
static async fetchResource(src, {bustCache=false}={}) {
const fail = `Failed to load texture ${src}`;
const req = bustCache ? TextureLoader.getCacheBustURL(src) : src;
if ( !req ) throw new Error(`${fail}: Invalid URL`);
let res;
try {
res = await fetch(req, {mode: "cors", credentials: "same-origin"});
} catch(err) {
// We may have encountered a common CORS limitation: https://bugs.chromium.org/p/chromium/issues/detail?id=409090
if ( !bustCache ) return this.fetchResource(src, {bustCache: true});
throw new Error(`${fail}: CORS failure`);
}
if ( !res.ok ) throw new Error(`${fail}: Server responded with ${res.status}`);
return res.blob();
}
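/* -------------------------------------------- */
/* Illustrative usage sketch (editor addition): fetch an asset as a Blob and expose it
 * through an object URL. The caller is responsible for revoking the URL afterwards.
 *
 *   async function exampleFetchAsObjectURL(src) {
 *     const blob = await TextureLoader.fetchResource(src);
 *     return URL.createObjectURL(blob);
 *   }
 */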
/* -------------------------------------------- */
/**
* Log texture loading progress in the console and in the Scene loading bar
* @param {string} src The source URL being loaded
* @param {object} progress Loading progress
* @private
*/
static #onProgress(src, progress) {
progress.loaded++;
progress.pct = Math.round((progress.loaded + progress.failed) * 100 / progress.total);
SceneNavigation.displayProgressBar({label: progress.message, pct: progress.pct});
console.log(`Loaded ${src} (${progress.pct}%)`);
}
/* -------------------------------------------- */
/**
* Log failed texture loading
* @param {string} src The source URL being loaded
* @param {object} progress Loading progress
* @param {Error} error The error which occurred
* @private
*/
static #onError(src, progress, error) {
progress.failed++;
progress.pct = Math.round((progress.loaded + progress.failed) * 100 / progress.total);
SceneNavigation.displayProgressBar({label: progress.message, pct: progress.pct});
console.warn(`Loading failed for ${src} (${progress.pct}%): ${error.message}`);
}
/* -------------------------------------------- */
/* Cache Controls */
/* -------------------------------------------- */
/**
* Add an image or a sprite sheet url to the assets cache.
* @param {string} src The source URL.
* @param {PIXI.BaseTexture|PIXI.Spritesheet} asset The asset
*/
setCache(src, asset) {
TextureLoader.#cacheTime.set(asset, {src, time: Date.now()});
}
/* -------------------------------------------- */
/**
* Retrieve a texture or a sprite sheet from the assets cache
* @param {string} src The source URL
* @returns {PIXI.BaseTexture|PIXI.Spritesheet|null} The cached base texture or sprite sheet, or null if it is not cached
*/
getCache(src) {
if ( !src ) return null;
if ( !PIXI.Assets.cache.has(src) ) src = TextureLoader.getCacheBustURL(src) || src;
let asset = PIXI.Assets.get(src);
if ( !asset?.baseTexture?.valid ) return null;
if ( asset instanceof PIXI.Texture ) asset = asset.baseTexture;
this.setCache(src, asset);
return asset;
}
/* -------------------------------------------- */
/**
* Expire and unload assets from the cache which have not been used for more than CACHE_TTL milliseconds.
*/
async expireCache() {
const promises = [];
const t = Date.now();
for ( const [asset, {src, time}] of TextureLoader.#cacheTime.entries() ) {
const baseTexture = asset instanceof PIXI.Spritesheet ? asset.baseTexture : asset;
if ( !baseTexture || baseTexture.destroyed ) {
TextureLoader.#cacheTime.delete(asset);
continue;
}
if ( (t - time) <= TextureLoader.CACHE_TTL ) continue;
console.log(`${vtt} | Expiring cached texture: ${src}`);
promises.push(PIXI.Assets.unload(src));
TextureLoader.#cacheTime.delete(asset);
}
await Promise.allSettled(promises);
}
/* -------------------------------------------- */
/**
* Return a URL with a cache-busting query parameter appended.
* @param {string} src The source URL being attempted
* @returns {string|boolean} The new URL, or false if a cache-busting URL cannot be constructed (invalid or same-origin source).
*/
static getCacheBustURL(src) {
const url = URL.parseSafe(src);
if ( !url ) return false;
if ( url.origin === window.location.origin ) return false;
url.searchParams.append("cors-retry", TextureLoader.#retryString);
return url.href;
}
/* -------------------------------------------- */
/* Deprecations */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
async loadImageTexture(src) {
const warning = "TextureLoader#loadImageTexture is deprecated. Use TextureLoader#loadTexture instead.";
foundry.utils.logCompatibilityWarning(warning, {since: 11, until: 13});
return this.loadTexture(src);
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
async loadVideoTexture(src) {
const warning = "TextureLoader#loadVideoTexture is deprecated. Use TextureLoader#loadTexture instead.";
foundry.utils.logCompatibilityWarning(warning, {since: 11, until: 13});
return this.loadTexture(src);
}
/**
* @deprecated since v12
* @ignore
*/
static get textureBufferDataMap() {
const warning = "TextureLoader.textureBufferDataMap is deprecated without replacement. Use " +
"TextureLoader.getTextureAlphaData to create a texture data map and cache it automatically, or create your own" +
" caching system.";
foundry.utils.logCompatibilityWarning(warning, {since: 12, until: 14});
return this.#textureBufferDataMap;
}
/**
* @deprecated since v12
* @ignore
*/
static #textureBufferDataMap = new Map();
}
/**
* A global reference to the singleton texture loader
* @type {TextureLoader}
*/
TextureLoader.loader = new TextureLoader();
/* -------------------------------------------- */
/**
* Test whether a file source exists by performing a HEAD request against it
* @param {string} src The source URL or path to test
* @returns {Promise<boolean>} Does the file exist at the provided url?
*/
async function srcExists(src) {
return foundry.utils.fetchWithTimeout(src, { method: "HEAD" }).then(resp => {
return resp.status < 400;
}).catch(() => false);
}
/* -------------------------------------------- */
/**
* Get a single texture or sprite sheet from the cache.
* @param {string} src The texture path to load.
* @returns {PIXI.Texture|PIXI.Spritesheet|null} A texture, a sprite sheet or null if not found in cache.
*/
function getTexture(src) {
const asset = TextureLoader.loader.getCache(src);
const baseTexture = asset instanceof PIXI.Spritesheet ? asset.baseTexture : asset;
if ( !baseTexture?.valid ) return null;
return (asset instanceof PIXI.Spritesheet ? asset : new PIXI.Texture(asset));
}
/* -------------------------------------------- */
/**
* Load a single asset and return a Promise which resolves once the asset is ready to use
* @param {string} src The requested asset source
* @param {object} [options] Additional options which modify asset loading
* @param {string} [options.fallback] A fallback texture URL to use if the requested source is unavailable
* @returns {Promise<PIXI.Texture|PIXI.Spritesheet|null>} The loaded Texture or sprite sheet,
* or null if loading failed with no fallback
*/
async function loadTexture(src, {fallback}={}) {
let asset;
let error;
try {
asset = await TextureLoader.loader.loadTexture(src);
const baseTexture = asset instanceof PIXI.Spritesheet ? asset.baseTexture : asset;
if ( !baseTexture?.valid ) error = new Error(`Invalid Asset ${src}`);
}
catch(err) {
err.message = `The requested asset ${src} could not be loaded: ${err.message}`;
error = err;
}
if ( error ) {
console.error(error);
if ( TextureLoader.hasTextExtension(src) ) return null; // No fallback for spritesheets
return fallback ? loadTexture(fallback) : null;
}
if ( asset instanceof PIXI.Spritesheet ) return asset;
return new PIXI.Texture(asset);
}
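/* -------------------------------------------- */
// Illustrative usage sketch (editor addition, not part of the original file): preload a small
// batch of images and then retrieve one synchronously from the shared cache. The file paths
// are placeholders chosen for the example.
async function examplePreloadAndReuse() {
  const sources = ["example/assets/token-a.webp", "example/assets/token-b.webp"];
  await TextureLoader.loader.load(sources, {message: "Preloading example assets"});
  // Once loaded, getTexture() resolves synchronously from the cache; fall back to an
  // on-demand load (using the second image as fallback) if the cache missed.
  return getTexture(sources[0]) ?? await loadTexture(sources[0], {fallback: sources[1]});
}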

View File

@@ -0,0 +1,161 @@
/**
* A special class of Polygon which implements a limited angle of emission for a Point Source.
* The shape is defined by a point origin, radius, angle, and rotation.
* The shape is further customized by a configurable density which informs the approximation.
* An optional secondary externalRadius can be provided which adds supplementary visibility outside the primary angle.
*/
class LimitedAnglePolygon extends PIXI.Polygon {
constructor(origin, {radius, angle=360, rotation=0, density, externalRadius=0} = {}) {
super([]);
/**
* The origin point of the Polygon
* @type {Point}
*/
this.origin = origin;
/**
* The radius of the emitted cone.
* @type {number}
*/
this.radius = radius;
/**
* The angle of the Polygon in degrees.
* @type {number}
*/
this.angle = angle;
/**
* The direction of rotation at the center of the emitted angle in degrees.
* @type {number}
*/
this.rotation = rotation;
/**
* The density of rays which approximate the cone, defined as rays per PI.
* @type {number}
*/
this.density = density ?? PIXI.Circle.approximateVertexDensity(this.radius);
/**
* An optional "external radius" which is included in the polygon for the supplementary area outside the cone.
* @type {number}
*/
this.externalRadius = externalRadius;
/**
* The angle of the left (counter-clockwise) edge of the emitted cone in radians.
* @type {number}
*/
this.aMin = Math.normalizeRadians(Math.toRadians(this.rotation + 90 - (this.angle / 2)));
/**
* The angle of the right (clockwise) edge of the emitted cone in radians.
* @type {number}
*/
this.aMax = this.aMin + Math.toRadians(this.angle);
// Generate polygon points
this.#generatePoints();
}
/**
* The bounding box of the circle defined by the externalRadius, if any
* @type {PIXI.Rectangle}
*/
externalBounds;
/* -------------------------------------------- */
/**
* Generate the points of the LimitedAnglePolygon using the provided configuration parameters.
*/
#generatePoints() {
const {x, y} = this.origin;
// Construct polygon points for the primary angle
const primaryAngle = this.aMax - this.aMin;
const nPrimary = Math.ceil((primaryAngle * this.density) / (2 * Math.PI));
const dPrimary = primaryAngle / nPrimary;
for ( let i=0; i<=nPrimary; i++ ) {
const pad = Ray.fromAngle(x, y, this.aMin + (i * dPrimary), this.radius);
this.points.push(pad.B.x, pad.B.y);
}
// Add secondary angle
if ( this.externalRadius ) {
const secondaryAngle = (2 * Math.PI) - primaryAngle;
const nSecondary = Math.ceil((secondaryAngle * this.density) / (2 * Math.PI));
const dSecondary = secondaryAngle / nSecondary;
for ( let i=0; i<=nSecondary; i++ ) {
const pad = Ray.fromAngle(x, y, this.aMax + (i * dSecondary), this.externalRadius);
this.points.push(pad.B.x, pad.B.y);
}
this.externalBounds = (new PIXI.Circle(x, y, this.externalRadius)).getBounds();
}
// No secondary angle
else {
this.points.unshift(x, y);
this.points.push(x, y);
}
}
/* -------------------------------------------- */
/**
* Restrict the edges which should be included in a PointSourcePolygon based on this specialized shape.
* We use two tests to jointly keep or reject edges.
* 1. If this shape uses an externalRadius, keep edges which collide with the bounding box of that circle.
* 2. Keep edges which are contained within or collide with one of the primary angle boundary rays.
* @param {Point} a The first edge vertex
* @param {Point} b The second edge vertex
* @returns {boolean} Should the edge be included in the PointSourcePolygon computation?
* @internal
*/
_includeEdge(a, b) {
// 1. If this shape uses an externalRadius, keep edges which collide with the bounding box of that circle.
if ( this.externalBounds?.lineSegmentIntersects(a, b, {inside: true}) ) return true;
// 2. Keep edges which are contained within or collide with one of the primary angle boundary rays.
const roundPoint = p => ({x: Math.round(p.x), y: Math.round(p.y)});
const rMin = Ray.fromAngle(this.origin.x, this.origin.y, this.aMin, this.radius);
roundPoint(rMin.B);
const rMax = Ray.fromAngle(this.origin.x, this.origin.y, this.aMax, this.radius);
roundPoint(rMax.B);
// If either vertex is inside, keep the edge
if ( LimitedAnglePolygon.pointBetweenRays(a, rMin, rMax, this.angle) ) return true;
if ( LimitedAnglePolygon.pointBetweenRays(b, rMin, rMax, this.angle) ) return true;
// If both vertices are outside, test whether the edge collides with one (either) of the limiting rays
if ( foundry.utils.lineSegmentIntersects(rMin.A, rMin.B, a, b) ) return true;
if ( foundry.utils.lineSegmentIntersects(rMax.A, rMax.B, a, b) ) return true;
// Otherwise, the edge can be discarded
return false;
}
/* -------------------------------------------- */
/**
* Test whether a vertex lies between two boundary rays.
* If the angle is greater than 180, test for points between rMax and rMin (inverse).
* Otherwise, keep vertices that are between the rays directly.
* @param {Point} point The candidate point
* @param {PolygonRay} rMin The counter-clockwise bounding ray
* @param {PolygonRay} rMax The clockwise bounding ray
* @param {number} angle The angle being tested, in degrees
* @returns {boolean} Is the vertex between the two rays?
*/
static pointBetweenRays(point, rMin, rMax, angle) {
const ccw = foundry.utils.orient2dFast;
if ( angle > 180 ) {
const outside = (ccw(rMax.A, rMax.B, point) <= 0) && (ccw(rMin.A, rMin.B, point) >= 0);
return !outside;
}
return (ccw(rMin.A, rMin.B, point) <= 0) && (ccw(rMax.A, rMax.B, point) >= 0);
}
}
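/* -------------------------------------------- */
// Illustrative usage sketch (editor addition, not part of the original file): build a 90° cone
// (rotation 0 centers the cone on the +y canvas direction) and use the internal edge filter to
// keep or discard candidate edges. Coordinates are arbitrary and the helper name is hypothetical.
function exampleConeEdgeFilter() {
  const cone = new LimitedAnglePolygon({x: 1000, y: 1000}, {radius: 600, angle: 90, rotation: 0});
  const keep = cone._includeEdge({x: 960, y: 1400}, {x: 1040, y: 1400});  // Lies inside the cone
  const drop = cone._includeEdge({x: 960, y: 400}, {x: 1040, y: 400});    // Entirely behind the origin
  return {keep, drop, vertexCount: cone.points.length / 2};
}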

View File

@@ -0,0 +1,446 @@
// noinspection TypeScriptUMDGlobal
/**
* A helper class used to construct triangulated polygon meshes.
* Allows padding to be added and a specific depth value to be assigned.
* @param {number[]|PIXI.Polygon} poly Closed polygon to be processed and converted to a mesh
* (array of points or PIXI Polygon)
* @param {object} [options={}] Various options: normalization, offsetting, depth, ...
*/
class PolygonMesher {
constructor(poly, options = {}) {
this.options = {...this.constructor._defaultOptions, ...options};
const {normalize, x, y, radius, scale, offset} = this.options;
// Creating the scaled values
this.#scaled.sradius = radius * scale;
this.#scaled.sx = x * scale;
this.#scaled.sy = y * scale;
this.#scaled.soffset = offset * scale;
// Compute the required number of offset passes (one pass per 3 pixels of offset, rounded up)
this.#nbPass = Math.ceil(Math.abs(offset) / 3);
// Get points from poly param
const points = poly instanceof PIXI.Polygon ? poly.points : poly;
if ( !Array.isArray(points) ) {
throw new Error("You must provide a PIXI.Polygon or an array of vertices to the PolygonMesher constructor");
}
// Correcting normalize option if necessary. We can't normalize with a radius of 0.
if ( normalize && (radius === 0) ) this.options.normalize = false;
// Creating the mesh vertices
this.#computePolygonMesh(points);
}
/**
* Default options values
* @type {Record<string,boolean|number>}
*/
static _defaultOptions = {
offset: 0, // The position value in pixels
normalize: false, // Should the vertices be normalized?
x: 0, // The x origin
y: 0, // The y origin
radius: 0, // The radius
depthOuter: 0, // The depth value on the outer polygon
depthInner: 1, // The depth value on the inner(s) polygon(s)
scale: 10e8, // Constant multiplier to avoid floating point imprecision with ClipperLib
miterLimit: 7, // Distance of the miter limit, when sharp angles are cut during offsetting.
interleaved: false // Should the vertex data be interleaved into one VBO?
};
/* -------------------------------------------- */
/**
* Polygon mesh vertices
* @type {number[]}
*/
vertices = [];
/**
* Polygon mesh indices
* @type {number[]}
*/
indices = [];
/**
* Contains options to apply during the meshing process
* @type {Record<string,boolean|number>}
*/
options = {};
/**
* Contains some options values scaled by the constant factor
* @type {Record<string,number>}
* @private
*/
#scaled = {};
/**
* Polygon mesh geometry
* @type {PIXI.Geometry}
* @private
*/
#geometry = null;
/**
* Contain the polygon tree node object, containing the main forms and its holes and sub-polygons
* @type {{poly: number[], nPoly: number[], children: object[]}}
* @private
*/
#polygonNodeTree = null;
/**
* Contains the number of offset passes required to compute the polygon
* @type {number}
* @private
*/
#nbPass;
/* -------------------------------------------- */
/* Polygon Mesher static helper methods */
/* -------------------------------------------- */
/**
* Convert a flat points array into a 2 dimensional ClipperLib path
* @param {number[]|PIXI.Polygon} poly PIXI.Polygon or points flat array.
* @param {number} [dimension=2] The dimension (stride) of each point: 2 for [x,y], 3 for [x,y,z].
* @returns {ClipperLib.Path} The ClipperLib path.
*/
static getClipperPathFromPoints(poly, dimension = 2) {
poly = poly instanceof PIXI.Polygon ? poly.points : poly;
// If points is not an array or if its dimension is 1, 0 or negative, it can't be translated to a path.
if ( !Array.isArray(poly) || dimension < 2 ) {
throw new Error("You must provide valid coordinates to create a path.");
}
const path = new ClipperLib.Path();
if ( poly.length <= 1 ) return path; // Returning an empty path if we have zero or one point.
for ( let i = 0; i < poly.length; i += dimension ) {
path.push(new ClipperLib.IntPoint(poly[i], poly[i + 1]));
}
return path;
}
/* -------------------------------------------- */
/* Polygon Mesher Methods */
/* -------------------------------------------- */
/**
* Create the polygon mesh
* @param {number[]} points
* @private
*/
#computePolygonMesh(points) {
if ( !points || points.length < 6 ) return;
this.#updateVertices(points);
this.#updatePolygonNodeTree();
}
/* -------------------------------------------- */
/**
* Update vertices and add depth
* @param {number[]} vertices
* @private
*/
#updateVertices(vertices) {
const {offset, depthOuter, scale} = this.options;
const z = (offset === 0 ? 1.0 : depthOuter);
for ( let i = 0; i < vertices.length; i += 2 ) {
const x = Math.round(vertices[i] * scale);
const y = Math.round(vertices[i + 1] * scale);
this.vertices.push(x, y, z);
}
}
/* -------------------------------------------- */
/**
* Create the polygon node tree. When offset != 0, generate the offset edges and the interior of the polygon;
* when offset = 0, simply flag the tree for fast triangulation.
* @private
*/
#updatePolygonNodeTree() {
// Initializing the polygon node tree
this.#polygonNodeTree = {poly: this.vertices, nPoly: this.#normalize(this.vertices), children: []};
// Computing offset only if necessary
if ( this.options.offset === 0 ) return this.#polygonNodeTree.fastTriangulation = true;
// Creating the offsetter ClipperLib object, and adding our polygon path to it.
const offsetter = new ClipperLib.ClipperOffset(this.options.miterLimit);
// Launching the offset computation
return this.#createOffsetPolygon(offsetter, this.#polygonNodeTree);
}
/* -------------------------------------------- */
/**
* Recursively create offset polygons in successive passes
* @param {ClipperLib.ClipperOffset} offsetter ClipperLib offsetter
* @param {object} node A polygon node object to offset
* @param {number} [pass=0] The pass number (initialized with 0 for the first call)
*/
#createOffsetPolygon(offsetter, node, pass = 0) {
// Time to stop recursion on this node branch?
if ( pass >= this.#nbPass ) return;
const path = PolygonMesher.getClipperPathFromPoints(node.poly, 3); // Converting polygon points to ClipperLib path
const passOffset = Math.round(this.#scaled.soffset / this.#nbPass); // Mapping the offset for this path
const depth = Math.mix(this.options.depthOuter, this.options.depthInner, (pass + 1) / this.#nbPass); // Computing depth according to the actual pass and maximum number of pass (linear interpolation)
// Executing the offset
const paths = new ClipperLib.Paths();
offsetter.AddPath(path, ClipperLib.JoinType.jtMiter, ClipperLib.EndType.etClosedPolygon);
offsetter.Execute(paths, passOffset);
offsetter.Clear();
// Verify whether we have paths. If not, the area is too small to generate paths with this offset.
// It's time to stop recursion on this node branch.
if ( !paths.length ) return;
// Increment the pass count so we know when the recursive offsetting should stop
pass++;
// Creating offsets for children
for ( const path of paths ) {
const flat = this.#flattenVertices(path, depth);
const child = { poly: flat, nPoly: this.#normalize(flat), children: []};
node.children.push(child);
this.#createOffsetPolygon(offsetter, child, pass);
}
}
/* -------------------------------------------- */
/**
* Flatten a ClipperLib path to array of numbers
* @param {ClipperLib.IntPoint[]} path path to convert
* @param {number} depth depth to add to the flattened vertices
* @returns {number[]} flattened array of points
* @private
*/
#flattenVertices(path, depth) {
const flattened = [];
for ( const point of path ) {
flattened.push(point.X, point.Y, depth);
}
return flattened;
}
/* -------------------------------------------- */
/**
* Normalize polygon coordinates and put result into nPoly property.
* @param {number[]} poly the poly to normalize
* @returns {number[]} the normalized poly array
* @private
*/
#normalize(poly) {
if ( !this.options.normalize ) return [];
// Compute the normalized vertex
const {sx, sy, sradius} = this.#scaled;
const nPoly = [];
for ( let i = 0; i < poly.length; i+=3 ) {
const x = (poly[i] - sx) / sradius;
const y = (poly[i+1] - sy) / sradius;
nPoly.push(x, y, poly[i+2]);
}
return nPoly;
}
/* -------------------------------------------- */
/**
* Execute the triangulation to create indices
* @param {PIXI.Geometry} geometry A geometry to update
* @returns {PIXI.Geometry} The resulting geometry
*/
triangulate(geometry) {
this.#geometry = geometry;
// Can we draw at least one triangle (counting z now)? If not, update or create an empty geometry
if ( this.vertices.length < 9 ) return this.#emptyGeometry();
// Triangulate the mesh and create indices
if ( this.#polygonNodeTree.fastTriangulation ) this.#triangulateFast();
else this.#triangulateTree();
// Update the geometry
return this.#updateGeometry();
}
/* -------------------------------------------- */
/**
* Fast triangulation of the polygon node tree
* @private
*/
#triangulateFast() {
this.indices = PIXI.utils.earcut(this.vertices, null, 3);
if ( this.options.normalize ) {
this.vertices = this.#polygonNodeTree.nPoly;
}
}
/* -------------------------------------------- */
/**
* Recursive triangulation of the polygon node tree
* @private
*/
#triangulateTree() {
this.vertices = [];
this.indices = this.#triangulateNode(this.#polygonNodeTree);
}
/* -------------------------------------------- */
/**
* Triangulate a node and its children recursively to compose a mesh with multiple levels of depth
* @param {object} node The polygon node tree to triangulate
* @param {number[]} [indices=[]] An optional array to receive indices (used for recursivity)
* @returns {number[]} An array of indices, result of the triangulation
*/
#triangulateNode(node, indices = []) {
const {normalize} = this.options;
const vert = [];
const polyLength = node.poly.length / 3;
const hasChildren = !!node.children.length;
vert.push(...node.poly);
// If the node is the outer hull (the initial polygon), it has a position of 0 in the vertices array.
if ( !node.position ) {
node.position = 0;
this.vertices.push(...(normalize ? node.nPoly : node.poly));
}
// If the polygon has no children, it is an interior polygon triangulated in the fast way. Returning here.
if ( !hasChildren ) {
indices.push(...(PIXI.utils.earcut(vert, null, 3).map(v => v + node.position)));
return indices;
}
let holePosition = polyLength;
let holes = [];
let holeGroupPosition = 0;
for ( const nodeChild of node.children ) {
holes.push(holePosition);
nodeChild.position = (this.vertices.length / 3);
if ( !holeGroupPosition ) holeGroupPosition = nodeChild.position; // The position of the holes as a contiguous group.
holePosition += (nodeChild.poly.length / 3);
vert.push(...nodeChild.poly);
this.vertices.push(...(normalize ? nodeChild.nPoly : nodeChild.poly));
}
// We need to shift the resulting indices to match the order in which vertices were saved.
// We are using earcutEdges to enforce links between the outer and inner polygon(s).
const holeGroupShift = holeGroupPosition - polyLength;
indices.push(...(earcut.earcutEdges(vert, holes).map(v => {
if ( v < polyLength ) return v + node.position;
else return v + holeGroupShift;
})));
// Triangulating children
for ( const nodeChild of node.children ) {
this.#triangulateNode(nodeChild, indices);
}
return indices;
}
/* -------------------------------------------- */
/**
* Updating or creating the PIXI.Geometry that will be used by the mesh
* @private
*/
#updateGeometry() {
const {interleaved, normalize, scale} = this.options;
// Unscale non normalized vertices
if ( !normalize ) {
for ( let i = 0; i < this.vertices.length; i+=3 ) {
this.vertices[i] /= scale;
this.vertices[i+1] /= scale;
}
}
// If VBO shouldn't be interleaved, we create a separate array for vertices and depth
let vertices; let depth;
if ( !interleaved ) {
vertices = [];
depth = [];
for ( let i = 0; i < this.vertices.length; i+=3 ) {
vertices.push(this.vertices[i], this.vertices[i+1]);
depth.push(this.vertices[i+2]);
}
}
else vertices = this.vertices;
if ( this.#geometry ) {
const vertBuffer = this.#geometry.getBuffer("aVertexPosition");
vertBuffer.update(new Float32Array(vertices));
const indicesBuffer = this.#geometry.getIndex();
indicesBuffer.update(new Uint16Array(this.indices));
if ( !interleaved ) {
const depthBuffer = this.#geometry.getBuffer("aDepthValue");
depthBuffer.update(new Float32Array(depth));
}
}
else this.#geometry = this.#createGeometry(vertices, depth);
return this.#geometry;
}
/* -------------------------------------------- */
/**
* Empty the geometry, or if geometry is null, create an empty geometry.
* @private
*/
#emptyGeometry() {
const {interleaved} = this.options;
// Empty the current geometry if it exists
if ( this.#geometry ) {
const vertBuffer = this.#geometry.getBuffer("aVertexPosition");
vertBuffer.update(new Float32Array([0, 0]));
const indicesBuffer = this.#geometry.getIndex();
indicesBuffer.update(new Uint16Array([0, 0]));
if ( !interleaved ) {
const depthBuffer = this.#geometry.getBuffer("aDepthValue");
depthBuffer.update(new Float32Array([0]));
}
}
// Create an empty geometry otherwise
else if ( interleaved ) {
// Interleaved version
return new PIXI.Geometry().addAttribute("aVertexPosition", [0, 0, 0], 3).addIndex([0, 0]);
}
else {
this.#geometry = new PIXI.Geometry().addAttribute("aVertexPosition", [0, 0], 2)
.addAttribute("aTextureCoord", [0, 0, 0, 1, 1, 1, 1, 0], 2)
.addAttribute("aDepthValue", [0], 1)
.addIndex([0, 0]);
}
return this.#geometry;
}
/* -------------------------------------------- */
/**
* Create a new Geometry from provided buffers
* @param {number[]} vertices provided vertices array (interleaved or not)
* @param {number[]} [depth=undefined] provided depth array
* @param {number[]} [indices=this.indices] provided indices array
* @returns {PIXI.Geometry} the new PIXI.Geometry constructed from the provided buffers
*/
#createGeometry(vertices, depth=undefined, indices=this.indices) {
if ( this.options.interleaved ) {
return new PIXI.Geometry().addAttribute("aVertexPosition", vertices, 3).addIndex(indices);
}
if ( !depth ) throw new Error("You must provide a separate depth buffer when the data is not interleaved.");
return new PIXI.Geometry()
.addAttribute("aVertexPosition", vertices, 2)
.addAttribute("aTextureCoord", [0, 0, 1, 0, 1, 1, 0, 1], 2)
.addAttribute("aDepthValue", depth, 1)
.addIndex(indices);
}
}
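/* -------------------------------------------- */
// Illustrative usage sketch (editor addition, not part of the original file): triangulate a
// simple square with a slight inward offset and two depth levels, producing a PIXI.Geometry
// ready to back a mesh. The polygon coordinates are arbitrary.
function exampleSquareMesh() {
  const square = new PIXI.Polygon([0, 0, 200, 0, 200, 200, 0, 200]);
  const mesher = new PolygonMesher(square, {offset: -20, depthOuter: 0, depthInner: 1});
  return mesher.triangulate();  // Omitting the geometry argument creates a brand new PIXI.Geometry
}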

View File

@@ -0,0 +1,37 @@
/**
* An extension of the default PIXI.Text object which forces double resolution.
* At default resolution Text often looks blurry or fuzzy.
*/
class PreciseText extends PIXI.Text {
constructor(...args) {
super(...args);
this._autoResolution = false;
this._resolution = 2;
}
/**
* Prepare a TextStyle object which merges the canvas defaults with user-provided options
* @param {object} [options={}] Additional options merged with the default TextStyle
* @param {number} [options.anchor] A text anchor point from CONST.TEXT_ANCHOR_POINTS
* @returns {PIXI.TextStyle} The prepared TextStyle
*/
static getTextStyle({anchor, ...options}={}) {
const style = CONFIG.canvasTextStyle.clone();
for ( let [k, v] of Object.entries(options) ) {
if ( v !== undefined ) style[k] = v;
}
// Positioning
if ( !("align" in options) ) {
if ( anchor === CONST.TEXT_ANCHOR_POINTS.LEFT ) style.align = "right";
else if ( anchor === CONST.TEXT_ANCHOR_POINTS.RIGHT ) style.align = "left";
}
// Adaptive Stroke
if ( !("stroke" in options) ) {
const fill = Color.from(style.fill);
style.stroke = fill.hsv[2] > 0.6 ? 0x000000 : 0xFFFFFF;
}
return style;
}
}
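/* -------------------------------------------- */
// Illustrative usage sketch (editor addition, not part of the original file): build a label whose
// style adapts its stroke color to the fill brightness and is aligned for a right-side anchor.
// The helper name and style values are arbitrary.
function exampleLabel(text="Waypoint") {
  const style = PreciseText.getTextStyle({anchor: CONST.TEXT_ANCHOR_POINTS.RIGHT, fontSize: 24, fill: "#ffcc66"});
  const label = new PreciseText(text, style);
  label.anchor.set(0, 0.5);  // Left-aligned text placed to the right of its anchor point
  return label;
}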

View File

@@ -0,0 +1,248 @@
/**
* @typedef {Object} RayIntersection
* @property {number} x The x-coordinate of intersection
* @property {number} y The y-coordinate of intersection
* @property {number} t0 The proximity to the Ray origin, as a ratio of distance
* @property {number} t1 The proximity to the Ray destination, as a ratio of distance
*/
/**
* A ray for the purposes of computing sight and collision
* Given points A[x,y] and B[x,y]
*
* Slope-Intercept form:
* y = a + bx
* y = A.y + ((B.y - A.y) / (B.x - A.x))x
*
* Parametric form:
* R(t) = (1-t)A + tB
*
* @param {Point} A The origin of the Ray
* @param {Point} B The destination of the Ray
*/
class Ray {
constructor(A, B) {
/**
* The origin point, {x, y}
* @type {Point}
*/
this.A = A;
/**
* The destination point, {x, y}
* @type {Point}
*/
this.B = B;
/**
* The origin y-coordinate
* @type {number}
*/
this.y0 = A.y;
/**
* The origin x-coordinate
* @type {number}
*/
this.x0 = A.x;
/**
* The horizontal distance of the ray, x1 - x0
* @type {number}
*/
this.dx = B.x - A.x;
/**
* The vertical distance of the ray, y1 - y0
* @type {number}
*/
this.dy = B.y - A.y;
/**
* The slope of the ray, dy over dx
* @type {number}
*/
this.slope = this.dy / this.dx;
}
/* -------------------------------------------- */
/* Attributes */
/* -------------------------------------------- */
/**
* The cached angle, computed lazily in Ray#angle
* @type {number}
* @private
*/
_angle = undefined;
/**
* The cached distance, computed lazily in Ray#distance
* @type {number}
* @private
*/
_distance = undefined;
/* -------------------------------------------- */
/**
* The normalized angle of the ray in radians on the range (-PI, PI).
* The angle is computed lazily (only if required) and cached.
* @type {number}
*/
get angle() {
if ( this._angle === undefined ) this._angle = Math.atan2(this.dy, this.dx);
return this._angle;
}
set angle(value) {
this._angle = Number(value);
}
/* -------------------------------------------- */
/**
* A normalized bounding rectangle that encompasses the Ray
* @type {PIXI.Rectangle}
*/
get bounds() {
return new PIXI.Rectangle(this.A.x, this.A.y, this.dx, this.dy).normalize();
}
/* -------------------------------------------- */
/**
* The distance (length) of the Ray in pixels.
* The distance is computed lazily (only if required) and cached.
* @type {number}
*/
get distance() {
if ( this._distance === undefined ) this._distance = Math.hypot(this.dx, this.dy);
return this._distance;
}
set distance(value) {
this._distance = Number(value);
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/**
* A factory method to construct a Ray from an origin point, an angle, and a distance
* @param {number} x The origin x-coordinate
* @param {number} y The origin y-coordinate
* @param {number} radians The ray angle in radians
* @param {number} distance The distance of the ray in pixels
* @returns {Ray} The constructed Ray instance
*/
static fromAngle(x, y, radians, distance) {
const dx = Math.cos(radians);
const dy = Math.sin(radians);
const ray = this.fromArrays([x, y], [x + (dx * distance), y + (dy * distance)]);
ray._angle = Math.normalizeRadians(radians); // Store the angle, cheaper to compute here
ray._distance = distance; // Store the distance, cheaper to compute here
return ray;
}
/* -------------------------------------------- */
/**
* A factory method to construct a Ray from points in array format.
* @param {number[]} A The origin point [x,y]
* @param {number[]} B The destination point [x,y]
* @returns {Ray} The constructed Ray instance
*/
static fromArrays(A, B) {
return new this({x: A[0], y: A[1]}, {x: B[0], y: B[1]});
}
/* -------------------------------------------- */
/**
* Project the Ray by some proportion of its initial distance.
* Return the coordinates of that point along the path.
* @param {number} t The proportion of the Ray distance to project, where 0 is the origin and 1 is the destination
* @returns {Point} The coordinates of the projected point
*/
project(t) {
return {
x: this.A.x + (t * this.dx),
y: this.A.y + (t * this.dy)
};
}
/* -------------------------------------------- */
/**
* Create a Ray by projecting a certain distance towards a known point.
* @param {Point} origin The origin of the Ray
* @param {Point} point The point towards which to project
* @param {number} distance The distance of projection
* @returns {Ray}
*/
static towardsPoint(origin, point, distance) {
const dx = point.x - origin.x;
const dy = point.y - origin.y;
const t = distance / Math.hypot(dx, dy);
return new this(origin, {
x: origin.x + (t * dx),
y: origin.y + (t * dy)
});
}
/* -------------------------------------------- */
/**
* Create a Ray by projecting a certain squared-distance towards a known point.
* @param {Point} origin The origin of the Ray
* @param {Point} point The point towards which to project
* @param {number} distance2 The squared distance of projection
* @returns {Ray}
*/
static towardsPointSquared(origin, point, distance2) {
const dx = point.x - origin.x;
const dy = point.y - origin.y;
const t = Math.sqrt(distance2 / (Math.pow(dx, 2) + Math.pow(dy, 2)));
return new this(origin, {
x: origin.x + (t * dx),
y: origin.y + (t * dy)
});
}
/* -------------------------------------------- */
/**
* Reverse the direction of the Ray, returning a second Ray
* @returns {Ray}
*/
reverse() {
const r = new Ray(this.B, this.A);
r._distance = this._distance;
r._angle = Math.PI - this._angle;
return r;
}
/* -------------------------------------------- */
/**
* Create a new ray which uses the same origin point, but a slightly offset angle and distance
* @param {number} offset An offset in radians which modifies the angle of the original Ray
* @param {number} [distance] A distance the new ray should project, otherwise uses the same distance.
* @returns {Ray} A new Ray with an offset angle
*/
shiftAngle(offset, distance) {
return this.constructor.fromAngle(this.x0, this.y0, this.angle + offset, distance || this.distance);
}
/* -------------------------------------------- */
/**
* Find the point I[x,y] and distance t* on ray R(t) which intersects another line segment
* @see foundry.utils.lineSegmentIntersection
*/
intersectSegment(coords) {
return foundry.utils.lineSegmentIntersection(this.A, this.B, {x: coords[0], y: coords[1]}, {x: coords[2], y: coords[3]});
}
}
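/* -------------------------------------------- */
// Illustrative usage sketch (editor addition, not part of the original file): construct a
// 300-pixel ray at 45° and sample points along it. The helper name and coordinates are arbitrary.
function exampleRayProjection() {
  const ray = Ray.fromAngle(100, 100, Math.PI / 4, 300);  // Origin (100,100), 45°, length 300
  const midpoint = ray.project(0.5);                      // The point halfway between A and B
  const shifted = ray.shiftAngle(Math.toRadians(15));     // Same origin, rotated 15° further
  return {midpoint, distance: ray.distance, shiftedB: shifted.B};
}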

View File

@@ -0,0 +1,531 @@
/**
* @typedef {"light"|"sight"|"sound"|"move"|"universal"} PointSourcePolygonType
*/
/**
* @typedef {Object} PointSourcePolygonConfig
* @property {PointSourcePolygonType} type The type of polygon being computed
* @property {number} [angle=360] The angle of emission, if limited
* @property {number} [density] The desired density of padding rays, a number per PI
* @property {number} [radius] A limited radius of the resulting polygon
* @property {number} [rotation] The direction of facing, required if the angle is limited
* @property {number} [wallDirectionMode] Customize how wall direction of one-way walls is applied
* @property {boolean} [useThreshold=false] Compute the polygon with threshold wall constraints applied
* @property {boolean} [includeDarkness=false] Include edges coming from darkness sources
* @property {number} [priority] Priority when it comes to ignore edges from darkness sources
* @property {boolean} [debug] Display debugging visualization and logging for the polygon
* @property {PointSource} [source] The object (if any) that spawned this polygon.
* @property {Array<PIXI.Rectangle|PIXI.Circle|PIXI.Polygon>} [boundaryShapes] Limiting polygon boundary shapes
* @property {Readonly<boolean>} [useInnerBounds] Does this polygon use the Scene inner or outer bounding rectangle
* @property {Readonly<boolean>} [hasLimitedRadius] Does this polygon have a limited radius?
* @property {Readonly<boolean>} [hasLimitedAngle] Does this polygon have a limited angle?
* @property {Readonly<PIXI.Rectangle>} [boundingBox] The computed bounding box for the polygon
*/
/**
* An extension of the default PIXI.Polygon which is used to represent the line of sight for a point source.
* @extends {PIXI.Polygon}
*/
class PointSourcePolygon extends PIXI.Polygon {
/**
* Customize how wall direction of one-way walls is applied
* @enum {number}
*/
static WALL_DIRECTION_MODES = Object.freeze({
NORMAL: 0,
REVERSED: 1,
BOTH: 2
});
/**
* The rectangular bounds of this polygon
* @type {PIXI.Rectangle}
*/
bounds = new PIXI.Rectangle(0, 0, 0, 0);
/**
* The origin point of the source polygon.
* @type {Point}
*/
origin;
/**
* The configuration of this polygon.
* @type {PointSourcePolygonConfig}
*/
config = {};
/* -------------------------------------------- */
/**
* Is this polygon constrained by some boundary shape?
* @type {boolean}
*/
get isConstrained() {
return this.config.boundaryShapes.length > 0;
}
/* -------------------------------------------- */
/**
* Benchmark the performance of polygon computation for this source
* @param {number} iterations The number of test iterations to perform
* @param {Point} origin The origin point to benchmark
* @param {PointSourcePolygonConfig} config The polygon configuration to benchmark
*/
static benchmark(iterations, origin, config) {
const f = () => this.create(foundry.utils.deepClone(origin), foundry.utils.deepClone(config));
Object.defineProperty(f, "name", {value: `${this.name}.construct`, configurable: true});
return foundry.utils.benchmark(f, iterations);
}
/* -------------------------------------------- */
/**
* Compute the polygon given a point origin and radius
* @param {Point} origin The origin source point
* @param {PointSourcePolygonConfig} [config={}] Configuration options which customize the polygon computation
* @returns {PointSourcePolygon} The computed polygon instance
*/
static create(origin, config={}) {
const poly = new this();
poly.initialize(origin, config);
poly.compute();
return this.applyThresholdAttenuation(poly);
}
/* -------------------------------------------- */
/**
* Create a clone of this polygon.
* This overrides the default PIXI.Polygon#clone behavior.
* @override
* @returns {PointSourcePolygon} A cloned instance
*/
clone() {
const poly = new this.constructor([...this.points]);
poly.config = foundry.utils.deepClone(this.config);
poly.origin = {...this.origin};
poly.bounds = this.bounds.clone();
return poly;
}
/* -------------------------------------------- */
/* Polygon Computation */
/* -------------------------------------------- */
/**
* Compute the polygon using the origin and configuration options.
* @returns {PointSourcePolygon} The computed polygon
*/
compute() {
let t0 = performance.now();
const {x, y} = this.origin;
const {width, height} = canvas.dimensions;
const {angle, debug, radius} = this.config;
if ( !(x >= 0 && x <= width && y >= 0 && y <= height) ) {
console.warn("The polygon cannot be computed because its origin is out of the scene bounds.");
this.points.length = 0;
this.bounds = new PIXI.Rectangle(0, 0, 0, 0);
return this;
}
// Skip zero-angle or zero-radius polygons
if ( (radius === 0) || (angle === 0) ) {
this.points.length = 0;
this.bounds = new PIXI.Rectangle(0, 0, 0, 0);
return this;
}
// Clear the polygon bounds
this.bounds = undefined;
// Delegate computation to the implementation
this._compute();
// Cache the new polygon bounds
this.bounds = this.getBounds();
// Debugging and performance metrics
if ( debug ) {
let t1 = performance.now();
console.log(`Created ${this.constructor.name} in ${Math.round(t1 - t0)}ms`);
this.visualize();
}
return this;
}
/**
* Perform the implementation-specific computation
* @protected
*/
_compute() {
throw new Error("Each subclass of PointSourcePolygon must define its own _compute method");
}
/* -------------------------------------------- */
/**
* Customize the provided configuration object for this polygon type.
* @param {Point} origin The provided polygon origin
* @param {PointSourcePolygonConfig} config The provided configuration object
*/
initialize(origin, config) {
// Polygon origin
const o = this.origin = {x: Math.round(origin.x), y: Math.round(origin.y)};
// Configure radius
const cfg = this.config = config;
const maxR = canvas.dimensions.maxR;
cfg.radius = Math.min(cfg.radius ?? maxR, maxR);
cfg.hasLimitedRadius = (cfg.radius > 0) && (cfg.radius < maxR);
cfg.density = cfg.density ?? PIXI.Circle.approximateVertexDensity(cfg.radius);
// Configure angle
cfg.angle = cfg.angle ?? 360;
cfg.rotation = cfg.rotation ?? 0;
cfg.hasLimitedAngle = cfg.angle !== 360;
// Determine whether to use inner or outer bounds
const sceneRect = canvas.dimensions.sceneRect;
cfg.useInnerBounds ??= (cfg.type === "sight")
&& (o.x >= sceneRect.left && o.x <= sceneRect.right && o.y >= sceneRect.top && o.y <= sceneRect.bottom);
// Customize wall direction
cfg.wallDirectionMode ??= PointSourcePolygon.WALL_DIRECTION_MODES.NORMAL;
// Configure threshold
cfg.useThreshold ??= false;
// Configure darkness inclusion
cfg.includeDarkness ??= false;
// Boundary Shapes
cfg.boundaryShapes ||= [];
if ( cfg.hasLimitedAngle ) this.#configureLimitedAngle();
else if ( cfg.hasLimitedRadius ) this.#configureLimitedRadius();
if ( CONFIG.debug.polygons ) cfg.debug = true;
}
/* -------------------------------------------- */
/**
* Configure a limited angle and rotation into a triangular polygon boundary shape.
*/
#configureLimitedAngle() {
this.config.boundaryShapes.push(new LimitedAnglePolygon(this.origin, this.config));
}
/* -------------------------------------------- */
/**
* Configure a provided limited radius as a circular polygon boundary shape.
*/
#configureLimitedRadius() {
this.config.boundaryShapes.push(new PIXI.Circle(this.origin.x, this.origin.y, this.config.radius));
}
/* -------------------------------------------- */
/**
* Apply a constraining boundary shape to an existing PointSourcePolygon.
* Return a new instance of the polygon with the constraint applied.
* The new instance is only a "shallow clone", as it shares references to component properties with the original.
* @param {PIXI.Circle|PIXI.Rectangle|PIXI.Polygon} constraint The constraining boundary shape
* @param {object} [intersectionOptions] Options passed to the shape intersection method
* @returns {PointSourcePolygon} A new constrained polygon
*/
applyConstraint(constraint, intersectionOptions={}) {
// Enhance polygon configuration data using knowledge of the constraint
const poly = this.clone();
poly.config.boundaryShapes.push(constraint);
if ( (constraint instanceof PIXI.Circle) && (constraint.x === this.origin.x) && (constraint.y === this.origin.y) ) {
if ( poly.config.radius <= constraint.radius ) return poly;
poly.config.radius = constraint.radius;
poly.config.density = intersectionOptions.density ??= PIXI.Circle.approximateVertexDensity(constraint.radius);
if ( constraint.radius === 0 ) {
poly.points.length = 0;
poly.bounds.x = poly.bounds.y = poly.bounds.width = poly.bounds.height = 0;
return poly;
}
}
if ( !poly.points.length ) return poly;
// Apply the constraint and return the constrained polygon
const c = constraint.intersectPolygon(poly, intersectionOptions);
poly.points = c.points;
poly.bounds = poly.getBounds();
return poly;
}
/* -------------------------------------------- */
/** @inheritDoc */
contains(x, y) {
return this.bounds.contains(x, y) && super.contains(x, y);
}
/* -------------------------------------------- */
/* Polygon Boundary Constraints */
/* -------------------------------------------- */
/**
* Constrain polygon points by applying boundary shapes.
* @protected
*/
_constrainBoundaryShapes() {
const {density, boundaryShapes} = this.config;
if ( (this.points.length < 6) || !boundaryShapes.length ) return;
let constrained = this;
const intersectionOptions = {density, scalingFactor: 100};
for ( const c of boundaryShapes ) {
constrained = c.intersectPolygon(constrained, intersectionOptions);
}
this.points = constrained.points;
}
/* -------------------------------------------- */
/* Collision Testing */
/* -------------------------------------------- */
/**
* Test whether a Ray between the origin and destination points would collide with a boundary of this Polygon.
* A valid wall restriction type is compulsory and must be passed into the config options.
* @param {Point} origin An origin point
* @param {Point} destination A destination point
* @param {PointSourcePolygonConfig} config The configuration that defines a certain Polygon type
* @param {"any"|"all"|"closest"} [config.mode] The collision mode to test: "any", "all", or "closest"
* @returns {boolean|PolygonVertex|PolygonVertex[]|null} The collision result depends on the mode of the test:
* * any: returns a boolean for whether any collision occurred
* * all: returns a sorted array of PolygonVertex instances
* * closest: returns a PolygonVertex instance or null
*/
static testCollision(origin, destination, {mode="all", ...config}={}) {
if ( !CONST.WALL_RESTRICTION_TYPES.includes(config.type) ) {
throw new Error("A valid wall restriction type is required for testCollision.");
}
const poly = new this();
const ray = new Ray(origin, destination);
config.boundaryShapes ||= [];
config.boundaryShapes.push(ray.bounds);
poly.initialize(origin, config);
return poly._testCollision(ray, mode);
}
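/* -------------------------------------------- */
/* Illustrative usage sketch (editor addition): test whether anything blocks sight between two
 * points using a concrete polygon backend. The lookup through CONFIG.Canvas.polygonBackends is
 * an assumption about the surrounding API; any non-abstract subclass works the same way.
 *
 *   function exampleHasSightCollision(origin, destination) {
 *     const cls = CONFIG.Canvas.polygonBackends.sight;
 *     return cls.testCollision(origin, destination, {type: "sight", mode: "any"});
 *   }
 */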
/* -------------------------------------------- */
/**
* Determine the set of collisions which occurs for a Ray.
* @param {Ray} ray The Ray to test
* @param {string} mode The collision mode being tested
* @returns {boolean|PolygonVertex|PolygonVertex[]|null} The collision test result
* @protected
* @abstract
*/
_testCollision(ray, mode) {
throw new Error(`The ${this.constructor.name} class must implement the _testCollision method`);
}
/* -------------------------------------------- */
/* Visualization and Debugging */
/* -------------------------------------------- */
/**
* Visualize the polygon, displaying its computed area and applied boundary shapes.
* @returns {PIXI.Graphics|undefined} The rendered debugging shape
*/
visualize() {
if ( !this.points.length ) return;
let dg = canvas.controls.debug;
dg.clear();
for ( const constraint of this.config.boundaryShapes ) {
dg.lineStyle(2, 0xFFFFFF, 1.0).beginFill(0xAAFF00).drawShape(constraint).endFill();
}
dg.lineStyle(2, 0xFFFFFF, 1.0).beginFill(0xFFAA99, 0.25).drawShape(this).endFill();
return dg;
}
/* -------------------------------------------- */
/**
* Determine whether the shape is a complete circle.
* The config object must have angle and radius properties.
* @returns {boolean}
*/
isCompleteCircle() {
const { radius, angle, density } = this.config;
if ( radius === 0 ) return true;
if ( angle < 360 || (this.points.length !== (density * 2)) ) return false;
const shapeArea = Math.abs(this.signedArea());
const circleArea = (0.5 * density * Math.sin(2 * Math.PI / density)) * (radius ** 2);
return circleArea.almostEqual(shapeArea, 1e-5);
}
/* -------------------------------------------- */
/* Threshold Polygons */
/* -------------------------------------------- */
/**
* Augment a PointSourcePolygon by adding additional coverage for shapes permitted by threshold walls.
* @param {PointSourcePolygon} polygon The computed polygon
* @returns {PointSourcePolygon} The augmented polygon
*/
static applyThresholdAttenuation(polygon) {
const config = polygon.config;
if ( !config.useThreshold ) return polygon;
// Identify threshold walls and confirm whether threshold augmentation is required
const {nAttenuated, edges} = PointSourcePolygon.#getThresholdEdges(polygon.origin, config);
if ( !nAttenuated ) return polygon;
// Create attenuation shapes for all threshold walls
const attenuationShapes = PointSourcePolygon.#createThresholdShapes(polygon, edges);
if ( !attenuationShapes.length ) return polygon;
// Compute a second polygon which does not enforce threshold walls
const noThresholdPolygon = new this();
noThresholdPolygon.initialize(polygon.origin, {...config, useThreshold: false});
noThresholdPolygon.compute();
// Combine the unrestricted polygon with the attenuation shapes
const combined = PointSourcePolygon.#combineThresholdShapes(noThresholdPolygon, attenuationShapes);
polygon.points = combined.points;
polygon.bounds = polygon.getBounds();
return polygon;
}
/* -------------------------------------------- */
/**
* Identify edges in the Scene which include an active threshold.
* @param {Point} origin
* @param {object} config
* @returns {{edges: Edge[], nAttenuated: number}}
*/
static #getThresholdEdges(origin, config) {
let nAttenuated = 0;
const edges = [];
for ( const edge of canvas.edges.values() ) {
if ( edge.applyThreshold(config.type, origin, config.externalRadius) ) {
edges.push(edge);
nAttenuated += edge.threshold.attenuation;
}
}
return {edges, nAttenuated};
}
/* -------------------------------------------- */
/**
* @typedef {ClipperPoint[]} ClipperPoints
*/
/**
* For each threshold wall that this source passes through, construct a shape representing the attenuated source.
* The attenuated shape is a circle with a radius modified by origin proximity to the threshold wall.
* Intersect the attenuated shape against the LOS with threshold walls considered.
* The result is the LOS for the attenuated light source.
* @param {PointSourcePolygon} thresholdPolygon The computed polygon with thresholds applied
* @param {Edge[]} edges The identified array of threshold walls
* @returns {ClipperPoints[]} The resulting array of intersected threshold shapes
*/
static #createThresholdShapes(thresholdPolygon, edges) {
const cps = thresholdPolygon.toClipperPoints();
const origin = thresholdPolygon.origin;
const {radius, externalRadius, type} = thresholdPolygon.config;
const shapes = [];
// Iterate over threshold walls
for ( const edge of edges ) {
let thresholdShape;
// Create attenuated shape
if ( edge.threshold.attenuation ) {
const r = PointSourcePolygon.#calculateThresholdAttenuation(edge, origin, radius, externalRadius, type);
if ( !r.outside ) continue;
thresholdShape = new PIXI.Circle(origin.x, origin.y, r.inside + r.outside);
}
// No attenuation, use the full circle
else thresholdShape = new PIXI.Circle(origin.x, origin.y, radius);
// Intersect each shape against the LOS
const ix = thresholdShape.intersectClipper(cps, {convertSolution: false});
if ( ix.length && ix[0].length > 2 ) shapes.push(ix[0]);
}
return shapes;
}
/* -------------------------------------------- */
/**
* Calculate the attenuation of the source as it passes through the threshold wall.
* The distance of perception through the threshold wall depends on proximity of the source from the wall.
* @param {Edge} edge The Edge for which this threshold applies
* @param {Point} origin Origin point on the canvas for this source
* @param {number} radius Radius to use for this source, before considering attenuation
* @param {number} externalRadius The external radius of the source
* @param {string} type Sense type for the source
* @returns {{inside: number, outside: number}} The inside and outside portions of the radius
*/
static #calculateThresholdAttenuation(edge, origin, radius, externalRadius, type) {
const d = edge.threshold?.[type];
if ( !d ) return { inside: radius, outside: radius };
const proximity = edge[type] === CONST.WALL_SENSE_TYPES.PROXIMITY;
// Find the closest point on the threshold wall to the source.
// Calculate the proportion of the source radius that is "inside" and "outside" the threshold wall.
const pt = foundry.utils.closestPointToSegment(origin, edge.a, edge.b);
const inside = Math.hypot(pt.x - origin.x, pt.y - origin.y);
const outside = radius - inside;
if ( (outside < 0) || outside.almostEqual(0) ) return { inside, outside: 0 };
// Attenuate the radius outside the threshold wall based on source proximity to the wall.
const sourceDistance = proximity ? Math.max(inside - externalRadius, 0) : (inside + externalRadius);
const percentDistance = sourceDistance / d;
const pInv = proximity ? 1 - percentDistance : Math.min(1, percentDistance - 1);
const a = (pInv / (2 * (1 - pInv))) * CONFIG.Wall.thresholdAttenuationMultiplier;
return { inside, outside: Math.min(a * d, outside) };
}
/* -------------------------------------------- */
/**
* Union the attenuated shape-LOS intersections with the closed LOS.
* The portion of the light source "inside" the threshold walls is not modified from its default radius or shape.
* Clipper can union everything at once. A positive fill type avoids checkerboard artifacts by filling any overlap.
* @param {PointSourcePolygon} los The LOS polygon with threshold walls inactive
* @param {ClipperPoints[]} shapes Attenuation shapes for threshold walls
* @returns {PIXI.Polygon} The combined LOS polygon with threshold shapes
*/
static #combineThresholdShapes(los, shapes) {
const c = new ClipperLib.Clipper();
const combined = [];
const cPaths = [los.toClipperPoints(), ...shapes];
c.AddPaths(cPaths, ClipperLib.PolyType.ptSubject, true);
const p = ClipperLib.PolyFillType.pftPositive;
c.Execute(ClipperLib.ClipType.ctUnion, combined, p, p);
return PIXI.Polygon.fromClipperPoints(combined.length ? combined[0] : []);
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/** @ignore */
get rays() {
foundry.utils.logCompatibilityWarning("You are referencing PointSourcePolygon#rays which is no longer a required "
+ "property of that interface. If your subclass uses the rays property it should be explicitly defined by the "
+ "subclass which requires it.", {since: 11, until: 13});
return this.#rays;
}
set rays(rays) {
this.#rays = rays;
}
/** @deprecated since v11 */
#rays = [];
}

View File

@@ -0,0 +1,175 @@
/**
* Determine the center of the circle.
* Trivial, but used to match center method for other shapes.
* @type {PIXI.Point}
*/
Object.defineProperty(PIXI.Circle.prototype, "center", { get: function() {
return new PIXI.Point(this.x, this.y);
}});
/* -------------------------------------------- */
/**
* Determine if a point is on or nearly on this circle.
* @param {Point} point Point to test
* @param {number} epsilon Tolerated margin of error
* @returns {boolean} Is the point on the circle within the allowed tolerance?
*/
PIXI.Circle.prototype.pointIsOn = function(point, epsilon = 1e-08) {
const dist2 = Math.pow(point.x - this.x, 2) + Math.pow(point.y - this.y, 2);
const r2 = Math.pow(this.radius, 2);
return dist2.almostEqual(r2, epsilon);
};
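/* -------------------------------------------- */
/**
* Usage sketch (illustrative values assumed).
* @example
* const circle = new PIXI.Circle(0, 0, 100);
* circle.pointIsOn({x: 100, y: 0});  // true: the point lies exactly on the circumference
* circle.pointIsOn({x: 50, y: 0});   // false: the point is inside, not on the boundary
*/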
/* -------------------------------------------- */
/**
* Get all intersection points on this circle for a segment A|B
* Intersections are sorted from A to B.
* @param {Point} a The first endpoint on segment A|B
* @param {Point} b The second endpoint on segment A|B
* @returns {Point[]} Points where the segment A|B intersects the circle
*/
PIXI.Circle.prototype.segmentIntersections = function(a, b) {
const ixs = foundry.utils.lineCircleIntersection(a, b, this, this.radius);
return ixs.intersections;
};
/* -------------------------------------------- */
/**
* Calculate an x,y point on this circle's circumference given an angle
* 0: due east
* π / 2: due south
* π or -π: due west
* -π/2: due north
* @param {number} angle Angle of the point, in radians
* @returns {Point} The point on the circle at the given angle
*/
PIXI.Circle.prototype.pointAtAngle = function(angle) {
return {
x: this.x + (this.radius * Math.cos(angle)),
y: this.y + (this.radius * Math.sin(angle))
};
};
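/* -------------------------------------------- */
/**
* Usage sketch. In the y-down canvas orientation an angle of 0 points due east and π/2 points due south.
* @example
* const circle = new PIXI.Circle(0, 0, 10);
* circle.pointAtAngle(0);            // {x: 10, y: 0} (due east)
* circle.pointAtAngle(Math.PI / 2);  // {x: ~0, y: 10} (due south)
*/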
/* -------------------------------------------- */
/**
* Get all the points for a polygon approximation of this circle between two points.
* The two points can be anywhere in 2d space. The intersection of this circle with the line from this circle center
* to the point will be used as the start or end point, respectively.
* This is used to draw the portion of the circle (the arc) between two intersection points on this circle.
* @param {Point} a Point in 2d space representing the start point
* @param {Point} b Point in 2d space representing the end point
* @param {object} [options] Options passed on to the pointsForArc method
* @returns { Point[]} An array of points arranged clockwise from start to end
*/
PIXI.Circle.prototype.pointsBetween = function(a, b, options) {
const fromAngle = Math.atan2(a.y - this.y, a.x - this.x);
const toAngle = Math.atan2(b.y - this.y, b.x - this.x);
return this.pointsForArc(fromAngle, toAngle, { includeEndpoints: false, ...options });
};
/* -------------------------------------------- */
/**
* Get the points that would approximate a circular arc along this circle, given a starting and ending angle.
* Points returned are clockwise. If from and to are the same, a full circle will be returned.
* @param {number} fromAngle Starting angle, in radians. 0 is due east, π/2 is due south
* @param {number} toAngle Ending angle, in radians
* @param {object} [options] Options which affect how the circle is converted
* @param {number} [options.density] The number of points which defines the density of approximation
* @param {boolean} [options.includeEndpoints] Whether to include points at the circle where the arc starts and ends
* @returns {Point[]} An array of points along the requested arc
*/
PIXI.Circle.prototype.pointsForArc = function(fromAngle, toAngle, {density, includeEndpoints=true} = {}) {
const pi2 = 2 * Math.PI;
density ??= this.constructor.approximateVertexDensity(this.radius);
const points = [];
const delta = pi2 / density;
if ( includeEndpoints ) points.push(this.pointAtAngle(fromAngle));
// Determine number of points to add
let dAngle = toAngle - fromAngle;
while ( dAngle <= 0 ) dAngle += pi2; // Angles may not be normalized, so normalize total.
const nPoints = Math.round(dAngle / delta);
// Construct padding rays (clockwise)
for ( let i = 1; i < nPoints; i++ ) points.push(this.pointAtAngle(fromAngle + (i * delta)));
if ( includeEndpoints ) points.push(this.pointAtAngle(toAngle));
return points;
};
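/* -------------------------------------------- */
/**
* Usage sketch (the density value is assumed for readability).
* @example Approximate the quarter arc from due east to due south using 8 points per full circle
* const circle = new PIXI.Circle(0, 0, 100);
* const arc = circle.pointsForArc(0, Math.PI / 2, {density: 8});
* // arc holds three points: the start point, one intermediate point at π/4, and the end point
*/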
/* -------------------------------------------- */
/**
* Approximate this PIXI.Circle as a PIXI.Polygon
* @param {object} [options] Options forwarded on to the pointsForArc method
* @returns {PIXI.Polygon} The Circle expressed as a PIXI.Polygon
*/
PIXI.Circle.prototype.toPolygon = function(options) {
const points = this.pointsForArc(0, 0, options);
points.pop(); // Drop the repeated endpoint
return new PIXI.Polygon(points);
};
/* -------------------------------------------- */
/**
* The recommended vertex density for the regular polygon approximation of a circle of a given radius.
* Small radius circles have fewer vertices. The returned value will be rounded up to the nearest integer.
* See the formula described at:
* https://math.stackexchange.com/questions/4132060/compute-number-of-regular-polgy-sides-to-approximate-circle-to-defined-precision
* @param {number} radius Circle radius
* @param {number} [epsilon] The maximum tolerable distance between an approximated line segment and the true radius.
* A larger epsilon results in fewer points for a given radius.
* @returns {number} The number of points for the approximated polygon
*/
PIXI.Circle.approximateVertexDensity = function(radius, epsilon=1) {
return Math.ceil(Math.PI / Math.sqrt(2 * (epsilon / radius)));
};
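/* -------------------------------------------- */
/**
* Worked example. For a 100-pixel radius and the default epsilon of 1 pixel:
* Math.PI / Math.sqrt(2 * (1 / 100)) ≈ 3.1416 / 0.1414 ≈ 22.2, which rounds up to 23 vertices.
* @example
* PIXI.Circle.approximateVertexDensity(100); // 23
*/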
/* -------------------------------------------- */
/**
* Intersect this PIXI.Circle with a PIXI.Polygon.
* @param {PIXI.Polygon} polygon A PIXI.Polygon
* @param {object} [options] Options which configure how the intersection is computed
* @param {number} [options.density] The number of points which defines the density of approximation
* @param {number} [options.clipType] The clipper clip type
* @param {boolean} [options.weilerAtherton=true] Use the Weiler-Atherton algorithm. Otherwise, use Clipper.
* @returns {PIXI.Polygon} The intersected polygon
*/
PIXI.Circle.prototype.intersectPolygon = function(polygon, {density, clipType, weilerAtherton=true, ...options}={}) {
if ( !this.radius ) return new PIXI.Polygon([]);
clipType ??= ClipperLib.ClipType.ctIntersection;
// Use Weiler-Atherton for efficient intersection or union
if ( weilerAtherton && polygon.isPositive ) {
const res = WeilerAthertonClipper.combine(polygon, this, {clipType, density, ...options});
if ( !res.length ) return new PIXI.Polygon([]);
return res[0];
}
// Otherwise, use Clipper polygon intersection
const approx = this.toPolygon({density});
return polygon.intersectPolygon(approx, options);
};
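/* -------------------------------------------- */
/**
* Usage sketch (shapes assumed for illustration). A clockwise (positively oriented) square is clipped against an
* overlapping circle using the default intersection clip type, which takes the Weiler-Atherton path.
* @example
* const square = new PIXI.Polygon([0, 0, 200, 0, 200, 200, 0, 200]);
* const circle = new PIXI.Circle(200, 200, 150);
* const clipped = circle.intersectPolygon(square); // A PIXI.Polygon covering the overlapping region
*/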
/* -------------------------------------------- */
/**
* Intersect this PIXI.Circle with an array of ClipperPoints.
* Convert the circle to a Polygon approximation and use intersectPolygon.
* In the future we may replace this with more specialized logic which uses the line-circle intersection formula.
* @param {ClipperPoint[]} clipperPoints Array of ClipperPoints generated by PIXI.Polygon.toClipperPoints()
* @param {object} [options] Options which configure how the intersection is computed
* @param {number} [options.density] The number of points which defines the density of approximation
* @returns {PIXI.Polygon} The intersected polygon
*/
PIXI.Circle.prototype.intersectClipper = function(clipperPoints, {density, ...options}={}) {
if ( !this.radius ) return [];
const approx = this.toPolygon({density});
return approx.intersectClipper(clipperPoints, options);
};

View File

@@ -0,0 +1,150 @@
/**
* Draws a path.
* @param {number[]|PIXI.IPointData[]|PIXI.Polygon|...number|...PIXI.IPointData} path The polygon or points.
* @returns {this} This Graphics instance.
*/
PIXI.Graphics.prototype.drawPath = function(...path) {
let closeStroke = false;
let polygon = path[0];
let points;
if ( polygon.points ) {
closeStroke = polygon.closeStroke;
points = polygon.points;
} else if ( Array.isArray(path[0]) ) {
points = path[0];
} else {
points = path;
}
polygon = new PIXI.Polygon(points);
polygon.closeStroke = closeStroke;
return this.drawShape(polygon);
};
PIXI.LegacyGraphics.prototype.drawPath = PIXI.Graphics.prototype.drawPath;
PIXI.smooth.SmoothGraphics.prototype.drawPath = PIXI.Graphics.prototype.drawPath;
/* -------------------------------------------- */
/**
* Draws a smoothed polygon.
* @param {number[]|PIXI.IPointData[]|PIXI.Polygon|...number|...PIXI.IPointData} path The polygon or points.
* @param {number} [smoothing=0] The smoothness in the range [0, 1]. 0: no smoothing; 1: maximum smoothing.
* @returns {this} This Graphics instance.
*/
PIXI.Graphics.prototype.drawSmoothedPolygon = function(...path) {
let closeStroke = true;
let polygon = path[0];
let points;
let factor;
if ( polygon.points ) {
closeStroke = polygon.closeStroke;
points = polygon.points;
factor = path[1];
} else if ( Array.isArray(path[0]) ) {
points = path[0];
factor = path[1];
} else if ( typeof path[0] === "number" ) {
points = path;
factor = path.length % 2 ? path.at(-1) : 0;
} else {
const n = path.length - (typeof path.at(-1) !== "object" ? 1 : 0);
points = [];
for ( let i = 0; i < n; i++ ) points.push(path[i].x, path[i].y);
factor = path.at(n);
}
factor ??= 0;
if ( (points.length < 6) || (factor <= 0) ) {
polygon = new PIXI.Polygon(points.slice(0, points.length - (points.length % 2)));
polygon.closeStroke = closeStroke;
return this.drawShape(polygon);
}
const dedupedPoints = [points[0], points[1]];
for ( let i = 2; i < points.length - 1; i += 2 ) {
const x = points[i];
const y = points[i + 1];
if ( (x === points[i - 2]) && (y === points[i - 1]) ) continue;
dedupedPoints.push(x, y);
}
points = dedupedPoints;
if ( closeStroke && (points[0] === points.at(-2)) && (points[1] === points.at(-1)) ) points.length -= 2;
if ( points.length < 6 ) {
polygon = new PIXI.Polygon(points);
polygon.closeStroke = closeStroke;
return this.drawShape(polygon);
}
const getBezierControlPoints = (fromX, fromY, toX, toY, nextX, nextY) => {
const vectorX = nextX - fromX;
const vectorY = nextY - fromY;
const preDistance = Math.hypot(toX - fromX, toY - fromY);
const postDistance = Math.hypot(nextX - toX, nextY - toY);
const totalDistance = preDistance + postDistance;
const cp0d = 0.5 * factor * (preDistance / totalDistance);
const cp1d = 0.5 * factor * (postDistance / totalDistance);
return [
toX - (vectorX * cp0d),
toY - (vectorY * cp0d),
toX + (vectorX * cp1d),
toY + (vectorY * cp1d)
];
};
let [fromX, fromY, toX, toY] = points;
let [cpX, cpY, cpXNext, cpYNext] = getBezierControlPoints(points.at(-2), points.at(-1), fromX, fromY, toX, toY);
this.moveTo(fromX, fromY);
for ( let i = 2, n = points.length + (closeStroke ? 2 : 0); i < n; i += 2 ) {
const nextX = points[(i + 2) % points.length];
const nextY = points[(i + 3) % points.length];
cpX = cpXNext;
cpY = cpYNext;
let cpX2;
let cpY2;
[cpX2, cpY2, cpXNext, cpYNext] = getBezierControlPoints(fromX, fromY, toX, toY, nextX, nextY);
if ( !closeStroke && (i === 2) ) this.quadraticCurveTo(cpX2, cpY2, toX, toY);
else if ( !closeStroke && (i === points.length - 2) ) this.quadraticCurveTo(cpX, cpY, toX, toY);
else this.bezierCurveTo(cpX, cpY, cpX2, cpY2, toX, toY);
fromX = toX;
fromY = toY;
toX = nextX;
toY = nextY;
}
if ( closeStroke ) this.closePath();
this.finishPoly();
return this;
};
PIXI.LegacyGraphics.prototype.drawSmoothedPolygon = PIXI.Graphics.prototype.drawSmoothedPolygon;
PIXI.smooth.SmoothGraphics.prototype.drawSmoothedPolygon = PIXI.Graphics.prototype.drawSmoothedPolygon;
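/* -------------------------------------------- */
/**
* Usage sketch (coordinates, colors, and smoothing factor are assumed for illustration).
* @example Draw a lightly smoothed triangle
* const g = new PIXI.Graphics();
* g.lineStyle(2, 0xFF9829).beginFill(0xFF9829, 0.1);
* g.drawSmoothedPolygon([100, 100, 300, 120, 180, 260], 0.5);
* g.endFill();
*/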
/* -------------------------------------------- */
/**
* Draws a smoothed path.
* @param {number[]|PIXI.IPointData[]|PIXI.Polygon|...number|...PIXI.IPointData} path The polygon or points.
* @param {number} [smoothing=0] The smoothness in the range [0, 1]. 0: no smoothing; 1: maximum smoothing.
* @returns {this} This Graphics instance.
*/
PIXI.Graphics.prototype.drawSmoothedPath = function(...path) {
let closeStroke = false;
let polygon = path[0];
let points;
let factor;
if ( polygon.points ) {
closeStroke = polygon.closeStroke;
points = polygon.points;
factor = path[1];
} else if ( Array.isArray(path[0]) ) {
points = path[0];
factor = path[1];
} else if ( typeof path[0] === "number" ) {
points = path;
factor = path.length % 2 ? path.at(-1) : 0;
} else {
const n = path.length - (typeof path.at(-1) !== "object" ? 1 : 0);
points = [];
for ( let i = 0; i < n; i++ ) points.push(path[i].x, path[i].y);
factor = path.at(n);
}
polygon = new PIXI.Polygon(points);
polygon.closeStroke = closeStroke;
return this.drawSmoothedPolygon(polygon, factor);
};
PIXI.LegacyGraphics.prototype.drawSmoothedPath = PIXI.Graphics.prototype.drawSmoothedPath;
PIXI.smooth.SmoothGraphics.prototype.drawSmoothedPath = PIXI.Graphics.prototype.drawSmoothedPath;

View File

@@ -0,0 +1,50 @@
/**
* A custom Transform class allowing changes to be observed via a callback.
* @extends PIXI.Transform
*
* @param {Function} callback The callback called to observe changes.
* @param {Object} scope The scope of the callback.
*/
class ObservableTransform extends PIXI.Transform {
constructor(callback, scope) {
super();
if ( !(callback instanceof Function) ) {
throw new Error("The callback bound to an ObservableTransform class must be a valid function.")
}
if ( !(scope instanceof Object) ) {
throw new Error("The scope bound to an ObservableTransform class must be a valid object/class.")
}
this.scope = scope;
this.cb = callback;
}
/**
* The callback which is observing the changes.
* @type {Function}
*/
cb;
/**
* The scope of the callback.
* @type {Object}
*/
scope;
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @inheritDoc */
onChange() {
super.onChange();
this.cb.call(this.scope);
}
/* -------------------------------------------- */
/** @inheritDoc */
updateSkew() {
super.updateSkew();
this.cb.call(this.scope);
}
}
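/* -------------------------------------------- */
/**
* Usage sketch (the container and callback are illustrative assumptions).
* @example Replace a container's transform with an observable one which flags the object for a refresh
* const container = new PIXI.Container();
* container.transform = new ObservableTransform(function() {
*   this.dirty = true; // "this" is the scope passed as the second argument
* }, container);
* container.position.set(100, 50); // Triggers onChange, which invokes the callback
*/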

View File

@@ -0,0 +1,236 @@
/**
* Test whether the polygon has a positive signed area.
* Using a y-down axis orientation, this means that the polygon is "clockwise".
* @type {boolean}
*/
Object.defineProperties(PIXI.Polygon.prototype, {
isPositive: {
get: function() {
if ( this._isPositive !== undefined ) return this._isPositive;
if ( this.points.length < 6 ) return undefined;
return this._isPositive = this.signedArea() > 0;
}
},
_isPositive: {value: undefined, writable: true, enumerable: false}
});
/* -------------------------------------------- */
/**
* Clear the cached signed orientation.
*/
PIXI.Polygon.prototype.clearCache = function() {
this._isPositive = undefined;
};
/* -------------------------------------------- */
/**
* Compute the signed area of polygon using an approach similar to ClipperLib.Clipper.Area.
* The math behind this is based on the Shoelace formula. https://en.wikipedia.org/wiki/Shoelace_formula.
* The area is positive if the orientation of the polygon is positive.
* @returns {number} The signed area of the polygon
*/
PIXI.Polygon.prototype.signedArea = function() {
const points = this.points;
const ln = points.length;
if ( ln < 6 ) return 0;
// Compute area
let area = 0;
let x1 = points[ln - 2];
let y1 = points[ln - 1];
for ( let i = 0; i < ln; i += 2 ) {
const x2 = points[i];
const y2 = points[i + 1];
area += (x2 - x1) * (y2 + y1);
x1 = x2;
y1 = y2;
}
// Negate the area because in Foundry canvas, y-axis is reversed
// See https://sourceforge.net/p/jsclipper/wiki/documentation/#clipperlibclipperorientation
// The 1/2 comes from the Shoelace formula
return area * -0.5;
};
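/* -------------------------------------------- */
/**
* Worked example. In the y-down canvas orientation a clockwise unit square has a positive signed area.
* @example
* new PIXI.Polygon([0, 0, 1, 0, 1, 1, 0, 1]).signedArea();  // 1
* new PIXI.Polygon([0, 0, 0, 1, 1, 1, 1, 0]).signedArea();  // -1 (counter-clockwise)
*/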
/* -------------------------------------------- */
/**
* Reverse the order of the polygon points in-place, replacing the polygon's points array with the reversed copy.
* Note: references to the old points array will not be affected.
* @returns {PIXI.Polygon} This polygon with its orientation reversed
*/
PIXI.Polygon.prototype.reverseOrientation = function() {
const reversed_pts = [];
const pts = this.points;
const ln = pts.length - 2;
for ( let i = ln; i >= 0; i -= 2 ) reversed_pts.push(pts[i], pts[i + 1]);
this.points = reversed_pts;
if ( this._isPositive !== undefined ) this._isPositive = !this._isPositive;
return this;
};
/* -------------------------------------------- */
/**
* Add a de-duplicated point to the Polygon.
* @param {Point} point The point to add to the Polygon
* @returns {PIXI.Polygon} A reference to the polygon for method chaining
*/
PIXI.Polygon.prototype.addPoint = function({x, y}={}) {
const l = this.points.length;
if ( (x === this.points[l-2]) && (y === this.points[l-1]) ) return this;
this.points.push(x, y);
this.clearCache();
return this;
};
/* -------------------------------------------- */
/**
* Return the bounding box for a PIXI.Polygon.
* The bounding rectangle is normalized such that the width and height are non-negative.
* @returns {PIXI.Rectangle} The bounding PIXI.Rectangle
*/
PIXI.Polygon.prototype.getBounds = function() {
if ( this.points.length < 2 ) return new PIXI.Rectangle(0, 0, 0, 0);
let maxX; let maxY;
let minX = maxX = this.points[0];
let minY = maxY = this.points[1];
for ( let i=3; i<this.points.length; i+=2 ) {
const x = this.points[i-1];
const y = this.points[i];
if ( x < minX ) minX = x;
else if ( x > maxX ) maxX = x;
if ( y < minY ) minY = y;
else if ( y > maxY ) maxY = y;
}
return new PIXI.Rectangle(minX, minY, maxX - minX, maxY - minY);
};
/* -------------------------------------------- */
/**
* @typedef {Object} ClipperPoint
* @property {number} X
* @property {number} Y
*/
/**
* Construct a PIXI.Polygon instance from an array of clipper points [{X,Y}, ...].
* @param {ClipperPoint[]} points An array of points returned by clipper
* @param {object} [options] Options which affect how canvas points are generated
* @param {number} [options.scalingFactor=1] A scaling factor used to preserve floating point precision
* @returns {PIXI.Polygon} The resulting PIXI.Polygon
*/
PIXI.Polygon.fromClipperPoints = function(points, {scalingFactor=1}={}) {
const polygonPoints = [];
for ( const point of points ) {
polygonPoints.push(point.X / scalingFactor, point.Y / scalingFactor);
}
return new PIXI.Polygon(polygonPoints);
};
/* -------------------------------------------- */
/**
* Convert a PIXI.Polygon into an array of clipper points [{X,Y}, ...].
* Note that clipper points must be rounded to integers.
* In order to preserve some amount of floating point precision, an optional scaling factor may be provided.
* @param {object} [options] Options which affect how clipper points are generated
* @param {number} [options.scalingFactor=1] A scaling factor used to preserve floating point precision
* @returns {ClipperPoint[]} An array of points to be used by clipper
*/
PIXI.Polygon.prototype.toClipperPoints = function({scalingFactor=1}={}) {
const points = [];
for ( let i = 1; i < this.points.length; i += 2 ) {
points.push({
X: Math.round(this.points[i-1] * scalingFactor),
Y: Math.round(this.points[i] * scalingFactor)
});
}
return points;
};
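/* -------------------------------------------- */
/**
* Usage sketch. Because clipper coordinates are rounded to integers, a scaling factor preserves sub-pixel
* precision across a round trip (values assumed for illustration).
* @example
* const poly = new PIXI.Polygon([0.25, 0.75, 10.5, 0.75, 10.5, 9.25]);
* const cps = poly.toClipperPoints({scalingFactor: 100});  // [{X: 25, Y: 75}, {X: 1050, Y: 75}, {X: 1050, Y: 925}]
* const back = PIXI.Polygon.fromClipperPoints(cps, {scalingFactor: 100});
* // back.points is again [0.25, 0.75, 10.5, 0.75, 10.5, 9.25]
*/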
/* -------------------------------------------- */
/**
* Determine whether the PIXI.Polygon is closed, defined by having the same starting and ending point.
* @type {boolean}
*/
Object.defineProperty(PIXI.Polygon.prototype, "isClosed", {
get: function() {
const ln = this.points.length;
if ( ln < 4 ) return false;
return (this.points[0] === this.points[ln-2]) && (this.points[1] === this.points[ln-1]);
},
enumerable: false
});
/* -------------------------------------------- */
/* Intersection Methods */
/* -------------------------------------------- */
/**
* Intersect this PIXI.Polygon with another PIXI.Polygon using the clipper library.
* @param {PIXI.Polygon} other Another PIXI.Polygon
* @param {object} [options] Options which configure how the intersection is computed
* @param {number} [options.clipType] The clipper clip type
* @param {number} [options.scalingFactor] A scaling factor passed to Polygon#toClipperPoints to preserve precision
* @returns {PIXI.Polygon} The intersected polygon
*/
PIXI.Polygon.prototype.intersectPolygon = function(other, {clipType, scalingFactor}={}) {
const otherPts = other.toClipperPoints({scalingFactor});
const solution = this.intersectClipper(otherPts, {clipType, scalingFactor});
return PIXI.Polygon.fromClipperPoints(solution.length ? solution[0] : [], {scalingFactor});
};
/* -------------------------------------------- */
/**
* Intersect this PIXI.Polygon with an array of ClipperPoints.
* @param {ClipperPoint[]} clipperPoints Array of clipper points generated by PIXI.Polygon.toClipperPoints()
* @param {object} [options] Options which configure how the intersection is computed
* @param {number} [options.clipType] The clipper clip type
* @param {number} [options.scalingFactor] A scaling factor passed to Polygon#toClipperPoints to preserve precision
* @returns {ClipperPoint[]} The resulting ClipperPaths
*/
PIXI.Polygon.prototype.intersectClipper = function(clipperPoints, {clipType, scalingFactor} = {}) {
clipType ??= ClipperLib.ClipType.ctIntersection;
const c = new ClipperLib.Clipper();
c.AddPath(this.toClipperPoints({scalingFactor}), ClipperLib.PolyType.ptSubject, true);
c.AddPath(clipperPoints, ClipperLib.PolyType.ptClip, true);
const solution = new ClipperLib.Paths();
c.Execute(clipType, solution);
return solution;
};
/* -------------------------------------------- */
/**
* Intersect this PIXI.Polygon with a PIXI.Circle.
* For now, convert the circle to a Polygon approximation and use intersectPolygon.
* In the future we may replace this with more specialized logic which uses the line-circle intersection formula.
* @param {PIXI.Circle} circle A PIXI.Circle
* @param {object} [options] Options which configure how the intersection is computed
* @param {number} [options.density] The number of points which defines the density of approximation
* @returns {PIXI.Polygon} The intersected polygon
*/
PIXI.Polygon.prototype.intersectCircle = function(circle, options) {
return circle.intersectPolygon(this, options);
};
/* -------------------------------------------- */
/**
* Intersect this PIXI.Polygon with a PIXI.Rectangle.
* For now, convert the rectangle to a Polygon and use intersectPolygon.
* In the future we may replace this with more specialized logic which uses the line-line intersection formula.
* @param {PIXI.Rectangle} rect A PIXI.Rectangle
* @param {object} [options] Options which configure how the intersection is computed
* @returns {PIXI.Polygon} The intersected polygon
*/
PIXI.Polygon.prototype.intersectRectangle = function(rect, options) {
return rect.intersectPolygon(this, options);
};

View File

@@ -0,0 +1,520 @@
/**
* Bit code labels splitting a rectangle into zones, based on the Cohen-Sutherland algorithm.
* See https://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
* left central right
* top 1001 1000 1010
* central 0001 0000 0010
* bottom 0101 0100 0110
* @enum {number}
*/
PIXI.Rectangle.CS_ZONES = {
INSIDE: 0x0000,
LEFT: 0x0001,
RIGHT: 0x0010,
TOP: 0x1000,
BOTTOM: 0x0100,
TOPLEFT: 0x1001,
TOPRIGHT: 0x1010,
BOTTOMRIGHT: 0x0110,
BOTTOMLEFT: 0x0101
};
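/* -------------------------------------------- */
/**
* Worked example. Zones are bit codes: a corner zone is the bitwise OR of its two edge zones, so membership in an
* edge zone can be tested with a bitwise AND.
* @example
* const Z = PIXI.Rectangle.CS_ZONES;
* (Z.LEFT | Z.TOP) === Z.TOPLEFT;  // true
* Boolean(Z.TOPLEFT & Z.LEFT);     // true: a top-left point also belongs to the left zone
*/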
/* -------------------------------------------- */
/**
* Calculate center of this rectangle.
* @type {Point}
*/
Object.defineProperty(PIXI.Rectangle.prototype, "center", { get: function() {
return { x: this.x + (this.width * 0.5), y: this.y + (this.height * 0.5) };
}});
/* -------------------------------------------- */
/**
* Return the bounding box for a PIXI.Rectangle.
* The bounding rectangle is normalized such that the width and height are non-negative.
* @returns {PIXI.Rectangle}
*/
PIXI.Rectangle.prototype.getBounds = function() {
let {x, y, width, height} = this;
x = width > 0 ? x : x + width;
y = height > 0 ? y : y + height;
return new PIXI.Rectangle(x, y, Math.abs(width), Math.abs(height));
};
/* -------------------------------------------- */
/**
* Determine if a point is on or nearly on this rectangle.
* @param {Point} p Point to test
* @returns {boolean} Is the point on the rectangle boundary?
*/
PIXI.Rectangle.prototype.pointIsOn = function(p) {
const CSZ = PIXI.Rectangle.CS_ZONES;
return this._getZone(p) === CSZ.INSIDE && this._getEdgeZone(p) !== CSZ.INSIDE;
};
/* -------------------------------------------- */
/**
* Calculate the rectangle Zone for a given point located around, on, or in the rectangle.
* See https://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
* This differs from _getZone in how points on the edge are treated: they are not considered inside.
* @param {Point} point A point to test for location relative to the rectangle
* @returns {PIXI.Rectangle.CS_ZONES} Which edge zone does the point belong to?
*/
PIXI.Rectangle.prototype._getEdgeZone = function(point) {
const CSZ = PIXI.Rectangle.CS_ZONES;
let code = CSZ.INSIDE;
if ( point.x < this.x || point.x.almostEqual(this.x) ) code |= CSZ.LEFT;
else if ( point.x > this.right || point.x.almostEqual(this.right) ) code |= CSZ.RIGHT;
if ( point.y < this.y || point.y.almostEqual(this.y) ) code |= CSZ.TOP;
else if ( point.y > this.bottom || point.y.almostEqual(this.bottom) ) code |= CSZ.BOTTOM;
return code;
};
/* -------------------------------------------- */
/**
* Get all the points (corners) for a polygon approximation of a rectangle between two points on the rectangle.
* The two points can be anywhere in 2d space on or outside the rectangle.
* The starting and ending side are based on the zone of the corresponding a and b points.
* (See PIXI.Rectangle.CS_ZONES.)
* This is the rectangular version of PIXI.Circle.prototype.pointsBetween, and is similarly used
* to draw the portion of the shape between two intersection points on that shape.
* @param {Point} a A point on or outside the rectangle, representing the starting position.
* @param {Point} b A point on or outside the rectangle, representing the ending position.
* @returns {Point[]} Points returned are clockwise from start to end.
*/
PIXI.Rectangle.prototype.pointsBetween = function(a, b) {
const CSZ = PIXI.Rectangle.CS_ZONES;
// Assume the point could be outside the rectangle but not inside (which would be undefined).
const zoneA = this._getEdgeZone(a);
if ( !zoneA ) return [];
const zoneB = this._getEdgeZone(b);
if ( !zoneB ) return [];
// If on the same edge, return none if the end point is counterclockwise to the start point.
if ( zoneA === zoneB && foundry.utils.orient2dFast(this.center, a, b) <= 0 ) return [];
let z = zoneA;
const pts = [];
for ( let i = 0; i < 4; i += 1) {
if ( (z & CSZ.LEFT) ) {
if ( z !== CSZ.TOPLEFT ) pts.push({ x: this.left, y: this.top });
z = CSZ.TOP;
} else if ( (z & CSZ.TOP) ) {
if ( z !== CSZ.TOPRIGHT ) pts.push({ x: this.right, y: this.top });
z = CSZ.RIGHT;
} else if ( (z & CSZ.RIGHT) ) {
if ( z !== CSZ.BOTTOMRIGHT ) pts.push({ x: this.right, y: this.bottom });
z = CSZ.BOTTOM;
} else if ( (z & CSZ.BOTTOM) ) {
if ( z !== CSZ.BOTTOMLEFT ) pts.push({ x: this.left, y: this.bottom });
z = CSZ.LEFT;
}
if ( z & zoneB ) break;
}
return pts;
};
/* -------------------------------------------- */
/**
* Get all intersection points for a segment A|B
* Intersections are sorted from A to B.
* @param {Point} a Endpoint A of the segment
* @param {Point} b Endpoint B of the segment
* @returns {Point[]} Array of intersections or empty if no intersection.
* If A|B is parallel to an edge of this rectangle, returns the two furthest points on
* the segment A|B that are on the edge.
* The return object's t0 property signifies the location of the intersection on segment A|B.
* This will be NaN if the segment is a point.
* The return object's t1 property signifies the location of the intersection on the rectangle edge.
* The t1 value is measured relative to the intersecting edge of the rectangle.
*/
PIXI.Rectangle.prototype.segmentIntersections = function(a, b) {
// The segment is collinear with a vertical edge
if ( a.x.almostEqual(b.x) && (a.x.almostEqual(this.left) || a.x.almostEqual(this.right)) ) {
const minY1 = Math.min(a.y, b.y);
const minY2 = Math.min(this.top, this.bottom);
const maxY1 = Math.max(a.y, b.y);
const maxY2 = Math.max(this.top, this.bottom);
const minIxY = Math.max(minY1, minY2);
const maxIxY = Math.min(maxY1, maxY2);
// Test whether the two segments intersect
const pointIntersection = minIxY.almostEqual(maxIxY);
if ( pointIntersection || (minIxY < maxIxY) ) {
// Determine t-values of the a|b segment intersections (t0) and the rectangle edge (t1).
const distAB = Math.abs(b.y - a.y);
const distRect = this.height;
const y = (b.y - a.y) > 0 ? a.y : b.y;
const rectY = a.x.almostEqual(this.right) ? this.top : this.bottom;
const minRes = {x: a.x, y: minIxY, t0: (minIxY - y) / distAB, t1: Math.abs((minIxY - rectY) / distRect)};
// If true, the a|b segment is nearly a point and t0 is likely NaN.
if ( pointIntersection ) return [minRes];
// Return in order nearest a, nearest b
const maxRes = {x: a.x, y: maxIxY, t0: (maxIxY - y) / distAB, t1: Math.abs((maxIxY - rectY) / distRect)};
return Math.abs(minIxY - a.y) < Math.abs(maxIxY - a.y)
? [minRes, maxRes]
: [maxRes, minRes];
}
}
// The segment is collinear with a horizontal edge
else if ( a.y.almostEqual(b.y) && (a.y.almostEqual(this.top) || a.y.almostEqual(this.bottom))) {
const minX1 = Math.min(a.x, b.x);
const minX2 = Math.min(this.right, this.left);
const maxX1 = Math.max(a.x, b.x);
const maxX2 = Math.max(this.right, this.left);
const minIxX = Math.max(minX1, minX2);
const maxIxX = Math.min(maxX1, maxX2);
// Test whether the two segments intersect
const pointIntersection = minIxX.almostEqual(maxIxX);
if ( pointIntersection || (minIxX < maxIxX) ) {
// Determine t-values of the a|b segment intersections (t0) and the rectangle edge (t1).
const distAB = Math.abs(b.x - a.x);
const distRect = this.width;
const x = (b.x - a.x) > 0 ? a.x : b.x;
const rectX = a.y.almostEqual(this.top) ? this.left : this.right;
const minRes = {x: minIxX, y: a.y, t0: (minIxX - x) / distAB, t1: Math.abs((minIxX - rectX) / distRect)};
// If true, the a|b segment is nearly a point and t0 is likely NaN.
if ( pointIntersection ) return [minRes];
// Return in order nearest a, nearest b
const maxRes = {x: maxIxX, y: a.y, t0: (maxIxX - x) / distAB, t1: Math.abs((maxIxX - rectX) / distRect)};
return Math.abs(minIxX - a.x) < Math.abs(maxIxX - a.x) ? [minRes, maxRes] : [maxRes, minRes];
}
}
// Follows structure of lineSegmentIntersects
const zoneA = this._getZone(a);
const zoneB = this._getZone(b);
if ( !(zoneA | zoneB) ) return []; // Bitwise OR is 0: both points inside rectangle.
// Regular AND: one point inside, one outside
// Otherwise, both points outside
const zones = !(zoneA && zoneB) ? [zoneA || zoneB] : [zoneA, zoneB];
// If 2 zones, line likely intersects two edges.
// It is possible to have a line that starts, for example, at center left and moves to center top.
// In this case it may not cross the rectangle.
if ( zones.length === 2 && !this.lineSegmentIntersects(a, b) ) return [];
const CSZ = PIXI.Rectangle.CS_ZONES;
const lsi = foundry.utils.lineSegmentIntersects;
const lli = foundry.utils.lineLineIntersection;
const { leftEdge, rightEdge, bottomEdge, topEdge } = this;
const ixs = [];
for ( const z of zones ) {
let ix;
if ( (z & CSZ.LEFT)
&& lsi(leftEdge.A, leftEdge.B, a, b)) ix = lli(a, b, leftEdge.A, leftEdge.B);
if ( !ix && (z & CSZ.RIGHT)
&& lsi(rightEdge.A, rightEdge.B, a, b)) ix = lli(a, b, rightEdge.A, rightEdge.B);
if ( !ix && (z & CSZ.TOP)
&& lsi(topEdge.A, topEdge.B, a, b)) ix = lli(a, b, topEdge.A, topEdge.B);
if ( !ix && (z & CSZ.BOTTOM)
&& lsi(bottomEdge.A, bottomEdge.B, a, b)) ix = lli(a, b, bottomEdge.A, bottomEdge.B);
// The ix should always be a point by now
if ( !ix ) throw new Error("PIXI.Rectangle.prototype.segmentIntersections returned an unexpected null point.");
ixs.push(ix);
}
return ixs;
};
/* -------------------------------------------- */
/**
* Compute the intersection of this Rectangle with some other Rectangle.
* @param {PIXI.Rectangle} other Some other rectangle which intersects this one
* @returns {PIXI.Rectangle} The intersected rectangle
*/
PIXI.Rectangle.prototype.intersection = function(other) {
const x0 = this.x < other.x ? other.x : this.x;
const x1 = this.right > other.right ? other.right : this.right;
const y0 = this.y < other.y ? other.y : this.y;
const y1 = this.bottom > other.bottom ? other.bottom : this.bottom;
return new PIXI.Rectangle(x0, y0, x1 - x0, y1 - y0);
};
/* -------------------------------------------- */
/**
* Convert this PIXI.Rectangle into a PIXI.Polygon
* @returns {PIXI.Polygon} The Rectangle expressed as a PIXI.Polygon
*/
PIXI.Rectangle.prototype.toPolygon = function() {
const points = [this.left, this.top, this.right, this.top, this.right, this.bottom, this.left, this.bottom];
return new PIXI.Polygon(points);
};
/* -------------------------------------------- */
/**
* Get the left edge of this rectangle.
* The returned edge endpoints are oriented clockwise around the rectangle.
* @type {{A: Point, B: Point}}
*/
Object.defineProperty(PIXI.Rectangle.prototype, "leftEdge", { get: function() {
return { A: { x: this.left, y: this.bottom }, B: { x: this.left, y: this.top }};
}});
/* -------------------------------------------- */
/**
* Get the right edge of this rectangle.
* The returned edge endpoints are oriented clockwise around the rectangle.
* @type {{A: Point, B: Point}}
*/
Object.defineProperty(PIXI.Rectangle.prototype, "rightEdge", { get: function() {
return { A: { x: this.right, y: this.top }, B: { x: this.right, y: this.bottom }};
}});
/* -------------------------------------------- */
/**
* Get the top edge of this rectangle.
* The returned edge endpoints are oriented clockwise around the rectangle.
* @type {{A: Point, B: Point}}
*/
Object.defineProperty(PIXI.Rectangle.prototype, "topEdge", { get: function() {
return { A: { x: this.left, y: this.top }, B: { x: this.right, y: this.top }};
}});
/* -------------------------------------------- */
/**
* Get the bottom edge of this rectangle.
* The returned edge endpoints are oriented clockwise around the rectangle.
* @type {{A: Point, B: Point}}
*/
Object.defineProperty(PIXI.Rectangle.prototype, "bottomEdge", { get: function() {
return { A: { x: this.right, y: this.bottom }, B: { x: this.left, y: this.bottom }};
}});
/* -------------------------------------------- */
/**
* Calculate the rectangle Zone for a given point located around or in the rectangle.
* https://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
*
* @param {Point} p Point to test for location relative to the rectangle
* @returns {PIXI.Rectangle.CS_ZONES}
*/
PIXI.Rectangle.prototype._getZone = function(p) {
const CSZ = PIXI.Rectangle.CS_ZONES;
let code = CSZ.INSIDE;
if ( p.x < this.x ) code |= CSZ.LEFT;
else if ( p.x > this.right ) code |= CSZ.RIGHT;
if ( p.y < this.y ) code |= CSZ.TOP;
else if ( p.y > this.bottom ) code |= CSZ.BOTTOM;
return code;
};
/**
* Test whether a line segment AB intersects this rectangle.
* @param {Point} a The first endpoint of segment AB
* @param {Point} b The second endpoint of segment AB
* @param {object} [options] Options affecting the intersect test.
* @param {boolean} [options.inside] If true, a line fully contained within the rectangle is considered to intersect.
* @returns {boolean} True if intersects.
*/
PIXI.Rectangle.prototype.lineSegmentIntersects = function(a, b, { inside = false } = {}) {
const zoneA = this._getZone(a);
const zoneB = this._getZone(b);
if ( !(zoneA | zoneB) ) return inside; // Bitwise OR is 0: both points inside rectangle.
if ( zoneA & zoneB ) return false; // Bitwise AND is not 0: both points share outside zone
if ( !(zoneA && zoneB) ) return true; // Regular AND: one point inside, one outside
// Line likely intersects, but some possibility that the line starts at, say, center left
// and moves to center top which means it may or may not cross the rectangle
const CSZ = PIXI.Rectangle.CS_ZONES;
const lsi = foundry.utils.lineSegmentIntersects;
// If the zone is a corner, like top left, test one side and then if not true, test
// the other. If the zone is on a side, like left, just test that side.
const leftEdge = this.leftEdge;
if ( (zoneA & CSZ.LEFT) && lsi(leftEdge.A, leftEdge.B, a, b) ) return true;
const rightEdge = this.rightEdge;
if ( (zoneA & CSZ.RIGHT) && lsi(rightEdge.A, rightEdge.B, a, b) ) return true;
const topEdge = this.topEdge;
if ( (zoneA & CSZ.TOP) && lsi(topEdge.A, topEdge.B, a, b) ) return true;
const bottomEdge = this.bottomEdge;
if ( (zoneA & CSZ.BOTTOM ) && lsi(bottomEdge.A, bottomEdge.B, a, b) ) return true;
return false;
};
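/* -------------------------------------------- */
/**
* Usage sketch (coordinates assumed).
* @example
* const rect = new PIXI.Rectangle(0, 0, 100, 100);
* rect.lineSegmentIntersects({x: -10, y: 50}, {x: 110, y: 50});               // true: crosses the left and right edges
* rect.lineSegmentIntersects({x: 10, y: 10}, {x: 90, y: 90});                 // false: fully contained
* rect.lineSegmentIntersects({x: 10, y: 10}, {x: 90, y: 90}, {inside: true}); // true: containment counts
*/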
/* -------------------------------------------- */
/**
* Intersect this PIXI.Rectangle with a PIXI.Polygon.
* Currently uses the clipper library.
* In the future we may replace this with more specialized logic which uses the line-line intersection formula.
* @param {PIXI.Polygon} polygon A PIXI.Polygon
* @param {object} [options] Options which configure how the intersection is computed
* @param {number} [options.clipType] The clipper clip type
* @param {number} [options.scalingFactor] A scaling factor passed to Polygon#toClipperPoints for precision
* @param {boolean} [options.weilerAtherton=true] Use the Weiler-Atherton algorithm. Otherwise, use Clipper.
* @param {boolean} [options.canMutate] Whether the Weiler-Atherton constructor may mutate the source polygon
* @returns {PIXI.Polygon} The intersected polygon
*/
PIXI.Rectangle.prototype.intersectPolygon = function(polygon, {clipType, scalingFactor, canMutate, weilerAtherton=true}={}) {
if ( !this.width || !this.height ) return new PIXI.Polygon([]);
clipType ??= ClipperLib.ClipType.ctIntersection;
// Use Weiler-Atherton for efficient intersection or union
if ( weilerAtherton && polygon.isPositive ) {
const res = WeilerAthertonClipper.combine(polygon, this, {clipType, canMutate, scalingFactor});
if ( !res.length ) return new PIXI.Polygon([]);
return res[0];
}
// Use Clipper polygon intersection
return polygon.intersectPolygon(this.toPolygon(), {clipType, canMutate, scalingFactor});
};
/* -------------------------------------------- */
/**
* Intersect this PIXI.Rectangle with an array of ClipperPoints. Currently, uses the clipper library.
* In the future we may replace this with more specialized logic which uses the line-line intersection formula.
* @param {ClipperPoint[]} clipperPoints An array of ClipperPoints generated by PIXI.Polygon.toClipperPoints()
* @param {object} [options] Options which configure how the intersection is computed
* @param {number} [options.clipType] The clipper clip type
* @param {number} [options.scalingFactor] A scaling factor passed to Polygon#toClipperPoints to preserve precision
* @returns {PIXI.Polygon|null} The intersected polygon or null if no solution was present
*/
PIXI.Rectangle.prototype.intersectClipper = function(clipperPoints, {clipType, scalingFactor}={}) {
if ( !this.width || !this.height ) return [];
return this.toPolygon().intersectClipper(clipperPoints, {clipType, scalingFactor});
};
/* -------------------------------------------- */
/**
* Determine whether some other Rectangle overlaps with this one.
* This check differs from the parent class Rectangle#intersects test because it is true for adjacency (zero area).
* @param {PIXI.Rectangle} other Some other rectangle against which to compare
* @returns {boolean} Do the rectangles overlap?
*/
PIXI.Rectangle.prototype.overlaps = function(other) {
return (other.right >= this.left)
&& (other.left <= this.right)
&& (other.bottom >= this.top)
&& (other.top <= this.bottom);
};
/* -------------------------------------------- */
/**
* Normalize the width and height of the rectangle in-place, enforcing that those dimensions be positive.
* @returns {PIXI.Rectangle}
*/
PIXI.Rectangle.prototype.normalize = function() {
if ( this.width < 0 ) {
this.x += this.width;
this.width = Math.abs(this.width);
}
if ( this.height < 0 ) {
this.y += this.height;
this.height = Math.abs(this.height);
}
return this;
};
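/* -------------------------------------------- */
/**
* Usage sketch. A rectangle dragged up and to the left has negative dimensions until it is normalized.
* @example
* const r = new PIXI.Rectangle(100, 100, -40, -30);
* r.normalize(); // x: 60, y: 70, width: 40, height: 30
*/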
/* -------------------------------------------- */
/**
* Resize this rectangle in-place so that it bounds itself rotated counterclockwise about the given pivot by the given angle in radians.
* @param {number} radians The angle of rotation.
* @param {PIXI.Point} [pivot] An optional pivot point (normalized).
* @returns {this} This rectangle.
*/
PIXI.Rectangle.prototype.rotate = function(radians, pivot) {
if ( radians === 0 ) return this;
return this.constructor.fromRotation(this.x, this.y, this.width, this.height, radians, pivot, this);
};
/* -------------------------------------------- */
/**
* Create normalized rectangular bounds given a rectangle shape and an angle of central rotation.
* @param {number} x The top-left x-coordinate of the un-rotated rectangle
* @param {number} y The top-left y-coordinate of the un-rotated rectangle
* @param {number} width The width of the un-rotated rectangle
* @param {number} height The height of the un-rotated rectangle
* @param {number} radians The angle of rotation about the center
* @param {PIXI.Point} [pivot] An optional pivot point (if not provided, the pivot is the centroid)
* @param {PIXI.Rectangle} [_outRect] (Internal)
* @returns {PIXI.Rectangle} The constructed rotated rectangle bounds
*/
PIXI.Rectangle.fromRotation = function(x, y, width, height, radians, pivot, _outRect) {
const cosAngle = Math.cos(radians);
const sinAngle = Math.sin(radians);
// Create the output rect if necessary
_outRect ??= new PIXI.Rectangle();
// Is it possible to do with the simple computation?
if ( pivot === undefined || ((pivot.x === 0.5) && (pivot.y === 0.5)) ) {
_outRect.height = (height * Math.abs(cosAngle)) + (width * Math.abs(sinAngle));
_outRect.width = (height * Math.abs(sinAngle)) + (width * Math.abs(cosAngle));
_outRect.x = x + ((width - _outRect.width) / 2);
_outRect.y = y + ((height - _outRect.height) / 2);
return _outRect;
}
// Calculate the pivot point in absolute coordinates
const pivotX = x + (width * pivot.x);
const pivotY = y + (height * pivot.y);
// Calculate vectors from pivot to the rectangle's corners
const tlX = x - pivotX;
const tlY = y - pivotY;
const trX = x + width - pivotX;
const trY = y - pivotY;
const blX = x - pivotX;
const blY = y + height - pivotY;
const brX = x + width - pivotX;
const brY = y + height - pivotY;
// Apply rotation to the vectors
const rTlX = (cosAngle * tlX) - (sinAngle * tlY);
const rTlY = (sinAngle * tlX) + (cosAngle * tlY);
const rTrX = (cosAngle * trX) - (sinAngle * trY);
const rTrY = (sinAngle * trX) + (cosAngle * trY);
const rBlX = (cosAngle * blX) - (sinAngle * blY);
const rBlY = (sinAngle * blX) + (cosAngle * blY);
const rBrX = (cosAngle * brX) - (sinAngle * brY);
const rBrY = (sinAngle * brX) + (cosAngle * brY);
// Find the new corners of the bounding rectangle
const minX = Math.min(rTlX, rTrX, rBlX, rBrX);
const minY = Math.min(rTlY, rTrY, rBlY, rBrY);
const maxX = Math.max(rTlX, rTrX, rBlX, rBrX);
const maxY = Math.max(rTlY, rTrY, rBlY, rBrY);
// Assign the new computed bounding box
_outRect.x = pivotX + minX;
_outRect.y = pivotY + minY;
_outRect.width = maxX - minX;
_outRect.height = maxY - minY;
return _outRect;
};
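/* -------------------------------------------- */
/**
* Worked example. Rotating a 100x50 rectangle by 90° about its center swaps the dimensions of the bounding box.
* @example
* const bounds = PIXI.Rectangle.fromRotation(0, 0, 100, 50, Math.PI / 2);
* // bounds ≈ {x: 25, y: -25, width: 50, height: 100}
*/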

View File

@@ -0,0 +1,498 @@
/**
* @typedef {foundry.utils.Collection} EffectsCollection
*/
/**
* A container group which contains visual effects rendered above the primary group.
*
* TODO:
* The effects canvas group now only performs shape initialization; logic that needs to happen at
* the placeable or object level is now their burden.
* - [DONE] Adding or removing a source from the EffectsCanvasGroup collection.
* - [TODO] A change in a darkness source should re-initialize all overlapping light and vision sources.
*
* ### Hook Events
* - {@link hookEvents.lightingRefresh}
*
* @category - Canvas
*/
class EffectsCanvasGroup extends CanvasGroupMixin(PIXI.Container) {
/**
* The name of the darkness level animation.
* @type {string}
*/
static #DARKNESS_ANIMATION_NAME = "lighting.animateDarkness";
/**
* Whether to currently animate light sources.
* @type {boolean}
*/
animateLightSources = true;
/**
* Whether to currently animate vision sources.
* @type {boolean}
*/
animateVisionSources = true;
/**
* A mapping of light sources which are active within the rendered Scene.
* @type {EffectsCollection<string, PointLightSource>}
*/
lightSources = new foundry.utils.Collection();
/**
* A mapping of darkness sources which are active within the rendered Scene.
* @type {EffectsCollection<string, PointDarknessSource>}
*/
darknessSources = new foundry.utils.Collection();
/**
* A Collection of vision sources which are currently active within the rendered Scene.
* @type {EffectsCollection<string, PointVisionSource>}
*/
visionSources = new foundry.utils.Collection();
/**
* A set of vision mask filters used in visual effects group
* @type {Set<VisualEffectsMaskingFilter>}
*/
visualEffectsMaskingFilters = new Set();
/* -------------------------------------------- */
/**
* Iterator for all light and darkness sources.
* @returns {Generator<PointDarknessSource|PointLightSource, void, void>}
* @yields foundry.canvas.sources.PointDarknessSource|foundry.canvas.sources.PointLightSource
*/
* allSources() {
for ( const darknessSource of this.darknessSources ) yield darknessSource;
for ( const lightSource of this.lightSources ) yield lightSource;
}
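/* -------------------------------------------- */
/**
* Usage sketch (illustrative): darkness sources are yielded before light sources.
* @example Count the currently active light and darkness sources
* let active = 0;
* for ( const source of canvas.effects.allSources() ) {
*   if ( source.active ) active++;
* }
*/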
/* -------------------------------------------- */
/** @override */
_createLayers() {
/**
* A layer of background alteration effects which change the appearance of the primary group render texture.
* @type {CanvasBackgroundAlterationEffects}
*/
this.background = this.addChild(new CanvasBackgroundAlterationEffects());
/**
* A layer which adds illumination-based effects to the scene.
* @type {CanvasIlluminationEffects}
*/
this.illumination = this.addChild(new CanvasIlluminationEffects());
/**
* A layer which adds color-based effects to the scene.
* @type {CanvasColorationEffects}
*/
this.coloration = this.addChild(new CanvasColorationEffects());
/**
* A layer which adds darkness effects to the scene.
* @type {CanvasDarknessEffects}
*/
this.darkness = this.addChild(new CanvasDarknessEffects());
return {
background: this.background,
illumination: this.illumination,
coloration: this.coloration,
darkness: this.darkness
};
}
/* -------------------------------------------- */
/**
* Clear all effects containers and animated sources.
*/
clearEffects() {
this.background.clear();
this.illumination.clear();
this.coloration.clear();
this.darkness.clear();
}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
// Draw each component layer
await this.background.draw();
await this.illumination.draw();
await this.coloration.draw();
await this.darkness.draw();
// Call hooks
Hooks.callAll("drawEffectsCanvasGroup", this);
// Activate animation of drawn objects
this.activateAnimation();
}
/* -------------------------------------------- */
/* Perception Management Methods */
/* -------------------------------------------- */
/**
* Initialize positive light sources which exist within the active Scene.
* Packages can use the "initializeLightSources" hook to programmatically add light sources.
*/
initializeLightSources() {
for ( let source of this.lightSources ) source.initialize();
Hooks.callAll("initializeLightSources", this);
}
/* -------------------------------------------- */
/**
* Re-initialize the shapes of all darkness sources in the Scene.
* This happens before initialization of light sources because darkness sources contribute additional edges which
* limit perception.
* Packages can use the "initializeDarknessSources" hook to programmatically add darkness sources.
*/
initializeDarknessSources() {
for ( let source of this.darknessSources ) source.initialize();
Hooks.callAll("initializeDarknessSources", this);
}
/* -------------------------------------------- */
/**
* Refresh the state and uniforms of all light sources and darkness sources objects.
*/
refreshLightSources() {
for ( const source of this.allSources() ) source.refresh();
// FIXME: We need to refresh the field of an AmbientLight only after the initialization of the light source when
// the shape of the source could have changed. We don't need to refresh all fields whenever lighting is refreshed.
canvas.lighting.refreshFields();
}
/* -------------------------------------------- */
/**
* Refresh the state and uniforms of all VisionSource objects.
*/
refreshVisionSources() {
for ( const visionSource of this.visionSources ) visionSource.refresh();
}
/* -------------------------------------------- */
/**
* Refresh the active display of lighting.
*/
refreshLighting() {
// Apply illumination and visibility background color change
this.illumination.backgroundColor = canvas.colors.background;
if ( this.illumination.darknessLevelMeshes.clearColor[0] !== canvas.environment.darknessLevel ) {
this.illumination.darknessLevelMeshes.clearColor[0] = canvas.environment.darknessLevel;
this.illumination.invalidateDarknessLevelContainer(true);
}
const v = canvas.visibility.filter;
if ( v ) {
v.uniforms.visionTexture = canvas.masks.vision.renderTexture;
v.uniforms.primaryTexture = canvas.primary.renderTexture;
canvas.colors.fogExplored.applyRGB(v.uniforms.exploredColor);
canvas.colors.fogUnexplored.applyRGB(v.uniforms.unexploredColor);
canvas.colors.background.applyRGB(v.uniforms.backgroundColor);
}
// Clear effects
canvas.effects.clearEffects();
// Add effect meshes for active light and darkness sources
for ( const source of this.allSources() ) this.#addLightEffect(source);
// Add effect meshes for active vision sources
for ( const visionSource of this.visionSources ) this.#addVisionEffect(visionSource);
// Update vision filters state
this.background.vision.filter.enabled = !!this.background.vision.children.length;
this.background.visionPreferred.filter.enabled = !!this.background.visionPreferred.children.length;
// Hide the background and/or coloration layers if possible
const lightingOptions = canvas.visibility.visionModeData.activeLightingOptions;
this.background.vision.visible = (this.background.vision.children.length > 0);
this.background.visionPreferred.visible = (this.background.visionPreferred.children.length > 0);
this.background.lighting.visible = (this.background.lighting.children.length > 0)
|| (lightingOptions.background?.postProcessingModes?.length > 0);
this.coloration.visible = (this.coloration.children.length > 1)
|| (lightingOptions.coloration?.postProcessingModes?.length > 0);
// Call hooks
Hooks.callAll("lightingRefresh", this);
}
/* -------------------------------------------- */
/**
* Add a vision source to the effect layers.
* @param {RenderedEffectSource & PointVisionSource} source The vision source to add mesh layers
*/
#addVisionEffect(source) {
if ( !source.active || (source.radius <= 0) ) return;
const meshes = source.drawMeshes();
if ( meshes.background ) {
// Does this vision source background need to be rendered into the preferred vision container, over other vision sources?
const parent = source.preferred ? this.background.visionPreferred : this.background.vision;
parent.addChild(meshes.background);
}
if ( meshes.illumination ) this.illumination.lights.addChild(meshes.illumination);
if ( meshes.coloration ) this.coloration.addChild(meshes.coloration);
}
/* -------------------------------------------- */
/**
* Add a light source or a darkness source to the effect layers
* @param {RenderedEffectSource & BaseLightSource} source The light or darkness source to add to the effect layers.
*/
#addLightEffect(source) {
if ( !source.active ) return;
const meshes = source.drawMeshes();
if ( meshes.background ) this.background.lighting.addChild(meshes.background);
if ( meshes.illumination ) this.illumination.lights.addChild(meshes.illumination);
if ( meshes.coloration ) this.coloration.addChild(meshes.coloration);
if ( meshes.darkness ) this.darkness.addChild(meshes.darkness);
}
/* -------------------------------------------- */
/**
* Test whether the point is inside light.
* @param {Point} point The point.
* @param {number} elevation The elevation of the point.
* @returns {boolean} Whether the point is inside light.
*/
testInsideLight(point, elevation) {
// First test the light sources, excluding the global light source
for ( const lightSource of this.lightSources ) {
if ( !lightSource.active || (lightSource instanceof foundry.canvas.sources.GlobalLightSource) ) continue;
if ( lightSource.shape.contains(point.x, point.y) ) return true;
}
// Second test Global Illumination and Darkness Level meshes
const globalLightSource = canvas.environment.globalLightSource;
if ( !globalLightSource.active ) return false;
const {min, max} = globalLightSource.data.darkness;
const darknessLevel = this.getDarknessLevel(point, elevation);
return (darknessLevel >= min) && (darknessLevel <= max);
}
/* -------------------------------------------- */
/**
* Test whether the point is inside darkness.
* @param {Point} point The point.
* @param {number} elevation The elevation of the point.
* @returns {boolean} Whether the point is inside darkness.
*/
testInsideDarkness({x, y}, elevation) {
for ( const source of this.darknessSources ) {
if ( !source.active || source.isPreview ) continue;
for ( let dx = -1; dx <= 1; dx += 1 ) {
for ( let dy = -1; dy <= 1; dy += 1 ) {
if ( source.shape.contains(x + dx, y + dy) ) return true;
}
}
}
return false;
}
/* -------------------------------------------- */
/**
* Get the darkness level at the given point.
* @param {Point} point The point.
* @param {number} elevation The elevation of the point.
* @returns {number} The darkness level.
*/
getDarknessLevel(point, elevation) {
const darknessLevelMeshes = canvas.effects.illumination.darknessLevelMeshes.children;
for ( let i = darknessLevelMeshes.length - 1; i >= 0; i-- ) {
const darknessLevelMesh = darknessLevelMeshes[i];
if ( darknessLevelMesh.region.testPoint(point, elevation) ) {
return darknessLevelMesh.shader.uniforms.darknessLevel;
}
}
return canvas.environment.darknessLevel;
}
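/* Illustrative usage sketch (assumed coordinates; not part of the class itself): querying
* lighting conditions at an arbitrary canvas point through the test methods above.
*
*   const point = {x: 1000, y: 1200};
*   const elevation = 0;
*   const lit = canvas.effects.testInsideLight(point, elevation);
*   const dark = canvas.effects.testInsideDarkness(point, elevation);
*   const level = canvas.effects.getDarknessLevel(point, elevation);
*/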
/* -------------------------------------------- */
/** @override */
async _tearDown(options) {
CanvasAnimation.terminateAnimation(EffectsCanvasGroup.#DARKNESS_ANIMATION_NAME);
this.deactivateAnimation();
this.darknessSources.clear();
this.lightSources.clear();
for ( const c of this.children ) {
if ( c.clear ) c.clear();
else if ( c.tearDown ) await c.tearDown();
else c.destroy();
}
this.visualEffectsMaskingFilters.clear();
}
/* -------------------------------------------- */
/**
* Enable or disable vision masking for visual effects.
* @param {boolean} [enabled=true] Whether to enable or disable vision masking
*/
toggleMaskingFilters(enabled=true) {
for ( const f of this.visualEffectsMaskingFilters ) {
f.uniforms.enableVisionMasking = enabled;
}
}
/* -------------------------------------------- */
/**
* Activate post-processing effects for a certain effects channel.
* @param {string} filterMode The filter mode to target.
* @param {string[]} [postProcessingModes=[]] The post-processing modes to apply to this filter.
* @param {Object} [uniforms={}] The uniforms to update.
*/
activatePostProcessingFilters(filterMode, postProcessingModes=[], uniforms={}) {
for ( const f of this.visualEffectsMaskingFilters ) {
if ( f.uniforms.mode === filterMode ) {
f.updatePostprocessModes(postProcessingModes, uniforms);
}
}
}
/* -------------------------------------------- */
/**
* Reset post-processing modes on all Visual Effects masking filters.
*/
resetPostProcessingFilters() {
for ( const f of this.visualEffectsMaskingFilters ) {
f.reset();
}
}
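/* Illustrative usage sketch (not part of the class itself): toggling vision masking and
* clearing post-processing state on the shared visual effects masking filters.
*
*   canvas.effects.toggleMaskingFilters(false);    // temporarily disable vision masking
*   canvas.effects.toggleMaskingFilters(true);     // restore it
*   canvas.effects.resetPostProcessingFilters();   // clear any active post-processing modes
*/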
/* -------------------------------------------- */
/* Animation Management */
/* -------------------------------------------- */
/**
* Activate light source animation for AmbientLight objects within this layer
*/
activateAnimation() {
this.deactivateAnimation();
if ( game.settings.get("core", "lightAnimation") === false ) return;
canvas.app.ticker.add(this.#animateSources, this);
}
/* -------------------------------------------- */
/**
* Deactivate light source animation for AmbientLight objects within this layer
*/
deactivateAnimation() {
canvas.app.ticker.remove(this.#animateSources, this);
}
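/* Illustrative usage sketch (not part of the class itself): pausing and resuming light
* source animation, for example around a performance-sensitive operation.
*
*   canvas.effects.deactivateAnimation();   // remove the shared ticker handler
*   canvas.effects.activateAnimation();     // re-add it (a no-op if the core lightAnimation setting is disabled)
*/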
/* -------------------------------------------- */
/**
* The ticker handler which manages animation delegation
* @param {number} dt Delta time
* @private
*/
#animateSources(dt) {
// Animate light and darkness sources
if ( this.animateLightSources ) {
for ( const source of this.allSources() ) {
source.animate(dt);
}
}
// Animate vision sources
if ( this.animateVisionSources ) {
for ( const source of this.visionSources.values() ) {
source.animate(dt);
}
}
}
/* -------------------------------------------- */
/**
* Animate a smooth transition of the darkness overlay to a target value.
* Only begin animating if another animation is not already in progress.
* @param {number} target The target darkness level between 0 and 1
* @param {number} duration The desired animation time in milliseconds. Default is 10 seconds
* @returns {Promise} A Promise which resolves once the animation is complete
*/
async animateDarkness(target=1.0, {duration=10000}={}) {
CanvasAnimation.terminateAnimation(EffectsCanvasGroup.#DARKNESS_ANIMATION_NAME);
if ( target === canvas.environment.darknessLevel ) return false;
if ( duration <= 0 ) return canvas.environment.initialize({environment: {darknessLevel: target}});
// Update with an animation
const animationData = [{
parent: {darkness: canvas.environment.darknessLevel},
attribute: "darkness",
to: Math.clamp(target, 0, 1)
}];
return CanvasAnimation.animate(animationData, {
name: EffectsCanvasGroup.#DARKNESS_ANIMATION_NAME,
duration: duration,
ontick: (dt, animation) =>
canvas.environment.initialize({environment: {darknessLevel: animation.attributes[0].parent.darkness}})
}).then(completed => {
if ( !completed ) canvas.environment.initialize({environment: {darknessLevel: target}});
});
}
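/* Illustrative usage sketch (assumed values; not part of the class itself): animating the
* scene darkness level toward a target.
*
*   await canvas.effects.animateDarkness(0.8, {duration: 5000});  // fade over five seconds
*   await canvas.effects.animateDarkness(0.2, {duration: 0});     // apply immediately, without animation
*/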
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get visibility() {
const msg = "EffectsCanvasGroup#visibility has been deprecated and moved to " +
"Canvas#visibility.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return canvas.visibility;
}
/**
* @deprecated since v12
* @ignore
*/
get globalLightSource() {
const msg = "EffectsCanvasGroup#globalLightSource has been deprecated and moved to " +
"EnvironmentCanvasGroup#globalLightSource.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return canvas.environment.globalLightSource;
}
/**
* @deprecated since v12
* @ignore
*/
updateGlobalLightSource() {
const msg = "EffectsCanvasGroup#updateGlobalLightSource has been deprecated and is part of " +
"EnvironmentCanvasGroup#initialize workflow.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
canvas.environment.initialize();
}
}

View File

@@ -0,0 +1,328 @@
/**
* A container group which contains the primary canvas group and the effects canvas group.
*
* @category - Canvas
*/
class EnvironmentCanvasGroup extends CanvasGroupMixin(PIXI.Container) {
constructor(...args) {
super(...args);
this.eventMode = "static";
/**
* The global light source attached to the environment
* @type {GlobalLightSource}
*/
Object.defineProperty(this, "globalLightSource", {
value: new CONFIG.Canvas.globalLightSourceClass({object: this, sourceId: "globalLight"}),
configurable: false,
enumerable: true,
writable: false
});
}
/** @override */
static groupName = "environment";
/** @override */
static tearDownChildren = false;
/**
* The scene darkness level.
* @type {number}
*/
#darknessLevel;
/**
* Colors exposed by the manager.
* @enum {Color}
*/
colors = {
darkness: undefined,
halfdark: undefined,
background: undefined,
dim: undefined,
bright: undefined,
ambientBrightest: undefined,
ambientDaylight: undefined,
ambientDarkness: undefined,
sceneBackground: undefined,
fogExplored: undefined,
fogUnexplored: undefined
};
/**
* Weights used by the manager to compute colors.
* @enum {number}
*/
weights = {
dark: undefined,
halfdark: undefined,
dim: undefined,
bright: undefined
};
/**
* Fallback colors.
* @enum {Color}
*/
static #fallbackColors = {
darknessColor: 0x242448,
daylightColor: 0xEEEEEE,
brightestColor: 0xFFFFFF,
backgroundColor: 0x999999,
fogUnexplored: 0x000000,
fogExplored: 0x000000
};
/**
* The event boundary used to dispatch darkness change events to subscribed handlers.
* @type {PIXI.EventBoundary}
*/
#eventBoundary;
/* -------------------------------------------- */
/* Properties */
/* -------------------------------------------- */
/**
* Get the darkness level of this scene.
* @returns {number}
*/
get darknessLevel() {
return this.#darknessLevel;
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/** @override */
async _draw(options) {
await super._draw(options);
this.#eventBoundary = new PIXI.EventBoundary(this);
this.initialize();
}
/* -------------------------------------------- */
/* Ambience Methods */
/* -------------------------------------------- */
/**
* Initialize the scene environment options.
* @param {object} [config={}]
* @param {ColorSource} [config.backgroundColor] The background canvas color
* @param {ColorSource} [config.brightestColor] The brightest ambient color
* @param {ColorSource} [config.darknessColor] The color of darkness
* @param {ColorSource} [config.daylightColor] The ambient daylight color
* @param {ColorSource} [config.fogExploredColor] The color applied to explored areas
* @param {ColorSource} [config.fogUnexploredColor] The color applied to unexplored areas
* @param {SceneEnvironmentData} [config.environment] The scene environment data
* @fires PIXI.FederatedEvent type: "darknessChange" - event: {environmentData: {darknessLevel, priorDarknessLevel}}
*/
initialize({backgroundColor, brightestColor, darknessColor, daylightColor, fogExploredColor,
fogUnexploredColor, darknessLevel, environment={}}={}) {
const scene = canvas.scene;
// Update base ambient colors, and darkness level
const fbc = EnvironmentCanvasGroup.#fallbackColors;
this.colors.ambientDarkness = Color.from(darknessColor ?? CONFIG.Canvas.darknessColor ?? fbc.darknessColor);
this.colors.ambientDaylight = Color.from(daylightColor
?? (scene.tokenVision ? (CONFIG.Canvas.daylightColor ?? fbc.daylightColor) : 0xFFFFFF));
this.colors.ambientBrightest = Color.from(brightestColor ?? CONFIG.Canvas.brightestColor ?? fbc.brightestColor);
/**
* @deprecated since v12
*/
if ( darknessLevel !== undefined ) {
const msg = "config.darknessLevel parameter into EnvironmentCanvasGroup#initialize is deprecated. " +
"You should pass the darkness level into config.environment.darknessLevel";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
environment.darknessLevel = darknessLevel;
}
// Darkness Level Control
const priorDarknessLevel = this.#darknessLevel ?? 0;
const dl = environment.darknessLevel ?? scene.environment.darknessLevel;
const darknessChanged = (dl !== this.#darknessLevel);
this.#darknessLevel = scene.environment.darknessLevel = dl;
// Update weights
Object.assign(this.weights, CONFIG.Canvas.lightLevels ?? {
dark: 0,
halfdark: 0.5,
dim: 0.25,
bright: 1
});
// Compute colors
this.#configureColors({fogExploredColor, fogUnexploredColor, backgroundColor});
// Configure the scene environment
this.#configureEnvironment(environment);
// Update primary cached container and renderer clear color with scene background color
canvas.app.renderer.background.color = this.colors.rendererBackground;
canvas.primary._backgroundColor = this.colors.sceneBackground.rgb;
// Dispatching the darkness change event
if ( darknessChanged ) {
const event = new PIXI.FederatedEvent(this.#eventBoundary);
event.type = "darknessChange";
event.environmentData = {
darknessLevel: this.#darknessLevel,
priorDarknessLevel
};
this.dispatchEvent(event);
}
// Push a perception update to refresh lighting and sources with the new computed color values
canvas.perception.update({
refreshPrimary: true,
refreshLighting: true,
refreshVision: true
});
}
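/* Illustrative usage sketch (assumed values; not part of the class itself): pushing a new
* darkness level through the environment group and listening for the dispatched event.
*
*   canvas.environment.addEventListener("darknessChange", event => {
*     const {darknessLevel, priorDarknessLevel} = event.environmentData;
*     console.log(`Darkness changed from ${priorDarknessLevel} to ${darknessLevel}`);
*   });
*   canvas.environment.initialize({environment: {darknessLevel: 0.6}});
*/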
/* -------------------------------------------- */
/**
* Configure all colors pertaining to a scene.
* @param {object} [options={}] Preview options.
* @param {ColorSource} [options.fogExploredColor] A preview fog explored color.
* @param {ColorSource} [options.fogUnexploredColor] A preview fog unexplored color.
* @param {ColorSource} [options.backgroundColor] The background canvas color.
*/
#configureColors({fogExploredColor, fogUnexploredColor, backgroundColor}={}) {
const scene = canvas.scene;
const fbc = EnvironmentCanvasGroup.#fallbackColors;
// Compute the middle ambient color
this.colors.background = this.colors.ambientDarkness.mix(this.colors.ambientDaylight, 1.0 - this.darknessLevel);
// Compute dark ambient colors
this.colors.darkness = this.colors.ambientDarkness.mix(this.colors.background, this.weights.dark);
this.colors.halfdark = this.colors.darkness.mix(this.colors.background, this.weights.halfdark);
// Compute light ambient colors
this.colors.bright =
this.colors.background.mix(this.colors.ambientBrightest, this.weights.bright);
this.colors.dim = this.colors.background.mix(this.colors.bright, this.weights.dim);
// Compute fog colors
const cfg = CONFIG.Canvas;
const uc = Color.from(fogUnexploredColor ?? scene.fog.colors.unexplored ?? cfg.unexploredColor ?? fbc.fogUnexplored);
this.colors.fogUnexplored = this.colors.background.multiply(uc);
const ec = Color.from(fogExploredColor ?? scene.fog.colors.explored ?? cfg.exploredColor ?? fbc.fogExplored);
this.colors.fogExplored = this.colors.background.multiply(ec);
// Compute scene background color
const sceneBG = Color.from(backgroundColor ?? scene?.backgroundColor ?? fbc.backgroundColor);
this.colors.sceneBackground = sceneBG;
this.colors.rendererBackground = sceneBG.multiply(this.colors.background);
}
/* -------------------------------------------- */
/**
* Configure the scene environment: the ambience filter and the global light source.
* @param {SceneEnvironmentData} [environment] The scene environment data object.
*/
#configureEnvironment(environment={}) {
const currentEnvironment = canvas.scene.toObject().environment;
/**
* @type {SceneEnvironmentData}
*/
const data = foundry.utils.mergeObject(environment, currentEnvironment, {
inplace: false,
insertKeys: true,
insertValues: true,
overwrite: false
});
// First configure the ambience filter
this.#configureAmbienceFilter(data);
// Then configure the global light
this.#configureGlobalLight(data);
}
/* -------------------------------------------- */
/**
* Configure the ambience filter.
* @param {SceneEnvironmentData} environment
* @param {boolean} environment.cycle The cycle option.
* @param {EnvironmentData} environment.base The base environment data.
* @param {EnvironmentData} environment.dark The dark environment data.
*/
#configureAmbienceFilter({cycle, base, dark}) {
const ambienceFilter = canvas.primary._ambienceFilter;
if ( !ambienceFilter ) return;
const u = ambienceFilter.uniforms;
// Assigning base ambience parameters
const bh = Color.fromHSL([base.hue, 1, 0.5]).linear;
Color.applyRGB(bh, u.baseTint);
u.baseLuminosity = base.luminosity;
u.baseShadows = base.shadows;
u.baseIntensity = base.intensity;
u.baseSaturation = base.saturation;
const baseAmbienceHasEffect = (base.luminosity !== 0) || (base.shadows > 0)
|| (base.intensity > 0) || (base.saturation !== 0);
// Assigning dark ambience parameters
const dh = Color.fromHSL([dark.hue, 1, 0.5]).linear;
Color.applyRGB(dh, u.darkTint);
u.darkLuminosity = dark.luminosity;
u.darkShadows = dark.shadows;
u.darkIntensity = dark.intensity;
u.darkSaturation = dark.saturation;
const darkAmbienceHasEffect = ((dark.luminosity !== 0) || (dark.shadows > 0)
|| (dark.intensity > 0) || (dark.saturation !== 0)) && cycle;
// Assigning the cycle option
u.cycle = cycle;
// Darkness level texture
u.darknessLevelTexture = canvas.effects.illumination.renderTexture;
// Enable ambience filter if it is impacting visuals
ambienceFilter.enabled = baseAmbienceHasEffect || darkAmbienceHasEffect;
}
/* -------------------------------------------- */
/**
* Configure the global light.
* @param {SceneEnvironmentData} environment
* @param {GlobalLightData} environment.globalLight
*/
#configureGlobalLight({globalLight}) {
const maxR = canvas.dimensions.maxR * 1.2;
const globalLightData = foundry.utils.mergeObject({
z: -Infinity,
elevation: Infinity,
dim: globalLight.bright ? 0 : maxR,
bright: globalLight.bright ? maxR : 0,
disabled: !globalLight.enabled
}, globalLight, {overwrite: false});
this.globalLightSource.initialize(globalLightData);
this.globalLightSource.add();
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get darknessPenalty() {
const msg = "EnvironmentCanvasGroup#darknessPenalty is deprecated without replacement. " +
"The darkness penalty is no longer applied on light and vision sources.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return 0;
}
}

View File

@@ -0,0 +1,116 @@
/**
* A specialized canvas group for rendering hidden containers before all others (like masks).
* @extends {PIXI.Container}
*/
class HiddenCanvasGroup extends CanvasGroupMixin(PIXI.Container) {
constructor() {
super();
this.eventMode = "none";
this.#createMasks();
}
/**
* The container which hold masks.
* @type {PIXI.Container}
*/
masks = new PIXI.Container();
/** @override */
static groupName = "hidden";
/* -------------------------------------------- */
/**
* Add a mask to this group.
* @param {string} name Name of the mask.
* @param {PIXI.DisplayObject} displayObject Display object to add.
* @param {number|undefined} [position=undefined] Position of the mask.
*/
addMask(name, displayObject, position) {
if ( !((typeof name === "string") && (name.length > 0)) ) {
throw new Error(`Adding mask failed. Name ${name} is invalid.`);
}
if ( !displayObject.clear ) {
throw new Error("A mask container must implement a clear method.");
}
// Add the mask to the dedicated `masks` container
this.masks[name] = position
? this.masks.addChildAt(displayObject, position)
: this.masks.addChild(displayObject);
}
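/* Illustrative usage sketch (the mask name "myMask" is hypothetical; assumes the group is
* exposed as canvas.hidden): registering an additional mask container.
*
*   const g = new PIXI.LegacyGraphics();     // provides the required clear() method
*   canvas.hidden.addMask("myMask", g);      // appended to the masks container
*   canvas.hidden.invalidateMasks();         // flag cached masks for re-rendering
*/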
/* -------------------------------------------- */
/**
* Invalidate the masks: flag them for rerendering.
*/
invalidateMasks() {
for ( const mask of this.masks.children ) {
if ( !(mask instanceof CachedContainer) ) continue;
mask.renderDirty = true;
}
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
this.invalidateMasks();
this.addChild(this.masks);
await this.#drawMasks();
await super._draw(options);
}
/* -------------------------------------------- */
/**
* Perform necessary draw operations.
*/
async #drawMasks() {
await this.masks.vision.draw();
}
/* -------------------------------------------- */
/**
* Create the canvas scissor, scene scissor, vision, occlusion, and depth masks and register them on the masks container.
*/
#createMasks() {
// The canvas scissor mask is the first thing to render
const canvas = new PIXI.LegacyGraphics();
this.addMask("canvas", canvas);
// The scene scissor mask
const scene = new PIXI.LegacyGraphics();
this.addMask("scene", scene);
// Then we need to render vision mask
const vision = new CanvasVisionMask();
this.addMask("vision", vision);
// Then we need to render occlusion mask
const occlusion = new CanvasOcclusionMask();
this.addMask("occlusion", occlusion);
// Then the depth mask, which needs occlusion
const depth = new CanvasDepthMask();
this.addMask("depth", depth);
}
/* -------------------------------------------- */
/* Tear-Down */
/* -------------------------------------------- */
/** @inheritDoc */
async _tearDown(options) {
this.removeChild(this.masks);
// Clear all masks (children of masks)
this.masks.children.forEach(c => c.clear());
// Then proceed normally
await super._tearDown(options);
}
}

View File

@@ -0,0 +1,234 @@
/**
* A container group which displays interface elements rendered above other canvas groups.
* @extends {CanvasGroupMixin(PIXI.Container)}
*/
class InterfaceCanvasGroup extends CanvasGroupMixin(PIXI.Container) {
/** @override */
static groupName = "interface";
/**
* A container dedicated to the display of scrolling text.
* @type {PIXI.Container}
*/
#scrollingText;
/**
* A graphics which represent the scene outline.
* @type {PIXI.Graphics}
*/
#outline;
/**
* The interface drawings container.
* @type {PIXI.Container}
*/
#drawings;
/* -------------------------------------------- */
/* Drawing Management */
/* -------------------------------------------- */
/**
* Add a PIXI.Graphics for a Drawing to the interface group.
* @param {Drawing} drawing The Drawing being added
* @returns {PIXI.Graphics} The created Graphics instance
*/
addDrawing(drawing) {
const name = drawing.objectId;
const shape = this.#drawings.children.find(c => c.name === name) ?? this.#drawings.addChild(new PIXI.Graphics());
shape.name = name;
return shape;
}
/* -------------------------------------------- */
/**
* Remove the PIXI.Graphics for a Drawing from the interface group.
* @param {Drawing} drawing The Drawing being removed
*/
removeDrawing(drawing) {
const name = drawing.objectId;
const shape = this.#drawings.children.find(c => c.name === name);
if ( shape?.destroyed === false ) shape.destroy({children: true});
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
this.#drawOutline();
this.#createInterfaceDrawingsContainer();
this.#drawScrollingText();
await super._draw(options);
// Necessary so that Token#voidMesh doesn't erase non-interface elements
this.filters = [new VoidFilter()];
this.filterArea = canvas.app.screen;
}
/* -------------------------------------------- */
/**
* Draw a background outline which emphasizes what portion of the canvas is playable space and what is buffer.
*/
#drawOutline() {
// Create Canvas outline
const outline = this.#outline = this.addChild(new PIXI.Graphics());
const {scene, dimensions} = canvas;
const displayCanvasBorder = scene.padding !== 0;
const displaySceneOutline = !scene.background.src;
if ( !(displayCanvasBorder || displaySceneOutline) ) return;
if ( displayCanvasBorder ) outline.lineStyle({
alignment: 1,
alpha: 0.75,
color: 0x000000,
join: PIXI.LINE_JOIN.BEVEL,
width: 4
}).drawShape(dimensions.rect);
if ( displaySceneOutline ) outline.lineStyle({
alignment: 1,
alpha: 0.25,
color: 0x000000,
join: PIXI.LINE_JOIN.BEVEL,
width: 4
}).drawShape(dimensions.sceneRect).endFill();
}
/* -------------------------------------------- */
/* Scrolling Text */
/* -------------------------------------------- */
/**
* Draw the scrolling text.
*/
#drawScrollingText() {
this.#scrollingText = this.addChild(new PIXI.Container());
const {width, height} = canvas.dimensions;
this.#scrollingText.width = width;
this.#scrollingText.height = height;
this.#scrollingText.eventMode = "none";
this.#scrollingText.interactiveChildren = false;
this.#scrollingText.zIndex = CONFIG.Canvas.groups.interface.zIndexScrollingText;
}
/* -------------------------------------------- */
/**
* Create the interface drawings container.
*/
#createInterfaceDrawingsContainer() {
this.#drawings = this.addChild(new PIXI.Container());
this.#drawings.sortChildren = function() {
const children = this.children;
for ( let i = 0, n = children.length; i < n; i++ ) children[i]._lastSortedIndex = i;
children.sort(InterfaceCanvasGroup.#compareObjects);
this.sortDirty = false;
};
this.#drawings.sortableChildren = true;
this.#drawings.eventMode = "none";
this.#drawings.interactiveChildren = false;
this.#drawings.zIndex = CONFIG.Canvas.groups.interface.zIndexDrawings;
}
/* -------------------------------------------- */
/**
* The sorting function used to order objects inside the Interface Drawings Container
* Overrides the default sorting function defined for the PIXI.Container.
* @param {PrimaryCanvasObject|PIXI.DisplayObject} a An object to display
* @param {PrimaryCanvasObject|PIXI.DisplayObject} b Some other object to display
* @returns {number}
*/
static #compareObjects(a, b) {
return ((a.elevation || 0) - (b.elevation || 0))
|| ((a.sort || 0) - (b.sort || 0))
|| (a.zIndex - b.zIndex)
|| (a._lastSortedIndex - b._lastSortedIndex);
}
/* -------------------------------------------- */
/**
* Display scrolling status text originating from an origin point on the Canvas.
* @param {Point} origin An origin point where the text should first emerge
* @param {string} content The text content to display
* @param {object} [options] Options which customize the text animation
* @param {number} [options.duration=2000] The duration of the scrolling effect in milliseconds
* @param {number} [options.distance] The distance in pixels that the scrolling text should travel
* @param {TEXT_ANCHOR_POINTS} [options.anchor] The original anchor point where the text appears
* @param {TEXT_ANCHOR_POINTS} [options.direction] The direction in which the text scrolls
* @param {number} [options.jitter=0] An amount of randomization between [0, 1] applied to the initial position
* @param {object} [options.textStyle={}] Additional parameters of PIXI.TextStyle which are applied to the text
* @returns {Promise<PreciseText|null>} The created PreciseText object which is scrolling
*/
async createScrollingText(origin, content, {duration=2000, distance, jitter=0, anchor, direction, ...textStyle}={}) {
if ( !game.settings.get("core", "scrollingStatusText") ) return null;
// Create text object
const style = PreciseText.getTextStyle({anchor, ...textStyle});
const text = this.#scrollingText.addChild(new PreciseText(content, style));
text.visible = false;
// Set initial coordinates
const jx = (jitter ? (Math.random()-0.5) * jitter : 0) * text.width;
const jy = (jitter ? (Math.random()-0.5) * jitter : 0) * text.height;
text.position.set(origin.x + jx, origin.y + jy);
// Configure anchor point
text.anchor.set(...{
[CONST.TEXT_ANCHOR_POINTS.CENTER]: [0.5, 0.5],
[CONST.TEXT_ANCHOR_POINTS.BOTTOM]: [0.5, 0],
[CONST.TEXT_ANCHOR_POINTS.TOP]: [0.5, 1],
[CONST.TEXT_ANCHOR_POINTS.LEFT]: [1, 0.5],
[CONST.TEXT_ANCHOR_POINTS.RIGHT]: [0, 0.5]
}[anchor ?? CONST.TEXT_ANCHOR_POINTS.CENTER]);
// Configure animation distance
let dx = 0;
let dy = 0;
switch ( direction ?? CONST.TEXT_ANCHOR_POINTS.TOP ) {
case CONST.TEXT_ANCHOR_POINTS.BOTTOM:
dy = distance ?? (2 * text.height); break;
case CONST.TEXT_ANCHOR_POINTS.TOP:
dy = -1 * (distance ?? (2 * text.height)); break;
case CONST.TEXT_ANCHOR_POINTS.LEFT:
dx = -1 * (distance ?? (2 * text.width)); break;
case CONST.TEXT_ANCHOR_POINTS.RIGHT:
dx = distance ?? (2 * text.width); break;
}
// Fade In
await CanvasAnimation.animate([
{parent: text, attribute: "alpha", from: 0, to: 1.0},
{parent: text.scale, attribute: "x", from: 0.6, to: 1.0},
{parent: text.scale, attribute: "y", from: 0.6, to: 1.0}
], {
context: this,
duration: duration * 0.25,
easing: CanvasAnimation.easeInOutCosine,
ontick: () => text.visible = true
});
// Scroll
const scroll = [{parent: text, attribute: "alpha", to: 0.0}];
if ( dx !== 0 ) scroll.push({parent: text, attribute: "x", to: text.position.x + dx});
if ( dy !== 0 ) scroll.push({parent: text, attribute: "y", to: text.position.y + dy});
await CanvasAnimation.animate(scroll, {
context: this,
duration: duration * 0.75,
easing: CanvasAnimation.easeInOutCosine
});
// Clean-up
this.#scrollingText.removeChild(text);
text.destroy();
}
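/* Illustrative usage sketch (assumed values; not part of the class itself): emitting
* scrolling status text above a canvas point. The fill option is a standard PIXI.TextStyle
* property forwarded through textStyle.
*
*   await canvas.interface.createScrollingText({x: 2000, y: 1500}, "+5 HP", {
*     duration: 2000,
*     anchor: CONST.TEXT_ANCHOR_POINTS.CENTER,
*     direction: CONST.TEXT_ANCHOR_POINTS.TOP,
*     jitter: 0.25,
*     fill: 0x00FF00
*   });
*/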
}

View File

@@ -0,0 +1,13 @@
/**
* A container group which is not bound to the stage world transform.
*
* @category - Canvas
*/
class OverlayCanvasGroup extends CanvasGroupMixin(UnboundContainer) {
/** @override */
static groupName = "overlay";
/** @override */
static tearDownChildren = false;
}

View File

@@ -0,0 +1,613 @@
/**
* The primary Canvas group which generally contains tangible physical objects which exist within the Scene.
* This group is a {@link CachedContainer} which is rendered to the Scene as a {@link SpriteMesh}.
* This allows the rendered result of the Primary Canvas Group to be affected by a {@link BaseSamplerShader}.
* @extends {CachedContainer}
* @mixes CanvasGroupMixin
* @category - Canvas
*/
class PrimaryCanvasGroup extends CanvasGroupMixin(CachedContainer) {
constructor(sprite) {
sprite ||= new SpriteMesh(undefined, BaseSamplerShader);
super(sprite);
this.eventMode = "none";
this.#createAmbienceFilter();
this.on("childAdded", this.#onChildAdded);
this.on("childRemoved", this.#onChildRemoved);
}
/**
* Sort order to break ties on the group/layer level.
* @enum {number}
*/
static SORT_LAYERS = Object.freeze({
SCENE: 0,
TILES: 500,
DRAWINGS: 600,
TOKENS: 700,
WEATHER: 1000
});
/** @override */
static groupName = "primary";
/** @override */
static textureConfiguration = {
scaleMode: PIXI.SCALE_MODES.NEAREST,
format: PIXI.FORMATS.RGB,
multisample: PIXI.MSAA_QUALITY.NONE
};
/** @override */
clearColor = [0, 0, 0, 0];
/**
* The background color in RGB.
* @type {[red: number, green: number, blue: number]}
* @internal
*/
_backgroundColor;
/**
* Track the set of SpriteMesh objects which are currently playing video as part of this group.
* @type {Set<SpriteMesh>}
*/
videoMeshes = new Set();
/**
* Occludable objects above this elevation are faded on hover.
* @type {number}
*/
hoverFadeElevation = 0;
/**
* Allow API users to override the default elevation of the background layer.
* This is a temporary solution until more formal support for scene levels is added in a future release.
* @type {number}
*/
static BACKGROUND_ELEVATION = 0;
/* -------------------------------------------- */
/* Group Attributes */
/* -------------------------------------------- */
/**
* The primary background image configured for the Scene, rendered as a SpriteMesh.
* @type {SpriteMesh}
*/
background;
/**
* The primary foreground image configured for the Scene, rendered as a SpriteMesh.
* @type {SpriteMesh}
*/
foreground;
/**
* A Quadtree which partitions and organizes primary canvas objects.
* @type {CanvasQuadtree}
*/
quadtree = new CanvasQuadtree();
/**
* The collection of PrimaryDrawingContainer objects which are rendered in the Scene.
* @type {Collection<string, PrimaryDrawingContainer>}
*/
drawings = new foundry.utils.Collection();
/**
* The collection of SpriteMesh objects which are rendered in the Scene.
* @type {Collection<string, TokenMesh>}
*/
tokens = new foundry.utils.Collection();
/**
* The collection of SpriteMesh objects which are rendered in the Scene.
* @type {Collection<string, PrimarySpriteMesh|TileSprite>}
*/
tiles = new foundry.utils.Collection();
/**
* The ambience filter which is applying post-processing effects.
* @type {PrimaryCanvasGroupAmbienceFilter}
* @internal
*/
_ambienceFilter;
/**
* The objects that are currently hovered in reverse sort order.
* @type {PrimaryCanvasObject[]}
*/
#hoveredObjects = [];
/**
* Track the tiling sprite deprecation error to avoid emitting the warning multiple times.
* FIXME: Remove when the deprecation period for the tiling sprite error is over.
* @type {boolean}
* @internal
*/
#tilingSpriteError = false;
/* -------------------------------------------- */
/* Group Properties */
/* -------------------------------------------- */
/**
* Return the base HTML image or video element which provides the background texture.
* @type {HTMLImageElement|HTMLVideoElement}
*/
get backgroundSource() {
if ( !this.background.texture.valid || this.background.texture === PIXI.Texture.WHITE ) return null;
return this.background.texture.baseTexture.resource.source;
}
/* -------------------------------------------- */
/**
* Return the base HTML image or video element which provides the foreground texture.
* @type {HTMLImageElement|HTMLVideoElement}
*/
get foregroundSource() {
if ( !this.foreground.texture.valid ) return null;
return this.foreground.texture.baseTexture.resource.source;
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/**
* Create the ambience filter bound to the primary group.
*/
#createAmbienceFilter() {
if ( this._ambienceFilter ) this._ambienceFilter.enabled = false;
else {
this.filters ??= [];
const f = this._ambienceFilter = PrimaryCanvasGroupAmbienceFilter.create();
f.enabled = false;
this.filterArea = canvas.app.renderer.screen;
this.filters.push(f);
}
}
/* -------------------------------------------- */
/**
* Refresh the primary mesh.
*/
refreshPrimarySpriteMesh() {
const singleSource = canvas.visibility.visionModeData.source;
const vmOptions = singleSource?.visionMode.canvas;
const isBaseSampler = (this.sprite.shader.constructor === BaseSamplerShader);
if ( !vmOptions && isBaseSampler ) return;
// Update the primary sprite shader class (or reset to BaseSamplerShader)
this.sprite.setShaderClass(vmOptions?.shader ?? BaseSamplerShader);
this.sprite.shader.uniforms.sampler = this.renderTexture;
// Need to update uniforms?
if ( !vmOptions?.uniforms ) return;
vmOptions.uniforms.linkedToDarknessLevel = singleSource?.visionMode.vision.darkness.adaptive;
vmOptions.uniforms.darknessLevel = canvas.environment.darknessLevel;
vmOptions.uniforms.darknessLevelTexture = canvas.effects.illumination.renderTexture;
vmOptions.uniforms.screenDimensions = canvas.screenDimensions;
// Assigning color from source if any
vmOptions.uniforms.tint = singleSource?.visionModeOverrides.colorRGB
?? this.sprite.shader.constructor.defaultUniforms.tint;
// Updating uniforms in the primary sprite shader
for ( const [uniform, value] of Object.entries(vmOptions?.uniforms ?? {}) ) {
if ( uniform in this.sprite.shader.uniforms ) this.sprite.shader.uniforms[uniform] = value;
}
}
/* -------------------------------------------- */
/**
* Update this group. Calculates the canvas transform and bounds of all its children and updates the quadtree.
*/
update() {
if ( this.sortDirty ) this.sortChildren();
const children = this.children;
for ( let i = 0, n = children.length; i < n; i++ ) {
children[i].updateCanvasTransform?.();
}
canvas.masks.depth._update();
if ( !CONFIG.debug.canvas.primary.bounds ) return;
const dbg = canvas.controls.debug.clear().lineStyle(5, 0x30FF00);
for ( const child of this.children ) {
if ( child.canvasBounds ) dbg.drawShape(child.canvasBounds);
}
}
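/* Illustrative debugging sketch (not part of the class itself): the bounds drawing above is
* gated by a debug flag which can be toggled from the console.
*
*   CONFIG.debug.canvas.primary.bounds = true;   // draw canvasBounds of children on canvas.controls.debug
*/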
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
this.#drawBackground();
this.#drawForeground();
this.#drawPadding();
this.hoverFadeElevation = 0;
await super._draw(options);
}
/* -------------------------------------------- */
/** @inheritDoc */
_render(renderer) {
const [r, g, b] = this._backgroundColor;
renderer.framebuffer.clear(r, g, b, 1, PIXI.BUFFER_BITS.COLOR);
super._render(renderer);
}
/* -------------------------------------------- */
/**
* Draw the Scene background image.
*/
#drawBackground() {
const bg = this.background = this.addChild(new PrimarySpriteMesh({name: "background", object: this}));
bg.elevation = this.constructor.BACKGROUND_ELEVATION;
const bgTextureSrc = canvas.sceneTextures.background ?? canvas.scene.background.src;
const bgTexture = bgTextureSrc instanceof PIXI.Texture ? bgTextureSrc : getTexture(bgTextureSrc);
this.#drawSceneMesh(bg, bgTexture);
}
/* -------------------------------------------- */
/**
* Draw the Scene foreground image.
*/
#drawForeground() {
const fg = this.foreground = this.addChild(new PrimarySpriteMesh({name: "foreground", object: this}));
fg.elevation = canvas.scene.foregroundElevation;
const fgTextureSrc = canvas.sceneTextures.foreground ?? canvas.scene.foreground;
const fgTexture = fgTextureSrc instanceof PIXI.Texture ? fgTextureSrc : getTexture(fgTextureSrc);
// Compare dimensions with background texture and draw the mesh
const bg = this.background.texture;
if ( fgTexture && bg && ((fgTexture.width !== bg.width) || (fgTexture.height !== bg.height)) ) {
ui.notifications.warn("WARNING.ForegroundDimensionsMismatch", {localize: true});
}
this.#drawSceneMesh(fg, fgTexture);
}
/* -------------------------------------------- */
/**
* Draw a PrimarySpriteMesh that fills the entire Scene rectangle.
* @param {PrimarySpriteMesh} mesh The target PrimarySpriteMesh
* @param {PIXI.Texture|null} texture The loaded Texture or null
*/
#drawSceneMesh(mesh, texture) {
const d = canvas.dimensions;
mesh.texture = texture ?? PIXI.Texture.EMPTY;
mesh.textureAlphaThreshold = 0.75;
mesh.occludedAlpha = 0.5;
mesh.visible = mesh.texture !== PIXI.Texture.EMPTY;
mesh.position.set(d.sceneX, d.sceneY);
mesh.width = d.sceneWidth;
mesh.height = d.sceneHeight;
mesh.sortLayer = PrimaryCanvasGroup.SORT_LAYERS.SCENE;
mesh.zIndex = -Infinity;
mesh.hoverFade = false;
// Manage video playback
const video = game.video.getVideoSource(mesh);
if ( video ) {
this.videoMeshes.add(mesh);
game.video.play(video, {volume: game.settings.get("core", "globalAmbientVolume")});
}
}
/* -------------------------------------------- */
/**
* Draw the Scene padding.
*/
#drawPadding() {
const d = canvas.dimensions;
const g = this.addChild(new PIXI.LegacyGraphics());
g.beginFill(0x000000, 0.025)
.drawShape(d.rect)
.beginHole()
.drawShape(d.sceneRect)
.endHole()
.endFill();
g.elevation = -Infinity;
g.sort = -Infinity;
}
/* -------------------------------------------- */
/* Tear-Down */
/* -------------------------------------------- */
/** @inheritDoc */
async _tearDown(options) {
// Stop video playback
for ( const mesh of this.videoMeshes ) game.video.stop(mesh.sourceElement);
await super._tearDown(options);
// Clear collections
this.videoMeshes.clear();
this.tokens.clear();
this.tiles.clear();
// Clear the quadtree
this.quadtree.clear();
// Reset the tiling sprite tracker
this.#tilingSpriteError = false;
}
/* -------------------------------------------- */
/* Token Management */
/* -------------------------------------------- */
/**
* Draw the SpriteMesh for a specific Token object.
* @param {Token} token The Token being added
* @returns {PrimarySpriteMesh} The added PrimarySpriteMesh
*/
addToken(token) {
const name = token.objectId;
// Create the token mesh
const mesh = this.tokens.get(name) ?? this.addChild(new PrimarySpriteMesh({name, object: token}));
mesh.texture = token.texture ?? PIXI.Texture.EMPTY;
this.tokens.set(name, mesh);
if ( mesh.isVideo ) this.videoMeshes.add(mesh);
return mesh;
}
/* -------------------------------------------- */
/**
* Remove a TokenMesh from the group.
* @param {Token} token The Token being removed
*/
removeToken(token) {
const name = token.objectId;
const mesh = this.tokens.get(name);
if ( mesh?.destroyed === false ) mesh.destroy({children: true});
this.tokens.delete(name);
this.videoMeshes.delete(mesh);
}
/* -------------------------------------------- */
/* Tile Management */
/* -------------------------------------------- */
/**
* Draw the SpriteMesh for a specific Tile object.
* @param {Tile} tile The Tile being added
* @returns {PrimarySpriteMesh} The added PrimarySpriteMesh
*/
addTile(tile) {
/** @deprecated since v12 */
if ( !this.#tilingSpriteError && tile.document.getFlag("core", "isTilingSprite") ) {
this.#tilingSpriteError = true;
ui.notifications.warn("WARNING.TilingSpriteDeprecation", {localize: true, permanent: true});
const msg = "Tiling Sprites are deprecated without replacement.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
}
const name = tile.objectId;
let mesh = this.tiles.get(name) ?? this.addChild(new PrimarySpriteMesh({name, object: tile}));
mesh.texture = tile.texture ?? PIXI.Texture.EMPTY;
this.tiles.set(name, mesh);
if ( mesh.isVideo ) this.videoMeshes.add(mesh);
return mesh;
}
/* -------------------------------------------- */
/**
* Remove the SpriteMesh for a specific Tile object.
* @param {Tile} tile The Tile being removed
*/
removeTile(tile) {
const name = tile.objectId;
const mesh = this.tiles.get(name);
if ( mesh?.destroyed === false ) mesh.destroy({children: true});
this.tiles.delete(name);
this.videoMeshes.delete(mesh);
}
/* -------------------------------------------- */
/* Drawing Management */
/* -------------------------------------------- */
/**
* Add a PrimaryGraphics to the group.
* @param {Drawing} drawing The Drawing being added
* @returns {PrimaryGraphics} The created PrimaryGraphics instance
*/
addDrawing(drawing) {
const name = drawing.objectId;
const shape = this.drawings.get(name) ?? this.addChild(new PrimaryGraphics({name, object: drawing}));
this.drawings.set(name, shape);
return shape;
}
/* -------------------------------------------- */
/**
* Remove a PrimaryGraphics from the group.
* @param {Drawing} drawing The Drawing being removed
*/
removeDrawing(drawing) {
const name = drawing.objectId;
if ( !this.drawings.has(name) ) return;
const shape = this.drawings.get(name);
if ( shape?.destroyed === false ) shape.destroy({children: true});
this.drawings.delete(name);
}
/* -------------------------------------------- */
/**
* Override the default PIXI.Container behavior for how objects in this container are sorted.
* @override
*/
sortChildren() {
const children = this.children;
for ( let i = 0, n = children.length; i < n; i++ ) children[i]._lastSortedIndex = i;
children.sort(PrimaryCanvasGroup.#compareObjects);
this.sortDirty = false;
}
/* -------------------------------------------- */
/**
* The sorting function used to order objects inside the Primary Canvas Group.
* Overrides the default sorting function defined for the PIXI.Container.
* Sort Tokens PCO above other objects except WeatherEffects, then Drawings PCO, all else held equal.
* @param {PrimaryCanvasObject|PIXI.DisplayObject} a An object to display
* @param {PrimaryCanvasObject|PIXI.DisplayObject} b Some other object to display
* @returns {number}
*/
static #compareObjects(a, b) {
return ((a.elevation || 0) - (b.elevation || 0))
|| ((a.sortLayer || 0) - (b.sortLayer || 0))
|| ((a.sort || 0) - (b.sort || 0))
|| (a.zIndex - b.zIndex)
|| (a._lastSortedIndex - b._lastSortedIndex);
}
/* -------------------------------------------- */
/* PIXI Events */
/* -------------------------------------------- */
/**
* Called when a child is added.
* @param {PIXI.DisplayObject} child
*/
#onChildAdded(child) {
if ( child.shouldRenderDepth ) canvas.masks.depth._elevationDirty = true;
}
/* -------------------------------------------- */
/**
* Called when a child is removed.
* @param {PIXI.DisplayObject} child
*/
#onChildRemoved(child) {
if ( child.shouldRenderDepth ) canvas.masks.depth._elevationDirty = true;
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/**
* Handle mousemove events on the primary group to update the hovered state of its children.
* @internal
*/
_onMouseMove() {
const time = canvas.app.ticker.lastTime;
// Unset the hovered state of the hovered PCOs
for ( const object of this.#hoveredObjects ) {
if ( !object._hoverFadeState.hovered ) continue;
object._hoverFadeState.hovered = false;
object._hoverFadeState.hoveredTime = time;
}
this.#updateHoveredObjects();
// Set the hovered state of the hovered PCOs
for ( const object of this.#hoveredObjects ) {
if ( !object.hoverFade || !(object.elevation > this.hoverFadeElevation) ) break;
object._hoverFadeState.hovered = true;
object._hoverFadeState.hoveredTime = time;
}
}
/* -------------------------------------------- */
/**
* Update the set of currently hovered objects, stored in reverse sort order.
*/
#updateHoveredObjects() {
this.#hoveredObjects.length = 0;
// Get all PCOs that contain the mouse position
const position = canvas.mousePosition;
const collisionTest = ({t}) => t.visible && t.renderable
&& t._hoverFadeState && t.containsCanvasPoint(position);
for ( const object of canvas.primary.quadtree.getObjects(
new PIXI.Rectangle(position.x, position.y, 0, 0), {collisionTest}
)) {
this.#hoveredObjects.push(object);
}
// Sort the hovered PCOs in reverse primary order
this.#hoveredObjects.sort((a, b) => PrimaryCanvasGroup.#compareObjects(b, a));
// Discard hit objects below the hovered placeable
const hoveredPlaceable = canvas.activeLayer?.hover;
if ( hoveredPlaceable ) {
let elevation = 0;
let sortLayer = Infinity;
let sort = Infinity;
let zIndex = Infinity;
if ( (hoveredPlaceable instanceof Token) || (hoveredPlaceable instanceof Tile) ) {
const mesh = hoveredPlaceable.mesh;
if ( mesh ) {
elevation = mesh.elevation;
sortLayer = mesh.sortLayer;
sort = mesh.sort;
zIndex = mesh.zIndex;
}
} else if ( hoveredPlaceable instanceof Drawing ) {
const shape = hoveredPlaceable.shape;
if ( shape ) {
elevation = shape.elevation;
sortLayer = shape.sortLayer;
sort = shape.sort;
zIndex = shape.zIndex;
}
} else if ( hoveredPlaceable.document.schema.has("elevation") ) {
elevation = hoveredPlaceable.document.elevation;
}
const threshold = {elevation, sortLayer, sort, zIndex, _lastSortedIndex: Infinity};
while ( this.#hoveredObjects.length
&& PrimaryCanvasGroup.#compareObjects(this.#hoveredObjects.at(-1), threshold) <= 0 ) {
this.#hoveredObjects.pop();
}
}
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
mapElevationToDepth(elevation) {
const msg = "PrimaryCanvasGroup#mapElevationAlpha is deprecated. "
+ "Use canvas.masks.depth.mapElevation(elevation) instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return canvas.masks.depth.mapElevation(elevation);
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
mapElevationAlpha(elevation) {
const msg = "PrimaryCanvasGroup#mapElevationAlpha is deprecated. "
+ "Use canvas.masks.depth.mapElevation(elevation) instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return canvas.masks.depth.mapElevation(elevation);
}
}

View File

@@ -0,0 +1,13 @@
/**
* A container group which contains the environment canvas group and the interface canvas group.
*
* @category - Canvas
*/
class RenderedCanvasGroup extends CanvasGroupMixin(PIXI.Container) {
/** @override */
static groupName = "rendered";
/** @override */
static tearDownChildren = false;
}

View File

@@ -0,0 +1,152 @@
/**
* An abstract pattern for primary layers of the game canvas to implement.
* @category - Canvas
* @abstract
* @interface
*/
class CanvasLayer extends PIXI.Container {
/**
* Options for this layer instance.
* @type {{name: string}}
*/
options = this.constructor.layerOptions;
// Default interactivity
interactiveChildren = false;
/* -------------------------------------------- */
/* Layer Attributes */
/* -------------------------------------------- */
/**
* Customize behaviors of this CanvasLayer by modifying some behaviors at a class level.
* @type {{name: string}}
*/
static get layerOptions() {
return {
name: "",
baseClass: CanvasLayer
};
}
/* -------------------------------------------- */
/**
* Return a reference to the active instance of this canvas layer
* @type {CanvasLayer}
*/
static get instance() {
return canvas[this.layerOptions.name];
}
/* -------------------------------------------- */
/**
* The canonical name of the CanvasLayer is the name of the constructor that is the immediate child of the
* defined baseClass for the layer type.
* @type {string}
*
* @example
* canvas.lighting.name -> "LightingLayer"
*/
get name() {
const baseCls = this.constructor.layerOptions.baseClass;
let cls = Object.getPrototypeOf(this.constructor);
let name = this.constructor.name;
while ( cls ) {
if ( cls !== baseCls ) {
name = cls.name;
cls = Object.getPrototypeOf(cls);
}
else break;
}
return name;
}
/* -------------------------------------------- */
/**
* The name used by hooks to construct their hook string.
* Note: You should override this getter if hookName should not return the class constructor name.
* @type {string}
*/
get hookName() {
return this.name;
}
/* -------------------------------------------- */
/**
* An internal reference to a Promise in-progress to draw the CanvasLayer.
* @type {Promise<CanvasLayer>}
*/
#drawing = Promise.resolve(this);
/* -------------------------------------------- */
/**
* Is the layer drawn?
* @type {boolean}
*/
#drawn = false;
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/**
* Draw the canvas layer, rendering its internal components and returning a Promise.
* The Promise resolves to the drawn layer once its contents are successfully rendered.
* @param {object} [options] Options which configure how the layer is drawn
* @returns {Promise<CanvasLayer>}
*/
async draw(options={}) {
return this.#drawing = this.#drawing.finally(async () => {
console.log(`${vtt} | Drawing the ${this.constructor.name} canvas layer`);
await this.tearDown();
await this._draw(options);
Hooks.callAll(`draw${this.hookName}`, this);
this.#drawn = true;
});
}
/**
* The inner _draw method which must be defined by each CanvasLayer subclass.
* @param {object} options Options which configure how the layer is drawn
* @abstract
* @protected
*/
async _draw(options) {
throw new Error(`The ${this.constructor.name} subclass of CanvasLayer must define the _draw method`);
}
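/* Minimal subclass sketch (illustrative only; MyLayer is hypothetical and not a core layer):
*
*   class MyLayer extends CanvasLayer {
*     static get layerOptions() {
*       return Object.assign(super.layerOptions, {name: "myLayer"});
*     }
*     async _draw(options) {
*       this.addChild(new PIXI.Graphics());   // build the layer's display objects here
*     }
*   }
*/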
/* -------------------------------------------- */
/**
* Deconstruct data used in the current layer in preparation to re-draw the canvas
* @param {object} [options] Options which configure how the layer is deconstructed
* @returns {Promise<CanvasLayer>}
*/
async tearDown(options={}) {
if ( !this.#drawn ) return this;
MouseInteractionManager.emulateMoveEvent();
this.#drawn = false;
this.renderable = false;
await this._tearDown(options);
Hooks.callAll(`tearDown${this.hookName}`, this);
this.renderable = true;
MouseInteractionManager.emulateMoveEvent();
return this;
}
/**
* The inner _tearDown method which may be customized by each CanvasLayer subclass.
* @param {object} options Options which configure how the layer is deconstructed
* @protected
*/
async _tearDown(options) {
this.removeChildren().forEach(c => c.destroy({children: true}));
}
}

View File

@@ -0,0 +1,229 @@
/**
* A subclass of CanvasLayer which provides support for user interaction with its contained objects.
* @category - Canvas
*/
class InteractionLayer extends CanvasLayer {
/**
* Is this layer currently active
* @type {boolean}
*/
get active() {
return this.#active;
}
/** @ignore */
#active = false;
/** @override */
eventMode = "passive";
/**
* Customize behaviors of this CanvasLayer by modifying some behaviors at a class level.
* @type {{name: string, zIndex: number}}
*/
static get layerOptions() {
return Object.assign(super.layerOptions, {
baseClass: InteractionLayer,
zIndex: 0
});
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/**
* Activate the InteractionLayer, deactivating other layers and marking this layer's children as interactive.
* @param {object} [options] Options which configure layer activation
* @param {string} [options.tool] A specific tool in the control palette to set as active
* @returns {InteractionLayer} The layer instance, now activated
*/
activate({tool}={}) {
// Set this layer as active
const wasActive = this.#active;
this.#active = true;
// Deactivate other layers
for ( const name of Object.keys(Canvas.layers) ) {
const layer = canvas[name];
if ( (layer !== this) && (layer instanceof InteractionLayer) ) layer.deactivate();
}
// Re-render Scene controls
ui.controls?.initialize({layer: this.constructor.layerOptions.name, tool});
if ( wasActive ) return this;
// Reset the interaction manager
canvas.mouseInteractionManager?.reset({state: false});
// Assign interactivity for the active layer
this.zIndex = this.getZIndex();
this.eventMode = "static";
this.interactiveChildren = true;
// Call layer-specific activation procedures
this._activate();
Hooks.callAll(`activate${this.hookName}`, this);
Hooks.callAll("activateCanvasLayer", this);
return this;
}
/**
* The inner _activate method which may be defined by each InteractionLayer subclass.
* @protected
*/
_activate() {}
/* -------------------------------------------- */
/**
* Deactivate the InteractionLayer, removing interactivity from its children.
* @returns {InteractionLayer} The layer instance, now inactive
*/
deactivate() {
if ( !this.#active ) return this;
canvas.highlightObjects(false);
this.#active = false;
this.eventMode = "passive";
this.interactiveChildren = false;
this.zIndex = this.getZIndex();
this._deactivate();
Hooks.callAll(`deactivate${this.hookName}`, this);
return this;
}
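/* Illustrative usage sketch (not part of the class itself): switching the active layer
* programmatically. The "select" tool name is an assumption for the example.
*
*   canvas.tokens.activate({tool: "select"});   // activate one layer, deactivating the others
*   canvas.tokens.deactivate();                 // return it to the passive state
*/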
/**
* The inner _deactivate method which may be defined by each InteractionLayer subclass.
* @protected
*/
_deactivate() {}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
this.hitArea = canvas.dimensions.rect;
this.zIndex = this.getZIndex();
}
/* -------------------------------------------- */
/**
* Get the zIndex that should be used for ordering this layer vertically relative to others in the same Container.
* @returns {number}
*/
getZIndex() {
return this.options.zIndex;
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/**
* Handle left mouse-click events which originate from the Canvas stage.
* @see {@link Canvas._onClickLeft}
* @param {PIXI.FederatedEvent} event The PIXI InteractionEvent which wraps a PointerEvent
* @protected
*/
_onClickLeft(event) {}
/* -------------------------------------------- */
/**
* Handle double left-click events which originate from the Canvas stage.
* @see {@link Canvas.#onClickLeft2}
* @param {PIXI.FederatedEvent} event The PIXI InteractionEvent which wraps a PointerEvent
* @protected
*/
_onClickLeft2(event) {}
/* -------------------------------------------- */
/**
* Does the User have permission to left-click drag on the Canvas?
* @param {User} user The User performing the action.
* @param {PIXI.FederatedEvent} event The event object.
* @returns {boolean}
* @protected
*/
_canDragLeftStart(user, event) {
return true;
}
/* -------------------------------------------- */
/**
* Start a left-click drag workflow originating from the Canvas stage.
* @see {@link Canvas.#onDragLeftStart}
* @param {PIXI.FederatedEvent} event The PIXI InteractionEvent which wraps a PointerEvent
* @protected
*/
_onDragLeftStart(event) {}
/* -------------------------------------------- */
/**
* Continue a left-click drag workflow originating from the Canvas stage.
* @see {@link Canvas.#onDragLeftMove}
* @param {PIXI.FederatedEvent} event The PIXI InteractionEvent which wraps a PointerEvent
* @protected
*/
_onDragLeftMove(event) {}
/* -------------------------------------------- */
/**
* Conclude a left-click drag workflow originating from the Canvas stage.
* @see {@link Canvas.#onDragLeftDrop}
* @param {PIXI.FederatedEvent} event The PIXI InteractionEvent which wraps a PointerEvent
* @protected
*/
_onDragLeftDrop(event) {}
/* -------------------------------------------- */
/**
* Cancel a left-click drag workflow originating from the Canvas stage.
* @see {@link Canvas.#onDragLeftDrop}
* @param {PointerEvent} event A right-click pointer event on the document.
* @protected
*/
_onDragLeftCancel(event) {}
/* -------------------------------------------- */
/**
* Handle right mouse-click events which originate from the Canvas stage.
* @see {@link Canvas._onClickRight}
* @param {PIXI.FederatedEvent} event The PIXI InteractionEvent which wraps a PointerEvent
* @protected
*/
_onClickRight(event) {}
/* -------------------------------------------- */
/**
* Handle mouse-wheel events which occur for this active layer.
* @see {@link MouseManager._onWheel}
* @param {WheelEvent} event The WheelEvent initiated on the document
* @protected
*/
_onMouseWheel(event) {}
/* -------------------------------------------- */
/**
* Handle a DELETE keypress while this layer is active.
* @see {@link ClientKeybindings._onDelete}
* @param {KeyboardEvent} event The delete key press event
* @protected
*/
async _onDeleteKey(event) {}
}
/* -------------------------------------------- */

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,81 @@
/**
* A single Mouse Cursor
* @type {PIXI.Container}
*/
class Cursor extends PIXI.Container {
constructor(user) {
super();
this.target = {x: 0, y: 0};
this.draw(user);
}
/**
* Whether this cursor is currently animating.
* @type {boolean}
*/
#animating;
/* -------------------------------------------- */
/**
* Update visibility and animations
* @param {User} user The user
*/
refreshVisibility(user) {
const v = this.visible = !user.isSelf && user.hasPermission("SHOW_CURSOR");
if ( v && !this.#animating ) {
canvas.app.ticker.add(this._animate, this);
this.#animating = true; // Set flag to true when animation is added
} else if ( !v && this.#animating ) {
canvas.app.ticker.remove(this._animate, this);
this.#animating = false; // Set flag to false when animation is removed
}
}
/* -------------------------------------------- */
/**
* Draw the user's cursor as a small dot with their user name attached as text
* @param {User} user The User document represented by this cursor
*/
draw(user) {
// Cursor dot
const d = this.addChild(new PIXI.Graphics());
d.beginFill(user.color, 0.35).lineStyle(1, 0x000000, 0.5).drawCircle(0, 0, 6);
// Player name
const style = CONFIG.canvasTextStyle.clone();
style.fontSize = 14;
let n = this.addChild(new PreciseText(user.name, style));
n.x -= n.width / 2;
n.y += 10;
// Refresh
this.refreshVisibility(user);
}
/* -------------------------------------------- */
/**
* Move an existing cursor to a new position smoothly along the animation loop
*/
_animate() {
const dy = this.target.y - this.y;
const dx = this.target.x - this.x;
if ( Math.abs(dx) + Math.abs(dy) < 10 ) return;
this.x += dx / 10;
this.y += dy / 10;
}
/* -------------------------------------------- */
/** @inheritdoc */
destroy(options) {
if ( this.#animating ) {
canvas.app.ticker.remove(this._animate, this);
this.#animating = false;
}
super.destroy(options);
}
}
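A short usage sketch for the class above, assuming the ControlsLayer defined later in this commit is registered as canvas.controls (per its layerOptions name). ControlsLayer#updateCursor creates the Cursor lazily and sets its target; Cursor#_animate then eases the sprite one tenth of the remaining distance per ticker frame, stopping within a 10px Manhattan radius. The coordinates are arbitrary.
// Illustrative only: ease another connected user's cursor toward a new canvas position.
const user = game.users.find(u => u.active && !u.isSelf);
if ( user?.hasPermission("SHOW_CURSOR") ) {
  canvas.controls.updateCursor(user, {x: 2000, y: 1500});
}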


@@ -0,0 +1,215 @@
/**
* An icon representing a Door Control
* @extends {PIXI.Container}
*/
class DoorControl extends PIXI.Container {
constructor(wall) {
super();
this.wall = wall;
this.visible = false; // Door controls are not visible by default
}
/* -------------------------------------------- */
/**
* The center of the wall which contains the door.
* @type {PIXI.Point}
*/
get center() {
return this.wall.center;
}
/* -------------------------------------------- */
/**
* Draw the DoorControl icon, displaying its icon texture and border
* @returns {Promise<DoorControl>}
*/
async draw() {
// Background
this.bg = this.bg || this.addChild(new PIXI.Graphics());
this.bg.clear().beginFill(0x000000, 1.0).drawRoundedRect(-2, -2, 44, 44, 5).endFill();
this.bg.alpha = 0;
// Control Icon
this.icon = this.icon || this.addChild(new PIXI.Sprite());
this.icon.width = this.icon.height = 40;
this.icon.alpha = 0.6;
this.icon.texture = this._getTexture();
// Border
this.border = this.border || this.addChild(new PIXI.Graphics());
this.border.clear().lineStyle(1, 0xFF5500, 0.8).drawRoundedRect(-2, -2, 44, 44, 5).endFill();
this.border.visible = false;
// Add control interactivity
this.eventMode = "static";
this.interactiveChildren = false;
this.hitArea = new PIXI.Rectangle(-2, -2, 44, 44);
this.cursor = "pointer";
// Set position
this.reposition();
this.alpha = 1.0;
// Activate listeners
this.removeAllListeners();
this.on("pointerover", this._onMouseOver).on("pointerout", this._onMouseOut)
.on("pointerdown", this._onMouseDown).on("rightdown", this._onRightDown);
return this;
}
/* -------------------------------------------- */
/**
* Get the icon texture to use for the Door Control icon based on the door state
* @returns {PIXI.Texture}
*/
_getTexture() {
// Determine displayed door state
const ds = CONST.WALL_DOOR_STATES;
let s = this.wall.document.ds;
if ( !game.user.isGM && (s === ds.LOCKED) ) s = ds.CLOSED;
// Determine texture path
const icons = CONFIG.controlIcons;
let path = {
[ds.LOCKED]: icons.doorLocked,
[ds.CLOSED]: icons.doorClosed,
[ds.OPEN]: icons.doorOpen
}[s] || icons.doorClosed;
if ( (s === ds.CLOSED) && (this.wall.document.door === CONST.WALL_DOOR_TYPES.SECRET) ) path = icons.doorSecret;
// Obtain the icon texture
return getTexture(path);
}
/* -------------------------------------------- */
/**
* Reposition the control so it is centered on the midpoint of its Wall, offset by 20px to center the 40px icon.
*/
reposition() {
let pos = this.wall.midpoint.map(p => p - 20);
this.position.set(...pos);
}
/* -------------------------------------------- */
/**
* Determine whether the DoorControl is visible to the calling user's perspective.
* The control is always visible if the user is a GM and no Tokens are controlled.
* @see {CanvasVisibility#testVisibility}
* @type {boolean}
*/
get isVisible() {
if ( !canvas.visibility.tokenVision ) return true;
// Hide secret doors from players
const w = this.wall;
if ( (w.document.door === CONST.WALL_DOOR_TYPES.SECRET) && !game.user.isGM ) return false;
// Test two points which are perpendicular to the door midpoint
const ray = this.wall.toRay();
const [x, y] = w.midpoint;
const [dx, dy] = [-ray.dy, ray.dx];
const t = 3 / (Math.abs(dx) + Math.abs(dy)); // Approximate with Manhattan distance for speed
const points = [
{x: x + (t * dx), y: y + (t * dy)},
{x: x - (t * dx), y: y - (t * dy)}
];
// Test each point for visibility
return points.some(p => {
return canvas.visibility.testVisibility(p, {object: this, tolerance: 0});
});
}
/* -------------------------------------------- */
/* Event Handlers */
/* -------------------------------------------- */
/**
* Handle mouse over events on a door control icon.
* @param {PIXI.FederatedEvent} event The originating interaction event
* @protected
*/
_onMouseOver(event) {
event.stopPropagation();
const canControl = game.user.can("WALL_DOORS");
const blockPaused = game.paused && !game.user.isGM;
if ( !canControl || blockPaused ) return false;
this.border.visible = true;
this.icon.alpha = 1.0;
this.bg.alpha = 0.25;
canvas.walls.hover = this.wall;
}
/* -------------------------------------------- */
/**
* Handle mouse out events on a door control icon.
* @param {PIXI.FederatedEvent} event The originating interaction event
* @protected
*/
_onMouseOut(event) {
event.stopPropagation();
if ( game.paused && !game.user.isGM ) return false;
this.border.visible = false;
this.icon.alpha = 0.6;
this.bg.alpha = 0;
canvas.walls.hover = null;
}
/* -------------------------------------------- */
/**
* Handle left mouse down events on a door control icon.
* This should only toggle between the OPEN and CLOSED states.
* @param {PIXI.FederatedEvent} event The originating interaction event
* @protected
*/
_onMouseDown(event) {
if ( event.button !== 0 ) return; // Only support standard left-click
event.stopPropagation();
const { ds } = this.wall.document;
const states = CONST.WALL_DOOR_STATES;
// Determine whether the player can control the door at this time
if ( !game.user.can("WALL_DOORS") ) return false;
if ( game.paused && !game.user.isGM ) {
ui.notifications.warn("GAME.PausedWarning", {localize: true});
return false;
}
const sound = !(game.user.isGM && game.keyboard.isModifierActive(KeyboardManager.MODIFIER_KEYS.ALT));
// Play an audio cue for testing locked doors, only for the current client
if ( ds === states.LOCKED ) {
if ( sound ) this.wall._playDoorSound("test");
return false;
}
// Toggle between OPEN and CLOSED states
return this.wall.document.update({ds: ds === states.CLOSED ? states.OPEN : states.CLOSED}, {sound});
}
/* -------------------------------------------- */
/**
* Handle right mouse down events on a door control icon.
* This should toggle whether the door is LOCKED or CLOSED.
* @param {PIXI.FederatedEvent} event The originating interaction event
* @protected
*/
_onRightDown(event) {
event.stopPropagation();
if ( !game.user.isGM ) return;
let state = this.wall.document.ds;
const states = CONST.WALL_DOOR_STATES;
if ( state === states.OPEN ) return;
state = state === states.LOCKED ? states.CLOSED : states.LOCKED;
const sound = !(game.user.isGM && game.keyboard.isModifierActive(KeyboardManager.MODIFIER_KEYS.ALT));
return this.wall.document.update({ds: state}, {sound});
}
}
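A hedged sketch of constructing a DoorControl by hand. Foundry normally does this through Wall#createDoorControl (called from ControlsLayer#drawDoors below), so the direct construction here is purely illustrative; the only assumption beyond this file is that the ControlsLayer is mounted as canvas.controls.
const wall = canvas.walls.placeables.find(w => w.isDoor);
if ( wall ) {
  const control = canvas.controls.doors.addChild(new DoorControl(wall));
  control.draw().then(() => {
    // draw() positions the icon at the wall midpoint and wires the pointer listeners;
    // visibility is gated by token vision and door secrecy via the isVisible getter.
    control.visible = control.isVisible;
  });
}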


@@ -0,0 +1,385 @@
/**
* A CanvasLayer for displaying UI controls which are overlaid on top of other layers.
*
* We track three types of events:
* 1) Cursor movement
* 2) Ruler measurement
* 3) Map pings
*/
class ControlsLayer extends InteractionLayer {
constructor() {
super();
// Always interactive, even while the layer is disabled, so that door controls remain usable
this.interactiveChildren = true;
/**
* A container of DoorControl instances
* @type {PIXI.Container}
*/
this.doors = this.addChild(new PIXI.Container());
/**
* A container of cursor interaction elements.
* Contains cursors, rulers, interaction rectangles, and pings
* @type {PIXI.Container}
*/
this.cursors = this.addChild(new PIXI.Container());
this.cursors.eventMode = "none";
this.cursors.mask = canvas.masks.canvas;
/**
* Ruler tools, one per connected user
* @type {PIXI.Container}
*/
this.rulers = this.addChild(new PIXI.Container());
this.rulers.eventMode = "none";
/**
* A graphics instance used for drawing debugging visualization
* @type {PIXI.Graphics}
*/
this.debug = this.addChild(new PIXI.Graphics());
this.debug.eventMode = "none";
}
/**
* The Canvas selection rectangle
* @type {PIXI.Graphics}
*/
select;
/**
* A mapping of user IDs to Cursor instances for quick access
* @type {Record<string, Cursor>}
*/
_cursors = {};
/**
* A mapping of user IDs to Ruler instances for quick access
* @type {Record<string, Ruler>}
* @private
*/
_rulers = {};
/**
* The positions of any offscreen pings we are tracking.
* @type {Record<string, Point>}
* @private
*/
_offscreenPings = {};
/* -------------------------------------------- */
/** @override */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "controls",
zIndex: 1000
});
}
/* -------------------------------------------- */
/* Properties and Public Methods */
/* -------------------------------------------- */
/**
* A convenience accessor to the Ruler for the active game user
* @type {Ruler}
*/
get ruler() {
return this.getRulerForUser(game.user.id);
}
/* -------------------------------------------- */
/**
* Get the Ruler display for a specific User ID
* @param {string} userId
* @returns {Ruler|null}
*/
getRulerForUser(userId) {
return this._rulers[userId] || null;
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
await super._draw(options);
// Create additional elements
this.drawCursors();
this.drawRulers();
this.drawDoors();
this.select = this.cursors.addChild(new PIXI.Graphics());
// Adjust scale
const d = canvas.dimensions;
this.hitArea = d.rect;
}
/* -------------------------------------------- */
/** @override */
async _tearDown(options) {
this._cursors = {};
this._rulers = {};
this.doors.removeChildren();
this.cursors.removeChildren();
this.rulers.removeChildren();
this.debug.clear();
this.debug.debugText?.removeChildren().forEach(c => c.destroy({children: true}));
}
/* -------------------------------------------- */
/**
* Draw the cursors container
*/
drawCursors() {
for ( let u of game.users.filter(u => u.active && !u.isSelf ) ) {
this.drawCursor(u);
}
}
/* -------------------------------------------- */
/**
* Create and add Ruler graphics instances for every game User.
*/
drawRulers() {
const cls = CONFIG.Canvas.rulerClass;
for (let u of game.users) {
let ruler = this.getRulerForUser(u.id);
if ( !ruler ) ruler = this._rulers[u.id] = new cls(u);
this.rulers.addChild(ruler);
}
}
/* -------------------------------------------- */
/**
* Draw door control icons to the doors container.
*/
drawDoors() {
for ( const wall of canvas.walls.placeables ) {
if ( wall.isDoor ) wall.createDoorControl();
}
}
/* -------------------------------------------- */
/**
* Draw the select rectangle given an event originating within the base canvas layer
* @param {Object} coords The rectangle coordinates of the form {x, y, width, height}
*/
drawSelect({x, y, width, height}) {
const s = this.select.clear();
s.lineStyle(3, 0xFF9829, 0.9).drawRect(x, y, width, height);
}
/* -------------------------------------------- */
/** @override */
_deactivate() {
this.interactiveChildren = true;
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/**
* Handle mousemove events on the game canvas to broadcast activity of the user's cursor position
*/
_onMouseMove() {
if ( !game.user.hasPermission("SHOW_CURSOR") ) return;
game.user.broadcastActivity({cursor: canvas.mousePosition});
}
/* -------------------------------------------- */
/**
* Handle pinging the canvas.
* @param {PIXI.FederatedEvent} event The triggering canvas interaction event.
* @param {PIXI.Point} origin The local canvas coordinates of the mousepress.
* @protected
*/
_onLongPress(event, origin) {
const isCtrl = game.keyboard.isModifierActive(KeyboardManager.MODIFIER_KEYS.CONTROL);
const isTokenLayer = canvas.activeLayer instanceof TokenLayer;
if ( !game.user.hasPermission("PING_CANVAS") || isCtrl || !isTokenLayer ) return;
canvas.currentMouseManager.cancel(event); // Cancel drag workflow
return canvas.ping(origin);
}
/* -------------------------------------------- */
/**
* Handle the canvas panning to a new view.
* @protected
*/
_onCanvasPan() {
for ( const [name, position] of Object.entries(this._offscreenPings) ) {
const { ray, intersection } = this._findViewportIntersection(position);
if ( intersection ) {
const { x, y } = canvas.canvasCoordinatesFromClient(intersection);
const ping = CanvasAnimation.getAnimation(name).context;
ping.x = x;
ping.y = y;
ping.rotation = Math.normalizeRadians(ray.angle + (Math.PI * 1.5));
} else CanvasAnimation.terminateAnimation(name);
}
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/**
* Create and draw the Cursor object for a given User
* @param {User} user The User document for whom to draw the cursor Container
*/
drawCursor(user) {
if ( user.id in this._cursors ) {
this._cursors[user.id].destroy({children: true});
delete this._cursors[user.id];
}
return this._cursors[user.id] = this.cursors.addChild(new Cursor(user));
}
/* -------------------------------------------- */
/**
* Update the cursor when the user moves to a new position
* @param {User} user The User for whom to update the cursor
* @param {Point} position The new cursor position
*/
updateCursor(user, position) {
if ( !this.cursors ) return;
const cursor = this._cursors[user.id] || this.drawCursor(user);
// Ignore cursors on other Scenes
if ( (position === null) || (user.viewedScene !== canvas.scene.id) ) {
if ( cursor ) cursor.visible = false;
return;
}
// Show the cursor in its currently tracked position
cursor.refreshVisibility(user);
cursor.target = {x: position.x || 0, y: position.y || 0};
}
/* -------------------------------------------- */
/**
* Update display of an active Ruler object for a user given provided data
* @param {User} user The User for whom to update the ruler
* @param {RulerMeasurementData|null} rulerData Data which describes the new ruler measurement to display
*/
updateRuler(user, rulerData) {
// Ignore rulers for users who are not permitted to share
if ( (user === game.user) || !user.hasPermission("SHOW_RULER") ) return;
// Update the Ruler display for the user
const ruler = this.getRulerForUser(user.id);
ruler?.update(rulerData);
}
/* -------------------------------------------- */
/**
* Handle a broadcast ping.
* @see {@link Ping#drawPing}
* @param {User} user The user who pinged.
* @param {Point} position The position on the canvas that was pinged.
* @param {PingData} [data] The broadcast ping data.
* @returns {Promise<boolean>} A promise which resolves once the Ping has been drawn and animated
*/
async handlePing(user, position, {scene, style="pulse", pull=false, zoom=1, ...pingOptions}={}) {
if ( !canvas.ready || (canvas.scene?.id !== scene) || !position ) return;
if ( pull && (user.isGM || user.isSelf) ) {
await canvas.animatePan({
x: position.x,
y: position.y,
scale: Math.min(CONFIG.Canvas.maxZoom, zoom),
duration: CONFIG.Canvas.pings.pullSpeed
});
} else if ( canvas.isOffscreen(position) ) this.drawOffscreenPing(position, { style: "arrow", user });
if ( game.settings.get("core", "photosensitiveMode") ) style = CONFIG.Canvas.pings.types.PULL;
return this.drawPing(position, { style, user, ...pingOptions });
}
/* -------------------------------------------- */
/**
* Draw a ping at the edge of the viewport, pointing to the location of an off-screen ping.
* @see {@link Ping#drawPing}
* @param {Point} position The coordinates of the off-screen ping.
* @param {PingOptions} [options] Additional options to configure how the ping is drawn.
* @param {string} [options.style=arrow] The style of ping to draw, from CONFIG.Canvas.pings.
* @param {User} [options.user] The user who pinged.
* @returns {Promise<boolean>} A promise which resolves once the Ping has been drawn and animated
*/
async drawOffscreenPing(position, {style="arrow", user, ...pingOptions}={}) {
const { ray, intersection } = this._findViewportIntersection(position);
if ( !intersection ) return;
const name = `Ping.${foundry.utils.randomID()}`;
this._offscreenPings[name] = position;
position = canvas.canvasCoordinatesFromClient(intersection);
if ( game.settings.get("core", "photosensitiveMode") ) pingOptions.rings = 1;
const animation = this.drawPing(position, { style, user, name, rotation: ray.angle, ...pingOptions });
animation.finally(() => delete this._offscreenPings[name]);
return animation;
}
/* -------------------------------------------- */
/**
* Draw a ping on the canvas.
* @see {@link Ping#animate}
* @param {Point} position The position on the canvas that was pinged.
* @param {PingOptions} [options] Additional options to configure how the ping is drawn.
* @param {string} [options.style=pulse] The style of ping to draw, from CONFIG.Canvas.pings.
* @param {User} [options.user] The user who pinged.
* @returns {Promise<boolean>} A promise which resolves once the Ping has been drawn and animated
*/
async drawPing(position, {style="pulse", user, ...pingOptions}={}) {
const cfg = CONFIG.Canvas.pings.styles[style] ?? CONFIG.Canvas.pings.styles.pulse;
const options = {
duration: cfg.duration,
color: cfg.color ?? user?.color,
size: canvas.dimensions.size * (cfg.size || 1)
};
const ping = new cfg.class(position, foundry.utils.mergeObject(options, pingOptions));
this.cursors.addChild(ping);
return ping.animate();
}
/* -------------------------------------------- */
/**
* Given off-screen coordinates, determine the closest point at the edge of the viewport to these coordinates.
* @param {Point} position The off-screen coordinates.
* @returns {{ray: Ray, intersection: LineIntersection|null}} The closest point at the edge of the viewport to these
* coordinates and a ray cast from the centre of the
* screen towards it.
* @private
*/
_findViewportIntersection(position) {
let { clientWidth: w, clientHeight: h } = document.documentElement;
// Accommodate the sidebar.
if ( !ui.sidebar._collapsed ) w -= ui.sidebar.options.width + 10;
const [cx, cy] = [w / 2, h / 2];
const ray = new Ray({x: cx, y: cy}, canvas.clientCoordinatesFromCanvas(position));
const bounds = [[0, 0, w, 0], [w, 0, w, h], [w, h, 0, h], [0, h, 0, 0]];
const intersections = bounds.map(ray.intersectSegment.bind(ray));
const intersection = intersections.find(i => i !== null);
return { ray, intersection };
}
}
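A usage sketch for the ping helpers above. drawPing and drawOffscreenPing only render locally; broadcast pings from other clients arrive through handlePing. The center-of-canvas position is arbitrary, and the style names "pulse" and "arrow" come from the defaults in the methods themselves.
// Illustrative only: ping the center of the canvas rectangle for the current user.
const position = {x: canvas.dimensions.width / 2, y: canvas.dimensions.height / 2};
const options = {user: game.user};
const animation = canvas.isOffscreen(position)
  ? canvas.controls.drawOffscreenPing(position, {style: "arrow", ...options})
  : canvas.controls.drawPing(position, {style: "pulse", ...options});
animation.then(completed => console.debug(`Ping animation finished: ${completed}`));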


@@ -0,0 +1,903 @@
/**
* @typedef {Object} RulerMeasurementSegment
* @property {Ray} ray The Ray which represents the point-to-point line segment
* @property {PreciseText} label The text object used to display a label for this segment
* @property {number} distance The measured distance of the segment
* @property {number} cost The measured cost of the segment
* @property {number} cumulativeDistance The cumulative measured distance of this segment and the segments before it
* @property {number} cumulativeCost The cumulative measured cost of this segment and the segments before it
* @property {boolean} teleport Does the segment path teleport between its two waypoints?
* @property {boolean} history Is this segment part of the measurement history?
* @property {boolean} first Is this segment the first one after the measurement history?
* @property {boolean} last Is this segment the last one?
* @property {object} animation Animation options passed to {@link TokenDocument#update}
*/
/**
* @typedef {object} RulerMeasurementHistoryWaypoint
* @property {number} x The x-coordinate of the waypoint
* @property {number} y The y-coordinate of the waypoint
* @property {boolean} teleport Was this waypoint teleported to from the previous waypoint?
* @property {number} cost The cost of having moved from the previous waypoint to this waypoint
*/
/**
* @typedef {RulerMeasurementHistoryWaypoint[]} RulerMeasurementHistory
*/
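For concreteness, the following literal (not part of the original file, values arbitrary) shows the shape the typedefs above describe; the first record seeds the movement origin with zero cost, exactly as _createMeasurementHistory constructs it below.
const exampleHistory = [
  {x: 1000, y: 1000, teleport: false, cost: 0},  // origin of the previously executed movement
  {x: 1100, y: 1000, teleport: false, cost: 5},  // cost of moving here from the previous waypoint
  {x: 1100, y: 1200, teleport: true, cost: 0}    // this waypoint was teleported to
];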
/**
* The Ruler - used to measure distances and trigger movements
*/
class Ruler extends PIXI.Container {
/**
* The Ruler constructor.
* @param {User} [user=game.user] The User for whom to construct the Ruler instance
* @param {object} [options] Additional options
* @param {ColorSource} [options.color] The color of the ruler (defaults to the color of the User)
*/
constructor(user=game.user, {color}={}) {
super();
/**
* Record the User which this Ruler references
* @type {User}
*/
this.user = user;
/**
* The ruler name - used to differentiate between players
* @type {string}
*/
this.name = `Ruler.${user.id}`;
/**
* The ruler color - by default the color of the active user
* @type {Color}
*/
this.color = Color.from(color ?? this.user.color);
/**
* The Ruler element is a Graphics instance which draws the line and points of the measured path
* @type {PIXI.Graphics}
*/
this.ruler = this.addChild(new PIXI.Graphics());
/**
* The Labels element is a Container of Text elements which label the measured path
* @type {PIXI.Container}
*/
this.labels = this.addChild(new PIXI.Container());
}
/* -------------------------------------------- */
/**
* The possible Ruler measurement states.
* @enum {number}
*/
static get STATES() {
return Ruler.#STATES;
}
static #STATES = Object.freeze({
INACTIVE: 0,
STARTING: 1,
MEASURING: 2,
MOVING: 3
});
/* -------------------------------------------- */
/**
* Is the Ruler ready to measure?
* @type {boolean}
*/
static get canMeasure() {
return (game.activeTool === "ruler") || game.keyboard.isModifierActive(KeyboardManager.MODIFIER_KEYS.CONTROL);
}
/* -------------------------------------------- */
/**
* The current destination point at the end of the measurement
* @type {Point|null}
*/
destination = null;
/* -------------------------------------------- */
/**
* The origin point of the measurement, which is the first waypoint.
* @type {Point|null}
*/
get origin() {
return this.waypoints.at(0) ?? null;
}
/* -------------------------------------------- */
/**
* This Array tracks individual waypoints along the ruler's measured path.
* The first waypoint is always the origin of the route.
* @type {Point[]}
*/
waypoints = [];
/* -------------------------------------------- */
/**
* The array of most recently computed ruler measurement segments
* @type {RulerMeasurementSegment[]}
*/
segments = [];
/* -------------------------------------------- */
/**
* The measurement history.
* @type {RulerMeasurementHistory}
*/
get history() {
return this.#history;
}
#history = [];
/* -------------------------------------------- */
/**
* The computed total distance of the Ruler.
* @type {number}
*/
totalDistance = 0;
/* -------------------------------------------- */
/**
* The computed total cost of the Ruler.
* @type {number}
*/
totalCost = 0;
/* -------------------------------------------- */
/**
* The current state of the Ruler (one of {@link Ruler.STATES}).
* @type {number}
*/
get state() {
return this._state;
}
/**
* The current state of the Ruler (one of {@link Ruler.STATES}).
* @type {number}
* @protected
*/
_state = Ruler.STATES.INACTIVE;
/* -------------------------------------------- */
/**
* Is the Ruler being actively used to measure distance?
* @type {boolean}
*/
get active() {
return this.state !== Ruler.STATES.INACTIVE;
}
/* -------------------------------------------- */
/**
* Get a GridHighlight layer for this Ruler
* @type {GridHighlight}
*/
get highlightLayer() {
return canvas.interface.grid.highlightLayers[this.name] || canvas.interface.grid.addHighlightLayer(this.name);
}
/* -------------------------------------------- */
/**
* The Token that is moved by the Ruler.
* @type {Token|null}
*/
get token() {
return this.#token;
}
#token = null;
/* -------------------------------------------- */
/* Ruler Methods */
/* -------------------------------------------- */
/**
* Clear display of the current Ruler
*/
clear() {
this._state = Ruler.STATES.INACTIVE;
this.#token = null;
this.destination = null;
this.waypoints = [];
this.segments = [];
this.#history = [];
this.totalDistance = 0;
this.totalCost = 0;
this.ruler.clear();
this.labels.removeChildren().forEach(c => c.destroy());
canvas.interface.grid.clearHighlightLayer(this.name);
}
/* -------------------------------------------- */
/**
* Measure the distance between two points and render the ruler UI to illustrate it
* @param {Point} destination The destination point to which to measure
* @param {object} [options] Additional options
* @param {boolean} [options.snap=true] Snap the destination?
* @param {boolean} [options.force=false] If not forced and the destination matches the current destination
* of this ruler, no measuring is done and nothing is returned
* @returns {RulerMeasurementSegment[]|void} The array of measured segments if measured
*/
measure(destination, {snap=true, force=false}={}) {
if ( this.state !== Ruler.STATES.MEASURING ) return;
// Compute the measurement destination, segments, and distance
const d = this._getMeasurementDestination(destination, {snap});
if ( this.destination && (d.x === this.destination.x) && (d.y === this.destination.y) && !force ) return;
this.destination = d;
this.segments = this._getMeasurementSegments();
this._computeDistance();
this._broadcastMeasurement();
// Draw the ruler graphic
this.ruler.clear();
this._drawMeasuredPath();
// Draw grid highlight
this.highlightLayer.clear();
for ( const segment of this.segments ) this._highlightMeasurementSegment(segment);
return this.segments;
}
/* -------------------------------------------- */
/**
* Get the measurement origin.
* @param {Point} point The waypoint
* @param {object} [options] Additional options
* @param {boolean} [options.snap=true] Snap the waypoint?
* @returns {Point} The measurement origin
* @protected
*/
_getMeasurementOrigin(point, {snap=true}={}) {
if ( this.token && snap ) {
if ( canvas.grid.isGridless ) return this.token.getCenterPoint();
const snapped = this.token.getSnappedPosition();
const dx = this.token.document.x - Math.round(snapped.x);
const dy = this.token.document.y - Math.round(snapped.y);
const center = canvas.grid.getCenterPoint({x: point.x - dx, y: point.y - dy});
return {x: center.x + dx, y: center.y + dy};
}
return snap ? canvas.grid.getCenterPoint(point) : {x: point.x, y: point.y};
}
/* -------------------------------------------- */
/**
* Get the destination point. By default the point is snapped to grid space centers.
* @param {Point} point The point coordinates
* @param {object} [options] Additional options
* @param {boolean} [options.snap=true] Snap the point?
* @returns {Point} The snapped destination point
* @protected
*/
_getMeasurementDestination(point, {snap=true}={}) {
return snap ? canvas.grid.getCenterPoint(point) : {x: point.x, y: point.y};
}
/* -------------------------------------------- */
/**
* Translate the waypoints and destination point of the Ruler into an array of Ray segments.
* @returns {RulerMeasurementSegment[]} The segments of the measured path
* @protected
*/
_getMeasurementSegments() {
const segments = [];
const path = this.history.concat(this.waypoints.concat([this.destination]));
for ( let i = 1; i < path.length; i++ ) {
const label = this.labels.children.at(i - 1) ?? this.labels.addChild(new PreciseText("", CONFIG.canvasTextStyle));
const ray = new Ray(path[i - 1], path[i]);
segments.push({
ray,
teleport: (i < this.history.length) ? path[i].teleport : (i === this.history.length) && (ray.distance > 0),
label,
distance: 0,
cost: 0,
cumulativeDistance: 0,
cumulativeCost: 0,
history: i <= this.history.length,
first: i === this.history.length + 1,
last: i === path.length - 1,
animation: {}
});
}
if ( this.labels.children.length > segments.length ) {
this.labels.removeChildren(segments.length).forEach(c => c.destroy());
}
return segments;
}
/* -------------------------------------------- */
/**
* Handle the start of a Ruler measurement workflow
* @param {Point} origin The origin
* @param {object} [options] Additional options
* @param {boolean} [options.snap=true] Snap the origin?
* @param {Token|null} [options.token] The token that is moved (defaults to {@link Ruler#_getMovementToken})
* @protected
*/
_startMeasurement(origin, {snap=true, token}={}) {
if ( this.state !== Ruler.STATES.INACTIVE ) return;
this.clear();
this._state = Ruler.STATES.STARTING;
this.#token = token !== undefined ? token : this._getMovementToken(origin);
this.#history = this._getMeasurementHistory() ?? [];
this._addWaypoint(origin, {snap});
canvas.hud.token.clear();
}
/* -------------------------------------------- */
/**
* Handle the conclusion of a Ruler measurement workflow
* @protected
*/
_endMeasurement() {
if ( this.state !== Ruler.STATES.MEASURING ) return;
this.clear();
this._broadcastMeasurement();
}
/* -------------------------------------------- */
/**
* Handle the addition of a new waypoint in the Ruler measurement path
* @param {Point} point The waypoint
* @param {object} [options] Additional options
* @param {boolean} [options.snap=true] Snap the waypoint?
* @protected
*/
_addWaypoint(point, {snap=true}={}) {
if ( (this.state !== Ruler.STATES.STARTING) && (this.state !== Ruler.STATES.MEASURING ) ) return;
const waypoint = this.state === Ruler.STATES.STARTING
? this._getMeasurementOrigin(point, {snap})
: this._getMeasurementDestination(point, {snap});
this.waypoints.push(waypoint);
this._state = Ruler.STATES.MEASURING;
this.measure(this.destination ?? point, {snap, force: true});
}
/* -------------------------------------------- */
/**
* Handle the removal of a waypoint in the Ruler measurement path
* @protected
*/
_removeWaypoint() {
if ( (this.state !== Ruler.STATES.STARTING) && (this.state !== Ruler.STATES.MEASURING ) ) return;
if ( (this.state === Ruler.STATES.MEASURING) && (this.waypoints.length > 1) ) {
this.waypoints.pop();
this.measure(this.destination, {snap: false, force: true});
}
else this._endMeasurement();
}
/* -------------------------------------------- */
/**
* Get the cost function to be used for Ruler measurements.
* @returns {GridMeasurePathCostFunction|void}
* @protected
*/
_getCostFunction() {}
/* -------------------------------------------- */
/**
* Compute the distance of each segment and the total distance of the measured path.
* @protected
*/
_computeDistance() {
let path = [];
if ( this.segments.length ) path.push(this.segments[0].ray.A);
for ( const segment of this.segments ) {
const {x, y} = segment.ray.B;
path.push({x, y, teleport: segment.teleport});
}
const measurements = canvas.grid.measurePath(path, {cost: this._getCostFunction()}).segments;
this.totalDistance = 0;
this.totalCost = 0;
for ( let i = 0; i < this.segments.length; i++ ) {
const segment = this.segments[i];
const distance = measurements[i].distance;
const cost = segment.history ? this.history.at(i + 1)?.cost ?? 0 : measurements[i].cost;
this.totalDistance += distance;
this.totalCost += cost;
segment.distance = distance;
segment.cost = cost;
segment.cumulativeDistance = this.totalDistance;
segment.cumulativeCost = this.totalCost;
}
}
/* -------------------------------------------- */
/**
* Get the text label for a segment of the measured path
* @param {RulerMeasurementSegment} segment
* @returns {string}
* @protected
*/
_getSegmentLabel(segment) {
if ( segment.teleport ) return "";
const units = canvas.grid.units;
let label = `${Math.round(segment.distance * 100) / 100}`;
if ( units ) label += ` ${units}`;
if ( segment.last ) {
label += ` [${Math.round(this.totalDistance * 100) / 100}`;
if ( units ) label += ` ${units}`;
label += "]";
}
return label;
}
/* -------------------------------------------- */
/**
* Draw each segment of the measured path.
* @protected
*/
_drawMeasuredPath() {
const paths = [];
let path = null;
for ( const segment of this.segments ) {
const ray = segment.ray;
if ( ray.distance !== 0 ) {
if ( segment.teleport ) path = null;
else {
if ( !path || (path.history !== segment.history) ) {
path = {points: [ray.A], history: segment.history};
paths.push(path);
}
path.points.push(ray.B);
}
}
// Draw Label
const label = segment.label;
if ( label ) {
const text = this._getSegmentLabel(segment, /** @deprecated since v12 */ this.totalDistance);
label.text = text;
label.alpha = segment.last ? 1.0 : 0.5;
label.visible = !!text && (ray.distance !== 0);
label.anchor.set(0.5, 0.5);
let {sizeX, sizeY} = canvas.grid;
if ( canvas.grid.isGridless ) sizeX = sizeY = 6; // The radius of the waypoints
const pad = 8;
const offsetX = (label.width + (2 * pad) + sizeX) / Math.abs(2 * ray.dx);
const offsetY = (label.height + (2 * pad) + sizeY) / Math.abs(2 * ray.dy);
label.position = ray.project(1 + Math.min(offsetX, offsetY));
}
}
const points = paths.map(p => p.points).flat();
// Draw segments
if ( points.length === 1 ) {
this.ruler.beginFill(0x000000, 0.5, true).drawCircle(points[0].x, points[0].y, 3).endFill();
this.ruler.beginFill(this.color, 0.25, true).drawCircle(points[0].x, points[0].y, 2).endFill();
} else {
const dashShader = new PIXI.smooth.DashLineShader();
for ( const {points, history} of paths ) {
this.ruler.lineStyle({width: 6, color: 0x000000, alpha: 0.5, shader: history ? dashShader : null,
join: PIXI.LINE_JOIN.ROUND, cap: PIXI.LINE_CAP.ROUND});
this.ruler.drawPath(points);
this.ruler.lineStyle({width: 4, color: this.color, alpha: 0.25, shader: history ? dashShader : null,
join: PIXI.LINE_JOIN.ROUND, cap: PIXI.LINE_CAP.ROUND});
this.ruler.drawPath(points);
}
}
// Draw waypoints
this.ruler.beginFill(this.color, 0.25, true).lineStyle(2, 0x000000, 0.5);
for ( const {x, y} of points ) this.ruler.drawCircle(x, y, 6);
this.ruler.endFill();
}
/* -------------------------------------------- */
/**
* Highlight the measurement required to complete the move in the minimum number of discrete spaces
* @param {RulerMeasurementSegment} segment
* @protected
*/
_highlightMeasurementSegment(segment) {
if ( segment.teleport ) return;
for ( const offset of canvas.grid.getDirectPath([segment.ray.A, segment.ray.B]) ) {
const {x: x1, y: y1} = canvas.grid.getTopLeftPoint(offset);
canvas.interface.grid.highlightPosition(this.name, {x: x1, y: y1, color: this.color});
}
}
/* -------------------------------------------- */
/* Token Movement Execution */
/* -------------------------------------------- */
/**
* Determine whether a SPACE keypress event entails a legal token movement along a measured ruler
* @returns {Promise<boolean>} An indicator for whether a token was successfully moved or not. If True the
* event should be prevented from propagating further, if False it should move on
* to other handlers.
*/
async moveToken() {
if ( this.state !== Ruler.STATES.MEASURING ) return false;
if ( game.paused && !game.user.isGM ) {
ui.notifications.warn("GAME.PausedWarning", {localize: true});
return false;
}
// Get the Token which should move
const token = this.token;
if ( !token ) return false;
// Verify whether the movement is allowed
let error;
try {
if ( !this._canMove(token) ) error = "RULER.MovementNotAllowed";
} catch(err) {
error = err.message;
}
if ( error ) {
ui.notifications.error(error, {localize: true});
return false;
}
// Animate the movement path defined by each ray segments
this._state = Ruler.STATES.MOVING;
await this._preMove(token);
await this._animateMovement(token);
await this._postMove(token);
// Clear the Ruler
this._state = Ruler.STATES.MEASURING;
this._endMeasurement();
return true;
}
/* -------------------------------------------- */
/**
* Acquire a Token, if any, which is eligible to perform a movement based on the starting point of the Ruler
* @param {Point} origin The origin of the Ruler
* @returns {Token|null} The Token that is to be moved, if any
* @protected
*/
_getMovementToken(origin) {
let tokens = canvas.tokens.controlled;
if ( !tokens.length && game.user.character ) tokens = game.user.character.getActiveTokens();
for ( const token of tokens ) {
if ( !token.visible || !token.shape ) continue;
const {x, y} = token.document;
for ( let dx = -1; dx <= 1; dx++ ) {
for ( let dy = -1; dy <= 1; dy++ ) {
if ( token.shape.contains(origin.x - x + dx, origin.y - y + dy) ) return token;
}
}
}
return null;
}
/* -------------------------------------------- */
/**
* Get the current measurement history.
* @returns {RulerMeasurementHistory|void} The current measurement history, if any
* @protected
*/
_getMeasurementHistory() {}
/* -------------------------------------------- */
/**
* Create the next measurement history from the current history and current Ruler state.
* @returns {RulerMeasurementHistory} The next measurement history
* @protected
*/
_createMeasurementHistory() {
if ( !this.segments.length ) return [];
const origin = this.segments[0].ray.A;
return this.segments.reduce((history, s) => {
if ( s.ray.distance === 0 ) return history;
history.push({x: s.ray.B.x, y: s.ray.B.y, teleport: s.teleport, cost: s.cost});
return history;
}, [{x: origin.x, y: origin.y, teleport: false, cost: 0}]);
}
/* -------------------------------------------- */
/**
* Test whether a Token is allowed to execute a measured movement path.
* @param {Token} token The Token being tested
* @returns {boolean} Whether the movement is allowed
* @throws A specific Error message used instead of returning false
* @protected
*/
_canMove(token) {
const canUpdate = token.document.canUserModify(game.user, "update");
if ( !canUpdate ) throw new Error("RULER.MovementNoPermission");
if ( token.document.locked ) throw new Error("RULER.MovementLocked");
const hasCollision = this.segments.some(s => {
return token.checkCollision(s.ray.B, {origin: s.ray.A, type: "move", mode: "any"});
});
if ( hasCollision ) throw new Error("RULER.MovementCollision");
return true;
}
/* -------------------------------------------- */
/**
* Animate piecewise Token movement along the measured segment path.
* @param {Token} token The Token being animated
* @returns {Promise<void>} A Promise which resolves once all animation is completed
* @protected
*/
async _animateMovement(token) {
const wasPaused = game.paused;
// Determine offset of the initial origin relative to the snapped Token's top-left.
// This is important to position the token relative to the ruler origin for non-1x1 tokens.
const origin = this.segments[this.history.length].ray.A;
const dx = token.document.x - origin.x;
const dy = token.document.y - origin.y;
// Iterate over each measured segment
let priorDest = undefined;
for ( const segment of this.segments ) {
if ( segment.history || (segment.ray.distance === 0) ) continue;
const r = segment.ray;
const {x, y} = token.document._source;
// Break the movement if the game is paused
if ( !wasPaused && game.paused ) break;
// Break the movement if the Token is no longer located at the prior destination (some other change has overridden it)
if ( priorDest && ((x !== priorDest.x) || (y !== priorDest.y)) ) break;
// Commit the movement and update the final resolved destination coordinates
const adjustedDestination = {x: Math.round(r.B.x + dx), y: Math.round(r.B.y + dy)};
await this._animateSegment(token, segment, adjustedDestination);
priorDest = adjustedDestination;
}
}
/* -------------------------------------------- */
/**
* Update Token position and configure its animation properties for the next leg of its animation.
* @param {Token} token The Token being updated
* @param {RulerMeasurementSegment} segment The measured segment being moved
* @param {Point} destination The adjusted destination coordinate
* @param {object} [updateOptions] Additional options to configure the `TokenDocument` update
* @returns {Promise<void>} A Promise that resolves once the animation for this segment is done
* @protected
*/
async _animateSegment(token, segment, destination, updateOptions={}) {
let name;
if ( segment.animation?.name === undefined ) name = token.animationName;
else name ||= Symbol(token.animationName);
const {x, y} = token.document._source;
await token.animate({x, y}, {name, duration: 0});
foundry.utils.mergeObject(
updateOptions,
{teleport: segment.teleport, animation: {...segment.animation, name}},
{overwrite: false}
);
await token.document.update(destination, updateOptions);
await CanvasAnimation.getAnimation(name)?.promise;
}
/* -------------------------------------------- */
/**
* A method which can be extended by a subclass of Ruler to define custom behaviors before a confirmed movement.
* @param {Token} token The Token that will be moving
* @returns {Promise<void>}
* @protected
*/
async _preMove(token) {}
/* -------------------------------------------- */
/**
* A method which can be extended by a subclass of Ruler to define custom behaviors after a confirmed movement.
* @param {Token} token The Token that finished moving
* @returns {Promise<void>}
* @protected
*/
async _postMove(token) {}
/* -------------------------------------------- */
/* Saving and Loading */
/* -------------------------------------------- */
/**
* A throttled function that broadcasts the measurement data.
* @type {function()}
*/
#throttleBroadcastMeasurement = foundry.utils.throttle(this.#broadcastMeasurement.bind(this), 100);
/* -------------------------------------------- */
/**
* Broadcast Ruler measurement.
*/
#broadcastMeasurement() {
game.user.broadcastActivity({ruler: this.active ? this._getMeasurementData() : null});
}
/* -------------------------------------------- */
/**
* Broadcast Ruler measurement if its User is the connected client.
* The broadcast is throttled to 100ms.
* @protected
*/
_broadcastMeasurement() {
if ( !this.user.isSelf || !game.user.hasPermission("SHOW_RULER") ) return;
this.#throttleBroadcastMeasurement();
}
/* -------------------------------------------- */
/**
* @typedef {object} RulerMeasurementData
* @property {number} state The state ({@link Ruler#state})
* @property {string|null} token The token ID ({@link Ruler#token})
* @property {RulerMeasurementHistory} history The measurement history ({@link Ruler#history})
* @property {Point[]} waypoints The waypoints ({@link Ruler#waypoints})
* @property {Point|null} destination The destination ({@link Ruler#destination})
*/
/**
* Package Ruler data to an object which can be serialized to a string.
* @returns {RulerMeasurementData}
* @protected
*/
_getMeasurementData() {
return foundry.utils.deepClone({
state: this.state,
token: this.token?.id ?? null,
history: this.history,
waypoints: this.waypoints,
destination: this.destination
});
}
/* -------------------------------------------- */
/**
* Update a Ruler instance using data provided through the cursor activity socket
* @param {RulerMeasurementData|null} data Ruler data with which to update the display
*/
update(data) {
if ( !data || (data.state === Ruler.STATES.INACTIVE) ) return this.clear();
this._state = data.state;
this.#token = canvas.tokens.get(data.token) ?? null;
this.#history = data.history;
this.waypoints = data.waypoints;
this.measure(data.destination, {snap: false, force: true});
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/**
* Handle the beginning of a new Ruler measurement workflow
* @see {Canvas.#onDragLeftStart}
* @param {PIXI.FederatedEvent} event The drag start event
* @protected
* @internal
*/
_onDragStart(event) {
this._startMeasurement(event.interactionData.origin, {snap: !event.shiftKey});
if ( this.token && (this.state === Ruler.STATES.MEASURING) ) this.token.document.locked = true;
}
/* -------------------------------------------- */
/**
* Handle left-click events on the Canvas during Ruler measurement.
* @see {Canvas._onClickLeft}
* @param {PIXI.FederatedEvent} event The pointer-down event
* @protected
* @internal
*/
_onClickLeft(event) {
const isCtrl = event.ctrlKey || event.metaKey;
if ( !isCtrl ) return;
this._addWaypoint(event.interactionData.origin, {snap: !event.shiftKey});
}
/* -------------------------------------------- */
/**
* Handle right-click events on the Canvas during Ruler measurement.
* @see {Canvas._onClickRight}
* @param {PIXI.FederatedEvent} event The pointer-down event
* @protected
* @internal
*/
_onClickRight(event) {
const token = this.token;
const isCtrl = event.ctrlKey || event.metaKey;
if ( isCtrl ) this._removeWaypoint();
else this._endMeasurement();
if ( this.active ) canvas.mouseInteractionManager._dragRight = false;
else {
if ( token ) token.document.locked = token.document._source.locked;
canvas.mouseInteractionManager.cancel(event);
}
}
/* -------------------------------------------- */
/**
* Continue a Ruler measurement workflow for left-mouse movements on the Canvas.
* @see {Canvas.#onDragLeftMove}
* @param {PIXI.FederatedEvent} event The mouse move event
* @protected
* @internal
*/
_onMouseMove(event) {
const destination = event.interactionData.destination;
if ( !canvas.dimensions.rect.contains(destination.x, destination.y) ) return;
this.measure(destination, {snap: !event.shiftKey});
}
/* -------------------------------------------- */
/**
* Conclude a Ruler measurement workflow by releasing the left-mouse button.
* @see {Canvas.#onDragLeftDrop}
* @param {PIXI.FederatedEvent} event The pointer-up event
* @protected
* @internal
*/
_onMouseUp(event) {
if ( !this.active ) return;
const isCtrl = event.ctrlKey || event.metaKey;
if ( isCtrl || (this.waypoints.length > 1) ) event.preventDefault();
else {
if ( this.token ) this.token.document.locked = this.token.document._source.locked;
this._endMeasurement();
canvas.mouseInteractionManager.cancel(event);
}
}
/* -------------------------------------------- */
/**
* Move the Token along the measured path when the move key is pressed.
* @param {KeyboardEventContext} context
* @protected
* @internal
*/
_onMoveKeyDown(context) {
if ( this.token ) this.token.document.locked = this.token.document._source.locked;
// noinspection ES6MissingAwait
this.moveToken();
if ( this.state !== Ruler.STATES.MEASURING ) canvas.mouseInteractionManager.cancel();
}
}
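A hedged sketch of a system-specific Ruler subclass using the protected hooks above. The cost-function signature (from, to, distance) => number is an assumption about GridMeasurePathCostFunction, and the class name and label format are illustrative; only CONFIG.Canvas.rulerClass, which ControlsLayer#drawRulers reads when building rulers, is taken from the surrounding code.
class ExampleRuler extends Ruler {
  /** @override */
  _getCostFunction() {
    // A real implementation would add terrain or status modifiers; here cost simply equals distance.
    return (from, to, distance) => distance;
  }

  /** @override */
  _getSegmentLabel(segment) {
    const units = canvas.grid.units ? ` ${canvas.grid.units}` : "";
    return `${Math.round(segment.cumulativeDistance * 100) / 100}${units} total`;
  }
}

// Registration, typically from a system or world script:
Hooks.once("init", () => CONFIG.Canvas.rulerClass = ExampleRuler);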


@@ -0,0 +1,81 @@
/**
* A layer of background alteration effects which change the appearance of the primary group render texture.
* @category - Canvas
*/
class CanvasBackgroundAlterationEffects extends CanvasLayer {
constructor() {
super();
/**
* A collection of effects which provide background vision alterations.
* @type {PIXI.Container}
*/
this.vision = this.addChild(new PIXI.Container());
this.vision.sortableChildren = true;
/**
* A collection of effects which provide background preferred vision alterations.
* @type {PIXI.Container}
*/
this.visionPreferred = this.addChild(new PIXI.Container());
this.visionPreferred.sortableChildren = true;
/**
* A collection of effects which provide other background alterations.
* @type {PIXI.Container}
*/
this.lighting = this.addChild(new PIXI.Container());
this.lighting.sortableChildren = true;
}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
// Add the background vision filter
const vf = this.vision.filter = new VoidFilter();
vf.blendMode = PIXI.BLEND_MODES.NORMAL;
vf.enabled = false;
this.vision.filters = [vf];
this.vision.filterArea = canvas.app.renderer.screen;
// Add the background preferred vision filter
const vpf = this.visionPreferred.filter = new VoidFilter();
vpf.blendMode = PIXI.BLEND_MODES.NORMAL;
vpf.enabled = false;
this.visionPreferred.filters = [vpf];
this.visionPreferred.filterArea = canvas.app.renderer.screen;
// Add the background lighting filter
const maskingFilter = CONFIG.Canvas.visualEffectsMaskingFilter;
const lf = this.lighting.filter = maskingFilter.create({
visionTexture: canvas.masks.vision.renderTexture,
darknessLevelTexture: canvas.effects.illumination.renderTexture,
mode: maskingFilter.FILTER_MODES.BACKGROUND
});
lf.blendMode = PIXI.BLEND_MODES.NORMAL;
this.lighting.filters = [lf];
this.lighting.filterArea = canvas.app.renderer.screen;
canvas.effects.visualEffectsMaskingFilters.add(lf);
}
/* -------------------------------------------- */
/** @override */
async _tearDown(options) {
canvas.effects.visualEffectsMaskingFilters.delete(this.lighting?.filter);
this.clear();
}
/* -------------------------------------------- */
/**
* Clear background alteration effects vision and lighting containers
*/
clear() {
this.vision.removeChildren();
this.visionPreferred.removeChildren();
this.lighting.removeChildren();
}
}
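The vision, visionPreferred, and lighting members above are ordinary PIXI containers, so consumers add their alteration meshes as children and toggle the per-container filter. In this sketch the mount point canvas.effects.background is an assumption (only canvas.effects.illumination is referenced by name in this commit), and enabling the VoidFilter based on child count illustrates the intent rather than the engine's exact logic.
const background = canvas.effects.background;
if ( background ) {
  // Vision-alteration meshes are contributed by active vision modes; composite them only when present.
  background.vision.filter.enabled = background.vision.children.length > 0;
  background.visionPreferred.filter.enabled = background.visionPreferred.children.length > 0;
}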


@@ -0,0 +1,59 @@
/**
* A CanvasLayer for displaying coloration visual effects
* @category - Canvas
*/
class CanvasColorationEffects extends CanvasLayer {
constructor() {
super();
this.sortableChildren = true;
this.#background = this.addChild(new PIXI.LegacyGraphics());
this.#background.zIndex = -Infinity;
}
/**
* Temporary solution for the "white scene" bug (foundryvtt/foundryvtt#9957).
* @type {PIXI.LegacyGraphics}
*/
#background;
/**
* The filter used to mask visual effects on this layer
* @type {VisualEffectsMaskingFilter}
*/
filter;
/* -------------------------------------------- */
/**
* Clear coloration effects container
*/
clear() {
this.removeChildren();
this.addChild(this.#background);
}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
const maskingFilter = CONFIG.Canvas.visualEffectsMaskingFilter;
this.filter = maskingFilter.create({
visionTexture: canvas.masks.vision.renderTexture,
darknessLevelTexture: canvas.effects.illumination.renderTexture,
mode: maskingFilter.FILTER_MODES.COLORATION
});
this.filter.blendMode = PIXI.BLEND_MODES.ADD;
this.filterArea = canvas.app.renderer.screen;
this.filters = [this.filter];
canvas.effects.visualEffectsMaskingFilters.add(this.filter);
this.#background.clear().beginFill().drawShape(canvas.dimensions.rect).endFill();
}
/* -------------------------------------------- */
/** @override */
async _tearDown(options) {
canvas.effects.visualEffectsMaskingFilters.delete(this.filter);
this.#background.clear();
}
}


@@ -0,0 +1,29 @@
/**
* A CanvasLayer for displaying darkness visual effects.
* @category - Canvas
*/
class CanvasDarknessEffects extends CanvasLayer {
constructor() {
super();
this.sortableChildren = true;
}
/* -------------------------------------------- */
/**
* Clear the darkness effects container
*/
clear() {
this.removeChildren();
}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
this.filter = VoidFilter.create();
this.filter.blendMode = PIXI.BLEND_MODES.NORMAL;
this.filterArea = canvas.app.renderer.screen;
this.filters = [this.filter];
}
}


@@ -0,0 +1,253 @@
/**
* A CanvasLayer for displaying illumination visual effects
* @category - Canvas
*/
class CanvasIlluminationEffects extends CanvasLayer {
constructor() {
super();
this.#initialize();
}
/**
* The filter used to mask visual effects on this layer
* @type {VisualEffectsMaskingFilter}
*/
filter;
/**
* The container holding the lights.
* @type {PIXI.Container}
*/
lights = new PIXI.Container();
/**
* A minimalist texture that holds the background color.
* @type {PIXI.Texture}
*/
backgroundColorTexture;
/**
* The background color rgb array.
* @type {number[]}
*/
#backgroundColorRGB;
/**
* The base line mesh.
* @type {SpriteMesh}
*/
baselineMesh = new SpriteMesh();
/**
* The cached container holding the illumination meshes.
* @type {CachedContainer}
*/
darknessLevelMeshes = new DarknessLevelContainer();
/* -------------------------------------------- */
/**
* Whether a dynamic darkness level is active on this scene.
* @returns {boolean}
*/
get hasDynamicDarknessLevel() {
return this.darknessLevelMeshes.children.length > 0;
}
/**
* The illumination render texture.
* @returns {PIXI.RenderTexture}
*/
get renderTexture() {
return this.darknessLevelMeshes.renderTexture;
}
/* -------------------------------------------- */
/**
* Initialize the layer.
*/
#initialize() {
// Configure background color texture
this.backgroundColorTexture = this._createBackgroundColorTexture();
// Configure the base line mesh
this.baselineMesh.setShaderClass(BaselineIlluminationSamplerShader);
this.baselineMesh.texture = this.darknessLevelMeshes.renderTexture;
// Add children
canvas.masks.addChild(this.darknessLevelMeshes); // Region meshes cached container
this.addChild(this.lights); // Light and vision illumination
// Add baseline rendering for light
const originalRender = this.lights.render;
const baseMesh = this.baselineMesh;
this.lights.render = renderer => {
baseMesh.render(renderer);
originalRender.call(this.lights, renderer);
};
// Configure
this.lights.sortableChildren = true;
}
/* -------------------------------------------- */
/**
* Set the illumination background color.
* @param {number} color
*/
set backgroundColor(color) {
const cb = this.#backgroundColorRGB = Color.from(color).rgb;
if ( this.filter ) this.filter.uniforms.replacementColor = cb;
this.backgroundColorTexture.baseTexture.resource.data.set(cb);
this.backgroundColorTexture.baseTexture.resource.update();
}
/* -------------------------------------------- */
/**
* Clear illumination effects container
*/
clear() {
this.lights.removeChildren();
}
/* -------------------------------------------- */
/**
* Invalidate the cached container state to trigger a render pass.
* @param {boolean} [force=false] Force cached container invalidation?
*/
invalidateDarknessLevelContainer(force=false) {
// If global light is enabled, the darkness level texture is affecting the vision mask
if ( canvas.environment.globalLightSource.active ) canvas.masks.vision.renderDirty = true;
if ( !(this.hasDynamicDarknessLevel || force) ) return;
this.darknessLevelMeshes.renderDirty = true;
// Sort by adjusted darkness level in descending order such that the final darkness level
// at a point is the minimum of the adjusted darkness levels
const compare = (a, b) => b.shader.darknessLevel - a.shader.darknessLevel;
this.darknessLevelMeshes.children.sort(compare);
canvas.visibility.vision.light.global.meshes.children.sort(compare);
}
/* -------------------------------------------- */
/**
* Create the background color texture used by illumination point source meshes.
* 1x1 single pixel texture.
* @returns {PIXI.Texture} The background color texture.
* @protected
*/
_createBackgroundColorTexture() {
return PIXI.Texture.fromBuffer(new Float32Array(3), 1, 1, {
type: PIXI.TYPES.FLOAT,
format: PIXI.FORMATS.RGB,
wrapMode: PIXI.WRAP_MODES.CLAMP,
scaleMode: PIXI.SCALE_MODES.NEAREST,
mipmap: PIXI.MIPMAP_MODES.OFF
});
}
/* -------------------------------------------- */
/** @override */
render(renderer) {
// Prior blend mode is reinitialized. The first render into PointSourceMesh will use the background color texture.
PointSourceMesh._priorBlendMode = undefined;
PointSourceMesh._currentTexture = this.backgroundColorTexture;
super.render(renderer);
}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
const maskingFilter = CONFIG.Canvas.visualEffectsMaskingFilter;
this.darknessLevel = canvas.darknessLevel;
this.filter = maskingFilter.create({
visionTexture: canvas.masks.vision.renderTexture,
darknessLevelTexture: canvas.effects.illumination.renderTexture,
mode: maskingFilter.FILTER_MODES.ILLUMINATION
});
this.filter.blendMode = PIXI.BLEND_MODES.MULTIPLY;
this.filterArea = canvas.app.renderer.screen;
this.filters = [this.filter];
canvas.effects.visualEffectsMaskingFilters.add(this.filter);
}
/* -------------------------------------------- */
/** @override */
async _tearDown(options) {
canvas.effects.visualEffectsMaskingFilters.delete(this.filter);
this.clear();
}
/* -------------------------------------------- */
/* Deprecations */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
updateGlobalLight() {
const msg = "CanvasIlluminationEffects#updateGlobalLight has been deprecated.";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return false;
}
/**
* @deprecated since v12
* @ignore
*/
background() {
const msg = "CanvasIlluminationEffects#background is now obsolete.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return null;
}
/**
* @deprecated since v12
* @ignore
*/
get globalLight() {
const msg = "CanvasIlluminationEffects#globalLight has been deprecated without replacement. Check the" +
"canvas.environment.globalLightSource.active instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return canvas.environment.globalLightSource.active;
}
}
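A short sketch of the two public knobs exposed above, assuming a ready canvas: the backgroundColor setter updates both the masking-filter uniform and the 1x1 background texture, and invalidateDarknessLevelContainer(true) forces the darkness-level texture to re-render. The color value is arbitrary, and in normal operation Foundry drives backgroundColor from the scene environment rather than by hand.
const illumination = canvas.effects.illumination;
illumination.backgroundColor = 0x242448;             // Re-tints the baseline illumination background
illumination.invalidateDarknessLevelContainer(true); // Force a re-render even without dynamic darkness meshes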
/**
* Cached container used for dynamic darkness level. Display objects (of any type) added to this cached container will
* contribute to computing the darkness level of the masked area. Only the red channel is utilized, which corresponds
* to the desired darkness level. Other channels are ignored.
*/
class DarknessLevelContainer extends CachedContainer {
constructor(...args) {
super(...args);
this.autoRender = false;
this.on("childAdded", this.#onChildChange);
this.on("childRemoved", this.#onChildChange);
}
/** @override */
static textureConfiguration = {
scaleMode: PIXI.SCALE_MODES.NEAREST,
format: PIXI.FORMATS.RED,
multisample: PIXI.MSAA_QUALITY.NONE,
mipmap: PIXI.MIPMAP_MODES.OFF
};
/**
* Called when a display object is added or removed from this container.
*/
#onChildChange() {
this.autoRender = this.children.length > 0;
this.renderDirty = true;
canvas.perception.update({refreshVisionSources: true, refreshLightSources: true});
}
}
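/**
 * Usage sketch: any display object rendered into a DarknessLevelContainer contributes its red channel as a
 * darkness level. A minimal, hypothetical illustration; the container reference and the rectangle values are
 * assumptions for demonstration only.
 * @example
 * const darknessMask = new DarknessLevelContainer();
 * const region = new PIXI.LegacyGraphics();
 * region.beginFill(0x800000);                        // red = 0x80 encodes a darkness level of roughly 0.5
 * region.drawRect(1000, 1000, 2000, 2000).endFill();
 * darknessMask.addChild(region);                     // childAdded enables autoRender and triggers a perception update
 */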

View File

@@ -0,0 +1,928 @@
// noinspection JSPrimitiveTypeWrapperUsage
/**
* The visibility Layer which implements dynamic vision, lighting, and fog of war
* This layer uses an event-driven workflow to perform the minimal required calculation in response to changes.
* @see {@link PointSource}
*
* ### Hook Events
* - {@link hookEvents.visibilityRefresh}
*
* @category - Canvas
*/
class CanvasVisibility extends CanvasLayer {
/**
* The currently revealed vision.
* @type {CanvasVisionContainer}
*/
vision;
/**
* The exploration container which tracks exploration progress.
* @type {PIXI.Container}
*/
explored;
/**
* The optional visibility overlay sprite that should be drawn instead of the unexplored color in the fog of war.
* @type {PIXI.Sprite}
*/
visibilityOverlay;
/**
* The graphics used to render cached light sources.
* @type {PIXI.LegacyGraphics}
*/
#cachedLights = new PIXI.LegacyGraphics();
/**
* Matrix used for visibility rendering transformation.
* @type {PIXI.Matrix}
*/
#renderTransform = new PIXI.Matrix();
/**
* Dimensions of the visibility overlay texture and its base texture, used for tiling the texture into the visibility filter.
* @type {number[]}
*/
#visibilityOverlayDimensions;
/**
* The active vision source data object
* @type {{source: VisionSource|null, activeLightingOptions: object}}
*/
visionModeData = {
source: undefined,
activeLightingOptions: {}
};
/**
* Define whether each lighting layer is enabled, required, or disabled by this vision mode.
* The value for each lighting channel is a number in LIGHTING_VISIBILITY
* @type {{illumination: number, background: number, coloration: number,
* darkness: number, any: boolean}}
*/
lightingVisibility = {
background: VisionMode.LIGHTING_VISIBILITY.ENABLED,
illumination: VisionMode.LIGHTING_VISIBILITY.ENABLED,
coloration: VisionMode.LIGHTING_VISIBILITY.ENABLED,
darkness: VisionMode.LIGHTING_VISIBILITY.ENABLED,
any: true
};
/**
* The map with the active cached light source IDs as keys and their update IDs as values.
* @type {Map<string, number>}
*/
#cachedLightSourceStates = new Map();
/**
* The maximum allowable visibility texture size.
* @type {number}
*/
static #MAXIMUM_VISIBILITY_TEXTURE_SIZE = 4096;
/* -------------------------------------------- */
/* Canvas Visibility Properties */
/* -------------------------------------------- */
/**
* A status flag for whether the layer initialization workflow has succeeded.
* @type {boolean}
*/
get initialized() {
return this.#initialized;
}
#initialized = false;
/* -------------------------------------------- */
/**
* Indicates whether containment filtering is required when rendering vision into a texture.
* @type {boolean}
* @internal
*/
get needsContainment() {
return this.#needsContainment;
}
#needsContainment = false;
/* -------------------------------------------- */
/**
* Does the currently viewed Scene support Token field of vision?
* @type {boolean}
*/
get tokenVision() {
return canvas.scene.tokenVision;
}
/* -------------------------------------------- */
/**
* The configured options used for the saved fog-of-war texture.
* @type {FogTextureConfiguration}
*/
get textureConfiguration() {
return this.#textureConfiguration;
}
/** @private */
#textureConfiguration;
/* -------------------------------------------- */
/**
* Optional overrides for exploration sprite dimensions.
* @type {PIXI.Rectangle}
*/
set explorationRect(rect) {
this.#explorationRect = rect;
}
/** @private */
#explorationRect;
/* -------------------------------------------- */
/* Layer Initialization */
/* -------------------------------------------- */
/**
* Initialize all Token vision sources which are present on this layer
*/
initializeSources() {
canvas.effects.toggleMaskingFilters(false); // Deactivate vision masking before destroying textures
for ( const source of canvas.effects.visionSources ) source.initialize();
Hooks.callAll("initializeVisionSources", canvas.effects.visionSources);
}
/* -------------------------------------------- */
/**
* Initialize the vision mode.
*/
initializeVisionMode() {
this.visionModeData.source = this.#getSingleVisionSource();
this.#configureLightingVisibility();
this.#updateLightingPostProcessing();
this.#updateTintPostProcessing();
Hooks.callAll("initializeVisionMode", this);
}
/* -------------------------------------------- */
/**
* Identify whether there is one singular vision source active (excluding previews).
* @returns {VisionSource|null} A singular source, or null
*/
#getSingleVisionSource() {
return canvas.effects.visionSources.filter(s => s.active).sort((a, b) =>
(a.isPreview - b.isPreview)
|| (a.isBlinded - b.isBlinded)
|| (b.visionMode.perceivesLight - a.visionMode.perceivesLight)
).at(0) ?? null;
}
/* -------------------------------------------- */
/**
* Configure the visibility of individual lighting channels based on the currently active vision source(s).
*/
#configureLightingVisibility() {
const vs = this.visionModeData.source;
const vm = vs?.visionMode;
const lv = this.lightingVisibility;
const lvs = VisionMode.LIGHTING_VISIBILITY;
Object.assign(lv, {
background: CanvasVisibility.#requireBackgroundShader(vm),
illumination: vm?.lighting.illumination.visibility ?? lvs.ENABLED,
coloration: vm?.lighting.coloration.visibility ?? lvs.ENABLED,
darkness: vm?.lighting.darkness.visibility ?? lvs.ENABLED
});
lv.any = (lv.background + lv.illumination + lv.coloration + lv.darkness) > VisionMode.LIGHTING_VISIBILITY.DISABLED;
}
/* -------------------------------------------- */
/**
* Update the lighting according to vision mode options.
*/
#updateLightingPostProcessing() {
// Check whether lighting configuration has changed
const lightingOptions = this.visionModeData.source?.visionMode.lighting || {};
const diffOpt = foundry.utils.diffObject(this.visionModeData.activeLightingOptions, lightingOptions);
this.visionModeData.activeLightingOptions = lightingOptions;
if ( foundry.utils.isEmpty(lightingOptions) ) canvas.effects.resetPostProcessingFilters();
if ( foundry.utils.isEmpty(diffOpt) ) return;
// Update post-processing filters and refresh lighting
const modes = CONFIG.Canvas.visualEffectsMaskingFilter.FILTER_MODES;
canvas.effects.resetPostProcessingFilters();
for ( const layer of ["background", "illumination", "coloration"] ) {
if ( layer in lightingOptions ) {
const options = lightingOptions[layer];
const filterMode = modes[layer.toUpperCase()];
canvas.effects.activatePostProcessingFilters(filterMode, options.postProcessingModes, options.uniforms);
}
}
}
/* -------------------------------------------- */
/**
* Refresh the tint of the post processing filters.
*/
#updateTintPostProcessing() {
// Update tint
const activeOptions = this.visionModeData.activeLightingOptions;
const singleSource = this.visionModeData.source;
const color = singleSource?.visionModeOverrides.colorRGB;
for ( const f of canvas.effects.visualEffectsMaskingFilters ) {
const defaultTint = f.constructor.defaultUniforms.tint;
const tintedLayer = activeOptions[f.uniforms.mode]?.uniforms?.tint;
f.uniforms.tint = tintedLayer ? (color ?? (tintedLayer ?? defaultTint)) : defaultTint;
}
}
/* -------------------------------------------- */
/**
* Return the visibility requirement of the lighting background shader.
* @param {VisionMode} visionMode The single Vision Mode active at the moment (if any).
* @returns {VisionMode.LIGHTING_VISIBILITY}
*/
static #requireBackgroundShader(visionMode) {
// Do we need to force the lighting background shader? Force when:
// - Multiple vision modes are active with a mix of preferred and non-preferred visions
// - Or when some vision mode requires the background shader
const lvs = VisionMode.LIGHTING_VISIBILITY;
let preferred = false;
let nonPreferred = false;
for ( const vs of canvas.effects.visionSources ) {
if ( !vs.active ) continue;
const vm = vs.visionMode;
if ( vm.lighting.background.visibility === lvs.REQUIRED ) return lvs.REQUIRED;
if ( vm.vision.preferred ) preferred = true;
else nonPreferred = true;
}
if ( preferred && nonPreferred ) return lvs.REQUIRED;
return visionMode?.lighting.background.visibility ?? lvs.ENABLED;
}
/* -------------------------------------------- */
/* Layer Rendering */
/* -------------------------------------------- */
/** @override */
async _draw(options) {
this.#configureVisibilityTexture();
// Initialize fog
await canvas.fog.initialize();
// Create the vision container and attach it to the CanvasVisionMask cached container
this.vision = this.#createVision();
canvas.masks.vision.attachVision(this.vision);
this.#cacheLights(true);
// Exploration container
this.explored = this.addChild(this.#createExploration());
// Loading the fog overlay
await this.#drawVisibilityOverlay();
// Apply the visibility filter with a normal blend
this.filter = CONFIG.Canvas.visibilityFilter.create({
unexploredColor: canvas.colors.fogUnexplored.rgb,
exploredColor: canvas.colors.fogExplored.rgb,
backgroundColor: canvas.colors.background.rgb,
visionTexture: canvas.masks.vision.renderTexture,
primaryTexture: canvas.primary.renderTexture,
overlayTexture: this.visibilityOverlay?.texture ?? null,
dimensions: this.#visibilityOverlayDimensions,
hasOverlayTexture: !!this.visibilityOverlay?.texture.valid
}, canvas.visibilityOptions);
this.filter.blendMode = PIXI.BLEND_MODES.NORMAL;
this.filters = [this.filter];
this.filterArea = canvas.app.screen;
// Add the visibility filter to the canvas blur filter list
canvas.addBlurFilter(this.filter);
this.visible = false;
this.#initialized = true;
}
/* -------------------------------------------- */
/**
* Create the exploration container with its exploration sprite.
* @returns {PIXI.Container} The newly created exploration container.
*/
#createExploration() {
const dims = canvas.dimensions;
const explored = new PIXI.Container();
const explorationSprite = explored.addChild(canvas.fog.sprite);
const exr = this.#explorationRect;
// Check if custom exploration dimensions are required
if ( exr ) {
explorationSprite.position.set(exr.x, exr.y);
explorationSprite.width = exr.width;
explorationSprite.height = exr.height;
}
// Otherwise, use the standard behavior
else {
explorationSprite.position.set(dims.sceneX, dims.sceneY);
explorationSprite.width = this.#textureConfiguration.width;
explorationSprite.height = this.#textureConfiguration.height;
}
return explored;
}
/* -------------------------------------------- */
/**
* Create the vision container and all its children.
* @returns {PIXI.Container} The created vision container.
*/
#createVision() {
const dims = canvas.dimensions;
const vision = new PIXI.Container();
// Add a void containment filter, which is necessary when committing fog to a texture for dynamic illumination
vision.containmentFilter = VoidFilter.create();
vision.containmentFilter.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
vision.containmentFilter.enabled = false; // Disabled by default, used only when writing on textures
vision.filters = [vision.containmentFilter];
// Areas visible because of light sources and light perception
vision.light = vision.addChild(new PIXI.Container());
// The global light container, which holds darkness level meshes for dynamic illumination
vision.light.global = vision.light.addChild(new PIXI.Container());
vision.light.global.source = vision.light.global.addChild(new PIXI.LegacyGraphics());
vision.light.global.meshes = vision.light.global.addChild(new PIXI.Container());
vision.light.global.source.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
// The light sources
vision.light.sources = vision.light.addChild(new PIXI.LegacyGraphics());
vision.light.sources.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
// Preview container, which is not cached
vision.light.preview = vision.light.addChild(new PIXI.LegacyGraphics());
vision.light.preview.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
// The cached lights, used to avoid redrawing too much geometry
vision.light.cached = vision.light.addChild(new SpriteMesh(Canvas.getRenderTexture({
textureConfiguration: this.textureConfiguration
})));
vision.light.cached.position.set(dims.sceneX, dims.sceneY);
vision.light.cached.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
// The masked area
vision.light.mask = vision.light.addChild(new PIXI.LegacyGraphics());
vision.light.mask.preview = vision.light.mask.addChild(new PIXI.LegacyGraphics());
// Areas visible because of FOV of vision sources
vision.sight = vision.addChild(new PIXI.LegacyGraphics());
vision.sight.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
vision.sight.preview = vision.sight.addChild(new PIXI.LegacyGraphics());
vision.sight.preview.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
// Eraser for darkness sources
vision.darkness = vision.addChild(new PIXI.LegacyGraphics());
vision.darkness.blendMode = PIXI.BLEND_MODES.ERASE;
/** @deprecated since v12 */
Object.defineProperty(vision, "base", {
get() {
const msg = "CanvasVisibility#vision#base is deprecated in favor of CanvasVisibility#vision#light#preview.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this.fov.preview;
}
});
/** @deprecated since v12 */
Object.defineProperty(vision, "fov", {
get() {
const msg = "CanvasVisibility#vision#fov is deprecated in favor of CanvasVisibility#vision#light.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this.light;
}
});
/** @deprecated since v12 */
Object.defineProperty(vision, "los", {
get() {
const msg = "CanvasVisibility#vision#los is deprecated in favor of CanvasVisibility#vision#light#mask.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this.light.mask;
}
});
/** @deprecated since v12 */
Object.defineProperty(vision.light, "lights", {
get: () => {
const msg = "CanvasVisibility#vision#fov#lights is deprecated without replacement.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this.#cachedLights;
}
});
/** @deprecated since v12 */
Object.defineProperty(vision.light, "lightsSprite", {
get() {
const msg = "CanvasVisibility#vision#fov#lightsSprite is deprecated in favor of CanvasVisibility#vision#light#cached.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this.cached;
}
});
/** @deprecated since v12 */
Object.defineProperty(vision.light, "tokens", {
get() {
const msg = "CanvasVisibility#vision#tokens is deprecated in favor of CanvasVisibility#vision#light.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this;
}
});
return vision;
}
/* -------------------------------------------- */
/** @inheritDoc */
async _tearDown(options) {
canvas.masks.vision.detachVision();
this.#cachedLightSourceStates.clear();
await canvas.fog.clear();
// Performs deep cleaning of the detached vision container
this.vision.destroy({children: true, texture: true, baseTexture: true});
this.vision = undefined;
canvas.effects.visionSources.clear();
this.#initialized = false;
return super._tearDown(options);
}
/* -------------------------------------------- */
/**
* Update the display of the sight layer.
* Organize sources into rendering queues and draw lighting containers for each source
*/
refresh() {
if ( !this.initialized ) return;
// Refresh visibility
if ( this.tokenVision ) {
this.refreshVisibility();
this.visible = canvas.effects.visionSources.some(s => s.active) || !game.user.isGM;
}
else this.visible = false;
// Update visibility of objects
this.restrictVisibility();
}
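/**
 * In practice this refresh is normally scheduled through the perception manager rather than called directly,
 * for example with the same flags used elsewhere in this file:
 * @example
 * canvas.perception.update({refreshVisionSources: true, refreshLightSources: true});
 */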
/* -------------------------------------------- */
/**
* Update vision (and fog if necessary)
*/
refreshVisibility() {
canvas.masks.vision.renderDirty = true;
if ( !this.vision ) return;
const vision = this.vision;
// Begin fills
const fillColor = 0xFF0000;
this.#cachedLights.beginFill(fillColor);
vision.light.sources.clear().beginFill(fillColor);
vision.light.preview.clear().beginFill(fillColor);
vision.light.global.source.clear().beginFill(fillColor);
vision.light.mask.clear().beginFill();
vision.light.mask.preview.clear().beginFill();
vision.sight.clear().beginFill(fillColor);
vision.sight.preview.clear().beginFill(fillColor);
vision.darkness.clear().beginFill(fillColor);
// Checking if the lights cache needs a full redraw
const redrawCache = this.#checkCachedLightSources();
if ( redrawCache ) this.#cachedLightSourceStates.clear();
// A flag to know if the lights cache render texture needs to be refreshed
let refreshCache = redrawCache;
// A flag to know if fog needs to be refreshed.
let commitFog = false;
// Iterating over each active light source
for ( const [sourceId, lightSource] of canvas.effects.lightSources.entries() ) {
// Ignoring inactive sources or global light (which is rendered using the global light mesh)
if ( !lightSource.hasActiveLayer || (lightSource instanceof foundry.canvas.sources.GlobalLightSource) ) continue;
// Is the light source providing vision?
if ( lightSource.data.vision ) {
if ( lightSource.isPreview ) vision.light.mask.preview.drawShape(lightSource.shape);
else {
vision.light.mask.drawShape(lightSource.shape);
commitFog = true;
}
}
// Update the cached state. Skip if already cached.
const isCached = this.#shouldCacheLight(lightSource);
if ( isCached ) {
if ( this.#cachedLightSourceStates.has(sourceId) ) continue;
this.#cachedLightSourceStates.set(sourceId, lightSource.updateId);
refreshCache = true;
}
// Draw the light source
if ( isCached ) this.#cachedLights.drawShape(lightSource.shape);
else if ( lightSource.isPreview ) vision.light.preview.drawShape(lightSource.shape);
else vision.light.sources.drawShape(lightSource.shape);
}
// Refresh the light source cache if necessary.
// Note: With a full redraw, we need to refresh the texture cache, even if no elements are present
if ( refreshCache ) this.#cacheLights(redrawCache);
// Refresh global/dynamic illumination with global source and illumination meshes
this.#refreshDynamicIllumination();
// Iterating over each active vision source
for ( const visionSource of canvas.effects.visionSources ) {
if ( !visionSource.hasActiveLayer ) continue;
const blinded = visionSource.isBlinded;
// Draw vision FOV
if ( (visionSource.radius > 0) && !blinded && !visionSource.isPreview ) {
vision.sight.drawShape(visionSource.shape);
commitFog = true;
}
else vision.sight.preview.drawShape(visionSource.shape);
// Draw light perception
if ( (visionSource.lightRadius > 0) && !blinded && !visionSource.isPreview ) {
vision.light.mask.drawShape(visionSource.light);
commitFog = true;
}
else vision.light.mask.preview.drawShape(visionSource.light);
}
// Call visibility refresh hook
Hooks.callAll("visibilityRefresh", this);
// End fills
vision.light.sources.endFill();
vision.light.preview.endFill();
vision.light.global.source.endFill();
vision.light.mask.endFill();
vision.light.mask.preview.endFill();
vision.sight.endFill();
vision.sight.preview.endFill();
vision.darkness.endFill();
// Update fog of war texture (if fow is activated)
if ( commitFog ) canvas.fog.commit();
}
/* -------------------------------------------- */
/**
* Reset the exploration container with the fog sprite
*/
resetExploration() {
if ( !this.explored ) return;
this.explored.destroy();
this.explored = this.addChild(this.#createExploration());
}
/* -------------------------------------------- */
/**
* Refresh the dynamic illumination with darkness level meshes and global light.
* Determine whether a containment filter is needed when vision is rendered into a texture.
*/
#refreshDynamicIllumination() {
// Reset filter containment
this.#needsContainment = false;
// Setting global light source container visibility
const globalLightSource = canvas.environment.globalLightSource;
const v = this.vision.light.global.visible = globalLightSource.active;
if ( !v ) return;
const {min, max} = globalLightSource.data.darkness;
// Draw the global source if necessary
const darknessLevel = canvas.environment.darknessLevel;
if ( (darknessLevel >= min) && (darknessLevel <= max) ) {
this.vision.light.global.source.drawShape(globalLightSource.shape);
}
// Then draw dynamic illumination meshes
const illuminationMeshes = this.vision.light.global.meshes.children;
for ( const mesh of illuminationMeshes ) {
const darknessLevel = mesh.shader.darknessLevel;
if ( (darknessLevel < min) || (darknessLevel > max)) {
mesh.blendMode = PIXI.BLEND_MODES.ERASE;
this.#needsContainment = true;
}
else mesh.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
}
}
/* -------------------------------------------- */
/**
* Returns true if the light source should be cached.
* @param {LightSource} lightSource The light source
* @returns {boolean}
*/
#shouldCacheLight(lightSource) {
return !(lightSource.object instanceof Token) && !lightSource.isPreview;
}
/* -------------------------------------------- */
/**
* Check if the cached light sources need to be fully redrawn.
* @returns {boolean} True if a full redraw is necessary.
*/
#checkCachedLightSources() {
for ( const [sourceId, updateId] of this.#cachedLightSourceStates ) {
const lightSource = canvas.effects.lightSources.get(sourceId);
if ( !lightSource || !lightSource.active || !this.#shouldCacheLight(lightSource)
|| (updateId !== lightSource.updateId) ) return true;
}
return false;
}
/* -------------------------------------------- */
/**
* Render `this.#cachedLights` into `this.vision.light.cached.texture`.
* Note: A full cache redraw needs the texture to be cleared.
* @param {boolean} clearTexture Whether the texture needs to be cleared before rendering.
*/
#cacheLights(clearTexture) {
const dims = canvas.dimensions;
this.#renderTransform.tx = -dims.sceneX;
this.#renderTransform.ty = -dims.sceneY;
this.#cachedLights.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
canvas.app.renderer.render(this.#cachedLights, {
renderTexture: this.vision.light.cached.texture,
clear: clearTexture,
transform: this.#renderTransform
});
this.#cachedLights.clear();
}
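/**
 * The caching above follows the standard PIXI render-to-texture pattern: draw geometry once into an off-screen
 * RenderTexture, then display that texture through a sprite or mesh. A stripped-down, generic sketch (sizes and
 * shapes are illustrative):
 * @example
 * const rt = PIXI.RenderTexture.create({width: 1024, height: 1024});
 * const gfx = new PIXI.LegacyGraphics().beginFill(0xFF0000).drawCircle(512, 512, 300).endFill();
 * canvas.app.renderer.render(gfx, {renderTexture: rt, clear: true});
 * const cached = new PIXI.Sprite(rt);                // cheap to display every frame compared to the raw geometry
 */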
/* -------------------------------------------- */
/* Visibility Testing */
/* -------------------------------------------- */
/**
* Restrict the visibility of certain canvas assets (like Tokens or DoorControls) based on the visibility polygon
* These assets should only be displayed if they are visible given the current player's field of view
*/
restrictVisibility() {
// Activate or deactivate visual effects vision masking
canvas.effects.toggleMaskingFilters(this.visible);
// Tokens & Notes
const flags = {refreshVisibility: true};
for ( const token of canvas.tokens.placeables ) token.renderFlags.set(flags);
for ( const note of canvas.notes.placeables ) note.renderFlags.set(flags);
// Door Icons
for ( const door of canvas.controls.doors.children ) door.visible = door.isVisible;
Hooks.callAll("sightRefresh", this);
}
/* -------------------------------------------- */
/**
* @typedef {Object} CanvasVisibilityTestConfig
* @property {object|null} object The target object
* @property {CanvasVisibilityTest[]} tests An array of visibility tests
*/
/**
* @typedef {Object} CanvasVisibilityTest
* @property {Point} point
* @property {number} elevation
* @property {Map<VisionSource, boolean>} los
*/
/**
* Test whether a target point on the Canvas is visible based on the current vision and LOS polygons.
* @param {Point} point The point in space to test, an object with coordinates x and y.
* @param {object} [options] Additional options which modify visibility testing.
* @param {number} [options.tolerance=2] A numeric radial offset which allows for a non-exact match.
* For example, if tolerance is 2 then the test will pass if the point
* is within 2px of a vision polygon.
* @param {object|null} [options.object] An optional reference to the object whose visibility is being tested
* @returns {boolean} Whether the point is currently visible.
*/
testVisibility(point, options={}) {
// If no vision sources are present, visibility depends on the type of user
if ( !canvas.effects.visionSources.some(s => s.active) ) return game.user.isGM;
// Prepare an array of test points depending on the requested tolerance
const object = options.object ?? null;
const config = this._createVisibilityTestConfig(point, options);
// First test basic detection for light sources which specifically provide vision
for ( const lightSource of canvas.effects.lightSources ) {
if ( !lightSource.data.vision || !lightSource.active ) continue;
const result = lightSource.testVisibility(config);
if ( result === true ) return true;
}
// Get scene rect to test that some points are not detected into the padding
const sr = canvas.dimensions.sceneRect;
const inBuffer = !sr.contains(point.x, point.y);
// Skip sources that are not both inside the scene or both inside the buffer
const activeVisionSources = canvas.effects.visionSources.filter(s => s.active
&& (inBuffer !== sr.contains(s.x, s.y)));
const modes = CONFIG.Canvas.detectionModes;
// Second test Basic Sight and Light Perception tests for vision sources
for ( const visionSource of activeVisionSources ) {
if ( visionSource.isBlinded ) continue;
const token = visionSource.object.document;
const basicMode = token.detectionModes.find(m => m.id === "basicSight");
if ( basicMode ) {
const result = modes.basicSight.testVisibility(visionSource, basicMode, config);
if ( result === true ) return true;
}
const lightMode = token.detectionModes.find(m => m.id === "lightPerception");
if ( lightMode ) {
const result = modes.lightPerception.testVisibility(visionSource, lightMode, config);
if ( result === true ) return true;
}
}
// Special detection modes can only detect tokens
if ( !(object instanceof Token) ) return false;
// Lastly test special detection modes for vision sources
for ( const visionSource of activeVisionSources ) {
const token = visionSource.object.document;
for ( const mode of token.detectionModes ) {
if ( (mode.id === "basicSight") || (mode.id === "lightPerception") ) continue;
const dm = modes[mode.id];
const result = dm?.testVisibility(visionSource, mode, config);
if ( result === true ) {
object.detectionFilter = dm.constructor.getDetectionFilter();
return true;
}
}
}
return false;
}
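/**
 * A typical call, assuming a Token instance named token and that this layer is registered as canvas.visibility:
 * @example
 * const point = token.center;
 * const isVisible = canvas.visibility.testVisibility(point, {tolerance: canvas.grid.size / 4, object: token});
 */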
/* -------------------------------------------- */
/**
* Create the visibility test config.
* @param {Point} point The point in space to test, an object with coordinates x and y.
* @param {object} [options] Additional options which modify visibility testing.
* @param {number} [options.tolerance=2] A numeric radial offset which allows for a non-exact match.
* For example, if tolerance is 2 then the test will pass if the point
* is within 2px of a vision polygon.
* @param {object|null} [options.object] An optional reference to the object whose visibility is being tested
* @returns {CanvasVisibilityTestConfig}
* @internal
*/
_createVisibilityTestConfig(point, {tolerance=2, object=null}={}) {
const t = tolerance;
const offsets = t > 0 ? [[0, 0], [-t, -t], [-t, t], [t, t], [t, -t], [-t, 0], [t, 0], [0, -t], [0, t]] : [[0, 0]];
const elevation = object instanceof Token ? object.document.elevation : 0;
return {
object,
tests: offsets.map(o => ({
point: {x: point.x + o[0], y: point.y + o[1]},
elevation,
los: new Map()
}))
};
}
/* -------------------------------------------- */
/* Visibility Overlay and Texture management */
/* -------------------------------------------- */
/**
* Load the scene fog overlay if provided and attach the fog overlay sprite to this layer.
*/
async #drawVisibilityOverlay() {
this.visibilityOverlay = undefined;
this.#visibilityOverlayDimensions = [];
const overlaySrc = canvas.sceneTextures.fogOverlay ?? canvas.scene.fog.overlay;
const overlayTexture = overlaySrc instanceof PIXI.Texture ? overlaySrc : getTexture(overlaySrc);
if ( !overlayTexture ) return;
// Creating the sprite and updating its base texture with repeating wrap mode
const fo = this.visibilityOverlay = new PIXI.Sprite(overlayTexture);
// Set dimensions and position according to overlay <-> scene foreground dimensions
const bkg = canvas.primary.background;
const baseTex = overlayTexture.baseTexture;
if ( bkg && ((fo.width !== bkg.width) || (fo.height !== bkg.height)) ) {
// Set to the size of the scene dimensions
fo.width = canvas.scene.dimensions.width;
fo.height = canvas.scene.dimensions.height;
fo.position.set(0, 0);
// Activate repeat wrap mode for this base texture (to allow tiling)
baseTex.wrapMode = PIXI.WRAP_MODES.REPEAT;
}
else {
// Set the same position and size as the scene primary background
fo.width = bkg.width;
fo.height = bkg.height;
fo.position.set(bkg.x, bkg.y);
}
// The overlay is added to this canvas container to update its transforms only
fo.renderable = false;
this.addChild(this.visibilityOverlay);
// Manage video playback
const video = game.video.getVideoSource(overlayTexture);
if ( video ) {
const playOptions = {volume: 0};
game.video.play(video, playOptions);
}
// Passing overlay and base texture width and height for shader tiling calculations
this.#visibilityOverlayDimensions = [fo.width, fo.height, baseTex.width, baseTex.height];
}
/* -------------------------------------------- */
/**
* @typedef {object} VisibilityTextureConfiguration
* @property {number} resolution
* @property {number} width
* @property {number} height
* @property {number} mipmap
* @property {number} scaleMode
* @property {number} multisample
*/
/**
* Configure the fog texture with all required options.
* Choose an adaptive fog rendering resolution which downscales the saved fog textures for larger dimension Scenes.
* It is important that the width and height of the fog texture is evenly divisible by the downscaling resolution.
* @returns {VisibilityTextureConfiguration}
* @private
*/
#configureVisibilityTexture() {
const dims = canvas.dimensions;
let width = dims.sceneWidth;
let height = dims.sceneHeight;
const maxSize = CanvasVisibility.#MAXIMUM_VISIBILITY_TEXTURE_SIZE;
// Adapt the fog texture resolution relative to some maximum size, and ensure that multiplying the scene dimensions
// by the resolution results in an integer number in order to avoid fog drift.
let resolution = 1.0;
if ( (width >= height) && (width > maxSize) ) {
resolution = maxSize / width;
height = Math.ceil(height * resolution) / resolution;
} else if ( height > maxSize ) {
resolution = maxSize / height;
width = Math.ceil(width * resolution) / resolution;
}
// Determine the fog texture options
return this.#textureConfiguration = {
resolution,
width,
height,
mipmap: PIXI.MIPMAP_MODES.OFF,
multisample: PIXI.MSAA_QUALITY.NONE,
scaleMode: PIXI.SCALE_MODES.LINEAR,
alphaMode: PIXI.ALPHA_MODES.NPM,
format: PIXI.FORMATS.RED
};
}
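/**
 * Worked example of the adaptive resolution above: for an 8000 x 6000 px scene, width > 4096, so
 * resolution = 4096 / 8000 = 0.512 and height becomes Math.ceil(6000 * 0.512) / 0.512 = 3072 / 0.512 = 6000.
 * The resulting fog texture is 8000 * 0.512 = 4096 by 6000 * 0.512 = 3072 texels.
 */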
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
get fogOverlay() {
const msg = "fogOverlay is deprecated in favor of visibilityOverlay";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return this.visibilityOverlay;
}
}

View File

@@ -0,0 +1,374 @@
/**
* A CanvasLayer for displaying visual effects like weather, transitions, flashes, or more.
*/
class WeatherEffects extends FullCanvasObjectMixin(CanvasLayer) {
constructor() {
super();
this.#initializeFilters();
this.mask = canvas.masks.scene;
this.sortableChildren = true;
this.eventMode = "none";
}
/**
* The container in which effects are added.
* @type {PIXI.Container}
*/
weatherEffects;
/* -------------------------------------------- */
/**
* The container in which suppression meshes are added.
* @type {PIXI.Container}
*/
suppression;
/* -------------------------------------------- */
/**
* Initialize the inverse occlusion and the void filters.
*/
#initializeFilters() {
this.#suppressionFilter = VoidFilter.create();
this.occlusionFilter = WeatherOcclusionMaskFilter.create({
occlusionTexture: canvas.masks.depth.renderTexture
});
this.#suppressionFilter.enabled = this.occlusionFilter.enabled = false;
// FIXME: this does not produce correct results for weather effects that are configured
// with the occlusion filter disabled and use a different blend mode than SCREEN
this.#suppressionFilter.blendMode = PIXI.BLEND_MODES.SCREEN;
this.occlusionFilter.elevation = this.#elevation;
this.filterArea = canvas.app.renderer.screen;
this.filters = [this.occlusionFilter, this.#suppressionFilter];
}
/* -------------------------------------------- */
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {name: "effects"});
}
/* -------------------------------------------- */
/**
* Array of weather effects linked to this weather container.
* @type {Map<string,(ParticleEffect|WeatherShaderEffect)[]>}
*/
effects = new Map();
/**
* @typedef {Object} WeatherTerrainMaskConfiguration
* @property {boolean} enabled Enable or disable this mask.
* @property {number[]} channelWeights An RGBA array of channel weights applied to the mask texture.
* @property {boolean} [reverse=false] If the mask should be reversed.
* @property {PIXI.Texture|PIXI.RenderTexture} texture A texture which defines the mask region.
*/
/**
* A default configuration of the terrain mask that is automatically applied to any shader-based weather effects.
* This configuration is automatically passed to WeatherShaderEffect#configureTerrainMask upon construction.
* @type {WeatherTerrainMaskConfiguration}
*/
terrainMaskConfig;
/**
* @typedef {Object} WeatherOcclusionMaskConfiguration
* @property {boolean} enabled Enable or disable this mask.
* @property {number[]} channelWeights An RGBA array of channel weights applied to the mask texture.
* @property {boolean} [reverse=false] If the mask should be reversed.
* @property {PIXI.Texture|PIXI.RenderTexture} texture A texture which defines the mask region.
*/
/**
* A default configuration of the occlusion mask that is automatically applied to any shader-based weather effects.
* This configuration is automatically passed to WeatherEffects.configureOcclusionMask when effects are constructed.
* @type {WeatherOcclusionMaskConfiguration}
*/
occlusionMaskConfig;
/**
* The inverse occlusion mask filter bound to this container.
* @type {WeatherOcclusionMaskFilter}
*/
occlusionFilter;
/**
* The filter that is needed for suppression if the occlusion filter isn't enabled.
* @type {VoidFilter}
*/
#suppressionFilter;
/* -------------------------------------------- */
/**
* The elevation of this object.
* @type {number}
* @default Infinity
*/
get elevation() {
return this.#elevation;
}
set elevation(value) {
if ( (typeof value !== "number") || Number.isNaN(value) ) {
throw new Error("WeatherEffects#elevation must be a numeric value.");
}
if ( value === this.#elevation ) return;
this.#elevation = value;
if ( this.parent ) this.parent.sortDirty = true;
}
#elevation = Infinity;
/* -------------------------------------------- */
/**
* A key which resolves ties amongst objects at the same elevation of different layers.
* @type {number}
* @default PrimaryCanvasGroup.SORT_LAYERS.WEATHER
*/
get sortLayer() {
return this.#sortLayer;
}
set sortLayer(value) {
if ( (typeof value !== "number") || Number.isNaN(value) ) {
throw new Error("WeatherEffects#sortLayer must be a numeric value.");
}
if ( value === this.#sortLayer ) return;
this.#sortLayer = value;
if ( this.parent ) this.parent.sortDirty = true;
}
#sortLayer = PrimaryCanvasGroup.SORT_LAYERS.WEATHER;
/* -------------------------------------------- */
/**
* A key which resolves ties amongst objects at the same elevation within the same layer.
* @type {number}
* @default 0
*/
get sort() {
return this.#sort;
}
set sort(value) {
if ( (typeof value !== "number") || Number.isNaN(value) ) {
throw new Error("WeatherEffects#sort must be a numeric value.");
}
if ( value === this.#sort ) return;
this.#sort = value;
if ( this.parent ) this.parent.sortDirty = true;
}
#sort = 0;
/* -------------------------------------------- */
/**
* A key which resolves ties amongst objects at the same elevation within the same layer and same sort.
* @type {number}
* @default 0
*/
get zIndex() {
return this._zIndex;
}
set zIndex(value) {
if ( (typeof value !== "number") || Number.isNaN(value) ) {
throw new Error("WeatherEffects#zIndex must be a numeric value.");
}
if ( value === this._zIndex ) return;
this._zIndex = value;
if ( this.parent ) this.parent.sortDirty = true;
}
/* -------------------------------------------- */
/* Weather Effect Rendering */
/* -------------------------------------------- */
/** @override */
async _draw(options) {
const effect = CONFIG.weatherEffects[canvas.scene.weather];
this.weatherEffects = this.addChild(new PIXI.Container());
this.suppression = this.addChild(new PIXI.Container());
for ( const event of ["childAdded", "childRemoved"] ) {
this.suppression.on(event, () => {
this.#suppressionFilter.enabled = !this.occlusionFilter.enabled && !!this.suppression.children.length;
});
}
this.initializeEffects(effect);
}
/* -------------------------------------------- */
/** @inheritDoc */
async _tearDown(options) {
this.clearEffects();
return super._tearDown(options);
}
/* -------------------------------------------- */
/* Weather Effect Management */
/* -------------------------------------------- */
/**
* Initialize the weather container from a weather config object.
* @param {object} [weatherEffectsConfig] Weather config object (or null/undefined to clear the container).
*/
initializeEffects(weatherEffectsConfig) {
this.#destroyEffects();
Hooks.callAll("initializeWeatherEffects", this, weatherEffectsConfig);
this.#constructEffects(weatherEffectsConfig);
}
/* -------------------------------------------- */
/**
* Clear the weather container.
*/
clearEffects() {
this.initializeEffects(null);
}
/* -------------------------------------------- */
/**
* Destroy all effects associated with this weather container.
*/
#destroyEffects() {
if ( this.effects.size === 0 ) return;
for ( const effect of this.effects.values() ) effect.destroy();
this.effects.clear();
}
/* -------------------------------------------- */
/**
* Construct effects according to the weather effects config object.
* @param {object} [weatherEffectsConfig] Weather config object (or null/undefined to clear the container).
*/
#constructEffects(weatherEffectsConfig) {
if ( !weatherEffectsConfig ) {
this.#suppressionFilter.enabled = this.occlusionFilter.enabled = false;
return;
}
const effects = weatherEffectsConfig.effects;
let zIndex = 0;
// Enable a layer-wide occlusion filter unless it is explicitly disabled by the effect configuration
const useOcclusionFilter = weatherEffectsConfig.filter?.enabled !== false;
if ( useOcclusionFilter ) {
WeatherEffects.configureOcclusionMask(this.occlusionFilter, this.occlusionMaskConfig || {enabled: true});
if ( this.terrainMaskConfig ) WeatherEffects.configureTerrainMask(this.occlusionFilter, this.terrainMaskConfig);
this.occlusionFilter.blendMode = weatherEffectsConfig.filter?.blendMode ?? PIXI.BLEND_MODES.NORMAL;
this.occlusionFilter.enabled = true;
this.#suppressionFilter.enabled = false;
}
else {
this.#suppressionFilter.enabled = !!this.suppression.children.length;
}
// Create each effect
for ( const effect of effects ) {
const requiredPerformanceLevel = Number.isNumeric(effect.performanceLevel) ? effect.performanceLevel : 0;
if ( canvas.performance.mode < requiredPerformanceLevel ) {
console.debug(`Skipping weather effect ${effect.id}. The client performance level ${canvas.performance.mode}`
+ ` is less than the required performance mode ${requiredPerformanceLevel} for the effect`);
continue;
}
// Construct the effect container
let ec;
try {
ec = new effect.effectClass(effect.config, effect.shaderClass);
} catch(err) {
err.message = `Failed to construct weather effect: ${err.message}`;
console.error(err);
continue;
}
// Configure effect container
ec.zIndex = effect.zIndex ?? zIndex++;
ec.blendMode = effect.blendMode ?? PIXI.BLEND_MODES.NORMAL;
// Apply effect-level occlusion and terrain masking only if we are not using a layer-wide filter
if ( effect.shaderClass && !useOcclusionFilter ) {
WeatherEffects.configureOcclusionMask(ec.shader, this.occlusionMaskConfig || {enabled: true});
if ( this.terrainMaskConfig ) WeatherEffects.configureTerrainMask(ec.shader, this.terrainMaskConfig);
}
// Add to the layer, register the effect, and begin play
this.weatherEffects.addChild(ec);
this.effects.set(effect.id, ec);
ec.play();
}
}
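/**
 * A sketch of the weather configuration shape consumed above. The registry key, label, and numeric values are
 * illustrative assumptions; only the field names mirror what this method reads.
 * @example
 * CONFIG.weatherEffects["myModule.leaves"] = {
 *   id: "myModule.leaves",
 *   label: "My Falling Leaves",
 *   filter: {enabled: false},                        // skip the layer-wide occlusion filter
 *   effects: [{
 *     id: "myModuleLeavesParticles",
 *     effectClass: AutumnLeavesWeatherEffect,        // a ParticleEffect subclass
 *     blendMode: PIXI.BLEND_MODES.NORMAL,
 *     performanceLevel: 1
 *   }]
 * };
 */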
/* -------------------------------------------- */
/**
* Set the occlusion uniforms for this weather shader.
* @param {PIXI.Shader} context The shader context
* @param {WeatherOcclusionMaskConfiguration} config Occlusion masking options
* @protected
*/
static configureOcclusionMask(context, {enabled=false, channelWeights=[0, 0, 1, 0], reverse=false, texture}={}) {
if ( !(context instanceof PIXI.Shader) ) return;
const uniforms = context.uniforms;
if ( texture !== undefined ) uniforms.occlusionTexture = texture;
else uniforms.occlusionTexture ??= canvas.masks.depth.renderTexture;
uniforms.useOcclusion = enabled;
uniforms.occlusionWeights = channelWeights;
uniforms.reverseOcclusion = reverse;
if ( enabled && !uniforms.occlusionTexture ) {
console.warn(`The occlusion configuration for the weather shader ${context.constructor.name} is enabled but`
+ " does not have a valid texture");
uniforms.useOcclusion = false;
}
}
/* -------------------------------------------- */
/**
* Set the terrain uniforms for this weather shader.
* @param {PIXI.Shader} context The shader context
* @param {WeatherTerrainMaskConfiguration} config Terrain masking options
* @protected
*/
static configureTerrainMask(context, {enabled=false, channelWeights=[1, 0, 0, 0], reverse=false, texture}={}) {
if ( !(context instanceof PIXI.Shader) ) return;
const uniforms = context.uniforms;
if ( texture !== undefined ) {
uniforms.terrainTexture = texture;
const terrainMatrix = new PIXI.TextureMatrix(texture);
terrainMatrix.update();
uniforms.terrainUvMatrix.copyFrom(terrainMatrix.mapCoord);
}
uniforms.useTerrain = enabled;
uniforms.terrainWeights = channelWeights;
uniforms.reverseTerrain = reverse;
if ( enabled && !uniforms.terrainTexture ) {
console.warn(`The terrain configuration for the weather shader ${context.constructor.name} is enabled but`
+ " does not have a valid texture");
uniforms.useTerrain = false;
}
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
get weather() {
const msg = "The WeatherContainer at canvas.weather.weather is deprecated and combined with the layer itself.";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return this;
}
}

View File

@@ -0,0 +1,78 @@
/**
* An interface for defining particle-based weather effects
* @param {object} [options] Options passed to the getParticleEmitters method which can be used to customize
* values of the emitter configuration.
* @interface
*/
class ParticleEffect extends FullCanvasObjectMixin(PIXI.Container) {
constructor(options={}) {
super();
/**
* The array of emitters which are active for this particle effect
* @type {PIXI.particles.Emitter[]}
*/
this.emitters = this.getParticleEmitters(options);
}
/* -------------------------------------------- */
/**
* Create an emitter instance which automatically updates using the shared PIXI.Ticker
* @param {PIXI.particles.EmitterConfigV3} config The emitter configuration
* @returns {PIXI.particles.Emitter} The created Emitter instance
*/
createEmitter(config) {
config.autoUpdate = true;
config.emit = false;
return new PIXI.particles.Emitter(this, config);
}
/* -------------------------------------------- */
/**
* Get the particle emitters which should be active for this particle effect.
* This base class creates a single emitter using the explicitly provided configuration.
* Subclasses can override this method for more advanced configurations.
* @param {object} [options={}] Options provided to the ParticleEffect constructor which can be used to customize
* configuration values for created emitters.
* @returns {PIXI.particles.Emitter[]}
*/
getParticleEmitters(options={}) {
if ( foundry.utils.isEmpty(options) ) {
throw new Error("The base ParticleEffect class may only be used with an explicitly provided configuration");
}
return [this.createEmitter(/** @type {PIXI.particles.EmitterConfigV3} */ options)];
}
/* -------------------------------------------- */
/** @override */
destroy(...args) {
for ( const e of this.emitters ) e.destroy();
this.emitters = [];
super.destroy(...args);
}
/* -------------------------------------------- */
/**
* Begin animation for the configured emitters.
*/
play() {
for ( let e of this.emitters ) {
e.emit = true;
}
}
/* -------------------------------------------- */
/**
* Stop animation for the configured emitters.
*/
stop() {
for ( let e of this.emitters ) {
e.emit = false;
}
}
}
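/**
 * Minimal usage sketch, assuming a valid PIXI.particles.EmitterConfigV3 object named emitterConfig and that the
 * effect is parented to the weather effects container:
 * @example
 * const effect = new ParticleEffect(emitterConfig);
 * canvas.weather.weatherEffects.addChild(effect);
 * effect.play();
 * // ...later
 * effect.stop();
 * effect.destroy();
 */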

View File

@@ -0,0 +1,74 @@
/**
* A full-screen weather effect which renders gently falling autumn leaves.
* @extends {ParticleEffect}
*/
class AutumnLeavesWeatherEffect extends ParticleEffect {
/** @inheritdoc */
static label = "WEATHER.AutumnLeaves";
/**
* Configuration for the particle emitter for falling leaves
* @type {PIXI.particles.EmitterConfigV3}
*/
static LEAF_CONFIG = {
lifetime: {min: 10, max: 10},
behaviors: [
{
type: "alpha",
config: {
alpha: {
list: [{time: 0, value: 0.9}, {time: 1, value: 0.5}]
}
}
},
{
type: "moveSpeed",
config: {
speed: {
list: [{time: 0, value: 20}, {time: 1, value: 60}]
},
minMult: 0.6
}
},
{
type: "scale",
config: {
scale: {
list: [{time: 0, value: 0.2}, {time: 1, value: 0.4}]
},
minMult: 0.5
}
},
{
type: "rotation",
config: {accel: 0, minSpeed: 100, maxSpeed: 200, minStart: 0, maxStart: 365}
},
{
type: "textureRandom",
config: {
textures: Array.fromRange(6).map(n => `ui/particles/leaf${n + 1}.png`)
}
}
]
};
/* -------------------------------------------- */
/** @inheritdoc */
getParticleEmitters() {
const d = canvas.dimensions;
const maxParticles = (d.width / d.size) * (d.height / d.size) * 0.25;
const config = foundry.utils.deepClone(this.constructor.LEAF_CONFIG);
config.maxParticles = maxParticles;
config.frequency = config.lifetime.min / maxParticles;
config.behaviors.push({
type: "spawnShape",
config: {
type: "rect",
data: {x: d.sceneRect.x, y: d.sceneRect.y, w: d.sceneRect.width, h: d.sceneRect.height}
}
});
return [this.createEmitter(config)];
}
}
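/**
 * Worked example of the particle budget above: for a 4000 x 3000 px scene with a 100 px grid,
 * maxParticles = (4000 / 100) * (3000 / 100) * 0.25 = 40 * 30 * 0.25 = 300, and with a 10 second minimum
 * lifetime the spawn frequency is 10 / 300, roughly 0.033 seconds between particles.
 */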

View File

@@ -0,0 +1,53 @@
/**
* A special Graphics class which handles Grid layer highlighting
* @extends {PIXI.Graphics}
*/
class GridHighlight extends PIXI.Graphics {
constructor(name, ...args) {
super(...args);
/**
* Track the Grid Highlight name
* @type {string}
*/
this.name = name;
/**
* Track distinct positions which have already been highlighted
* @type {Set}
*/
this.positions = new Set();
}
/* -------------------------------------------- */
/**
* Record a position that is highlighted and return whether or not it should be rendered
* @param {number} x The x-coordinate to highlight
* @param {number} y The y-coordinate to highlight
* @return {boolean} Whether or not to draw the highlight for this location
*/
highlight(x, y) {
let key = `${x},${y}`;
if ( this.positions.has(key) ) return false;
this.positions.add(key);
return true;
}
/* -------------------------------------------- */
/** @inheritdoc */
clear() {
this.positions = new Set();
return super.clear();
}
/* -------------------------------------------- */
/** @inheritdoc */
destroy(...args) {
delete canvas.interface.grid.highlightLayers[this.name];
return super.destroy(...args);
}
}

View File

@@ -0,0 +1,301 @@
/**
* A CanvasLayer responsible for drawing a square grid
*/
class GridLayer extends CanvasLayer {
/**
* The grid mesh.
* @type {GridMesh}
*/
mesh;
/**
* The Grid Highlight container
* @type {PIXI.Container}
*/
highlight;
/**
* Map named highlight layers
* @type {Record<string, GridHighlight>}
*/
highlightLayers = {};
/* -------------------------------------------- */
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {name: "grid"});
}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
// Draw the highlight layer
this.highlightLayers = {};
this.highlight = this.addChild(new PIXI.Container());
this.highlight.sortableChildren = true;
// Draw the grid
this.mesh = this.addChild(await this._drawMesh());
// Initialize the mesh appearance
this.initializeMesh(canvas.grid);
}
/* -------------------------------------------- */
/**
* Creates the grid mesh.
* @returns {Promise<GridMesh>}
* @protected
*/
async _drawMesh() {
return new GridMesh().initialize({
type: canvas.grid.type,
width: canvas.dimensions.width,
height: canvas.dimensions.height,
size: canvas.dimensions.size
});
}
/* -------------------------------------------- */
/**
* Initialize the grid mesh appearance and configure the grid shader.
* @param {object} options
* @param {string} [options.style] The grid style
* @param {number} [options.thickness] The grid thickness
* @param {string} [options.color] The grid color
* @param {number} [options.alpha] The grid alpha
*/
initializeMesh({style, thickness, color, alpha}) {
const {shaderClass, shaderOptions} = CONFIG.Canvas.gridStyles[style] ?? {};
this.mesh.initialize({thickness, color, alpha});
this.mesh.setShaderClass(shaderClass ?? GridShader);
this.mesh.shader.configure(shaderOptions ?? {});
}
/* -------------------------------------------- */
/* Grid Highlighting Methods */
/* -------------------------------------------- */
/**
* Define a new Highlight graphic
* @param {string} name The name for the referenced highlight layer
*/
addHighlightLayer(name) {
const layer = this.highlightLayers[name];
if ( !layer || layer._destroyed ) {
this.highlightLayers[name] = this.highlight.addChild(new GridHighlight(name));
}
return this.highlightLayers[name];
}
/* -------------------------------------------- */
/**
* Clear a specific Highlight graphic
* @param {string} name The name for the referenced highlight layer
*/
clearHighlightLayer(name) {
const layer = this.highlightLayers[name];
if ( layer ) layer.clear();
}
/* -------------------------------------------- */
/**
* Destroy a specific Highlight graphic
* @param {string} name The name for the referenced highlight layer
*/
destroyHighlightLayer(name) {
const layer = this.highlightLayers[name];
if ( layer ) {
this.highlight.removeChild(layer);
layer.destroy();
}
}
/* -------------------------------------------- */
/**
* Obtain the highlight layer graphic by name
* @param {string} name The name for the referenced highlight layer
*/
getHighlightLayer(name) {
return this.highlightLayers[name];
}
/* -------------------------------------------- */
/**
* Add highlighting for a specific grid position to a named highlight graphic
* @param {string} name The name for the referenced highlight layer
* @param {object} [options] Options for the grid position that should be highlighted
* @param {number} [options.x] The x-coordinate of the highlighted position
* @param {number} [options.y] The y-coordinate of the highlighted position
* @param {PIXI.ColorSource} [options.color=0x33BBFF] The fill color of the highlight
* @param {PIXI.ColorSource|null} [options.border=null] The border color of the highlight
* @param {number} [options.alpha=0.25] The opacity of the highlight
* @param {PIXI.Polygon} [options.shape=null] A predefined shape to highlight
*/
highlightPosition(name, {x, y, color=0x33BBFF, border=null, alpha=0.25, shape=null}) {
const layer = this.highlightLayers[name];
if ( !layer ) return;
const grid = canvas.grid;
if ( grid.type !== CONST.GRID_TYPES.GRIDLESS ) {
const cx = x + (grid.sizeX / 2);
const cy = y + (grid.sizeY / 2);
const points = grid.getShape();
for ( const point of points ) {
point.x += cx;
point.y += cy;
}
shape = new PIXI.Polygon(points);
} else if ( !shape ) return;
if ( !layer.highlight(x, y) ) return;
layer.beginFill(color, alpha);
if ( border !== null ) layer.lineStyle(2, border, Math.min(alpha * 1.5, 1.0));
layer.drawShape(shape).endFill();
}
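/**
 * A typical highlight workflow; the layer name and coordinates are illustrative:
 * @example
 * const layerName = "my-module.movement";
 * canvas.interface.grid.addHighlightLayer(layerName);
 * const {x, y} = canvas.grid.getTopLeftPoint({x: 1150, y: 980});  // snap to the containing grid space
 * canvas.interface.grid.highlightPosition(layerName, {x, y, color: 0xFF9829, border: 0x000000, alpha: 0.4});
 * // ...later
 * canvas.interface.grid.clearHighlightLayer(layerName);
 */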
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get type() {
const msg = "GridLayer#type is deprecated. Use canvas.grid.type instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid.type;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get size() {
const msg = "GridLayer#size is deprecated. Use canvas.grid.size instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid.size;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get grid() {
const msg = "GridLayer#grid is deprecated. Use canvas.grid instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
isNeighbor(r0, c0, r1, c1) {
const msg = "GridLayer#isNeighbor is deprecated. Use canvas.grid.testAdjacency instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid.testAdjacency({i: r0, j: c0}, {i: r1, j: c1});
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get w() {
const msg = "GridLayer#w is deprecated in favor of canvas.grid.sizeX.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid.sizeX;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get h() {
const msg = "GridLayer#h is deprecated in favor of canvas.grid.sizeY.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid.sizeY;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get isHex() {
const msg = "GridLayer#isHex is deprecated. Use canvas.grid.isHexagonal instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid.isHexagonal;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
getTopLeft(x, y) {
const msg = "GridLayer#getTopLeft is deprecated. Use canvas.grid.getTopLeftPoint instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid.getTopLeft(x, y);
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
getCenter(x, y) {
const msg = "GridLayer#getCenter is deprecated. Use canvas.grid.getCenterPoint instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return canvas.grid.getCenter(x, y);
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
getSnappedPosition(x, y, interval=1, options={}) {
const msg = "GridLayer#getSnappedPosition is deprecated. Use canvas.grid.getSnappedPoint instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
if ( interval === 0 ) return {x: Math.round(x), y: Math.round(y)};
return canvas.grid.getSnappedPosition(x, y, interval, options);
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
measureDistance(origin, target, options={}) {
const msg = "GridLayer#measureDistance is deprecated. "
+ "Use canvas.grid.measurePath instead for non-Euclidean measurements.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
const ray = new Ray(origin, target);
const segments = [{ray}];
return canvas.grid.measureDistances(segments, options)[0];
}
}

View File

@@ -0,0 +1,89 @@
/**
* The grid mesh data.
* @typedef {object} GridMeshData
* @property {number} type The type of the grid (see {@link CONST.GRID_TYPES})
* @property {number} width The width of the grid in pixels
* @property {number} height The height of the grid in pixels
* @property {number} size The size of a grid space in pixels
* @property {number} thickness The thickness of the grid lines in pixels
* @property {number} color The color of the grid
* @property {number} alpha The alpha of the grid
*/
/**
* The grid mesh, which uses the {@link GridShader} to render the grid.
*/
class GridMesh extends QuadMesh {
/**
* The grid mesh constructor.
* @param {typeof GridShader} [shaderClass=GridShader] The shader class
*/
constructor(shaderClass=GridShader) {
super(shaderClass);
this.width = 0;
this.height = 0;
this.alpha = 0;
this.renderable = false;
}
/* -------------------------------------------- */
/**
* The data of this mesh.
* @type {GridMeshData}
*/
data = {
type: CONST.GRID_TYPES.GRIDLESS,
width: 0,
height: 0,
size: 0,
thickness: 1,
color: 0,
alpha: 1
};
/* -------------------------------------------- */
/**
* Initialize and update the mesh given the (partial) data.
* @param {Partial<GridMeshData>} data The (partial) data.
* @returns {this}
*/
initialize(data) {
// Update the data
this._initialize(data);
// Update the width, height, and alpha
const d = this.data;
this.width = d.width;
this.height = d.height;
this.alpha = d.alpha;
// Don't render if the grid is gridless or the thickness isn't positive
this.renderable = (d.type !== CONST.GRID_TYPES.GRIDLESS) && (d.thickness > 0);
return this;
}
/* -------------------------------------------- */
/**
* Initialize the data of this mesh given the (partial) data.
* @param {Partial<GridMeshData>} data The (partial) data.
* @protected
*/
_initialize(data) {
const d = this.data;
if ( data.type !== undefined ) d.type = data.type;
if ( data.width !== undefined ) d.width = data.width;
if ( data.height !== undefined ) d.height = data.height;
if ( data.size !== undefined ) d.size = data.size;
if ( data.thickness !== undefined ) d.thickness = data.thickness;
if ( data.color !== undefined ) {
const color = Color.from(data.color);
d.color = color.valid ? color.valueOf() : 0;
}
if ( data.alpha !== undefined ) d.alpha = data.alpha;
}
}
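/**
 * A minimal usage sketch for the mesh above; the dimensions, thickness, and color are illustrative,
 * and in practice the mesh is created and initialized by the core grid rendering code.
 * @example Initializing a GridMesh with (partial) data
 * ```js
 * const mesh = new GridMesh();
 * mesh.initialize({
 *   type: CONST.GRID_TYPES.SQUARE,
 *   width: 4000,            // Width of the grid in pixels
 *   height: 3000,           // Height of the grid in pixels
 *   size: 100,              // Size of a grid space in pixels
 *   thickness: 2,
 *   color: 0x000000,
 *   alpha: 0.4
 * });
 * // Later calls may pass partial data; omitted keys keep their current values
 * mesh.initialize({alpha: 0.8});
 * ```
 */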

View File

@@ -0,0 +1,126 @@
/**
* The depth mask which contains a mapping of elevation, needed to determine whether objects must be rendered according to depth.
* Red channel: Lighting occlusion (top).
* Green channel: Lighting occlusion (bottom).
* Blue channel: Weather occlusion.
* @category - Canvas
*/
class CanvasDepthMask extends CachedContainer {
constructor(...args) {
super(...args);
this.#createDepth();
}
/**
* Container in which roofs are rendered with depth data.
* @type {PIXI.Container}
*/
roofs;
/** @override */
static textureConfiguration = {
scaleMode: PIXI.SCALE_MODES.NEAREST,
format: PIXI.FORMATS.RGB,
multisample: PIXI.MSAA_QUALITY.NONE
};
/** @override */
clearColor = [0, 0, 0, 0];
/**
* Update the elevation-to-depth mapping?
* @type {boolean}
* @internal
*/
_elevationDirty = false;
/**
* The elevations of the elevation-to-depth mapping.
* Up to 255 unique elevations are supported.
* @type {Float64Array}
*/
#elevations = new Float64Array([-Infinity]);
/* -------------------------------------------- */
/**
* Map an elevation to a value in the range [0, 1] with 8-bit precision.
* The depth-rendered objects are rendered with these values into the render texture.
* @param {number} elevation The elevation in distance units
* @returns {number} The value for this elevation in the range [0, 1] with 8-bit precision
*/
mapElevation(elevation) {
const E = this.#elevations;
if ( elevation < E[0] ) return 0;
let i = 0;
let j = E.length - 1;
while ( i < j ) {
const k = (i + j + 1) >> 1;
const e = E[k];
if ( e <= elevation ) i = k;
else j = k - 1;
}
return (i + 1) / 255;
}
/* -------------------------------------------- */
/**
* Update the elevation-to-depth mapping.
* Must be called after the children have been sorted and after the canvas transform phase.
* @internal
*/
_update() {
if ( !this._elevationDirty ) return;
this._elevationDirty = false;
const elevations = [];
const children = canvas.primary.children;
for ( let i = 0, n = children.length; i < n; i++ ) {
const child = children[i];
if ( !child.shouldRenderDepth ) continue;
const elevation = child.elevation;
if ( elevation === elevations.at(-1) ) continue;
elevations.push(elevation);
}
if ( !elevations.length ) elevations.push(-Infinity);
else elevations.length = Math.min(elevations.length, 255);
this.#elevations = new Float64Array(elevations);
}
/* -------------------------------------------- */
/**
* Initialize the depth mask with the roofs container.
*/
#createDepth() {
this.roofs = this.addChild(this.#createRoofsContainer());
}
/* -------------------------------------------- */
/**
* Create the roofs container.
* @returns {PIXI.Container}
*/
#createRoofsContainer() {
const c = new PIXI.Container();
const render = renderer => {
// Render the depth of each primary canvas object
for ( const pco of canvas.primary.children ) {
pco.renderDepthData?.(renderer);
}
};
c.render = render.bind(c);
return c;
}
/* -------------------------------------------- */
/**
* Clear the depth mask.
*/
clear() {
Canvas.clearContainer(this.roofs, false);
}
}
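/**
 * A usage sketch, assuming a ready canvas where this mask is available as canvas.masks.depth;
 * the elevation value is illustrative.
 * @example Reading the depth value associated with an elevation
 * ```js
 * const depth = canvas.masks.depth.mapElevation(10); // Elevation in distance units
 * // 0 if the elevation lies below every depth-rendered elevation, otherwise k / 255
 * console.log(depth);
 * ```
 */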

View File

@@ -0,0 +1,204 @@
/**
* The occlusion mask which contains fade, radial, and vision occlusion from tokens.
* Red channel: Fade occlusion.
* Green channel: Radial occlusion.
* Blue channel: Vision occlusion.
* @category - Canvas
*/
class CanvasOcclusionMask extends CachedContainer {
constructor(...args) {
super(...args);
this.#createOcclusion();
}
/** @override */
static textureConfiguration = {
scaleMode: PIXI.SCALE_MODES.NEAREST,
format: PIXI.FORMATS.RGB,
multisample: PIXI.MSAA_QUALITY.NONE
};
/**
* Graphics in which token radial and vision occlusion shapes are drawn.
* @type {PIXI.LegacyGraphics}
*/
tokens;
/**
* The occludable tokens.
* @type {Token[]}
*/
#tokens;
/** @override */
clearColor = [0, 1, 1, 1];
/** @override */
autoRender = false;
/* -------------------------------------------- */
/**
* Is vision occlusion active?
* @type {boolean}
*/
get vision() {
return this.#vision;
}
/**
* @type {boolean}
*/
#vision = false;
/**
* The elevations of the elevation-to-depth mapping.
* Up to 255 unique elevations are supported.
* @type {Float64Array}
*/
#elevations = new Float64Array([-Infinity]);
/* -------------------------------------------- */
/**
* Initialize the occlusion mask with the token graphics.
*/
#createOcclusion() {
this.alphaMode = PIXI.ALPHA_MODES.NO_PREMULTIPLIED_ALPHA;
this.tokens = this.addChild(new PIXI.LegacyGraphics());
this.tokens.blendMode = PIXI.BLEND_MODES.MIN_ALL;
}
/* -------------------------------------------- */
/**
* Clear the occlusion mask.
*/
clear() {
this.tokens.clear();
}
/* -------------------------------------------- */
/* Occlusion Management */
/* -------------------------------------------- */
/**
* Map an elevation to a value in the range [0, 1] with 8-bit precision.
* The radial and vision shapes are drawn with these values into the render texture.
* @param {number} elevation The elevation in distance units
* @returns {number} The value for this elevation in the range [0, 1] with 8-bit precision
*/
mapElevation(elevation) {
const E = this.#elevations;
let i = 0;
let j = E.length - 1;
if ( elevation > E[j] ) return 1;
while ( i < j ) {
const k = (i + j) >> 1;
const e = E[k];
if ( e >= elevation ) j = k;
else i = k + 1;
}
return i / 255;
}
/* -------------------------------------------- */
/**
* Update the set of occludable Tokens, redraw the occlusion mask, and update the occluded state
* of all occludable objects.
*/
updateOcclusion() {
this.#tokens = canvas.tokens._getOccludableTokens();
this._updateOcclusionMask();
this._updateOcclusionStates();
}
/* -------------------------------------------- */
/**
* Draw occlusion shapes to the occlusion mask.
* Fade occlusion draws to the red channel with varying intensity in the range [0, 1] based on elevation.
* Radial occlusion draws to the green channel with varying intensity in the range [0, 1] based on elevation.
* Vision occlusion draws to the blue channel with varying intensity in the range [0, 1] based on elevation.
* @internal
*/
_updateOcclusionMask() {
this.#vision = false;
this.tokens.clear();
const elevations = [];
for ( const token of this.#tokens.sort((a, b) => a.document.elevation - b.document.elevation) ) {
const elevation = token.document.elevation;
if ( elevation !== elevations.at(-1) ) elevations.push(elevation);
const occlusionElevation = Math.min(elevations.length - 1, 255);
// Draw vision occlusion
if ( token.vision?.active ) {
this.#vision = true;
this.tokens.beginFill(0xFFFF00 | occlusionElevation).drawShape(token.vision.los).endFill();
}
// Draw radial occlusion (and radial into the vision channel if this token doesn't have vision)
const origin = token.center;
const occlusionRadius = Math.max(token.externalRadius, token.getLightRadius(token.document.occludable.radius));
this.tokens.beginFill(0xFF0000 | (occlusionElevation << 8) | (token.vision?.active ? 0xFF : occlusionElevation))
.drawCircle(origin.x, origin.y, occlusionRadius).endFill();
}
if ( !elevations.length ) elevations.push(-Infinity);
else elevations.length = Math.min(elevations.length, 255);
this.#elevations = new Float64Array(elevations);
this.renderDirty = true;
}
/* -------------------------------------------- */
/**
* Update the current occlusion status of all Tile objects.
* @internal
*/
_updateOcclusionStates() {
const occluded = this._identifyOccludedObjects(this.#tokens);
for ( const pco of canvas.primary.children ) {
const isOccludable = pco.isOccludable;
if ( (isOccludable === undefined) || (!isOccludable && !pco.occluded) ) continue;
pco.debounceSetOcclusion(occluded.has(pco));
}
}
/* -------------------------------------------- */
/**
* Determine the set of objects which should be currently occluded by a Token.
* @param {Token[]} tokens The set of currently controlled Token objects
* @returns {Set<PrimaryCanvasObjectMixin>} The PCO objects which should be currently occluded
* @protected
*/
_identifyOccludedObjects(tokens) {
const occluded = new Set();
for ( const token of tokens ) {
// Get the occludable primary canvas objects (PCO) according to the token bounds
const matchingPCO = canvas.primary.quadtree.getObjects(token.bounds);
for ( const pco of matchingPCO ) {
// Don't bother re-testing a PCO or an object which is not occludable
if ( !pco.isOccludable || occluded.has(pco) ) continue;
if ( pco.testOcclusion(token, {corners: pco.restrictsLight && pco.restrictsWeather}) ) occluded.add(pco);
}
}
return occluded;
}
/* -------------------------------------------- */
/* Deprecation and compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
_identifyOccludedTiles() {
const msg = "CanvasOcclusionMask#_identifyOccludedTiles has been deprecated in " +
"favor of CanvasOcclusionMask#_identifyOccludedObjects.";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return this._identifyOccludedObjects();
}
}
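/**
 * A usage sketch, assuming a ready canvas where this mask is available as canvas.masks.occlusion;
 * the elevation value is illustrative.
 * @example Refreshing occlusion and mapping an elevation
 * ```js
 * // Update occludable Tokens, redraw the occlusion mask, and refresh occluded states
 * canvas.masks.occlusion.updateOcclusion();
 * // Map an elevation to the normalized value used by the radial and vision shapes
 * const value = canvas.masks.occlusion.mapElevation(20);
 * ```
 */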

View File

@@ -0,0 +1,162 @@
/**
* @typedef {object} _CanvasVisionContainerSight
* @property {PIXI.LegacyGraphics} preview FOV that should not be committed to fog exploration.
*/
/**
* The sight part of {@link CanvasVisionContainer}.
* The blend mode is MAX_COLOR.
* @typedef {PIXI.LegacyGraphics & _CanvasVisionContainerSight} CanvasVisionContainerSight
*/
/**
* @typedef {object} _CanvasVisionContainerLight
* @property {PIXI.LegacyGraphics} preview FOV that should not be committed to fog exploration.
* @property {SpriteMesh} cached The sprite with the texture of FOV of cached light sources.
* @property {PIXI.LegacyGraphics & {preview: PIXI.LegacyGraphics}} mask
* The light perception polygons of vision sources and the FOV of vision sources that provide vision.
*/
/**
* The light part of {@link CanvasVisionContainer}.
* The blend mode is MAX_COLOR.
* @typedef {PIXI.LegacyGraphics & _CanvasVisionContainerLight} CanvasVisionContainerLight
*/
/**
* @typedef {object} _CanvasVisionContainerDarkness
* @property {PIXI.LegacyGraphics} darkness Darkness source erasing fog of war.
*/
/**
* The darkness part of {@link CanvasVisionContainer}.
* The blend mode is ERASE.
* @typedef {PIXI.LegacyGraphics & _CanvasVisionContainerDarkness} CanvasVisionContainerDarkness
*/
/**
* @typedef {object} _CanvasVisionContainer
* @property {CanvasVisionContainerLight} light Areas visible because of light sources and light perception.
* @property {CanvasVisionContainerSight} sight Areas visible because of FOV of vision sources.
* @property {CanvasVisionContainerDarkness} darkness Areas erased by darkness sources.
*/
/**
* The currently visible areas.
* @typedef {PIXI.Container & _CanvasVisionContainer} CanvasVisionContainer
*/
/**
* The vision mask which contains the current line-of-sight texture.
* @category - Canvas
*/
class CanvasVisionMask extends CachedContainer {
/** @override */
static textureConfiguration = {
scaleMode: PIXI.SCALE_MODES.NEAREST,
format: PIXI.FORMATS.RED,
multisample: PIXI.MSAA_QUALITY.NONE
};
/** @override */
clearColor = [0, 0, 0, 0];
/** @override */
autoRender = false;
/**
* The current vision Container.
* @type {CanvasVisionContainer}
*/
vision;
/**
* The BlurFilter which applies to the vision mask texture.
* This filter applies a NORMAL blend mode to the container.
* @type {AlphaBlurFilter}
*/
blurFilter;
/* -------------------------------------------- */
/**
* Create the BlurFilter for the VisionMask container.
* @returns {AlphaBlurFilter}
*/
#createBlurFilter() {
// Initialize filters properties
this.filters ??= [];
this.filterArea = null;
// Check if the canvas blur is disabled and return without doing anything if necessary
const b = canvas.blur;
this.filters.findSplice(f => f === this.blurFilter);
if ( !b.enabled ) return;
// Create the new filter
const f = this.blurFilter = new b.blurClass(b.strength, b.passes, PIXI.Filter.defaultResolution, b.kernels);
f.blendMode = PIXI.BLEND_MODES.NORMAL;
this.filterArea = canvas.app.renderer.screen;
this.filters.push(f);
return canvas.addBlurFilter(this.blurFilter);
}
/* -------------------------------------------- */
async draw() {
this.#createBlurFilter();
}
/* -------------------------------------------- */
/**
* Attach the vision container, which holds the los and the fov graphics objects, to the vision mask.
* @param {PIXI.Container} vision The vision container to attach
* @returns {CanvasVisionContainer}
*/
attachVision(vision) {
return this.vision = this.addChild(vision);
}
/* -------------------------------------------- */
/**
* Detach the vision mask from the cached container.
* @returns {CanvasVisionContainer} The detached vision container.
*/
detachVision() {
const vision = this.vision;
this.removeChild(vision);
this.vision = undefined;
return vision;
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
get filter() {
foundry.utils.logCompatibilityWarning("CanvasVisionMask#filter has been renamed to blurFilter.", {since: 11, until: 13});
return this.blurFilter;
}
/**
* @deprecated since v11
* @ignore
*/
set filter(f) {
foundry.utils.logCompatibilityWarning("CanvasVisionMask#filter has been renamed to blurFilter.", {since: 11, until: 13});
this.blurFilter = f;
}
}
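/**
 * A sketch of the attach/detach cycle, assuming a ready canvas where this mask is available as
 * canvas.masks.vision; the vision container itself is normally created by the visibility layer.
 * @example Temporarily detaching and re-attaching the vision container
 * ```js
 * const vision = canvas.masks.vision.detachVision(); // Remove the container from the mask
 * // ... perform work which should not include the vision mask contents ...
 * canvas.masks.vision.attachVision(vision);          // Restore the container
 * canvas.masks.vision.renderDirty = true;            // Request a re-render of the cached texture
 * ```
 */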

View File

@@ -0,0 +1,329 @@
/**
* The DrawingsLayer subclass of PlaceablesLayer.
* This layer implements a container for drawings.
* @category - Canvas
*/
class DrawingsLayer extends PlaceablesLayer {
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "drawings",
controllableObjects: true,
rotatableObjects: true,
zIndex: 500
});
}
/** @inheritdoc */
static documentName = "Drawing";
/**
* The named game setting which persists default drawing configuration for the User
* @type {string}
*/
static DEFAULT_CONFIG_SETTING = "defaultDrawingConfig";
/**
* The collection of drawing objects which are rendered in the interface.
* @type {Collection<string, Drawing>}
*/
graphics = new foundry.utils.Collection();
/* -------------------------------------------- */
/* Properties */
/* -------------------------------------------- */
/** @inheritdoc */
get hud() {
return canvas.hud.drawing;
}
/* -------------------------------------------- */
/** @inheritdoc */
get hookName() {
return DrawingsLayer.name;
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @override */
getSnappedPoint(point) {
const M = CONST.GRID_SNAPPING_MODES;
const size = canvas.dimensions.size;
return canvas.grid.getSnappedPoint(point, canvas.forceSnapVertices ? {mode: M.VERTEX} : {
mode: M.CENTER | M.VERTEX | M.CORNER | M.SIDE_MIDPOINT,
resolution: size >= 128 ? 8 : (size >= 64 ? 4 : 2)
});
}
/* -------------------------------------------- */
/**
* Render a configuration sheet to configure the default Drawing settings
*/
configureDefault() {
const defaults = game.settings.get("core", DrawingsLayer.DEFAULT_CONFIG_SETTING);
const d = DrawingDocument.fromSource(defaults);
new DrawingConfig(d, {configureDefault: true}).render(true);
}
/* -------------------------------------------- */
/** @inheritDoc */
_deactivate() {
super._deactivate();
this.objects.visible = true;
}
/* -------------------------------------------- */
/** @inheritdoc */
async _draw(options) {
await super._draw(options);
this.objects.visible = true;
}
/* -------------------------------------------- */
/**
* Get initial data for a new drawing.
* Start with some global defaults, apply user default config, then apply mandatory overrides per tool.
* @param {Point} origin The initial coordinate
* @returns {object} The new drawing data
*/
_getNewDrawingData(origin) {
const tool = game.activeTool;
// Get saved user defaults
const defaults = game.settings.get("core", this.constructor.DEFAULT_CONFIG_SETTING) || {};
const userColor = game.user.color.css;
const data = foundry.utils.mergeObject(defaults, {
fillColor: userColor,
strokeColor: userColor,
fontFamily: CONFIG.defaultFontFamily
}, {overwrite: false, inplace: false});
// Mandatory additions
delete data._id;
data.x = origin.x;
data.y = origin.y;
data.sort = Math.max(this.getMaxSort() + 1, 0);
data.author = game.user.id;
data.shape = {};
// Information toggle
const interfaceToggle = ui.controls.controls.find(c => c.layer === "drawings").tools.find(t => t.name === "role");
data.interface = interfaceToggle.active;
// Tool-based settings
switch ( tool ) {
case "rect":
data.shape.type = Drawing.SHAPE_TYPES.RECTANGLE;
data.shape.width = 1;
data.shape.height = 1;
break;
case "ellipse":
data.shape.type = Drawing.SHAPE_TYPES.ELLIPSE;
data.shape.width = 1;
data.shape.height = 1;
break;
case "polygon":
data.shape.type = Drawing.SHAPE_TYPES.POLYGON;
data.shape.points = [0, 0];
data.bezierFactor = 0;
break;
case "freehand":
data.shape.type = Drawing.SHAPE_TYPES.POLYGON;
data.shape.points = [0, 0];
data.bezierFactor = data.bezierFactor ?? 0.5;
break;
case "text":
data.shape.type = Drawing.SHAPE_TYPES.RECTANGLE;
data.shape.width = 1;
data.shape.height = 1;
data.fillColor = "#ffffff";
data.fillAlpha = 0.10;
data.strokeColor = "#ffffff";
data.text ||= "";
break;
}
// Return the cleaned data
return DrawingDocument.cleanData(data);
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/** @inheritdoc */
_onClickLeft(event) {
const {preview, drawingsState, destination} = event.interactionData;
// Continue polygon point placement
if ( (drawingsState >= 1) && preview.isPolygon ) {
preview._addPoint(destination, {snap: !event.shiftKey, round: true});
preview._chain = true; // Note that we are now in chain mode
return preview.refresh();
}
// Standard left-click handling
super._onClickLeft(event);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onClickLeft2(event) {
const {drawingsState, preview} = event.interactionData;
// Conclude polygon placement with double-click
if ( (drawingsState >= 1) && preview.isPolygon ) {
event.interactionData.drawingsState = 2;
return;
}
// Standard double-click handling
super._onClickLeft2(event);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftStart(event) {
super._onDragLeftStart(event);
const interaction = event.interactionData;
// Snap the origin to the grid
const isFreehand = game.activeTool === "freehand";
if ( !event.shiftKey && !isFreehand ) {
interaction.origin = this.getSnappedPoint(interaction.origin);
}
// Create the preview object
const cls = getDocumentClass("Drawing");
let document;
try {
document = new cls(this._getNewDrawingData(interaction.origin), {parent: canvas.scene});
}
catch(e) {
if ( e instanceof foundry.data.validation.DataModelValidationError ) {
ui.notifications.error("DRAWING.JointValidationErrorUI", {localize: true});
}
throw e;
}
const drawing = new this.constructor.placeableClass(document);
interaction.preview = this.preview.addChild(drawing);
interaction.drawingsState = 1;
drawing.draw();
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftMove(event) {
const {preview, drawingsState} = event.interactionData;
if ( !preview || preview._destroyed ) return;
if ( preview.parent === null ) { // In theory this should never happen, but it occasionally does
this.preview.addChild(preview);
}
if ( drawingsState >= 1 ) {
preview._onMouseDraw(event);
const isFreehand = game.activeTool === "freehand";
if ( !preview.isPolygon || isFreehand ) event.interactionData.drawingsState = 2;
}
}
/* -------------------------------------------- */
/**
* Handling of mouse-up events which conclude a new object creation after dragging
* @param {PIXI.FederatedEvent} event The drag drop event
* @private
*/
_onDragLeftDrop(event) {
const interaction = event.interactionData;
// Snap the destination to the grid
const isFreehand = game.activeTool === "freehand";
if ( !event.shiftKey && !isFreehand ) {
interaction.destination = this.getSnappedPoint(interaction.destination);
}
const {drawingsState, destination, origin, preview} = interaction;
// Successful drawing completion
if ( drawingsState === 2 ) {
const distance = Math.hypot(Math.max(destination.x, origin.x) - preview.x,
Math.max(destination.y, origin.y) - preview.y);
const minDistance = distance >= (canvas.dimensions.size / 8);
const completePolygon = preview.isPolygon && (preview.document.shape.points.length > 4);
// Create a completed drawing
if ( minDistance || completePolygon ) {
event.interactionData.clearPreviewContainer = false;
event.interactionData.drawingsState = 0;
const data = preview.document.toObject(false);
// Create the object
preview._chain = false;
const cls = getDocumentClass("Drawing");
const createData = this.constructor.placeableClass.normalizeShape(data);
cls.create(createData, {parent: canvas.scene}).then(d => {
const o = d.object;
o._creating = true;
if ( game.activeTool !== "freehand" ) o.control({isNew: true});
}).finally(() => this.clearPreviewContainer());
}
}
// In-progress polygon
if ( (drawingsState === 1) && preview.isPolygon ) {
event.preventDefault();
if ( preview._chain ) return;
return this._onClickLeft(event);
}
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftCancel(event) {
const preview = this.preview.children?.[0] || null;
if ( preview?._chain ) {
preview._removePoint();
preview.refresh();
if ( preview.document.shape.points.length ) return event.preventDefault();
}
event.interactionData.drawingsState = 0;
super._onDragLeftCancel(event);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onClickRight(event) {
const preview = this.preview.children?.[0] || null;
if ( preview ) return canvas.mouseInteractionManager._dragRight = false;
super._onClickRight(event);
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get gridPrecision() {
// eslint-disable-next-line no-unused-expressions
super.gridPrecision;
if ( canvas.grid.type === CONST.GRID_TYPES.GRIDLESS ) return 0;
return canvas.dimensions.size >= 128 ? 16 : 8;
}
}
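/**
 * A usage sketch, assuming a ready canvas where this layer is available as canvas.drawings;
 * the point coordinates are illustrative.
 * @example Working with the DrawingsLayer
 * ```js
 * // Open the sheet which edits the stored default Drawing configuration
 * canvas.drawings.configureDefault();
 * // Snap an arbitrary point using the layer's snapping behavior
 * const snapped = canvas.drawings.getSnappedPoint({x: 1234, y: 987});
 * ```
 */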

View File

@@ -0,0 +1,175 @@
/**
* The Lighting Layer which contains ambient light sources as part of the CanvasEffectsGroup.
* @category - Canvas
*/
class LightingLayer extends PlaceablesLayer {
/** @inheritdoc */
static documentName = "AmbientLight";
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "lighting",
rotatableObjects: true,
zIndex: 900
});
}
/**
* Darkness change event handler function.
* @type {_onDarknessChange}
*/
#onDarknessChange;
/* -------------------------------------------- */
/** @inheritdoc */
get hookName() {
return LightingLayer.name;
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
await super._draw(options);
this.#onDarknessChange = this._onDarknessChange.bind(this);
canvas.environment.addEventListener("darknessChange", this.#onDarknessChange);
}
/* -------------------------------------------- */
/** @inheritDoc */
async _tearDown(options) {
canvas.environment.removeEventListener("darknessChange", this.#onDarknessChange);
this.#onDarknessChange = undefined;
return super._tearDown(options);
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/**
* Refresh the fields of all the ambient lights on this scene.
*/
refreshFields() {
if ( !this.active ) return;
for ( const ambientLight of this.placeables ) {
ambientLight.renderFlags.set({refreshField: true});
}
}
/* -------------------------------------------- */
/** @override */
_activate() {
super._activate();
for ( const p of this.placeables ) p.renderFlags.set({refreshField: true});
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/** @inheritDoc */
_canDragLeftStart(user, event) {
// Prevent creating a new light if currently previewing one.
if ( this.preview.children.length ) {
ui.notifications.warn("CONTROLS.ObjectConfigured", { localize: true });
return false;
}
return super._canDragLeftStart(user, event);
}
/* -------------------------------------------- */
/** @override */
_onDragLeftStart(event) {
super._onDragLeftStart(event);
const interaction = event.interactionData;
// Snap the origin to the grid
if ( !event.shiftKey ) interaction.origin = this.getSnappedPoint(interaction.origin);
// Create a pending AmbientLightDocument
const cls = getDocumentClass("AmbientLight");
const doc = new cls(interaction.origin, {parent: canvas.scene});
// Create the preview AmbientLight object
const preview = new this.constructor.placeableClass(doc);
// Updating interaction data
interaction.preview = this.preview.addChild(preview);
interaction.lightsState = 1;
// Prepare to draw the preview
preview.draw();
}
/* -------------------------------------------- */
/** @override */
_onDragLeftMove(event) {
const {destination, lightsState, preview, origin} = event.interactionData;
if ( lightsState === 0 ) return;
// Update the light radius
const radius = Math.hypot(destination.x - origin.x, destination.y - origin.y);
// Update the preview object data
preview.document.config.dim = radius * (canvas.dimensions.distance / canvas.dimensions.size);
preview.document.config.bright = preview.document.config.dim / 2;
// Refresh the layer display
preview.initializeLightSource();
preview.renderFlags.set({refreshState: true});
// Confirm the creation state
event.interactionData.lightsState = 2;
}
/* -------------------------------------------- */
/** @override */
_onDragLeftCancel(event) {
super._onDragLeftCancel(event);
canvas.effects.refreshLighting();
event.interactionData.lightsState = 0;
}
/* -------------------------------------------- */
/** @override */
_onMouseWheel(event) {
// Identify the hovered light source
const light = this.hover;
if ( !light || light.isPreview || (light.document.config.angle === 360) ) return;
// Determine the incremental angle of rotation from event data
const snap = event.shiftKey ? 15 : 3;
const delta = snap * Math.sign(event.delta);
return light.rotate(light.document.rotation + delta, snap);
}
/* -------------------------------------------- */
/**
* Actions to take when the darkness level of the Scene is changed
* @param {PIXI.FederatedEvent} event
* @internal
*/
_onDarknessChange(event) {
const {darknessLevel, priorDarknessLevel} = event.environmentData;
for ( const light of this.placeables ) {
const {min, max} = light.document.config.darkness;
if ( darknessLevel.between(min, max) === priorDarknessLevel.between(min, max) ) continue;
light.initializeLightSource();
if ( this.active ) light.renderFlags.set({refreshState: true});
}
}
}
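/**
 * A usage sketch, assuming a ready canvas where this layer is available as canvas.lighting;
 * the event handler is illustrative.
 * @example Refreshing light fields and reacting to darkness changes
 * ```js
 * // Re-render the field-of-effect graphics of all AmbientLight placeables while the layer is active
 * canvas.lighting.refreshFields();
 * // Other code may subscribe to the same "darknessChange" event this layer listens to
 * canvas.environment.addEventListener("darknessChange", event => {
 *   const {darknessLevel, priorDarknessLevel} = event.environmentData;
 *   console.log(`Darkness changed from ${priorDarknessLevel} to ${darknessLevel}`);
 * });
 * ```
 */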

View File

@@ -0,0 +1,213 @@
/**
* The Notes Layer which contains Note canvas objects.
* @category - Canvas
*/
class NotesLayer extends PlaceablesLayer {
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "notes",
zIndex: 800
});
}
/** @inheritdoc */
static documentName = "Note";
/**
* The named core setting which tracks the toggled visibility state of map notes
* @type {string}
*/
static TOGGLE_SETTING = "notesDisplayToggle";
/* -------------------------------------------- */
/** @inheritdoc */
get hookName() {
return NotesLayer.name;
}
/* -------------------------------------------- */
/** @override */
interactiveChildren = game.settings.get("core", this.constructor.TOGGLE_SETTING);
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @override */
_deactivate() {
super._deactivate();
const isToggled = game.settings.get("core", this.constructor.TOGGLE_SETTING);
this.objects.visible = this.interactiveChildren = isToggled;
}
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
await super._draw(options);
const isToggled = game.settings.get("core", this.constructor.TOGGLE_SETTING);
this.objects.visible ||= isToggled;
}
/* -------------------------------------------- */
/**
* Register game settings used by the NotesLayer
*/
static registerSettings() {
game.settings.register("core", this.TOGGLE_SETTING, {
name: "Map Note Toggle",
scope: "client",
config: false,
type: new foundry.data.fields.BooleanField({initial: false}),
onChange: value => {
if ( !canvas.ready ) return;
const layer = canvas.notes;
layer.objects.visible = layer.interactiveChildren = layer.active || value;
}
});
}
/* -------------------------------------------- */
/**
* Visually indicate in the Scene Controls that there are visible map notes present in the Scene.
*/
hintMapNotes() {
const hasVisibleNotes = this.placeables.some(n => n.visible);
const i = document.querySelector(".scene-control[data-control='notes'] i");
i.classList.toggle("fa-solid", !hasVisibleNotes);
i.classList.toggle("fa-duotone", hasVisibleNotes);
i.classList.toggle("has-notes", hasVisibleNotes);
}
/* -------------------------------------------- */
/**
* Pan to a given note on the layer.
* @param {Note} note The note to pan to.
* @param {object} [options] Options which modify the pan operation.
* @param {number} [options.scale=1.5] The resulting zoom level.
* @param {number} [options.duration=250] The speed of the pan animation in milliseconds.
* @returns {Promise<void>} A Promise which resolves once the pan animation has concluded.
*/
panToNote(note, {scale=1.5, duration=250}={}) {
if ( !note ) return Promise.resolve();
if ( note.visible && !this.active ) this.activate();
return canvas.animatePan({x: note.x, y: note.y, scale, duration}).then(() => {
if ( this.hover ) this.hover._onHoverOut(new Event("pointerout"));
note._onHoverIn(new Event("pointerover"), {hoverOutOthers: true});
});
}
/* -------------------------------------------- */
/* Event Handlers */
/* -------------------------------------------- */
/** @inheritdoc */
async _onClickLeft(event) {
if ( game.activeTool !== "journal" ) return super._onClickLeft(event);
// Capture the click coordinates
const origin = event.getLocalPosition(canvas.stage);
const {x, y} = canvas.grid.getCenterPoint(origin);
// Render the note creation dialog
const folders = game.journal.folders.filter(f => f.displayed);
const title = game.i18n.localize("NOTE.Create");
const html = await renderTemplate("templates/sidebar/document-create.html", {
folders,
name: game.i18n.localize("NOTE.Unknown"),
hasFolders: folders.length >= 1,
hasTypes: false,
content: `
<div class="form-group">
<label style="display: flex;">
<input type="checkbox" name="journal">
${game.i18n.localize("NOTE.CreateJournal")}
</label>
</div>
`
});
let response;
try {
response = await Dialog.prompt({
title,
content: html,
label: game.i18n.localize("NOTE.Create"),
callback: html => {
const form = html.querySelector("form");
const fd = new FormDataExtended(form).object;
if ( !fd.folder ) delete fd.folder;
if ( fd.journal ) return JournalEntry.implementation.create(fd, {renderSheet: true});
return fd.name;
},
render: html => {
const form = html.querySelector("form");
const folder = form.elements.folder;
if ( !folder ) return;
folder.disabled = true;
form.elements.journal.addEventListener("change", event => {
folder.disabled = !event.currentTarget.checked;
});
},
options: {jQuery: false}
});
} catch(err) {
return;
}
// Create a note for a created JournalEntry
const noteData = {x, y};
if ( response.id ) {
noteData.entryId = response.id;
const cls = getDocumentClass("Note");
return cls.create(noteData, {parent: canvas.scene});
}
// Create a preview un-linked Note
else {
noteData.text = response;
return this._createPreview(noteData, {top: event.clientY - 20, left: event.clientX + 40});
}
}
/* -------------------------------------------- */
/**
* Handle JournalEntry document drop data
* @param {DragEvent} event The drag drop event
* @param {object} data The dropped data transfer data
* @protected
*/
async _onDropData(event, data) {
let entry;
let origin;
if ( (data.x === undefined) || (data.y === undefined) ) {
const coords = this._canvasCoordinatesFromDrop(event, {center: false});
if ( !coords ) return false;
origin = {x: coords[0], y: coords[1]};
} else {
origin = {x: data.x, y: data.y};
}
if ( !event.shiftKey ) origin = this.getSnappedPoint(origin);
if ( !canvas.dimensions.rect.contains(origin.x, origin.y) ) return false;
const noteData = {x: origin.x, y: origin.y};
if ( data.type === "JournalEntry" ) entry = await JournalEntry.implementation.fromDropData(data);
if ( data.type === "JournalEntryPage" ) {
const page = await JournalEntryPage.implementation.fromDropData(data);
entry = page.parent;
noteData.pageId = page.id;
}
if ( entry?.compendium ) {
const journalData = game.journal.fromCompendium(entry);
entry = await JournalEntry.implementation.create(journalData);
}
noteData.entryId = entry?.id;
return this._createPreview(noteData, {top: event.clientY - 20, left: event.clientX + 40});
}
}
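/**
 * A usage sketch, assuming a ready canvas where this layer is available as canvas.notes and
 * at least one Note exists on the Scene; the scale and duration are illustrative.
 * @example Panning to the first Note of the Scene
 * ```js
 * const note = canvas.notes.placeables[0];
 * if ( note ) await canvas.notes.panToNote(note, {scale: 2, duration: 500});
 * ```
 */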

View File

@@ -0,0 +1,488 @@
/**
* The Regions Container.
* @category - Canvas
*/
class RegionLayer extends PlaceablesLayer {
/** @inheritDoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "regions",
controllableObjects: true,
confirmDeleteKey: true,
quadtree: false,
zIndex: 100,
zIndexActive: 600
});
}
/* -------------------------------------------- */
/** @inheritDoc */
static documentName = "Region";
/* -------------------------------------------- */
/**
* The method to sort the Regions.
* @type {Function}
*/
static #sortRegions = function() {
for ( let i = 0; i < this.children.length; i++ ) {
this.children[i]._lastSortedIndex = i;
}
this.children.sort((a, b) => (a.zIndex - b.zIndex)
|| (a.top - b.top)
|| (a.bottom - b.bottom)
|| (a._lastSortedIndex - b._lastSortedIndex));
this.sortDirty = false;
};
/* -------------------------------------------- */
/** @inheritDoc */
get hookName() {
return RegionLayer.name;
}
/* -------------------------------------------- */
/**
* The RegionLegend application of this RegionLayer.
* @type {foundry.applications.ui.RegionLegend}
*/
get legend() {
return this.#legend ??= new foundry.applications.ui.RegionLegend();
}
#legend;
/* -------------------------------------------- */
/**
* The graphics used to draw the highlighted shape.
* @type {PIXI.Graphics}
*/
#highlight;
/* -------------------------------------------- */
/**
* The graphics used to draw the preview of the shape that is drawn.
* @type {PIXI.Graphics}
*/
#preview;
/* -------------------------------------------- */
/**
* Draw shapes as holes?
* @type {boolean}
* @internal
*/
_holeMode = false;
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @inheritDoc */
_activate() {
super._activate();
// noinspection ES6MissingAwait
this.legend.render({force: true});
}
/* -------------------------------------------- */
/** @inheritDoc */
_deactivate() {
super._deactivate();
this.objects.visible = true;
// noinspection ES6MissingAwait
this.legend.close({animate: false});
}
/* -------------------------------------------- */
/** @inheritDoc */
storeHistory(type, data) {
super.storeHistory(type, type === "update" ? data.map(d => {
if ( "behaviors" in d ) {
d = foundry.utils.deepClone(d);
delete d.behaviors;
}
return d;
}) : data);
}
/* -------------------------------------------- */
/** @override */
copyObjects() {
return []; // Prevent copy & paste
}
/* -------------------------------------------- */
/** @override */
getSnappedPoint(point) {
const M = CONST.GRID_SNAPPING_MODES;
const size = canvas.dimensions.size;
return canvas.grid.getSnappedPoint(point, canvas.forceSnapVertices ? {mode: M.VERTEX} : {
mode: M.CENTER | M.VERTEX | M.CORNER | M.SIDE_MIDPOINT,
resolution: size >= 128 ? 8 : (size >= 64 ? 4 : 2)
});
}
/* -------------------------------------------- */
/** @override */
getZIndex() {
return this.active ? this.options.zIndexActive : this.options.zIndex;
}
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
await super._draw(options);
this.objects.sortChildren = RegionLayer.#sortRegions;
this.objects.visible = true;
this.#highlight = this.addChild(new PIXI.Graphics());
this.#highlight.eventMode = "none";
this.#highlight.visible = false;
this.#preview = this.addChild(new PIXI.Graphics());
this.#preview.eventMode = "none";
this.#preview.visible = false;
this.filters = [VisionMaskFilter.create()];
this.filterArea = canvas.app.screen;
}
/* -------------------------------------------- */
/**
* Highlight the shape or clear the highlight.
* @param {foundry.data.BaseShapeData|null} data The shape to highlight, or null to clear the highlight
* @internal
*/
_highlightShape(data) {
this.#highlight.clear();
this.#highlight.visible = false;
if ( !data ) return;
this.#highlight.visible = true;
this.#highlight.lineStyle({
width: CONFIG.Canvas.objectBorderThickness,
color: 0x000000,
join: PIXI.LINE_JOIN.ROUND,
shader: new PIXI.smooth.DashLineShader()
});
const shape = foundry.canvas.regions.RegionShape.create(data);
shape._drawShape(this.#highlight);
}
/* -------------------------------------------- */
/**
* Refresh the preview shape.
* @param {PIXI.FederatedEvent} event
*/
#refreshPreview(event) {
this.#preview.clear();
this.#preview.lineStyle({
width: CONFIG.Canvas.objectBorderThickness,
color: 0x000000,
join: PIXI.LINE_JOIN.ROUND,
cap: PIXI.LINE_CAP.ROUND,
alignment: 0.75
});
this.#preview.beginFill(event.interactionData.drawingColor, 0.5);
this.#drawPreviewShape(event);
this.#preview.endFill();
this.#preview.lineStyle({
width: CONFIG.Canvas.objectBorderThickness / 2,
color: CONFIG.Canvas.dispositionColors.CONTROLLED,
join: PIXI.LINE_JOIN.ROUND,
cap: PIXI.LINE_CAP.ROUND,
alignment: 1
});
this.#drawPreviewShape(event);
}
/* -------------------------------------------- */
/**
* Draw the preview shape.
* @param {PIXI.FederatedEvent} event
*/
#drawPreviewShape(event) {
const data = this.#createShapeData(event);
if ( !data ) return;
switch ( data.type ) {
case "rectangle": this.#preview.drawRect(data.x, data.y, data.width, data.height); break;
case "circle": this.#preview.drawCircle(data.x, data.y, data.radius); break;
case "ellipse": this.#preview.drawEllipse(data.x, data.y, data.radiusX, data.radiusY); break;
case "polygon":
const polygon = new PIXI.Polygon(data.points);
if ( !polygon.isPositive ) polygon.reverseOrientation();
this.#preview.drawPath(polygon.points);
break;
}
}
/* -------------------------------------------- */
/**
* Create the shape data.
* @param {PIXI.FederatedEvent} event
* @returns {object|void}
*/
#createShapeData(event) {
let data;
switch ( event.interactionData.drawingTool ) {
case "rectangle": data = this.#createRectangleData(event); break;
case "ellipse": data = this.#createCircleOrEllipseData(event); break;
case "polygon": data = this.#createPolygonData(event); break;
}
if ( data ) {
data.elevation = {
bottom: Number.isFinite(this.legend.elevation.bottom) ? this.legend.elevation.bottom : null,
top: Number.isFinite(this.legend.elevation.top) ? this.legend.elevation.top : null
};
if ( this._holeMode ) data.hole = true;
return data;
}
}
/* -------------------------------------------- */
/**
* Create the rectangle shape data.
* @param {PIXI.FederatedEvent} event
* @returns {object|void}
*/
#createRectangleData(event) {
const {origin, destination} = event.interactionData;
let dx = Math.abs(destination.x - origin.x);
let dy = Math.abs(destination.y - origin.y);
if ( event.altKey ) dx = dy = Math.min(dx, dy);
let x = origin.x;
let y = origin.y;
if ( event.ctrlKey || event.metaKey ) {
x -= dx;
y -= dy;
dx *= 2;
dy *= 2;
} else {
if ( origin.x > destination.x ) x -= dx;
if ( origin.y > destination.y ) y -= dy;
}
if ( (dx === 0) || (dy === 0) ) return;
return {type: "rectangle", x, y, width: dx, height: dy, rotation: 0};
}
/* -------------------------------------------- */
/**
* Create the circle or ellipse shape data.
* @param {PIXI.FederatedEvent} event
* @returns {object|void}
*/
#createCircleOrEllipseData(event) {
const {origin, destination} = event.interactionData;
let dx = Math.abs(destination.x - origin.x);
let dy = Math.abs(destination.y - origin.y);
if ( event.altKey ) dx = dy = Math.min(dx, dy);
let x = origin.x;
let y = origin.y;
if ( !(event.ctrlKey || event.metaKey) ) {
if ( origin.x > destination.x ) x -= dx;
if ( origin.y > destination.y ) y -= dy;
dx /= 2;
dy /= 2;
x += dx;
y += dy;
}
if ( (dx === 0) || (dy === 0) ) return;
return event.altKey
? {type: "circle", x, y, radius: dx}
: {type: "ellipse", x, y, radiusX: dx, radiusY: dy, rotation: 0};
}
/* -------------------------------------------- */
/**
* Create the polygon shape data.
* @param {PIXI.FederatedEvent} event
* @returns {object|void}
*/
#createPolygonData(event) {
let {destination, points, complete} = event.interactionData;
if ( !complete ) points = [...points, destination.x, destination.y];
else if ( points.length < 6 ) return;
return {type: "polygon", points};
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/** @inheritDoc */
_onClickLeft(event) {
const interaction = event.interactionData;
// Continue polygon point placement
if ( interaction.drawingTool === "polygon" ) {
const {destination, points} = interaction;
const point = !event.shiftKey ? this.getSnappedPoint(destination) : destination;
// Clicking on the first point closes the shape
if ( (point.x === points.at(0)) && (point.y === points.at(1)) ) {
interaction.complete = true;
}
// Don't add the point if it is equal to the last one
else if ( (point.x !== points.at(-2)) || (point.y !== points.at(-1)) ) {
interaction.points.push(point.x, point.y);
this.#refreshPreview(event);
}
return;
}
// If one of the drawing tools is selected, prevent left-click-to-release
if ( ["rectangle", "ellipse", "polygon"].includes(game.activeTool) ) return;
// Standard left-click handling
super._onClickLeft(event);
}
/* -------------------------------------------- */
/** @inheritDoc */
_onClickLeft2(event) {
const interaction = event.interactionData;
// Conclude polygon drawing with a double-click
if ( interaction.drawingTool === "polygon" ) {
interaction.complete = true;
return;
}
// Standard double-click handling
super._onClickLeft2(event);
}
/* -------------------------------------------- */
/** @inheritDoc */
_canDragLeftStart(user, event) {
if ( !super._canDragLeftStart(user, event) ) return false;
if ( !["rectangle", "ellipse", "polygon"].includes(game.activeTool) ) return false;
if ( this.controlled.length > 1 ) {
ui.notifications.error("REGION.NOTIFICATIONS.DrawingMultipleRegionsControlled", {localize: true});
return false;
}
if ( this.controlled.at(0)?.document.locked ) {
ui.notifications.warn(game.i18n.format("CONTROLS.ObjectIsLocked", {
type: game.i18n.localize(RegionDocument.metadata.label)}));
return false;
}
return true;
}
/* -------------------------------------------- */
/** @override */
_onDragLeftStart(event) {
const interaction = event.interactionData;
if ( !event.shiftKey ) interaction.origin = this.getSnappedPoint(interaction.origin);
// Set drawing tool
interaction.drawingTool = game.activeTool;
interaction.drawingRegion = this.controlled.at(0);
interaction.drawingColor = interaction.drawingRegion?.document.color
?? Color.from(RegionDocument.schema.fields.color.getInitialValue({}));
// Initialize the polygon points with the origin
if ( interaction.drawingTool === "polygon" ) {
const point = interaction.origin;
interaction.points = [point.x, point.y];
}
this.#refreshPreview(event);
this.#preview.visible = true;
}
/* -------------------------------------------- */
/** @override */
_onDragLeftMove(event) {
const interaction = event.interactionData;
if ( !interaction.drawingTool ) return;
if ( !event.shiftKey ) interaction.destination = this.getSnappedPoint(interaction.destination);
this.#refreshPreview(event);
}
/* -------------------------------------------- */
/** @override */
_onDragLeftDrop(event) {
const interaction = event.interactionData;
if ( !interaction.drawingTool ) return;
if ( !event.shiftKey ) interaction.destination = this.getSnappedPoint(interaction.destination);
// In-progress polygon drawing
if ( (interaction.drawingTool === "polygon") && (interaction.complete !== true) ) {
event.preventDefault();
return;
}
// Clear preview and refresh Regions
this.#preview.clear();
this.#preview.visible = false;
// Create the shape from the preview
const shape = this.#createShapeData(event);
if ( !shape ) return;
// Add the shape to controlled Region or create a new Region if none is controlled
const region = interaction.drawingRegion;
if ( region ) {
if ( !region.document.locked ) region.document.update({shapes: [...region.document.shapes, shape]});
} else RegionDocument.implementation.create({
name: RegionDocument.implementation.defaultName({parent: canvas.scene}),
color: interaction.drawingColor,
shapes: [shape]
}, {parent: canvas.scene, renderSheet: true}).then(r => r.object.control({releaseOthers: true}));
}
/* -------------------------------------------- */
/** @override */
_onDragLeftCancel(event) {
const interaction = event.interactionData;
if ( !interaction.drawingTool ) return;
// Remove point from in-progress polygon drawing
if ( (interaction.drawingTool === "polygon") && (interaction.complete !== true) ) {
interaction.points.splice(-2, 2);
if ( interaction.points.length ) {
event.preventDefault();
this.#refreshPreview(event);
return;
}
}
// Clear preview and refresh Regions
this.#preview.clear();
this.#preview.visible = false;
}
/* -------------------------------------------- */
/** @inheritDoc */
_onClickRight(event) {
const interaction = event.interactionData;
if ( interaction.drawingTool ) return canvas.mouseInteractionManager._dragRight = false;
super._onClickRight(event);
}
}
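/**
 * A sketch of creating a Region with a single rectangular shape, mirroring the shape data this
 * layer builds while drawing; the name, coordinates, and dimensions are illustrative.
 * @example Creating a Region programmatically
 * ```js
 * const region = await RegionDocument.implementation.create({
 *   name: "Example Region",
 *   shapes: [{type: "rectangle", x: 1000, y: 1000, width: 600, height: 400, rotation: 0}]
 * }, {parent: canvas.scene});
 * region.object.control({releaseOthers: true});
 * ```
 */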

View File

@@ -0,0 +1,454 @@
/**
* @typedef {Object} AmbientSoundPlaybackConfig
* @property {Sound} sound The Sound node which should be controlled for playback
* @property {foundry.canvas.sources.PointSoundSource} source The SoundSource which defines the area of effect
* for the sound
* @property {AmbientSound} object An AmbientSound object responsible for the sound, or undefined
* @property {Point} listener The coordinates of the closest listener or undefined if there is none
* @property {number} distance The minimum distance between a listener and the AmbientSound origin
* @property {boolean} muffled Is the closest listener muffled
* @property {boolean} walls Is playback constrained or muffled by walls?
* @property {number} volume The final volume at which the Sound should be played
*/
/**
* This Canvas Layer provides a container for AmbientSound objects.
* @category - Canvas
*/
class SoundsLayer extends PlaceablesLayer {
/**
* Track whether to actively preview ambient sounds with mouse cursor movements
* @type {boolean}
*/
livePreview = false;
/**
* A mapping of ambient audio sources which are active within the rendered Scene
* @type {Collection<string,foundry.canvas.sources.PointSoundSource>}
*/
sources = new foundry.utils.Collection();
/**
* Darkness change event handler function.
* @type {_onDarknessChange}
*/
#onDarknessChange;
/* -------------------------------------------- */
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "sounds",
zIndex: 900
});
}
/** @inheritdoc */
static documentName = "AmbientSound";
/* -------------------------------------------- */
/** @inheritdoc */
get hookName() {
return SoundsLayer.name;
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
await super._draw(options);
this.#onDarknessChange = this._onDarknessChange.bind(this);
canvas.environment.addEventListener("darknessChange", this.#onDarknessChange);
}
/* -------------------------------------------- */
/** @inheritDoc */
async _tearDown(options) {
this.stopAll();
canvas.environment.removeEventListener("darknessChange", this.#onDarknessChange);
this.#onDarknessChange = undefined;
return super._tearDown(options);
}
/* -------------------------------------------- */
/** @override */
_activate() {
super._activate();
for ( const p of this.placeables ) p.renderFlags.set({refreshField: true});
}
/* -------------------------------------------- */
/**
* Initialize all AmbientSound sources which are present on this layer
*/
initializeSources() {
for ( let sound of this.placeables ) {
sound.initializeSoundSource();
}
for ( let sound of this.preview.children ) {
sound.initializeSoundSource();
}
}
/* -------------------------------------------- */
/**
* Update all AmbientSound effects in the layer by toggling their playback status.
* Sync audio for the positions of tokens which are capable of hearing.
* @param {object} [options={}] Additional options forwarded to AmbientSound synchronization
*/
refresh(options={}) {
if ( !this.placeables.length ) return;
for ( const sound of this.placeables ) sound.source.refresh();
if ( game.audio.locked ) {
return game.audio.pending.push(() => this.refresh(options));
}
const listeners = this.getListenerPositions();
this._syncPositions(listeners, options);
}
/* -------------------------------------------- */
/**
* Preview ambient audio for a given mouse cursor position
* @param {Point} position The cursor position to preview
*/
previewSound(position) {
if ( !this.placeables.length || game.audio.locked ) return;
return this._syncPositions([position], {fade: 50});
}
/* -------------------------------------------- */
/**
* Terminate playback of all ambient audio sources
*/
stopAll() {
this.placeables.forEach(s => s.sync(false));
}
/* -------------------------------------------- */
/**
* Get an array of listener positions for Tokens which are able to hear environmental sound.
* @returns {Point[]}
*/
getListenerPositions() {
const listeners = canvas.tokens.controlled.map(token => token.center);
if ( !listeners.length && !game.user.isGM ) {
for ( const token of canvas.tokens.placeables ) {
if ( token.actor?.isOwner && token.isVisible ) listeners.push(token.center);
}
}
return listeners;
}
/* -------------------------------------------- */
/**
* Sync the playing state and volume of all AmbientSound objects based on the position of listener points
* @param {Point[]} listeners Locations of listeners which have the capability to hear
* @param {object} [options={}] Additional options forwarded to AmbientSound synchronization
* @protected
*/
_syncPositions(listeners, options) {
if ( !this.placeables.length || game.audio.locked ) return;
/** @type {Record<string, Partial<AmbientSoundPlaybackConfig>>} */
const paths = {};
for ( const /** @type {AmbientSound} */ object of this.placeables ) {
const {path, easing, volume, walls} = object.document;
if ( !path ) continue;
const {sound, source} = object;
// Track a singleton record per unique audio path
paths[path] ||= {sound, source, object, volume: 0};
const config = paths[path];
if ( !config.sound && sound ) Object.assign(config, {sound, source, object}); // First defined Sound
// Identify the closest listener to each sound source
if ( !object.isAudible || !source.active ) continue;
for ( let l of listeners ) {
const v = volume * source.getVolumeMultiplier(l, {easing});
if ( v > config.volume ) {
Object.assign(config, {source, object, listener: l, volume: v, walls});
config.sound ??= sound; // We might already have a defined Sound
}
}
}
// Compute the effective volume for each sound path
for ( const config of Object.values(paths) ) {
this._configurePlayback(config);
config.object.sync(config.volume > 0, config.volume, {...options, muffled: config.muffled});
}
}
/* -------------------------------------------- */
/**
* Configure playback by assigning the muffled state and final playback volume for the sound.
* This method should mutate the config object by assigning the volume and muffled properties.
* @param {AmbientSoundPlaybackConfig} config
* @protected
*/
_configurePlayback(config) {
const {source, walls} = config;
// Inaudible sources
if ( !config.listener ) {
config.volume = 0;
return;
}
// Muffled by walls
if ( !walls ) {
if ( config.listener.equals(source) ) return false; // GM users listening to the source
const polygonCls = CONFIG.Canvas.polygonBackends.sound;
const x = polygonCls.testCollision(config.listener, source, {mode: "first", type: "sound"});
config.muffled = x && (x._distance < 1); // Collided before reaching the source
}
else config.muffled = false;
}
/* -------------------------------------------- */
/**
* Actions to take when the darkness level of the Scene is changed
* @param {PIXI.FederatedEvent} event
* @internal
*/
_onDarknessChange(event) {
const {darknessLevel, priorDarknessLevel} = event.environmentData;
for ( const sound of this.placeables ) {
const {min, max} = sound.document.darkness;
if ( darknessLevel.between(min, max) === priorDarknessLevel.between(min, max) ) continue;
sound.initializeSoundSource();
if ( this.active ) sound.renderFlags.set({refreshState: true});
}
}
/* -------------------------------------------- */
/**
* Play a one-shot Sound originating from a predefined point on the canvas.
* The sound plays locally for the current client only.
* To play a sound for all connected clients use SoundsLayer#emitAtPosition.
*
* @param {string} src The sound source path to play
* @param {Point} origin The canvas coordinates from which the sound originates
* @param {number} radius The radius of effect in distance units
* @param {object} options Additional options which configure playback
* @param {number} [options.volume=1.0] The maximum volume at which the effect should be played
* @param {boolean} [options.easing=true] Should volume be attenuated by distance?
* @param {boolean} [options.walls=true] Should the sound be constrained by walls?
* @param {boolean} [options.gmAlways=true] Should the sound always be played for GM users regardless
* of actively controlled tokens?
* @param {AmbientSoundEffect} [options.baseEffect] A base sound effect to apply to playback
* @param {AmbientSoundEffect} [options.muffledEffect] A muffled sound effect to apply to playback, a sound may
* only be muffled if it is not constrained by walls
* @param {Partial<PointSourceData>} [options.sourceData] Additional data passed to the SoundSource constructor
* @param {SoundPlaybackOptions} [options.playbackOptions] Additional options passed to Sound#play
* @returns {Promise<foundry.audio.Sound|null>} A Promise which resolves to the played Sound, or null
*
* @example Play the sound of a trap springing
* ```js
* const src = "modules/my-module/sounds/spring-trap.ogg";
* const origin = {x: 5200, y: 3700}; // The origin point for the sound
* const radius = 30; // Audible in a 30-foot radius
* await canvas.sounds.playAtPosition(src, origin, radius);
* ```
*
* @example A Token casts a spell
* ```js
* const src = "modules/my-module/sounds/spells-sprite.ogg";
* const origin = token.center; // The origin point for the sound
* const radius = 60; // Audible in a 60-foot radius
* await canvas.sounds.playAtPosition(src, origin, radius, {
* walls: false, // Not constrained by walls with a lowpass muffled effect
* muffledEffect: {type: "lowpass", intensity: 6},
* sourceData: {
* angle: 120, // Sound emitted at a limited angle
* rotation: 270 // Configure the direction of sound emission
*   },
* playbackOptions: {
* loopStart: 12, // Audio sprite timing
* loopEnd: 16,
* fade: 300, // Fade-in 300ms
* onended: () => console.log("Do something after the spell sound has played")
* }
* });
* ```
*/
async playAtPosition(src, origin, radius, {volume=1, easing=true, walls=true, gmAlways=true,
baseEffect, muffledEffect, sourceData, playbackOptions}={}) {
// Construct a Sound and corresponding SoundSource
const sound = new foundry.audio.Sound(src, {context: game.audio.environment});
const source = new CONFIG.Canvas.soundSourceClass({object: null});
source.initialize({
x: origin.x,
y: origin.y,
radius: canvas.dimensions.distancePixels * radius,
walls,
...sourceData
});
/** @type {Partial<AmbientSoundPlaybackConfig>} */
const config = {sound, source, listener: undefined, volume: 0, walls};
// Identify the closest listener position
const listeners = (gmAlways && game.user.isGM) ? [origin] : this.getListenerPositions();
for ( const l of listeners ) {
const v = volume * source.getVolumeMultiplier(l, {easing});
if ( v > config.volume ) Object.assign(config, {listener: l, volume: v});
}
// Configure playback volume and muffled state
this._configurePlayback(config);
if ( !config.volume ) return null;
// Load the Sound and apply special effects
await sound.load();
const sfx = CONFIG.soundEffects;
let effect;
if ( config.muffled && (muffledEffect?.type in sfx) ) {
const muffledCfg = sfx[muffledEffect.type];
effect = new muffledCfg.effectClass(sound.context, muffledEffect);
}
if ( !effect && (baseEffect?.type in sfx) ) {
const baseCfg = sfx[baseEffect.type];
effect = new baseCfg.effectClass(sound.context, baseEffect);
}
if ( effect ) sound.effects.push(effect);
// Initiate sound playback
await sound.play({...playbackOptions, loop: false, volume: config.volume});
return sound;
}
/* -------------------------------------------- */
/**
* Request playback of a positional Sound from other connected clients, while also playing it locally for this client.
* @param {...*} args Arguments passed to SoundsLayer#playAtPosition
* @returns {Promise<void>} A Promise which resolves once playback for the initiating client has completed
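* @example Broadcast a positional sound to all connected clients (a usage sketch; the path and coordinates are illustrative)
* ```js
* const src = "modules/my-module/sounds/alarm.ogg"; // An assumed example path
* await canvas.sounds.emitAtPosition(src, {x: 1200, y: 900}, 30, {volume: 0.8});
* ```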
*/
async emitAtPosition(...args) {
game.socket.emit("playAudioPosition", args);
return this.playAtPosition(...args);
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/**
* Handle mouse cursor movements which may cause ambient audio previews to occur
*/
_onMouseMove() {
if ( !this.livePreview ) return;
if ( canvas.tokens.active && canvas.tokens.controlled.length ) return;
this.previewSound(canvas.mousePosition);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftStart(event) {
super._onDragLeftStart(event);
const interaction = event.interactionData;
// Snap the origin to the grid
if ( !event.shiftKey ) interaction.origin = this.getSnappedPoint(interaction.origin);
// Create a pending AmbientSoundDocument
const cls = getDocumentClass("AmbientSound");
const doc = new cls({type: "l", ...interaction.origin}, {parent: canvas.scene});
// Create the preview AmbientSound object
const sound = new this.constructor.placeableClass(doc);
interaction.preview = this.preview.addChild(sound);
interaction.soundState = 1;
this.preview._creating = false;
sound.draw();
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftMove(event) {
const {destination, soundState, preview, origin} = event.interactionData;
if ( soundState === 0 ) return;
const radius = Math.hypot(destination.x - origin.x, destination.y - origin.y);
preview.document.updateSource({radius: radius / canvas.dimensions.distancePixels});
preview.initializeSoundSource();
preview.renderFlags.set({refreshState: true});
event.interactionData.soundState = 2;
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftDrop(event) {
// Snap the destination to the grid
const interaction = event.interactionData;
if ( !event.shiftKey ) interaction.destination = this.getSnappedPoint(interaction.destination);
const {soundState, destination, origin, preview} = interaction;
if ( soundState !== 2 ) return;
// Render the preview sheet for confirmation
const radius = Math.hypot(destination.x - origin.x, destination.y - origin.y);
if ( radius < (canvas.dimensions.size / 2) ) return;
preview.document.updateSource({radius: radius / canvas.dimensions.distancePixels});
preview.initializeSoundSource();
preview.renderFlags.set({refreshState: true});
preview.sheet.render(true);
this.preview._creating = true;
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftCancel(event) {
if ( this.preview._creating ) return;
return super._onDragLeftCancel(event);
}
/* -------------------------------------------- */
/**
* Handle PlaylistSound document drop data.
* @param {DragEvent} event The drag drop event
* @param {object} data The dropped transfer data.
*/
async _onDropData(event, data) {
const playlistSound = await PlaylistSound.implementation.fromDropData(data);
if ( !playlistSound ) return false;
let origin;
if ( (data.x === undefined) || (data.y === undefined) ) {
const coords = this._canvasCoordinatesFromDrop(event, {center: false});
if ( !coords ) return false;
origin = {x: coords[0], y: coords[1]};
} else {
origin = {x: data.x, y: data.y};
}
if ( !event.shiftKey ) origin = this.getSnappedPoint(origin);
if ( !canvas.dimensions.rect.contains(origin.x, origin.y) ) return false;
const soundData = {
path: playlistSound.path,
volume: playlistSound.volume,
x: origin.x,
y: origin.y,
radius: canvas.dimensions.distance * 2
};
return this._createPreview(soundData, {top: event.clientY - 20, left: event.clientX + 40});
}
}

View File

@@ -0,0 +1,157 @@
/**
* This Canvas Layer provides a container for MeasuredTemplate objects.
* @category - Canvas
*/
class TemplateLayer extends PlaceablesLayer {
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "templates",
rotatableObjects: true,
zIndex: 400
});
}
/** @inheritdoc */
static documentName = "MeasuredTemplate";
/* -------------------------------------------- */
/** @inheritdoc */
get hookName() {
return TemplateLayer.name;
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @inheritDoc */
_deactivate() {
super._deactivate();
this.objects.visible = true;
}
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
await super._draw(options);
this.objects.visible = true;
}
/* -------------------------------------------- */
/**
* Register game settings used by the TemplatesLayer
*/
static registerSettings() {
game.settings.register("core", "gridTemplates", {
name: "TEMPLATE.GridTemplatesSetting",
hint: "TEMPLATE.GridTemplatesSettingHint",
scope: "world",
config: true,
type: new foundry.data.fields.BooleanField({initial: false}),
onChange: () => {
if ( canvas.ready ) canvas.templates.draw();
}
});
game.settings.register("core", "coneTemplateType", {
name: "TEMPLATE.ConeTypeSetting",
hint: "TEMPLATE.ConeTypeSettingHint",
scope: "world",
config: true,
type: new foundry.data.fields.StringField({required: true, blank: false, initial: "round", choices: {
round: "TEMPLATE.ConeTypeRound",
flat: "TEMPLATE.ConeTypeFlat"
}}),
onChange: () => {
if ( canvas.ready ) canvas.templates.draw();
}
});
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftStart(event) {
super._onDragLeftStart(event);
const interaction = event.interactionData;
// Snap the origin to the grid
if ( !event.shiftKey ) interaction.origin = this.getSnappedPoint(interaction.origin);
// Create a pending MeasuredTemplateDocument
const tool = game.activeTool;
const previewData = {
user: game.user.id,
t: tool,
x: interaction.origin.x,
y: interaction.origin.y,
sort: Math.max(this.getMaxSort() + 1, 0),
distance: 1,
direction: 0,
fillColor: game.user.color || "#FF0000",
hidden: event.altKey
};
const defaults = CONFIG.MeasuredTemplate.defaults;
if ( tool === "cone" ) previewData.angle = defaults.angle;
else if ( tool === "ray" ) previewData.width = (defaults.width * canvas.dimensions.distance);
const cls = getDocumentClass("MeasuredTemplate");
const doc = new cls(previewData, {parent: canvas.scene});
// Create a preview MeasuredTemplate object
const template = new this.constructor.placeableClass(doc);
interaction.preview = this.preview.addChild(template);
template.draw();
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftMove(event) {
const interaction = event.interactionData;
// Snap the destination to the grid
if ( !event.shiftKey ) interaction.destination = this.getSnappedPoint(interaction.destination);
// Compute the ray
const {origin, destination, preview} = interaction;
const ray = new Ray(origin, destination);
let distance;
// Grid type
if ( game.settings.get("core", "gridTemplates") ) {
distance = canvas.grid.measurePath([origin, destination]).distance;
}
// Euclidean type
else {
const ratio = (canvas.dimensions.size / canvas.dimensions.distance);
distance = ray.distance / ratio;
}
// Update the preview object
preview.document.direction = Math.normalizeDegrees(Math.toDegrees(ray.angle));
preview.document.distance = distance;
preview.renderFlags.set({refreshShape: true});
}
/* -------------------------------------------- */
/** @inheritdoc */
_onMouseWheel(event) {
// Abort if there is no hovered template, or if the hovered template is only a preview
const template = this.hover;
if ( !template || template.isPreview ) return;
// Determine the incremental angle of rotation from event data
const snap = event.shiftKey ? 15 : 5;
const delta = snap * Math.sign(event.delta);
return template.rotate(template.document.direction + delta, snap);
}
}

View File

@@ -0,0 +1,254 @@
/**
* A PlaceablesLayer designed for rendering the visual Scene for a specific vertical cross-section.
* @category - Canvas
*/
class TilesLayer extends PlaceablesLayer {
/** @inheritdoc */
static documentName = "Tile";
/* -------------------------------------------- */
/* Layer Attributes */
/* -------------------------------------------- */
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "tiles",
zIndex: 300,
controllableObjects: true,
rotatableObjects: true
});
}
/* -------------------------------------------- */
/** @inheritdoc */
get hookName() {
return TilesLayer.name;
}
/* -------------------------------------------- */
/** @inheritdoc */
get hud() {
return canvas.hud.tile;
}
/* -------------------------------------------- */
/**
* An array of Tile objects which are rendered within the objects container
* @type {Tile[]}
*/
get tiles() {
return this.objects?.children || [];
}
/* -------------------------------------------- */
/** @override */
*controllableObjects() {
const foreground = ui.controls.control.foreground ?? false;
for ( const placeable of super.controllableObjects() ) {
const overhead = placeable.document.elevation >= placeable.document.parent.foregroundElevation;
if ( overhead === foreground ) yield placeable;
}
}
/* -------------------------------------------- */
/* Layer Methods */
/* -------------------------------------------- */
/** @inheritDoc */
getSnappedPoint(point) {
if ( canvas.forceSnapVertices ) return canvas.grid.getSnappedPoint(point, {mode: CONST.GRID_SNAPPING_MODES.VERTEX});
return super.getSnappedPoint(point);
}
/* -------------------------------------------- */
/** @inheritDoc */
async _tearDown(options) {
for ( const tile of this.tiles ) {
if ( tile.isVideo ) {
game.video.stop(tile.sourceElement);
}
}
return super._tearDown(options);
}
/* -------------------------------------------- */
/* Event Handlers */
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftStart(event) {
super._onDragLeftStart(event);
const interaction = event.interactionData;
// Snap the origin to the grid
if ( !event.shiftKey ) interaction.origin = this.getSnappedPoint(interaction.origin);
// Create the preview
const tile = this.constructor.placeableClass.createPreview(interaction.origin);
interaction.preview = this.preview.addChild(tile);
this.preview._creating = false;
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftMove(event) {
const interaction = event.interactionData;
// Snap the destination to the grid
if ( !event.shiftKey ) interaction.destination = this.getSnappedPoint(interaction.destination);
const {destination, tilesState, preview, origin} = interaction;
if ( tilesState === 0 ) return;
// Determine the drag distance
const dx = destination.x - origin.x;
const dy = destination.y - origin.y;
const dist = Math.min(Math.abs(dx), Math.abs(dy));
// Update the preview object
preview.document.width = (event.altKey ? dist * Math.sign(dx) : dx);
preview.document.height = (event.altKey ? dist * Math.sign(dy) : dy);
preview.renderFlags.set({refreshSize: true});
// Confirm the creation state
interaction.tilesState = 2;
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftDrop(event) {
// Snap the destination to the grid
const interaction = event.interactionData;
if ( !event.shiftKey ) interaction.destination = this.getSnappedPoint(interaction.destination);
const { tilesState, preview } = interaction;
if ( tilesState !== 2 ) return;
const doc = preview.document;
// Re-normalize the dropped shape
const r = new PIXI.Rectangle(doc.x, doc.y, doc.width, doc.height).normalize();
preview.document.updateSource(r);
// Require a minimum created size
if ( Math.hypot(r.width, r.height) < (canvas.dimensions.size / 2) ) return;
// Render the preview sheet for confirmation
preview.sheet.render(true, {preview: true});
this.preview._creating = true;
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftCancel(event) {
if ( this.preview._creating ) return;
return super._onDragLeftCancel(event);
}
/* -------------------------------------------- */
/**
* Handle drop events for Tile data on the Tiles Layer
* @param {DragEvent} event The concluding drag event
* @param {object} data The extracted Tile data
* @private
*/
async _onDropData(event, data) {
if ( !data.texture?.src ) return;
if ( !this.active ) this.activate();
// Get the data for the tile to create
const createData = await this._getDropData(event, data);
// Validate that the drop position is in-bounds and snap to grid
if ( !canvas.dimensions.rect.contains(createData.x, createData.y) ) return false;
// Create the Tile Document
const cls = getDocumentClass(this.constructor.documentName);
return cls.create(createData, {parent: canvas.scene});
}
/* -------------------------------------------- */
/**
* Prepare the data object when a new Tile is dropped onto the canvas
* @param {DragEvent} event The concluding drag event
* @param {object} data The extracted Tile data
* @returns {object} The prepared data to create
*/
async _getDropData(event, data) {
// Determine the tile size
const tex = await loadTexture(data.texture.src);
const ratio = canvas.dimensions.size / (data.tileSize || canvas.dimensions.size);
data.width = tex.baseTexture.width * ratio;
data.height = tex.baseTexture.height * ratio;
// Determine the elevation
const foreground = ui.controls.controls.find(c => c.layer === "tiles").foreground;
data.elevation = foreground ? canvas.scene.foregroundElevation : 0;
data.sort = Math.max(this.getMaxSort() + 1, 0);
foundry.utils.setProperty(data, "occlusion.mode", foreground
? CONST.OCCLUSION_MODES.FADE : CONST.OCCLUSION_MODES.NONE);
// Determine the final position and snap to grid unless SHIFT is pressed
data.x = data.x - (data.width / 2);
data.y = data.y - (data.height / 2);
if ( !event.shiftKey ) {
const {x, y} = this.getSnappedPoint(data);
data.x = x;
data.y = y;
}
// Create the tile as hidden if the ALT key is pressed
if ( event.altKey ) data.hidden = true;
return data;
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get roofs() {
const msg = "TilesLayer#roofs has been deprecated without replacement.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return this.placeables.filter(t => t.isRoof);
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
get textureDataMap() {
const msg = "TilesLayer#textureDataMap has moved to TextureLoader.textureBufferDataMap";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return TextureLoader.textureBufferDataMap;
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
get depthMask() {
const msg = "TilesLayer#depthMask is deprecated without replacement. Use canvas.masks.depth instead";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return canvas.masks.depth;
}
}

View File

@@ -0,0 +1,455 @@
/**
* The Tokens Container.
* @category - Canvas
*/
class TokenLayer extends PlaceablesLayer {
/**
* The current index position in the tab cycle
* @type {number|null}
* @private
*/
_tabIndex = null;
/* -------------------------------------------- */
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "tokens",
controllableObjects: true,
rotatableObjects: true,
zIndex: 200
});
}
/** @inheritdoc */
static documentName = "Token";
/* -------------------------------------------- */
/**
* The set of tokens that trigger occlusion (a union of {@link CONST.TOKEN_OCCLUSION_MODES}).
* @type {number}
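* @example Trigger occlusion only for hovered and controlled tokens (a usage sketch)
* ```js
* const M = CONST.TOKEN_OCCLUSION_MODES;
* canvas.tokens.occlusionMode = M.HOVERED | M.CONTROLLED;
* ```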
*/
set occlusionMode(value) {
this.#occlusionMode = value;
canvas.perception.update({refreshOcclusion: true});
}
get occlusionMode() {
return this.#occlusionMode;
}
#occlusionMode;
/* -------------------------------------------- */
/** @inheritdoc */
get hookName() {
return TokenLayer.name;
}
/* -------------------------------------------- */
/*  Properties                                  */
/* -------------------------------------------- */
/**
* Token objects on this layer utilize the TokenHUD
*/
get hud() {
return canvas.hud.token;
}
/**
* An Array of tokens which belong to actors which are owned
* @type {Token[]}
*/
get ownedTokens() {
return this.placeables.filter(t => t.actor && t.actor.isOwner);
}
/* -------------------------------------------- */
/*  Methods                                     */
/* -------------------------------------------- */
/** @override */
getSnappedPoint(point) {
const M = CONST.GRID_SNAPPING_MODES;
return canvas.grid.getSnappedPoint(point, {mode: M.TOP_LEFT_CORNER, resolution: 1});
}
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
await super._draw(options);
this.objects.visible = true;
// Reset the Tokens layer occlusion mode for the Scene
const M = CONST.TOKEN_OCCLUSION_MODES;
this.#occlusionMode = game.user.isGM ? M.CONTROLLED | M.HOVERED | M.HIGHLIGHTED : M.OWNED;
canvas.app.ticker.add(this._animateTargets, this);
}
/* -------------------------------------------- */
/** @inheritDoc */
async _tearDown(options) {
this.concludeAnimation();
return super._tearDown(options);
}
/* -------------------------------------------- */
/** @inheritDoc */
_activate() {
super._activate();
if ( canvas.controls ) canvas.controls.doors.visible = true;
this._tabIndex = null;
}
/* -------------------------------------------- */
/** @inheritDoc */
_deactivate() {
super._deactivate();
this.objects.visible = true;
if ( canvas.controls ) canvas.controls.doors.visible = false;
}
/* -------------------------------------------- */
/** @override */
_pasteObject(copy, offset, {hidden=false, snap=true}={}) {
const {x, y} = copy.document;
let position = {x: x + offset.x, y: y + offset.y};
if ( snap ) position = copy.getSnappedPosition(position);
const d = canvas.dimensions;
position.x = Math.clamp(position.x, 0, d.width - 1);
position.y = Math.clamp(position.y, 0, d.height - 1);
const data = copy.document.toObject();
delete data._id;
data.x = position.x;
data.y = position.y;
data.hidden ||= hidden;
return data;
}
/* -------------------------------------------- */
/** @inheritDoc */
_getMovableObjects(ids, includeLocked) {
const ruler = canvas.controls.ruler;
if ( ruler.state === Ruler.STATES.MEASURING ) return [];
const tokens = super._getMovableObjects(ids, includeLocked);
if ( ruler.token ) tokens.findSplice(token => token === ruler.token);
return tokens;
}
/* -------------------------------------------- */
/**
* Target all Token instances which fall within a coordinate rectangle.
*
* @param {object} rectangle The selection rectangle.
* @param {number} rectangle.x The top-left x-coordinate of the selection rectangle
* @param {number} rectangle.y The top-left y-coordinate of the selection rectangle
* @param {number} rectangle.width The width of the selection rectangle
* @param {number} rectangle.height The height of the selection rectangle
* @param {object} [options] Additional options to configure targeting behaviour.
* @param {boolean} [options.releaseOthers=true] Whether or not to release other targeted tokens
* @returns {number} The number of Token instances which were targeted.
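* @example Target every visible token inside a rectangular region (a usage sketch; coordinates are illustrative)
* ```js
* const count = canvas.tokens.targetObjects({x: 1000, y: 1000, width: 600, height: 400});
* console.log(`${count} tokens targeted`);
* ```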
*/
targetObjects({x, y, width, height}, {releaseOthers=true}={}) {
const user = game.user;
// Get the set of targeted tokens
const targets = new Set();
const rectangle = new PIXI.Rectangle(x, y, width, height);
for ( const token of this.placeables ) {
if ( !token.visible || token.document.isSecret ) continue;
if ( token._overlapsSelection(rectangle) ) targets.add(token);
}
// Maybe release other targets
if ( releaseOthers ) {
for ( const token of user.targets ) {
if ( targets.has(token) ) continue;
token.setTarget(false, {releaseOthers: false, groupSelection: true});
}
}
// Acquire targets for tokens which are not yet targeted
for ( const token of targets ) {
if ( user.targets.has(token) ) continue;
token.setTarget(true, {releaseOthers: false, groupSelection: true});
}
// Broadcast the target change
user.broadcastActivity({targets: user.targets.ids});
// Return the number of targeted tokens
return user.targets.size;
}
/* -------------------------------------------- */
/**
* Cycle the controlled token by rotating through the list of Owned Tokens that are available within the Scene.
* Tokens are sorted by their distance from the top-left corner of the Scene (see _getCycleOrder).
*
* @param {boolean} forwards Which direction to cycle. A truthy value cycles forward, while a false value
* cycles backwards.
* @param {boolean} reset Restart the cycle order back at the beginning?
* @returns {Token|null} The Token object which was cycled to, or null
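* @example Cycle forward through owned tokens, restarting the cycle order (a usage sketch)
* ```js
* const token = canvas.tokens.cycleTokens(true, true);
* if ( token ) console.log(`Now controlling ${token.document.name}`);
* ```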
*/
cycleTokens(forwards, reset) {
let next = null;
if ( reset ) this._tabIndex = null;
const order = this._getCycleOrder();
// If we are not tab cycling, try and jump to the currently controlled or impersonated token
if ( this._tabIndex === null ) {
this._tabIndex = 0;
// Determine the ideal starting point based on controlled tokens or the primary character
let current = this.controlled.length ? order.find(t => this.controlled.includes(t)) : null;
if ( !current && game.user.character ) {
const actorTokens = game.user.character.getActiveTokens();
current = actorTokens.length ? order.find(t => actorTokens.includes(t)) : null;
}
current = current || order[this._tabIndex] || null;
// Either start cycling, or cancel
if ( !current ) return null;
next = current;
}
// Otherwise, cycle forwards or backwards
else {
if ( forwards ) this._tabIndex = this._tabIndex < (order.length - 1) ? this._tabIndex + 1 : 0;
else this._tabIndex = this._tabIndex > 0 ? this._tabIndex - 1 : order.length - 1;
next = order[this._tabIndex];
if ( !next ) return null;
}
// Pan to the token and control it (if possible)
canvas.animatePan({x: next.center.x, y: next.center.y, duration: 250});
next.control();
return next;
}
/* -------------------------------------------- */
/**
* Get the tab cycle order for tokens by sorting observable tokens based on their distance from top-left.
* @returns {Token[]}
* @private
*/
_getCycleOrder() {
const observable = this.placeables.filter(token => {
if ( game.user.isGM ) return true;
if ( !token.actor?.testUserPermission(game.user, "OBSERVER") ) return false;
return !token.document.hidden;
});
observable.sort((a, b) => Math.hypot(a.x, a.y) - Math.hypot(b.x, b.y));
return observable;
}
/* -------------------------------------------- */
/**
* Immediately conclude the animation of any/all tokens
*/
concludeAnimation() {
this.placeables.forEach(t => t.stopAnimation());
canvas.app.ticker.remove(this._animateTargets, this);
}
/* -------------------------------------------- */
/**
* Animate targeting arrows on targeted tokens.
* @private
*/
_animateTargets() {
if ( !game.user.targets.size ) return;
if ( this._t === undefined ) this._t = 0;
else this._t += canvas.app.ticker.elapsedMS;
const duration = 2000;
const pause = duration * .6;
const fade = (duration - pause) * .25;
const minM = .5; // Minimum margin is half the size of the arrow.
const maxM = 1; // Maximum margin is the full size of the arrow.
// The animation starts with the arrows halfway across the token bounds, then the arrows move fully inside the bounds.
const rm = maxM - minM;
const t = this._t % duration;
let dt = Math.max(0, t - pause) / (duration - pause);
dt = CanvasAnimation.easeOutCircle(dt);
const m = t < pause ? minM : minM + (rm * dt);
const ta = Math.max(0, t - duration + fade);
const a = 1 - (ta / fade);
for ( const t of game.user.targets ) {
t._refreshTarget({
margin: m,
alpha: a,
color: CONFIG.Canvas.targeting.color,
size: CONFIG.Canvas.targeting.size
});
}
}
/* -------------------------------------------- */
/**
* Provide an array of Tokens which are eligible subjects for tile occlusion.
* By default, only tokens which are currently controlled or owned by a player are included as subjects.
* @returns {Token[]}
* @protected
* @internal
*/
_getOccludableTokens() {
const M = CONST.TOKEN_OCCLUSION_MODES;
const mode = this.occlusionMode;
if ( (mode & M.VISIBLE) || ((mode & M.HIGHLIGHTED) && this.highlightObjects) ) {
return this.placeables.filter(t => t.visible);
}
const tokens = new Set();
if ( (mode & M.HOVERED) && this.hover ) tokens.add(this.hover);
if ( mode & M.CONTROLLED ) this.controlled.forEach(t => tokens.add(t));
if ( mode & M.OWNED ) this.ownedTokens.filter(t => !t.document.hidden).forEach(t => tokens.add(t));
return Array.from(tokens);
}
/* -------------------------------------------- */
/** @inheritdoc */
storeHistory(type, data) {
super.storeHistory(type, type === "update" ? data.map(d => {
// Clean actorData and delta updates from the history so changes to those fields are not undone.
d = foundry.utils.deepClone(d);
delete d.actorData;
delete d.delta;
delete d._regions;
return d;
}) : data);
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/**
* Handle dropping of Actor data onto the Scene canvas
* @private
*/
async _onDropActorData(event, data) {
// Ensure the user has permission to drop the actor and create a Token
if ( !game.user.can("TOKEN_CREATE") ) {
return ui.notifications.warn("You do not have permission to create new Tokens!");
}
// Acquire dropped data and import the actor
let actor = await Actor.implementation.fromDropData(data);
if ( !actor.isOwner ) {
return ui.notifications.warn(`You do not have permission to create a new Token for the ${actor.name} Actor.`);
}
if ( actor.compendium ) {
const actorData = game.actors.fromCompendium(actor);
actor = await Actor.implementation.create(actorData, {fromCompendium: true});
}
// Prepare the Token document
const td = await actor.getTokenDocument({
hidden: game.user.isGM && event.altKey,
sort: Math.max(this.getMaxSort() + 1, 0)
}, {parent: canvas.scene});
// Set the position of the Token such that its center point is the drop position before snapping
const t = this.createObject(td);
let position = t.getCenterPoint({x: 0, y: 0});
position.x = data.x - position.x;
position.y = data.y - position.y;
if ( !event.shiftKey ) position = t.getSnappedPosition(position);
t.destroy({children: true});
td.updateSource(position);
// Validate the final position
if ( !canvas.dimensions.rect.contains(td.x, td.y) ) return false;
// Submit the Token creation request and activate the Tokens layer (if not already active)
this.activate();
return td.constructor.create(td, {parent: canvas.scene});
}
/* -------------------------------------------- */
/** @inheritDoc */
_onClickLeft(event) {
let tool = game.activeTool;
// If Control is being held, we always want the Tool to be Ruler
if ( game.keyboard.isModifierActive(KeyboardManager.MODIFIER_KEYS.CONTROL) ) tool = "ruler";
switch ( tool ) {
// Clear targets if Left Click Release is set
case "target":
if ( game.settings.get("core", "leftClickRelease") ) {
game.user.updateTokenTargets([]);
game.user.broadcastActivity({targets: []});
}
break;
// Place Ruler waypoints
case "ruler":
return canvas.controls.ruler._onClickLeft(event);
}
// If we don't explicitly return from handling the tool, use the default behavior
super._onClickLeft(event);
}
/* -------------------------------------------- */
/** @override */
_onMouseWheel(event) {
// Prevent wheel rotation during dragging
if ( this.preview.children.length ) return;
// Determine the incremental angle of rotation from event data
const snap = canvas.grid.isHexagonal ? (event.shiftKey ? 60 : 30) : (event.shiftKey ? 45 : 15);
const delta = snap * Math.sign(event.delta);
return this.rotateMany({delta, snap});
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get gridPrecision() {
// eslint-disable-next-line no-unused-expressions
super.gridPrecision;
return 1; // Snap tokens to top-left
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
async toggleCombat(state=true, combat=null, {token=null}={}) {
foundry.utils.logCompatibilityWarning("TokenLayer#toggleCombat is deprecated in favor of"
+ " TokenDocument.implementation.createCombatants and TokenDocument.implementation.deleteCombatants", {since: 12, until: 14});
const tokens = this.controlled.map(t => t.document);
if ( token && !token.controlled && (token.inCombat !== state) ) tokens.push(token.document);
if ( state ) return TokenDocument.implementation.createCombatants(tokens, {combat});
else return TokenDocument.implementation.deleteCombatants(tokens, {combat});
}
}

View File

@@ -0,0 +1,574 @@
/**
* The Walls canvas layer which provides a container for Wall objects within the rendered Scene.
* @category - Canvas
*/
class WallsLayer extends PlaceablesLayer {
/**
* A graphics layer used to display chained Wall selection
* @type {PIXI.Graphics}
*/
chain = null;
/**
* Track whether we are currently within a chained placement workflow
* @type {boolean}
*/
_chain = false;
/**
* Track the most recently created or updated wall data for use with the clone tool
* @type {Object|null}
* @private
*/
_cloneType = null;
/**
* Reference the last interacted wall endpoint for the purposes of chaining
* @type {{point: PointArray}}
* @private
*/
last = {
point: null
};
/* -------------------------------------------- */
/* Properties */
/* -------------------------------------------- */
/** @inheritdoc */
static get layerOptions() {
return foundry.utils.mergeObject(super.layerOptions, {
name: "walls",
controllableObjects: true,
zIndex: 700
});
}
/** @inheritdoc */
static documentName = "Wall";
/* -------------------------------------------- */
/** @inheritdoc */
get hookName() {
return WallsLayer.name;
}
/* -------------------------------------------- */
/**
* The grid used for snapping.
* It is the same as canvas.grid, except in the gridless case, where a square grid of the same size is used instead.
* @type {BaseGrid}
*/
#grid = canvas.grid;
/* -------------------------------------------- */
/**
* An Array of Wall instances in the current Scene which act as Doors.
* @type {Wall[]}
*/
get doors() {
return this.objects.children.filter(w => w.document.door > CONST.WALL_DOOR_TYPES.NONE);
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @override */
getSnappedPoint(point) {
const M = CONST.GRID_SNAPPING_MODES;
const size = canvas.dimensions.size;
return this.#grid.getSnappedPoint(point, canvas.forceSnapVertices ? {mode: M.VERTEX} : {
mode: M.CENTER | M.VERTEX | M.CORNER | M.SIDE_MIDPOINT,
resolution: size >= 128 ? 8 : (size >= 64 ? 4 : 2)
});
}
/* -------------------------------------------- */
/** @inheritDoc */
async _draw(options) {
this.#grid = canvas.grid.isGridless ? new foundry.grid.SquareGrid({size: canvas.grid.size}) : canvas.grid;
await super._draw(options);
this.chain = this.addChildAt(new PIXI.Graphics(), 0);
this.last = {point: null};
}
/* -------------------------------------------- */
/** @inheritdoc */
_deactivate() {
super._deactivate();
this.chain?.clear();
}
/* -------------------------------------------- */
/**
* Given a point and the coordinates of a wall, determine which endpoint is closer to the point
* @param {Point} point The origin point of the new Wall placement
* @param {Wall} wall The existing Wall object being chained to
* @returns {PointArray} The [x,y] coordinates of the starting endpoint
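* @example Find the endpoint of an existing wall nearest to a point (a usage sketch; the coordinates are illustrative)
* ```js
* const wall = canvas.walls.placeables[0];         // Assumption: at least one Wall exists in the Scene
* const [x, y] = WallsLayer.getClosestEndpoint({x: 1000, y: 1200}, wall);
* ```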
*/
static getClosestEndpoint(point, wall) {
const c = wall.coords;
const a = [c[0], c[1]];
const b = [c[2], c[3]];
// Exact matches
if ( a.equals([point.x, point.y]) ) return a;
else if ( b.equals([point.x, point.y]) ) return b;
// Closest match
const da = Math.hypot(point.x - a[0], point.y - a[1]);
const db = Math.hypot(point.x - b[0], point.y - b[1]);
return da < db ? a : b;
}
/* -------------------------------------------- */
/** @inheritdoc */
releaseAll(options) {
if ( this.chain ) this.chain.clear();
return super.releaseAll(options);
}
/* -------------------------------------------- */
/** @override */
_pasteObject(copy, offset, options) {
const c = copy.document.c;
const dx = Math.round(offset.x);
const dy = Math.round(offset.y);
const a = {x: c[0] + dx, y: c[1] + dy};
const b = {x: c[2] + dx, y: c[3] + dy};
const data = copy.document.toObject();
delete data._id;
data.c = [a.x, a.y, b.x, b.y];
return data;
}
/* -------------------------------------------- */
/**
* Pan the canvas view when the cursor position gets close to the edge of the frame
* @param {MouseEvent} event The originating mouse movement event
* @param {number} x The x-coordinate
* @param {number} y The y-coordinate
* @private
*/
_panCanvasEdge(event, x, y) {
// Throttle panning by 20ms
const now = Date.now();
if ( now - (event.interactionData.panTime || 0) <= 100 ) return;
event.interactionData.panTime = now;
// Determine the amount of shifting required
const pad = 50;
const shift = 500 / canvas.stage.scale.x;
// Shift horizontally
let dx = 0;
if ( x < pad ) dx = -shift;
else if ( x > window.innerWidth - pad ) dx = shift;
// Shift vertically
let dy = 0;
if ( y < pad ) dy = -shift;
else if ( y > window.innerHeight - pad ) dy = shift;
// Enact panning
if ( (dx || dy) && !this._panning ) {
return canvas.animatePan({x: canvas.stage.pivot.x + dx, y: canvas.stage.pivot.y + dy, duration: 100});
}
}
/* -------------------------------------------- */
/**
* Get the wall endpoint coordinates for a given point.
* @param {Point} point The candidate wall endpoint.
* @param {object} [options]
* @param {boolean} [options.snap=true] Snap to the grid?
* @returns {[x: number, y: number]} The wall endpoint coordinates.
* @internal
*/
_getWallEndpointCoordinates(point, {snap=true}={}) {
if ( snap ) point = this.getSnappedPoint(point);
return [point.x, point.y].map(Math.round);
}
/* -------------------------------------------- */
/**
* The Scene Controls tools provide several different types of prototypical Walls to choose from
* This method helps to translate each tool into a default wall data configuration for that type
* @param {string} tool The active canvas tool
* @private
*/
_getWallDataFromActiveTool(tool) {
// Using the clone tool
if ( tool === "clone" && this._cloneType ) return this._cloneType;
// Default wall data
const wallData = {
light: CONST.WALL_SENSE_TYPES.NORMAL,
sight: CONST.WALL_SENSE_TYPES.NORMAL,
sound: CONST.WALL_SENSE_TYPES.NORMAL,
move: CONST.WALL_SENSE_TYPES.NORMAL
};
// Tool-based wall restriction types
switch ( tool ) {
case "invisible":
wallData.sight = wallData.light = wallData.sound = CONST.WALL_SENSE_TYPES.NONE; break;
case "terrain":
wallData.sight = wallData.light = wallData.sound = CONST.WALL_SENSE_TYPES.LIMITED; break;
case "ethereal":
wallData.move = wallData.sound = CONST.WALL_SENSE_TYPES.NONE; break;
case "doors":
wallData.door = CONST.WALL_DOOR_TYPES.DOOR; break;
case "secret":
wallData.door = CONST.WALL_DOOR_TYPES.SECRET; break;
case "window":
const d = canvas.dimensions.distance;
wallData.sight = wallData.light = CONST.WALL_SENSE_TYPES.PROXIMITY;
wallData.threshold = {light: 2 * d, sight: 2 * d, attenuation: true};
break;
}
return wallData;
}
/* -------------------------------------------- */
/**
* Identify the interior enclosed by the given walls.
* @param {Wall[]} walls The walls that enclose the interior.
* @returns {PIXI.Polygon[]} The polygons of the interior.
* @license MIT
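* @example Compute the interior polygons enclosed by the currently controlled walls (a usage sketch)
* ```js
* const polygons = canvas.walls.identifyInteriorArea(canvas.walls.controlled);
* for ( const poly of polygons ) console.log(`${poly.points.length / 2} vertices`);
* ```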
*/
identifyInteriorArea(walls) {
// Build the graph from the walls
const vertices = new Map();
const addEdge = (a, b) => {
let v = vertices.get(a.key);
if ( !v ) vertices.set(a.key, v = {X: a.x, Y: a.y, key: a.key, neighbors: new Set(), visited: false});
let w = vertices.get(b.key);
if ( !w ) vertices.set(b.key, w = {X: b.x, Y: b.y, key: b.key, neighbors: new Set(), visited: false});
if ( v !== w ) {
v.neighbors.add(w);
w.neighbors.add(v);
}
};
for ( const wall of walls ) {
const edge = wall.edge;
const a = new foundry.canvas.edges.PolygonVertex(edge.a.x, edge.a.y);
const b = new foundry.canvas.edges.PolygonVertex(edge.b.x, edge.b.y);
if ( a.key === b.key ) continue;
if ( edge.intersections.length === 0 ) addEdge(a, b);
else {
const p = edge.intersections.map(i => foundry.canvas.edges.PolygonVertex.fromPoint(i.intersection));
p.push(a, b);
p.sort((v, w) => (v.x - w.x) || (v.y - w.y));
for ( let k = 1; k < p.length; k++ ) {
const a = p[k - 1];
const b = p[k];
if ( a.key === b.key ) continue;
addEdge(a, b);
}
}
}
// Find the boundary paths of the interior that is enclosed by the walls
const paths = [];
while ( vertices.size !== 0 ) {
let start;
for ( const vertex of vertices.values() ) {
vertex.visited = false;
if ( !start || (start.X > vertex.X) || ((start.X === vertex.X) && (start.Y > vertex.Y)) ) start = vertex;
}
if ( start.neighbors.size >= 2 ) {
const path = [];
let current = start;
let previous = {X: current.X - 1, Y: current.Y - 1};
for ( ;; ) {
current.visited = true;
const x0 = previous.X;
const y0 = previous.Y;
const x1 = current.X;
const y1 = current.Y;
let next;
for ( const vertex of current.neighbors ) {
if ( vertex === previous ) continue;
if ( (vertex !== start) && vertex.visited ) continue;
if ( !next ) {
next = vertex;
continue;
}
const x2 = next.X;
const y2 = next.Y;
const a1 = ((y0 - y1) * (x2 - x1)) + ((x1 - x0) * (y2 - y1));
const x3 = vertex.X;
const y3 = vertex.Y;
const a2 = ((y0 - y1) * (x3 - x1)) + ((x1 - x0) * (y3 - y1));
if ( a1 < 0 ) {
if ( a2 >= 0 ) continue;
} else if ( a1 > 0 ) {
if ( a2 < 0 ) {
next = vertex;
continue;
}
if ( a2 === 0 ) {
const b2 = ((x3 - x1) * (x0 - x1)) + ((y3 - y1) * (y0 - y1)) > 0;
if ( !b2 ) next = vertex;
continue;
}
} else {
if ( a2 < 0 ) {
next = vertex;
continue;
}
const b1 = ((x2 - x1) * (x0 - x1)) + ((y2 - y1) * (y0 - y1)) > 0;
if ( a2 > 0) {
if ( b1 ) next = vertex;
continue;
}
const b2 = ((x3 - x1) * (x0 - x1)) + ((y3 - y1) * (y0 - y1)) > 0;
if ( b1 && !b2 ) next = vertex;
continue;
}
const c = ((y1 - y2) * (x3 - x1)) + ((x2 - x1) * (y3 - y1));
if ( c > 0 ) continue;
if ( c < 0 ) {
next = vertex;
continue;
}
const d1 = ((x2 - x1) * (x2 - x1)) + ((y2 - y1) * (y2 - y1));
const d2 = ((x3 - x1) * (x3 - x1)) + ((y3 - y1) * (y3 - y1));
if ( d2 < d1 ) next = vertex;
}
if (next) {
path.push(current);
previous = current;
current = next;
if ( current === start ) break;
} else {
current = path.pop();
if ( !current ) {
previous = undefined;
break;
}
previous = path.length ? path[path.length - 1] : {X: current.X - 1, Y: current.Y - 1};
}
}
if ( path.length !== 0 ) {
paths.push(path);
previous = path[path.length - 1];
for ( const vertex of path ) {
previous.neighbors.delete(vertex);
if ( previous.neighbors.size === 0 ) vertices.delete(previous.key);
vertex.neighbors.delete(previous);
previous = vertex;
}
if ( previous.neighbors.size === 0 ) vertices.delete(previous.key);
}
}
for ( const vertex of start.neighbors ) {
vertex.neighbors.delete(start);
if ( vertex.neighbors.size === 0 ) vertices.delete(vertex.key);
}
vertices.delete(start.key);
}
// Unionize the paths
const clipper = new ClipperLib.Clipper();
clipper.AddPaths(paths, ClipperLib.PolyType.ptSubject, true);
clipper.Execute(ClipperLib.ClipType.ctUnion, paths, ClipperLib.PolyFillType.pftPositive,
ClipperLib.PolyFillType.pftEvenOdd);
// Convert the paths to polygons
return paths.map(path => {
const points = [];
for ( const point of path ) points.push(point.X, point.Y);
return new PIXI.Polygon(points);
});
}
/* -------------------------------------------- */
/* Event Listeners and Handlers */
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftStart(event) {
this.clearPreviewContainer();
const interaction = event.interactionData;
const origin = interaction.origin;
interaction.wallsState = WallsLayer.CREATION_STATES.NONE;
interaction.clearPreviewContainer = true;
// Create a pending WallDocument
const data = this._getWallDataFromActiveTool(game.activeTool);
const snap = !event.shiftKey;
const isChain = this._chain || game.keyboard.isModifierActive(KeyboardManager.MODIFIER_KEYS.CONTROL);
const pt = (isChain && this.last.point) ? this.last.point : this._getWallEndpointCoordinates(origin, {snap});
data.c = pt.concat(pt);
const cls = getDocumentClass("Wall");
const doc = new cls(data, {parent: canvas.scene});
// Create the preview Wall object
const wall = new this.constructor.placeableClass(doc);
interaction.wallsState = WallsLayer.CREATION_STATES.POTENTIAL;
interaction.preview = wall;
return wall.draw();
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftMove(event) {
const interaction = event.interactionData;
const {preview, destination} = interaction;
const states = WallsLayer.CREATION_STATES;
if ( !preview || preview._destroyed
|| [states.NONE, states.COMPLETED].includes(interaction.wallsState) ) return;
if ( preview.parent === null ) this.preview.addChild(preview); // Should happen the first time it is moved
const snap = !event.shiftKey;
preview.document.updateSource({
c: preview.document.c.slice(0, 2).concat(this._getWallEndpointCoordinates(destination, {snap}))
});
preview.refresh();
interaction.wallsState = WallsLayer.CREATION_STATES.CONFIRMED;
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftDrop(event) {
const interaction = event.interactionData;
const {wallsState, destination, preview} = interaction;
const states = WallsLayer.CREATION_STATES;
// Check preview and state
if ( !preview || preview._destroyed || (interaction.wallsState === states.NONE) ) {
return;
}
// Prevent default to allow chaining to continue
if ( game.keyboard.isModifierActive(KeyboardManager.MODIFIER_KEYS.CONTROL) ) {
event.preventDefault();
this._chain = true;
if ( wallsState < WallsLayer.CREATION_STATES.CONFIRMED ) return;
} else this._chain = false;
// Successful wall completion
if ( wallsState === WallsLayer.CREATION_STATES.CONFIRMED ) {
interaction.wallsState = WallsLayer.CREATION_STATES.COMPLETED;
// Get final endpoint location
const snap = !event.shiftKey;
let dest = this._getWallEndpointCoordinates(destination, {snap});
const coords = preview.document.c.slice(0, 2).concat(dest);
preview.document.updateSource({c: coords});
const clearPreviewAndChain = () => {
this.clearPreviewContainer();
// Maybe chain
if ( this._chain ) {
interaction.origin = {x: dest[0], y: dest[1]};
this._onDragLeftStart(event);
}
};
// Ignore walls which are collapsed
if ( (coords[0] === coords[2]) && (coords[1] === coords[3]) ) {
clearPreviewAndChain();
return;
}
interaction.clearPreviewContainer = false;
// Create the Wall
this.last = {point: dest};
const cls = getDocumentClass(this.constructor.documentName);
cls.create(preview.document.toObject(), {parent: canvas.scene}).finally(clearPreviewAndChain);
}
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftCancel(event) {
this._chain = false;
this.last = {point: null};
super._onDragLeftCancel(event);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onClickRight(event) {
if ( event.interactionData.wallsState > WallsLayer.CREATION_STATES.NONE ) return this._onDragLeftCancel(event);
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
checkCollision(ray, options={}) {
const msg = "WallsLayer#checkCollision is obsolete. "
+ "Prefer calls to testCollision from CONFIG.Canvas.polygonBackends[type]";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return CONFIG.Canvas.losBackend.testCollision(ray.A, ray.B, options);
}
/**
* @deprecated since v11
* @ignore
*/
highlightControlledSegments() {
foundry.utils.logCompatibilityWarning("The WallsLayer#highlightControlledSegments function is deprecated in favor "
+ "of calling wall.renderFlags.set(\"refreshHighlight\") on individual Wall objects", {since: 11, until: 13});
for ( const w of this.placeables ) w.renderFlags.set({refreshHighlight: true});
}
/**
* @deprecated since v12
* @ignore
*/
initialize() {
foundry.utils.logCompatibilityWarning("WallsLayer#initialize is deprecated in favor of Canvas#edges#initialize",
{since: 12, until: 14});
return canvas.edges.initialize();
}
/**
* @deprecated since v12
* @ignore
*/
identifyInteriorWalls() {
foundry.utils.logCompatibilityWarning("WallsLayer#identifyInteriorWalls has been deprecated. "
+ "It has no effect anymore and there's no replacement.", {since: 12, until: 14});
}
/**
* @deprecated since v12
* @ignore
*/
identifyWallIntersections() {
foundry.utils.logCompatibilityWarning("WallsLayer#identifyWallIntersections is deprecated in favor of"
+ " foundry.canvas.edges.Edge.identifyEdgeIntersections and has no effect.", {since: 12, until: 14});
}
}

View File

@@ -0,0 +1,721 @@
/**
* @typedef {Map<number,PolygonVertex>} VertexMap
*/
/**
* @typedef {Set<Edge>} EdgeSet
*/
/**
* @typedef {Ray} PolygonRay
* @property {CollisionResult} result
*/
/**
* A PointSourcePolygon implementation that uses CCW (counter-clockwise) geometry orientation.
* Sweep around the origin, accumulating collision points based on the set of active walls.
* This algorithm was created with valuable contributions from https://github.com/caewok
*
* @extends PointSourcePolygon
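*
* @example Compute a sight polygon around an origin point (a usage sketch; the configuration values are illustrative)
* ```js
* const origin = {x: 1000, y: 1000};
* const polygon = ClockwiseSweepPolygon.create(origin, {type: "sight", radius: 600});
* ```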
*/
class ClockwiseSweepPolygon extends PointSourcePolygon {
/**
* A mapping of vertices which define potential collision points
* @type {VertexMap}
*/
vertices = new Map();
/**
* The set of edges which define potential boundaries of the polygon
* @type {EdgeSet}
*/
edges = new Set();
/**
* A collection of rays which are fired at vertices
* @type {PolygonRay[]}
*/
rays = [];
/**
* The squared maximum distance of a ray that is needed for this Scene.
* @type {number}
*/
#rayDistance2;
/* -------------------------------------------- */
/* Initialization */
/* -------------------------------------------- */
/** @inheritDoc */
initialize(origin, config) {
super.initialize(origin, config);
this.#rayDistance2 = Math.pow(canvas.dimensions.maxR, 2);
}
/* -------------------------------------------- */
/** @inheritDoc */
clone() {
const poly = super.clone();
for ( const attr of ["vertices", "edges", "rays", "#rayDistance2"] ) { // Shallow clone only
poly[attr] = this[attr];
}
return poly;
}
/* -------------------------------------------- */
/* Computation */
/* -------------------------------------------- */
/** @inheritdoc */
_compute() {
// Clear prior data
this.points = [];
this.rays = [];
this.vertices.clear();
this.edges.clear();
// Step 1 - Identify candidate edges
this._identifyEdges();
// Step 2 - Construct vertex mapping
this._identifyVertices();
// Step 3 - Radial sweep over endpoints
this._executeSweep();
// Step 4 - Constrain with boundary shapes
this._constrainBoundaryShapes();
}
/* -------------------------------------------- */
/* Edge Configuration */
/* -------------------------------------------- */
/**
* Get the super-set of walls which could potentially apply to this polygon.
* Define a custom collision test used by the Quadtree to obtain candidate Walls.
* @protected
*/
_identifyEdges() {
const bounds = this.config.boundingBox = this._defineBoundingBox();
const edgeTypes = this._determineEdgeTypes();
for ( const edge of canvas.edges.values() ) {
if ( this._testEdgeInclusion(edge, edgeTypes, bounds) ) {
this.edges.add(edge.clone());
}
}
}
/* -------------------------------------------- */
/**
* Determine the edge types and their manner of inclusion for this polygon instance.
* @returns {Record<EdgeTypes, 0|1|2>}
* @protected
*/
_determineEdgeTypes() {
const {type, useInnerBounds, includeDarkness} = this.config;
const edgeTypes = {};
if ( type !== "universal" ) edgeTypes.wall = 1;
if ( includeDarkness ) edgeTypes.darkness = 1;
if ( useInnerBounds && canvas.scene.padding ) edgeTypes.innerBounds = 2;
else edgeTypes.outerBounds = 2;
return edgeTypes;
}
/* -------------------------------------------- */
/**
* Test whether a wall should be included in the computed polygon for a given origin and type
* @param {Edge} edge The Edge being considered
* @param {Record<EdgeTypes, 0|1|2>} edgeTypes Which types of edges are being used? 0=no, 1=maybe, 2=always
* @param {PIXI.Rectangle} bounds The overall bounding box
* @returns {boolean} Should the edge be included?
* @protected
*/
_testEdgeInclusion(edge, edgeTypes, bounds) {
const { type, boundaryShapes, useThreshold, wallDirectionMode, externalRadius } = this.config;
// Only include edges of the appropriate type
const m = edgeTypes[edge.type];
if ( !m ) return false;
if ( m === 2 ) return true;
// Test for inclusion in the overall bounding box
if ( !bounds.lineSegmentIntersects(edge.a, edge.b, { inside: true }) ) return false;
// Specific boundary shapes may impose additional requirements
for ( const shape of boundaryShapes ) {
if ( shape._includeEdge && !shape._includeEdge(edge.a, edge.b) ) return false;
}
// Ignore edges which do not block this polygon type
if ( edge[type] === CONST.WALL_SENSE_TYPES.NONE ) return false;
// Ignore edges which are collinear with the origin
const side = edge.orientPoint(this.origin);
if ( !side ) return false;
// Ignore one-directional walls which are facing away from the origin
const wdm = PointSourcePolygon.WALL_DIRECTION_MODES;
if ( edge.direction && (wallDirectionMode !== wdm.BOTH) ) {
if ( (wallDirectionMode === wdm.NORMAL) === (side === edge.direction) ) return false;
}
// Ignore threshold walls which do not satisfy their required proximity
if ( useThreshold ) return !edge.applyThreshold(type, this.origin, externalRadius);
return true;
}
/* -------------------------------------------- */
/**
* Compute the aggregate bounding box which is the intersection of all boundary shapes.
* Round and pad the resulting rectangle by 1 pixel to ensure it always contains the origin.
* @returns {PIXI.Rectangle}
* @protected
*/
_defineBoundingBox() {
let b = this.config.useInnerBounds ? canvas.dimensions.sceneRect : canvas.dimensions.rect;
for ( const shape of this.config.boundaryShapes ) {
b = b.intersection(shape.getBounds());
}
return new PIXI.Rectangle(b.x, b.y, b.width, b.height).normalize().ceil().pad(1);
}
/* -------------------------------------------- */
/* Vertex Identification */
/* -------------------------------------------- */
/**
* Consolidate all vertices from identified edges and register them as part of the vertex mapping.
* @protected
*/
_identifyVertices() {
const edgeMap = new Map();
for ( let edge of this.edges ) {
edgeMap.set(edge.id, edge);
// Create or reference vertex A
const ak = foundry.canvas.edges.PolygonVertex.getKey(edge.a.x, edge.a.y);
if ( this.vertices.has(ak) ) edge.vertexA = this.vertices.get(ak);
else {
edge.vertexA = new foundry.canvas.edges.PolygonVertex(edge.a.x, edge.a.y);
this.vertices.set(ak, edge.vertexA);
}
// Create or reference vertex B
const bk = foundry.canvas.edges.PolygonVertex.getKey(edge.b.x, edge.b.y);
if ( this.vertices.has(bk) ) edge.vertexB = this.vertices.get(bk);
else {
edge.vertexB = new foundry.canvas.edges.PolygonVertex(edge.b.x, edge.b.y);
this.vertices.set(bk, edge.vertexB);
}
// Learn edge orientation with respect to the origin and ensure B is clockwise of A
const o = foundry.utils.orient2dFast(this.origin, edge.vertexA, edge.vertexB);
if ( o > 0 ) Object.assign(edge, {vertexA: edge.vertexB, vertexB: edge.vertexA}); // Reverse vertices
if ( o !== 0 ) { // Attach non-collinear edges
edge.vertexA.attachEdge(edge, -1, this.config.type);
edge.vertexB.attachEdge(edge, 1, this.config.type);
}
}
// Add edge intersections
this._identifyIntersections(edgeMap);
}
/* -------------------------------------------- */
/**
* Add additional vertices for intersections between edges.
* @param {Map<string, Edge>} edgeMap
* @protected
*/
_identifyIntersections(edgeMap) {
const processed = new Set();
for ( let edge of this.edges ) {
for ( const x of edge.intersections ) {
// Is the intersected edge also included in the polygon?
const other = edgeMap.get(x.edge.id);
if ( !other || processed.has(other) ) continue;
const i = x.intersection;
// Register the intersection point as a vertex
const vk = foundry.canvas.edges.PolygonVertex.getKey(Math.round(i.x), Math.round(i.y));
let v = this.vertices.get(vk);
if ( !v ) {
v = new foundry.canvas.edges.PolygonVertex(i.x, i.y);
v._intersectionCoordinates = i;
this.vertices.set(vk, v);
}
// Attach edges to the intersection vertex
// Due to rounding, it is possible for an edge to be completely cw or ccw or only one of the two
// We know from _identifyVertices that vertex B is clockwise of vertex A for every edge.
// It is important that we use the true intersection coordinates (i) for this orientation test.
if ( !v.edges.has(edge) ) {
const dir = foundry.utils.orient2dFast(this.origin, edge.vertexB, i) < 0 ? 1 // Edge is fully CCW of v
: (foundry.utils.orient2dFast(this.origin, edge.vertexA, i) > 0 ? -1 : 0); // Edge is fully CW of v
v.attachEdge(edge, dir, this.config.type);
}
if ( !v.edges.has(other) ) {
const dir = foundry.utils.orient2dFast(this.origin, other.vertexB, i) < 0 ? 1 // Other is fully CCW of v
: (foundry.utils.orient2dFast(this.origin, other.vertexA, i) > 0 ? -1 : 0); // Other is fully CW of v
v.attachEdge(other, dir, this.config.type);
}
}
processed.add(edge);
}
}
/* -------------------------------------------- */
/* Radial Sweep */
/* -------------------------------------------- */
/**
* Execute the sweep over wall vertices
* @private
*/
_executeSweep() {
// Initialize the set of active walls
const activeEdges = this._initializeActiveEdges();
// Sort vertices from clockwise to counter-clockwise and begin the sweep
const vertices = this._sortVertices();
// Iterate through the vertices, adding polygon points
let i = 1;
for ( const vertex of vertices ) {
if ( vertex._visited ) continue;
vertex._index = i++;
this.#updateActiveEdges(vertex, activeEdges);
// Include collinear vertices in this iteration of the sweep, treating their edges as active also
const hasCollinear = vertex.collinearVertices.size > 0;
if ( hasCollinear ) {
this.#includeCollinearVertices(vertex, vertex.collinearVertices);
for ( const cv of vertex.collinearVertices ) {
cv._index = i++;
this.#updateActiveEdges(cv, activeEdges);
}
}
// Determine the result of the sweep for the given vertex
this._determineSweepResult(vertex, activeEdges, hasCollinear);
}
}
/* -------------------------------------------- */
/**
* Include collinear vertices until they have all been added.
* Do not include the original vertex in the set.
* @param {PolygonVertex} vertex The current vertex
* @param {PolygonVertexSet} collinearVertices
*/
#includeCollinearVertices(vertex, collinearVertices) {
for ( const cv of collinearVertices) {
for ( const ccv of cv.collinearVertices ) {
collinearVertices.add(ccv);
}
}
collinearVertices.delete(vertex);
}
/* -------------------------------------------- */
/**
* Update active edges at a given vertex
* Remove counter-clockwise edges which have now concluded.
* Add clockwise edges which are ongoing or beginning.
* @param {PolygonVertex} vertex The current vertex
* @param {EdgeSet} activeEdges A set of currently active edges
*/
#updateActiveEdges(vertex, activeEdges) {
for ( const ccw of vertex.ccwEdges ) {
if ( !vertex.cwEdges.has(ccw) ) activeEdges.delete(ccw);
}
for ( const cw of vertex.cwEdges ) {
if ( cw.vertexA._visited && cw.vertexB._visited ) continue; // Safeguard in case we have already visited the edge
activeEdges.add(cw);
}
vertex._visited = true; // Record that we have already visited this vertex
}
/* -------------------------------------------- */
/**
* Determine the initial set of active edges as those which intersect with the initial ray
* @returns {EdgeSet} A set of initially active edges
* @private
*/
_initializeActiveEdges() {
const initial = {x: Math.round(this.origin.x - this.#rayDistance2), y: this.origin.y};
const edges = new Set();
for ( let edge of this.edges ) {
const x = foundry.utils.lineSegmentIntersects(this.origin, initial, edge.vertexA, edge.vertexB);
if ( x ) edges.add(edge);
}
return edges;
}
/* -------------------------------------------- */
/**
* Sort vertices clockwise from the initial ray (due west).
* @returns {PolygonVertex[]} The array of sorted vertices
* @private
*/
_sortVertices() {
if ( !this.vertices.size ) return [];
let vertices = Array.from(this.vertices.values());
const o = this.origin;
// Sort vertices
vertices.sort((a, b) => {
// Use true intersection coordinates if they are defined
let pA = a._intersectionCoordinates || a;
let pB = b._intersectionCoordinates || b;
// Sort by hemisphere
const ya = pA.y > o.y ? 1 : -1;
const yb = pB.y > o.y ? 1 : -1;
if ( ya !== yb ) return ya; // Sort N, S
// Sort by quadrant
const qa = pA.x < o.x ? -1 : 1;
const qb = pB.x < o.x ? -1 : 1;
if ( qa !== qb ) { // Sort NW, NE, SE, SW
if ( ya === -1 ) return qa;
else return -qa;
}
// Sort clockwise within quadrant
const orientation = foundry.utils.orient2dFast(o, pA, pB);
if ( orientation !== 0 ) return orientation;
// At this point, we know points are collinear; track for later processing.
a.collinearVertices.add(b);
b.collinearVertices.add(a);
// Otherwise, sort closer points first
a._d2 ||= Math.pow(pA.x - o.x, 2) + Math.pow(pA.y - o.y, 2);
b._d2 ||= Math.pow(pB.x - o.x, 2) + Math.pow(pB.y - o.y, 2);
return a._d2 - b._d2;
});
return vertices;
}
/* -------------------------------------------- */
/**
* Test whether a target vertex is behind some closer active edge.
* If the vertex is to the left of the edge, it must be behind the edge relative to the origin.
* If the vertex is collinear with the edge, it should be considered "behind" and ignored.
* We know edge.vertexA is ccw to edge.vertexB because of the logic in _identifyVertices.
* @param {PolygonVertex} vertex The target vertex
* @param {EdgeSet} activeEdges The set of active edges
* @returns {{isBehind: boolean, wasLimited: boolean}} Is the target vertex behind some closer edge?
* @private
*/
_isVertexBehindActiveEdges(vertex, activeEdges) {
let wasLimited = false;
for ( let edge of activeEdges ) {
if ( vertex.edges.has(edge) ) continue;
if ( foundry.utils.orient2dFast(edge.vertexA, edge.vertexB, vertex) > 0 ) {
if ( ( edge.isLimited(this.config.type) ) && !wasLimited ) wasLimited = true;
else return {isBehind: true, wasLimited};
}
}
return {isBehind: false, wasLimited};
}
/* -------------------------------------------- */
/**
* Determine the result for the sweep at a given vertex
* @param {PolygonVertex} vertex The target vertex
* @param {EdgeSet} activeEdges The set of active edges
* @param {boolean} hasCollinear Are there collinear vertices behind the target vertex?
* @private
*/
_determineSweepResult(vertex, activeEdges, hasCollinear=false) {
// Determine whether the target vertex is behind some other active edge
const {isBehind, wasLimited} = this._isVertexBehindActiveEdges(vertex, activeEdges);
// Case 1 - Some vertices can be ignored because they are behind other active edges
if ( isBehind ) return;
// Construct the CollisionResult object
const result = new foundry.canvas.edges.CollisionResult({
target: vertex,
cwEdges: vertex.cwEdges,
ccwEdges: vertex.ccwEdges,
isLimited: vertex.isLimited,
isBehind,
wasLimited
});
// Case 2 - No counter-clockwise edge, so begin a new edge
// Note: activeEdges always contains the vertex edge, so it is never empty
const nccw = vertex.ccwEdges.size;
if ( !nccw ) {
this._switchEdge(result, activeEdges);
result.collisions.forEach(pt => this.addPoint(pt));
return;
}
// Case 3 - Limited edges in both directions
// We can only guarantee this case if we don't have collinear endpoints
const ccwLimited = !result.wasLimited && vertex.isLimitingCCW;
const cwLimited = !result.wasLimited && vertex.isLimitingCW;
if ( !hasCollinear && cwLimited && ccwLimited ) return;
// Case 4 - Non-limited edges in both directions
if ( !ccwLimited && !cwLimited && nccw && vertex.cwEdges.size ) {
result.collisions.push(result.target);
this.addPoint(result.target);
return;
}
// Case 5 - Otherwise switching edges or edge types
this._switchEdge(result, activeEdges);
result.collisions.forEach(pt => this.addPoint(pt));
}
/* -------------------------------------------- */
/**
* Switch to a new active edge.
* Moving from the origin, a collision that first blocks a side must be stored as a polygon point.
* Subsequent collisions blocking that side are ignored. Once both sides are blocked, we are done.
*
* Collisions that limit a side will block if that side was previously limited.
*
* If neither side is blocked and the ray internally collides with a non-limited edge, we skip without adding polygon
* endpoints. Sight is unaffected before this edge, and the internal collision can be ignored.
* @private
*
* @param {CollisionResult} result The pending collision result
* @param {EdgeSet} activeEdges The set of currently active edges
*/
_switchEdge(result, activeEdges) {
const origin = this.origin;
// Construct the ray from the origin
const ray = Ray.towardsPointSquared(origin, result.target, this.#rayDistance2);
ray.result = result;
this.rays.push(ray); // For visualization and debugging
// Create a sorted array of collisions containing the target vertex, other collinear vertices, and collision points
const vertices = [result.target, ...result.target.collinearVertices];
const keys = new Set();
for ( const v of vertices ) {
keys.add(v.key);
v._d2 ??= Math.pow(v.x - origin.x, 2) + Math.pow(v.y - origin.y, 2);
}
this.#addInternalEdgeCollisions(vertices, keys, ray, activeEdges);
vertices.sort((a, b) => a._d2 - b._d2);
// As we iterate over intersection points we will define the insertion method
let insert = undefined;
const c = result.collisions;
for ( const x of vertices ) {
if ( x.isInternal ) { // Handle internal collisions
// If neither side yet blocked and this is a non-limited edge, return
if ( !result.blockedCW && !result.blockedCCW && !x.isLimited ) return;
// Any edge is either limited or normal: a normal edge blocks outright, and a limited edge blocks a side which was already limited
result.blockedCW ||= !x.isLimited || result.limitedCW;
result.blockedCCW ||= !x.isLimited || result.limitedCCW;
result.limitedCW = true;
result.limitedCCW = true;
} else { // Handle true endpoints
result.blockedCW ||= (result.limitedCW && x.isLimitingCW) || x.isBlockingCW;
result.blockedCCW ||= (result.limitedCCW && x.isLimitingCCW) || x.isBlockingCCW;
result.limitedCW ||= x.isLimitingCW;
result.limitedCCW ||= x.isLimitingCCW;
}
// Define the insertion method and record a collision point
if ( result.blockedCW ) {
insert ||= c.unshift;
if ( !result.blockedCWPrev ) insert.call(c, x);
}
if ( result.blockedCCW ) {
insert ||= c.push;
if ( !result.blockedCCWPrev ) insert.call(c, x);
}
// Update blocking flags
if ( result.blockedCW && result.blockedCCW ) return;
result.blockedCWPrev ||= result.blockedCW;
result.blockedCCWPrev ||= result.blockedCCW;
}
}
/* -------------------------------------------- */
/**
* Identify the collision points between an emitted Ray and a set of active edges.
* @param {PolygonVertex[]} vertices Active vertices
* @param {Set<number>} keys Active vertex keys
* @param {PolygonRay} ray The candidate ray to test
* @param {EdgeSet} activeEdges The set of edges to check for collisions against the ray
*/
#addInternalEdgeCollisions(vertices, keys, ray, activeEdges) {
for ( const edge of activeEdges ) {
if ( keys.has(edge.vertexA.key) || keys.has(edge.vertexB.key) ) continue;
const x = foundry.utils.lineLineIntersection(ray.A, ray.B, edge.vertexA, edge.vertexB);
if ( !x ) continue;
const c = foundry.canvas.edges.PolygonVertex.fromPoint(x);
c.attachEdge(edge, 0, this.config.type);
c.isInternal = true;
c._d2 = Math.pow(x.x - ray.A.x, 2) + Math.pow(x.y - ray.A.y, 2);
vertices.push(c);
}
}
/* -------------------------------------------- */
/* Collision Testing */
/* -------------------------------------------- */
/** @override */
_testCollision(ray, mode) {
const {debug, type} = this.config;
// Identify candidate edges
this._identifyEdges();
// Identify collision points
let collisions = new Map();
for ( const edge of this.edges ) {
const x = foundry.utils.lineSegmentIntersection(this.origin, ray.B, edge.a, edge.b);
if ( !x || (x.t0 <= 0) ) continue;
if ( (mode === "any") && (!edge.isLimited(type) || collisions.size) ) return true;
let c = foundry.canvas.edges.PolygonVertex.fromPoint(x, {distance: x.t0});
if ( collisions.has(c.key) ) c = collisions.get(c.key);
else collisions.set(c.key, c);
c.attachEdge(edge, 0, type);
}
if ( mode === "any" ) return false;
// Sort collisions
collisions = Array.from(collisions.values()).sort((a, b) => a._distance - b._distance);
if ( collisions[0]?.isLimited ) collisions.shift();
// Visualize result
if ( debug ) this._visualizeCollision(ray, collisions);
// Return collision result
if ( mode === "all" ) return collisions;
else return collisions[0] || null;
}
/* -------------------------------------------- */
/* Visualization */
/* -------------------------------------------- */
/** @override */
visualize() {
let dg = canvas.controls.debug;
dg.clear();
// Text debugging
if ( !canvas.controls.debug.debugText ) {
canvas.controls.debug.debugText = canvas.controls.addChild(new PIXI.Container());
}
const text = canvas.controls.debug.debugText;
text.removeChildren().forEach(c => c.destroy({children: true}));
// Define limitation colors
const limitColors = {
[CONST.WALL_SENSE_TYPES.NONE]: 0x77E7E8,
[CONST.WALL_SENSE_TYPES.NORMAL]: 0xFFFFBB,
[CONST.WALL_SENSE_TYPES.LIMITED]: 0x81B90C,
[CONST.WALL_SENSE_TYPES.PROXIMITY]: 0xFFFFBB,
[CONST.WALL_SENSE_TYPES.DISTANCE]: 0xFFFFBB
};
// Draw boundary shapes
for ( const constraint of this.config.boundaryShapes ) {
dg.lineStyle(2, 0xFF4444, 1.0).beginFill(0xFF4444, 0.10).drawShape(constraint).endFill();
}
// Draw the final polygon shape
dg.beginFill(0x00AAFF, 0.25).drawShape(this).endFill();
// Draw candidate edges
for ( let edge of this.edges ) {
const c = limitColors[edge[this.config.type]];
dg.lineStyle(4, c).moveTo(edge.a.x, edge.a.y).lineTo(edge.b.x, edge.b.y);
}
// Draw vertices
for ( let vertex of this.vertices.values() ) {
const r = vertex.restriction;
if ( r ) dg.lineStyle(1, 0x000000).beginFill(limitColors[r]).drawCircle(vertex.x, vertex.y, 8).endFill();
if ( vertex._index ) {
let t = text.addChild(new PIXI.Text(String(vertex._index), CONFIG.canvasTextStyle));
t.position.set(vertex.x, vertex.y);
}
}
// Draw emitted rays
for ( let ray of this.rays ) {
const r = ray.result;
if ( r ) {
dg.lineStyle(2, 0x00FF00, r.collisions.length ? 1.0 : 0.33).moveTo(ray.A.x, ray.A.y).lineTo(ray.B.x, ray.B.y);
for ( let c of r.collisions ) {
dg.lineStyle(1, 0x000000).beginFill(0xFF0000).drawCircle(c.x, c.y, 6).endFill();
}
}
}
return dg;
}
/* -------------------------------------------- */
/**
* Visualize a collision test, displaying the candidate edges, the attempted ray, and its collision points
* @param {Ray} ray
* @param {PolygonVertex[]} collisions
* @private
*/
_visualizeCollision(ray, collisions) {
let dg = canvas.controls.debug;
dg.clear();
const limitColors = {
[CONST.WALL_SENSE_TYPES.NONE]: 0x77E7E8,
[CONST.WALL_SENSE_TYPES.NORMAL]: 0xFFFFBB,
[CONST.WALL_SENSE_TYPES.LIMITED]: 0x81B90C,
[CONST.WALL_SENSE_TYPES.PROXIMITY]: 0xFFFFBB,
[CONST.WALL_SENSE_TYPES.DISTANCE]: 0xFFFFBB
};
// Draw edges
for ( let edge of this.edges.values() ) {
const c = limitColors[edge[this.config.type]];
dg.lineStyle(4, c).moveTo(edge.a.x, edge.a.y).lineTo(edge.b.x, edge.b.y);
}
// Draw the attempted ray
dg.lineStyle(4, 0x0066CC).moveTo(ray.A.x, ray.A.y).lineTo(ray.B.x, ray.B.y);
// Draw collision points
for ( let x of collisions ) {
dg.lineStyle(1, 0x000000).beginFill(0xFF0000).drawCircle(x.x, x.y, 6).endFill();
}
}
}
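// Editorial usage sketch (not part of the original source). The _testCollision override
// above is normally reached through the static testCollision entry point of the configured
// polygon backend, using the same {type, mode} options that appear in DetectionMode#_testLOS
// later in this commit.
function hasSightCollision(origin, target) {
  return CONFIG.Canvas.polygonBackends.sight.testCollision(origin, target, {
    type: "sight",   // The sense type whose edges should block
    mode: "any"      // "any" short-circuits to a boolean, as in _testCollision above
  });
}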


@@ -0,0 +1,362 @@
/**
* A Detection Mode which can be associated with any kind of sense/vision/perception.
* A token could have multiple detection modes.
*/
class DetectionMode extends foundry.abstract.DataModel {
/** @inheritDoc */
static defineSchema() {
const fields = foundry.data.fields;
return {
id: new fields.StringField({blank: false}),
label: new fields.StringField({blank: false}),
tokenConfig: new fields.BooleanField({initial: true}), // If this DM is available in Token Config UI
walls: new fields.BooleanField({initial: true}), // If this DM is constrained by walls
angle: new fields.BooleanField({initial: true}), // If this DM is constrained by the vision angle
type: new fields.NumberField({
initial: this.DETECTION_TYPES.SIGHT,
choices: Object.values(this.DETECTION_TYPES)
})
};
}
/* -------------------------------------------- */
/**
* Get the detection filter pertaining to this mode.
* @returns {PIXI.Filter|undefined}
*/
static getDetectionFilter() {
return this._detectionFilter;
}
/**
* An optional filter to apply on the target when it is detected with this mode.
* @type {PIXI.Filter|undefined}
*/
static _detectionFilter;
static {
/**
* The type of the detection mode.
* @enum {number}
*/
Object.defineProperty(this, "DETECTION_TYPES", {value: Object.freeze({
SIGHT: 0, // Sight, and anything depending on light perception
SOUND: 1, // What you can hear. Includes echolocation for bats, for example
MOVE: 2, // This is mostly a sense for touch and vibration, like tremorsense, movement detection, etc.
OTHER: 3 // Anything which does not fit the other types (smell, life sense, trans-dimensional sense, sense of humor...)
})});
/**
* The identifier of the basic sight detection mode.
* @type {string}
*/
Object.defineProperty(this, "BASIC_MODE_ID", {value: "basicSight"});
}
/* -------------------------------------------- */
/* Visibility Testing */
/* -------------------------------------------- */
/**
* Test visibility of a target object or array of points for a specific vision source.
* @param {VisionSource} visionSource The vision source being tested
* @param {TokenDetectionMode} mode The detection mode configuration
* @param {CanvasVisibilityTestConfig} config The visibility test configuration
* @returns {boolean} Is the test target visible?
*/
testVisibility(visionSource, mode, {object, tests}={}) {
if ( !mode.enabled ) return false;
if ( !this._canDetect(visionSource, object) ) return false;
return tests.some(test => this._testPoint(visionSource, mode, object, test));
}
/* -------------------------------------------- */
/**
* Can this VisionSource theoretically detect a certain object based on its properties?
* This check should not consider the relative positions of either object, only their state.
* @param {VisionSource} visionSource The vision source being tested
* @param {PlaceableObject} target The target object being tested
* @returns {boolean} Can the target object theoretically be detected by this vision source?
* @protected
*/
_canDetect(visionSource, target) {
const src = visionSource.object.document;
const isSight = this.type === DetectionMode.DETECTION_TYPES.SIGHT;
// Sight-based detection fails when blinded
if ( isSight && src.hasStatusEffect(CONFIG.specialStatusEffects.BLIND) ) return false;
// Detection fails if burrowing unless walls are ignored
if ( this.walls && src.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
if ( target instanceof Token ) {
const tgt = target.document;
// Sight-based detection cannot see invisible tokens
if ( isSight && tgt.hasStatusEffect(CONFIG.specialStatusEffects.INVISIBLE) ) return false;
// Burrowing tokens cannot be detected unless walls are ignored
if ( this.walls && tgt.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
}
return true;
}
/* -------------------------------------------- */
/**
* Evaluate a single test point to confirm whether it is visible.
* Standard detection rules require that the test point be both within LOS and within range.
* @param {VisionSource} visionSource The vision source being tested
* @param {TokenDetectionMode} mode The detection mode configuration
* @param {PlaceableObject} target The target object being tested
* @param {CanvasVisibilityTest} test The test case being evaluated
* @returns {boolean}
* @protected
*/
_testPoint(visionSource, mode, target, test) {
if ( !this._testRange(visionSource, mode, target, test) ) return false;
return this._testLOS(visionSource, mode, target, test);
}
/* -------------------------------------------- */
/**
* Test whether the line-of-sight requirement for detection is satisfied.
* Always true if the detection mode bypasses walls, otherwise the test point must be contained by the LOS polygon.
* The result is cached for the vision source so that later checks for other detection modes do not repeat it.
* @param {VisionSource} visionSource The vision source being tested
* @param {TokenDetectionMode} mode The detection mode configuration
* @param {PlaceableObject} target The target object being tested
* @param {CanvasVisibilityTest} test The test case being evaluated
* @returns {boolean} Is the LOS requirement satisfied for this test?
* @protected
*/
_testLOS(visionSource, mode, target, test) {
if ( !this.walls ) return this._testAngle(visionSource, mode, target, test);
const type = visionSource.constructor.sourceType;
const isSight = type === "sight";
if ( isSight && visionSource.blinded.darkness ) return false;
if ( !this.angle && (visionSource.data.angle < 360) ) {
// Constrained by walls but not by vision angle
return !CONFIG.Canvas.polygonBackends[type].testCollision(
{ x: visionSource.x, y: visionSource.y },
test.point,
{ type, mode: "any", source: visionSource, useThreshold: true, includeDarkness: isSight }
);
}
// Constrained by walls and vision angle
let hasLOS = test.los.get(visionSource);
if ( hasLOS === undefined ) {
hasLOS = visionSource.los.contains(test.point.x, test.point.y);
test.los.set(visionSource, hasLOS);
}
return hasLOS;
}
/* -------------------------------------------- */
/**
* Test whether the target is within the vision angle.
* @param {VisionSource} visionSource The vision source being tested
* @param {TokenDetectionMode} mode The detection mode configuration
* @param {PlaceableObject} target The target object being tested
* @param {CanvasVisibilityTest} test The test case being evaluated
* @returns {boolean} Is the point within the vision angle?
* @protected
*/
_testAngle(visionSource, mode, target, test) {
if ( !this.angle ) return true;
const { angle, rotation, externalRadius } = visionSource.data;
if ( angle >= 360 ) return true;
const point = test.point;
const dx = point.x - visionSource.x;
const dy = point.y - visionSource.y;
if ( (dx * dx) + (dy * dy) <= (externalRadius * externalRadius) ) return true;
const aMin = rotation + 90 - (angle / 2);
const a = Math.toDegrees(Math.atan2(dy, dx));
return (((a - aMin) % 360) + 360) % 360 <= angle;
}
/* -------------------------------------------- */
/**
* Verify that a target is in range of a source.
* @param {VisionSource} visionSource The vision source being tested
* @param {TokenDetectionMode} mode The detection mode configuration
* @param {PlaceableObject} target The target object being tested
* @param {CanvasVisibilityTest} test The test case being evaluated
* @returns {boolean} Is the target within range?
* @protected
*/
_testRange(visionSource, mode, target, test) {
if ( mode.range === null ) return true;
if ( mode.range <= 0 ) return false;
const radius = visionSource.object.getLightRadius(mode.range);
const dx = test.point.x - visionSource.x;
const dy = test.point.y - visionSource.y;
return ((dx * dx) + (dy * dy)) <= (radius * radius);
}
}
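// Editorial sketch (assumption-laden, not part of the original source): manually driving a
// registered detection mode outside the normal CanvasVisibility workflow. The test object
// shape ({point, elevation, los}) matches the fields read by the _test* methods above and by
// DetectionModeLightPerception below; CONFIG.Canvas.detectionModes and the "basicSight"
// registry key follow assumed core registration.
function probeBasicSight(visionSource, token, point, elevation=0) {
  const dm = CONFIG.Canvas.detectionModes[DetectionMode.BASIC_MODE_ID];
  const mode = {id: dm.id, enabled: true, range: null};  // A null range is unlimited (see _testRange)
  return dm.testVisibility(visionSource, mode, {
    object: token,
    tests: [{point, elevation, los: new Map()}]
  });
}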
/* -------------------------------------------- */
/**
* This detection mode tests whether the target is visible due to being illuminated by a light source.
* By default, tokens have light perception with an infinite range if light perception isn't explicitly
* configured.
*/
class DetectionModeLightPerception extends DetectionMode {
/** @override */
_canDetect(visionSource, target) {
// Cannot see while blinded or burrowing
const src = visionSource.object.document;
if ( src.hasStatusEffect(CONFIG.specialStatusEffects.BLIND)
|| src.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
// Cannot see invisible or burrowing creatures
if ( target instanceof Token ) {
const tgt = target.document;
if ( tgt.hasStatusEffect(CONFIG.specialStatusEffects.INVISIBLE)
|| tgt.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
}
return true;
}
/* -------------------------------------------- */
/** @inheritDoc */
_testPoint(visionSource, mode, target, test) {
if ( !super._testPoint(visionSource, mode, target, test) ) return false;
return canvas.effects.testInsideLight(test.point, test.elevation);
}
}
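// Editorial sketch: the illumination test used by DetectionModeLightPerception#_testPoint
// above can also be queried directly, e.g. to ask whether a map location is currently lit.
// Assumes a ready canvas; elevation follows the same convention as the visibility tests above.
function isIlluminated(x, y, elevation=0) {
  return canvas.effects.testInsideLight({x, y}, elevation);
}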
/* -------------------------------------------- */
/**
* A special detection mode which models a form of darkvision (night vision).
* This mode is the default case which is tested first when evaluating visibility of objects.
*/
class DetectionModeBasicSight extends DetectionMode {
/** @override */
_canDetect(visionSource, target) {
// Cannot see while blinded or burrowing
const src = visionSource.object.document;
if ( src.hasStatusEffect(CONFIG.specialStatusEffects.BLIND)
|| src.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
// Cannot see invisible or burrowing creatures
if ( target instanceof Token ) {
const tgt = target.document;
if ( tgt.hasStatusEffect(CONFIG.specialStatusEffects.INVISIBLE)
|| tgt.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
}
return true;
}
}
/* -------------------------------------------- */
/**
* Detection mode that sees invisible creatures.
* This detection mode allows the source to see or detect the invisible target as if it were visible.
* The "See" variant requires sight and is affected by blindness.
*/
class DetectionModeInvisibility extends DetectionMode {
/** @override */
static getDetectionFilter() {
return this._detectionFilter ??= GlowOverlayFilter.create({
glowColor: [0, 0.60, 0.33, 1]
});
}
/** @override */
_canDetect(visionSource, target) {
if ( !(target instanceof Token) ) return false;
const tgt = target.document;
// Only invisible tokens can be detected
if ( !tgt.hasStatusEffect(CONFIG.specialStatusEffects.INVISIBLE) ) return false;
const src = visionSource.object.document;
const isSight = this.type === DetectionMode.DETECTION_TYPES.SIGHT;
// Sight-based detection fails when blinded
if ( isSight && src.hasStatusEffect(CONFIG.specialStatusEffects.BLIND) ) return false;
// Detection fails when the source or target token is burrowing unless walls are ignored
if ( this.walls ) {
if ( src.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
if ( tgt.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
}
return true;
}
}
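// Editorial sketch: granting a token this detection mode from a macro or module. The
// "seeInvisibility" registry id is the assumed core id for this class; entries follow the
// TokenDetectionMode shape ({id, enabled, range}) used throughout this file.
async function grantSeeInvisibility(token, range=30) {
  const modes = token.document.detectionModes.filter(m => m.id !== "seeInvisibility");
  modes.push({id: "seeInvisibility", enabled: true, range});
  await token.document.update({detectionModes: modes});
}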
/* -------------------------------------------- */
/**
* Detection mode that sees creatures in contact with the ground.
*/
class DetectionModeTremor extends DetectionMode {
/** @override */
static getDetectionFilter() {
return this._detectionFilter ??= OutlineOverlayFilter.create({
outlineColor: [1, 0, 1, 1],
knockout: true,
wave: true
});
}
/** @override */
_canDetect(visionSource, target) {
if ( !(target instanceof Token) ) return false;
const tgt = target.document;
// Flying and hovering tokens cannot be detected
if ( tgt.hasStatusEffect(CONFIG.specialStatusEffects.FLY) ) return false;
if ( tgt.hasStatusEffect(CONFIG.specialStatusEffects.HOVER) ) return false;
return true;
}
}
/* -------------------------------------------- */
/**
* Detection mode that sees ALL creatures (no blockers).
* If not constrained by walls, it sees everything within range.
*/
class DetectionModeAll extends DetectionMode {
/** @override */
static getDetectionFilter() {
return this._detectionFilter ??= OutlineOverlayFilter.create({
outlineColor: [0.85, 0.85, 1.0, 1],
knockout: true
});
}
/** @override */
_canDetect(visionSource, target) {
const src = visionSource.object.document;
const isSight = this.type === DetectionMode.DETECTION_TYPES.SIGHT;
// Sight-based detection fails when blinded
if ( isSight && src.hasStatusEffect(CONFIG.specialStatusEffects.BLIND) ) return false;
// Detection fails when the source or target token is burrowing unless walls are ignored
if ( !this.walls ) return true;
if ( src.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
if ( target instanceof Token ) {
const tgt = target.document;
if ( tgt.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
}
return true;
}
}
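// Editorial sketch: a module could register an additional detection mode alongside the core
// ones defined above. The CONFIG.Canvas.detectionModes registry and the "init" hook reflect
// assumed standard usage; the id, label, and behavior below are purely illustrative.
Hooks.once("init", () => {
  class DetectionModeExampleScent extends DetectionMode {
    /** @override */
    _canDetect(visionSource, target) {
      return target instanceof Token;  // Illustrative only: detect any token within range
    }
  }
  CONFIG.Canvas.detectionModes.exampleScent = new DetectionModeExampleScent({
    id: "exampleScent",
    label: "Scent (example)",
    type: DetectionMode.DETECTION_TYPES.OTHER
  });
});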


@@ -0,0 +1,501 @@
/**
* A fog of war management class which is the singleton canvas.fog instance.
* @category - Canvas
*/
class FogManager {
/**
* The FogExploration document which applies to this canvas view
* @type {FogExploration|null}
*/
exploration = null;
/**
* A status flag for whether the layer initialization workflow has succeeded
* @type {boolean}
* @private
*/
#initialized = false;
/**
* Track whether we have pending fog updates which have not yet been saved to the database
* @type {boolean}
* @internal
*/
_updated = false;
/**
* Texture extractor
* @type {TextureExtractor}
*/
get extractor() {
return this.#extractor;
}
#extractor;
/**
* The fog refresh count.
* If it exceeds the commit threshold, the fog texture is saved to the database and the count is reset to 0.
* @type {number}
*/
#refreshCount = 0;
/**
* Matrix used for fog rendering transformation.
* @type {PIXI.Matrix}
*/
#renderTransform = new PIXI.Matrix();
/**
* Define the number of fog refreshes needed before the fog texture is extracted and pushed to the server.
* @type {number}
*/
static COMMIT_THRESHOLD = 70;
/**
* A debounced function to save fog of war exploration once a continuous stream of updates has concluded.
* @type {Function}
*/
#debouncedSave;
/**
* Handles concurrency for fog loading, saving, and reset.
* @type {Semaphore}
*/
#queue = new foundry.utils.Semaphore();
/* -------------------------------------------- */
/* Fog Manager Properties */
/* -------------------------------------------- */
/**
* The exploration SpriteMesh which holds the fog exploration texture.
* @type {SpriteMesh}
*/
get sprite() {
return this.#explorationSprite || (this.#explorationSprite = this._createExplorationObject());
}
#explorationSprite;
/* -------------------------------------------- */
/**
* The configured options used for the saved fog-of-war texture.
* @type {FogTextureConfiguration}
*/
get textureConfiguration() {
return canvas.visibility.textureConfiguration;
}
/* -------------------------------------------- */
/**
* Does the currently viewed Scene support Token field of vision?
* @type {boolean}
*/
get tokenVision() {
return canvas.scene.tokenVision;
}
/* -------------------------------------------- */
/**
* Does the currently viewed Scene support fog of war exploration?
* @type {boolean}
*/
get fogExploration() {
return canvas.scene.fog.exploration;
}
/* -------------------------------------------- */
/* Fog of War Management */
/* -------------------------------------------- */
/**
* Create the exploration display object with or without a provided texture.
* @param {PIXI.Texture|PIXI.RenderTexture} [tex] Optional exploration texture.
* @returns {DisplayObject}
* @internal
*/
_createExplorationObject(tex) {
return new SpriteMesh(tex ?? Canvas.getRenderTexture({
clearColor: [0, 0, 0, 1],
textureConfiguration: this.textureConfiguration
}), FogSamplerShader);
}
/* -------------------------------------------- */
/**
* Initialize fog of war - resetting it when switching scenes or re-drawing the canvas
* @returns {Promise<void>}
*/
async initialize() {
this.#initialized = false;
// Create a TextureExtractor instance
if ( this.#extractor === undefined ) {
try {
this.#extractor = new TextureExtractor(canvas.app.renderer, {
callerName: "FogExtractor",
controlHash: true,
format: PIXI.FORMATS.RED
});
} catch(e) {
this.#extractor = null;
console.error(e);
}
}
this.#extractor?.reset();
// Bind a debounced save handler
this.#debouncedSave = foundry.utils.debounce(this.save.bind(this), 2000);
// Load the initial fog texture
await this.load();
this.#initialized = true;
}
/* -------------------------------------------- */
/**
* Clear the fog and reinitialize properties (commit and save when not resetting)
* @returns {Promise<void>}
*/
async clear() {
// Save any pending exploration
try {
await this.save();
} catch(e) {
ui.notifications.error("Failed to save fog exploration");
console.error(e);
}
// Deactivate current fog exploration
this.#initialized = false;
this.#deactivate();
}
/* -------------------------------------------- */
/**
* Once a new Fog of War location is explored, composite the explored container with the current staging sprite.
* Once the number of refreshes exceeds the commit threshold, save the fog texture to the database.
*/
commit() {
const vision = canvas.visibility.vision;
if ( !vision?.children.length || !this.fogExploration || !this.tokenVision ) return;
if ( !this.#explorationSprite?.texture.valid ) return;
// Reuse the sprite texture if it is already a RenderTexture, otherwise get a staging texture,
// and render the entire vision container into it
const dims = canvas.dimensions;
const isRenderTex = this.#explorationSprite.texture instanceof PIXI.RenderTexture;
const tex = isRenderTex ? this.#explorationSprite.texture : Canvas.getRenderTexture({
clearColor: [0, 0, 0, 1],
textureConfiguration: this.textureConfiguration
});
this.#renderTransform.tx = -dims.sceneX;
this.#renderTransform.ty = -dims.sceneY;
// Render the currently revealed vision (preview excluded) to the texture
vision.containmentFilter.enabled = canvas.visibility.needsContainment;
vision.light.preview.visible = false;
vision.light.mask.preview.visible = false;
vision.sight.preview.visible = false;
canvas.app.renderer.render(isRenderTex ? vision : this.#explorationSprite, {
renderTexture: tex,
clear: false,
transform: this.#renderTransform
});
vision.light.preview.visible = true;
vision.light.mask.preview.visible = true;
vision.sight.preview.visible = true;
vision.containmentFilter.enabled = false;
if ( !isRenderTex ) this.#explorationSprite.texture.destroy(true);
this.#explorationSprite.texture = tex;
this._updated = true;
if ( !this.exploration ) {
const fogExplorationCls = getDocumentClass("FogExploration");
this.exploration = new fogExplorationCls();
}
// Schedule saving the texture to the database
if ( this.#refreshCount > FogManager.COMMIT_THRESHOLD ) {
this.#debouncedSave();
this.#refreshCount = 0;
}
else this.#refreshCount++;
}
/* -------------------------------------------- */
/**
* Load existing fog of war data from local storage and populate the initial exploration sprite
* @returns {Promise<(PIXI.Texture|void)>}
*/
async load() {
return await this.#queue.add(this.#load.bind(this));
}
/* -------------------------------------------- */
/**
* Load existing fog of war data from local storage and populate the initial exploration sprite
* @returns {Promise<(PIXI.Texture|void)>}
*/
async #load() {
if ( CONFIG.debug.fog.manager ) console.debug("FogManager | Loading saved FogExploration for Scene.");
this.#deactivate();
// Take no further action if token vision is not enabled
if ( !this.tokenVision ) return;
// Load existing FOW exploration data or create a new placeholder
const fogExplorationCls = /** @type {typeof FogExploration} */ getDocumentClass("FogExploration");
this.exploration = await fogExplorationCls.load();
// Extract and assign the fog data image
const assign = (tex, resolve) => {
if ( this.#explorationSprite?.texture === tex ) return resolve(tex);
this.#explorationSprite?.destroy(true);
this.#explorationSprite = this._createExplorationObject(tex);
canvas.visibility.resetExploration();
canvas.perception.initialize();
resolve(tex);
};
// Initialize the exploration sprite if no exploration data exists
if ( !this.exploration ) {
return await new Promise(resolve => {
assign(Canvas.getRenderTexture({
clearColor: [0, 0, 0, 1],
textureConfiguration: this.textureConfiguration
}), resolve);
});
}
// Otherwise load the texture from the exploration data
return await new Promise(resolve => {
let tex = this.exploration.getTexture();
if ( tex === null ) assign(Canvas.getRenderTexture({
clearColor: [0, 0, 0, 1],
textureConfiguration: this.textureConfiguration
}), resolve);
else if ( tex.baseTexture.valid ) assign(tex, resolve);
else tex.on("update", tex => assign(tex, resolve));
});
}
/* -------------------------------------------- */
/**
* Dispatch a request to reset the fog of war exploration status for all users within this Scene.
* Once the server has deleted existing FogExploration documents, the _onReset handler will re-draw the canvas.
*/
async reset() {
if ( CONFIG.debug.fog.manager ) console.debug("FogManager | Resetting fog of war exploration for Scene.");
game.socket.emit("resetFog", canvas.scene.id);
}
/* -------------------------------------------- */
/**
* Request a fog of war save operation.
* Note: if a save operation is already pending, we wait for it to conclude.
*/
async save() {
return await this.#queue.add(this.#save.bind(this));
}
/* -------------------------------------------- */
/**
* Request a fog of war save operation.
* Note: if a save operation is already pending, we wait for it to conclude.
*/
async #save() {
if ( !this._updated ) return;
this._updated = false;
const exploration = this.exploration;
if ( CONFIG.debug.fog.manager ) {
console.debug("FogManager | Initiate non-blocking extraction of the fog of war progress.");
}
if ( !this.#extractor ) {
console.error("FogManager | Browser does not support texture extraction.");
return;
}
// Get compressed base64 image from the fog texture
const base64Image = await this._extractBase64();
// If the exploration changed, the fog was reloaded while the pixels were extracted
if ( this.exploration !== exploration ) return;
// Need to skip?
if ( !base64Image ) {
if ( CONFIG.debug.fog.manager ) console.debug("FogManager | Fog of war has not changed. Skipping db operation.");
return;
}
// Update the fog exploration document
const updateData = this._prepareFogUpdateData(base64Image);
await this.#updateFogExploration(updateData);
}
/* -------------------------------------------- */
/**
* Extract fog data as a base64 string
* @returns {Promise<string>}
* @protected
*/
async _extractBase64() {
try {
return this.#extractor.extract({
texture: this.#explorationSprite.texture,
compression: TextureExtractor.COMPRESSION_MODES.BASE64,
type: "image/webp",
quality: 0.8,
debug: CONFIG.debug.fog.extractor
});
} catch(err) {
// FIXME this is needed because for some reason .extract() may throw a boolean false instead of an Error
throw new Error("Fog of War base64 extraction failed");
}
}
/* -------------------------------------------- */
/**
* Prepare the data that will be used to update the FogExploration document.
* @param {string} base64Image The extracted base64 image data
* @returns {Partial<FogExplorationData>} Exploration data to update
* @protected
*/
_prepareFogUpdateData(base64Image) {
return {explored: base64Image, timestamp: Date.now()};
}
/* -------------------------------------------- */
/**
* Update the fog exploration document with provided data.
* @param {object} updateData
* @returns {Promise<void>}
*/
async #updateFogExploration(updateData) {
if ( !game.scenes.has(canvas.scene?.id) ) return;
if ( !this.exploration ) return;
if ( CONFIG.debug.fog.manager ) console.debug("FogManager | Saving fog of war progress into exploration document.");
if ( !this.exploration.id ) {
this.exploration.updateSource(updateData);
this.exploration = await this.exploration.constructor.create(this.exploration.toJSON(), {loadFog: false});
}
else await this.exploration.update(updateData, {loadFog: false});
}
/* -------------------------------------------- */
/**
* Deactivate fog of war.
* Clear all shared containers by unlinking them from their parent.
* Destroy all stored textures and graphics.
*/
#deactivate() {
// Remove the current exploration document
this.exploration = null;
this.#extractor?.reset();
// Destroy current exploration texture and provide a new one with transparency
if ( this.#explorationSprite && !this.#explorationSprite.destroyed ) this.#explorationSprite.destroy(true);
this.#explorationSprite = undefined;
this._updated = false;
this.#refreshCount = 0;
}
/* -------------------------------------------- */
/**
* If fog of war data is reset from the server, deactivate the current fog and initialize the exploration.
* @returns {Promise}
* @internal
*/
async _handleReset() {
return await this.#queue.add(this.#handleReset.bind(this));
}
/* -------------------------------------------- */
/**
* If fog of war data is reset from the server, deactivate the current fog and initialize the exploration.
* @returns {Promise}
*/
async #handleReset() {
ui.notifications.info("Fog of War exploration progress was reset for this Scene");
// Remove the current exploration document
this.#deactivate();
// Reset exploration in the visibility layer
canvas.visibility.resetExploration();
// Refresh perception
canvas.perception.initialize();
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
get pending() {
const msg = "pending is deprecated and redirected to the exploration container";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return canvas.visibility.explored;
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
get revealed() {
const msg = "revealed is deprecated and redirected to the exploration container";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return canvas.visibility.explored;
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
update(source, force=false) {
const msg = "update is obsolete and always returns true. The fog exploration does not record position anymore.";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return true;
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
get resolution() {
const msg = "resolution is deprecated and redirected to CanvasVisibility#textureConfiguration";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return canvas.visibility.textureConfiguration;
}
}
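// Editorial sketch: exercising the canvas.fog singleton documented above from a world
// script. All methods are defined on the class; "canvasReady" is the standard hook fired
// once the canvas has finished drawing.
Hooks.on("canvasReady", async () => {
  if ( !canvas.fog.fogExploration ) return;  // The Scene does not track exploration
  canvas.fog.commit();                       // Composite currently revealed vision
  await canvas.fog.save();                   // Queue texture extraction and a database save
});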


@@ -0,0 +1,185 @@
/**
* A helper class which manages the refresh workflow for perception layers on the canvas.
* This controls the logic which batches multiple requested updates to minimize the amount of work required.
* A singleton instance is available as {@link Canvas#perception}.
*/
class PerceptionManager extends RenderFlagsMixin(Object) {
/**
* @typedef {RenderFlags} PerceptionManagerFlags
* @property {boolean} initializeLighting Re-initialize the entire lighting configuration. An aggregate behavior
* which does no work directly but propagates to set several other flags.
* @property {boolean} initializeVision Re-initialize the entire vision configuration.
* See {@link CanvasVisibility#initializeSources}.
* @property {boolean} initializeVisionModes Initialize the active vision modes.
* See {@link CanvasVisibility#initializeVisionMode}.
* @property {boolean} initializeSounds Re-initialize the entire ambient sound configuration.
* See {@link SoundsLayer#initializeSources}.
* @property {boolean} refreshEdges Recompute intersections between all registered edges.
* See {@link CanvasEdges#refresh}.
* @property {boolean} refreshLighting Refresh the rendered appearance of lighting
* @property {boolean} refreshLightSources Update the configuration of light sources
* @property {boolean} refreshOcclusion Refresh occlusion
* @property {boolean} refreshPrimary Refresh the contents of the PrimaryCanvasGroup mesh
* @property {boolean} refreshSounds Refresh the audio state of ambient sounds
* @property {boolean} refreshVision Refresh the rendered appearance of vision
* @property {boolean} refreshVisionSources Update the configuration of vision sources
* @property {boolean} soundFadeDuration Apply a fade duration to sound refresh workflow
*/
/** @override */
static RENDER_FLAGS = {
// Edges
refreshEdges: {},
// Light and Darkness Sources
initializeLighting: {propagate: ["initializeDarknessSources", "initializeLightSources"]},
initializeDarknessSources: {propagate: ["refreshLighting", "refreshVision", "refreshEdges"]},
initializeLightSources: {propagate: ["refreshLighting", "refreshVision"]},
refreshLighting: {propagate: ["refreshLightSources"]},
refreshLightSources: {},
// Vision
initializeVisionModes: {propagate: ["refreshVisionSources", "refreshLighting", "refreshPrimary"]},
initializeVision: {propagate: ["initializeVisionModes", "refreshVision"]},
refreshVision: {propagate: ["refreshVisionSources", "refreshOcclusionMask"]},
refreshVisionSources: {},
// Primary Canvas Group
refreshPrimary: {},
refreshOcclusion: {propagate: ["refreshOcclusionStates", "refreshOcclusionMask"]},
refreshOcclusionStates: {},
refreshOcclusionMask: {},
// Sound
initializeSounds: {propagate: ["refreshSounds"]},
refreshSounds: {},
soundFadeDuration: {},
/** @deprecated since v12 */
refreshTiles: {
propagate: ["refreshOcclusion"],
deprecated: {message: "The refreshTiles flag is deprecated in favor of refreshOcclusion",
since: 12, until: 14, alias: true}
},
/** @deprecated since v12 */
identifyInteriorWalls: {
propagate: ["initializeLighting", "initializeVision"],
deprecated: {
message: "The identifyInteriorWalls is now obsolete and has no replacement.",
since: 12, until: 14, alias: true
}
},
/** @deprecated since v11 */
forceUpdateFog: {
propagate: ["refreshVision"],
deprecated: {
message: "The forceUpdateFog flag is now obsolete and has no replacement. "
+ "The fog is now always updated when the visibility is refreshed", since: 11, until: 13, alias: true
}
}
};
static #deprecatedFlags = ["refreshTiles", "identifyInteriorWalls", "forceUpdateFog"];
/** @override */
static RENDER_FLAG_PRIORITY = "PERCEPTION";
/* -------------------------------------------- */
/** @override */
applyRenderFlags() {
if ( !this.renderFlags.size ) return;
const flags = this.renderFlags.clear();
// Initialize darkness sources
if ( flags.initializeDarknessSources ) canvas.effects.initializeDarknessSources();
// Recompute edge intersections
if ( flags.refreshEdges ) canvas.edges.refresh();
// Initialize positive light sources
if ( flags.initializeLightSources ) canvas.effects.initializeLightSources();
// Initialize active vision sources
if ( flags.initializeVision ) canvas.visibility.initializeSources();
// Initialize the active vision mode
if ( flags.initializeVisionModes ) canvas.visibility.initializeVisionMode();
// Initialize active sound sources
if ( flags.initializeSounds ) canvas.sounds.initializeSources();
// Refresh light, vision, and sound sources
if ( flags.refreshLightSources ) canvas.effects.refreshLightSources();
if ( flags.refreshVisionSources ) canvas.effects.refreshVisionSources();
if ( flags.refreshSounds ) canvas.sounds.refresh({fade: flags.soundFadeDuration ? 250 : 0});
// Refresh the appearance of the Primary Canvas Group environment
if ( flags.refreshPrimary ) canvas.primary.refreshPrimarySpriteMesh();
if ( flags.refreshLighting ) canvas.effects.refreshLighting();
if ( flags.refreshVision ) canvas.visibility.refresh();
// Update roof occlusion states based on token positions and vision
// TODO: separate occlusion state testing from CanvasOcclusionMask
if ( flags.refreshOcclusion ) canvas.masks.occlusion.updateOcclusion();
else {
if ( flags.refreshOcclusionMask ) canvas.masks.occlusion._updateOcclusionMask();
if ( flags.refreshOcclusionStates ) canvas.masks.occlusion._updateOcclusionStates();
}
// Deprecated flags
for ( const f of PerceptionManager.#deprecatedFlags ) {
if ( flags[f] ) {
const {message, since, until} = PerceptionManager.RENDER_FLAGS[f].deprecated;
foundry.utils.logCompatibilityWarning(message, {since, until});
}
}
}
/* -------------------------------------------- */
/**
* Update perception manager flags which configure which behaviors occur on the next frame render.
* @param {object} flags Flag values (true) to assign where the keys belong to PerceptionManager.FLAGS
*/
update(flags) {
if ( !canvas.ready ) return;
this.renderFlags.set(flags);
}
/* -------------------------------------------- */
/**
* A helper function to perform an immediate initialization plus incremental refresh.
*/
initialize() {
return this.update({
refreshEdges: true,
initializeLighting: true,
initializeVision: true,
initializeSounds: true,
refreshOcclusion: true
});
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
refresh() {
foundry.utils.logCompatibilityWarning("PerceptionManager#refresh is deprecated in favor of assigning granular "
+ "refresh flags", {since: 12, until: 14});
return this.update({
refreshLighting: true,
refreshVision: true,
refreshSounds: true,
refreshOcclusion: true
});
}
}
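// Editorial sketch: requesting batched perception work through the canvas.perception
// singleton. Flags are accumulated and consumed on the next frame by applyRenderFlags above.
canvas?.perception?.update({
  refreshLighting: true,   // Re-render the lighting appearance
  refreshVision: true,     // Re-render the visibility polygons
  refreshSounds: true,     // Re-evaluate audible ambient sounds
  soundFadeDuration: true  // Fade sound volume changes (250ms, see applyRenderFlags)
});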


@@ -0,0 +1,178 @@
/**
* A special subclass of DataField used to reference an AbstractBaseShader definition.
*/
class ShaderField extends foundry.data.fields.DataField {
/** @inheritdoc */
static get _defaults() {
const defaults = super._defaults;
defaults.nullable = true;
defaults.initial = undefined;
return defaults;
}
/** @override */
_cast(value) {
if ( !foundry.utils.isSubclass(value, AbstractBaseShader) ) {
throw new Error("The value provided to a ShaderField must be an AbstractBaseShader subclass.");
}
return value;
}
}
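// Editorial sketch: ShaderField casts like any other DataField and only accepts
// AbstractBaseShader subclasses (see _cast above). AdaptiveBackgroundShader is assumed to be
// one of the core shader classes available at runtime.
const exampleShaderField = new ShaderField({nullable: true});
// exampleShaderField._cast(AdaptiveBackgroundShader);  // Returns the class itself
// exampleShaderField._cast({});                        // Throws: not an AbstractBaseShader subclass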
/**
* A Vision Mode which can be selected for use by a Token.
* The selected Vision Mode alters the appearance of various aspects of the canvas while that Token is the POV.
*/
class VisionMode extends foundry.abstract.DataModel {
/**
* Construct a Vision Mode using provided configuration parameters and callback functions.
* @param {object} data Data which fulfills the model defined by the VisionMode schema.
* @param {object} [options] Additional options passed to the DataModel constructor.
*/
constructor(data={}, options={}) {
super(data, options);
this.animated = options.animated ?? false;
}
/** @inheritDoc */
static defineSchema() {
const fields = foundry.data.fields;
const shaderSchema = () => new fields.SchemaField({
shader: new ShaderField(),
uniforms: new fields.ObjectField()
});
const lightingSchema = () => new fields.SchemaField({
visibility: new fields.NumberField({
initial: this.LIGHTING_VISIBILITY.ENABLED,
choices: Object.values(this.LIGHTING_VISIBILITY)
}),
postProcessingModes: new fields.ArrayField(new fields.StringField()),
uniforms: new fields.ObjectField()
});
// Return model schema
return {
id: new fields.StringField({blank: false}),
label: new fields.StringField({blank: false}),
tokenConfig: new fields.BooleanField({initial: true}),
canvas: new fields.SchemaField({
shader: new ShaderField(),
uniforms: new fields.ObjectField()
}),
lighting: new fields.SchemaField({
background: lightingSchema(),
coloration: lightingSchema(),
illumination: lightingSchema(),
darkness: lightingSchema(),
levels: new fields.ObjectField({
validate: o => {
const values = Object.values(CONST.LIGHTING_LEVELS);
return Object.entries(o).every(([k, v]) => values.includes(Number(k)) && values.includes(v));
},
validationError: "may only contain a mapping of keys from VisionMode.LIGHTING_LEVELS"
}),
multipliers: new fields.ObjectField({
validate: o => {
const values = Object.values(CONST.LIGHTING_LEVELS);
return Object.entries(o).every(([k, v]) => values.includes(Number(k)) && Number.isFinite(v));
},
validationError: "must provide a mapping of keys from VisionMode.LIGHTING_LEVELS to numeric multiplier values"
})
}),
vision: new fields.SchemaField({
background: shaderSchema(),
coloration: shaderSchema(),
illumination: shaderSchema(),
darkness: new fields.SchemaField({
adaptive: new fields.BooleanField({initial: true})
}),
defaults: new fields.SchemaField({
color: new fields.ColorField({required: false, initial: undefined}),
attenuation: new fields.AlphaField({required: false, initial: undefined}),
brightness: new fields.NumberField({required: false, initial: undefined, nullable: false, min: -1, max: 1}),
saturation: new fields.NumberField({required: false, initial: undefined, nullable: false, min: -1, max: 1}),
contrast: new fields.NumberField({required: false, initial: undefined, nullable: false, min: -1, max: 1})
}),
preferred: new fields.BooleanField({initial: false})
})
};
}
/**
* The lighting illumination levels which are supported.
* @enum {number}
*/
static LIGHTING_LEVELS = CONST.LIGHTING_LEVELS;
/**
* Flags for how each lighting channel should be rendered for the currently active vision modes:
* - Disabled: this lighting layer is not rendered; the shaders do not decide.
* - Enabled: this lighting layer is rendered normally, and the shaders can choose whether it should be rendered or not.
* - Required: the lighting layer is rendered; the shaders do not decide.
* @enum {number}
*/
static LIGHTING_VISIBILITY = {
DISABLED: 0,
ENABLED: 1,
REQUIRED: 2
};
/**
* A flag for whether this vision source is animated
* @type {boolean}
*/
animated = false;
/**
* Does this vision mode enable light sources?
* True unless it disables lighting entirely.
* @type {boolean}
*/
get perceivesLight() {
const {background, illumination, coloration} = this.lighting;
return !!(background.visibility || illumination.visibility || coloration.visibility);
}
/**
* Special activation handling that could be implemented by VisionMode subclasses
* @param {VisionSource} source Activate this VisionMode for a specific source
* @abstract
*/
_activate(source) {}
/**
* Special deactivation handling that could be implemented by VisionMode subclasses
* @param {VisionSource} source Deactivate this VisionMode for a specific source
* @abstract
*/
_deactivate(source) {}
/**
* Special handling which is needed when this Vision Mode is activated for a VisionSource.
* @param {VisionSource} source Activate this VisionMode for a specific source
*/
activate(source) {
if ( source._visionModeActivated ) return;
source._visionModeActivated = true;
this._activate(source);
}
/**
* Special handling which is needed when this Vision Mode is deactivated for a VisionSource.
* @param {VisionSource} source Deactivate this VisionMode for a specific source
*/
deactivate(source) {
if ( !source._visionModeActivated ) return;
source._visionModeActivated = false;
this._deactivate(source);
}
/**
* An animation function which runs every frame while this Vision Mode is active.
* @param {number} dt The deltaTime passed by the PIXI Ticker
*/
animate(dt) {
return foundry.canvas.sources.PointVisionSource.prototype.animateTime.call(this, dt);
}
}
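// Editorial sketch: vision modes are data models registered for selection in the Token
// configuration. CONFIG.Canvas.visionModes is the assumed core registry; the id, label, and
// the saturation default below are purely illustrative.
Hooks.once("init", () => {
  CONFIG.Canvas.visionModes.exampleMonochrome = new VisionMode({
    id: "exampleMonochrome",
    label: "Monochrome (example)",
    vision: {
      defaults: {saturation: -1}  // Fully desaturate the scene while this mode is active
    }
  });
});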


@@ -0,0 +1,354 @@
/**
* An implementation of the Weiler Atherton algorithm for clipping polygons.
* This currently only handles combinations that will not result in any holes.
* Support may be added for holes in the future.
*
* This algorithm is faster than the Clipper library for this task because it relies on the unique properties of the
* circle, ellipse, or convex simple clip object.
* It is also more precise in that it uses the actual intersection points between the circle/ellipse and polygon,
* instead of relying on the polygon approximation of the circle/ellipse to find the intersection points.
*
* For more explanation of the underlying algorithm, see:
* https://en.wikipedia.org/wiki/Weiler%E2%80%93Atherton_clipping_algorithm
* https://www.geeksforgeeks.org/weiler-atherton-polygon-clipping-algorithm
* https://h-educate.in/weiler-atherton-polygon-clipping-algorithm/
*/
class WeilerAthertonClipper {
/**
* Construct a WeilerAthertonClipper instance used to perform the calculation.
* @param {PIXI.Polygon} polygon Polygon to clip
* @param {PIXI.Rectangle|PIXI.Circle} clipObject Object used to clip the polygon
* @param {number} clipType Type of clip to use
* @param {object} clipOpts Object passed to the clippingObject methods toPolygon and pointsBetween
*/
constructor(polygon, clipObject, clipType, clipOpts) {
if ( !polygon.isPositive ) {
const msg = "WeilerAthertonClipper#constructor needs a subject polygon with a positive signed area.";
throw new Error(msg);
}
clipType ??= this.constructor.CLIP_TYPES.INTERSECT;
clipOpts ??= {};
this.polygon = polygon;
this.clipObject = clipObject;
this.config = { clipType, clipOpts };
}
/**
* The supported clip types.
* Values are equivalent to those in ClipperLib.ClipType.
* @enum {number}
*/
static CLIP_TYPES = Object.freeze({
INTERSECT: 0,
UNION: 1
});
/**
* The supported intersection types.
* @enum {number}
*/
static INTERSECTION_TYPES = Object.freeze({
OUT_IN: -1,
IN_OUT: 1,
TANGENT: 0
});
/** @type {PIXI.Polygon} */
polygon;
/** @type {PIXI.Rectangle|PIXI.Circle} */
clipObject;
/**
* Configuration settings
* @type {object} [config]
* @param {WeilerAthertonClipper.CLIP_TYPES} [config.clipType] One of CLIP_TYPES
* @param {object} [config.clipOpts] Object passed to the clippingObject methods
* toPolygon and pointsBetween
*/
config = {};
/* -------------------------------------------- */
/**
* Union a polygon and clipObject using the Weiler Atherton algorithm.
* @param {PIXI.Polygon} polygon Polygon to clip
* @param {PIXI.Rectangle|PIXI.Circle} clipObject Object to clip against the polygon
* @param {object} clipOpts Options passed to the clipping object
* methods toPolygon and pointsBetween
* @returns {PIXI.Polygon[]}
*/
static union(polygon, clipObject, clipOpts = {}) {
return this.combine(polygon, clipObject, {clipType: this.CLIP_TYPES.UNION, ...clipOpts});
}
/* -------------------------------------------- */
/**
* Intersect a polygon and clipObject using the Weiler Atherton algorithm.
* @param {PIXI.Polygon} polygon Polygon to clip
* @param {PIXI.Rectangle|PIXI.Circle} clipObject Object to clip against the polygon
* @param {object} clipOpts Options passed to the clipping object
* methods toPolygon and pointsBetween
* @returns {PIXI.Polygon[]}
*/
static intersect(polygon, clipObject, clipOpts = {}) {
return this.combine(polygon, clipObject, {clipType: this.CLIP_TYPES.INTERSECT, ...clipOpts});
}
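/* -------------------------------------------- */
/* Editorial example: the static helpers above are the typical entry points, e.g.
 *
 *   const circle = new PIXI.Circle(100, 100, 50);
 *   const [clipped] = WeilerAthertonClipper.intersect(somePositivePolygon, circle);
 *
 * assuming `somePositivePolygon` is a PIXI.Polygon with positive signed area (enforced by the
 * constructor) and that any clip options are understood by the clip object's toPolygon and
 * pointsBetween methods.
 */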
/* -------------------------------------------- */
/**
* Clip a given clipObject using the Weiler-Atherton algorithm.
*
* At the moment, this will return a single PIXI.Polygon in the array unless clipType is a union and the polygon
* and clipObject do not overlap, in which case the [polygon, clipObject.toPolygon()] array will be returned.
* If this algorithm is expanded in the future to handle holes, an array of polygons may be returned.
*
* @param {PIXI.Polygon} polygon Polygon to clip
* @param {PIXI.Rectangle|PIXI.Circle} clipObject Object to clip against the polygon
* @param {object} [options] Options which configure how the union or intersection is computed
* @param {WeilerAthertonClipper.CLIP_TYPES} [options.clipType] One of CLIP_TYPES
* @param {boolean} [options.canMutate] Whether the Weiler-Atherton construction may mutate the subject polygon's points
* @param {object} [options.clipOpts] Options passed to the WeilerAthertonClipper constructor
* @returns {PIXI.Polygon[]} Array of polygons and clipObjects
*/
static combine(polygon, clipObject, {clipType, canMutate, ...clipOpts}={}) {
if ( (clipType !== this.CLIP_TYPES.INTERSECT) && (clipType !== this.CLIP_TYPES.UNION) ) {
throw new Error("The Weiler-Atherton clipping algorithm only supports INTERSECT or UNION clip types.");
}
if ( canMutate && !polygon.isPositive ) polygon.reverseOrientation();
const wa = new this(polygon, clipObject, clipType, clipOpts);
const trackingArray = wa.#buildPointTrackingArray();
if ( !trackingArray.length ) return this.testForEnvelopment(polygon, clipObject, clipType, clipOpts);
return wa.#combineNoHoles(trackingArray);
}
/* -------------------------------------------- */
/**
* Clip the polygon with the clipObject, assuming no holes will be created.
* For a union or intersect with no holes, a single pass through the intersections will
* build the resulting union shape.
* @param {PolygonVertex[]} trackingArray Array of linked points and intersections
* @returns {[PIXI.Polygon]}
*/
#combineNoHoles(trackingArray) {
const clipType = this.config.clipType;
const ln = trackingArray.length;
let prevIx = trackingArray[ln - 1];
let wasTracingPolygon = (prevIx.type === this.constructor.INTERSECTION_TYPES.OUT_IN) ^ clipType;
const newPoly = new PIXI.Polygon();
for ( let i = 0; i < ln; i += 1 ) {
const ix = trackingArray[i];
this.#processIntersection(ix, prevIx, wasTracingPolygon, newPoly);
wasTracingPolygon = !wasTracingPolygon;
prevIx = ix;
}
return [newPoly];
}
/* -------------------------------------------- */
/**
* Given an intersection and the previous intersection, fill the points
* between the two intersections, in clockwise order.
* @param {PolygonVertex} ix Intersection to process
* @param {PolygonVertex} prevIx Previous intersection to process
* @param {boolean} wasTracingPolygon Whether we were tracing the polygon (true) or the clipObject (false).
* @param {PIXI.Polygon} newPoly The new polygon that results from this clipping operation
*/
#processIntersection(ix, prevIx, wasTracingPolygon, newPoly) {
const clipOpts = this.config.clipOpts;
const pts = wasTracingPolygon ? ix.leadingPoints : this.clipObject.pointsBetween(prevIx, ix, clipOpts);
for ( const pt of pts ) newPoly.addPoint(pt);
newPoly.addPoint(ix);
}
/* -------------------------------------------- */
/**
* Test if one shape envelops the other. Assumes the shapes do not intersect.
* 1. Polygon is contained within the clip object. Union: clip object; Intersect: polygon
* 2. Clip object is contained within the polygon. Union: polygon; Intersect: clip object
* 3. Polygon and clip object are outside one another. Union: both; Intersect: null
* @param {PIXI.Polygon} polygon Polygon to clip
* @param {PIXI.Rectangle|PIXI.Circle} clipObject Object to clip against the polygon
* @param {WeilerAthertonClipper.CLIP_TYPES} clipType One of CLIP_TYPES
* @param {object} clipOpts Clip options which are forwarded to toPolygon methods
* @returns {PIXI.Polygon[]} Returns the polygon, the clipObject.toPolygon(), both, or neither.
*/
static testForEnvelopment(polygon, clipObject, clipType, clipOpts) {
const points = polygon.points;
if ( points.length < 6 ) return [];
const union = clipType === this.CLIP_TYPES.UNION;
// Option 1: Polygon contained within clipObject
// We search for the first point of the polygon that is not on the boundary of the clip object.
// Such a point can be used to determine whether the polygon is contained in the clip object.
// If all points of the polygon are on the boundary of the clip object, which is either a circle
// or a rectangle, then the polygon is contained within the clip object.
let polygonInClipObject = true;
for ( let i = 0; i < points.length; i += 2 ) {
const point = { x: points[i], y: points[i + 1] };
if ( !clipObject.pointIsOn(point) ) {
polygonInClipObject = clipObject.contains(point.x, point.y);
break;
}
}
if ( polygonInClipObject ) return union ? [clipObject.toPolygon(clipOpts)] : [polygon];
// Option 2: ClipObject contained within polygon
const center = clipObject.center;
// PointSourcePolygons need to have a bounds defined in order for polygon.contains to work.
if ( polygon instanceof PointSourcePolygon ) polygon.bounds ??= polygon.getBounds();
const clipObjectInPolygon = polygon.contains(center.x, center.y);
if ( clipObjectInPolygon ) return union ? [polygon] : [clipObject.toPolygon(clipOpts)];
// Option 3: Neither contains the other
return union ? [polygon, clipObject.toPolygon(clipOpts)] : [];
}
/* -------------------------------------------- */
/**
* Construct an array of intersections between the polygon and the clipping object.
* The intersections follow clockwise around the polygon.
* Round all intersections and polygon vertices to the nearest pixel (integer).
* @returns {Point[]}
*/
#buildPointTrackingArray() {
const labeledPoints = this.#buildIntersectionArray();
if ( !labeledPoints.length ) return [];
return WeilerAthertonClipper.#consolidatePoints(labeledPoints);
}
/* -------------------------------------------- */
/**
* Construct an array that holds all the points of the polygon with all the intersections with the clipObject
* inserted, in correct position moving clockwise.
* If an intersection and endpoint are nearly the same, prefer the intersection.
* Intersections are labeled with isIntersection and type = out/in or in/out. Tangents are removed.
* @returns {Point[]} Labeled array of points
*/
#buildIntersectionArray() {
const { polygon, clipObject } = this;
const points = polygon.points;
const ln = points.length;
if ( ln < 6 ) return []; // Minimum 3 Points required
// Need to start with a non-intersecting point on the polygon.
let startIdx = -1;
let a;
for ( let i = 0; i < ln; i += 2 ) {
a = { x: points[i], y: points[i + 1] };
if ( !clipObject.pointIsOn(a) ) {
startIdx = i;
break;
}
}
if ( !~startIdx ) return []; // All intersections, so all tangent
// For each edge a|b, find the intersection point(s) with the clipObject.
// Add intersections and endpoints to the pointsIxs array, taking care to avoid duplicating
// points. For example, if the intersection equals a, add only the intersection, not both.
let previousInside = clipObject.contains(a.x, a.y);
let numPrevIx = 0;
let lastIx = undefined;
let secondLastIx = undefined;
const pointsIxs = [a];
const types = this.constructor.INTERSECTION_TYPES;
const nIter = startIdx + ln + 2; // Add +2 to close the polygon.
for ( let i = startIdx + 2; i < nIter; i += 2 ) {
const j = i >= ln ? i % ln : i; // Circle back around the points as necessary.
const b = { x: points[j], y: points[j + 1] };
const ixs = clipObject.segmentIntersections(a, b);
const ixsLn = ixs.length;
let bIsIx = false;
if ( ixsLn ) {
bIsIx = b.x.almostEqual(ixs[ixsLn - 1].x) && b.y.almostEqual(ixs[ixsLn - 1].y);
// If the intersection equals the current b, get that intersection next iteration.
if ( bIsIx ) ixs.pop();
// Determine whether the intersection is out-->in or in-->out
numPrevIx += ixs.length;
for ( const ix of ixs ) {
ix.isIntersection = true;
ix.type = lastIx ? -lastIx.type : previousInside ? types.IN_OUT : types.OUT_IN;
secondLastIx = lastIx;
lastIx = ix;
}
pointsIxs.push(...ixs);
}
// If b is an intersection, we will return to it next iteration.
if ( bIsIx ) {
a = b;
continue;
}
// Each intersection represents a move across the clipObject border.
// Count them and determine if we are now inside or outside the clipObject.
if ( numPrevIx ) {
const isInside = clipObject.contains(b.x, b.y);
const changedSide = isInside ^ previousInside;
const isOdd = numPrevIx & 1;
// If odd number of intersections, should switch. e.g., outside --> ix --> inside
// If even number of intersections, should stay same. e.g., outside --> ix --> ix --> outside.
if ( isOdd ^ changedSide ) {
if ( numPrevIx === 1 ) lastIx.isIntersection = false;
else {
secondLastIx.isIntersection = false;
lastIx.type = secondLastIx.type;
}
}
previousInside = isInside;
numPrevIx = 0;
secondLastIx = undefined;
lastIx = undefined;
}
pointsIxs.push(b);
a = b;
}
return pointsIxs;
}
/* -------------------------------------------- */
/**
* Given an array of labeled points, consolidate into a tracking array of intersections,
* where each intersection contains its array of leadingPoints.
* @param {Point[]} labeledPoints Array of labeled points from #buildIntersectionArray
* @returns {Point[]} Array of intersections
*/
static #consolidatePoints(labeledPoints) {
// Locate the first intersection
const startIxIdx = labeledPoints.findIndex(pt => pt.isIntersection);
if ( !~startIxIdx ) return []; // No intersections, so no tracking array
const labeledLn = labeledPoints.length;
let leadingPoints = [];
const trackingArray = [];
// Closed polygon, so use the last point to circle back
for ( let i = 0; i < labeledLn; i += 1 ) {
const j = (i + startIxIdx) % labeledLn;
const pt = labeledPoints[j];
if ( pt.isIntersection ) {
pt.leadingPoints = leadingPoints;
leadingPoints = [];
trackingArray.push(pt);
} else leadingPoints.push(pt);
}
// Add leading points to first intersection
trackingArray[0].leadingPoints = leadingPoints;
return trackingArray;
}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,482 @@
/**
* An AmbientLight is an implementation of PlaceableObject which represents a dynamic light source within the Scene.
* @category - Canvas
* @see {@link AmbientLightDocument}
* @see {@link LightingLayer}
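* @example
* // Illustrative sketch (assumes a viewed Scene with at least one placed light):
* const light = canvas.lighting.placeables[0];
* console.log(light.radius, light.emitsLight, light.emitsDarkness);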
*/
class AmbientLight extends PlaceableObject {
/**
* The area that is affected by this light.
* @type {PIXI.Graphics}
*/
field;
/**
* A reference to the PointSource object which defines this light or darkness area of effect.
* This is undefined if the AmbientLight does not provide an active source of light.
* @type {PointDarknessSource|PointLightSource}
*/
lightSource;
/* -------------------------------------------- */
/** @inheritdoc */
static embeddedName = "AmbientLight";
/** @override */
static RENDER_FLAGS = {
redraw: {propagate: ["refresh"]},
refresh: {propagate: ["refreshState", "refreshField", "refreshElevation"], alias: true},
refreshField: {propagate: ["refreshPosition"]},
refreshPosition: {},
refreshState: {},
refreshElevation: {}
};
/* -------------------------------------------- */
/** @inheritdoc */
get bounds() {
const {x, y} = this.document;
const r = Math.max(this.dimRadius, this.brightRadius);
return new PIXI.Rectangle(x-r, y-r, 2*r, 2*r);
}
/* -------------------------------------------- */
/** @override */
get sourceId() {
let id = `${this.document.documentName}.${this.document.id}`;
if ( this.isPreview ) id += ".preview";
return id;
}
/* -------------------------------------------- */
/**
* A convenience accessor to the LightData configuration object
* @returns {LightData}
*/
get config() {
return this.document.config;
}
/* -------------------------------------------- */
/**
* Test whether a specific AmbientLight source provides global illumination
* @type {boolean}
*/
get global() {
return this.document.isGlobal;
}
/* -------------------------------------------- */
/**
* The maximum radius in pixels of the light field
* @type {number}
*/
get radius() {
return Math.max(Math.abs(this.dimRadius), Math.abs(this.brightRadius));
}
/* -------------------------------------------- */
/**
* Get the pixel radius of dim light emitted by this light source
* @type {number}
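* @example
* // Worked example with hypothetical scene dimensions:
* // config.dim = 30 scene units, canvas.dimensions.distance = 5, canvas.dimensions.size = 100
* // dimRadius = (30 / 5) * 100 = 600 pixels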
*/
get dimRadius() {
let d = canvas.dimensions;
return ((this.config.dim / d.distance) * d.size);
}
/* -------------------------------------------- */
/**
* Get the pixel radius of bright light emitted by this light source
* @type {number}
*/
get brightRadius() {
let d = canvas.dimensions;
return ((this.config.bright / d.distance) * d.size);
}
/* -------------------------------------------- */
/**
* Is this Ambient Light currently visible? By default, true only if the source actively emits light or darkness.
* @type {boolean}
*/
get isVisible() {
return !this._isLightSourceDisabled();
}
/* -------------------------------------------- */
/**
* Check if the point source is a LightSource instance
* @type {boolean}
*/
get isLightSource() {
return this.lightSource instanceof CONFIG.Canvas.lightSourceClass;
}
/* -------------------------------------------- */
/**
* Check if the point source is a DarknessSource instance
* @type {boolean}
*/
get isDarknessSource() {
return this.lightSource instanceof CONFIG.Canvas.darknessSourceClass;
}
/* -------------------------------------------- */
/**
* Is the source of this Ambient Light disabled?
* @type {boolean}
* @protected
*/
_isLightSourceDisabled() {
const {hidden, config} = this.document;
// Hidden lights are disabled
if ( hidden ) return true;
// Lights with zero radius or angle are disabled
if ( !(this.radius && config.angle) ) return true;
// If the darkness level is outside of the darkness activation range, the light is disabled
const darkness = canvas.darknessLevel;
return !darkness.between(config.darkness.min, config.darkness.max);
}
/* -------------------------------------------- */
/**
* Does this Ambient Light actively emit darkness light given
* its properties and the current darkness level of the Scene?
* @type {boolean}
*/
get emitsDarkness() {
return this.document.config.negative && !this._isLightSourceDisabled();
}
/* -------------------------------------------- */
/**
* Does this Ambient Light actively emit positive light given
* its properties and the current darkness level of the Scene?
* @type {boolean}
*/
get emitsLight() {
return !this.document.config.negative && !this._isLightSourceDisabled();
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/** @override */
_destroy(options) {
this.#destroyLightSource();
}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
this.field = this.addChild(new PIXI.Graphics());
this.field.eventMode = "none";
this.controlIcon = this.addChild(this.#drawControlIcon());
this.initializeLightSource();
}
/* -------------------------------------------- */
/**
* Draw the ControlIcon for the AmbientLight
* @returns {ControlIcon}
*/
#drawControlIcon() {
const size = Math.max(Math.round((canvas.dimensions.size * 0.5) / 20) * 20, 40);
let icon = new ControlIcon({texture: CONFIG.controlIcons.light, size: size });
icon.x -= (size * 0.5);
icon.y -= (size * 0.5);
return icon;
}
/* -------------------------------------------- */
/* Incremental Refresh */
/* -------------------------------------------- */
/** @override */
_applyRenderFlags(flags) {
if ( flags.refreshState ) this._refreshState();
if ( flags.refreshPosition ) this._refreshPosition();
if ( flags.refreshField ) this._refreshField();
if ( flags.refreshElevation ) this._refreshElevation();
}
/* -------------------------------------------- */
/**
* Refresh the shape of the light field-of-effect. This is refreshed when the AmbientLight fov polygon changes.
* @protected
*/
_refreshField() {
this.field.clear();
if ( !this.lightSource?.shape ) return;
this.field.lineStyle(2, 0xEEEEEE, 0.4).drawShape(this.lightSource.shape);
this.field.position.set(-this.lightSource.x, -this.lightSource.y);
}
/* -------------------------------------------- */
/**
* Refresh the position of the AmbientLight. Called when the coordinates change.
* @protected
*/
_refreshPosition() {
const {x, y} = this.document;
if ( (this.position.x !== x) || (this.position.y !== y) ) MouseInteractionManager.emulateMoveEvent();
this.position.set(x, y);
}
/* -------------------------------------------- */
/**
* Refresh the elevation of the control icon.
* @protected
*/
_refreshElevation() {
this.controlIcon.elevation = this.document.elevation;
}
/* -------------------------------------------- */
/**
* Refresh the state of the light. Called when the disabled state or darkness conditions change.
* @protected
*/
_refreshState() {
this.alpha = this._getTargetAlpha();
this.zIndex = this.hover ? 1 : 0;
this.refreshControl();
}
/* -------------------------------------------- */
/**
* Refresh the display of the ControlIcon for this AmbientLight source.
*/
refreshControl() {
const isHidden = this.id && this.document.hidden;
this.controlIcon.texture = getTexture(this.isVisible ? CONFIG.controlIcons.light : CONFIG.controlIcons.lightOff);
this.controlIcon.tintColor = isHidden ? 0xFF3300 : 0xFFFFFF;
this.controlIcon.borderColor = isHidden ? 0xFF3300 : 0xFF5500;
this.controlIcon.elevation = this.document.elevation;
this.controlIcon.refresh({visible: this.layer.active, borderVisible: this.hover || this.layer.highlightObjects});
this.controlIcon.draw();
}
/* -------------------------------------------- */
/* Light Source Management */
/* -------------------------------------------- */
/**
* Update the LightSource associated with this AmbientLight object.
* @param {object} [options={}] Options which modify how the source is updated
* @param {boolean} [options.deleted=false] Indicate that this light source has been deleted
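* @example
* // Sketch (assumes `light` is a placed AmbientLight): temporarily remove the source,
* // as done internally during a drag operation, then restore it.
* light.initializeLightSource({deleted: true});
* // ...later...
* light.initializeLightSource();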
*/
initializeLightSource({deleted=false}={}) {
const sourceId = this.sourceId;
const wasLight = canvas.effects.lightSources.has(sourceId);
const wasDarkness = canvas.effects.darknessSources.has(sourceId);
const isDarkness = this.document.config.negative;
const perceptionFlags = {
refreshEdges: wasDarkness || isDarkness,
initializeVision: wasDarkness || isDarkness,
initializeLighting: wasDarkness || isDarkness,
refreshLighting: true,
refreshVision: true
};
// Remove the light source from the active collection
if ( deleted ) {
if ( !this.lightSource?.active ) return;
this.#destroyLightSource();
canvas.perception.update(perceptionFlags);
return;
}
// Re-create source if it switches darkness state
if ( (wasLight && isDarkness) || (wasDarkness && !isDarkness) ) this.#destroyLightSource();
// Create the light source if necessary
this.lightSource ??= this.#createLightSource();
// Re-initialize source data and add to the active collection
this.lightSource.initialize(this._getLightSourceData());
this.lightSource.add();
// Assign perception and render flags
canvas.perception.update(perceptionFlags);
if ( this.layer.active ) this.renderFlags.set({refreshField: true});
}
/* -------------------------------------------- */
/**
* Get the light source data.
* @returns {LightSourceData}
* @protected
*/
_getLightSourceData() {
const {x, y, elevation, rotation, walls, vision} = this.document;
const d = canvas.dimensions;
return foundry.utils.mergeObject(this.config.toObject(false), {
x, y, elevation, rotation, walls, vision,
dim: Math.clamp(this.dimRadius, 0, d.maxR),
bright: Math.clamp(this.brightRadius, 0, d.maxR),
seed: this.document.getFlag("core", "animationSeed"),
disabled: this._isLightSourceDisabled(),
preview: this.isPreview
});
}
/* -------------------------------------------- */
/**
* Returns a new point source: DarknessSource or LightSource, depending on the config data.
* @returns {foundry.canvas.sources.PointLightSource|foundry.canvas.sources.PointDarknessSource} The created source
*/
#createLightSource() {
const sourceClass = this.config.negative ? CONFIG.Canvas.darknessSourceClass : CONFIG.Canvas.lightSourceClass;
const sourceId = this.sourceId;
return new sourceClass({sourceId, object: this});
}
/* -------------------------------------------- */
/**
* Destroy the existing BaseEffectSource instance for this AmbientLight.
*/
#destroyLightSource() {
this.lightSource?.destroy();
this.lightSource = undefined;
}
/* -------------------------------------------- */
/* Document Event Handlers */
/* -------------------------------------------- */
/** @inheritDoc */
_onCreate(data, options, userId) {
super._onCreate(data, options, userId);
this.initializeLightSource();
}
/* -------------------------------------------- */
/** @override */
_onUpdate(changed, options, userId) {
super._onUpdate(changed, options, userId);
this.initializeLightSource();
this.renderFlags.set({
refreshState: ("hidden" in changed) || (("config" in changed)
&& ["dim", "bright", "angle", "darkness"].some(k => k in changed.config)),
refreshElevation: "elevation" in changed
});
}
/* -------------------------------------------- */
/** @inheritDoc */
_onDelete(options, userId) {
this.initializeLightSource({deleted: true});
super._onDelete(options, userId);
}
/* -------------------------------------------- */
/* Interactivity */
/* -------------------------------------------- */
/** @inheritdoc */
_canHUD(user, event) {
return user.isGM; // Allow GMs to single right-click
}
/* -------------------------------------------- */
/** @inheritdoc */
_canConfigure(user, event) {
return false; // Double-right does nothing
}
/* -------------------------------------------- */
/** @inheritDoc */
_canDragLeftStart(user, event) {
// Prevent dragging another light if currently previewing one.
if ( this.layer?.preview?.children.length ) {
ui.notifications.warn("CONTROLS.ObjectConfigured", { localize: true });
return false;
}
return super._canDragLeftStart(user, event);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onClickRight(event) {
this.document.update({hidden: !this.document.hidden});
if ( !this._propagateRightClick(event) ) event.stopPropagation();
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftMove(event) {
super._onDragLeftMove(event);
this.initializeLightSource({deleted: true});
const clones = event.interactionData.clones || [];
for ( const c of clones ) c.initializeLightSource();
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragEnd() {
this.initializeLightSource({deleted: true});
this._original?.initializeLightSource();
super._onDragEnd();
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
updateSource({deleted=false}={}) {
const msg = "AmbientLight#updateSource has been deprecated in favor of AmbientLight#initializeLightSource";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
this.initializeLightSource({deleted});
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get source() {
const msg = "AmbientLight#source has been deprecated in favor of AmbientLight#lightSource";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this.lightSource;
}
}

View File

@@ -0,0 +1,349 @@
/**
* A Note is an implementation of PlaceableObject which represents an annotated location within the Scene.
* Each Note links to a JournalEntry document and represents its location on the map.
* @category - Canvas
* @see {@link NoteDocument}
* @see {@link NotesLayer}
*/
class Note extends PlaceableObject {
/** @inheritdoc */
static embeddedName = "Note";
/** @override */
static RENDER_FLAGS = {
redraw: {propagate: ["refresh"]},
refresh: {propagate: ["refreshState", "refreshPosition", "refreshTooltip", "refreshElevation"], alias: true},
refreshState: {propagate: ["refreshVisibility"]},
refreshVisibility: {},
refreshPosition: {},
refreshTooltip: {},
refreshElevation: {propagate: ["refreshVisibility"]},
/** @deprecated since v12 */
refreshText: {propagate: ["refreshTooltip"], deprecated: {since: 12, until: 14}, alias: true}
};
/* -------------------------------------------- */
/**
* The control icon.
* @type {ControlIcon}
*/
controlIcon;
/* -------------------------------------------- */
/**
* The tooltip.
* @type {PreciseText}
*/
tooltip;
/* -------------------------------------------- */
/** @override */
get bounds() {
const {x, y, iconSize} = this.document;
const r = iconSize / 2;
return new PIXI.Rectangle(x - r, y - r, 2*r, 2*r);
}
/* -------------------------------------------- */
/**
* The associated JournalEntry which is referenced by this Note
* @type {JournalEntry}
*/
get entry() {
return this.document.entry;
}
/* -------------------------------------------- */
/**
* The specific JournalEntryPage within the associated JournalEntry referenced by this Note.
*/
get page() {
return this.document.page;
}
/* -------------------------------------------- */
/**
* Determine whether the Note is visible to the current user based on their perspective of the Scene.
* Visibility depends on permission to the underlying journal entry, as well as the perspective of controlled Tokens.
* If Token Vision is required, the user must have a token with vision over the note to see it.
* @type {boolean}
*/
get isVisible() {
const accessTest = this.document.page ?? this.document.entry;
const access = accessTest?.testUserPermission(game.user, "LIMITED") ?? true;
if ( (access === false) || !canvas.visibility.tokenVision || this.document.global ) return access;
const point = {x: this.document.x, y: this.document.y};
const tolerance = this.document.iconSize / 4;
return canvas.visibility.testVisibility(point, {tolerance, object: this});
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/** @override */
async _draw(options) {
this.controlIcon = this.addChild(this._drawControlIcon());
this.tooltip = this.addChild(this._drawTooltip());
}
/* -------------------------------------------- */
/**
* Draw the control icon.
* @returns {ControlIcon}
* @protected
*/
_drawControlIcon() {
const {texture, iconSize} = this.document;
const icon = new ControlIcon({texture: texture.src, size: iconSize, tint: texture.tint});
icon.x -= (iconSize / 2);
icon.y -= (iconSize / 2);
return icon;
}
/* -------------------------------------------- */
/**
* Draw the tooltip.
* @returns {PreciseText}
* @protected
*/
_drawTooltip() {
const tooltip = new PreciseText(this.document.label, this._getTextStyle());
tooltip.eventMode = "none";
return tooltip;
}
/* -------------------------------------------- */
/**
* Refresh the tooltip.
* @protected
*/
_refreshTooltip() {
this.tooltip.text = this.document.label;
this.tooltip.style = this._getTextStyle();
const halfPad = (0.5 * this.document.iconSize) + 12;
switch ( this.document.textAnchor ) {
case CONST.TEXT_ANCHOR_POINTS.CENTER:
this.tooltip.anchor.set(0.5, 0.5);
this.tooltip.position.set(0, 0);
break;
case CONST.TEXT_ANCHOR_POINTS.BOTTOM:
this.tooltip.anchor.set(0.5, 0);
this.tooltip.position.set(0, halfPad);
break;
case CONST.TEXT_ANCHOR_POINTS.TOP:
this.tooltip.anchor.set(0.5, 1);
this.tooltip.position.set(0, -halfPad);
break;
case CONST.TEXT_ANCHOR_POINTS.LEFT:
this.tooltip.anchor.set(1, 0.5);
this.tooltip.position.set(-halfPad, 0);
break;
case CONST.TEXT_ANCHOR_POINTS.RIGHT:
this.tooltip.anchor.set(0, 0.5);
this.tooltip.position.set(halfPad, 0);
break;
}
}
/* -------------------------------------------- */
/**
* Define a PIXI TextStyle object which is used for the tooltip displayed for this Note
* @returns {PIXI.TextStyle}
* @protected
*/
_getTextStyle() {
const style = CONFIG.canvasTextStyle.clone();
// Positioning
if ( this.document.textAnchor === CONST.TEXT_ANCHOR_POINTS.LEFT ) style.align = "right";
else if ( this.document.textAnchor === CONST.TEXT_ANCHOR_POINTS.RIGHT ) style.align = "left";
// Font preferences
style.fontFamily = this.document.fontFamily || CONFIG.defaultFontFamily;
style.fontSize = this.document.fontSize;
// Toggle stroke style depending on whether the text color is dark or light
const color = this.document.textColor;
style.fill = color;
style.stroke = color.hsv[2] > 0.6 ? 0x000000 : 0xFFFFFF;
style.strokeThickness = 4;
return style;
}
/* -------------------------------------------- */
/* Incremental Refresh */
/* -------------------------------------------- */
/** @override */
_applyRenderFlags(flags) {
if ( flags.refreshState ) this._refreshState();
if ( flags.refreshVisibility ) this._refreshVisibility();
if ( flags.refreshPosition ) this._refreshPosition();
if ( flags.refreshTooltip ) this._refreshTooltip();
if ( flags.refreshElevation ) this._refreshElevation();
}
/* -------------------------------------------- */
/**
* Refresh the visibility.
* @protected
*/
_refreshVisibility() {
const wasVisible = this.visible;
this.visible = this.isVisible;
if ( this.controlIcon ) this.controlIcon.refresh({
visible: this.visible,
borderVisible: this.hover || this.layer.highlightObjects
});
if ( wasVisible !== this.visible ) {
this.layer.hintMapNotes();
MouseInteractionManager.emulateMoveEvent();
}
}
/* -------------------------------------------- */
/**
* Refresh the state of the Note. Called when the Note enters a different interaction state.
* @protected
*/
_refreshState() {
this.alpha = this._getTargetAlpha();
this.tooltip.visible = this.hover || this.layer.highlightObjects;
this.zIndex = this.hover ? 1 : 0;
}
/* -------------------------------------------- */
/**
* Refresh the position of the Note. Called when the coordinates change.
* @protected
*/
_refreshPosition() {
const {x, y} = this.document;
if ( (this.position.x !== x) || (this.position.y !== y) ) MouseInteractionManager.emulateMoveEvent();
this.position.set(x, y);
}
/* -------------------------------------------- */
/**
* Refresh the elevation of the control icon.
* @protected
*/
_refreshElevation() {
this.controlIcon.elevation = this.document.elevation;
}
/* -------------------------------------------- */
/* Document Event Handlers */
/* -------------------------------------------- */
/** @inheritDoc */
_onUpdate(changed, options, userId) {
super._onUpdate(changed, options, userId);
// Incremental Refresh
const positionChanged = ("x" in changed) || ("y" in changed);
this.renderFlags.set({
redraw: ("texture" in changed) || ("iconSize" in changed),
refreshVisibility: positionChanged || ["entryId", "pageId", "global"].some(k => k in changed),
refreshPosition: positionChanged,
refreshTooltip: ["text", "fontFamily", "fontSize", "textAnchor", "textColor", "iconSize"].some(k => k in changed),
refreshElevation: "elevation" in changed
});
}
/* -------------------------------------------- */
/* Interactivity */
/* -------------------------------------------- */
/** @override */
_canHover(user) {
return true;
}
/* -------------------------------------------- */
/** @override */
_canView(user) {
const {entry, page} = this.document;
if ( !entry ) return false;
if ( game.user.isGM ) return true;
if ( page?.testUserPermission(game.user, "LIMITED", {exact: true}) ) {
// Special-case handling for image pages.
return page.type === "image";
}
const accessTest = page ?? entry;
return accessTest.testUserPermission(game.user, "OBSERVER");
}
/* -------------------------------------------- */
/** @override */
_canConfigure(user) {
return canvas.notes.active && this.document.canUserModify(game.user, "update");
}
/* -------------------------------------------- */
/** @inheritdoc */
_onClickLeft2(event) {
const {entry, page} = this.document;
if ( !entry ) return;
const options = {};
if ( page ) {
options.mode = JournalSheet.VIEW_MODES.SINGLE;
options.pageId = page.id;
}
const allowed = Hooks.call("activateNote", this, options);
if ( allowed === false ) return;
if ( page?.type === "image" ) {
return new ImagePopout(page.src, {
uuid: page.uuid,
title: page.name,
caption: page.image.caption
}).render(true);
}
entry.sheet.render(true, options);
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get text() {
const msg = "Note#text has been deprecated. Use Note#document#label instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this.document.label;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get size() {
const msg = "Note#size has been deprecated. Use Note#document#iconSize instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
return this.document.iconSize;
}
}

View File

@@ -0,0 +1,455 @@
/**
* A mixin which decorates a DisplayObject with additional properties expected for rendering in the PrimaryCanvasGroup.
* @category - Mixins
* @param {typeof PIXI.DisplayObject} DisplayObject The parent DisplayObject class being mixed
* @returns {typeof PrimaryCanvasObject} A DisplayObject subclass mixed with PrimaryCanvasObject features
* @mixin
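* @example
* // Minimal sketch of applying the mixin (class name is illustrative):
* class MyCanvasObject extends PrimaryCanvasObjectMixin(PIXI.Container) {}
* const pco = new MyCanvasObject();
* pco.elevation = 20;            // elevation must be numeric
* canvas.primary.addChild(pco);  // PCOs may only be direct children of the PrimaryCanvasGroup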
*/
function PrimaryCanvasObjectMixin(DisplayObject) {
/**
* A display object rendered in the PrimaryCanvasGroup.
* @param {...*} args The arguments passed to the base class constructor
*/
return class PrimaryCanvasObject extends CanvasTransformMixin(DisplayObject) {
constructor(...args) {
super(...args);
// Activate culling and initialize handlers
this.cullable = true;
this.on("added", this._onAdded);
this.on("removed", this._onRemoved);
}
/**
* An optional reference to the object that owns this PCO.
* This property does not affect the behavior of the PCO itself.
* @type {*}
* @default null
*/
object = null;
/**
* The entry in the quadtree.
* @type {QuadtreeObject|null}
*/
#quadtreeEntry = null;
/**
* Update the quadtree entry?
* @type {boolean}
*/
#quadtreeDirty = false;
/* -------------------------------------------- */
/* Properties */
/* -------------------------------------------- */
/**
* The elevation of this object.
* @type {number}
*/
get elevation() {
return this.#elevation;
}
set elevation(value) {
if ( (typeof value !== "number") || Number.isNaN(value) ) {
throw new Error("PrimaryCanvasObject#elevation must be a numeric value.");
}
if ( value === this.#elevation ) return;
this.#elevation = value;
if ( this.parent ) {
this.parent.sortDirty = true;
if ( this.shouldRenderDepth ) canvas.masks.depth._elevationDirty = true;
}
}
#elevation = 0;
/* -------------------------------------------- */
/**
* A key which resolves ties amongst objects at the same elevation within the same layer.
* @type {number}
*/
get sort() {
return this.#sort;
}
set sort(value) {
if ( (typeof value !== "number") || Number.isNaN(value) ) {
throw new Error("PrimaryCanvasObject#sort must be a numeric value.");
}
if ( value === this.#sort ) return;
this.#sort = value;
if ( this.parent ) this.parent.sortDirty = true;
}
#sort = 0;
/* -------------------------------------------- */
/**
* A key which resolves ties amongst objects at the same elevation of different layers.
* @type {number}
*/
get sortLayer() {
return this.#sortLayer;
}
set sortLayer(value) {
if ( (typeof value !== "number") || Number.isNaN(value) ) {
throw new Error("PrimaryCanvasObject#sortLayer must be a numeric value.");
}
if ( value === this.#sortLayer ) return;
this.#sortLayer = value;
if ( this.parent ) this.parent.sortDirty = true;
}
#sortLayer = 0;
/* -------------------------------------------- */
/**
* A key which resolves ties amongst objects at the same elevation within the same layer and same sort.
* @type {number}
*/
get zIndex() {
return this._zIndex;
}
set zIndex(value) {
if ( (typeof value !== "number") || Number.isNaN(value) ) {
throw new Error("PrimaryCanvasObject#zIndex must be a numeric value.");
}
if ( value === this._zIndex ) return;
this._zIndex = value;
if ( this.parent ) this.parent.sortDirty = true;
}
/* -------------------------------------------- */
/* PIXI Events */
/* -------------------------------------------- */
/**
* Event fired when this display object is added to a parent.
* @param {PIXI.Container} parent The new parent container.
* @protected
*/
_onAdded(parent) {
if ( parent !== canvas.primary ) {
throw new Error("PrimaryCanvasObject instances may only be direct children of the PrimaryCanvasGroup");
}
}
/* -------------------------------------------- */
/**
* Event fired when this display object is removed from its parent.
* @param {PIXI.Container} parent Parent from which the PCO is removed.
* @protected
*/
_onRemoved(parent) {
this.#updateQuadtree(true);
}
/* -------------------------------------------- */
/* Canvas Transform & Quadtree */
/* -------------------------------------------- */
/** @inheritdoc */
updateCanvasTransform() {
super.updateCanvasTransform();
this.#updateQuadtree();
this.#updateDepth();
}
/* -------------------------------------------- */
/** @inheritdoc */
_onCanvasBoundsUpdate() {
super._onCanvasBoundsUpdate();
this.#quadtreeDirty = true;
}
/* -------------------------------------------- */
/**
* Update the quadtree.
* @param {boolean} [remove=false] Remove the quadtree entry?
*/
#updateQuadtree(remove=false) {
if ( !this.#quadtreeDirty && !remove ) return;
this.#quadtreeDirty = false;
if ( !remove && (this.canvasBounds.width > 0) && (this.canvasBounds.height > 0) ) {
this.#quadtreeEntry ??= {r: this.canvasBounds, t: this};
canvas.primary.quadtree.update(this.#quadtreeEntry);
} else if ( this.#quadtreeEntry ) {
this.#quadtreeEntry = null;
canvas.primary.quadtree.remove(this);
}
}
/* -------------------------------------------- */
/* PCO Properties */
/* -------------------------------------------- */
/**
* Does this object render to the depth buffer?
* @type {boolean}
*/
get shouldRenderDepth() {
return this.#shouldRenderDepth;
}
/** @type {boolean} */
#shouldRenderDepth = false;
/* -------------------------------------------- */
/* Depth Rendering */
/* -------------------------------------------- */
/**
* Flag the depth as dirty if necessary.
*/
#updateDepth() {
const shouldRenderDepth = this._shouldRenderDepth();
if ( this.#shouldRenderDepth === shouldRenderDepth ) return;
this.#shouldRenderDepth = shouldRenderDepth;
canvas.masks.depth._elevationDirty = true;
}
/* -------------------------------------------- */
/**
* Does this object render to the depth buffer?
* @returns {boolean}
* @protected
*/
_shouldRenderDepth() {
return false;
}
/* -------------------------------------------- */
/**
* Render the depth of this object.
* @param {PIXI.Renderer} renderer
*/
renderDepthData(renderer) {}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
renderOcclusion(renderer) {
const msg = "PrimaryCanvasObject#renderOcclusion is deprecated in favor of PrimaryCanvasObject#renderDepthData";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
this.renderDepthData(renderer);
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get document() {
foundry.utils.logCompatibilityWarning("PrimaryCanvasObject#document is deprecated.", {since: 12, until: 14});
if ( !(this.object instanceof PlaceableObject) ) return null;
return this.object.document || null;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
updateBounds() {
const msg = "PrimaryCanvasObject#updateBounds is deprecated and has no effect.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
}
};
}
/**
* A mixin which decorates a DisplayObject with additional properties for canvas transforms and bounds.
* @category - Mixins
* @param {typeof PIXI.Container} DisplayObject The parent DisplayObject class being mixed
* @returns {typeof CanvasTransformMixin} A DisplayObject subclass mixed with CanvasTransformMixin features
* @mixin
*/
function CanvasTransformMixin(DisplayObject) {
return class CanvasTransformMixin extends DisplayObject {
constructor(...args) {
super(...args);
this.on("added", this.#resetCanvasTransformParentID);
this.on("removed", this.#resetCanvasTransformParentID);
}
/* -------------------------------------------- */
/* Properties */
/* -------------------------------------------- */
/**
* The transform matrix from local space to canvas space.
* @type {PIXI.Matrix}
*/
canvasTransform = new PIXI.Matrix();
/* -------------------------------------------- */
/**
* The update ID of canvas transform matrix.
* @type {number}
* @internal
*/
_canvasTransformID = -1;
/* -------------------------------------------- */
/**
* The update ID of the local transform of this object.
* @type {number}
*/
#canvasTransformLocalID = -1;
/* -------------------------------------------- */
/**
* The update ID of the canvas transform of the parent.
* @type {number}
*/
#canvasTransformParentID = -1;
/* -------------------------------------------- */
/**
* The canvas bounds of this object.
* @type {PIXI.Rectangle}
*/
canvasBounds = new PIXI.Rectangle();
/* -------------------------------------------- */
/**
* The canvas bounds of this object.
* @type {PIXI.Bounds}
* @protected
*/
_canvasBounds = new PIXI.Bounds();
/* -------------------------------------------- */
/**
* The update ID of the canvas bounds.
* Increment to force recalculation.
* @type {number}
* @protected
*/
_canvasBoundsID = 0;
/* -------------------------------------------- */
/**
* Reset the parent ID of the canvas transform.
*/
#resetCanvasTransformParentID() {
this.#canvasTransformParentID = -1;
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/**
* Calculate the canvas bounds of this object.
* @protected
*/
_calculateCanvasBounds() {}
/* -------------------------------------------- */
/**
* Recalculate the canvas transform and bounds of this object and its children, if necessary.
*/
updateCanvasTransform() {
this.transform.updateLocalTransform();
// If the local transform or the parent canvas transform has changed,
// recalculate the canvas transform of this object
if ( (this.#canvasTransformLocalID !== this.transform._localID)
|| (this.#canvasTransformParentID !== (this.parent._canvasTransformID ?? 0)) ) {
this.#canvasTransformLocalID = this.transform._localID;
this.#canvasTransformParentID = this.parent._canvasTransformID ?? 0;
this._canvasTransformID++;
this.canvasTransform.copyFrom(this.transform.localTransform);
// Prepend the parent canvas transform matrix (if exists)
if ( this.parent.canvasTransform ) this.canvasTransform.prepend(this.parent.canvasTransform);
this._canvasBoundsID++;
this._onCanvasTransformUpdate();
}
// Recalculate the canvas bounds of this object if necessary
if ( this._canvasBounds.updateID !== this._canvasBoundsID ) {
this._canvasBounds.updateID = this._canvasBoundsID;
this._canvasBounds.clear();
this._calculateCanvasBounds();
// Set the width and height of the canvas bounds rectangle to 0
// if the bounds are empty. PIXI.Bounds#getRectangle does not
// change the rectangle passed to it if the bounds are empty:
// so we need to handle the empty case here.
if ( this._canvasBounds.isEmpty() ) {
this.canvasBounds.x = this.x;
this.canvasBounds.y = this.y;
this.canvasBounds.width = 0;
this.canvasBounds.height = 0;
}
// Update the canvas bounds rectangle
else this._canvasBounds.getRectangle(this.canvasBounds);
this._onCanvasBoundsUpdate();
}
// Recursively update child canvas transforms
const children = this.children;
for ( let i = 0, n = children.length; i < n; i++ ) {
children[i].updateCanvasTransform?.();
}
}
/* -------------------------------------------- */
/**
* Called when the canvas transform changed.
* @protected
*/
_onCanvasTransformUpdate() {}
/* -------------------------------------------- */
/**
* Called when the canvas bounds changed.
* @protected
*/
_onCanvasBoundsUpdate() {}
/* -------------------------------------------- */
/**
* Is the given point in canvas space contained in this object?
* @param {PIXI.IPointData} point The point in canvas space.
* @returns {boolean}
*/
containsCanvasPoint(point) {
return false;
}
};
}

View File

@@ -0,0 +1,93 @@
/**
* A basic PCO which handles drawings of any shape.
* @extends {PIXI.Graphics}
* @mixes PrimaryCanvasObject
*
* @param {object} [options] A config object
* @param {PIXI.GraphicsGeometry} [options.geometry] A geometry passed to the graphics.
* @param {string|null} [options.name] The name of the PCO.
* @param {*} [options.object] Any object that owns this PCO.
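* @example
* // Illustrative usage (name and shape are hypothetical):
* const graphics = new PrimaryGraphics({name: "my-shape"});
* graphics.beginFill(0xFF0000).drawRect(0, 0, 100, 100).endFill();
* canvas.primary.addChild(graphics);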
*/
class PrimaryGraphics extends PrimaryCanvasObjectMixin(PIXI.Graphics) {
constructor(options) {
let geometry;
if ( options instanceof PIXI.GraphicsGeometry ) {
geometry = options;
options = {};
} else if ( options instanceof Object ) {
geometry = options.geometry;
} else {
options = {};
}
super(geometry);
this.name = options.name ?? null;
this.object = options.object ?? null;
}
/* -------------------------------------------- */
/**
* A temporary point used by this class.
* @type {PIXI.Point}
*/
static #TEMP_POINT = new PIXI.Point();
/* -------------------------------------------- */
/**
* The dirty ID of the geometry.
* @type {number}
*/
#geometryDirty = -1;
/* -------------------------------------------- */
/**
* Does the geometry contain points?
* @type {boolean}
*/
#geometryContainsPoints = false;
/* -------------------------------------------- */
/** @override */
_calculateCanvasBounds() {
this.finishPoly();
const geometry = this._geometry;
if ( !geometry.graphicsData.length ) return;
const { minX, minY, maxX, maxY } = geometry.bounds;
this._canvasBounds.addFrameMatrix(this.canvasTransform, minX, minY, maxX, maxY);
}
/* -------------------------------------------- */
/** @inheritdoc */
updateCanvasTransform() {
if ( this.#geometryDirty !== this._geometry.dirty ) {
this.#geometryDirty = this._geometry.dirty;
this.#geometryContainsPoints = false;
const graphicsData = this._geometry.graphicsData;
for ( let i = 0; i < graphicsData.length; i++ ) {
const data = graphicsData[i];
if ( data.shape && data.fillStyle.visible ) {
this.#geometryContainsPoints = true;
break;
}
}
this._canvasBoundsID++;
}
super.updateCanvasTransform();
}
/* -------------------------------------------- */
/** @override */
containsCanvasPoint(point) {
if ( !this.#geometryContainsPoints ) return false;
if ( !this.canvasBounds.contains(point.x, point.y) ) return false;
point = this.canvasTransform.applyInverse(point, PrimaryGraphics.#TEMP_POINT);
return this._geometry.containsPoint(point);
}
}

View File

@@ -0,0 +1,343 @@
/**
* A mixin which decorates a DisplayObject with depth and/or occlusion properties.
* @category - Mixins
* @param {typeof PIXI.DisplayObject} DisplayObject The parent DisplayObject class being mixed
* @returns {typeof PrimaryOccludableObject} A DisplayObject subclass mixed with OccludableObject features
* @mixin
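* @example
* // Sketch: configure occlusion on a mixed object such as a PrimarySpriteMesh
* // (the `mesh` reference and the alpha values are illustrative):
* mesh.occlusionMode = CONST.OCCLUSION_MODES.FADE;
* mesh.unoccludedAlpha = 1;
* mesh.occludedAlpha = 0.25;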
*/
function PrimaryOccludableObjectMixin(DisplayObject) {
class PrimaryOccludableObject extends PrimaryCanvasObjectMixin(DisplayObject) {
/**
* Restrictions options packed into a single value with bitwise logic.
* @type {foundry.utils.BitMask}
*/
#restrictionState = new foundry.utils.BitMask({
light: false,
weather: false
});
/**
* Is this occludable object hidden for Gamemaster visibility only?
* @type {boolean}
*/
hidden = false;
/**
* A flag which tracks whether the primary canvas object is currently in an occluded state.
* @type {boolean}
*/
occluded = false;
/**
* The occlusion mode of this occludable object.
* @type {number}
*/
occlusionMode = CONST.OCCLUSION_MODES.NONE;
/**
* The unoccluded alpha of this object.
* @type {number}
*/
unoccludedAlpha = 1;
/**
* The occlusion alpha of this object.
* @type {number}
*/
occludedAlpha = 0;
/**
* Fade this object on hover?
* @type {boolean}
* @defaultValue true
*/
get hoverFade() {
return this.#hoverFade;
}
set hoverFade(value) {
if ( this.#hoverFade === value ) return;
this.#hoverFade = value;
const state = this._hoverFadeState;
state.hovered = false;
state.faded = false;
state.fading = false;
state.occlusion = 0;
}
/**
* Fade this object on hover?
* @type {boolean}
*/
#hoverFade = true;
/**
* @typedef {object} OcclusionState
* @property {number} fade The amount of FADE occlusion
* @property {number} radial The amount of RADIAL occlusion
* @property {number} vision The amount of VISION occlusion
*/
/**
* The amount of rendered FADE, RADIAL, and VISION occlusion.
* @type {OcclusionState}
* @internal
*/
_occlusionState = {
fade: 0.0,
radial: 0.0,
vision: 0.0
};
/**
* @typedef {object} HoverFadeState
* @property {boolean} hovered The hovered state
* @property {number} hoveredTime The last time when a mouse event was hovering this object
* @property {boolean} faded The faded state
* @property {boolean} fading The fading state
* @property {number} fadingTime The time the fade animation started
* @property {number} occlusion The amount of occlusion
*/
/**
* The state of hover-fading.
* @type {HoverFadeState}
* @internal
*/
_hoverFadeState = {
hovered: false,
hoveredTime: 0,
faded: false,
fading: false,
fadingTime: 0,
occlusion: 0.0
};
/* -------------------------------------------- */
/* Properties */
/* -------------------------------------------- */
/**
* Get the blocking option bitmask value.
* @returns {number}
* @internal
*/
get _restrictionState() {
return this.#restrictionState.valueOf();
}
/* -------------------------------------------- */
/**
* Is this object blocking light?
* @type {boolean}
*/
get restrictsLight() {
return this.#restrictionState.hasState(this.#restrictionState.states.light);
}
set restrictsLight(enabled) {
this.#restrictionState.toggleState(this.#restrictionState.states.light, enabled);
}
/* -------------------------------------------- */
/**
* Is this object blocking weather?
* @type {boolean}
*/
get restrictsWeather() {
return this.#restrictionState.hasState(this.#restrictionState.states.weather);
}
set restrictsWeather(enabled) {
this.#restrictionState.toggleState(this.#restrictionState.states.weather, enabled);
}
/* -------------------------------------------- */
/**
* Is this occludable object... occludable?
* @type {boolean}
*/
get isOccludable() {
return this.occlusionMode > CONST.OCCLUSION_MODES.NONE;
}
/* -------------------------------------------- */
/**
* Debounce assignment of the PCO occluded state to avoid cases like animated token movement which can rapidly
* change PCO appearance.
* Uses a 50ms debounce threshold.
* Objects which are in the hovered state remain occluded until their hovered state ends.
* @type {function(occluded: boolean): void}
*/
debounceSetOcclusion = foundry.utils.debounce(occluded => this.occluded = occluded, 50);
/* -------------------------------------------- */
/** @inheritDoc */
updateCanvasTransform() {
super.updateCanvasTransform();
this.#updateHoverFadeState();
this.#updateOcclusionState();
}
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/**
* Update the occlusion state.
*/
#updateOcclusionState() {
const state = this._occlusionState;
state.fade = 0;
state.radial = 0;
state.vision = 0;
const M = CONST.OCCLUSION_MODES;
switch ( this.occlusionMode ) {
case M.FADE: if ( this.occluded ) state.fade = 1; break;
case M.RADIAL: state.radial = 1; break;
case M.VISION:
if ( canvas.masks.occlusion.vision ) state.vision = 1;
else if ( this.occluded ) state.fade = 1;
break;
}
const hoverFade = this._hoverFadeState.occlusion;
if ( canvas.masks.occlusion.vision ) state.vision = Math.max(state.vision, hoverFade);
else state.fade = Math.max(state.fade, hoverFade);
}
/* -------------------------------------------- */
/**
* Update the hover-fade state.
*/
#updateHoverFadeState() {
if ( !this.#hoverFade ) return;
const state = this._hoverFadeState;
const time = canvas.app.ticker.lastTime;
const {delay, duration} = CONFIG.Canvas.hoverFade;
if ( state.fading ) {
const dt = time - state.fadingTime;
if ( dt >= duration ) state.fading = false;
} else if ( state.faded !== state.hovered ) {
const dt = time - state.hoveredTime;
if ( dt >= delay ) {
state.faded = state.hovered;
if ( dt - delay < duration ) {
state.fading = true;
state.fadingTime = time;
}
}
}
let occlusion = 1;
if ( state.fading ) {
if ( state.faded !== state.hovered ) {
state.faded = state.hovered;
state.fadingTime = time - (state.fadingTime + duration - time);
}
occlusion = CanvasAnimation.easeInOutCosine((time - state.fadingTime) / duration);
}
state.occlusion = state.faded ? occlusion : 1 - occlusion;
}
/* -------------------------------------------- */
/* Depth Rendering */
/* -------------------------------------------- */
/** @override */
_shouldRenderDepth() {
return !this.#restrictionState.isEmpty && !this.hidden;
}
/* -------------------------------------------- */
/**
* Test whether a specific Token occludes this PCO.
* Occlusion is tested against 9 points: the center, the four corners, and the four edge midpoints of the hit box.
* @param {Token} token The Token to test
* @param {object} [options] Additional options that affect testing
* @param {boolean} [options.corners=true] Test corners of the hit-box in addition to the token center?
* @returns {boolean} Is the Token occluded by the PCO?
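* @example
* // Sketch (assumes `mesh` is an occludable PCO placed above the token's elevation):
* const token = canvas.tokens.controlled[0];
* const occluded = mesh.testOcclusion(token, {corners: false});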
*/
testOcclusion(token, {corners=true}={}) {
if ( token.document.elevation >= this.elevation ) return false;
const {x, y, w, h} = token;
let testPoints = [[w / 2, h / 2]];
if ( corners ) {
const pad = 2;
const cornerPoints = [
[pad, pad],
[w / 2, pad],
[w - pad, pad],
[w - pad, h / 2],
[w - pad, h - pad],
[w / 2, h - pad],
[pad, h - pad],
[pad, h / 2]
];
testPoints = testPoints.concat(cornerPoints);
}
for ( const [tx, ty] of testPoints ) {
if ( this.containsCanvasPoint({x: x + tx, y: y + ty}) ) return true;
}
return false;
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get roof() {
const msg = `${this.constructor.name}#roof is deprecated in favor of more granular options:
${this.constructor.name}#restrictsLight and ${this.constructor.name}#restrictsWeather`;
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return this.restrictsLight && this.restrictsWeather;
}
/**
* @deprecated since v12
* @ignore
*/
set roof(enabled) {
const msg = `${this.constructor.name}#roof is deprecated in favor of more granular options:
${this.constructor.name}#restrictsLight and ${this.constructor.name}#restrictsWeather`;
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
this.restrictsWeather = enabled;
this.restrictsLight = enabled;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
containsPixel(x, y, alphaThreshold=0.75) {
const msg = `${this.constructor.name}#containsPixel is deprecated. Use ${this.constructor.name}#containsCanvasPoint instead.`;
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return this.containsCanvasPoint({x, y}, alphaThreshold + 1e-6);
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
renderOcclusion(renderer) {
const msg = "PrimaryCanvasObject#renderOcclusion is deprecated in favor of PrimaryCanvasObject#renderDepth";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
this.renderDepthData(renderer);
}
}
return PrimaryOccludableObject;
}

View File

@@ -0,0 +1,421 @@
/**
* A basic PCO sprite mesh which handles occlusion and depth.
* @extends {SpriteMesh}
* @mixes PrimaryOccludableObjectMixin
* @mixes PrimaryCanvasObjectMixin
*
* @property {PrimaryBaseSamplerShader} shader The shader bound to this mesh.
*
* @param {object} [options] The constructor options.
* @param {PIXI.Texture} [options.texture] Texture passed to the SpriteMesh.
* @param {typeof PrimaryBaseSamplerShader} [options.shaderClass] The shader class used to render this sprite.
* @param {string|null} [options.name] The name of this sprite.
* @param {*} [options.object] Any object that owns this sprite.
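* @example
* // Illustrative construction (the texture path is hypothetical and assumed to be cached):
* const texture = getTexture("path/to/texture.webp");
* const mesh = new PrimarySpriteMesh({texture, name: "my-sprite"});
* canvas.primary.addChild(mesh);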
*/
class PrimarySpriteMesh extends PrimaryOccludableObjectMixin(SpriteMesh) {
constructor(options, shaderClass) {
let texture;
if ( options instanceof PIXI.Texture ) {
texture = options;
options = {};
} else if ( options instanceof Object ) {
texture = options.texture;
shaderClass = options.shaderClass;
} else {
options = {};
}
shaderClass ??= PrimaryBaseSamplerShader;
if ( !foundry.utils.isSubclass(shaderClass, PrimaryBaseSamplerShader) ) {
throw new Error(`${shaderClass.name} is not a subclass of PrimaryBaseSamplerShader`);
}
super(texture, shaderClass);
this.name = options.name ?? null;
this.object = options.object ?? null;
}
/* -------------------------------------------- */
/**
* A temporary point used by this class.
* @type {PIXI.Point}
*/
static #TEMP_POINT = new PIXI.Point();
/* -------------------------------------------- */
/**
* The texture alpha data.
* @type {TextureAlphaData|null}
* @protected
*/
_textureAlphaData = null;
/* -------------------------------------------- */
/**
* The texture alpha threshold used for point containment tests.
* If set to a value larger than 0, the texture alpha data is
* extracted from the texture at 25% resolution.
* @type {number}
*/
textureAlphaThreshold = 0;
/* -------------------------------------------- */
/* PIXI Events */
/* -------------------------------------------- */
/** @inheritDoc */
_onTextureUpdate() {
super._onTextureUpdate();
this._textureAlphaData = null;
this._canvasBoundsID++;
}
/* -------------------------------------------- */
/* Helper Methods */
/* -------------------------------------------- */
/** @inheritdoc */
setShaderClass(shaderClass) {
if ( !foundry.utils.isSubclass(shaderClass, PrimaryBaseSamplerShader) ) {
throw new Error(`${shaderClass.name} is not a subclass of PrimaryBaseSamplerShader`);
}
super.setShaderClass(shaderClass);
}
/* -------------------------------------------- */
/**
* An all-in-one helper method: Resizing the PCO according to desired dimensions and options.
* This helper computes the width and height based on the following factors:
*
* - The ratio of texture width and base width.
* - The ratio of texture height and base height.
*
* Additionally, it takes into account the desired fit options:
*
* - (default) "fill" computes the exact width and height ratio.
* - "cover" takes the maximum ratio of width and height and applies it to both.
* - "contain" takes the minimum ratio of width and height and applies it to both.
* - "width" applies the width ratio to both width and height.
* - "height" applies the height ratio to both width and height.
*
* You can also apply optional scaleX and scaleY options to both width and height. The scale is applied after fitting.
*
* **Important**: By using this helper, you don't need to set the height, width, and scale properties of the DisplayObject.
*
* **Note**: This is a helper method. Alternatively, you could assign properties as you would with a PIXI DisplayObject.
*
* @param {number} baseWidth The base width used for computations.
* @param {number} baseHeight The base height used for computations.
* @param {object} [options] The options.
* @param {"fill"|"cover"|"contain"|"width"|"height"} [options.fit="fill"] The fit type.
* @param {number} [options.scaleX=1] The scale on X axis.
* @param {number} [options.scaleY=1] The scale on Y axis.
*/
resize(baseWidth, baseHeight, {fit="fill", scaleX=1, scaleY=1}={}) {
if ( !((baseWidth >= 0) && (baseHeight >= 0)) ) {
throw new Error(`Invalid baseWidth/baseHeight passed to ${this.constructor.name}#resize.`);
}
const {width: textureWidth, height: textureHeight} = this._texture;
let sx;
let sy;
switch ( fit ) {
case "fill":
sx = baseWidth / textureWidth;
sy = baseHeight / textureHeight;
break;
case "cover":
sx = sy = Math.max(baseWidth / textureWidth, baseHeight / textureHeight);
break;
case "contain":
sx = sy = Math.min(baseWidth / textureWidth, baseHeight / textureHeight);
break;
case "width":
sx = sy = baseWidth / textureWidth;
break;
case "height":
sx = sy = baseHeight / textureHeight;
break;
default:
throw new Error(`Invalid fit type passed to ${this.constructor.name}#resize (fit=${fit}).`);
}
sx *= scaleX;
sy *= scaleY;
this.scale.set(sx, sy);
this._width = Math.abs(sx * textureWidth);
this._height = Math.abs(sy * textureHeight);
}
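/* -------------------------------------------- */
/* Illustrative sketch (not part of the original file): how the fit ratios behave for a
 * hypothetical PrimarySpriteMesh `mesh` whose texture is 400x200, resized into a 300x300 box.
 *
 *   mesh.resize(300, 300, {fit: "fill"});    // sx=0.75, sy=1.5 -> rendered 300 x 300
 *   mesh.resize(300, 300, {fit: "contain"}); // min(0.75, 1.5)  -> rendered 300 x 150
 *   mesh.resize(300, 300, {fit: "cover"});   // max(0.75, 1.5)  -> rendered 600 x 300
 *   mesh.resize(300, 300, {fit: "width", scaleY: 2}); // 0.75 on both axes, then height x2
 */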
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @inheritdoc */
_updateBatchData() {
super._updateBatchData();
const batchData = this._batchData;
batchData.elevation = this.elevation;
batchData.textureAlphaThreshold = this.textureAlphaThreshold;
batchData.unoccludedAlpha = this.unoccludedAlpha;
batchData.occludedAlpha = this.occludedAlpha;
const occlusionState = this._occlusionState;
batchData.fadeOcclusion = occlusionState.fade;
batchData.radialOcclusion = occlusionState.radial;
batchData.visionOcclusion = occlusionState.vision;
batchData.restrictionState = this._restrictionState;
}
/* -------------------------------------------- */
/** @override */
_calculateCanvasBounds() {
if ( !this._texture ) return;
const {width, height} = this._texture;
let minX = 0;
let minY = 0;
let maxX = width;
let maxY = height;
const alphaData = this._textureAlphaData;
if ( alphaData ) {
const scaleX = width / alphaData.width;
const scaleY = height / alphaData.height;
minX = alphaData.minX * scaleX;
minY = alphaData.minY * scaleY;
maxX = alphaData.maxX * scaleX;
maxY = alphaData.maxY * scaleY;
}
let {x: anchorX, y: anchorY} = this.anchor;
anchorX *= width;
anchorY *= height;
minX -= anchorX;
minY -= anchorY;
maxX -= anchorX;
maxY -= anchorY;
this._canvasBounds.addFrameMatrix(this.canvasTransform, minX, minY, maxX, maxY);
}
/* -------------------------------------------- */
/**
* Is the given point in canvas space contained in this object?
* @param {PIXI.IPointData} point The point in canvas space
* @param {number} [textureAlphaThreshold] The minimum texture alpha required for containment
* @returns {boolean}
*/
containsCanvasPoint(point, textureAlphaThreshold=this.textureAlphaThreshold) {
if ( textureAlphaThreshold > 1 ) return false;
if ( !this.canvasBounds.contains(point.x, point.y) ) return false;
point = this.canvasTransform.applyInverse(point, PrimarySpriteMesh.#TEMP_POINT);
return this.#containsLocalPoint(point, textureAlphaThreshold);
}
/* -------------------------------------------- */
/**
* Is the given point in world space contained in this object?
* @param {PIXI.IPointData} point The point in world space
* @param {number} [textureAlphaThreshold] The minimum texture alpha required for containment
* @returns {boolean}
*/
containsPoint(point, textureAlphaThreshold=this.textureAlphaThreshold) {
if ( textureAlphaThreshold > 1 ) return false;
point = this.worldTransform.applyInverse(point, PrimarySpriteMesh.#TEMP_POINT);
return this.#containsLocalPoint(point, textureAlphaThreshold);
}
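/* -------------------------------------------- */
/* Illustrative sketch (assumed instance and coordinates): point containment with and without
 * an alpha test. The default threshold of 0 only checks the texture frame, while a positive
 * threshold also samples the lazily extracted texture alpha data.
 *
 *   mesh.containsCanvasPoint({x: 1200, y: 850});        // frame test only (threshold 0)
 *   mesh.containsCanvasPoint({x: 1200, y: 850}, 0.75);  // also requires texture alpha >= 0.75
 *   mesh.containsPoint(globalPoint, 0.5);               // same test for a point in world space
 */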
/* -------------------------------------------- */
/**
* Is the given point in local space contained in this object?
* @param {PIXI.IPointData} point The point in local space
* @param {number} textureAlphaThreshold The minimum texture alpha required for containment
* @returns {boolean}
*/
#containsLocalPoint(point, textureAlphaThreshold) {
const {width, height} = this._texture;
const {x: anchorX, y: anchorY} = this.anchor;
let {x, y} = point;
x += (width * anchorX);
y += (height * anchorY);
if ( textureAlphaThreshold > 0 ) return this.#getTextureAlpha(x, y) >= textureAlphaThreshold;
return (x >= 0) && (x < width) && (y >= 0) && (y < height);
}
/* -------------------------------------------- */
/**
* Get alpha value of texture at the given texture coordinates.
* @param {number} x The x-coordinate
* @param {number} y The y-coordinate
* @returns {number} The alpha value (0-1)
*/
#getTextureAlpha(x, y) {
if ( !this._texture ) return 0;
if ( !this._textureAlphaData ) {
this._textureAlphaData = TextureLoader.getTextureAlphaData(this._texture, 0.25);
this._canvasBoundsID++;
}
// Transform the texture coordinates
const {width, height} = this._texture;
const alphaData = this._textureAlphaData;
x *= (alphaData.width / width);
y *= (alphaData.height / height);
// First test against the bounding box
const {minX, minY, maxX, maxY} = alphaData;
if ( (x < minX) || (x >= maxX) || (y < minY) || (y >= maxY) ) return 0;
// Get the alpha at the local coordinates
return alphaData.data[((maxX - minX) * ((y | 0) - minY)) + ((x | 0) - minX)] / 255;
}
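/* -------------------------------------------- */
/* Worked example (assumed numbers): for a 1000x800 texture whose alpha data was extracted at
 * 25% resolution, alphaData.width/height are 250x200 and only the opaque bounding box
 * [minX, maxX) x [minY, maxY) is stored. A local point (400, 100) scales to (100, 25); if it
 * lies inside that box, its alpha byte is read at the row-major index
 * (maxX - minX) * (25 - minY) + (100 - minX) and divided by 255.
 */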
/* -------------------------------------------- */
/* Rendering Methods */
/* -------------------------------------------- */
/** @override */
renderDepthData(renderer) {
if ( !this.shouldRenderDepth || !this.visible || !this.renderable ) return;
const shader = this._shader;
const blendMode = this.blendMode;
this.blendMode = PIXI.BLEND_MODES.MAX_COLOR;
this._shader = shader.depthShader;
if ( this.cullable ) this._renderWithCulling(renderer);
else this._render(renderer);
this._shader = shader;
this.blendMode = blendMode;
}
/* -------------------------------------------- */
/**
* Render the sprite with ERASE blending.
* Note: The sprite must not have visible/renderable children.
* @param {PIXI.Renderer} renderer The renderer
* @internal
*/
_renderVoid(renderer) {
if ( !this.visible || (this.worldAlpha <= 0) || !this.renderable ) return;
// Delegate to PrimarySpriteMesh#renderVoidAdvanced if the sprite has filter or mask
if ( this._mask || this.filters?.length ) this.#renderVoidAdvanced(renderer);
else {
// Set the blend mode to ERASE before rendering
const originalBlendMode = this.blendMode;
this.blendMode = PIXI.BLEND_MODES.ERASE;
// Render the sprite but not its children
if ( this.cullable ) this._renderWithCulling(renderer);
else this._render(renderer);
// Restore the original blend mode after rendering
this.blendMode = originalBlendMode;
}
}
/* -------------------------------------------- */
/**
* Render the sprite that has a filter or a mask with ERASE blending.
* Note: The sprite must not have visible/renderable children.
* @param {PIXI.Renderer} renderer The renderer
*/
#renderVoidAdvanced(renderer) {
// Same code as in PIXI.Container#renderAdvanced
const filters = this.filters;
const mask = this._mask;
if ( filters ) {
this._enabledFilters ||= [];
this._enabledFilters.length = 0;
for ( let i = 0; i < filters.length; i++ ) {
if ( filters[i].enabled ) this._enabledFilters.push(filters[i]);
}
}
const flush = (filters && this._enabledFilters.length) || (mask && (!mask.isMaskData
|| (mask.enabled && (mask.autoDetect || mask.type !== PIXI.MASK_TYPES.NONE))));
if ( flush ) renderer.batch.flush();
if ( filters && this._enabledFilters.length ) renderer.filter.push(this, this._enabledFilters);
if ( mask ) renderer.mask.push(this, mask);
// Set the blend mode to ERASE before rendering
let filter;
let originalBlendMode;
const filterState = renderer.filter.defaultFilterStack.at(-1);
if ( filterState.target === this ) {
filter = filterState.filters.at(-1);
originalBlendMode = filter.blendMode;
filter.blendMode = PIXI.BLEND_MODES.ERASE;
} else {
originalBlendMode = this.blendMode;
this.blendMode = PIXI.BLEND_MODES.ERASE;
}
// Same code as in PIXI.Container#renderAdvanced without the part that renders children
if ( this.cullable ) this._renderWithCulling(renderer);
else this._render(renderer);
if ( flush ) renderer.batch.flush();
if ( mask ) renderer.mask.pop(this);
if ( filters && this._enabledFilters.length ) renderer.filter.pop();
// Restore the original blend mode after rendering
if ( filter ) filter.blendMode = originalBlendMode;
else this.blendMode = originalBlendMode;
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
getPixelAlpha(x, y) {
const msg = `${this.constructor.name}#getPixelAlpha is deprecated without replacement.`;
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
if ( !this._textureAlphaData ) return null;
if ( !this.canvasBounds.contains(x, y) ) return -1;
const point = PrimarySpriteMesh.#TEMP_POINT.set(x, y);
this.canvasTransform.applyInverse(point, point);
const {width, height} = this._texture;
const {x: anchorX, y: anchorY} = this.anchor;
x = point.x + (width * anchorX);
y = point.y + (height * anchorY);
return this.#getTextureAlpha(x, y) * 255;
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
_getAlphaBounds() {
const msg = `${this.constructor.name}#_getAlphaBounds is deprecated without replacement.`;
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
const m = this._textureAlphaData;
const r = this.rotation;
return PIXI.Rectangle.fromRotation(m.minX, m.minY, m.maxX - m.minX, m.maxY - m.minY, r).normalize();
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
_getTextureCoordinate(testX, testY) {
const msg = `${this.constructor.name}#_getTextureCoordinate is deprecated without replacement.`;
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
const point = {x: testX, y: testY};
let {x, y} = this.canvasTransform.applyInverse(point, point);
point.x = ((x / this._texture.width) + this.anchor.x) * this._textureAlphaData.width;
point.y = ((y / this._texture.height) + this.anchor.y) * this._textureAlphaData.height;
return point;
}
}

File diff suppressed because it is too large


@@ -0,0 +1,500 @@
/**
* An AmbientSound is an implementation of PlaceableObject which represents a dynamic audio source within the Scene.
* @category - Canvas
* @see {@link AmbientSoundDocument}
* @see {@link SoundsLayer}
*/
class AmbientSound extends PlaceableObject {
/**
* The Sound which manages playback for this AmbientSound effect
* @type {foundry.audio.Sound|null}
*/
sound;
/**
* A sound effect attached to the managed Sound instance.
* @type {foundry.audio.BaseSoundEffect}
*/
#baseEffect;
/**
* A sound effect attached to the managed Sound instance when the sound source is muffled.
* @type {foundry.audio.BaseSoundEffect}
*/
#muffledEffect;
/**
* Track whether audio effects have been initialized.
* @type {boolean}
*/
#effectsInitialized = false;
/**
* Is this AmbientSound currently muffled?
* @type {boolean}
*/
#muffled = false;
/**
* A SoundSource object which manages the area of effect for this ambient sound
* @type {foundry.canvas.sources.PointSoundSource}
*/
source;
/**
* The area that is affected by this ambient sound.
* @type {PIXI.Graphics}
*/
field;
/** @inheritdoc */
static embeddedName = "AmbientSound";
/** @override */
static RENDER_FLAGS = {
redraw: {propagate: ["refresh"]},
refresh: {propagate: ["refreshState", "refreshField", "refreshElevation"], alias: true},
refreshField: {propagate: ["refreshPosition"]},
refreshPosition: {},
refreshState: {},
refreshElevation: {}
};
/* -------------------------------------------- */
/**
* Create a Sound used to play this AmbientSound object
* @returns {foundry.audio.Sound|null}
* @protected
*/
_createSound() {
const path = this.document.path;
if ( !this.id || !path ) return null;
return game.audio.create({src: path, context: game.audio.environment, singleton: true});
}
/* -------------------------------------------- */
/**
* Create special effect nodes for the Sound.
* This occurs once, the first time the AmbientSound is synced, and again whenever the effect data changes.
*/
#createEffects() {
const sfx = CONFIG.soundEffects;
const {base, muffled} = this.document.effects;
this.#baseEffect = this.#muffledEffect = undefined;
// Base effect
if ( base.type in sfx ) {
const cfg = sfx[base.type];
this.#baseEffect = new cfg.effectClass(this.sound.context, base);
}
// Muffled effect
if ( muffled.type in sfx ) {
const cfg = sfx[muffled.type];
this.#muffledEffect = new cfg.effectClass(this.sound.context, muffled);
}
this.#effectsInitialized = true;
}
/* -------------------------------------------- */
/**
* Update the set of effects which are applied to the managed Sound.
* @param {object} [options]
* @param {boolean} [options.muffled] Is the sound currently muffled?
*/
applyEffects({muffled=false}={}) {
const effects = [];
if ( muffled ) {
const effect = this.#muffledEffect || this.#baseEffect;
if ( effect ) effects.push(effect);
}
else if ( this.#baseEffect ) effects.push(this.#baseEffect);
this.sound.applyEffects(effects);
}
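/* -------------------------------------------- */
/* Illustrative sketch: the muffled effect falls back to the base effect when none is
 * configured for the document, so both calls below may apply the same effect node.
 *
 *   ambientSound.applyEffects({muffled: true});   // #muffledEffect || #baseEffect
 *   ambientSound.applyEffects({muffled: false});  // #baseEffect only, if configured
 */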
/* -------------------------------------------- */
/* Properties */
/* -------------------------------------------- */
/**
* Is this ambient sound currently audible based on its hidden state and the darkness level of the Scene?
* @type {boolean}
*/
get isAudible() {
if ( this.document.hidden || !this.document.radius ) return false;
return canvas.darknessLevel.between(this.document.darkness.min ?? 0, this.document.darkness.max ?? 1);
}
/* -------------------------------------------- */
/** @inheritdoc */
get bounds() {
const {x, y} = this.document;
const r = this.radius;
return new PIXI.Rectangle(x-r, y-r, 2*r, 2*r);
}
/* -------------------------------------------- */
/**
* A convenience accessor for the sound radius in pixels
* @type {number}
*/
get radius() {
let d = canvas.dimensions;
return ((this.document.radius / d.distance) * d.size);
}
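/* -------------------------------------------- */
/* Worked example (assumed scene dimensions): with a 100px grid square representing
 * 5 distance units, a document radius of 30 units yields (30 / 5) * 100 = 600 pixels.
 */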
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/**
* Toggle playback of the sound depending on whether it is audible.
* @param {boolean} isAudible Is the sound audible?
* @param {number} [volume] The target playback volume
* @param {object} [options={}] Additional options which affect sound synchronization
* @param {number} [options.fade=250] A duration in milliseconds to fade volume transition
* @param {boolean} [options.muffled=false] Is the sound currently muffled?
* @returns {Promise<void>} A promise which resolves once sound playback is synchronized
*/
async sync(isAudible, volume, {fade=250, muffled=false}={}) {
// Discontinue playback
if ( !isAudible ) {
if ( !this.sound ) return;
this.sound._manager = null;
await this.sound.stop({volume: 0, fade});
this.#muffled = false;
return;
}
// Begin playback
this.sound ||= this._createSound();
if ( this.sound === null ) return;
const sound = this.sound;
// Track whether the AmbientSound placeable managing Sound playback has changed
const objectChange = sound._manager !== this;
const requireLoad = !sound.loaded && !sound._manager;
sound._manager = this;
// Load the buffer if necessary
if ( requireLoad ) await sound.load();
if ( !sound.loaded ) return; // Some other Placeable may be loading the sound
// Update effects
const muffledChange = this.#muffled !== muffled;
this.#muffled = muffled;
if ( objectChange && !this.#effectsInitialized ) this.#createEffects();
if ( objectChange || muffledChange ) this.applyEffects({muffled});
// Begin playback at the desired volume
if ( !sound.playing ) {
const offset = sound.context.currentTime % sound.duration;
await sound.play({volume, offset, fade, loop: true});
return;
}
// Adjust volume
await sound.fade(volume, {duration: fade});
}
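/* -------------------------------------------- */
/* Illustrative sketch (assumption: invoked by the SoundsLayer refresh logic; `isMuffled` is a
 * hypothetical flag provided by the caller):
 *
 *   await ambientSound.sync(ambientSound.isAudible, 0.8, {fade: 500, muffled: isMuffled});
 *
 * Passing isAudible=false fades the Sound out and releases this placeable as its manager;
 * passing isAudible=true lazily creates and loads the Sound, then starts looping playback at
 * an offset derived from the shared audio context time, or simply fades to the new volume if
 * it is already playing.
 */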
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/** @inheritdoc */
clear() {
if ( this.controlIcon ) {
this.controlIcon.parent.removeChild(this.controlIcon).destroy();
this.controlIcon = null;
}
return super.clear();
}
/* -------------------------------------------- */
/** @override */
async _draw(options) {
this.field = this.addChild(new PIXI.Graphics());
this.field.eventMode = "none";
this.controlIcon = this.addChild(this.#drawControlIcon());
}
/* -------------------------------------------- */
/** @override */
_destroy(options) {
this.#destroySoundSource();
}
/* -------------------------------------------- */
/**
* Draw the ControlIcon for the AmbientSound
* @returns {ControlIcon}
*/
#drawControlIcon() {
const size = Math.max(Math.round((canvas.dimensions.size * 0.5) / 20) * 20, 40);
let icon = new ControlIcon({texture: CONFIG.controlIcons.sound, size: size});
icon.x -= (size * 0.5);
icon.y -= (size * 0.5);
return icon;
}
/* -------------------------------------------- */
/* Incremental Refresh */
/* -------------------------------------------- */
/** @override */
_applyRenderFlags(flags) {
if ( flags.refreshState ) this._refreshState();
if ( flags.refreshPosition ) this._refreshPosition();
if ( flags.refreshField ) this._refreshField();
if ( flags.refreshElevation ) this._refreshElevation();
}
/* -------------------------------------------- */
/**
* Refresh the shape of the sound field-of-effect. This is refreshed when the SoundSource fov polygon changes.
* @protected
*/
_refreshField() {
this.field.clear();
if ( !this.source?.shape ) return;
this.field.lineStyle(1, 0xFFFFFF, 0.5).beginFill(0xAADDFF, 0.15).drawShape(this.source.shape).endFill();
this.field.position.set(-this.source.x, -this.source.y);
}
/* -------------------------------------------- */
/**
* Refresh the position of the AmbientSound. Called when the coordinates change.
* @protected
*/
_refreshPosition() {
const {x, y} = this.document;
if ( (this.position.x !== x) || (this.position.y !== y) ) MouseInteractionManager.emulateMoveEvent();
this.position.set(x, y);
}
/* -------------------------------------------- */
/**
* Refresh the state of the sound. Called when the disabled state or darkness conditions change.
* @protected
*/
_refreshState() {
this.alpha = this._getTargetAlpha();
this.zIndex = this.hover ? 1 : 0;
this.refreshControl();
}
/* -------------------------------------------- */
/**
* Refresh the display of the ControlIcon for this AmbientSound source.
*/
refreshControl() {
const isHidden = this.id && (this.document.hidden || !this.document.path);
this.controlIcon.tintColor = isHidden ? 0xFF3300 : 0xFFFFFF;
this.controlIcon.borderColor = isHidden ? 0xFF3300 : 0xFF5500;
this.controlIcon.texture = getTexture(this.isAudible ? CONFIG.controlIcons.sound : CONFIG.controlIcons.soundOff);
this.controlIcon.elevation = this.document.elevation;
this.controlIcon.refresh({visible: this.layer.active, borderVisible: this.hover || this.layer.highlightObjects});
this.controlIcon.draw();
}
/* -------------------------------------------- */
/**
* Refresh the elevation of the control icon.
* @protected
*/
_refreshElevation() {
this.controlIcon.elevation = this.document.elevation;
}
/* -------------------------------------------- */
/* Sound Source Management */
/* -------------------------------------------- */
/**
* Initialize the PointSoundSource which provides the area of effect for this AmbientSound object.
* @param {object} [options={}] Options which modify how the audio source is updated
* @param {boolean} [options.deleted] Indicate that this SoundSource has been deleted.
*/
initializeSoundSource({deleted=false}={}) {
const wasActive = this.layer.sources.has(this.sourceId);
const perceptionFlags = {refreshSounds: true};
// Remove the audio source from the Scene
if ( deleted ) {
if ( !wasActive ) return;
this.#destroySoundSource();
canvas.perception.update(perceptionFlags);
return;
}
// Create the sound source if necessary
this.source ??= this.#createSoundSource();
// Re-initialize source data and add to the active collection
this.source.initialize(this._getSoundSourceData());
this.source.add();
// Schedule a perception refresh, unless that operation is deferred for some later workflow
canvas.perception.update(perceptionFlags);
if ( this.layer.active ) this.renderFlags.set({refreshField: true});
}
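/* -------------------------------------------- */
/* Illustrative sketch: callers re-initialize the source whenever its data may have changed.
 *
 *   ambientSound.initializeSoundSource();                 // (re)create, initialize, and register the source
 *   ambientSound.initializeSoundSource({deleted: true});  // destroy the source and refresh perception
 */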
/* -------------------------------------------- */
/**
* Create a new point sound source for this AmbientSound.
* @returns {foundry.canvas.sources.PointSoundSource} The created source
*/
#createSoundSource() {
const cls = CONFIG.Canvas.soundSourceClass;
return new cls({sourceId: this.sourceId, object: this});
}
/* -------------------------------------------- */
/**
* Destroy the point sound source for this AmbientSound.
*/
#destroySoundSource() {
this.source?.destroy();
this.source = undefined;
}
/* -------------------------------------------- */
/**
* Get the sound source data.
* @returns {BaseEffectSourceData}
* @protected
*/
_getSoundSourceData() {
return {
x: this.document.x,
y: this.document.y,
elevation: this.document.elevation,
radius: Math.clamp(this.radius, 0, canvas.dimensions.maxR),
walls: this.document.walls,
disabled: !this.isAudible
};
}
/* -------------------------------------------- */
/* Document Event Handlers */
/* -------------------------------------------- */
/** @inheritDoc */
_onCreate(data, options, userId) {
super._onCreate(data, options, userId);
this.initializeSoundSource();
}
/* -------------------------------------------- */
/** @inheritDoc */
_onUpdate(changed, options, userId) {
super._onUpdate(changed, options, userId);
// Change the Sound buffer
if ( "path" in changed ) {
if ( this.sound ) this.sound.stop();
this.sound = this._createSound();
}
// Update special effects
if ( "effects" in changed ) {
this.#effectsInitialized = false;
if ( this.sound?._manager === this ) this.sound._manager = null;
}
// Re-initialize SoundSource
this.initializeSoundSource();
// Incremental Refresh
this.renderFlags.set({
refreshState: ("hidden" in changed) || ("path" in changed) || ("darkness" in changed),
refreshElevation: "elevation" in changed
});
}
/* -------------------------------------------- */
/** @inheritDoc */
_onDelete(options, userId) {
this.sound?.stop();
this.initializeSoundSource({deleted: true});
super._onDelete(options, userId);
}
/* -------------------------------------------- */
/* Interactivity */
/* -------------------------------------------- */
/** @inheritdoc */
_canHUD(user, event) {
return user.isGM; // Allow GMs to single right-click
}
/* -------------------------------------------- */
/** @inheritdoc */
_canConfigure(user, event) {
return false; // Double-right does nothing
}
/* -------------------------------------------- */
/** @override */
_onClickRight(event) {
this.document.update({hidden: !this.document.hidden});
if ( !this._propagateRightClick(event) ) event.stopPropagation();
}
/* -------------------------------------------- */
/** @override */
_onDragLeftMove(event) {
super._onDragLeftMove(event);
const clones = event.interactionData.clones || [];
for ( let c of clones ) {
c.initializeSoundSource();
}
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragEnd() {
this.initializeSoundSource({deleted: true});
this._original?.initializeSoundSource();
super._onDragEnd();
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
updateSource({defer=false, deleted=false}={}) {
const msg = "AmbientSound#updateSource has been deprecated in favor of AmbientSound#initializeSoundSource";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14, once: true});
this.initializeSoundSource({defer, deleted});
}
}


@@ -0,0 +1,658 @@
/**
* A type of Placeable Object which highlights an area of the grid as covered by some area of effect.
* @category - Canvas
* @see {@link MeasuredTemplateDocument}
* @see {@link TemplateLayer}
*/
class MeasuredTemplate extends PlaceableObject {
/**
* The geometry shape used for testing point intersection
* @type {PIXI.Circle | PIXI.Ellipse | PIXI.Polygon | PIXI.Rectangle | PIXI.RoundedRectangle}
*/
shape;
/**
* The tiling texture used for this template, if any
* @type {PIXI.Texture}
*/
texture;
/**
* The template graphics
* @type {PIXI.Graphics}
*/
template;
/**
* The measurement ruler label
* @type {PreciseText}
*/
ruler;
/**
* Internal property used to configure the control border thickness
* @type {number}
* @protected
*/
_borderThickness = 3;
/** @inheritdoc */
static embeddedName = "MeasuredTemplate";
/** @override */
static RENDER_FLAGS = {
redraw: {propagate: ["refresh"]},
refresh: {propagate: ["refreshState", "refreshPosition", "refreshShape", "refreshElevation"], alias: true},
refreshState: {},
refreshPosition: {propagate: ["refreshGrid"]},
refreshShape: {propagate: ["refreshTemplate", "refreshGrid", "refreshText"]},
refreshTemplate: {},
refreshGrid: {},
refreshText: {},
refreshElevation: {}
};
/* -------------------------------------------- */
/* Properties */
/* -------------------------------------------- */
/**
* A convenient reference for whether the current User is the author of the MeasuredTemplate document.
* @type {boolean}
*/
get isAuthor() {
return this.document.isAuthor;
}
/* -------------------------------------------- */
/** @inheritdoc */
get bounds() {
const {x, y} = this.document;
const d = canvas.dimensions;
const r = this.document.distance * (d.size / d.distance);
return new PIXI.Rectangle(x-r, y-r, 2*r, 2*r);
}
/* -------------------------------------------- */
/**
* Is this MeasuredTemplate currently visible on the Canvas?
* @type {boolean}
*/
get isVisible() {
return !this.document.hidden || this.isAuthor || game.user.isGM;
}
/* -------------------------------------------- */
/**
* A unique identifier used to associate related objects, such as a template effect or grid highlight, with this template.
* @type {string}
*/
get highlightId() {
return this.objectId;
}
/* -------------------------------------------- */
/* Initial Drawing */
/* -------------------------------------------- */
/** @override */
async _draw(options) {
// Load Fill Texture
if ( this.document.texture ) {
this.texture = await loadTexture(this.document.texture, {fallback: "icons/svg/hazard.svg"});
} else {
this.texture = null;
}
// Template Shape
this.template = this.addChild(new PIXI.Graphics());
// Control Icon
this.controlIcon = this.addChild(this.#createControlIcon());
await this.controlIcon.draw();
// Ruler Text
this.ruler = this.addChild(this.#drawRulerText());
// Enable highlighting for this template
canvas.interface.grid.addHighlightLayer(this.highlightId);
}
/* -------------------------------------------- */
/**
* Draw the ControlIcon for the MeasuredTemplate
* @returns {ControlIcon}
*/
#createControlIcon() {
const size = Math.max(Math.round((canvas.dimensions.size * 0.5) / 20) * 20, 40);
let icon = new ControlIcon({texture: CONFIG.controlIcons.template, size: size});
icon.x -= (size * 0.5);
icon.y -= (size * 0.5);
return icon;
}
/* -------------------------------------------- */
/**
* Draw the Text label used for the MeasuredTemplate
* @returns {PreciseText}
*/
#drawRulerText() {
const style = CONFIG.canvasTextStyle.clone();
style.fontSize = Math.max(Math.round(canvas.dimensions.size * 0.36 * 12) / 12, 36);
const text = new PreciseText(null, style);
text.anchor.set(0, 1);
return text;
}
/* -------------------------------------------- */
/** @override */
_destroy(options) {
canvas.interface.grid.destroyHighlightLayer(this.highlightId);
this.texture?.destroy();
}
/* -------------------------------------------- */
/* Incremental Refresh */
/* -------------------------------------------- */
/** @override */
_applyRenderFlags(flags) {
if ( flags.refreshState ) this._refreshState();
if ( flags.refreshPosition ) this._refreshPosition();
if ( flags.refreshShape ) this._refreshShape();
if ( flags.refreshTemplate ) this._refreshTemplate();
if ( flags.refreshGrid ) this.highlightGrid();
if ( flags.refreshText ) this._refreshRulerText();
if ( flags.refreshElevation ) this._refreshElevation();
}
/* -------------------------------------------- */
/**
* Refresh the displayed state of the MeasuredTemplate.
* This refresh occurs when the user interaction state changes.
* @protected
*/
_refreshState() {
// Template Visibility
const wasVisible = this.visible;
this.visible = this.isVisible && !this.hasPreview;
if ( this.visible !== wasVisible ) MouseInteractionManager.emulateMoveEvent();
// Sort on top of others on hover
this.zIndex = this.hover ? 1 : 0;
// Control Icon Visibility
const isHidden = this.document.hidden;
this.controlIcon.refresh({
visible: this.visible && this.layer.active && this.document.isOwner,
iconColor: isHidden ? 0xFF3300 : 0xFFFFFF,
borderColor: isHidden ? 0xFF3300 : 0xFF5500,
borderVisible: this.hover || this.layer.highlightObjects
});
// Alpha transparency
const alpha = isHidden ? 0.5 : 1;
this.template.alpha = alpha;
this.ruler.alpha = alpha;
const highlightLayer = canvas.interface.grid.getHighlightLayer(this.highlightId);
highlightLayer.visible = this.visible;
// FIXME the elevation is not considered in sort order of the highlight layers
highlightLayer.zIndex = this.document.sort;
highlightLayer.alpha = alpha;
this.alpha = this._getTargetAlpha();
// Ruler Visibility
this.ruler.visible = this.visible && this.layer.active;
}
/* -------------------------------------------- */
/**
* Refresh the elevation of the control icon.
* @protected
*/
_refreshElevation() {
this.controlIcon.elevation = this.document.elevation;
}
/* -------------------------------------------- */
/** @override */
_getTargetAlpha() {
return this.isPreview ? 0.8 : 1.0;
}
/* -------------------------------------------- */
/**
* Refresh the position of the MeasuredTemplate
* @protected
*/
_refreshPosition() {
const {x, y} = this.document;
if ( (this.position.x !== x) || (this.position.y !== y) ) MouseInteractionManager.emulateMoveEvent();
this.position.set(x, y);
}
/* -------------------------------------------- */
/**
* Refresh the underlying geometric shape of the MeasuredTemplate.
* @protected
*/
_refreshShape() {
let {x, y, direction, distance} = this.document;
// Grid type
if ( game.settings.get("core", "gridTemplates") ) {
this.ray = new Ray({x, y}, canvas.grid.getTranslatedPoint({x, y}, direction, distance));
}
// Euclidean type
else {
this.ray = Ray.fromAngle(x, y, Math.toRadians(direction), distance * canvas.dimensions.distancePixels);
}
// Get the Template shape
this.shape = this._computeShape();
}
/* -------------------------------------------- */
/**
* Compute the geometry for the template using its document data.
* Subclasses can override this method to take control over how different shapes are rendered.
* @returns {PIXI.Circle|PIXI.Rectangle|PIXI.Polygon}
* @protected
*/
_computeShape() {
const {t, distance, direction, angle, width} = this.document;
switch ( t ) {
case "circle":
return this.constructor.getCircleShape(distance);
case "cone":
return this.constructor.getConeShape(distance, direction, angle);
case "rect":
return this.constructor.getRectShape(distance, direction);
case "ray":
return this.constructor.getRayShape(distance, direction, width);
}
}
/* -------------------------------------------- */
/**
* Refresh the display of the template outline and shape.
* Subclasses may override this method to take control over how the template is visually rendered.
* @protected
*/
_refreshTemplate() {
const t = this.template.clear();
// Draw the Template outline
t.lineStyle(this._borderThickness, this.document.borderColor, 0.75).beginFill(0x000000, 0.0);
// Fill Color or Texture
if ( this.texture ) t.beginTextureFill({texture: this.texture});
else t.beginFill(0x000000, 0.0);
// Draw the shape
t.drawShape(this.shape);
// Draw origin and destination points
t.lineStyle(this._borderThickness, 0x000000)
.beginFill(0x000000, 0.5)
.drawCircle(0, 0, 6)
.drawCircle(this.ray.dx, this.ray.dy, 6)
.endFill();
}
/* -------------------------------------------- */
/**
* Get a Circular area of effect given a radius of effect
* @param {number} distance The radius of the circle in grid units
* @returns {PIXI.Circle|PIXI.Polygon}
*/
static getCircleShape(distance) {
// Grid circle
if ( game.settings.get("core", "gridTemplates") ) {
return new PIXI.Polygon(canvas.grid.getCircle({x: 0, y: 0}, distance));
}
// Euclidean circle
return new PIXI.Circle(0, 0, distance * canvas.dimensions.distancePixels);
}
/* -------------------------------------------- */
/**
* Get a Conical area of effect given a direction, angle, and distance
* @param {number} distance The radius of the cone in grid units
* @param {number} direction The direction of the cone in degrees
* @param {number} angle The angle of the cone in degrees
* @returns {PIXI.Polygon|PIXI.Circle}
*/
static getConeShape(distance, direction, angle) {
// Grid cone
if ( game.settings.get("core", "gridTemplates") ) {
return new PIXI.Polygon(canvas.grid.getCone({x: 0, y: 0}, distance, direction, angle));
}
// Euclidean cone
if ( (distance <= 0) || (angle <= 0) ) return new PIXI.Polygon();
distance *= canvas.dimensions.distancePixels;
const coneType = game.settings.get("core", "coneTemplateType");
// For round cones - approximate the shape with a ray every 3 degrees
let angles;
if ( coneType === "round" ) {
if ( angle >= 360 ) return new PIXI.Circle(0, 0, distance);
const da = Math.min(angle, 3);
angles = Array.fromRange(Math.floor(angle/da)).map(a => (angle/-2) + (a*da)).concat([angle/2]);
}
// For flat cones, direct point-to-point
else {
angle = Math.min(angle, 179);
angles = [(angle/-2), (angle/2)];
distance /= Math.cos(Math.toRadians(angle/2));
}
// Get the cone shape as a polygon
const rays = angles.map(a => Ray.fromAngle(0, 0, Math.toRadians(direction + a), distance));
const points = rays.reduce((arr, r) => {
return arr.concat([r.B.x, r.B.y]);
}, [0, 0]).concat([0, 0]);
return new PIXI.Polygon(points);
}
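/* -------------------------------------------- */
/* Worked example (assumed settings): a cone with distance=30, direction=0 (east), and
 * angle=53 degrees. With coneTemplateType="round", rays are cast every 3 degrees from
 * -26.5 to +26.5 degrees at the full radius, approximating an arc. With "flat", only the two
 * edge rays are used and their length is divided by cos(26.5 deg), so the flat far edge still
 * sits 30 units from the origin along the cone's centerline.
 */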
/* -------------------------------------------- */
/**
* Get a Rectangular area of effect given the length and direction of its diagonal
* @param {number} distance The length of the diagonal in grid units
* @param {number} direction The direction of the diagonal in degrees
* @returns {PIXI.Rectangle}
*/
static getRectShape(distance, direction) {
let endpoint;
// Grid rectangle
if ( game.settings.get("core", "gridTemplates") ) {
endpoint = canvas.grid.getTranslatedPoint({x: 0, y: 0}, direction, distance);
}
// Euclidean rectangle
else endpoint = Ray.fromAngle(0, 0, Math.toRadians(direction), distance * canvas.dimensions.distancePixels).B;
return new PIXI.Rectangle(0, 0, endpoint.x, endpoint.y).normalize();
}
/* -------------------------------------------- */
/**
* Get a Ray area of effect given a length, direction, and width
* @param {number} distance The length of the ray in grid units
* @param {number} direction The direction of the ray in degrees
* @param {number} width The width of the ray in grid units
* @returns {PIXI.Polygon}
*/
static getRayShape(distance, direction, width) {
const d = canvas.dimensions;
width *= d.distancePixels;
const p00 = Ray.fromAngle(0, 0, Math.toRadians(direction - 90), width / 2).B;
const p01 = Ray.fromAngle(0, 0, Math.toRadians(direction + 90), width / 2).B;
let p10;
let p11;
// Grid ray
if ( game.settings.get("core", "gridTemplates") ) {
p10 = canvas.grid.getTranslatedPoint(p00, direction, distance);
p11 = canvas.grid.getTranslatedPoint(p01, direction, distance);
}
// Euclidean ray
else {
distance *= d.distancePixels;
direction = Math.toRadians(direction);
p10 = Ray.fromAngle(p00.x, p00.y, direction, distance).B;
p11 = Ray.fromAngle(p01.x, p01.y, direction, distance).B;
}
return new PIXI.Polygon(p00.x, p00.y, p10.x, p10.y, p11.x, p11.y, p01.x, p01.y);
}
/* -------------------------------------------- */
/**
* Update the displayed ruler tooltip text
* @protected
*/
_refreshRulerText() {
const {distance, t} = this.document;
const grid = canvas.grid;
if ( t === "rect" ) {
const {A: {x: x0, y: y0}, B: {x: x1, y: y1}} = this.ray;
const dx = grid.measurePath([{x: x0, y: y0}, {x: x1, y: y0}]).distance;
const dy = grid.measurePath([{x: x0, y: y0}, {x: x0, y: y1}]).distance;
const w = Math.round(dx * 10) / 10;
const h = Math.round(dy * 10) / 10;
this.ruler.text = `${w}${grid.units} x ${h}${grid.units}`;
} else {
const r = Math.round(distance * 10) / 10;
this.ruler.text = `${r}${grid.units}`;
}
this.ruler.position.set(this.ray.dx + 10, this.ray.dy + 5);
}
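/* -------------------------------------------- */
/* Worked example: on a scene measured in feet, a 15 ft circle template renders "15ft", while
 * a rect template measuring 20 ft by 10 ft renders "20ft x 10ft"; both values are rounded to
 * one decimal place and the label is positioned just past the ray endpoint.
 */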
/* -------------------------------------------- */
/**
* Highlight the grid squares which should be shown under the area of effect
*/
highlightGrid() {
// Clear the existing highlight layer
canvas.interface.grid.clearHighlightLayer(this.highlightId);
// Highlight colors
const border = this.document.borderColor;
const color = this.document.fillColor;
// If we are in grid-less mode, highlight the shape directly
if ( canvas.grid.type === CONST.GRID_TYPES.GRIDLESS ) {
const shape = this._getGridHighlightShape();
canvas.interface.grid.highlightPosition(this.highlightId, {border, color, shape});
}
// Otherwise, highlight specific grid positions
else {
const positions = this._getGridHighlightPositions();
for ( const {x, y} of positions ) {
canvas.interface.grid.highlightPosition(this.highlightId, {x, y, border, color});
}
}
}
/* -------------------------------------------- */
/**
* Get the shape to highlight on a Scene which uses grid-less mode.
* @returns {PIXI.Polygon|PIXI.Circle|PIXI.Rectangle}
* @protected
*/
_getGridHighlightShape() {
const shape = this.shape.clone();
if ( "points" in shape ) {
shape.points = shape.points.map((p, i) => {
if ( i % 2 ) return this.y + p;
else return this.x + p;
});
} else {
shape.x += this.x;
shape.y += this.y;
}
return shape;
}
/* -------------------------------------------- */
/**
* Get an array of points which define top-left grid spaces to highlight for square or hexagonal grids.
* @returns {Point[]}
* @protected
*/
_getGridHighlightPositions() {
const grid = canvas.grid;
const {x: ox, y: oy} = this.document;
const shape = this.shape;
const bounds = shape.getBounds();
bounds.x += ox;
bounds.y += oy;
bounds.fit(canvas.dimensions.rect);
bounds.pad(1);
// Identify grid spaces that have their center points covered by the template shape
const positions = [];
const [i0, j0, i1, j1] = grid.getOffsetRange(bounds);
for ( let i = i0; i < i1; i++ ) {
for ( let j = j0; j < j1; j++ ) {
const offset = {i, j};
const {x: cx, y: cy} = grid.getCenterPoint(offset);
// If the origin of the template is a grid space center, this grid space is highlighted
let covered = (Math.max(Math.abs(cx - ox), Math.abs(cy - oy)) < 1);
if ( !covered ) {
for ( let dx = -0.5; dx <= 0.5; dx += 0.5 ) {
for ( let dy = -0.5; dy <= 0.5; dy += 0.5 ) {
if ( shape.contains(cx - ox + dx, cy - oy + dy) ) {
covered = true;
break;
}
}
}
}
if ( !covered ) continue;
positions.push(grid.getTopLeftPoint(offset));
}
}
return positions;
}
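/* -------------------------------------------- */
/* Illustrative sketch: each grid offset inside the padded template bounds is kept when its
 * center point (jittered by up to half a pixel to tolerate edge cases) falls inside the
 * template shape, or when it coincides with the template origin itself.
 *
 *   const positions = template._getGridHighlightPositions();
 *   // e.g. [{x: 1000, y: 900}, {x: 1100, y: 900}, ...] -- top-left points of covered spaces
 */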
/* -------------------------------------------- */
/* Methods */
/* -------------------------------------------- */
/** @override */
async rotate(angle, snap) {
if ( game.paused && !game.user.isGM ) {
ui.notifications.warn("GAME.PausedWarning", {localize: true});
return this;
}
const direction = this._updateRotation({angle, snap});
await this.document.update({direction});
return this;
}
/* -------------------------------------------- */
/* Document Event Handlers */
/* -------------------------------------------- */
/** @inheritDoc */
_onUpdate(changed, options, userId) {
super._onUpdate(changed, options, userId);
// Incremental Refresh
this.renderFlags.set({
redraw: "texture" in changed,
refreshState: ("sort" in changed) || ("hidden" in changed),
refreshPosition: ("x" in changed) || ("y" in changed),
refreshElevation: "elevation" in changed,
refreshShape: ["t", "angle", "direction", "distance", "width"].some(k => k in changed),
refreshTemplate: "borderColor" in changed,
refreshGrid: ("borderColor" in changed) || ("fillColor" in changed)
});
}
/* -------------------------------------------- */
/* Interactivity */
/* -------------------------------------------- */
/** @override */
_canControl(user, event) {
if ( !this.layer.active || this.isPreview ) return false;
return user.isGM || (user === this.document.author);
}
/** @inheritdoc */
_canHUD(user, event) {
return this.isOwner; // Allow template owners to right-click
}
/** @inheritdoc */
_canConfigure(user, event) {
return false; // Double-right does nothing
}
/** @override */
_canView(user, event) {
return this._canControl(user, event);
}
/** @inheritdoc */
_onClickRight(event) {
this.document.update({hidden: !this.document.hidden});
if ( !this._propagateRightClick(event) ) event.stopPropagation();
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get borderColor() {
const msg = "MeasuredTemplate#borderColor has been deprecated. Use MeasuredTemplate#document#borderColor instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return this.document.borderColor.valueOf();
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get fillColor() {
const msg = "MeasuredTemplate#fillColor has been deprecated. Use MeasuredTemplate#document#fillColor instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return this.document.fillColor.valueOf();
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get owner() {
const msg = "MeasuredTemplate#owner has been deprecated. Use MeasuredTemplate#isOwner instead.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return this.isOwner;
}
}


@@ -0,0 +1,773 @@
/**
* A Tile is an implementation of PlaceableObject which represents a static piece of artwork or prop within the Scene.
* Tiles are drawn inside the {@link TilesLayer} container.
* @category - Canvas
*
* @see {@link TileDocument}
* @see {@link TilesLayer}
*/
class Tile extends PlaceableObject {
/* -------------------------------------------- */
/* Attributes */
/* -------------------------------------------- */
/** @inheritdoc */
static embeddedName = "Tile";
/** @override */
static RENDER_FLAGS = {
redraw: {propagate: ["refresh"]},
refresh: {propagate: ["refreshState", "refreshTransform", "refreshMesh", "refreshElevation", "refreshVideo"], alias: true},
refreshState: {propagate: ["refreshPerception"]},
refreshTransform: {propagate: ["refreshPosition", "refreshRotation", "refreshSize"], alias: true},
refreshPosition: {propagate: ["refreshPerception"]},
refreshRotation: {propagate: ["refreshPerception", "refreshFrame"]},
refreshSize: {propagate: ["refreshPosition", "refreshFrame"]},
refreshMesh: {},
refreshFrame: {},
refreshElevation: {propagate: ["refreshPerception"]},
refreshPerception: {},
refreshVideo: {},
/** @deprecated since v12 */
refreshShape: {
propagate: ["refreshTransform", "refreshMesh", "refreshElevation"],
deprecated: {since: 12, until: 14, alias: true}
}
};
/**
* The Tile border frame
* @type {PIXI.Container}
*/
frame;
/**
* The primary tile image texture
* @type {PIXI.Texture}
*/
texture;
/**
* A Tile background which is displayed if no valid image texture is present
* @type {PIXI.Graphics}
*/
bg;
/**
* A reference to the SpriteMesh which displays this Tile in the PrimaryCanvasGroup.
* @type {PrimarySpriteMesh}
*/
mesh;
/**
* A flag to capture whether this Tile has an unlinked video texture
* @type {boolean}
*/
#unlinkedVideo = false;
/**
* Video options passed by the HUD
* @type {object}
*/
#hudVideoOptions = {
playVideo: undefined,
offset: undefined
};
/* -------------------------------------------- */
/**
* Get the native aspect ratio of the base texture for the Tile sprite
* @type {number}
*/
get aspectRatio() {
if ( !this.texture ) return 1;
let tex = this.texture.baseTexture;
return (tex.width / tex.height);
}
/* -------------------------------------------- */
/** @override */
get bounds() {
let {x, y, width, height, texture, rotation} = this.document;
// Adjust top left coordinate and dimensions according to scale
if ( texture.scaleX !== 1 ) {
const w0 = width;
width *= Math.abs(texture.scaleX);
x += (w0 - width) / 2;
}
if ( texture.scaleY !== 1 ) {
const h0 = height;
height *= Math.abs(texture.scaleY);
y += (h0 - height) / 2;
}
// If the tile is rotated, return recomputed bounds according to rotation
if ( rotation !== 0 ) return PIXI.Rectangle.fromRotation(x, y, width, height, Math.toRadians(rotation)).normalize();
// Normal case
return new PIXI.Rectangle(x, y, width, height).normalize();
}
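/* -------------------------------------------- */
/* Worked example: a 200x100 tile at (400, 300) with texture.scaleX = 0.5 shrinks to 100x100
 * around the same center, giving bounds (450, 300, 100, 100); with a non-zero rotation the
 * normalized rotated bounding box is returned instead.
 */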
/* -------------------------------------------- */
/**
* The HTML source element for the primary Tile texture
* @type {HTMLImageElement|HTMLVideoElement}
*/
get sourceElement() {
return this.texture?.baseTexture.resource.source;
}
/* -------------------------------------------- */
/**
* Does this Tile depict an animated video texture?
* @type {boolean}
*/
get isVideo() {
const source = this.sourceElement;
return source?.tagName === "VIDEO";
}
/* -------------------------------------------- */
/**
* Is this Tile currently visible on the Canvas?
* @type {boolean}
*/
get isVisible() {
return !this.document.hidden || game.user.isGM;
}
/* -------------------------------------------- */
/**
* Is this tile occluded?
* @returns {boolean}
*/
get occluded() {
return this.mesh?.occluded ?? false;
}
/* -------------------------------------------- */
/**
* Is the tile video playing?
* @type {boolean}
*/
get playing() {
return this.isVideo && !this.sourceElement.paused;
}
/* -------------------------------------------- */
/**
* The effective volume at which this Tile should be playing, including the global ambient volume modifier
* @type {number}
*/
get volume() {
return this.document.video.volume * game.settings.get("core", "globalAmbientVolume");
}
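/* -------------------------------------------- */
/* Worked example: a tile video volume of 0.6 combined with a "globalAmbientVolume" setting
 * of 0.5 results in an effective playback volume of 0.3.
 */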
/* -------------------------------------------- */
/* Interactivity */
/* -------------------------------------------- */
/** @override */
_overlapsSelection(rectangle) {
if ( !this.frame ) return false;
const localRectangle = new PIXI.Rectangle(
rectangle.x - this.document.x,
rectangle.y - this.document.y,
rectangle.width,
rectangle.height
);
return localRectangle.overlaps(this.frame.bounds);
}
/* -------------------------------------------- */
/* Rendering */
/* -------------------------------------------- */
/**
* Create a preview tile with a background texture instead of an image
* @param {object} data Initial data with which to create the preview Tile
* @returns {PlaceableObject}
*/
static createPreview(data) {
data.width = data.height = 1;
data.elevation = data.elevation ?? (ui.controls.control.foreground ? canvas.scene.foregroundElevation : 0);
data.sort = Math.max(canvas.tiles.getMaxSort() + 1, 0);
// Create a pending TileDocument
const cls = getDocumentClass("Tile");
const doc = new cls(data, {parent: canvas.scene});
// Render the preview Tile object
const tile = doc.object;
tile.control({releaseOthers: false});
tile.draw().then(() => { // Swap the z-order of the tile and the frame
tile.removeChild(tile.frame);
tile.addChild(tile.frame);
});
return tile;
}
/* -------------------------------------------- */
/** @override */
async _draw(options={}) {
// Load Tile texture
let texture;
if ( this._original ) texture = this._original.texture?.clone();
else if ( this.document.texture.src ) {
texture = await loadTexture(this.document.texture.src, {fallback: "icons/svg/hazard.svg"});
}
// Manage video playback and clone texture for unlinked video
let video = game.video.getVideoSource(texture);
this.#unlinkedVideo = !!video && !this._original;
if ( this.#unlinkedVideo ) {
texture = await game.video.cloneTexture(video);
video = game.video.getVideoSource(texture);
if ( (this.document.getFlag("core", "randomizeVideo") !== false) && Number.isFinite(video.duration) ) {
video.currentTime = Math.random() * video.duration;
}
}
if ( !video ) this.#hudVideoOptions.playVideo = undefined;
this.#hudVideoOptions.offset = undefined;
this.texture = texture;
// Draw the Tile mesh
if ( this.texture ) {
this.mesh = canvas.primary.addTile(this);
this.bg = undefined;
}
// Draw a placeholder background
else {
canvas.primary.removeTile(this);
this.texture = this.mesh = null;
this.bg = this.addChild(new PIXI.Graphics());
this.bg.eventMode = "none";
}
// Control Border
this.frame = this.addChild(this.#drawFrame());
// Interactivity
this.cursor = this.document.isOwner ? "pointer" : null;
}
/* -------------------------------------------- */
/**
* Create elements for the Tile border and handles
* @returns {PIXI.Container}
*/
#drawFrame() {
const frame = new PIXI.Container();
frame.eventMode = "passive";
frame.bounds = new PIXI.Rectangle();
frame.interaction = frame.addChild(new PIXI.Container());
frame.interaction.hitArea = frame.bounds;
frame.interaction.eventMode = "auto";
frame.border = frame.addChild(new PIXI.Graphics());
frame.border.eventMode = "none";
frame.handle = frame.addChild(new ResizeHandle([1, 1]));
frame.handle.eventMode = "static";
return frame;
}
/* -------------------------------------------- */
/** @inheritdoc */
clear(options) {
if ( this.#unlinkedVideo ) this.texture?.baseTexture?.destroy(); // Base texture destroyed for non preview video
this.#unlinkedVideo = false;
super.clear(options);
}
/* -------------------------------------------- */
/** @inheritdoc */
_destroy(options) {
canvas.primary.removeTile(this);
if ( this.texture ) {
if ( this.#unlinkedVideo ) this.texture?.baseTexture?.destroy(); // Base texture destroyed for non preview video
this.texture = undefined;
this.#unlinkedVideo = false;
}
}
/* -------------------------------------------- */
/* Incremental Refresh */
/* -------------------------------------------- */
/** @override */
_applyRenderFlags(flags) {
if ( flags.refreshState ) this._refreshState();
if ( flags.refreshPosition ) this._refreshPosition();
if ( flags.refreshRotation ) this._refreshRotation();
if ( flags.refreshSize ) this._refreshSize();
if ( flags.refreshMesh ) this._refreshMesh();
if ( flags.refreshFrame ) this._refreshFrame();
if ( flags.refreshElevation ) this._refreshElevation();
if ( flags.refreshPerception ) this.#refreshPerception();
if ( flags.refreshVideo ) this._refreshVideo();
}
/* -------------------------------------------- */
/**
* Refresh the position.
* @protected
*/
_refreshPosition() {
const {x, y, width, height} = this.document;
if ( (this.position.x !== x) || (this.position.y !== y) ) MouseInteractionManager.emulateMoveEvent();
this.position.set(x, y);
if ( !this.mesh ) {
this.bg.position.set(width / 2, height / 2);
this.bg.pivot.set(width / 2, height / 2);
return;
}
this.mesh.position.set(x + (width / 2), y + (height / 2));
}
/* -------------------------------------------- */
/**
* Refresh the rotation.
* @protected
*/
_refreshRotation() {
const rotation = this.document.rotation;
if ( !this.mesh ) return this.bg.angle = rotation;
this.mesh.angle = rotation;
}
/* -------------------------------------------- */
/**
* Refresh the size.
* @protected
*/
_refreshSize() {
const {width, height, texture: {fit, scaleX, scaleY}} = this.document;
if ( !this.mesh ) return this.bg.clear().beginFill(0xFFFFFF, 0.5).drawRect(0, 0, width, height).endFill();
this.mesh.resize(width, height, {fit, scaleX, scaleY});
}
/* -------------------------------------------- */
/**
* Refresh the displayed state of the Tile.
* Updated when the tile interaction state changes, when it is hidden, or when its elevation changes.
* @protected
*/
_refreshState() {
const {hidden, locked, elevation, sort} = this.document;
this.visible = this.isVisible;
this.alpha = this._getTargetAlpha();
if ( this.bg ) this.bg.visible = this.layer.active;
const colors = CONFIG.Canvas.dispositionColors;
this.frame.border.tint = this.controlled ? (locked ? colors.HOSTILE : colors.CONTROLLED) : colors.INACTIVE;
this.frame.border.visible = this.controlled || this.hover || this.layer.highlightObjects;
this.frame.handle.visible = this.controlled && !locked;
const foreground = this.layer.active && !!ui.controls.control.foreground;
const overhead = elevation >= this.document.parent.foregroundElevation;
const oldEventMode = this.eventMode;
this.eventMode = overhead === foreground ? "static" : "none";
if ( this.eventMode !== oldEventMode ) MouseInteractionManager.emulateMoveEvent();
const zIndex = this.zIndex = this.controlled ? 2 : this.hover ? 1 : 0;
if ( !this.mesh ) return;
this.mesh.visible = this.visible;
this.mesh.sort = sort;
this.mesh.sortLayer = PrimaryCanvasGroup.SORT_LAYERS.TILES;
this.mesh.zIndex = zIndex;
this.mesh.alpha = this.alpha * (hidden ? 0.5 : 1);
this.mesh.hidden = hidden;
this.mesh.restrictsLight = this.document.restrictions.light;
this.mesh.restrictsWeather = this.document.restrictions.weather;
}
/* -------------------------------------------- */
/**
* Refresh the appearance of the tile.
* @protected
*/
_refreshMesh() {
if ( !this.mesh ) return;
const {width, height, alpha, occlusion, texture} = this.document;
const {anchorX, anchorY, fit, scaleX, scaleY, tint, alphaThreshold} = texture;
this.mesh.anchor.set(anchorX, anchorY);
this.mesh.resize(width, height, {fit, scaleX, scaleY});
this.mesh.unoccludedAlpha = alpha;
this.mesh.occludedAlpha = occlusion.alpha;
this.mesh.occlusionMode = occlusion.mode;
this.mesh.hoverFade = this.mesh.isOccludable;
this.mesh.tint = tint;
this.mesh.textureAlphaThreshold = alphaThreshold;
}
/* -------------------------------------------- */
/**
* Refresh the elevation.
* @protected
*/
_refreshElevation() {
if ( !this.mesh ) return;
this.mesh.elevation = this.document.elevation;
}
/* -------------------------------------------- */
/**
* Trigger a perception refresh of occlusion states.
*/
#refreshPerception() {
if ( !this.mesh ) return;
canvas.perception.update({refreshOcclusionStates: true});
}
/* -------------------------------------------- */
/**
* Refresh the border frame that encloses the Tile.
* @protected
*/
_refreshFrame() {
const thickness = CONFIG.Canvas.objectBorderThickness;
// Update the frame bounds
const {width, height, rotation} = this.document;
const bounds = this.frame.bounds;
bounds.x = 0;
bounds.y = 0;
bounds.width = width;
bounds.height = height;
bounds.rotate(Math.toRadians(rotation));
const minSize = thickness * 0.25;
if ( bounds.width < minSize ) {
bounds.x -= ((minSize - bounds.width) / 2);
bounds.width = minSize;
}
if ( bounds.height < minSize ) {
bounds.y -= ((minSize - bounds.height) / 2);
bounds.height = minSize;
}
MouseInteractionManager.emulateMoveEvent();
// Draw the border
const border = this.frame.border;
border.clear();
border.lineStyle({width: thickness, color: 0x000000, join: PIXI.LINE_JOIN.ROUND, alignment: 0.75})
.drawShape(bounds);
border.lineStyle({width: thickness / 2, color: 0xFFFFFF, join: PIXI.LINE_JOIN.ROUND, alignment: 1})
.drawShape(bounds);
// Draw the handle
this.frame.handle.refresh(bounds);
}
/* -------------------------------------------- */
/**
* Refresh changes to the video playback state.
* @protected
*/
_refreshVideo() {
if ( !this.texture || !this.#unlinkedVideo ) return;
const video = game.video.getVideoSource(this.texture);
if ( !video ) return;
const playOptions = {...this.document.video, volume: this.volume};
playOptions.playing = (this.#hudVideoOptions.playVideo ?? playOptions.autoplay);
playOptions.offset = this.#hudVideoOptions.offset;
this.#hudVideoOptions.offset = undefined;
game.video.play(video, playOptions);
// Refresh HUD if necessary
if ( this.hasActiveHUD ) this.layer.hud.render();
}
/* -------------------------------------------- */
/* Document Event Handlers */
/* -------------------------------------------- */
/** @inheritDoc */
_onUpdate(changed, options, userId) {
super._onUpdate(changed, options, userId);
const restrictionsChanged = ("restrictions" in changed) && !foundry.utils.isEmpty(changed.restrictions);
// Refresh the Drawing
this.renderFlags.set({
redraw: ("texture" in changed) && ("src" in changed.texture),
refreshState: ("sort" in changed) || ("hidden" in changed) || ("locked" in changed) || restrictionsChanged,
refreshPosition: ("x" in changed) || ("y" in changed),
refreshRotation: "rotation" in changed,
refreshSize: ("width" in changed) || ("height" in changed),
refreshMesh: ("alpha" in changed) || ("occlusion" in changed) || ("texture" in changed),
refreshElevation: "elevation" in changed,
refreshPerception: ("occlusion" in changed) && ("mode" in changed.occlusion),
refreshVideo: ("video" in changed) || ("playVideo" in options) || ("offset" in options)
});
// Set the video options
if ( "playVideo" in options ) this.#hudVideoOptions.playVideo = options.playVideo;
if ( "offset" in options ) this.#hudVideoOptions.offset = options.offset;
}
/* -------------------------------------------- */
/* Interactivity */
/* -------------------------------------------- */
/** @inheritdoc */
activateListeners() {
super.activateListeners();
this.frame.handle.off("pointerover").off("pointerout")
.on("pointerover", this._onHandleHoverIn.bind(this))
.on("pointerout", this._onHandleHoverOut.bind(this));
}
/* -------------------------------------------- */
/** @inheritdoc */
_onClickLeft(event) {
if ( event.target === this.frame.handle ) {
event.interactionData.dragHandle = true;
event.stopPropagation();
return;
}
return super._onClickLeft(event);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftStart(event) {
if ( event.interactionData.dragHandle ) return this._onHandleDragStart(event);
return super._onDragLeftStart(event);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftMove(event) {
if ( event.interactionData.dragHandle ) return this._onHandleDragMove(event);
super._onDragLeftMove(event);
}
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftDrop(event) {
if ( event.interactionData.dragHandle ) return this._onHandleDragDrop(event);
return super._onDragLeftDrop(event);
}
/* -------------------------------------------- */
/* Resize Handling */
/* -------------------------------------------- */
/** @inheritdoc */
_onDragLeftCancel(event) {
if ( event.interactionData.dragHandle ) return this._onHandleDragCancel(event);
return super._onDragLeftCancel(event);
}
/* -------------------------------------------- */
/**
* Handle mouse-over event on a control handle
* @param {PIXI.FederatedEvent} event The mouseover event
* @protected
*/
_onHandleHoverIn(event) {
const handle = event.target;
handle?.scale.set(1.5, 1.5);
}
/* -------------------------------------------- */
/**
* Handle mouse-out event on a control handle
* @param {PIXI.FederatedEvent} event The mouseout event
* @protected
*/
_onHandleHoverOut(event) {
const handle = event.target;
handle?.scale.set(1.0, 1.0);
}
/* -------------------------------------------- */
/**
* Handle the beginning of a drag event on a resize handle.
* @param {PIXI.FederatedEvent} event The mousedown event
* @protected
*/
_onHandleDragStart(event) {
const handle = this.frame.handle;
const aw = this.document.width;
const ah = this.document.height;
const x0 = this.document.x + (handle.offset[0] * aw);
const y0 = this.document.y + (handle.offset[1] * ah);
event.interactionData.origin = {x: x0, y: y0, width: aw, height: ah};
}
/* -------------------------------------------- */
/**
* Handle mousemove while dragging a tile resize handle
* @param {PIXI.FederatedEvent} event The mousemove event
* @protected
*/
_onHandleDragMove(event) {
canvas._onDragCanvasPan(event);
const interaction = event.interactionData;
if ( !event.shiftKey ) interaction.destination = this.layer.getSnappedPoint(interaction.destination);
const d = this.#getResizedDimensions(event);
this.document.x = d.x;
this.document.y = d.y;
this.document.width = d.width;
this.document.height = d.height;
this.document.rotation = 0;
// Mirror horizontally or vertically
this.document.texture.scaleX = d.sx;
this.document.texture.scaleY = d.sy;
this.renderFlags.set({refreshTransform: true});
}
/* -------------------------------------------- */
/**
* Handle mouseup after dragging a tile resize handle
* @param {PIXI.FederatedEvent} event The mouseup event
* @protected
*/
_onHandleDragDrop(event) {
const interaction = event.interactionData;
interaction.resetDocument = false;
if ( !event.shiftKey ) interaction.destination = this.layer.getSnappedPoint(interaction.destination);
const d = this.#getResizedDimensions(event);
this.document.update({
x: d.x, y: d.y, width: d.width, height: d.height, "texture.scaleX": d.sx, "texture.scaleY": d.sy
}).then(() => this.renderFlags.set({refreshTransform: true}));
}
/* -------------------------------------------- */
/**
* Get resized Tile dimensions
* @param {PIXI.FederatedEvent} event
* @returns {{x: number, y: number, width: number, height: number, sx: number, sy: number}}
*/
#getResizedDimensions(event) {
const o = this.document._source;
const {origin, destination} = event.interactionData;
// Identify the new width and height as positive dimensions
const dx = destination.x - origin.x;
const dy = destination.y - origin.y;
let w = Math.abs(o.width) + dx;
let h = Math.abs(o.height) + dy;
// Constrain the aspect ratio using the ALT key
if ( event.altKey && this.texture?.valid ) {
const ar = this.texture.width / this.texture.height;
if ( Math.abs(w) > Math.abs(h) ) h = w / ar;
else w = h * ar;
}
const {x, y, width, height} = new PIXI.Rectangle(o.x, o.y, w, h).normalize();
// Compare destination and source coordinates to determine whether the texture scale should be mirrored
const sx = (Math.sign(destination.x - o.x) || 1) * o.texture.scaleX;
const sy = (Math.sign(destination.y - o.y) || 1) * o.texture.scaleY;
return {x, y, width, height, sx, sy};
}
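/**
* Worked example (illustrative only, assuming the handle sits at the bottom-right corner so that
* handle.offset is [1, 1], and the ALT key is not held so no aspect-ratio constraint applies):
* for a 200x100 Tile at (0, 0) the drag origin is (200, 100). Dragging the handle to (-50, 150)
* gives dx = -250 and dy = 50, so w = -50 and h = 150. Normalizing the rectangle yields
* {x: -50, y: 0, width: 50, height: 150}, and because the destination lies to the left of the
* Tile origin, sx becomes -texture.scaleX (a horizontal mirror) while sy keeps its sign.
*/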
/* -------------------------------------------- */
/**
* Handle cancellation of a drag event for one of the resizing handles
* @param {PIXI.FederatedEvent} event The mouseup event
* @protected
*/
_onHandleDragCancel(event) {
if ( event.interactionData.resetDocument !== false ) {
this.document.reset();
this.renderFlags.set({refreshTransform: true});
}
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get isRoof() {
const msg = "Tile#isRoof has been deprecated without replacement.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return this.document.roof;
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
testOcclusion(...args) {
const msg = "Tile#testOcclusion has been deprecated in favor of PrimaryCanvasObject#testOcclusion";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return this.mesh?.testOcclusion(...args) ?? false;
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
containsPixel(...args) {
const msg = "Tile#containsPixel has been deprecated in favor of PrimaryCanvasObject#containsPixel"
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return this.mesh?.containsPixel(...args) ?? false;
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
getPixelAlpha(...args) {
const msg = "Tile#getPixelAlpha has been deprecated in favor of PrimaryCanvasObject#getPixelAlpha"
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return this.mesh?.getPixelAlpha(...args) ?? null;
}
/* -------------------------------------------- */
/**
* @deprecated since v11
* @ignore
*/
_getAlphaBounds() {
const msg = "Tile#_getAlphaBounds has been deprecated";
foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
return this.mesh?._getAlphaBounds();
}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,174 @@
/**
* A batch renderer with a customizable data transfer function to packed geometries.
* @extends PIXI.BatchRenderer
*/
class BatchRenderer extends PIXI.BatchRenderer {
/**
* The batch shader generator class.
* @type {typeof BatchShaderGenerator}
*/
static shaderGeneratorClass = BatchShaderGenerator;
/* -------------------------------------------- */
/**
* The default uniform values for the batch shader.
* @type {object | (maxTextures: number) => object}
*/
static defaultUniforms = {};
/* -------------------------------------------- */
/**
* The PackInterleavedGeometry function provided by the sampler.
* @type {Function|undefined}
* @protected
*/
_packInterleavedGeometry;
/* -------------------------------------------- */
/**
* The update function provided by the sampler, called just before a flush.
* @type {(batchRenderer: BatchRenderer) => void | undefined}
* @protected
*/
_preRenderBatch;
/* -------------------------------------------- */
/**
* Get the uniforms bound to this abstract batch renderer.
* @returns {object|undefined}
*/
get uniforms() {
return this._shader?.uniforms;
}
/* -------------------------------------------- */
/**
* The number of reserved texture units that the shader generator should not use (maximum 4).
* @param {number} val
* @protected
*/
set reservedTextureUnits(val) {
// Some checks before...
if ( typeof val !== "number" ) {
throw new Error("BatchRenderer#reservedTextureUnits must be a number!");
}
if ( (val < 0) || (val > 4) ) {
throw new Error("BatchRenderer#reservedTextureUnits must be positive and can't exceed 4.");
}
this.#reservedTextureUnits = val;
}
/**
* The number of texture units reserved for the batch shader, which cannot be used by the batch renderer.
* @returns {number}
*/
get reservedTextureUnits() {
return this.#reservedTextureUnits;
}
#reservedTextureUnits = 0;
/* -------------------------------------------- */
/** @override */
setShaderGenerator({
vertex=this.constructor.defaultVertexSrc,
fragment=this.constructor.defaultFragmentTemplate,
uniforms=this.constructor.defaultUniforms
}={}) {
this.shaderGenerator = new this.constructor.shaderGeneratorClass(vertex, fragment, uniforms);
}
/* -------------------------------------------- */
/**
* This override allows the allocation of a given number of texture units reserved for a custom batched shader.
* These reserved texture units won't be used to batch textures for PIXI.Sprite or SpriteMesh.
* @override
*/
contextChange() {
const gl = this.renderer.gl;
// First handle legacy environment
if ( PIXI.settings.PREFER_ENV === PIXI.ENV.WEBGL_LEGACY ) this.maxTextures = 1;
else {
// Step 1: Check the maximum number of texture units the GPU can handle
const gpuMaxTex = Math.min(gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS), 65536);
// Step 2: Remove the number of reserved texture units that could be used by a custom batch shader
const batchMaxTex = gpuMaxTex - this.#reservedTextureUnits;
// Step 3: Check that at least one texture unit remains. This should never happen on any GPU less than 20 years old!
if ( batchMaxTex < 1 ) {
const msg = "Impossible to allocate the required number of texture units in contextChange#BatchRenderer. "
+ "Your GPU should handle at least 8 texture units. Currently, it is supporting: "
+ `${gpuMaxTex} texture units.`;
throw new Error(msg);
}
// Step 4: Clamp to the maximum number of textures allowed by the PIXI sprite batching setting (WebGL specifications)
this.maxTextures = Math.min(batchMaxTex, PIXI.settings.SPRITE_MAX_TEXTURES);
// Step 5: Also check the maximum number of if statements the shader can contain
this.maxTextures = PIXI.checkMaxIfStatementsInShader(this.maxTextures, gl);
}
// Generate the batched shader
this._shader = this.shaderGenerator?.generateShader(this.maxTextures) ?? null;
// Initialize packed geometries
for ( let i = 0; i < this._packedGeometryPoolSize; i++ ) {
this._packedGeometries[i] = new (this.geometryClass)();
}
this.initFlushBuffers();
}
/* -------------------------------------------- */
/** @inheritdoc */
onPrerender() {
if ( !this.shaderGenerator ) this.setShaderGenerator();
this._shader ??= this.shaderGenerator.generateShader(this.maxTextures);
super.onPrerender();
}
/* -------------------------------------------- */
/** @override */
start() {
this._preRenderBatch?.(this);
super.start();
}
/* -------------------------------------------- */
/** @override */
packInterleavedGeometry(element, attributeBuffer, indexBuffer, aIndex, iIndex) {
// If we have a specific function to pack data into geometry, we call it
if ( this._packInterleavedGeometry ) {
this._packInterleavedGeometry(element, attributeBuffer, indexBuffer, aIndex, iIndex);
return;
}
// Otherwise, we call the parent method, with the classic packing
super.packInterleavedGeometry(element, attributeBuffer, indexBuffer, aIndex, iIndex);
}
/* -------------------------------------------- */
/**
* Verify if a PIXI plugin exists. Check by name.
* @param {string} name The name of the pixi plugin to check.
* @returns {boolean} True if the plugin exists, false otherwise.
*/
static hasPlugin(name) {
return Object.keys(PIXI.Renderer.__plugins).some(k => k === name);
}
}
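/* -------------------------------------------- */
/**
* Illustrative sketch (hypothetical subclass, not part of this file): reserve one texture unit for
* a custom sampler and supply default uniforms to the generated batch shader. The class and uniform
* names below are assumptions made for the example only.
* @example
* class ExampleBatchRenderer extends BatchRenderer {
*   static defaultUniforms = {exampleIntensity: 1.0};
*   constructor(renderer) {
*     super(renderer);
*     this.reservedTextureUnits = 1; // Keep one unit free for the custom sampler
*   }
* }
*/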

View File

@@ -0,0 +1,41 @@
/**
* A batch shader generator which can provide extra uniforms during initialization.
* @param {string} vertexSrc The vertex shader source
* @param {string} fragTemplate The fragment shader source template
* @param {object | (maxTextures: number) => object} [uniforms] Additional uniforms
*/
class BatchShaderGenerator extends PIXI.BatchShaderGenerator {
constructor(vertexSrc, fragTemplate, uniforms={}) {
super(vertexSrc, fragTemplate);
this.#uniforms = uniforms;
}
/**
* Extra uniforms used to create the batch shader.
* @type {object | (maxTextures: number) => object}
*/
#uniforms;
/* -------------------------------------------- */
/** @override */
generateShader(maxTextures) {
if ( !this.programCache[maxTextures] ) {
const sampleValues = Int32Array.from({length: maxTextures}, (n, i) => i);
this.defaultGroupCache[maxTextures] = PIXI.UniformGroup.from({uSamplers: sampleValues}, true);
let fragmentSrc = this.fragTemplate;
fragmentSrc = fragmentSrc.replace(/%count%/gi, `${maxTextures}`);
fragmentSrc = fragmentSrc.replace(/%forloop%/gi, this.generateSampleSrc(maxTextures));
this.programCache[maxTextures] = new PIXI.Program(this.vertexSrc, fragmentSrc);
}
let uniforms = this.#uniforms;
if ( typeof uniforms === "function" ) uniforms = uniforms.call(this, maxTextures);
else uniforms = foundry.utils.deepClone(uniforms);
return new PIXI.Shader(this.programCache[maxTextures], {
...uniforms,
tint: new Float32Array([1, 1, 1, 1]),
translationMatrix: new PIXI.Matrix(),
default: this.defaultGroupCache[maxTextures]
});
}
}
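/* -------------------------------------------- */
/**
* Illustrative sketch (the vertex and fragment sources are assumed to be defined elsewhere): extra
* uniforms may be given as a plain object or as a function of the maximum number of batched textures.
* @example
* const generator = new BatchShaderGenerator(vertexSrc, fragmentTemplate,
*   maxTextures => ({exampleStrength: 0.5, exampleMaxTextures: maxTextures}));
* const shader = generator.generateShader(8);
*/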

View File

@@ -0,0 +1,40 @@
const BLEND_MODES = {};
/**
* A custom blend mode equation which chooses the maximum color from each channel within the stack.
* @type {number[]}
*/
BLEND_MODES.MAX_COLOR = [
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.MAX,
WebGL2RenderingContext.MAX
];
/**
* A custom blend mode equation which chooses the minimum color from each channel within the stack.
* @type {number[]}
*/
BLEND_MODES.MIN_COLOR = [
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.MIN,
WebGL2RenderingContext.MAX
];
/**
* A custom blend mode equation which chooses the minimum color for the color channels and the minimum alpha for the alpha channel.
* @type {number[]}
*/
BLEND_MODES.MIN_ALL = [
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.ONE,
WebGL2RenderingContext.MIN,
WebGL2RenderingContext.MIN
];
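/**
* Illustrative sketch (assumption, not defined in this file): these arrays follow the layout used
* by PIXI's StateSystem blend mode table, so a custom mode would typically be registered by pushing
* it onto the renderer's blend mode list and assigning the resulting index to a display object.
* @example
* const MAX_COLOR_MODE = canvas.app.renderer.state.blendModes.push(BLEND_MODES.MAX_COLOR) - 1;
* someSprite.blendMode = MAX_COLOR_MODE;
*/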

View File

@@ -0,0 +1,105 @@
/**
* Provide the necessary methods to get a snapshot of the framebuffer into a render texture.
* This class is meant to be used as a singleton.
* Created with the precious advice of dev7355608.
*/
class FramebufferSnapshot {
constructor() {
/**
* The RenderTexture that is the render destination for the framebuffer snapshot.
* @type {PIXI.RenderTexture}
*/
this.framebufferTexture = FramebufferSnapshot.#createRenderTexture();
// Listen for resize events
canvas.app.renderer.on("resize", () => this.#hasResized = true);
}
/**
* Whether the framebuffer render texture needs to be resized.
* @type {boolean}
*/
#hasResized = true;
/**
* A placeholder rectangle used for temporary copies of the viewport frame.
* @type {PIXI.Rectangle}
*/
#tempSourceFrame = new PIXI.Rectangle();
/* ---------------------------------------- */
/**
* Get the framebuffer texture snapshot.
* @param {PIXI.Renderer} renderer The renderer for this context.
* @returns {PIXI.RenderTexture} The framebuffer snapshot.
*/
getFramebufferTexture(renderer) {
// Need resize?
if ( this.#hasResized ) {
CachedContainer.resizeRenderTexture(renderer, this.framebufferTexture);
this.#hasResized = false;
}
// Flush batched operations before anything else
renderer.batch.flush();
const fb = renderer.framebuffer.current;
const vf = this.#tempSourceFrame.copyFrom(renderer.renderTexture.viewportFrame);
// Inverted Y in the case of canvas
if ( !fb ) vf.y = renderer.view.height - (vf.y + vf.height);
// Empty viewport
if ( !(vf.width > 0 && vf.height > 0) ) return PIXI.Texture.WHITE;
// Computing bounds of the source
let srcX = vf.x;
let srcY = vf.y;
let srcX2 = srcX + vf.width;
let srcY2 = srcY + vf.height;
// Inverted Y in the case of canvas
if ( !fb ) {
srcY = renderer.view.height - 1 - srcY;
srcY2 = srcY - vf.height;
}
// Computing bounds of the destination
let dstX = 0;
let dstY = 0;
let dstX2 = vf.width;
let dstY2 = vf.height;
// Preparing the gl context
const gl = renderer.gl;
const framebufferSys = renderer.framebuffer;
const currentFramebuffer = framebufferSys.current;
// Binding our render texture to the framebuffer
framebufferSys.bind(this.framebufferTexture.framebuffer, framebufferSys.viewport);
// The current framebuffer is bound as the read framebuffer (to prepare the blit)
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb?.glFramebuffers[framebufferSys.CONTEXT_UID].framebuffer);
// Blit current framebuffer into our render texture
gl.blitFramebuffer(srcX, srcY, srcX2, srcY2, dstX, dstY, dstX2, dstY2, gl.COLOR_BUFFER_BIT, gl.NEAREST);
// Restore original behavior
framebufferSys.bind(currentFramebuffer, framebufferSys.viewport);
return this.framebufferTexture;
}
/* ---------------------------------------- */
/**
* Create a render texture sized to the current screen dimensions.
* @returns {PIXI.RenderTexture} A reference to the created render texture.
*/
static #createRenderTexture() {
const renderer = canvas.app?.renderer;
return PIXI.RenderTexture.create({
width: renderer?.screen.width ?? window.innerWidth,
height: renderer?.screen.height ?? window.innerHeight,
resolution: renderer?.resolution ?? PIXI.settings.RESOLUTION
});
}
}
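/* ---------------------------------------- */
/**
* Illustrative usage sketch: the class is meant to be created once and reused. A snapshot of the
* current framebuffer is taken during rendering, for example from a shader or filter pre-render
* step, and bound as a screen sampler uniform. The uniform name below is an assumption.
* @example
* const snapshot = new FramebufferSnapshot();
* // Later, while rendering:
* const screenTexture = snapshot.getFramebufferTexture(canvas.app.renderer);
* someShader.uniforms.uScreenSampler = screenTexture;
*/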

View File

@@ -0,0 +1,75 @@
/**
* A smooth noise generator for one-dimensional values.
* @param {object} options Configuration options for the noise process.
* @param {number} [options.amplitude=1] The generated noise will be on the range [0, amplitude].
* @param {number} [options.scale=1] An adjustment factor for the input x values which place them on an
* appropriate range.
* @param {number} [options.maxReferences=256] The number of pre-generated random numbers to generate.
*/
class SmoothNoise {
constructor({amplitude=1, scale=1, maxReferences=256}={}) {
// Configure amplitude
this.amplitude = amplitude;
// Configure scale
this.scale = scale;
// Create pre-generated random references
if ( !Number.isInteger(maxReferences) || !PIXI.utils.isPow2(maxReferences) ) {
throw new Error("SmoothNoise maxReferences must be a positive power-of-2 integer.");
}
Object.defineProperty(this, "_maxReferences", {value: maxReferences || 1, writable: false});
Object.defineProperty(this, "_references", {value: [], writable: false});
for ( let i = 0; i < this._maxReferences; i++ ) {
this._references.push(Math.random());
}
}
/**
* Amplitude of the generated noise output.
* The noise output is multiplied by this value.
* @type {number}
*/
get amplitude() {
return this._amplitude;
}
set amplitude(amplitude) {
if ( !Number.isFinite(amplitude) || (amplitude === 0) ) {
throw new Error("SmoothNoise amplitude must be a finite non-zero number.");
}
this._amplitude = amplitude;
}
_amplitude;
/**
* Scale factor applied to the input x values.
* @type {number}
*/
get scale() {
return this._scale;
}
set scale(scale) {
if ( !Number.isFinite(scale) || (scale <= 0 ) ) {
throw new Error("SmoothNoise scale must be a finite positive number.");
}
this._scale = scale;
}
_scale;
/**
* Generate the noise value corresponding to a provided numeric x value.
* @param {number} x Any finite number
* @returns {number} The corresponding smoothed noise value
*/
generate(x) {
const scaledX = x * this._scale; // The input x scaled by some factor
const xFloor = Math.floor(scaledX); // The integer portion of x
const t = scaledX - xFloor; // The fractional remainder, zero in the case of integer x
const tSmooth = t * t * (3 - 2 * t); // Smooth cubic [0, 1] for mixing between random numbers
const i0 = xFloor & (this._maxReferences - 1); // The current index of the references array
const i1 = (i0 + 1) & (this._maxReferences - 1); // The next index of the references array
const y = Math.mix(this._references[i0], this._references[i1], tSmooth); // Smoothly mix between random numbers
return y * this._amplitude; // The final result is multiplied by the requested amplitude
}
}
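/**
* Illustrative usage sketch: produce a smoothly varying flicker value over time.
* @example
* const noise = new SmoothNoise({amplitude: 1.0, scale: 0.05, maxReferences: 256});
* // Sample once per frame using the elapsed time in milliseconds
* const intensity = noise.generate(performance.now());
*/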

View File

@@ -0,0 +1,532 @@
/**
* A class providing support for asynchronous WebGL read pixel/texture data extraction.
*/
class TextureExtractor {
constructor(renderer, {callerName, controlHash, format=PIXI.FORMATS.RED}={}) {
this.#renderer = renderer;
this.#callerName = callerName ?? "TextureExtractor";
this.#compressor = new TextureCompressor("Compressor", {debug: false, controlHash});
// Verify that the required format is supported by the texture extractor
if ( !((format === PIXI.FORMATS.RED) || (format === PIXI.FORMATS.RGBA)) ) {
throw new Error("TextureExtractor supports format RED and RGBA only.")
}
// Assign format, types, and read mode
this.#format = format;
this.#type = PIXI.TYPES.UNSIGNED_BYTE;
this.#readFormat = (((format === PIXI.FORMATS.RED) && !canvas.supported.readPixelsRED)
|| format === PIXI.FORMATS.RGBA) ? PIXI.FORMATS.RGBA : PIXI.FORMATS.RED;
// We need to intercept context change
this.#renderer.runners.contextChange.add(this);
}
/**
* The compression modes which may be applied during extraction
* @enum {number}
*/
static COMPRESSION_MODES = {
NONE: 0,
BASE64: 1
};
/**
* The WebGL2 renderer.
* @type {PIXI.Renderer}
*/
#renderer;
/**
* The reference to a WebGL2 sync object.
* @type {WebGLSync}
*/
#glSync;
/**
* The texture format on which the Texture Extractor must work.
* @type {PIXI.FORMATS}
*/
#format;
/**
* The texture type on which the Texture Extractor must work.
* @type {PIXI.TYPES}
*/
#type;
/**
* The texture format on which the Texture Extractor should read.
* @type {PIXI.FORMATS}
*/
#readFormat;
/**
* The reference to the GPU buffer.
* @type {WebGLBuffer}
*/
#gpuBuffer;
/**
* Whether a new GPU buffer must be created.
* @type {boolean}
*/
#createBuffer;
/**
* Debug flag.
* @type {boolean}
*/
debug;
/**
* The reference to the pixel buffer.
* @type {Uint8ClampedArray}
*/
pixelBuffer;
/**
* The caller name associated with this instance of the texture extractor (optional, used for debug)
* @type {string}
*/
#callerName;
/**
* A render texture generated internally when a plain texture is passed to the extractor.
* @type {PIXI.RenderTexture}
*/
#generatedRenderTexture;
/* -------------------------------------------- */
/* TextureExtractor Compression Worker */
/* -------------------------------------------- */
/**
* The compressor worker wrapper
* @type {TextureCompressor}
*/
#compressor;
/* -------------------------------------------- */
/* TextureExtractor Properties */
/* -------------------------------------------- */
/**
* Returns the number of channels per pixel for the read format (1 for RED, 4 for RGBA).
* @returns {number}
*/
get #readBufferMul() {
return this.#readFormat === PIXI.FORMATS.RED ? 1 : 4;
}
/* -------------------------------------------- */
/* TextureExtractor Synchronization */
/* -------------------------------------------- */
/**
* Handles concurrency for extraction requests (by default, a queue of 1)
* @type {Semaphore}
*/
#queue = new foundry.utils.Semaphore();
/* -------------------------------------------- */
/**
* @typedef {Object} TextureExtractionOptions
* @property {PIXI.Texture|PIXI.RenderTexture|null} [texture] The texture the pixels are extracted from.
* Otherwise, extract from the renderer.
* @property {PIXI.Rectangle} [frame] The rectangle which the pixels are extracted from.
* @property {TextureExtractor.COMPRESSION_MODES} [compression] The compression mode to apply, or NONE
* @property {string} [type] The optional image mime type.
* @property {string} [quality] The optional image quality.
* @property {boolean} [debug] The optional debug flag to use.
*/
/**
* Extract a rectangular block of pixels from the texture (without un-pre-multiplying).
* @param {TextureExtractionOptions} options Options which configure extraction behavior
* @returns {Promise}
*/
async extract(options={}) {
return this.#queue.add(this.#extract.bind(this), options);
}
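/**
* Illustrative usage sketch (assumes a valid render texture and WebGL2 support): extract pixels as
* a compressed base64 image. The texture variable, image type and quality values below are
* arbitrary examples, not part of the API.
* @example
* const extractor = new TextureExtractor(canvas.app.renderer, {callerName: "Example", format: PIXI.FORMATS.RGBA});
* const base64 = await extractor.extract({
*   texture: someRenderTexture,
*   compression: TextureExtractor.COMPRESSION_MODES.BASE64,
*   type: "image/webp",
*   quality: 0.8
* });
*/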
/* -------------------------------------------- */
/* TextureExtractor Methods/Interface */
/* -------------------------------------------- */
/**
* Extract a rectangular block of pixels from the texture (without un-pre-multiplying).
* @param {TextureExtractionOptions} options Options which configure extraction behavior
* @returns {Promise}
*/
async #extract({texture, frame, compression, type, quality, debug}={}) {
// Set the debug flag
this.debug = debug;
if ( this.debug ) this.#consoleDebug("Begin texture extraction.");
// Checking texture validity
const baseTexture = texture?.baseTexture;
if ( texture && (!baseTexture || !baseTexture.valid || baseTexture.parentTextureArray) ) {
throw new Error("Texture passed to extractor is invalid.");
}
// Checking if texture is in RGBA format and premultiplied
if ( texture && (texture.baseTexture.alphaMode > 0) && (texture.baseTexture.format === PIXI.FORMATS.RGBA) ) {
throw new Error("Texture Extractor is not supporting premultiplied textures yet.");
}
let resolution;
// If the texture is a RT, use its frame and resolution
if ( (texture instanceof PIXI.RenderTexture) && ((baseTexture.format === this.#format)
|| (this.#readFormat === PIXI.FORMATS.RGBA) )
&& (baseTexture.type === this.#type) ) {
frame ??= texture.frame;
resolution = baseTexture.resolution;
}
// Case when the texture is not a render texture
// Generate a render texture and assign frame and resolution from it
else {
texture = this.#generatedRenderTexture = this.#renderer.generateTexture(new PIXI.Sprite(texture), {
format: this.#format,
type: this.#type,
resolution: baseTexture.resolution,
multisample: PIXI.MSAA_QUALITY.NONE
});
frame ??= this.#generatedRenderTexture.frame;
resolution = texture.baseTexture.resolution;
}
// Bind the texture
this.#renderer.renderTexture.bind(texture);
// Get the buffer from the GPU
const data = await this.#readPixels(frame, resolution);
// Return the compressed image or the raw buffer
if ( compression ) {
return await this.#compressBuffer(data.buffer, data.width, data.height, {compression, type, quality});
}
else if ( (this.#format === PIXI.FORMATS.RED) && (this.#readFormat === PIXI.FORMATS.RGBA) ) {
const result = await this.#compressor.reduceBufferRGBAToBufferRED(data.buffer, data.width, data.height, {compression, type, quality});
// Returning control of the buffer to the extractor
this.pixelBuffer = result.buffer;
// Returning the result
return result.redBuffer;
}
return data.buffer;
}
/* -------------------------------------------- */
/**
* Free all the bound objects.
*/
reset() {
if ( this.debug ) this.#consoleDebug("Data reset.");
this.#clear({buffer: true, syncObject: true, rt: true});
}
/* -------------------------------------------- */
/**
* Called by the renderer contextChange runner.
*/
contextChange() {
if ( this.debug ) this.#consoleDebug("WebGL context has changed.");
this.#glSync = undefined;
this.#generatedRenderTexture = undefined;
this.#gpuBuffer = undefined;
this.pixelBuffer = undefined;
}
/* -------------------------------------------- */
/* TextureExtractor Management */
/* -------------------------------------------- */
/**
* Compress the buffer and return a base64 image.
* @param {*} args
* @returns {Promise<string>}
*/
async #compressBuffer(...args) {
if ( canvas.supported.offscreenCanvas ) return this.#compressBufferWorker(...args);
else return this.#compressBufferLocal(...args);
}
/* -------------------------------------------- */
/**
* Compress the buffer in a worker and return a base64 image
* @param {Uint8ClampedArray} buffer Buffer to convert into a compressed base64 image.
* @param {number} width Width of the image.
* @param {number} height Height of the image.
* @param {object} options
* @param {string} options.type Format of the image.
* @param {number} options.quality Quality of the compression.
* @returns {Promise<string>}
*/
async #compressBufferWorker(buffer, width, height, {type, quality}={}) {
let result;
try {
// Launch compression
result = await this.#compressor.compressBufferBase64(buffer, width, height, {
type: type ?? "image/png",
quality: quality ?? 1,
debug: this.debug,
readFormat: this.#readFormat
});
}
catch(e) {
this.#consoleError("Buffer compression has failed!");
throw e;
}
// Returning control of the buffer to the extractor
this.pixelBuffer = result.buffer;
// Returning the result
return result.base64img;
}
/* -------------------------------------------- */
/**
* Compress the buffer locally (but expand the buffer in a worker) and return a base64 image.
* The image format is forced to jpeg.
* @param {Uint8ClampedArray} buffer Buffer to convert into a compressed base64 image.
* @param {number} width Width of the image.
* @param {number} height Height of the image.
* @param {object} options
* @param {number} options.quality Quality of the compression.
* @returns {Promise<string>}
*/
async #compressBufferLocal(buffer, width, height, {quality}={}) {
let rgbaBuffer;
if ( this.#readFormat === PIXI.FORMATS.RED ) {
let result;
try {
// Launch buffer expansion on the worker thread
result = await this.#compressor.expandBufferRedToBufferRGBA(buffer, width, height, {
debug: this.debug
});
} catch(e) {
this.#consoleError("Buffer expansion has failed!");
throw e;
}
// Returning control of the buffer to the extractor
this.pixelBuffer = result.buffer;
rgbaBuffer = result.rgbaBuffer;
} else {
rgbaBuffer = buffer;
}
if ( !rgbaBuffer ) return;
// Perform the compression locally and return the base64 image
const element = ImageHelper.pixelsToCanvas(rgbaBuffer, width, height);
return await ImageHelper.canvasToBase64(element, "image/jpeg", quality); // Force jpeg compression
}
/* -------------------------------------------- */
/**
* Perform an asynchronous readPixels from the GPU into a pixel buffer.
* @param {PIXI.Rectangle} frame
* @param {number} resolution
* @returns {object}
*/
async #readPixels(frame, resolution) {
const gl = this.#renderer.gl;
// Set dimensions and buffer size
const x = Math.round(frame.left * resolution);
const y = Math.round(frame.top * resolution);
const width = Math.round(frame.width * resolution);
const height = Math.round(frame.height * resolution);
const bufSize = width * height * this.#readBufferMul;
// Set format and type needed for the readPixel command
const format = this.#readFormat;
const type = gl.UNSIGNED_BYTE;
// Useful debug information
if ( this.debug ) console.table({x, y, width, height, bufSize, format, type, extractorFormat: this.#format});
// The buffer that will hold the pixel data
const pixels = this.#getPixelCache(bufSize);
// Start the non-blocking read
// Create or reuse the GPU buffer and bind as buffer data
if ( this.#createBuffer ) {
if ( this.debug ) this.#consoleDebug("Creating buffer.");
this.#createBuffer = false;
if ( this.#gpuBuffer ) this.#clear({buffer: true});
this.#gpuBuffer = gl.createBuffer();
gl.bindBuffer(gl.PIXEL_PACK_BUFFER, this.#gpuBuffer);
gl.bufferData(gl.PIXEL_PACK_BUFFER, bufSize, gl.DYNAMIC_READ);
}
else {
if ( this.debug ) this.#consoleDebug("Reusing cached buffer.");
gl.bindBuffer(gl.PIXEL_PACK_BUFFER, this.#gpuBuffer);
}
// Performs read pixels GPU Texture -> GPU Buffer
gl.pixelStorei(gl.PACK_ALIGNMENT, 1);
gl.readPixels(x, y, width, height, format, type, 0);
gl.pixelStorei(gl.PACK_ALIGNMENT, 4);
gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
// Declare the sync object
this.#glSync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
// Flush all pending gl commands, including the commands above (important: flush is non blocking)
// The glSync object will be signaled once all commands have been executed
gl.flush();
// Waiting for the sync object to resolve
await this.#wait();
// Retrieve the GPU buffer data
const data = this.#getGPUBufferData(pixels, width, height, bufSize);
// Clear the sync object and possible generated render texture
this.#clear({syncObject: true, rt: true});
// Return the data
if ( this.debug ) this.#consoleDebug("Buffer data sent to caller.");
return data;
}
/* -------------------------------------------- */
/**
* Retrieve the content of the GPU buffer and copy it into the pixel buffer.
* Returns an object with the pixel buffer and dimensions.
* @param {Uint8ClampedArray} buffer The pixel buffer.
* @param {number} width The width of the texture.
* @param {number} height The height of the texture.
* @param {number} bufSize The size of the buffer.
* @returns {object<Uint8ClampedArray, number, number>}
*/
#getGPUBufferData(buffer, width, height, bufSize) {
const gl = this.#renderer.gl;
// Retrieve the GPU buffer data
gl.bindBuffer(gl.PIXEL_PACK_BUFFER, this.#gpuBuffer);
gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, buffer, 0, bufSize);
gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
return {buffer, width, height};
}
/* -------------------------------------------- */
/**
* Retrieve a pixel buffer of the given length.
* A cache is kept for the most recently requested length only (to limit memory consumption)
* @param {number} length Length of the required buffer.
* @returns {Uint8ClampedArray} The cached or newly created buffer.
*/
#getPixelCache(length) {
if ( this.pixelBuffer?.length !== length ) {
this.pixelBuffer = new Uint8ClampedArray(length);
// If the pixel cache needs to be (re)created, the same applies to the GPU buffer
this.#createBuffer = true;
}
return this.pixelBuffer;
}
/* -------------------------------------------- */
/**
* Wait for the synchronization object to resolve.
* @returns {Promise}
*/
async #wait() {
// Preparing data for testFence
const gl = this.#renderer.gl;
const sync = this.#glSync;
// Prepare for fence testing
const result = await new Promise((resolve, reject) => {
/**
* Test the fence sync object
*/
function wait() {
const res = gl.clientWaitSync(sync, 0, 0);
if ( res === gl.WAIT_FAILED ) {
reject(false);
return;
}
if ( res === gl.TIMEOUT_EXPIRED ) {
setTimeout(wait, 10);
return;
}
resolve(true);
}
wait();
});
// The promise was rejected?
if ( !result ) {
this.#clear({buffer: true, syncObject: true, data: true, rt: true});
throw new Error("The sync object has failed to wait.");
}
}
/* -------------------------------------------- */
/**
* Clear some key properties.
* @param {object} options
* @param {boolean} [options.buffer=false]
* @param {boolean} [options.syncObject=false]
* @param {boolean} [options.rt=false]
*/
#clear({buffer=false, syncObject=false, rt=false}={}) {
if ( syncObject && this.#glSync ) {
// Delete the sync object
this.#renderer.gl.deleteSync(this.#glSync);
this.#glSync = undefined;
if ( this.debug ) this.#consoleDebug("Free the sync object.");
}
if ( buffer ) {
// Delete the buffers
if ( this.#gpuBuffer ) {
this.#renderer.gl.deleteBuffer(this.#gpuBuffer);
this.#gpuBuffer = undefined;
}
this.pixelBuffer = undefined;
this.#createBuffer = true;
if ( this.debug ) this.#consoleDebug("Free the cached buffers.");
}
if ( rt && this.#generatedRenderTexture ) {
// Delete the generated render texture
this.#generatedRenderTexture.destroy(true);
this.#generatedRenderTexture = undefined;
if ( this.debug ) this.#consoleDebug("Destroy the generated render texture.");
}
}
/* -------------------------------------------- */
/**
* Convenience method to display the debug messages with the extractor.
* @param {string} message The debug message to display.
*/
#consoleDebug(message) {
console.debug(`${this.#callerName} | ${message}`);
}
/* -------------------------------------------- */
/**
* Convenience method to display the error messages with the extractor.
* @param {string} message The error message to display.
*/
#consoleError(message) {
console.error(`${this.#callerName} | ${message}`);
}
}

View File

@@ -0,0 +1,330 @@
/**
* A mixin which decorates a PIXI.Filter or PIXI.Shader with common properties.
* @category - Mixins
* @param {typeof PIXI.Shader} ShaderClass The parent ShaderClass class being mixed.
* @returns {typeof BaseShaderMixin} A Shader/Filter subclass mixed with BaseShaderMixin features.
* @mixin
*/
const BaseShaderMixin = ShaderClass => {
class BaseShaderMixin extends ShaderClass {
/**
* Useful constant values computed at compile time
* @type {string}
*/
static CONSTANTS = `
const float PI = 3.141592653589793;
const float TWOPI = 6.283185307179586;
const float INVPI = 0.3183098861837907;
const float INVTWOPI = 0.15915494309189535;
const float SQRT2 = 1.4142135623730951;
const float SQRT1_2 = 0.7071067811865476;
const float SQRT3 = 1.7320508075688772;
const float SQRT1_3 = 0.5773502691896257;
const vec3 BT709 = vec3(0.2126, 0.7152, 0.0722);
`;
/* -------------------------------------------- */
/**
* Fast approximate perceived brightness computation,
* using the exact Digital ITU BT.709 luminance factors.
* @type {string}
*/
static PERCEIVED_BRIGHTNESS = `
float perceivedBrightness(in vec3 color) { return sqrt(dot(BT709, color * color)); }
float perceivedBrightness(in vec4 color) { return perceivedBrightness(color.rgb); }
float reversePerceivedBrightness(in vec3 color) { return 1.0 - perceivedBrightness(color); }
float reversePerceivedBrightness(in vec4 color) { return 1.0 - perceivedBrightness(color.rgb); }
`;
/* -------------------------------------------- */
/**
* Conversion functions for sRGB and linear RGB.
* @type {string}
*/
static COLOR_SPACES = `
float luminance(in vec3 c) { return dot(BT709, c); }
vec3 linear2grey(in vec3 c) { return vec3(luminance(c)); }
vec3 linear2srgb(in vec3 c) {
vec3 a = 12.92 * c;
vec3 b = 1.055 * pow(c, vec3(1.0 / 2.4)) - 0.055;
vec3 s = step(vec3(0.0031308), c);
return mix(a, b, s);
}
vec3 srgb2linear(in vec3 c) {
vec3 a = c / 12.92;
vec3 b = pow((c + 0.055) / 1.055, vec3(2.4));
vec3 s = step(vec3(0.04045), c);
return mix(a, b, s);
}
vec3 srgb2linearFast(in vec3 c) { return c * c; }
vec3 linear2srgbFast(in vec3 c) { return sqrt(c); }
vec3 colorClamp(in vec3 c) { return clamp(c, vec3(0.0), vec3(1.0)); }
vec4 colorClamp(in vec4 c) { return clamp(c, vec4(0.0), vec4(1.0)); }
vec3 tintColorLinear(in vec3 color, in vec3 tint, in float intensity) {
float t = luminance(tint);
float c = luminance(color);
return mix(color, mix(
mix(tint, vec3(1.0), (c - t) / (1.0 - t)),
tint * (c / t),
step(c, t)
), intensity);
}
vec3 tintColor(in vec3 color, in vec3 tint, in float intensity) {
return linear2srgbFast(tintColorLinear(srgb2linearFast(color), srgb2linearFast(tint), intensity));
}
`;
/* -------------------------------------------- */
/**
* Fractional Brownian Motion for a given number of octaves
* @param {number} [octaves=4]
* @param {number} [amp=1.0]
* @returns {string}
*/
static FBM(octaves = 4, amp = 1.0) {
return `float fbm(in vec2 uv) {
float total = 0.0, amp = ${amp.toFixed(1)};
for (int i = 0; i < ${octaves}; i++) {
total += noise(uv) * amp;
uv += uv;
amp *= 0.5;
}
return total;
}`;
}
/* -------------------------------------------- */
/**
* High Quality Fractional Brownian Motion
* @param {number} [octaves=3]
* @returns {string}
*/
static FBMHQ(octaves = 3) {
return `float fbm(in vec2 uv, in float smoothness) {
float s = exp2(-smoothness);
float f = 1.0;
float a = 1.0;
float t = 0.0;
for( int i = 0; i < ${octaves}; i++ ) {
t += a * noise(f * uv);
f *= 2.0;
a *= s;
}
return t;
}`;
}
/* -------------------------------------------- */
/**
* Angular constraint working with coordinates on the range [-1, 1]
* => coord: Coordinates
* => angle: Angle in radians
* => smoothness: Smoothness of the pie
* => l: Length of the pie.
* @type {string}
*/
static PIE = `
float pie(in vec2 coord, in float angle, in float smoothness, in float l) {
coord.x = abs(coord.x);
vec2 va = vec2(sin(angle), cos(angle));
float lg = length(coord) - l;
float clg = length(coord - va * clamp(dot(coord, va) , 0.0, l));
return smoothstep(0.0, smoothness, max(lg, clg * sign(va.y * coord.x - va.x * coord.y)));
}`;
/* -------------------------------------------- */
/**
* A conventional pseudo-random number generator with the "golden" numbers, based on uv position
* @type {string}
*/
static PRNG_LEGACY = `
float random(in vec2 uv) {
return fract(cos(dot(uv, vec2(12.9898, 4.1414))) * 43758.5453);
}`;
/* -------------------------------------------- */
/**
* A pseudo-random number generator based on uv position which does not use cos/sin
* This PRNG replaces the old PRNG_LEGACY to work around some driver bugs
* @type {string}
*/
static PRNG = `
float random(in vec2 uv) {
uv = mod(uv, 1000.0);
return fract( dot(uv, vec2(5.23, 2.89)
* fract((2.41 * uv.x + 2.27 * uv.y)
* 251.19)) * 551.83);
}`;
/* -------------------------------------------- */
/**
* A Vec2 pseudo-random generator, based on uv position
* @type {string}
*/
static PRNG2D = `
vec2 random(in vec2 uv) {
vec2 uvf = fract(uv * vec2(0.1031, 0.1030));
uvf += dot(uvf, uvf.yx + 19.19);
return fract((uvf.x + uvf.y) * uvf);
}`;
/* -------------------------------------------- */
/**
* A Vec3 pseudo-random generator, based on uv position
* @type {string}
*/
static PRNG3D = `
vec3 random(in vec3 uv) {
return vec3(fract(cos(dot(uv, vec3(12.9898, 234.1418, 152.01))) * 43758.5453),
fract(sin(dot(uv, vec3(80.9898, 545.8937, 151515.12))) * 23411.1789),
fract(cos(dot(uv, vec3(01.9898, 1568.5439, 154.78))) * 31256.8817));
}`;
/* -------------------------------------------- */
/**
* A conventional noise generator
* @type {string}
*/
static NOISE = `
float noise(in vec2 uv) {
const vec2 d = vec2(0.0, 1.0);
vec2 b = floor(uv);
vec2 f = smoothstep(vec2(0.), vec2(1.0), fract(uv));
return mix(
mix(random(b), random(b + d.yx), f.x),
mix(random(b + d.xy), random(b + d.yy), f.x),
f.y
);
}`;
/* -------------------------------------------- */
/**
* Convert a Hue-Saturation-Brightness color to RGB - useful to convert polar coordinates to RGB
* @type {string}
*/
static HSB2RGB = `
vec3 hsb2rgb(in vec3 c) {
vec3 rgb = clamp(abs(mod(c.x*6.0+vec3(0.0,4.0,2.0), 6.0)-3.0)-1.0, 0.0, 1.0 );
rgb = rgb*rgb*(3.0-2.0*rgb);
return c.z * mix(vec3(1.0), rgb, c.y);
}`;
/* -------------------------------------------- */
/**
* Declare a wave function in a shader -> wcos (default), wsin or wtan.
* Wave on the [v1,v2] range with amplitude -> a and speed -> speed.
* @param {string} [func="cos"] the math function to use
* @returns {string}
*/
static WAVE(func="cos") {
return `
float w${func}(in float v1, in float v2, in float a, in float speed) {
float w = ${func}( speed + a ) + 1.0;
return (v1 - v2) * (w * 0.5) + v2;
}`;
}
/* -------------------------------------------- */
/**
* Rotation function.
* @type {string}
*/
static ROTATION = `
mat2 rot(in float a) {
float s = sin(a);
float c = cos(a);
return mat2(c, -s, s, c);
}
`;
/* -------------------------------------------- */
/**
* Voronoi noise function. Needs PRNG2D and CONSTANTS.
* @see PRNG2D
* @see CONSTANTS
* @type {string}
*/
static VORONOI = `
vec3 voronoi(in vec2 uv, in float t, in float zd) {
vec3 vor = vec3(0.0, 0.0, zd);
vec2 uvi = floor(uv);
vec2 uvf = fract(uv);
for ( float j = -1.0; j <= 1.0; j++ ) {
for ( float i = -1.0; i <= 1.0; i++ ) {
vec2 uvn = vec2(i, j);
vec2 uvr = 0.5 * sin(TWOPI * random(uvi + uvn) + t) + 0.5;
uvr = 0.5 * sin(TWOPI * uvr + t) + 0.5;
vec2 uvd = uvn + uvr - uvf;
float dist = length(uvd);
if ( dist < vor.z ) {
vor.xy = uvr;
vor.z = dist;
}
}
}
return vor;
}
vec3 voronoi(in vec2 vuv, in float zd) {
return voronoi(vuv, 0.0, zd);
}
vec3 voronoi(in vec3 vuv, in float zd) {
return voronoi(vuv.xy, vuv.z, zd);
}
`;
/* -------------------------------------------- */
/**
* Enables GLSL 1.0 backwards compatibility in GLSL 3.00 ES vertex shaders.
* @type {string}
*/
static GLSL1_COMPATIBILITY_VERTEX = `
#define attribute in
#define varying out
`;
/* -------------------------------------------- */
/**
* Enables GLSL 1.0 backwards compatibility in GLSL 3.00 ES fragment shaders.
* @type {string}
*/
static GLSL1_COMPATIBILITY_FRAGMENT = `
#define varying in
#define texture2D texture
#define textureCube texture
#define texture2DProj textureProj
#define texture2DLodEXT textureLod
#define texture2DProjLodEXT textureProjLod
#define textureCubeLodEXT textureLod
#define texture2DGradEXT textureGrad
#define texture2DProjGradEXT textureProjGrad
#define textureCubeGradEXT textureGrad
#define gl_FragDepthEXT gl_FragDepth
`;
}
return BaseShaderMixin;
};
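/**
* Illustrative sketch (hypothetical shader, names are assumptions): the static GLSL snippets are
* meant to be interpolated into the source strings of classes which use this mixin. Only the
* fragment source is shown here; a matching vertex shader providing vUvs is assumed.
* @example
* class ExampleNoiseShader extends BaseShaderMixin(PIXI.Shader) {
*   static fragmentShader = `
*   ${this.CONSTANTS}
*   ${this.PRNG}
*   ${this.NOISE}
*   ${this.FBM(4, 1.0)}
*   varying vec2 vUvs;
*   void main() {
*     gl_FragColor = vec4(vec3(fbm(vUvs * 10.0)), 1.0);
*   }`;
* }
*/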

View File

@@ -0,0 +1,102 @@
/**
* This class defines an interface which all shaders utilize.
* @extends {PIXI.Shader}
* @property {PIXI.Program} program The program to use with this shader.
* @property {object} uniforms The current uniforms of the Shader.
* @mixes BaseShaderMixin
* @abstract
*/
class AbstractBaseShader extends BaseShaderMixin(PIXI.Shader) {
constructor(program, uniforms) {
super(program, foundry.utils.deepClone(uniforms));
/**
* The initial values of the shader uniforms.
* @type {object}
*/
this.initialUniforms = uniforms;
}
/* -------------------------------------------- */
/**
* The raw vertex shader used by this class.
* A subclass of AbstractBaseShader must implement the vertexShader static field.
* @type {string}
*/
static vertexShader = "";
/**
* The raw fragment shader used by this class.
* A subclass of AbstractBaseShader must implement the fragmentShader static field.
* @type {string}
*/
static fragmentShader = "";
/**
* The default uniform values for the shader.
* A subclass of AbstractBaseShader must implement the defaultUniforms static field.
* @type {object}
*/
static defaultUniforms = {};
/* -------------------------------------------- */
/**
* A factory method for creating the shader using its defined default values
* @param {object} initialUniforms
* @returns {AbstractBaseShader}
*/
static create(initialUniforms) {
const program = PIXI.Program.from(this.vertexShader, this.fragmentShader);
const uniforms = foundry.utils.mergeObject(this.defaultUniforms, initialUniforms,
{inplace: false, insertKeys: false});
const shader = new this(program, uniforms);
shader._configure();
return shader;
}
/* -------------------------------------------- */
/**
* Reset the shader uniforms back to their initial values.
*/
reset() {
for ( const [k, v] of Object.entries(this.initialUniforms) ) {
this.uniforms[k] = foundry.utils.deepClone(v);
}
}
/* ---------------------------------------- */
/**
* A one time initialization performed on creation.
* @protected
*/
_configure() {}
/* ---------------------------------------- */
/**
* Perform operations which are required before binding the Shader to the Renderer.
* @param {PIXI.DisplayObject} mesh The mesh display object linked to this shader.
* @param {PIXI.Renderer} renderer The renderer
* @protected
* @internal
*/
_preRender(mesh, renderer) {}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
get _defaults() {
const msg = "AbstractBaseShader#_defaults is deprecated in favor of AbstractBaseShader#initialUniforms.";
foundry.utils.logCompatibilityWarning(msg, {since: 12, until: 14});
return this.initialUniforms;
}
}
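/**
* Illustrative sketch (hypothetical subclass, names are assumptions): concrete shaders declare the
* static sources and default uniforms, then instances are obtained through the create() factory.
* A real shader would also supply any additional uniforms its program expects at render time.
* @example
* class ExampleTintShader extends AbstractBaseShader {
*   static vertexShader = `
*   attribute vec2 aVertexPosition;
*   uniform mat3 projectionMatrix;
*   void main() {
*     gl_Position = vec4((projectionMatrix * vec3(aVertexPosition, 1.0)).xy, 0.0, 1.0);
*   }`;
*   static fragmentShader = `
*   uniform vec3 tintColor;
*   void main() { gl_FragColor = vec4(tintColor, 1.0); }`;
*   static defaultUniforms = {tintColor: [1, 1, 1]};
* }
* const shader = ExampleTintShader.create({tintColor: [1, 0, 0]});
*/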

View File

@@ -0,0 +1,35 @@
/**
* An abstract filter which provides a framework for reusable definition
* @extends {PIXI.Filter}
* @mixes BaseShaderMixin
* @abstract
*/
class AbstractBaseFilter extends BaseShaderMixin(PIXI.Filter) {
/**
* The default uniforms used by the filter
* @type {object}
*/
static defaultUniforms = {};
/**
* The fragment shader which renders this filter.
* @type {string}
*/
static fragmentShader = undefined;
/**
* The vertex shader which renders this filter.
* @type {string}
*/
static vertexShader = undefined;
/**
* A factory method for creating the filter using its defined default values.
* @param {object} [initialUniforms] Initial uniform values which override filter defaults
* @returns {AbstractBaseFilter} The constructed AbstractBaseFilter instance.
*/
static create(initialUniforms={}) {
return new this(this.vertexShader, this.fragmentShader, {...this.defaultUniforms, ...initialUniforms});
}
}
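/**
* Illustrative sketch (hypothetical filter, names are assumptions): when vertexShader is left
* undefined the default PIXI filter vertex shader is used, so only a fragment shader and default
* uniforms are declared here.
* @example
* class ExampleTintFilter extends AbstractBaseFilter {
*   static defaultUniforms = {tint: [1, 0, 0]};
*   static fragmentShader = `
*   varying vec2 vTextureCoord;
*   uniform sampler2D uSampler;
*   uniform vec3 tint;
*   void main() { gl_FragColor = texture2D(uSampler, vTextureCoord) * vec4(tint, 1.0); }`;
* }
* someContainer.filters = [ExampleTintFilter.create()];
*/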

View File

@@ -0,0 +1,46 @@
/**
* This class defines an interface for masked custom filters
*/
class AbstractBaseMaskFilter extends AbstractBaseFilter {
/**
* The default vertex shader used by all instances of AbstractBaseMaskFilter
* @type {string}
*/
static vertexShader = `
attribute vec2 aVertexPosition;
uniform mat3 projectionMatrix;
uniform vec2 screenDimensions;
uniform vec4 inputSize;
uniform vec4 outputFrame;
varying vec2 vTextureCoord;
varying vec2 vMaskTextureCoord;
vec4 filterVertexPosition( void ) {
vec2 position = aVertexPosition * max(outputFrame.zw, vec2(0.)) + outputFrame.xy;
return vec4((projectionMatrix * vec3(position, 1.0)).xy, 0., 1.);
}
// getting normalized coord for the tile texture
vec2 filterTextureCoord( void ) {
return aVertexPosition * (outputFrame.zw * inputSize.zw);
}
// getting normalized coord for a screen sized mask render texture
vec2 filterMaskTextureCoord( in vec2 textureCoord ) {
return (textureCoord * inputSize.xy + outputFrame.xy) / screenDimensions;
}
void main() {
vTextureCoord = filterTextureCoord();
vMaskTextureCoord = filterMaskTextureCoord(vTextureCoord);
gl_Position = filterVertexPosition();
}`;
/** @override */
apply(filterManager, input, output, clear, currentState) {
this.uniforms.screenDimensions = canvas.screenDimensions;
filterManager.applyFilter(this, input, output, clear);
}
}

View File

@@ -0,0 +1,364 @@
/**
* Apply a vertical or horizontal gaussian blur going inward by using alpha as the penetrating channel.
* @param {boolean} horizontal If the pass is horizontal (true) or vertical (false).
* @param {number} [strength=8] Strength of the blur (distance of sampling).
* @param {number} [quality=4] Number of passes to generate the blur. More passes = Higher quality = Lower Perf.
* @param {number} [resolution=PIXI.Filter.defaultResolution] Resolution of the filter.
* @param {number} [kernelSize=5] Number of kernels to use. More kernels = Higher quality = Lower Perf.
*/
class AlphaBlurFilterPass extends PIXI.Filter {
constructor(horizontal, strength=8, quality=4, resolution=PIXI.Filter.defaultResolution, kernelSize=5) {
const vertSrc = AlphaBlurFilterPass.vertTemplate(kernelSize, horizontal);
const fragSrc = AlphaBlurFilterPass.fragTemplate(kernelSize);
super(vertSrc, fragSrc);
this.horizontal = horizontal;
this.strength = strength;
this.passes = quality;
this.resolution = resolution;
}
/**
* If the pass is horizontal (true) or vertical (false).
* @type {boolean}
*/
horizontal;
/**
* Strength of the blur (distance of sampling).
* @type {number}
*/
strength;
/**
* The number of passes to generate the blur.
* @type {number}
*/
passes;
/* -------------------------------------------- */
/**
* The quality of the filter is defined by its number of passes.
* @returns {number}
*/
get quality() {
return this.passes;
}
set quality(value) {
this.passes = value;
}
/* -------------------------------------------- */
/**
* The strength of the blur filter in pixels.
* @returns {number}
*/
get blur() {
return this.strength;
}
set blur(value) {
this.padding = 1 + (Math.abs(value) * 2);
this.strength = value;
}
/* -------------------------------------------- */
/**
* The kernels containing the gaussian constants.
* @type {Record<number, number[]>}
*/
static GAUSSIAN_VALUES = {
5: [0.153388, 0.221461, 0.250301],
7: [0.071303, 0.131514, 0.189879, 0.214607],
9: [0.028532, 0.067234, 0.124009, 0.179044, 0.20236],
11: [0.0093, 0.028002, 0.065984, 0.121703, 0.175713, 0.198596],
13: [0.002406, 0.009255, 0.027867, 0.065666, 0.121117, 0.174868, 0.197641],
15: [0.000489, 0.002403, 0.009246, 0.02784, 0.065602, 0.120999, 0.174697, 0.197448]
};
/* -------------------------------------------- */
/**
* The fragment template generator
* @param {number} kernelSize The number of kernels to use.
* @returns {string} The generated fragment shader.
*/
static fragTemplate(kernelSize) {
return `
varying vec2 vBlurTexCoords[${kernelSize}];
varying vec2 vTextureCoords;
uniform sampler2D uSampler;
void main(void) {
vec4 finalColor = vec4(0.0);
${this.generateBlurFragSource(kernelSize)}
finalColor.rgb *= clamp(mix(-1.0, 1.0, finalColor.a), 0.0, 1.0);
gl_FragColor = finalColor;
}
`;
}
/* -------------------------------------------- */
/**
* The vertex template generator
* @param {number} kernelSize The number of kernels to use.
* @param {boolean} horizontal If the vertex should handle horizontal or vertical pass.
* @returns {string} The generated vertex shader.
*/
static vertTemplate(kernelSize, horizontal) {
return `
attribute vec2 aVertexPosition;
uniform mat3 projectionMatrix;
uniform float strength;
varying vec2 vBlurTexCoords[${kernelSize}];
varying vec2 vTextureCoords;
uniform vec4 inputSize;
uniform vec4 outputFrame;
vec4 filterVertexPosition( void ) {
vec2 position = aVertexPosition * max(outputFrame.zw, vec2(0.)) + outputFrame.xy;
return vec4((projectionMatrix * vec3(position, 1.0)).xy, 0.0, 1.0);
}
vec2 filterTextureCoord( void ) {
return aVertexPosition * (outputFrame.zw * inputSize.zw);
}
void main(void) {
gl_Position = filterVertexPosition();
vec2 textureCoord = vTextureCoords = filterTextureCoord();
${this.generateBlurVertSource(kernelSize, horizontal)}
}
`;
}
/* -------------------------------------------- */
/**
* Generating the dynamic part of the blur in the fragment
* @param {number} kernelSize The number of kernels to use.
* @returns {string} The dynamic blur part.
*/
static generateBlurFragSource(kernelSize) {
const kernel = AlphaBlurFilterPass.GAUSSIAN_VALUES[kernelSize];
const halfLength = kernel.length;
let value;
let blurLoop = "";
for ( let i = 0; i < kernelSize; i++ ) {
blurLoop += `finalColor += texture2D(uSampler, vBlurTexCoords[${i.toString()}])`;
value = i >= halfLength ? kernelSize - i - 1 : i;
blurLoop += ` * ${kernel[value].toString()};\n`;
}
return blurLoop;
}
/* -------------------------------------------- */
/**
* Generating the dynamic part of the blur in the vertex
* @param {number} kernelSize The number of kernels to use.
* @param {boolean} horizontal If the vertex should handle horizontal or vertical pass.
* @returns {string} The dynamic blur part.
*/
static generateBlurVertSource(kernelSize, horizontal) {
const halfLength = Math.ceil(kernelSize / 2);
let blurLoop = "";
for ( let i = 0; i < kernelSize; i++ ) {
const khl = i - (halfLength - 1);
blurLoop += horizontal
? `vBlurTexCoords[${i.toString()}] = textureCoord + vec2(${khl}.0 * strength, 0.0);`
: `vBlurTexCoords[${i.toString()}] = textureCoord + vec2(0.0, ${khl}.0 * strength);`;
blurLoop += "\n";
}
return blurLoop;
}
/* -------------------------------------------- */
/** @override */
apply(filterManager, input, output, clearMode) {
// Define strength
const ow = output ? output.width : filterManager.renderer.width;
const oh = output ? output.height : filterManager.renderer.height;
this.uniforms.strength = (this.horizontal ? (1 / ow) * (ow / input.width) : (1 / oh) * (oh / input.height))
* this.strength / this.passes;
// Single pass
if ( this.passes === 1 ) {
return filterManager.applyFilter(this, input, output, clearMode);
}
// Multi-pass
const renderTarget = filterManager.getFilterTexture();
const renderer = filterManager.renderer;
let flip = input;
let flop = renderTarget;
// Initial application
this.state.blend = false;
filterManager.applyFilter(this, flip, flop, PIXI.CLEAR_MODES.CLEAR);
// Additional passes
for ( let i = 1; i < this.passes - 1; i++ ) {
filterManager.bindAndClear(flip, PIXI.CLEAR_MODES.BLIT);
this.uniforms.uSampler = flop;
const temp = flop;
flop = flip;
flip = temp;
renderer.shader.bind(this);
renderer.geometry.draw(5); // 5 = PIXI.DRAW_MODES.TRIANGLE_STRIP
}
// Final pass and return filter texture
this.state.blend = true;
filterManager.applyFilter(this, flop, output, clearMode);
filterManager.returnFilterTexture(renderTarget);
}
}
/* -------------------------------------------- */
/**
* Apply a gaussian blur going inward by using alpha as the penetrating channel.
* @param {number} [strength=8] Strength of the blur (distance of sampling).
* @param {number} [quality=4] Number of passes to generate the blur. More passes = Higher quality = Lower Perf.
* @param {number} [resolution=PIXI.Filter.defaultResolution] Resolution of the filter.
* @param {number} [kernelSize=5] Number of kernels to use. More kernels = Higher quality = Lower Perf.
*/
class AlphaBlurFilter extends PIXI.Filter {
constructor(strength=8, quality=4, resolution=PIXI.Filter.defaultResolution, kernelSize=5) {
super();
this.blurXFilter = new AlphaBlurFilterPass(true, strength, quality, resolution, kernelSize);
this.blurYFilter = new AlphaBlurFilterPass(false, strength, quality, resolution, kernelSize);
this.resolution = resolution;
this._repeatEdgePixels = false;
this.quality = quality;
this.blur = strength;
}
/* -------------------------------------------- */
/** @override */
apply(filterManager, input, output, clearMode) {
const xStrength = Math.abs(this.blurXFilter.strength);
const yStrength = Math.abs(this.blurYFilter.strength);
// Blur both directions
if ( xStrength && yStrength ) {
const renderTarget = filterManager.getFilterTexture();
this.blurXFilter.apply(filterManager, input, renderTarget, PIXI.CLEAR_MODES.CLEAR);
this.blurYFilter.apply(filterManager, renderTarget, output, clearMode);
filterManager.returnFilterTexture(renderTarget);
}
// Only vertical
else if ( yStrength ) this.blurYFilter.apply(filterManager, input, output, clearMode);
// Only horizontal
else this.blurXFilter.apply(filterManager, input, output, clearMode);
}
/* -------------------------------------------- */
/**
* Update the filter padding according to the blur strength value (0 if _repeatEdgePixels is active)
*/
updatePadding() {
this.padding = this._repeatEdgePixels ? 0
: Math.max(Math.abs(this.blurXFilter.strength), Math.abs(this.blurYFilter.strength)) * 2;
}
/* -------------------------------------------- */
/**
* The amount of blur is forwarded to the X and Y filters.
* @type {number}
*/
get blur() {
return this.blurXFilter.blur;
}
set blur(value) {
this.blurXFilter.blur = this.blurYFilter.blur = value;
this.updatePadding();
}
/* -------------------------------------------- */
/**
* The quality of blur defines the number of passes used by subsidiary filters.
* @type {number}
*/
get quality() {
return this.blurXFilter.quality;
}
set quality(value) {
this.blurXFilter.quality = this.blurYFilter.quality = value;
}
/* -------------------------------------------- */
/**
* Whether to repeat edge pixels. When enabled, the filter padding is set to 0.
* @type {boolean}
*/
get repeatEdgePixels() {
return this._repeatEdgePixels;
}
set repeatEdgePixels(value) {
this._repeatEdgePixels = value;
this.updatePadding();
}
/* -------------------------------------------- */
/**
* Provided for completeness with PIXI.BlurFilter
* @type {number}
*/
get blurX() {
return this.blurXFilter.blur;
}
set blurX(value) {
this.blurXFilter.blur = value;
this.updatePadding();
}
/* -------------------------------------------- */
/**
* Provided for completeness with PIXI.BlurFilter
* @type {number}
*/
get blurY() {
return this.blurYFilter.blur;
}
set blurY(value) {
this.blurYFilter.blur = value;
this.updatePadding();
}
/* -------------------------------------------- */
/**
* Provided for completeness with PIXI.BlurFilter
* @type {number}
*/
get blendMode() {
return this.blurYFilter.blendMode;
}
set blendMode(value) {
this.blurYFilter.blendMode = value;
}
}
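/* -------------------------------------------- */

/*
 * Illustrative usage sketch (not part of the original source; the container name is hypothetical).
 * AlphaBlurFilter behaves like any other PIXI filter and can be assigned to a display object:
 *
 *   const blur = new AlphaBlurFilter(16, 4);
 *   someContainer.filters = [blur];
 *   blur.blur = 8;                  // forwarded to both the X and Y passes
 *   blur.repeatEdgePixels = true;   // removes the extra filter padding
 */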

View File

@@ -0,0 +1,207 @@
/**
* This filter handles masking and post-processing for visual effects.
*/
class VisualEffectsMaskingFilter extends AbstractBaseMaskFilter {
/** @override */
static create({postProcessModes, ...initialUniforms}={}) {
const fragmentShader = this.fragmentShader(postProcessModes);
const uniforms = {...this.defaultUniforms, ...initialUniforms};
return new this(this.vertexShader, fragmentShader, uniforms);
}
/**
* The post-processing technique ids used to build the fragment shader applied by this filter.
* @type {string[]}
*/
#postProcessModes;
/* -------------------------------------------- */
/**
* Masking modes.
* @enum {number}
*/
static FILTER_MODES = Object.freeze({
BACKGROUND: 0,
ILLUMINATION: 1,
COLORATION: 2
});
/* -------------------------------------------- */
/** @override */
static defaultUniforms = {
tint: [1, 1, 1],
screenDimensions: [1, 1],
enableVisionMasking: true,
visionTexture: null,
darknessLevelTexture: null,
exposure: 0,
contrast: 0,
saturation: 0,
mode: 0,
ambientDarkness: [0, 0, 0],
ambientDaylight: [1, 1, 1],
replacementColor: [0, 0, 0]
};
/* -------------------------------------------- */
/**
* Update the filter shader with new post-process modes.
* @param {string[]} [postProcessModes=[]] New modes to apply.
* @param {object} [uniforms={}] Uniform values to update.
*/
updatePostprocessModes(postProcessModes=[], uniforms={}) {
// Update shader uniforms
for ( let [uniform, value] of Object.entries(uniforms) ) {
if ( uniform in this.uniforms ) this.uniforms[uniform] = value;
}
// Update the shader program if post-processing modes have changed
if ( postProcessModes.equals(this.#postProcessModes) ) return;
this.#postProcessModes = postProcessModes;
this.program = PIXI.Program.from(this.constructor.vertexShader,
this.constructor.fragmentShader(this.#postProcessModes));
}
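/*
 * Illustrative sketch (the call site is assumed): techniques are referenced by the ids defined in
 * POST_PROCESS_TECHNIQUES, and matching uniforms are updated in the same call.
 *
 *   filter.updatePostprocessModes(["EXPOSURE", "CONTRAST"], {exposure: 0.25, contrast: 0.1});
 */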
/* -------------------------------------------- */
/**
* Remove all post-processing modes and reset some key uniforms.
*/
reset() {
this.#postProcessModes = [];
this.program = PIXI.Program.from(this.constructor.vertexShader,
this.constructor.fragmentShader());
const uniforms = ["tint", "exposure", "contrast", "saturation"];
for ( const uniform of uniforms ) {
this.uniforms[uniform] = this.constructor.defaultUniforms[uniform];
}
}
/* -------------------------------------------- */
/** @override */
apply(filterManager, input, output, clear, currentState) {
const c = canvas.colors;
const u = this.uniforms;
if ( u.mode === this.constructor.FILTER_MODES.ILLUMINATION ) {
c.ambientDarkness.applyRGB(u.ambientDarkness);
c.ambientDaylight.applyRGB(u.ambientDaylight);
}
super.apply(filterManager, input, output, clear, currentState);
}
/* -------------------------------------------- */
/**
* Filter post-process techniques.
* @enum {{id: string, glsl: string}}
*/
static POST_PROCESS_TECHNIQUES = {
EXPOSURE: {
id: "EXPOSURE",
glsl: `if ( exposure != 0.0 ) {
finalColor.rgb *= (1.0 + exposure);
}`
},
CONTRAST: {
id: "CONTRAST",
glsl: `if ( contrast != 0.0 ) {
finalColor.rgb = (finalColor.rgb - 0.5) * (contrast + 1.0) + 0.5;
}`
},
SATURATION: {
id: "SATURATION",
glsl: `if ( saturation != 0.0 ) {
float reflection = perceivedBrightness(finalColor.rgb);
finalColor.rgb = mix(vec3(reflection), finalColor.rgb, 1.0 + saturation) * finalColor.a;
}`
}
};
/* -------------------------------------------- */
/**
* Memory allocations and headers for the VisualEffectsMaskingFilter.
* @type {string}
*/
static fragmentHeader = `
varying vec2 vTextureCoord;
varying vec2 vMaskTextureCoord;
uniform float contrast;
uniform float saturation;
uniform float exposure;
uniform vec3 ambientDarkness;
uniform vec3 ambientDaylight;
uniform vec3 replacementColor;
uniform vec3 tint;
uniform sampler2D uSampler;
uniform sampler2D visionTexture;
uniform sampler2D darknessLevelTexture;
uniform bool enableVisionMasking;
uniform int mode;
vec4 baseColor;
vec4 finalColor;
${this.CONSTANTS}
${this.PERCEIVED_BRIGHTNESS}
vec4 getReplacementColor() {
if ( mode == 0 ) return vec4(0.0);
if ( mode == 2 ) return vec4(replacementColor, 1.0);
float darknessLevel = texture2D(darknessLevelTexture, vMaskTextureCoord).r;
return vec4(mix(ambientDaylight, ambientDarkness, darknessLevel), 1.0);
}
`;
/* -------------------------------------------- */
/**
* The fragment core code.
* @type {string}
*/
static fragmentCore = `
// Get the base color from the filter sampler
finalColor = texture2D(uSampler, vTextureCoord);
// Handling vision masking
if ( enableVisionMasking ) {
finalColor = mix( getReplacementColor(),
finalColor,
texture2D(visionTexture, vMaskTextureCoord).r);
}
`;
/* -------------------------------------------- */
/**
* Construct filter post-processing code according to the provided modes.
* @param {string[]} postProcessModes Post-process technique ids used to construct the shader code.
* @returns {string} The constructed shader code for post-process techniques.
*/
static fragmentPostProcess(postProcessModes=[]) {
return postProcessModes.reduce((s, t) => s + (this.POST_PROCESS_TECHNIQUES[t]?.glsl ?? ""), "");
}
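/*
 * For example (reconstructed from POST_PROCESS_TECHNIQUES above), fragmentPostProcess(["EXPOSURE"])
 * returns the EXPOSURE glsl block verbatim, and multiple ids are concatenated in the order provided.
 */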
/* -------------------------------------------- */
/**
* Specify the fragment shader to use according to mode
* @param {string[]} postProcessModes
* @returns {string}
* @override
*/
static fragmentShader(postProcessModes=[]) {
return `
${this.fragmentHeader}
void main() {
${this.fragmentCore}
${this.fragmentPostProcess(postProcessModes)}
if ( enableVisionMasking ) finalColor *= vec4(tint, 1.0);
gl_FragColor = finalColor;
}
`;
}
}
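/* -------------------------------------------- */

/*
 * Illustrative usage sketch (the chosen modes and uniform values are hypothetical):
 *
 *   const filter = VisualEffectsMaskingFilter.create({
 *     postProcessModes: ["EXPOSURE", "SATURATION"],
 *     mode: VisualEffectsMaskingFilter.FILTER_MODES.COLORATION,
 *     exposure: 0.2,
 *     saturation: -0.1
 *   });
 */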

View File

@@ -0,0 +1,122 @@
/**
* A filter used to apply color adjustments and other modifications to the environment.
*/
class PrimaryCanvasGroupAmbienceFilter extends AbstractBaseMaskFilter {
/** @override */
static fragmentShader = `
precision ${PIXI.settings.PRECISION_FRAGMENT} float;
// Base ambience uniforms
uniform vec3 baseTint;
uniform float baseIntensity;
uniform float baseLuminosity;
uniform float baseSaturation;
uniform float baseShadows;
// Darkness ambience uniforms
uniform vec3 darkTint;
uniform float darkIntensity;
uniform float darkLuminosity;
uniform float darkSaturation;
uniform float darkShadows;
// Cycle enabled or disabled
uniform bool cycle;
// Textures
uniform sampler2D darknessLevelTexture;
uniform sampler2D uSampler;
// Varyings
varying vec2 vTextureCoord;
varying vec2 vMaskTextureCoord;
${this.CONSTANTS}
${this.COLOR_SPACES}
// Ambience parameters computed according to darkness level (per pixel)
vec3 tint;
float intensity;
float luminosity;
float saturation;
float shadows;
/* ----------------------------------------------------------------- */
/* Compute ambience parameters according to darkness level texture */
/* ----------------------------------------------------------------- */
void computeAmbienceParameters() {
float dl = texture2D(darknessLevelTexture, vMaskTextureCoord).r;
// Determine the tint based on base and dark ambience parameters
if ( baseIntensity > 0.0 ) tint = (cycle && darkIntensity > 0.0) ? mix(baseTint, darkTint, dl) : baseTint;
else if ( darkIntensity > 0.0 && cycle ) tint = darkTint;
else tint = vec3(1.0);
// Compute the luminosity based on the cycle condition
float luminosityBase = cycle ? mix(baseLuminosity, darkLuminosity, dl) : baseLuminosity;
luminosity = luminosityBase * (luminosityBase >= 0.0 ? 1.2 : 0.8);
// Compute the shadows based on the cycle condition
shadows = (cycle ? mix(baseShadows, darkShadows, dl) : baseShadows) * 0.15;
// Using a non-linear easing with intensity input value: x^2
intensity = cycle ? mix(baseIntensity * baseIntensity, darkIntensity * darkIntensity, dl)
: baseIntensity * baseIntensity;
// Compute the saturation based on the cycle condition
saturation = cycle ? mix(baseSaturation, darkSaturation, dl) : baseSaturation;
}
/* -------------------------------------------- */
void main() {
vec4 baseColor = texture2D(uSampler, vTextureCoord);
if ( baseColor.a > 0.0 ) {
computeAmbienceParameters();
// Unmultiply rgb with alpha channel
baseColor.rgb /= baseColor.a;
// Apply shadows and luminosity on sRGB values
if ( shadows > 0.0 ) {
float l = luminance(srgb2linearFast(baseColor.rgb));
baseColor.rgb *= min(l / shadows, 1.0);
}
if ( luminosity != 0.0 ) baseColor.rgb *= (1.0 + luminosity);
baseColor.rgb = srgb2linear(baseColor.rgb); // convert to linear before saturating and tinting
// Apply saturation and tint on linearized rgb
if ( saturation != 0.0 ) baseColor.rgb = mix(linear2grey(baseColor.rgb), baseColor.rgb, 1.0 + saturation);
if ( intensity > 0.0 ) baseColor.rgb = tintColorLinear(colorClamp(baseColor.rgb), tint, intensity);
else baseColor.rgb = colorClamp(baseColor.rgb);
baseColor.rgb = linear2srgb(baseColor.rgb); // convert back to sRGB
// Multiply rgb with alpha channel
baseColor.rgb *= baseColor.a;
}
// Output the result
gl_FragColor = baseColor;
}
`;
/** @override */
static defaultUniforms = {
uSampler: null,
darknessLevelTexture: null,
cycle: true,
baseTint: [1, 1, 1], // Important: The base tint uniform must be in linear RGB!
baseIntensity: 0,
baseLuminosity: 0,
baseSaturation: 0,
baseShadows: 0,
darkTint: [1, 1, 1], // Important: The dark tint uniform must be in linear RGB!
darkIntensity: 0,
darkLuminosity: 0,
darkSaturation: 0,
darkShadows: 0
};
}
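/* -------------------------------------------- */

/*
 * Note (a sketch, not part of the original source): baseTint and darkTint must be supplied in linear RGB.
 * One way to convert an sRGB color in [0, 1] using the standard sRGB transfer function:
 *
 *   const srgb = [0.8, 0.6, 0.4];   // hypothetical sRGB tint
 *   const toLinear = c => c <= 0.04045 ? c / 12.92 : Math.pow((c + 0.055) / 1.055, 2.4);
 *   filter.uniforms.baseTint = srgb.map(toLinear);
 */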

View File

@@ -0,0 +1,148 @@
/**
* A filter which implements an inner or outer glow around the source texture.
* Inspired by https://github.com/pixijs/filters/tree/main/filters/glow
* @license MIT
*/
class GlowOverlayFilter extends AbstractBaseFilter {
/** @override */
padding = 6;
/**
* The inner strength of the glow.
* @type {number}
*/
innerStrength = 3;
/**
* The outer strength of the glow.
* @type {number}
*/
outerStrength = 3;
/**
* Should this filter auto-animate?
* @type {boolean}
*/
animated = true;
/** @inheritdoc */
static defaultUniforms = {
distance: 10,
glowColor: [1, 1, 1, 1],
quality: 0.1,
time: 0,
knockout: true,
alpha: 1
};
/**
* Dynamically create the fragment shader used for filters of this type.
* @param {number} quality The angular sampling quality of the glow.
* @param {number} distance The glow sampling distance, in pixels.
* @returns {string} The generated fragment shader source.
*/
static createFragmentShader(quality, distance) {
return `
precision mediump float;
varying vec2 vTextureCoord;
varying vec4 vColor;
uniform sampler2D uSampler;
uniform float innerStrength;
uniform float outerStrength;
uniform float alpha;
uniform vec4 glowColor;
uniform vec4 inputSize;
uniform vec4 inputClamp;
uniform bool knockout;
const float PI = 3.14159265358979323846264;
const float DIST = ${distance.toFixed(0)}.0;
const float ANGLE_STEP_SIZE = min(${(1 / quality / distance).toFixed(7)}, PI * 2.0);
const float ANGLE_STEP_NUM = ceil(PI * 2.0 / ANGLE_STEP_SIZE);
const float MAX_TOTAL_ALPHA = ANGLE_STEP_NUM * DIST * (DIST + 1.0) / 2.0;
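// Each sampled angle accumulates weights DIST, DIST - 1, ..., 1 (one per sampling ring), whose sum is
// DIST * (DIST + 1) / 2; multiplied by the number of angle steps this yields the maximum achievable
// totalAlpha, used below to normalize alphaRatio into [0, 1].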
float getClip(in vec2 uv) {
return step(3.5,
step(inputClamp.x, uv.x) +
step(inputClamp.y, uv.y) +
step(uv.x, inputClamp.z) +
step(uv.y, inputClamp.w));
}
void main(void) {
vec2 px = inputSize.zw;
float totalAlpha = 0.0;
vec2 direction;
vec2 displaced;
vec4 curColor;
for (float angle = 0.0; angle < PI * 2.0; angle += ANGLE_STEP_SIZE) {
direction = vec2(cos(angle), sin(angle)) * px;
for (float curDistance = 0.0; curDistance < DIST; curDistance++) {
displaced = vTextureCoord + direction * (curDistance + 1.0);
curColor = texture2D(uSampler, displaced) * getClip(displaced);
totalAlpha += (DIST - curDistance) * (smoothstep(0.5, 1.0, curColor.a));
}
}
curColor = texture2D(uSampler, vTextureCoord);
float alphaRatio = (totalAlpha / MAX_TOTAL_ALPHA);
float innerGlowAlpha = (1.0 - alphaRatio) * innerStrength * smoothstep(0.6, 1.0, curColor.a);
float innerGlowStrength = min(1.0, innerGlowAlpha);
vec4 innerColor = mix(curColor, glowColor, innerGlowStrength);
float outerGlowAlpha = alphaRatio * outerStrength * (1.0 - smoothstep(0.35, 1.0, curColor.a));
float outerGlowStrength = min(1.0 - innerColor.a, outerGlowAlpha);
vec4 outerGlowColor = outerGlowStrength * glowColor.rgba;
if ( knockout ) {
float resultAlpha = outerGlowAlpha + innerGlowAlpha;
gl_FragColor = mix(vec4(glowColor.rgb * resultAlpha, resultAlpha), vec4(0.0), curColor.a);
}
else {
vec4 outerGlowColor = outerGlowStrength * glowColor.rgba * alpha;
gl_FragColor = innerColor + outerGlowColor;
}
}`;
}
/** @inheritdoc */
static vertexShader = `
precision mediump float;
attribute vec2 aVertexPosition;
uniform mat3 projectionMatrix;
uniform vec4 inputSize;
uniform vec4 outputFrame;
varying vec2 vTextureCoord;
void main(void) {
vec2 position = aVertexPosition * max(outputFrame.zw, vec2(0.0)) + outputFrame.xy;
gl_Position = vec4((projectionMatrix * vec3(position, 1.0)).xy, 0.0, 1.0);
vTextureCoord = aVertexPosition * (outputFrame.zw * inputSize.zw);
}`;
/** @inheritdoc */
static create(initialUniforms={}) {
const uniforms = {...this.defaultUniforms, ...initialUniforms};
const fragmentShader = this.createFragmentShader(uniforms.quality, uniforms.distance);
return new this(this.vertexShader, fragmentShader, uniforms);
}
/* -------------------------------------------- */
/** @override */
apply(filterManager, input, output, clear) {
let strength = canvas.stage.worldTransform.d;
if ( this.animated && !canvas.photosensitiveMode ) {
const time = canvas.app.ticker.lastTime;
strength *= Math.oscillation(0.5, 2.0, time, 2000);
}
this.uniforms.outerStrength = this.outerStrength * strength;
this.uniforms.innerStrength = this.innerStrength * strength;
filterManager.applyFilter(this, input, output, clear);
}
}
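/* -------------------------------------------- */

/*
 * Illustrative usage sketch (the target mesh is hypothetical): create() compiles the fragment shader for
 * the requested quality/distance uniforms, after which the filter attaches like any PIXI filter.
 *
 *   const glow = GlowOverlayFilter.create({glowColor: [1.0, 0.85, 0.2, 1.0], quality: 0.2, distance: 12});
 *   someMesh.filters = [glow];
 *   glow.animated = false;   // disable the oscillating strength
 */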

Some files were not shown because too many files have changed in this diff.