This commit is contained in:
2025-01-04 00:34:03 +01:00
parent 41829408dc
commit 0ca14bbc19
18111 changed files with 1871397 additions and 0 deletions

View File

@@ -0,0 +1,721 @@
/**
* @typedef {Map<number,PolygonVertex>} VertexMap
*/
/**
* @typedef {Set<Edge>} EdgeSet
*/
/**
* @typedef {Ray} PolygonRay
* @property {CollisionResult} result
*/
/**
* A PointSourcePolygon implementation that uses CCW (counter-clockwise) geometry orientation.
* Sweep around the origin, accumulating collision points based on the set of active walls.
* This algorithm was created with valuable contributions from https://github.com/caewok
*
* @extends PointSourcePolygon
*/
class ClockwiseSweepPolygon extends PointSourcePolygon {

  /**
   * A mapping of vertices which define potential collision points
   * @type {VertexMap}
   */
  vertices = new Map();

  /**
   * The set of edges which define potential boundaries of the polygon
   * @type {EdgeSet}
   */
  edges = new Set();

  /**
   * A collection of rays which are fired at vertices
   * @type {PolygonRay[]}
   */
  rays = [];

  /**
   * The squared maximum distance of a ray that is needed for this Scene.
   * @type {number}
   */
  #rayDistance2;

  /* -------------------------------------------- */
  /*  Initialization                              */
  /* -------------------- ------------------------ */

  /** @inheritDoc */
  initialize(origin, config) {
    super.initialize(origin, config);
    this.#rayDistance2 = Math.pow(canvas.dimensions.maxR, 2);
  }

  /* -------------------------------------------- */

  /** @inheritDoc */
  clone() {
    const poly = super.clone();
    for ( const attr of ["vertices", "edges", "rays"] ) { // Shallow clone only
      poly[attr] = this[attr];
    }
    // BUG FIX: a #-private field cannot be accessed with bracket notation — this["#rayDistance2"]
    // is an ordinary string-keyed property, not the private field — so it must be copied explicitly.
    poly.#rayDistance2 = this.#rayDistance2;
    return poly;
  }

  /* -------------------------------------------- */
  /*  Computation                                 */
  /* -------------------------------------------- */

  /** @inheritdoc */
  _compute() {

    // Clear prior data
    this.points = [];
    this.rays = [];
    this.vertices.clear();
    this.edges.clear();

    // Step 1 - Identify candidate edges
    this._identifyEdges();

    // Step 2 - Construct vertex mapping
    this._identifyVertices();

    // Step 3 - Radial sweep over endpoints
    this._executeSweep();

    // Step 4 - Constrain with boundary shapes
    this._constrainBoundaryShapes();
  }

  /* -------------------------------------------- */
  /*  Edge Configuration                          */
  /* -------------------------------------------- */

  /**
   * Get the super-set of walls which could potentially apply to this polygon.
   * Define a custom collision test used by the Quadtree to obtain candidate Walls.
   * @protected
   */
  _identifyEdges() {
    const bounds = this.config.boundingBox = this._defineBoundingBox();
    const edgeTypes = this._determineEdgeTypes();
    for ( const edge of canvas.edges.values() ) {
      if ( this._testEdgeInclusion(edge, edgeTypes, bounds) ) {
        this.edges.add(edge.clone());
      }
    }
  }

  /* -------------------------------------------- */

  /**
   * Determine the edge types and their manner of inclusion for this polygon instance.
   * @returns {Record<EdgeTypes, 0|1|2>}
   * @protected
   */
  _determineEdgeTypes() {
    const {type, useInnerBounds, includeDarkness} = this.config;
    const edgeTypes = {};
    if ( type !== "universal" ) edgeTypes.wall = 1;
    if ( includeDarkness ) edgeTypes.darkness = 1;
    if ( useInnerBounds && canvas.scene.padding ) edgeTypes.innerBounds = 2;
    else edgeTypes.outerBounds = 2;
    return edgeTypes;
  }

  /* -------------------------------------------- */

  /**
   * Test whether a wall should be included in the computed polygon for a given origin and type
   * @param {Edge} edge                     The Edge being considered
   * @param {Record<EdgeTypes, 0|1|2>} edgeTypes Which types of edges are being used? 0=no, 1=maybe, 2=always
   * @param {PIXI.Rectangle} bounds         The overall bounding box
   * @returns {boolean}                     Should the edge be included?
   * @protected
   */
  _testEdgeInclusion(edge, edgeTypes, bounds) {
    const { type, boundaryShapes, useThreshold, wallDirectionMode, externalRadius } = this.config;

    // Only include edges of the appropriate type
    const m = edgeTypes[edge.type];
    if ( !m ) return false;
    if ( m === 2 ) return true;

    // Test for inclusion in the overall bounding box
    if ( !bounds.lineSegmentIntersects(edge.a, edge.b, { inside: true }) ) return false;

    // Specific boundary shapes may impose additional requirements
    for ( const shape of boundaryShapes ) {
      if ( shape._includeEdge && !shape._includeEdge(edge.a, edge.b) ) return false;
    }

    // Ignore edges which do not block this polygon type
    if ( edge[type] === CONST.WALL_SENSE_TYPES.NONE ) return false;

    // Ignore edges which are collinear with the origin
    const side = edge.orientPoint(this.origin);
    if ( !side ) return false;

    // Ignore one-directional walls which are facing away from the origin
    const wdm = PointSourcePolygon.WALL_DIRECTION_MODES;
    if ( edge.direction && (wallDirectionMode !== wdm.BOTH) ) {
      if ( (wallDirectionMode === wdm.NORMAL) === (side === edge.direction) ) return false;
    }

    // Ignore threshold walls which do not satisfy their required proximity
    if ( useThreshold ) return !edge.applyThreshold(type, this.origin, externalRadius);
    return true;
  }

  /* -------------------------------------------- */

  /**
   * Compute the aggregate bounding box which is the intersection of all boundary shapes.
   * Round and pad the resulting rectangle by 1 pixel to ensure it always contains the origin.
   * @returns {PIXI.Rectangle}
   * @protected
   */
  _defineBoundingBox() {
    let b = this.config.useInnerBounds ? canvas.dimensions.sceneRect : canvas.dimensions.rect;
    for ( const shape of this.config.boundaryShapes ) {
      b = b.intersection(shape.getBounds());
    }
    return new PIXI.Rectangle(b.x, b.y, b.width, b.height).normalize().ceil().pad(1);
  }

  /* -------------------------------------------- */
  /*  Vertex Identification                       */
  /* -------------------------------------------- */

  /**
   * Consolidate all vertices from identified edges and register them as part of the vertex mapping.
   * @protected
   */
  _identifyVertices() {
    const edgeMap = new Map();
    for ( let edge of this.edges ) {
      edgeMap.set(edge.id, edge);

      // Create or reference vertex A
      const ak = foundry.canvas.edges.PolygonVertex.getKey(edge.a.x, edge.a.y);
      if ( this.vertices.has(ak) ) edge.vertexA = this.vertices.get(ak);
      else {
        edge.vertexA = new foundry.canvas.edges.PolygonVertex(edge.a.x, edge.a.y);
        this.vertices.set(ak, edge.vertexA);
      }

      // Create or reference vertex B
      const bk = foundry.canvas.edges.PolygonVertex.getKey(edge.b.x, edge.b.y);
      if ( this.vertices.has(bk) ) edge.vertexB = this.vertices.get(bk);
      else {
        edge.vertexB = new foundry.canvas.edges.PolygonVertex(edge.b.x, edge.b.y);
        this.vertices.set(bk, edge.vertexB);
      }

      // Learn edge orientation with respect to the origin and ensure B is clockwise of A
      const o = foundry.utils.orient2dFast(this.origin, edge.vertexA, edge.vertexB);
      if ( o > 0 ) Object.assign(edge, {vertexA: edge.vertexB, vertexB: edge.vertexA}); // Reverse vertices
      if ( o !== 0 ) { // Attach non-collinear edges
        edge.vertexA.attachEdge(edge, -1, this.config.type);
        edge.vertexB.attachEdge(edge, 1, this.config.type);
      }
    }

    // Add edge intersections
    this._identifyIntersections(edgeMap);
  }

  /* -------------------------------------------- */

  /**
   * Add additional vertices for intersections between edges.
   * @param {Map<string, Edge>} edgeMap
   * @protected
   */
  _identifyIntersections(edgeMap) {
    const processed = new Set();
    for ( let edge of this.edges ) {
      for ( const x of edge.intersections ) {

        // Is the intersected edge also included in the polygon?
        const other = edgeMap.get(x.edge.id);
        if ( !other || processed.has(other) ) continue;
        const i = x.intersection;

        // Register the intersection point as a vertex
        const vk = foundry.canvas.edges.PolygonVertex.getKey(Math.round(i.x), Math.round(i.y));
        let v = this.vertices.get(vk);
        if ( !v ) {
          v = new foundry.canvas.edges.PolygonVertex(i.x, i.y);
          v._intersectionCoordinates = i;
          this.vertices.set(vk, v);
        }

        // Attach edges to the intersection vertex
        // Due to rounding, it is possible for an edge to be completely cw or ccw or only one of the two
        // We know from _identifyVertices that vertex B is clockwise of vertex A for every edge.
        // It is important that we use the true intersection coordinates (i) for this orientation test.
        if ( !v.edges.has(edge) ) {
          const dir = foundry.utils.orient2dFast(this.origin, edge.vertexB, i) < 0 ? 1 // Edge is fully CCW of v
            : (foundry.utils.orient2dFast(this.origin, edge.vertexA, i) > 0 ? -1 : 0); // Edge is fully CW of v
          v.attachEdge(edge, dir, this.config.type);
        }
        if ( !v.edges.has(other) ) {
          const dir = foundry.utils.orient2dFast(this.origin, other.vertexB, i) < 0 ? 1 // Other is fully CCW of v
            : (foundry.utils.orient2dFast(this.origin, other.vertexA, i) > 0 ? -1 : 0); // Other is fully CW of v
          v.attachEdge(other, dir, this.config.type);
        }
      }
      processed.add(edge);
    }
  }

  /* -------------------------------------------- */
  /*  Radial Sweep                                */
  /* -------------------------------------------- */

  /**
   * Execute the sweep over wall vertices
   * @private
   */
  _executeSweep() {

    // Initialize the set of active walls
    const activeEdges = this._initializeActiveEdges();

    // Sort vertices from clockwise to counter-clockwise and begin the sweep
    const vertices = this._sortVertices();

    // Iterate through the vertices, adding polygon points
    let i = 1;
    for ( const vertex of vertices ) {
      if ( vertex._visited ) continue;
      vertex._index = i++;
      this.#updateActiveEdges(vertex, activeEdges);

      // Include collinear vertices in this iteration of the sweep, treating their edges as active also
      const hasCollinear = vertex.collinearVertices.size > 0;
      if ( hasCollinear ) {
        this.#includeCollinearVertices(vertex, vertex.collinearVertices);
        for ( const cv of vertex.collinearVertices ) {
          cv._index = i++;
          this.#updateActiveEdges(cv, activeEdges);
        }
      }

      // Determine the result of the sweep for the given vertex
      this._determineSweepResult(vertex, activeEdges, hasCollinear);
    }
  }

  /* -------------------------------------------- */

  /**
   * Include collinear vertices until they have all been added.
   * Do not include the original vertex in the set.
   * @param {PolygonVertex} vertex    The current vertex
   * @param {PolygonVertexSet} collinearVertices
   */
  #includeCollinearVertices(vertex, collinearVertices) {
    for ( const cv of collinearVertices) {
      for ( const ccv of cv.collinearVertices ) {
        collinearVertices.add(ccv);
      }
    }
    collinearVertices.delete(vertex);
  }

  /* -------------------------------------------- */

  /**
   * Update active edges at a given vertex
   * Remove counter-clockwise edges which have now concluded.
   * Add clockwise edges which are ongoing or beginning.
   * @param {PolygonVertex} vertex   The current vertex
   * @param {EdgeSet} activeEdges    A set of currently active edges
   */
  #updateActiveEdges(vertex, activeEdges) {
    for ( const ccw of vertex.ccwEdges ) {
      if ( !vertex.cwEdges.has(ccw) ) activeEdges.delete(ccw);
    }
    for ( const cw of vertex.cwEdges ) {
      if ( cw.vertexA._visited && cw.vertexB._visited ) continue; // Safeguard in case we have already visited the edge
      activeEdges.add(cw);
    }
    vertex._visited = true; // Record that we have already visited this vertex
  }

  /* -------------------------------------------- */

  /**
   * Determine the initial set of active edges as those which intersect with the initial ray
   * @returns {EdgeSet}             A set of initially active edges
   * @private
   */
  _initializeActiveEdges() {
    // NOTE(review): #rayDistance2 is a squared distance used here as a linear x-offset. It overshoots
    // maxR, which appears harmless since any point due west beyond all edges works — confirm intentional.
    const initial = {x: Math.round(this.origin.x - this.#rayDistance2), y: this.origin.y};
    const edges = new Set();
    for ( let edge of this.edges ) {
      const x = foundry.utils.lineSegmentIntersects(this.origin, initial, edge.vertexA, edge.vertexB);
      if ( x ) edges.add(edge);
    }
    return edges;
  }

  /* -------------------------------------------- */

  /**
   * Sort vertices clockwise from the initial ray (due west).
   * @returns {PolygonVertex[]}             The array of sorted vertices
   * @private
   */
  _sortVertices() {
    if ( !this.vertices.size ) return [];
    let vertices = Array.from(this.vertices.values());
    const o = this.origin;

    // Sort vertices
    vertices.sort((a, b) => {

      // Use true intersection coordinates if they are defined
      let pA = a._intersectionCoordinates || a;
      let pB = b._intersectionCoordinates || b;

      // Sort by hemisphere
      const ya = pA.y > o.y ? 1 : -1;
      const yb = pB.y > o.y ? 1 : -1;
      if ( ya !== yb ) return ya; // Sort N, S

      // Sort by quadrant
      const qa = pA.x < o.x ? -1 : 1;
      const qb = pB.x < o.x ? -1 : 1;
      if ( qa !== qb ) { // Sort NW, NE, SE, SW
        if ( ya === -1 ) return qa;
        else return -qa;
      }

      // Sort clockwise within quadrant
      const orientation = foundry.utils.orient2dFast(o, pA, pB);
      if ( orientation !== 0 ) return orientation;

      // At this point, we know points are collinear; track for later processing.
      a.collinearVertices.add(b);
      b.collinearVertices.add(a);

      // Otherwise, sort closer points first
      a._d2 ||= Math.pow(pA.x - o.x, 2) + Math.pow(pA.y - o.y, 2);
      b._d2 ||= Math.pow(pB.x - o.x, 2) + Math.pow(pB.y - o.y, 2);
      return a._d2 - b._d2;
    });
    return vertices;
  }

  /* -------------------------------------------- */

  /**
   * Test whether a target vertex is behind some closer active edge.
   * If the vertex is to the left of the edge, is must be behind the edge relative to origin.
   * If the vertex is collinear with the edge, it should be considered "behind" and ignored.
   * We know edge.vertexA is ccw to edge.vertexB because of the logic in _identifyVertices.
   * @param {PolygonVertex} vertex      The target vertex
   * @param {EdgeSet} activeEdges       The set of active edges
   * @returns {{isBehind: boolean, wasLimited: boolean}} Is the target vertex behind some closer edge?
   * @private
   */
  _isVertexBehindActiveEdges(vertex, activeEdges) {
    let wasLimited = false;
    for ( let edge of activeEdges ) {
      if ( vertex.edges.has(edge) ) continue;
      if ( foundry.utils.orient2dFast(edge.vertexA, edge.vertexB, vertex) > 0 ) {
        if ( ( edge.isLimited(this.config.type) ) && !wasLimited ) wasLimited = true;
        else return {isBehind: true, wasLimited};
      }
    }
    return {isBehind: false, wasLimited};
  }

  /* -------------------------------------------- */

  /**
   * Determine the result for the sweep at a given vertex
   * @param {PolygonVertex} vertex      The target vertex
   * @param {EdgeSet} activeEdges       The set of active edges
   * @param {boolean} hasCollinear      Are there collinear vertices behind the target vertex?
   * @private
   */
  _determineSweepResult(vertex, activeEdges, hasCollinear=false) {

    // Determine whether the target vertex is behind some other active edge
    const {isBehind, wasLimited} = this._isVertexBehindActiveEdges(vertex, activeEdges);

    // Case 1 - Some vertices can be ignored because they are behind other active edges
    if ( isBehind ) return;

    // Construct the CollisionResult object
    const result = new foundry.canvas.edges.CollisionResult({
      target: vertex,
      cwEdges: vertex.cwEdges,
      ccwEdges: vertex.ccwEdges,
      isLimited: vertex.isLimited,
      isBehind,
      wasLimited
    });

    // Case 2 - No counter-clockwise edge, so begin a new edge
    // Note: activeEdges always contain the vertex edge, so never empty
    const nccw = vertex.ccwEdges.size;
    if ( !nccw ) {
      this._switchEdge(result, activeEdges);
      result.collisions.forEach(pt => this.addPoint(pt));
      return;
    }

    // Case 3 - Limited edges in both directions
    // We can only guarantee this case if we don't have collinear endpoints
    const ccwLimited = !result.wasLimited && vertex.isLimitingCCW;
    const cwLimited = !result.wasLimited && vertex.isLimitingCW;
    if ( !hasCollinear && cwLimited && ccwLimited ) return;

    // Case 4 - Non-limited edges in both directions
    if ( !ccwLimited && !cwLimited && nccw && vertex.cwEdges.size ) {
      result.collisions.push(result.target);
      this.addPoint(result.target);
      return;
    }

    // Case 5 - Otherwise switching edges or edge types
    this._switchEdge(result, activeEdges);
    result.collisions.forEach(pt => this.addPoint(pt));
  }

  /* -------------------------------------------- */

  /**
   * Switch to a new active edge.
   * Moving from the origin, a collision that first blocks a side must be stored as a polygon point.
   * Subsequent collisions blocking that side are ignored. Once both sides are blocked, we are done.
   *
   * Collisions that limit a side will block if that side was previously limited.
   *
   * If neither side is blocked and the ray internally collides with a non-limited edge, we skip without adding
   * polygon endpoints. Sight is unaffected before this edge, and the internal collision can be ignored.
   * @private
   *
   * @param {CollisionResult} result    The pending collision result
   * @param {EdgeSet} activeEdges       The set of currently active edges
   */
  _switchEdge(result, activeEdges) {
    const origin = this.origin;

    // Construct the ray from the origin
    const ray = Ray.towardsPointSquared(origin, result.target, this.#rayDistance2);
    ray.result = result;
    this.rays.push(ray); // For visualization and debugging

    // Create a sorted array of collisions containing the target vertex, other collinear vertices, and collision points
    const vertices = [result.target, ...result.target.collinearVertices];
    const keys = new Set();
    for ( const v of vertices ) {
      keys.add(v.key);
      v._d2 ??= Math.pow(v.x - origin.x, 2) + Math.pow(v.y - origin.y, 2);
    }
    this.#addInternalEdgeCollisions(vertices, keys, ray, activeEdges);
    vertices.sort((a, b) => a._d2 - b._d2);

    // As we iterate over intersection points we will define the insertion method
    let insert = undefined;
    const c = result.collisions;
    for ( const x of vertices ) {

      if ( x.isInternal ) {  // Handle internal collisions
        // If neither side yet blocked and this is a non-limited edge, return
        if ( !result.blockedCW && !result.blockedCCW && !x.isLimited ) return;

        // Assume any edge is either limited or normal, so if not limited, must block. If already limited, must block
        result.blockedCW ||= !x.isLimited || result.limitedCW;
        result.blockedCCW ||= !x.isLimited || result.limitedCCW;
        result.limitedCW = true;
        result.limitedCCW = true;

      } else {  // Handle true endpoints
        result.blockedCW ||= (result.limitedCW && x.isLimitingCW) || x.isBlockingCW;
        result.blockedCCW ||= (result.limitedCCW && x.isLimitingCCW) || x.isBlockingCCW;
        result.limitedCW ||= x.isLimitingCW;
        result.limitedCCW ||= x.isLimitingCCW;
      }

      // Define the insertion method and record a collision point
      if ( result.blockedCW ) {
        insert ||= c.unshift;
        if ( !result.blockedCWPrev ) insert.call(c, x);
      }
      if ( result.blockedCCW ) {
        insert ||= c.push;
        if ( !result.blockedCCWPrev ) insert.call(c, x);
      }

      // Update blocking flags
      if ( result.blockedCW && result.blockedCCW ) return;
      result.blockedCWPrev ||= result.blockedCW;
      result.blockedCCWPrev ||= result.blockedCCW;
    }
  }

  /* -------------------------------------------- */

  /**
   * Identify the collision points between an emitted Ray and a set of active edges.
   * @param {PolygonVertex[]} vertices      Active vertices
   * @param {Set<number>} keys              Active vertex keys
   * @param {PolygonRay} ray                The candidate ray to test
   * @param {EdgeSet} activeEdges           The set of edges to check for collisions against the ray
   */
  #addInternalEdgeCollisions(vertices, keys, ray, activeEdges) {
    for ( const edge of activeEdges ) {
      if ( keys.has(edge.vertexA.key) || keys.has(edge.vertexB.key) ) continue;
      const x = foundry.utils.lineLineIntersection(ray.A, ray.B, edge.vertexA, edge.vertexB);
      if ( !x ) continue;
      const c = foundry.canvas.edges.PolygonVertex.fromPoint(x);
      c.attachEdge(edge, 0, this.config.type);
      c.isInternal = true;
      c._d2 = Math.pow(x.x - ray.A.x, 2) + Math.pow(x.y - ray.A.y, 2);
      vertices.push(c);
    }
  }

  /* -------------------------------------------- */
  /*  Collision Testing                           */
  /* -------------------------------------------- */

  /** @override */
  _testCollision(ray, mode) {
    const {debug, type} = this.config;

    // Identify candidate edges
    this._identifyEdges();

    // Identify collision points
    let collisions = new Map();
    for ( const edge of this.edges ) {
      const x = foundry.utils.lineSegmentIntersection(this.origin, ray.B, edge.a, edge.b);
      if ( !x || (x.t0 <= 0) ) continue;
      if ( (mode === "any") && (!edge.isLimited(type) || collisions.size) ) return true;
      let c = foundry.canvas.edges.PolygonVertex.fromPoint(x, {distance: x.t0});
      if ( collisions.has(c.key) ) c = collisions.get(c.key);
      else collisions.set(c.key, c);
      c.attachEdge(edge, 0, type);
    }
    if ( mode === "any" ) return false;

    // Sort collisions
    collisions = Array.from(collisions.values()).sort((a, b) => a._distance - b._distance);
    if ( collisions[0]?.isLimited ) collisions.shift();

    // Visualize result
    if ( debug ) this._visualizeCollision(ray, collisions);

    // Return collision result
    if ( mode === "all" ) return collisions;
    else return collisions[0] || null;
  }

  /* -------------------------------------------- */
  /*  Visualization                               */
  /* -------------------------------------------- */

  /** @override */
  visualize() {
    let dg = canvas.controls.debug;
    dg.clear();

    // Text debugging
    if ( !canvas.controls.debug.debugText ) {
      canvas.controls.debug.debugText = canvas.controls.addChild(new PIXI.Container());
    }
    const text = canvas.controls.debug.debugText;
    text.removeChildren().forEach(c => c.destroy({children: true}));

    // Define limitation colors
    const limitColors = {
      [CONST.WALL_SENSE_TYPES.NONE]: 0x77E7E8,
      [CONST.WALL_SENSE_TYPES.NORMAL]: 0xFFFFBB,
      [CONST.WALL_SENSE_TYPES.LIMITED]: 0x81B90C,
      [CONST.WALL_SENSE_TYPES.PROXIMITY]: 0xFFFFBB,
      [CONST.WALL_SENSE_TYPES.DISTANCE]: 0xFFFFBB
    };

    // Draw boundary shapes
    for ( const constraint of this.config.boundaryShapes ) {
      dg.lineStyle(2, 0xFF4444, 1.0).beginFill(0xFF4444, 0.10).drawShape(constraint).endFill();
    }

    // Draw the final polygon shape
    dg.beginFill(0x00AAFF, 0.25).drawShape(this).endFill();

    // Draw candidate edges
    for ( let edge of this.edges ) {
      const c = limitColors[edge[this.config.type]];
      dg.lineStyle(4, c).moveTo(edge.a.x, edge.a.y).lineTo(edge.b.x, edge.b.y);
    }

    // Draw vertices
    for ( let vertex of this.vertices.values() ) {
      const r = vertex.restriction;
      if ( r ) dg.lineStyle(1, 0x000000).beginFill(limitColors[r]).drawCircle(vertex.x, vertex.y, 8).endFill();
      if ( vertex._index ) {
        let t = text.addChild(new PIXI.Text(String(vertex._index), CONFIG.canvasTextStyle));
        t.position.set(vertex.x, vertex.y);
      }
    }

    // Draw emitted rays
    for ( let ray of this.rays ) {
      const r = ray.result;
      if ( r ) {
        dg.lineStyle(2, 0x00FF00, r.collisions.length ? 1.0 : 0.33).moveTo(ray.A.x, ray.A.y).lineTo(ray.B.x, ray.B.y);
        for ( let c of r.collisions ) {
          dg.lineStyle(1, 0x000000).beginFill(0xFF0000).drawCircle(c.x, c.y, 6).endFill();
        }
      }
    }
    return dg;
  }

  /* -------------------------------------------- */

  /**
   * Visualize the polygon, displaying its computed area, rays, and collision points
   * @param {Ray} ray
   * @param {PolygonVertex[]} collisions
   * @private
   */
  _visualizeCollision(ray, collisions) {
    let dg = canvas.controls.debug;
    dg.clear();
    const limitColors = {
      [CONST.WALL_SENSE_TYPES.NONE]: 0x77E7E8,
      [CONST.WALL_SENSE_TYPES.NORMAL]: 0xFFFFBB,
      [CONST.WALL_SENSE_TYPES.LIMITED]: 0x81B90C,
      [CONST.WALL_SENSE_TYPES.PROXIMITY]: 0xFFFFBB,
      [CONST.WALL_SENSE_TYPES.DISTANCE]: 0xFFFFBB
    };

    // Draw edges
    // BUG FIX: the segment start previously used edge.b.y instead of edge.a.y, drawing every edge
    // from the wrong point (compare the identical loop in visualize()).
    for ( let edge of this.edges.values() ) {
      const c = limitColors[edge[this.config.type]];
      dg.lineStyle(4, c).moveTo(edge.a.x, edge.a.y).lineTo(edge.b.x, edge.b.y);
    }

    // Draw the attempted ray
    dg.lineStyle(4, 0x0066CC).moveTo(ray.A.x, ray.A.y).lineTo(ray.B.x, ray.B.y);

    // Draw collision points
    for ( let x of collisions ) {
      dg.lineStyle(1, 0x000000).beginFill(0xFF0000).drawCircle(x.x, x.y, 6).endFill();
    }
  }
}

View File

@@ -0,0 +1,362 @@
/**
 * A Detection Mode which can be associated with any kind of sense/vision/perception.
 * A token could have multiple detection modes.
 */
class DetectionMode extends foundry.abstract.DataModel {

  /** @inheritDoc */
  static defineSchema() {
    const fields = foundry.data.fields;
    return {
      id: new fields.StringField({blank: false}),
      label: new fields.StringField({blank: false}),
      tokenConfig: new fields.BooleanField({initial: true}), // Whether this mode is offered in the Token Config UI
      walls: new fields.BooleanField({initial: true}),       // Whether this mode is blocked by walls
      angle: new fields.BooleanField({initial: true}),       // Whether this mode is restricted by the vision angle
      type: new fields.NumberField({
        initial: this.DETECTION_TYPES.SIGHT,
        choices: Object.values(this.DETECTION_TYPES)
      })
    };
  }

  /* -------------------------------------------- */

  /**
   * Get the detection filter pertaining to this mode.
   * @returns {PIXI.Filter|undefined}
   */
  static getDetectionFilter() {
    return this._detectionFilter;
  }

  /**
   * An optional filter to apply on the target when it is detected with this mode.
   * @type {PIXI.Filter|undefined}
   */
  static _detectionFilter;

  static {
    /**
     * The type of the detection mode.
     * @enum {number}
     */
    Object.defineProperty(this, "DETECTION_TYPES", {value: Object.freeze({
      SIGHT: 0,   // Sight, and anything depending on light perception
      SOUND: 1,   // What you can hear. Includes echolocation for bats per example
      MOVE: 2,    // This is mostly a sense for touch and vibration, like tremorsense, movement detection, etc.
      OTHER: 3    // Can't fit in other types (smell, life sense, trans-dimensional sense, sense of humor...)
    })});

    /**
     * The identifier of the basic sight detection mode.
     * @type {string}
     */
    Object.defineProperty(this, "BASIC_MODE_ID", {value: "basicSight"});
  }

  /* -------------------------------------------- */
  /*  Visibility Testing                          */
  /* -------------------------------------------- */

  /**
   * Test visibility of a target object or array of points for a specific vision source.
   * @param {VisionSource} visionSource       The vision source being tested
   * @param {TokenDetectionMode} mode         The detection mode configuration
   * @param {CanvasVisibilityTestConfig} config   The visibility test configuration
   * @returns {boolean}                       Is the test target visible?
   */
  testVisibility(visionSource, mode, {object, tests}={}) {
    if ( !mode.enabled || !this._canDetect(visionSource, object) ) return false;

    // The target is visible if any single test point passes
    for ( const test of tests ) {
      if ( this._testPoint(visionSource, mode, object, test) ) return true;
    }
    return false;
  }

  /* -------------------------------------------- */

  /**
   * Can this VisionSource theoretically detect a certain object based on its properties?
   * This check should not consider the relative positions of either object, only their state.
   * @param {VisionSource} visionSource   The vision source being tested
   * @param {PlaceableObject} target      The target object being tested
   * @returns {boolean}                   Can the target object theoretically be detected by this vision source?
   * @protected
   */
  _canDetect(visionSource, target) {
    const sourceDoc = visionSource.object.document;
    const sightBased = this.type === DetectionMode.DETECTION_TYPES.SIGHT;

    // A blinded source cannot use sight-based detection
    if ( sightBased && sourceDoc.hasStatusEffect(CONFIG.specialStatusEffects.BLIND) ) return false;

    // A burrowing source cannot detect unless walls are ignored
    if ( this.walls && sourceDoc.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;

    // Token-specific target restrictions
    if ( !(target instanceof Token) ) return true;
    const targetDoc = target.document;
    if ( sightBased && targetDoc.hasStatusEffect(CONFIG.specialStatusEffects.INVISIBLE) ) return false;
    if ( this.walls && targetDoc.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
    return true;
  }

  /* -------------------------------------------- */

  /**
   * Evaluate a single test point to confirm whether it is visible.
   * Standard detection rules require that the test point be both within LOS and within range.
   * @param {VisionSource} visionSource   The vision source being tested
   * @param {TokenDetectionMode} mode     The detection mode configuration
   * @param {PlaceableObject} target      The target object being tested
   * @param {CanvasVisibilityTest} test   The test case being evaluated
   * @returns {boolean}
   * @protected
   */
  _testPoint(visionSource, mode, target, test) {
    return this._testRange(visionSource, mode, target, test)
      && this._testLOS(visionSource, mode, target, test);
  }

  /* -------------------------------------------- */

  /**
   * Test whether the line-of-sight requirement for detection is satisfied.
   * Always true if the detection mode bypasses walls, otherwise the test point must be contained by the LOS polygon.
   * The result is cached for the vision source so that later checks for other detection modes do not repeat it.
   * @param {VisionSource} visionSource   The vision source being tested
   * @param {TokenDetectionMode} mode     The detection mode configuration
   * @param {PlaceableObject} target      The target object being tested
   * @param {CanvasVisibilityTest} test   The test case being evaluated
   * @returns {boolean}                   Is the LOS requirement satisfied for this test?
   * @protected
   */
  _testLOS(visionSource, mode, target, test) {
    if ( !this.walls ) return this._testAngle(visionSource, mode, target, test);
    const type = visionSource.constructor.sourceType;
    const isSight = type === "sight";
    if ( isSight && visionSource.blinded.darkness ) return false;

    // Constrained by walls but not by the vision angle: run a direct collision test
    if ( !this.angle && (visionSource.data.angle < 360) ) {
      const blocked = CONFIG.Canvas.polygonBackends[type].testCollision(
        { x: visionSource.x, y: visionSource.y },
        test.point,
        { type, mode: "any", source: visionSource, useThreshold: true, includeDarkness: isSight }
      );
      return !blocked;
    }

    // Constrained by walls and vision angle: use the (cached) LOS polygon containment test
    let hasLOS = test.los.get(visionSource);
    if ( hasLOS === undefined ) {
      hasLOS = visionSource.los.contains(test.point.x, test.point.y);
      test.los.set(visionSource, hasLOS);
    }
    return hasLOS;
  }

  /* -------------------------------------------- */

  /**
   * Test whether the target is within the vision angle.
   * @param {VisionSource} visionSource   The vision source being tested
   * @param {TokenDetectionMode} mode     The detection mode configuration
   * @param {PlaceableObject} target      The target object being tested
   * @param {CanvasVisibilityTest} test   The test case being evaluated
   * @returns {boolean}                   Is the point within the vision angle?
   * @protected
   */
  _testAngle(visionSource, mode, target, test) {
    if ( !this.angle ) return true;
    const { angle, rotation, externalRadius } = visionSource.data;
    if ( angle >= 360 ) return true;

    // Points within the external radius are visible regardless of angle
    const dx = test.point.x - visionSource.x;
    const dy = test.point.y - visionSource.y;
    if ( (dx * dx) + (dy * dy) <= (externalRadius * externalRadius) ) return true;

    // Compare the angular offset of the point against the vision cone
    const aMin = rotation + 90 - (angle / 2);
    const a = Math.toDegrees(Math.atan2(dy, dx));
    return (((a - aMin) % 360) + 360) % 360 <= angle;
  }

  /* -------------------------------------------- */

  /**
   * Verify that a target is in range of a source.
   * @param {VisionSource} visionSource   The vision source being tested
   * @param {TokenDetectionMode} mode     The detection mode configuration
   * @param {PlaceableObject} target      The target object being tested
   * @param {CanvasVisibilityTest} test   The test case being evaluated
   * @returns {boolean}                   Is the target within range?
   * @protected
   */
  _testRange(visionSource, mode, target, test) {
    if ( mode.range === null ) return true;  // A null range is unlimited
    if ( mode.range <= 0 ) return false;     // A zero or negative range detects nothing
    const radius = visionSource.object.getLightRadius(mode.range);
    const dx = test.point.x - visionSource.x;
    const dy = test.point.y - visionSource.y;
    return ((dx * dx) + (dy * dy)) <= (radius * radius);
  }
}
/* -------------------------------------------- */
/**
 * This detection mode tests whether the target is visible due to being illuminated by a light source.
 * By default tokens have light perception with an infinite range if light perception isn't explicitly
 * configured.
 */
class DetectionModeLightPerception extends DetectionMode {

  /** @override */
  _canDetect(visionSource, target) {
    const {BLIND, BURROW, INVISIBLE} = CONFIG.specialStatusEffects;

    // A blinded or burrowing source cannot perceive light
    const sourceDoc = visionSource.object.document;
    if ( sourceDoc.hasStatusEffect(BLIND) || sourceDoc.hasStatusEffect(BURROW) ) return false;

    // Invisible or burrowing tokens cannot be perceived
    if ( target instanceof Token ) {
      const targetDoc = target.document;
      if ( targetDoc.hasStatusEffect(INVISIBLE) || targetDoc.hasStatusEffect(BURROW) ) return false;
    }
    return true;
  }

  /* -------------------------------------------- */

  /** @inheritDoc */
  _testPoint(visionSource, mode, target, test) {
    // In addition to the standard range/LOS tests, the point must lie inside some light source
    return super._testPoint(visionSource, mode, target, test)
      && canvas.effects.testInsideLight(test.point, test.elevation);
  }
}
/* -------------------------------------------- */
/**
 * A special detection mode which models a form of darkvision (night vision).
 * This mode is the default case which is tested first when evaluating visibility of objects.
 */
class DetectionModeBasicSight extends DetectionMode {

  /** @override */
  _canDetect(visionSource, target) {
    const {BLIND, BURROW, INVISIBLE} = CONFIG.specialStatusEffects;

    // A blinded or burrowing source cannot see
    const sourceDoc = visionSource.object.document;
    if ( sourceDoc.hasStatusEffect(BLIND) || sourceDoc.hasStatusEffect(BURROW) ) return false;

    // Invisible or burrowing tokens cannot be seen
    if ( target instanceof Token ) {
      const targetDoc = target.document;
      if ( targetDoc.hasStatusEffect(INVISIBLE) || targetDoc.hasStatusEffect(BURROW) ) return false;
    }
    return true;
  }
}
/* -------------------------------------------- */
/**
 * Detection mode that sees invisible creatures.
 * This detection mode allows the source to:
 * - See/Detect the invisible target as if visible.
 * - The "See" version needs sight and is affected by blindness
 */
class DetectionModeInvisibility extends DetectionMode {
  /** @override */
  static getDetectionFilter() {
    this._detectionFilter ??= GlowOverlayFilter.create({
      glowColor: [0, 0.60, 0.33, 1]
    });
    return this._detectionFilter;
  }

  /** @override */
  _canDetect(visionSource, target) {
    // Only Token placeables may be detected
    if ( !(target instanceof Token) ) return false;

    // The target must actually carry the invisible status effect
    const targetDoc = target.document;
    if ( !targetDoc.hasStatusEffect(CONFIG.specialStatusEffects.INVISIBLE) ) return false;

    // The sight-based variant fails while the source is blinded
    const sourceDoc = visionSource.object.document;
    if ( (this.type === DetectionMode.DETECTION_TYPES.SIGHT)
      && sourceDoc.hasStatusEffect(CONFIG.specialStatusEffects.BLIND) ) return false;

    // Wall-constrained detection fails if either token is burrowing
    if ( this.walls && (sourceDoc.hasStatusEffect(CONFIG.specialStatusEffects.BURROW)
      || targetDoc.hasStatusEffect(CONFIG.specialStatusEffects.BURROW)) ) return false;
    return true;
  }
}
/* -------------------------------------------- */
/**
 * Detection mode that sees creatures in contact with the ground.
 */
class DetectionModeTremor extends DetectionMode {
  /** @override */
  static getDetectionFilter() {
    this._detectionFilter ??= OutlineOverlayFilter.create({
      outlineColor: [1, 0, 1, 1],
      knockout: true,
      wave: true
    });
    return this._detectionFilter;
  }

  /** @override */
  _canDetect(visionSource, target) {
    // Only Token placeables can be sensed through the ground
    if ( !(target instanceof Token) ) return false;
    // Flying and hovering tokens are not in contact with the ground
    const targetDoc = target.document;
    const airborne = targetDoc.hasStatusEffect(CONFIG.specialStatusEffects.FLY)
      || targetDoc.hasStatusEffect(CONFIG.specialStatusEffects.HOVER);
    return !airborne;
  }
}
/* -------------------------------------------- */
/**
 * Detection mode that sees ALL creatures (no blockers).
 * If not constrained by walls, see everything within the range.
 */
class DetectionModeAll extends DetectionMode {
  /** @override */
  static getDetectionFilter() {
    this._detectionFilter ??= OutlineOverlayFilter.create({
      outlineColor: [0.85, 0.85, 1.0, 1],
      knockout: true
    });
    return this._detectionFilter;
  }

  /** @override */
  _canDetect(visionSource, target) {
    const sourceDoc = visionSource.object.document;

    // Sight-based detection fails while the source is blinded
    const requiresSight = this.type === DetectionMode.DETECTION_TYPES.SIGHT;
    if ( requiresSight && sourceDoc.hasStatusEffect(CONFIG.specialStatusEffects.BLIND) ) return false;

    // Without wall constraints, everything within range is detected
    if ( !this.walls ) return true;

    // Wall-constrained detection fails if the source or target token is burrowing
    if ( sourceDoc.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
    if ( (target instanceof Token)
      && target.document.hasStatusEffect(CONFIG.specialStatusEffects.BURROW) ) return false;
    return true;
  }
}

View File

@@ -0,0 +1,501 @@
/**
 * A fog of war management class which is the singleton canvas.fog instance.
 * @category - Canvas
 */
class FogManager {

  /**
   * The FogExploration document which applies to this canvas view
   * @type {FogExploration|null}
   */
  exploration = null;

  /**
   * A status flag for whether the layer initialization workflow has succeeded
   * @type {boolean}
   * @private
   */
  #initialized = false;

  /**
   * Track whether we have pending fog updates which have not yet been saved to the database
   * @type {boolean}
   * @internal
   */
  _updated = false;

  /**
   * Texture extractor
   * @type {TextureExtractor}
   */
  get extractor() {
    return this.#extractor;
  }

  #extractor;

  /**
   * The fog refresh count.
   * If > to the refresh threshold, the fog texture is saved to database. It is then reinitialized to 0.
   * @type {number}
   */
  #refreshCount = 0;

  /**
   * Matrix used for fog rendering transformation.
   * @type {PIXI.Matrix}
   */
  #renderTransform = new PIXI.Matrix();

  /**
   * Define the number of fog refresh needed before the fog texture is extracted and pushed to the server.
   * @type {number}
   */
  static COMMIT_THRESHOLD = 70;

  /**
   * A debounced function to save fog of war exploration once a continuous stream of updates has concluded.
   * @type {Function}
   */
  #debouncedSave;

  /**
   * Handling of the concurrency for fog loading, saving and reset.
   * @type {Semaphore}
   */
  #queue = new foundry.utils.Semaphore();

  /* -------------------------------------------- */
  /*  Fog Manager Properties                      */
  /* -------------------------------------------- */

  /**
   * The exploration SpriteMesh which holds the fog exploration texture.
   * Lazily created on first access.
   * @type {SpriteMesh}
   */
  get sprite() {
    return this.#explorationSprite || (this.#explorationSprite = this._createExplorationObject());
  }

  #explorationSprite;

  /* -------------------------------------------- */

  /**
   * The configured options used for the saved fog-of-war texture.
   * @type {FogTextureConfiguration}
   */
  get textureConfiguration() {
    return canvas.visibility.textureConfiguration;
  }

  /* -------------------------------------------- */

  /**
   * Does the currently viewed Scene support Token field of vision?
   * @type {boolean}
   */
  get tokenVision() {
    return canvas.scene.tokenVision;
  }

  /* -------------------------------------------- */

  /**
   * Does the currently viewed Scene support fog of war exploration?
   * @type {boolean}
   */
  get fogExploration() {
    return canvas.scene.fog.exploration;
  }

  /* -------------------------------------------- */
  /*  Fog of War Management                       */
  /* -------------------------------------------- */

  /**
   * Create the exploration display object with or without a provided texture.
   * @param {PIXI.Texture|PIXI.RenderTexture} [tex] Optional exploration texture.
   * @returns {DisplayObject}
   * @internal
   */
  _createExplorationObject(tex) {
    return new SpriteMesh(tex ?? Canvas.getRenderTexture({
      clearColor: [0, 0, 0, 1],
      textureConfiguration: this.textureConfiguration
    }), FogSamplerShader);
  }

  /* -------------------------------------------- */

  /**
   * Initialize fog of war - resetting it when switching scenes or re-drawing the canvas
   * @returns {Promise<void>}
   */
  async initialize() {
    this.#initialized = false;

    // Create a TextureExtractor instance
    if ( this.#extractor === undefined ) {
      try {
        this.#extractor = new TextureExtractor(canvas.app.renderer, {
          callerName: "FogExtractor",
          controlHash: true,
          format: PIXI.FORMATS.RED
        });
      } catch(e) {
        // A null extractor marks texture extraction as unsupported in this browser
        this.#extractor = null;
        console.error(e);
      }
    }
    this.#extractor?.reset();

    // Bind a debounced save handler
    this.#debouncedSave = foundry.utils.debounce(this.save.bind(this), 2000);

    // Load the initial fog texture
    await this.load();
    this.#initialized = true;
  }

  /* -------------------------------------------- */

  /**
   * Clear the fog and reinitialize properties (commit and save in non reset mode)
   * @returns {Promise<void>}
   */
  async clear() {
    // Save any pending exploration
    try {
      await this.save();
    } catch(e) {
      ui.notifications.error("Failed to save fog exploration");
      console.error(e);
    }

    // Deactivate current fog exploration
    this.#initialized = false;
    this.#deactivate();
  }

  /* -------------------------------------------- */

  /**
   * Once a new Fog of War location is explored, composite the explored container with the current staging sprite.
   * Once the number of refresh is > to the commit threshold, save the fog texture to the database.
   */
  commit() {
    const vision = canvas.visibility.vision;
    if ( !vision?.children.length || !this.fogExploration || !this.tokenVision ) return;
    if ( !this.#explorationSprite?.texture.valid ) return;

    // Get a staging texture or clear and render into the sprite if its texture is a RT
    // and render the entire fog container to it
    const dims = canvas.dimensions;
    const isRenderTex = this.#explorationSprite.texture instanceof PIXI.RenderTexture;
    const tex = isRenderTex ? this.#explorationSprite.texture : Canvas.getRenderTexture({
      clearColor: [0, 0, 0, 1],
      textureConfiguration: this.textureConfiguration
    });
    this.#renderTransform.tx = -dims.sceneX;
    this.#renderTransform.ty = -dims.sceneY;

    // Render the currently revealed vision (preview excluded) to the texture
    vision.containmentFilter.enabled = canvas.visibility.needsContainment;
    vision.light.preview.visible = false;
    vision.light.mask.preview.visible = false;
    vision.sight.preview.visible = false;
    canvas.app.renderer.render(isRenderTex ? vision : this.#explorationSprite, {
      renderTexture: tex,
      clear: false,
      transform: this.#renderTransform
    });
    vision.light.preview.visible = true;
    vision.light.mask.preview.visible = true;
    vision.sight.preview.visible = true;
    vision.containmentFilter.enabled = false;
    if ( !isRenderTex ) this.#explorationSprite.texture.destroy(true);
    this.#explorationSprite.texture = tex;
    this._updated = true;

    if ( !this.exploration ) {
      const fogExplorationCls = getDocumentClass("FogExploration");
      this.exploration = new fogExplorationCls();
    }

    // Schedule saving the texture to the database
    if ( this.#refreshCount > FogManager.COMMIT_THRESHOLD ) {
      this.#debouncedSave();
      this.#refreshCount = 0;
    }
    else this.#refreshCount++;
  }

  /* -------------------------------------------- */

  /**
   * Load existing fog of war data from local storage and populate the initial exploration sprite
   * @returns {Promise<(PIXI.Texture|void)>}
   */
  async load() {
    return await this.#queue.add(this.#load.bind(this));
  }

  /* -------------------------------------------- */

  /**
   * Load existing fog of war data from local storage and populate the initial exploration sprite
   * @returns {Promise<(PIXI.Texture|void)>}
   */
  async #load() {
    if ( CONFIG.debug.fog.manager ) console.debug("FogManager | Loading saved FogExploration for Scene.");
    this.#deactivate();

    // Take no further action if token vision is not enabled
    if ( !this.tokenVision ) return;

    // Load existing FOW exploration data or create a new placeholder
    const fogExplorationCls = /** @type {typeof FogExploration} */ getDocumentClass("FogExploration");
    this.exploration = await fogExplorationCls.load();

    // Extract and assign the fog data image
    const assign = (tex, resolve) => {
      if ( this.#explorationSprite?.texture === tex ) return resolve(tex);
      this.#explorationSprite?.destroy(true);
      this.#explorationSprite = this._createExplorationObject(tex);
      canvas.visibility.resetExploration();
      canvas.perception.initialize();
      resolve(tex);
    };

    // Initialize the exploration sprite if no exploration data exists
    if ( !this.exploration ) {
      return await new Promise(resolve => {
        assign(Canvas.getRenderTexture({
          clearColor: [0, 0, 0, 1],
          textureConfiguration: this.textureConfiguration
        }), resolve);
      });
    }

    // Otherwise load the texture from the exploration data
    return await new Promise(resolve => {
      let tex = this.exploration.getTexture();
      if ( tex === null ) assign(Canvas.getRenderTexture({
        clearColor: [0, 0, 0, 1],
        textureConfiguration: this.textureConfiguration
      }), resolve);
      else if ( tex.baseTexture.valid ) assign(tex, resolve);
      else tex.on("update", tex => assign(tex, resolve));
    });
  }

  /* -------------------------------------------- */

  /**
   * Dispatch a request to reset the fog of war exploration status for all users within this Scene.
   * Once the server has deleted existing FogExploration documents, the _onReset handler will re-draw the canvas.
   */
  async reset() {
    if ( CONFIG.debug.fog.manager ) console.debug("FogManager | Resetting fog of war exploration for Scene.");
    game.socket.emit("resetFog", canvas.scene.id);
  }

  /* -------------------------------------------- */

  /**
   * Request a fog of war save operation.
   * Note: if a save operation is pending, we're waiting for its conclusion.
   */
  async save() {
    return await this.#queue.add(this.#save.bind(this));
  }

  /* -------------------------------------------- */

  /**
   * Request a fog of war save operation.
   * Note: if a save operation is pending, we're waiting for its conclusion.
   */
  async #save() {
    if ( !this._updated ) return;
    this._updated = false;
    const exploration = this.exploration;
    if ( CONFIG.debug.fog.manager ) {
      console.debug("FogManager | Initiate non-blocking extraction of the fog of war progress.");
    }
    if ( !this.#extractor ) {
      console.error("FogManager | Browser does not support texture extraction.");
      return;
    }

    // Get compressed base64 image from the fog texture
    const base64Image = await this._extractBase64();

    // If the exploration changed, the fog was reloaded while the pixels were extracted
    if ( this.exploration !== exploration ) return;

    // Need to skip?
    if ( !base64Image ) {
      if ( CONFIG.debug.fog.manager ) console.debug("FogManager | Fog of war has not changed. Skipping db operation.");
      return;
    }

    // Update the fog exploration document
    const updateData = this._prepareFogUpdateData(base64Image);
    await this.#updateFogExploration(updateData);
  }

  /* -------------------------------------------- */

  /**
   * Extract fog data as a base64 string
   * @returns {Promise<string>}
   * @throws {Error} If the extraction workflow failed for any reason.
   * @protected
   */
  async _extractBase64() {
    try {
      // The extraction must be awaited inside this try block: returning the
      // promise without awaiting would let an asynchronous rejection escape
      // the catch below as an unhandled rejection.
      return await this.#extractor.extract({
        texture: this.#explorationSprite.texture,
        compression: TextureExtractor.COMPRESSION_MODES.BASE64,
        type: "image/webp",
        quality: 0.8,
        debug: CONFIG.debug.fog.extractor
      });
    } catch(err) {
      // FIXME .extract() may throw a boolean false instead of an Error.
      // Wrap whatever was thrown as the cause so context is not lost.
      throw new Error("Fog of War base64 extraction failed", {cause: err});
    }
  }

  /* -------------------------------------------- */

  /**
   * Prepare the data that will be used to update the FogExploration document.
   * @param {string} base64Image       The extracted base64 image data
   * @returns {Partial<FogExplorationData>} Exploration data to update
   * @protected
   */
  _prepareFogUpdateData(base64Image) {
    return {explored: base64Image, timestamp: Date.now()};
  }

  /* -------------------------------------------- */

  /**
   * Update the fog exploration document with provided data.
   * @param {object} updateData
   * @returns {Promise<void>}
   */
  async #updateFogExploration(updateData) {
    if ( !game.scenes.has(canvas.scene?.id) ) return;
    if ( !this.exploration ) return;
    if ( CONFIG.debug.fog.manager ) console.debug("FogManager | Saving fog of war progress into exploration document.");
    if ( !this.exploration.id ) {
      this.exploration.updateSource(updateData);
      this.exploration = await this.exploration.constructor.create(this.exploration.toJSON(), {loadFog: false});
    }
    else await this.exploration.update(updateData, {loadFog: false});
  }

  /* -------------------------------------------- */

  /**
   * Deactivate fog of war.
   * Clear all shared containers by unlinking them from their parent.
   * Destroy all stored textures and graphics.
   */
  #deactivate() {
    // Remove the current exploration document
    this.exploration = null;
    this.#extractor?.reset();

    // Destroy current exploration texture and provide a new one with transparency
    if ( this.#explorationSprite && !this.#explorationSprite.destroyed ) this.#explorationSprite.destroy(true);
    this.#explorationSprite = undefined;

    this._updated = false;
    this.#refreshCount = 0;
  }

  /* -------------------------------------------- */

  /**
   * If fog of war data is reset from the server, deactivate the current fog and initialize the exploration.
   * @returns {Promise}
   * @internal
   */
  async _handleReset() {
    return await this.#queue.add(this.#handleReset.bind(this));
  }

  /* -------------------------------------------- */

  /**
   * If fog of war data is reset from the server, deactivate the current fog and initialize the exploration.
   * @returns {Promise}
   */
  async #handleReset() {
    ui.notifications.info("Fog of War exploration progress was reset for this Scene");

    // Remove the current exploration document
    this.#deactivate();

    // Reset exploration in the visibility layer
    canvas.visibility.resetExploration();

    // Refresh perception
    canvas.perception.initialize();
  }

  /* -------------------------------------------- */
  /*  Deprecations and Compatibility              */
  /* -------------------------------------------- */

  /**
   * @deprecated since v11
   * @ignore
   */
  get pending() {
    const msg = "pending is deprecated and redirected to the exploration container";
    foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
    return canvas.visibility.explored;
  }

  /* -------------------------------------------- */

  /**
   * @deprecated since v11
   * @ignore
   */
  get revealed() {
    const msg = "revealed is deprecated and redirected to the exploration container";
    foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
    return canvas.visibility.explored;
  }

  /* -------------------------------------------- */

  /**
   * @deprecated since v11
   * @ignore
   */
  update(source, force=false) {
    const msg = "update is obsolete and always returns true. The fog exploration does not record position anymore.";
    foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
    return true;
  }

  /* -------------------------------------------- */

  /**
   * @deprecated since v11
   * @ignore
   */
  get resolution() {
    const msg = "resolution is deprecated and redirected to CanvasVisibility#textureConfiguration";
    foundry.utils.logCompatibilityWarning(msg, {since: 11, until: 13});
    return canvas.visibility.textureConfiguration;
  }
}

View File

@@ -0,0 +1,185 @@
/**
 * A helper class which manages the refresh workflow for perception layers on the canvas.
 * This controls the logic which batches multiple requested updates to minimize the amount of work required.
 * A singleton instance is available as {@link Canvas#perception}.
 */
class PerceptionManager extends RenderFlagsMixin(Object) {

  /**
   * @typedef {RenderFlags} PerceptionManagerFlags
   * @property {boolean} initializeLighting       Re-initialize the entire lighting configuration. An aggregate behavior
   *                                              which does no work directly but propagates to set several other flags.
   * @property {boolean} initializeVision         Re-initialize the entire vision configuration.
   *                                              See {@link CanvasVisibility#initializeSources}.
   * @property {boolean} initializeVisionModes    Initialize the active vision modes.
   *                                              See {@link CanvasVisibility#initializeVisionMode}.
   * @property {boolean} initializeSounds         Re-initialize the entire ambient sound configuration.
   *                                              See {@link SoundsLayer#initializeSources}.
   * @property {boolean} refreshEdges             Recompute intersections between all registered edges.
   *                                              See {@link CanvasEdges#refresh}.
   * @property {boolean} refreshLighting          Refresh the rendered appearance of lighting
   * @property {boolean} refreshLightSources      Update the configuration of light sources
   * @property {boolean} refreshOcclusion         Refresh occlusion
   * @property {boolean} refreshPrimary           Refresh the contents of the PrimaryCanvasGroup mesh
   * @property {boolean} refreshSounds            Refresh the audio state of ambient sounds
   * @property {boolean} refreshVision            Refresh the rendered appearance of vision
   * @property {boolean} refreshVisionSources     Update the configuration of vision sources
   * @property {boolean} soundFadeDuration        Apply a fade duration to sound refresh workflow
   */

  /**
   * Flag definitions. Aggregate flags declare "propagate" lists of more granular
   * flags which are also set whenever the aggregate flag is requested.
   * @override
   */
  static RENDER_FLAGS = {
    // Edges
    refreshEdges: {},

    // Light and Darkness Sources
    initializeLighting: {propagate: ["initializeDarknessSources", "initializeLightSources"]},
    initializeDarknessSources: {propagate: ["refreshLighting", "refreshVision", "refreshEdges"]},
    initializeLightSources: {propagate: ["refreshLighting", "refreshVision"]},
    refreshLighting: {propagate: ["refreshLightSources"]},
    refreshLightSources: {},

    // Vision
    initializeVisionModes: {propagate: ["refreshVisionSources", "refreshLighting", "refreshPrimary"]},
    initializeVision: {propagate: ["initializeVisionModes", "refreshVision"]},
    refreshVision: {propagate: ["refreshVisionSources", "refreshOcclusionMask"]},
    refreshVisionSources: {},

    // Primary Canvas Group
    refreshPrimary: {},
    refreshOcclusion: {propagate: ["refreshOcclusionStates", "refreshOcclusionMask"]},
    refreshOcclusionStates: {},
    refreshOcclusionMask: {},

    // Sound
    initializeSounds: {propagate: ["refreshSounds"]},
    refreshSounds: {},
    soundFadeDuration: {},

    /** @deprecated since v12 */
    refreshTiles: {
      propagate: ["refreshOcclusion"],
      deprecated: {message: "The refreshTiles flag is deprecated in favor of refreshOcclusion",
        since: 12, until: 14, alias: true}
    },

    /** @deprecated since v12 */
    identifyInteriorWalls: {
      propagate: ["initializeLighting", "initializeVision"],
      deprecated: {
        message: "The identifyInteriorWalls is now obsolete and has no replacement.",
        since: 12, until: 14, alias: true
      }
    },

    /** @deprecated since v11 */
    forceUpdateFog: {
      propagate: ["refreshVision"],
      deprecated: {
        message: "The forceUpdateFog flag is now obsolete and has no replacement. "
          + "The fog is now always updated when the visibility is refreshed", since: 11, until: 13, alias: true
      }
    }
  };

  // Flags which only exist to emit a compatibility warning when requested
  static #deprecatedFlags = ["refreshTiles", "identifyInteriorWalls", "forceUpdateFog"];

  /** @override */
  static RENDER_FLAG_PRIORITY = "PERCEPTION";

  /* -------------------------------------------- */

  /**
   * Consume all batched flags and perform the requested perception work.
   * The order of operations below is significant: sources are initialized
   * before they are refreshed, and occlusion is updated last.
   * @override
   */
  applyRenderFlags() {
    if ( !this.renderFlags.size ) return;
    const flags = this.renderFlags.clear();

    // Initialize darkness sources
    if ( flags.initializeDarknessSources ) canvas.effects.initializeDarknessSources();

    // Recompute edge intersections
    if ( flags.refreshEdges ) canvas.edges.refresh();

    // Initialize positive light sources
    if ( flags.initializeLightSources ) canvas.effects.initializeLightSources();

    // Initialize active vision sources
    if ( flags.initializeVision ) canvas.visibility.initializeSources();

    // Initialize the active vision mode
    if ( flags.initializeVisionModes ) canvas.visibility.initializeVisionMode();

    // Initialize active sound sources
    if ( flags.initializeSounds ) canvas.sounds.initializeSources();

    // Refresh light, vision, and sound sources
    if ( flags.refreshLightSources ) canvas.effects.refreshLightSources();
    if ( flags.refreshVisionSources ) canvas.effects.refreshVisionSources();
    if ( flags.refreshSounds ) canvas.sounds.refresh({fade: flags.soundFadeDuration ? 250 : 0});

    // Refresh the appearance of the Primary Canvas Group environment
    if ( flags.refreshPrimary ) canvas.primary.refreshPrimarySpriteMesh();
    if ( flags.refreshLighting ) canvas.effects.refreshLighting();
    if ( flags.refreshVision ) canvas.visibility.refresh();

    // Update roof occlusion states based on token positions and vision
    // TODO: separate occlusion state testing from CanvasOcclusionMask
    if ( flags.refreshOcclusion ) canvas.masks.occlusion.updateOcclusion();
    else {
      if ( flags.refreshOcclusionMask ) canvas.masks.occlusion._updateOcclusionMask();
      if ( flags.refreshOcclusionStates ) canvas.masks.occlusion._updateOcclusionStates();
    }

    // Deprecated flags: warn once per application when a deprecated flag was requested
    for ( const f of PerceptionManager.#deprecatedFlags ) {
      if ( flags[f] ) {
        const {message, since, until} = PerceptionManager.RENDER_FLAGS[f].deprecated;
        foundry.utils.logCompatibilityWarning(message, {since, until});
      }
    }
  }

  /* -------------------------------------------- */

  /**
   * Update perception manager flags which configure which behaviors occur on the next frame render.
   * @param {object} flags        Flag values (true) to assign where the keys belong to PerceptionManager.FLAGS
   */
  update(flags) {
    // Flags requested before the canvas is ready are intentionally dropped
    if ( !canvas.ready ) return;
    this.renderFlags.set(flags);
  }

  /* -------------------------------------------- */

  /**
   * A helper function to perform an immediate initialization plus incremental refresh.
   */
  initialize() {
    return this.update({
      refreshEdges: true,
      initializeLighting: true,
      initializeVision: true,
      initializeSounds: true,
      refreshOcclusion: true
    });
  }

  /* -------------------------------------------- */
  /*  Deprecations and Compatibility              */
  /* -------------------------------------------- */

  /**
   * @deprecated since v12
   * @ignore
   */
  refresh() {
    foundry.utils.logCompatibilityWarning("PerceptionManager#refresh is deprecated in favor of assigning granular "
      + "refresh flags", {since: 12, until: 14});
    return this.update({
      refreshLighting: true,
      refreshVision: true,
      refreshSounds: true,
      refreshOcclusion: true
    });
  }
}

View File

@@ -0,0 +1,178 @@
/**
 * A special subclass of DataField used to reference an AbstractBaseShader definition.
 */
class ShaderField extends foundry.data.fields.DataField {

  /** @inheritdoc */
  static get _defaults() {
    // Shader references are nullable and carry no initial value
    return Object.assign(super._defaults, {nullable: true, initial: undefined});
  }

  /** @override */
  _cast(value) {
    // Accept only subclasses of AbstractBaseShader
    if ( foundry.utils.isSubclass(value, AbstractBaseShader) ) return value;
    throw new Error("The value provided to a ShaderField must be an AbstractBaseShader subclass.");
  }
}
/**
 * A Vision Mode which can be selected for use by a Token.
 * The selected Vision Mode alters the appearance of various aspects of the canvas while that Token is the POV.
 */
class VisionMode extends foundry.abstract.DataModel {
  /**
   * Construct a Vision Mode using provided configuration parameters and callback functions.
   * @param {object} data         Data which fulfills the model defined by the VisionMode schema.
   * @param {object} [options]    Additional options passed to the DataModel constructor.
   */
  constructor(data={}, options={}) {
    super(data, options);
    // Whether the mode runs a per-frame animation (see #animate)
    this.animated = options.animated ?? false;
  }

  /** @inheritDoc */
  static defineSchema() {
    const fields = foundry.data.fields;

    // A shader reference paired with its uniform overrides
    const shaderSchema = () => new fields.SchemaField({
      shader: new ShaderField(),
      uniforms: new fields.ObjectField()
    });

    // Per-layer lighting configuration: visibility level, post-processing, and uniforms
    const lightingSchema = () => new fields.SchemaField({
      visibility: new fields.NumberField({
        initial: this.LIGHTING_VISIBILITY.ENABLED,
        choices: Object.values(this.LIGHTING_VISIBILITY)
      }),
      postProcessingModes: new fields.ArrayField(new fields.StringField()),
      uniforms: new fields.ObjectField()
    });

    // Return model schema
    return {
      id: new fields.StringField({blank: false}),
      label: new fields.StringField({blank: false}),
      tokenConfig: new fields.BooleanField({initial: true}),
      canvas: new fields.SchemaField({
        shader: new ShaderField(),
        uniforms: new fields.ObjectField()
      }),
      lighting: new fields.SchemaField({
        background: lightingSchema(),
        coloration: lightingSchema(),
        illumination: lightingSchema(),
        darkness: lightingSchema(),
        // Maps a lighting level to the lighting level which replaces it while this mode is active
        levels: new fields.ObjectField({
          validate: o => {
            const values = Object.values(CONST.LIGHTING_LEVELS);
            return Object.entries(o).every(([k, v]) => values.includes(Number(k)) && values.includes(v));
          },
          validationError: "may only contain a mapping of keys from VisionMode.LIGHTING_LEVELS"
        }),
        // Maps a lighting level to a numeric multiplier applied while this mode is active
        multipliers: new fields.ObjectField({
          validate: o => {
            const values = Object.values(CONST.LIGHTING_LEVELS);
            return Object.entries(o).every(([k, v]) => values.includes(Number(k)) && Number.isFinite(v));
          },
          validationError: "must provide a mapping of keys from VisionMode.LIGHTING_LEVELS to numeric multiplier values"
        })
      }),
      vision: new fields.SchemaField({
        background: shaderSchema(),
        coloration: shaderSchema(),
        illumination: shaderSchema(),
        darkness: new fields.SchemaField({
          adaptive: new fields.BooleanField({initial: true})
        }),
        // Optional overrides for vision source defaults; undefined means "do not override"
        defaults: new fields.SchemaField({
          color: new fields.ColorField({required: false, initial: undefined}),
          attenuation: new fields.AlphaField({required: false, initial: undefined}),
          brightness: new fields.NumberField({required: false, initial: undefined, nullable: false, min: -1, max: 1}),
          saturation: new fields.NumberField({required: false, initial: undefined, nullable: false, min: -1, max: 1}),
          contrast: new fields.NumberField({required: false, initial: undefined, nullable: false, min: -1, max: 1})
        }),
        preferred: new fields.BooleanField({initial: false})
      })
    };
  }

  /**
   * The lighting illumination levels which are supported.
   * @enum {number}
   */
  static LIGHTING_LEVELS = CONST.LIGHTING_LEVELS;

  /**
   * Flags for how each lighting channel should be rendered for the currently active vision modes:
   * - Disabled: this lighting layer is not rendered; the shaders do not decide.
   * - Enabled: this lighting layer is rendered normally, and the shaders can choose if they should be rendered or not.
   * - Required: this lighting layer is always rendered; the shaders do not decide.
   * @enum {number}
   */
  static LIGHTING_VISIBILITY = {
    DISABLED: 0,
    ENABLED: 1,
    REQUIRED: 2
  };

  /**
   * A flag for whether this vision source is animated
   * @type {boolean}
   */
  animated = false;

  /**
   * Does this vision mode enable light sources?
   * True unless it disables lighting entirely.
   * @type {boolean}
   */
  get perceivesLight() {
    // Any non-DISABLED (non-zero) channel visibility means light is perceived
    const {background, illumination, coloration} = this.lighting;
    return !!(background.visibility || illumination.visibility || coloration.visibility);
  }

  /**
   * Special activation handling that could be implemented by VisionMode subclasses
   * @param {VisionSource} source   Activate this VisionMode for a specific source
   * @abstract
   */
  _activate(source) {}

  /**
   * Special deactivation handling that could be implemented by VisionMode subclasses
   * @param {VisionSource} source   Deactivate this VisionMode for a specific source
   * @abstract
   */
  _deactivate(source) {}

  /**
   * Special handling which is needed when this Vision Mode is activated for a VisionSource.
   * Guarded so repeated activation is a no-op.
   * @param {VisionSource} source   Activate this VisionMode for a specific source
   */
  activate(source) {
    if ( source._visionModeActivated ) return;
    source._visionModeActivated = true;
    this._activate(source);
  }

  /**
   * Special handling which is needed when this Vision Mode is deactivated for a VisionSource.
   * Guarded so repeated deactivation is a no-op.
   * @param {VisionSource} source   Deactivate this VisionMode for a specific source
   */
  deactivate(source) {
    if ( !source._visionModeActivated ) return;
    source._visionModeActivated = false;
    this._deactivate(source);
  }

  /**
   * An animation function which runs every frame while this Vision Mode is active.
   * @param {number} dt   The deltaTime passed by the PIXI Ticker
   */
  animate(dt) {
    return foundry.canvas.sources.PointVisionSource.prototype.animateTime.call(this, dt);
  }
}

View File

@@ -0,0 +1,354 @@
/**
* An implementation of the Weiler Atherton algorithm for clipping polygons.
* This currently only handles combinations that will not result in any holes.
* Support may be added for holes in the future.
*
* This algorithm is faster than the Clipper library for this task because it relies on the unique properties of the
* circle, ellipse, or convex simple clip object.
* It is also more precise in that it uses the actual intersection points between the circle/ellipse and polygon,
* instead of relying on the polygon approximation of the circle/ellipse to find the intersection points.
*
* For more explanation of the underlying algorithm, see:
* https://en.wikipedia.org/wiki/Weiler%E2%80%93Atherton_clipping_algorithm
* https://www.geeksforgeeks.org/weiler-atherton-polygon-clipping-algorithm
* https://h-educate.in/weiler-atherton-polygon-clipping-algorithm/
*/
class WeilerAthertonClipper {
  /**
   * Construct a WeilerAthertonClipper instance used to perform the calculation.
   * @param {PIXI.Polygon} polygon                   Polygon to clip
   * @param {PIXI.Rectangle|PIXI.Circle} clipObject  Object used to clip the polygon
   * @param {number} clipType                        Type of clip to use (one of CLIP_TYPES)
   * @param {object} clipOpts                        Object passed to the clippingObject methods toPolygon and
   *                                                 pointsBetween
   * @throws {Error}                                 If the subject polygon does not have a positive signed area
   */
  constructor(polygon, clipObject, clipType, clipOpts) {
    // The sweep assumes a clockwise (positive signed area) subject polygon.
    if ( !polygon.isPositive ) {
      const msg = "WeilerAthertonClipper#constructor needs a subject polygon with a positive signed area.";
      throw new Error(msg);
    }
    clipType ??= this.constructor.CLIP_TYPES.INTERSECT;
    clipOpts ??= {};
    this.polygon = polygon;
    this.clipObject = clipObject;
    this.config = { clipType, clipOpts };
  }

  /**
   * The supported clip types.
   * Values are equivalent to those in ClipperLib.ClipType.
   * @enum {number}
   */
  static CLIP_TYPES = Object.freeze({
    INTERSECT: 0,
    UNION: 1
  });

  /**
   * The supported intersection types.
   * OUT_IN: the edge crosses from outside the clip object to inside it.
   * IN_OUT: the edge crosses from inside the clip object to outside it.
   * TANGENT: the edge touches the clip object boundary without crossing.
   * @enum {number}
   */
  static INTERSECTION_TYPES = Object.freeze({
    OUT_IN: -1,
    IN_OUT: 1,
    TANGENT: 0
  });

  /** @type {PIXI.Polygon} */
  polygon;

  /** @type {PIXI.Rectangle|PIXI.Circle} */
  clipObject;

  /**
   * Configuration settings
   * @type {object} [config]
   * @param {WeilerAthertonClipper.CLIP_TYPES} [config.clipType]  One of CLIP_TYPES
   * @param {object} [config.clipOpts]                            Object passed to the clippingObject methods
   *                                                              toPolygon and pointsBetween
   */
  config = {};

  /* -------------------------------------------- */

  /**
   * Union a polygon and clipObject using the Weiler Atherton algorithm.
   * @param {PIXI.Polygon} polygon                   Polygon to clip
   * @param {PIXI.Rectangle|PIXI.Circle} clipObject  Object to clip against the polygon
   * @param {object} clipOpts                        Options passed to the clipping object
   *                                                 methods toPolygon and pointsBetween
   * @returns {PIXI.Polygon[]}
   */
  static union(polygon, clipObject, clipOpts = {}) {
    return this.combine(polygon, clipObject, {clipType: this.CLIP_TYPES.UNION, ...clipOpts});
  }

  /* -------------------------------------------- */

  /**
   * Intersect a polygon and clipObject using the Weiler Atherton algorithm.
   * @param {PIXI.Polygon} polygon                   Polygon to clip
   * @param {PIXI.Rectangle|PIXI.Circle} clipObject  Object to clip against the polygon
   * @param {object} clipOpts                        Options passed to the clipping object
   *                                                 methods toPolygon and pointsBetween
   * @returns {PIXI.Polygon[]}
   */
  static intersect(polygon, clipObject, clipOpts = {}) {
    return this.combine(polygon, clipObject, {clipType: this.CLIP_TYPES.INTERSECT, ...clipOpts});
  }

  /* -------------------------------------------- */

  /**
   * Clip a given clipObject using the Weiler-Atherton algorithm.
   *
   * At the moment, this will return a single PIXI.Polygon in the array unless clipType is a union and the polygon
   * and clipObject do not overlap, in which case the [polygon, clipObject.toPolygon()] array will be returned.
   * If this algorithm is expanded in the future to handle holes, an array of polygons may be returned.
   *
   * @param {PIXI.Polygon} polygon                   Polygon to clip
   * @param {PIXI.Rectangle|PIXI.Circle} clipObject  Object to clip against the polygon
   * @param {object} [options]                       Options which configure how the union or intersection is computed
   * @param {WeilerAthertonClipper.CLIP_TYPES} [options.clipType]  One of CLIP_TYPES
   * @param {boolean} [options.canMutate]            If the WeilerAtherton constructor could mutate or not
   *                                                 the subject polygon points
   * @param {object} [options.clipOpts]              Options passed to the WeilerAthertonClipper constructor
   * @returns {PIXI.Polygon[]}                       Array of polygons and clipObjects
   * @throws {Error}                                 If clipType is neither INTERSECT nor UNION
   */
  static combine(polygon, clipObject, {clipType, canMutate, ...clipOpts}={}) {
    if ( (clipType !== this.CLIP_TYPES.INTERSECT) && (clipType !== this.CLIP_TYPES.UNION) ) {
      throw new Error("The Weiler-Atherton clipping algorithm only supports INTERSECT or UNION clip types.");
    }
    // If permitted, repair a counter-clockwise subject polygon in place rather than throwing in the constructor.
    if ( canMutate && !polygon.isPositive ) polygon.reverseOrientation();
    const wa = new this(polygon, clipObject, clipType, clipOpts);
    const trackingArray = wa.#buildPointTrackingArray();
    // No intersections at all: either one shape envelops the other, or they are disjoint.
    if ( !trackingArray.length ) return this.testForEnvelopment(polygon, clipObject, clipType, clipOpts);
    return wa.#combineNoHoles(trackingArray);
  }

  /* -------------------------------------------- */

  /**
   * Clip the polygon with the clipObject, assuming no holes will be created.
   * For a union or intersect with no holes, a single pass through the intersections will
   * build the resulting union shape.
   * @param {PolygonVertex[]} trackingArray  Array of linked points and intersections
   * @returns {[PIXI.Polygon]}
   */
  #combineNoHoles(trackingArray) {
    const clipType = this.config.clipType;
    const ln = trackingArray.length;
    let prevIx = trackingArray[ln - 1];
    // Starting state: (type === OUT_IN) coerces to 0/1 and is XORed against the clipType
    // (INTERSECT = 0, UNION = 1). Truthy means we begin by tracing the polygon's boundary;
    // falsy means we begin by tracing the clipObject's boundary.
    let wasTracingPolygon = (prevIx.type === this.constructor.INTERSECTION_TYPES.OUT_IN) ^ clipType;
    const newPoly = new PIXI.Polygon();
    for ( let i = 0; i < ln; i += 1 ) {
      const ix = trackingArray[i];
      this.#processIntersection(ix, prevIx, wasTracingPolygon, newPoly);
      // Every intersection switches the trace between the polygon and the clipObject boundary.
      wasTracingPolygon = !wasTracingPolygon;
      prevIx = ix;
    }
    return [newPoly];
  }

  /* -------------------------------------------- */

  /**
   * Given an intersection and the previous intersection, fill the points
   * between the two intersections, in clockwise order.
   * @param {PolygonVertex} ix            Intersection to process
   * @param {PolygonVertex} prevIx        Previous intersection to process
   * @param {boolean} wasTracingPolygon   Whether we were tracing the polygon (true) or the clipObject (false).
   * @param {PIXI.Polygon} newPoly        The new polygon that results from this clipping operation
   */
  #processIntersection(ix, prevIx, wasTracingPolygon, newPoly) {
    const clipOpts = this.config.clipOpts;
    // Polygon points between intersections were precomputed (ix.leadingPoints); clipObject
    // boundary points between the two intersections are generated on demand by the clip shape.
    const pts = wasTracingPolygon ? ix.leadingPoints : this.clipObject.pointsBetween(prevIx, ix, clipOpts);
    for ( const pt of pts ) newPoly.addPoint(pt);
    newPoly.addPoint(ix);
  }

  /* -------------------------------------------- */

  /**
   * Test if one shape envelops the other. Assumes the shapes do not intersect.
   *  1. Polygon is contained within the clip object. Union: clip object; Intersect: polygon
   *  2. Clip object is contained with polygon. Union: polygon; Intersect: clip object
   *  3. Polygon and clip object are outside one another. Union: both; Intersect: null
   * @param {PIXI.Polygon} polygon                   Polygon to clip
   * @param {PIXI.Rectangle|PIXI.Circle} clipObject  Object to clip against the polygon
   * @param {WeilerAthertonClipper.CLIP_TYPES} clipType  One of CLIP_TYPES
   * @param {object} clipOpts                        Clip options which are forwarded to toPolygon methods
   * @returns {PIXI.Polygon[]}                       Returns the polygon, the clipObject.toPolygon(), both, or neither.
   */
  static testForEnvelopment(polygon, clipObject, clipType, clipOpts) {
    const points = polygon.points;
    // Fewer than 3 coordinate pairs is a degenerate polygon: no result either way.
    if ( points.length < 6 ) return [];
    const union = clipType === this.CLIP_TYPES.UNION;

    // Option 1: Polygon contained within clipObject
    // We search for the first point of the polygon that is not on the boundary of the clip object.
    // One of these points can be used to determine whether the polygon is contained in the clip object.
    // If all points of the polygon are on the boundary of the clip object, which is either a circle
    // or a rectangle, then the polygon is contained within the clip object.
    let polygonInClipObject = true;
    for ( let i = 0; i < points.length; i += 2 ) {
      const point = { x: points[i], y: points[i + 1] };
      if ( !clipObject.pointIsOn(point) ) {
        polygonInClipObject = clipObject.contains(point.x, point.y);
        break;
      }
    }
    if ( polygonInClipObject ) return union ? [clipObject.toPolygon(clipOpts)] : [polygon];

    // Option 2: ClipObject contained within polygon
    // Testing only the center suffices because the shapes are assumed not to intersect.
    const center = clipObject.center;
    // PointSourcePolygons need to have a bounds defined in order for polygon.contains to work.
    if ( polygon instanceof PointSourcePolygon ) polygon.bounds ??= polygon.getBounds();
    const clipObjectInPolygon = polygon.contains(center.x, center.y);
    if ( clipObjectInPolygon ) return union ? [polygon] : [clipObject.toPolygon(clipOpts)];

    // Option 3: Neither contains the other
    return union ? [polygon, clipObject.toPolygon(clipOpts)] : [];
  }

  /* -------------------------------------------- */

  /**
   * Construct an array of intersections between the polygon and the clipping object.
   * The intersections follow clockwise around the polygon.
   * Round all intersections and polygon vertices to the nearest pixel (integer).
   * @returns {Point[]}   Empty array if the shapes do not genuinely intersect.
   */
  #buildPointTrackingArray() {
    const labeledPoints = this.#buildIntersectionArray();
    if ( !labeledPoints.length ) return [];
    return WeilerAthertonClipper.#consolidatePoints(labeledPoints);
  }

  /* -------------------------------------------- */

  /**
   * Construct an array that holds all the points of the polygon with all the intersections with the clipObject
   * inserted, in correct position moving clockwise.
   * If an intersection and endpoint are nearly the same, prefer the intersection.
   * Intersections are labeled with isIntersection and type = out/in or in/out. Tangents are removed.
   * @returns {Point[]} Labeled array of points
   */
  #buildIntersectionArray() {
    const { polygon, clipObject } = this;
    const points = polygon.points;
    const ln = points.length;
    if ( ln < 6 ) return []; // Minimum 3 Points required

    // Need to start with a non-intersecting point on the polygon.
    let startIdx = -1;
    let a;
    for ( let i = 0; i < ln; i += 2 ) {
      a = { x: points[i], y: points[i + 1] };
      if ( !clipObject.pointIsOn(a) ) {
        startIdx = i;
        break;
      }
    }
    if ( !~startIdx ) return []; // All intersections, so all tangent

    // For each edge a|b, find the intersection point(s) with the clipObject.
    // Add intersections and endpoints to the pointsIxs array, taking care to avoid duplicating
    // points. For example, if the intersection equals a, add only the intersection, not both.
    let previousInside = clipObject.contains(a.x, a.y);
    let numPrevIx = 0;        // Intersections seen since the last confirmed non-intersection point
    let lastIx = undefined;
    let secondLastIx = undefined;
    const pointsIxs = [a];
    const types = this.constructor.INTERSECTION_TYPES;
    const nIter = startIdx + ln + 2; // Add +2 to close the polygon.
    for ( let i = startIdx + 2; i < nIter; i += 2 ) {
      const j = i >= ln ? i % ln : i; // Circle back around the points as necessary.
      const b = { x: points[j], y: points[j + 1] };
      const ixs = clipObject.segmentIntersections(a, b);
      const ixsLn = ixs.length;
      let bIsIx = false;
      if ( ixsLn ) {
        // NOTE(review): assumes segmentIntersections returns points ordered from a toward b,
        // so the last entry is the one that may coincide with b — confirm against the clip shapes.
        bIsIx = b.x.almostEqual(ixs[ixsLn - 1].x) && b.y.almostEqual(ixs[ixsLn - 1].y);

        // If the intersection equals the current b, get that intersection next iteration.
        if ( bIsIx ) ixs.pop();

        // Determine whether the intersection is out-->in or in-->out.
        // Consecutive crossings must alternate direction; the first crossing since the last
        // settled point takes its direction from whether that point was inside the clipObject.
        numPrevIx += ixs.length;
        for ( const ix of ixs ) {
          ix.isIntersection = true;
          ix.type = lastIx ? -lastIx.type : previousInside ? types.IN_OUT : types.OUT_IN;
          secondLastIx = lastIx;
          lastIx = ix;
        }
        pointsIxs.push(...ixs);
      }

      // If b is an intersection, we will return to it next iteration.
      if ( bIsIx ) {
        a = b;
        continue;
      }

      // Each intersection represents a move across the clipObject border.
      // Count them and determine if we are now inside or outside the clipObject.
      if ( numPrevIx ) {
        const isInside = clipObject.contains(b.x, b.y);
        const changedSide = isInside ^ previousInside;
        const isOdd = numPrevIx & 1;
        // If odd number of intersections, should switch. e.g., outside --> ix --> inside
        // If even number of intersections, should stay same. e.g., outside --> ix --> ix --> outside.
        // A parity mismatch means one recorded "intersection" was actually a tangent touch:
        // demote it so it does not flip the polygon/clipObject trace in #combineNoHoles.
        if ( isOdd ^ changedSide ) {
          if ( numPrevIx === 1 ) lastIx.isIntersection = false;
          else {
            secondLastIx.isIntersection = false;
            lastIx.type = secondLastIx.type;
          }
        }
        previousInside = isInside;
        numPrevIx = 0;
        secondLastIx = undefined;
        lastIx = undefined;
      }
      pointsIxs.push(b);
      a = b;
    }
    return pointsIxs;
  }

  /* -------------------------------------------- */

  /**
   * Given an array of labeled points, consolidate into a tracking array of intersections,
   * where each intersection contains its array of leadingPoints.
   * @param {Point[]} labeledPoints  Array of points, from #buildIntersectionArray
   * @returns {Point[]}              Array of intersections
   */
  static #consolidatePoints(labeledPoints) {
    // Locate the first intersection
    const startIxIdx = labeledPoints.findIndex(pt => pt.isIntersection);
    if ( !~startIxIdx ) return []; // No intersections, so no tracking array

    const labeledLn = labeledPoints.length;
    let leadingPoints = [];
    const trackingArray = [];

    // Closed polygon, so use the last point to circle back.
    // Iterate starting at the first intersection; each intersection claims the plain polygon
    // points accumulated since the previous intersection as its leadingPoints.
    for ( let i = 0; i < labeledLn; i += 1 ) {
      const j = (i + startIxIdx) % labeledLn;
      const pt = labeledPoints[j];
      if ( pt.isIntersection ) {
        pt.leadingPoints = leadingPoints;
        leadingPoints = [];
        trackingArray.push(pt);
      } else leadingPoints.push(pt);
    }

    // Add leading points to first intersection (the wrap-around tail of the loop above).
    trackingArray[0].leadingPoints = leadingPoints;
    return trackingArray;
  }
}