// Generated by dts-bundle-generator v9.3.1

/**
 * Minimal `EventEmitter` interface that is molded against the Node.js
 * `EventEmitter` interface.
 */
export declare class EventEmitter<EventTypes extends EventEmitter.ValidEventTypes = string | symbol, Context extends any = any> {
    static prefixed: string | boolean;
    /**
     * Return an array listing the events for which the emitter has registered
     * listeners.
     */
    eventNames(): Array<EventEmitter.EventNames<EventTypes>>;
    /**
     * Return the listeners registered for a given event.
     */
    listeners<T extends EventEmitter.EventNames<EventTypes>>(event: T): Array<EventEmitter.EventListener<EventTypes, T>>;
    /**
     * Return the number of listeners listening to a given event.
     */
    listenerCount(event: EventEmitter.EventNames<EventTypes>): number;
    /**
     * Calls each of the listeners registered for a given event.
     */
    emit<T extends EventEmitter.EventNames<EventTypes>>(event: T, ...args: EventEmitter.EventArgs<EventTypes, T>): boolean;
    /**
     * Add a listener for a given event.
     */
    on<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn: EventEmitter.EventListener<EventTypes, T>, context?: Context): this;
    addListener<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn: EventEmitter.EventListener<EventTypes, T>, context?: Context): this;
    /**
     * Add a one-time listener for a given event.
     */
    once<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn: EventEmitter.EventListener<EventTypes, T>, context?: Context): this;
    /**
     * Remove the listeners of a given event.
     */
    removeListener<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn?: EventEmitter.EventListener<EventTypes, T>, context?: Context, once?: boolean): this;
    off<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn?: EventEmitter.EventListener<EventTypes, T>, context?: Context, once?: boolean): this;
    /**
     * Remove all listeners, or those of the specified event.
     */
    removeAllListeners(event?: EventEmitter.EventNames<EventTypes>): this;
}
export declare namespace EventEmitter {
    export interface ListenerFn<Args extends any[] = any[]> {
        (...args: Args): void;
    }
    export interface EventEmitterStatic {
        new <EventTypes extends ValidEventTypes = string | symbol, Context extends any = any>(): EventEmitter<EventTypes, Context>;
    }
    /**
     * `object` should be in either of the following forms:
     * ```
     * interface EventTypes {
     *   'event-with-parameters': any[]
     *   'event-with-example-handler': (...args: any[]) => void
     * }
     * ```
     */
    export type ValidEventTypes = string | symbol | object;
    export type EventNames<T extends ValidEventTypes> = T extends string | symbol ? T : keyof T;
    export type ArgumentMap<T extends object> = {
        [K in keyof T]: T[K] extends (...args: any[]) => void ? Parameters<T[K]> : T[K] extends any[] ? T[K] : any[];
    };
    export type EventListener<T extends ValidEventTypes, K extends EventNames<T>> = T extends string | symbol ? (...args: any[]) => void : (...args: ArgumentMap<Exclude<T, string | symbol>>[Extract<K, keyof T>]) => void;
    export type EventArgs<T extends ValidEventTypes, K extends EventNames<T>> = Parameters<EventListener<T, K>>;
    export const EventEmitter: EventEmitterStatic;
}
declare type RgbColor = {
    r: number;
    g: number;
    b: number;
};
declare type HslColor = {
    h: number;
    s: number;
    l: number;
};
declare type HsvColor = {
    h: number;
    s: number;
    v: number;
};
declare type WithAlpha<O> = O & {
    a: number;
};
declare type RgbaColor = WithAlpha<RgbColor>;
declare type HslaColor = WithAlpha<HslColor>;
declare type HsvaColor = WithAlpha<HsvColor>;
/**
 * Pixi supports multiple color formats, including CSS color strings, hex, numbers, and arrays.
 *
 * When providing values for any of the color properties, you can use any of the {@link color.ColorSource} formats.
 * ```typescript
 * import { Color } from 'pixi.js';
 *
 * // All of these are valid:
 * sprite.tint = 'red';
 * sprite.tint = 0xff0000;
 * sprite.tint = '#ff0000';
 * sprite.tint = new Color('red');
 *
 * // Same for graphics fill/stroke colors and other color values:
 * graphics.fill({ color: 'red' });
 * graphics.fill({ color: 0xff0000 });
 * graphics.stroke({ color: '#ff0000' });
 * graphics.stroke({ color: new Color('red') });
 * ```
 * @namespace color
 */
/**
 * RGBA color array.
 *
 * `[number, number, number, number]`
 * @memberof color
 */
export type RgbaArray = [
    number,
    number,
    number,
    number
];
/**
 * Valid formats to use when defining any color properties, also valid for the {@link color.Color} constructor.
* * These types are extended from [colord](https://www.npmjs.com/package/colord) with some PixiJS-specific extensions. * * Possible value types are: * - [Color names](https://www.w3.org/TR/css-color-4/#named-colors): * `'red'`, `'green'`, `'blue'`, `'white'`, etc. * - RGB hex integers (`0xRRGGBB`): * `0xff0000`, `0x00ff00`, `0x0000ff`, etc. * - [RGB(A) hex strings](https://www.w3.org/TR/css-color-4/#hex-notation): * - 6 digits (`RRGGBB`): `'ff0000'`, `'#00ff00'`, `'0x0000ff'`, etc. * - 3 digits (`RGB`): `'f00'`, `'#0f0'`, `'0x00f'`, etc. * - 8 digits (`RRGGBBAA`): `'ff000080'`, `'#00ff0080'`, `'0x0000ff80'`, etc. * - 4 digits (`RGBA`): `'f008'`, `'#0f08'`, `'0x00f8'`, etc. * - RGB(A) objects: * `{ r: 255, g: 0, b: 0 }`, `{ r: 255, g: 0, b: 0, a: 0.5 }`, etc. * - [RGB(A) strings](https://www.w3.org/TR/css-color-4/#rgb-functions): * `'rgb(255, 0, 0)'`, `'rgb(100% 0% 0%)'`, `'rgba(255, 0, 0, 0.5)'`, `'rgba(100% 0% 0% / 50%)'`, etc. * - RGB(A) arrays: * `[1, 0, 0]`, `[1, 0, 0, 0.5]`, etc. * - RGB(A) Float32Array: * `new Float32Array([1, 0, 0])`, `new Float32Array([1, 0, 0, 0.5])`, etc. * - RGB(A) Uint8Array: * `new Uint8Array([255, 0, 0])`, `new Uint8Array([255, 0, 0, 128])`, etc. * - RGB(A) Uint8ClampedArray: * `new Uint8ClampedArray([255, 0, 0])`, `new Uint8ClampedArray([255, 0, 0, 128])`, etc. * - HSL(A) objects: * `{ h: 0, s: 100, l: 50 }`, `{ h: 0, s: 100, l: 50, a: 0.5 }`, etc. * - [HSL(A) strings](https://www.w3.org/TR/css-color-4/#the-hsl-notation): * `'hsl(0, 100%, 50%)'`, `'hsl(0deg 100% 50%)'`, `'hsla(0, 100%, 50%, 0.5)'`, `'hsla(0deg 100% 50% / 50%)'`, etc. * - HSV(A) objects: * `{ h: 0, s: 100, v: 100 }`, `{ h: 0, s: 100, v: 100, a: 0.5 }`, etc. * - {@link color.Color} objects. * @since 7.2.0 * @memberof color */ export type ColorSource = string | number | number[] | Float32Array | Uint8Array | Uint8ClampedArray | HslColor | HslaColor | HsvColor | HsvaColor | RgbColor | RgbaColor | Color | Number; /** * Color utility class. Can accept any {@link color.ColorSource} format in its constructor. * ```js * import { Color } from 'pixi.js'; * * new Color('red').toArray(); // [1, 0, 0, 1] * new Color(0xff0000).toArray(); // [1, 0, 0, 1] * new Color('ff0000').toArray(); // [1, 0, 0, 1] * new Color('#f00').toArray(); // [1, 0, 0, 1] * new Color('0xff0000ff').toArray(); // [1, 0, 0, 1] * new Color('#f00f').toArray(); // [1, 0, 0, 1] * new Color({ r: 255, g: 0, b: 0, a: 0.5 }).toArray(); // [1, 0, 0, 0.5] * new Color('rgb(255, 0, 0, 0.5)').toArray(); // [1, 0, 0, 0.5] * new Color([1, 1, 1]).toArray(); // [1, 1, 1, 1] * new Color([1, 0, 0, 0.5]).toArray(); // [1, 0, 0, 0.5] * new Color(new Float32Array([1, 0, 0, 0.5])).toArray(); // [1, 0, 0, 0.5] * new Color(new Uint8Array([255, 0, 0, 255])).toArray(); // [1, 0, 0, 1] * new Color(new Uint8ClampedArray([255, 0, 0, 255])).toArray(); // [1, 0, 0, 1] * new Color({ h: 0, s: 100, l: 50, a: 0.5 }).toArray(); // [1, 0, 0, 0.5] * new Color('hsl(0, 100%, 50%, 50%)').toArray(); // [1, 0, 0, 0.5] * new Color({ h: 0, s: 100, v: 100, a: 0.5 }).toArray(); // [1, 0, 0, 0.5] * ``` * @since 7.2.0 * @memberof color */ export declare class Color { /** * Default Color object for static uses * @example * import { Color } from 'pixi.js'; * Color.shared.setValue(0xffffff).toHex(); // '#ffffff' */ static readonly shared: Color; /** * Temporary Color object for static uses internally. * As to not conflict with Color.shared. 
* @ignore */ private static readonly _temp; /** Pattern for hex strings */ private static readonly HEX_PATTERN; /** Internal color source, from constructor or set value */ private _value; /** Normalized rgba component, floats from 0-1 */ private _components; /** Cache color as number */ private _int; /** An array of the current Color. Only populated when `toArray` functions are called */ private _arrayRgba; private _arrayRgb; /** * @param {ColorSource} value - Optional value to use, if not provided, white is used. */ constructor(value?: ColorSource); /** Get red component (0 - 1) */ get red(): number; /** Get green component (0 - 1) */ get green(): number; /** Get blue component (0 - 1) */ get blue(): number; /** Get alpha component (0 - 1) */ get alpha(): number; /** * Set the value, suitable for chaining * @param value * @see Color.value */ setValue(value: ColorSource): this; /** * The current color source. * * When setting: * - Setting to an instance of `Color` will copy its color source and components. * - Otherwise, `Color` will try to normalize the color source and set the components. * If the color source is invalid, an `Error` will be thrown and the `Color` will left unchanged. * * Note: The `null` in the setter's parameter type is added to match the TypeScript rule: return type of getter * must be assignable to its setter's parameter type. Setting `value` to `null` will throw an `Error`. * * When getting: * - A return value of `null` means the previous value was overridden (e.g., {@link Color.multiply multiply}, * {@link Color.premultiply premultiply} or {@link Color.round round}). * - Otherwise, the color source used when setting is returned. */ set value(value: ColorSource | null); get value(): Exclude | null; /** * Copy a color source internally. * @param value - Color source */ private _cloneSource; /** * Equality check for color sources. * @param value1 - First color source * @param value2 - Second color source * @returns `true` if the color sources are equal, `false` otherwise. */ private _isSourceEqual; /** * Convert to a RGBA color object. * @example * import { Color } from 'pixi.js'; * new Color('white').toRgb(); // returns { r: 1, g: 1, b: 1, a: 1 } */ toRgba(): RgbaColor; /** * Convert to a RGB color object. * @example * import { Color } from 'pixi.js'; * new Color('white').toRgb(); // returns { r: 1, g: 1, b: 1 } */ toRgb(): RgbColor; /** Convert to a CSS-style rgba string: `rgba(255,255,255,1.0)`. */ toRgbaString(): string; /** * Convert to an [R, G, B] array of clamped uint8 values (0 to 255). * @example * import { Color } from 'pixi.js'; * new Color('white').toUint8RgbArray(); // returns [255, 255, 255] * @param {number[]|Uint8Array|Uint8ClampedArray} [out] - Output array */ toUint8RgbArray(): number[]; toUint8RgbArray(out: T): T; /** * Convert to an [R, G, B, A] array of normalized floats (numbers from 0.0 to 1.0). * @example * import { Color } from 'pixi.js'; * new Color('white').toArray(); // returns [1, 1, 1, 1] * @param {number[]|Float32Array} [out] - Output array */ toArray(): number[]; toArray(out: T): T; /** * Convert to an [R, G, B] array of normalized floats (numbers from 0.0 to 1.0). * @example * import { Color } from 'pixi.js'; * new Color('white').toRgbArray(); // returns [1, 1, 1] * @param {number[]|Float32Array} [out] - Output array */ toRgbArray(): number[]; toRgbArray(out: T): T; /** * Convert to a hexadecimal number. 
* @example * import { Color } from 'pixi.js'; * new Color('white').toNumber(); // returns 16777215 */ toNumber(): number; /** * Convert to a BGR number * @example * import { Color } from 'pixi.js'; * new Color(0xffcc99).toBgrNumber(); // returns 0x99ccff */ toBgrNumber(): number; /** * Convert to a hexadecimal number in little endian format (e.g., BBGGRR). * @example * import { Color } from 'pixi.js'; * new Color(0xffcc99).toLittleEndianNumber(); // returns 0x99ccff * @returns {number} - The color as a number in little endian format. */ toLittleEndianNumber(): number; /** * Multiply with another color. This action is destructive, and will * override the previous `value` property to be `null`. * @param {ColorSource} value - The color to multiply by. */ multiply(value: ColorSource): this; /** * Converts color to a premultiplied alpha format. This action is destructive, and will * override the previous `value` property to be `null`. * @param alpha - The alpha to multiply by. * @param {boolean} [applyToRGB=true] - Whether to premultiply RGB channels. * @returns {Color} - Itself. */ premultiply(alpha: number, applyToRGB?: boolean): this; /** * Premultiplies alpha with current color. * @param {number} alpha - The alpha to multiply by. * @param {boolean} [applyToRGB=true] - Whether to premultiply RGB channels. * @returns {number} tint multiplied by alpha */ toPremultiplied(alpha: number, applyToRGB?: boolean): number; /** * Convert to a hexadecimal string. * @example * import { Color } from 'pixi.js'; * new Color('white').toHex(); // returns "#ffffff" */ toHex(): string; /** * Convert to a hexadecimal string with alpha. * @example * import { Color } from 'pixi.js'; * new Color('white').toHexa(); // returns "#ffffffff" */ toHexa(): string; /** * Set alpha, suitable for chaining. * @param alpha */ setAlpha(alpha: number): this; /** * Normalize the input value into rgba * @param value - Input value */ private _normalize; /** Refresh the internal color rgb number */ private _refreshInt; /** * Clamps values to a range. Will override original values * @param value - Value(s) to clamp * @param min - Minimum value * @param max - Maximum value */ private _clamp; /** * Check if the value is a color-like object * @param value - Value to check * @returns True if the value is a color-like object * @static * @example * import { Color } from 'pixi.js'; * Color.isColorLike('white'); // returns true * Color.isColorLike(0xffffff); // returns true * Color.isColorLike([1, 1, 1]); // returns true */ static isColorLike(value: unknown): value is ColorSource; } /** * Common interface for points. Both Point and ObservablePoint implement it * @memberof maths */ export interface PointData { /** X coord */ x: number; /** Y coord */ y: number; } /** * Common interface for points. Both Point and ObservablePoint implement it * @memberof maths */ export interface PointLike extends PointData { /** * Copies x and y from the given point * @param {PointData} p - The point to copy from * @returns {this} Returns itself. */ copyFrom: (p: PointData) => this; /** * Copies x and y into the given point * @param {PointLike} p - The point to copy. * @returns {PointLike} Given point with values updated */ copyTo: (p: T) => T; /** * Returns true if the given point is equal to this point * @param {PointData} p - The point to check * @returns {boolean} Whether the given point equal to this point */ equals: (p: PointData) => boolean; /** * Sets the point to a new x and y position. * If y is omitted, both x and y will be set to x. 
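 * A minimal sketch of that default, using the `Point` class declared below:
 * ```ts
 * import { Point } from 'pixi.js';
 *
 * const point = new Point();
 * point.set(5);    // x = 5, y = 5 (y falls back to x)
 * point.set(2, 3); // x = 2, y = 3
 * ```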
* @param {number} [x=0] - position of the point on the x axis * @param {number} [y=x] - position of the point on the y axis */ set: (x?: number, y?: number) => void; } export interface Point extends PixiMixins.Point { } /** * The Point object represents a location in a two-dimensional coordinate system, where `x` represents * the position on the horizontal axis and `y` represents the position on the vertical axis. *
* Many Pixi functions accept the `PointData` type as an alternative to `Point`, * which only requires `x` and `y` properties. * @class * @implements {PointLike} * @memberof maths */ export declare class Point implements PointLike { /** Position of the point on the x axis */ x: number; /** Position of the point on the y axis */ y: number; /** * Creates a new `Point` * @param {number} [x=0] - position of the point on the x axis * @param {number} [y=0] - position of the point on the y axis */ constructor(x?: number, y?: number); /** * Creates a clone of this point * @returns A clone of this point */ clone(): Point; /** * Copies `x` and `y` from the given point into this point * @param p - The point to copy from * @returns The point instance itself */ copyFrom(p: PointData): this; /** * Copies this point's x and y into the given point (`p`). * @param p - The point to copy to. Can be any of type that is or extends `PointData` * @returns The point (`p`) with values updated */ copyTo(p: T): T; /** * Accepts another point (`p`) and returns `true` if the given point is equal to this point * @param p - The point to check * @returns Returns `true` if both `x` and `y` are equal */ equals(p: PointData): boolean; /** * Sets the point to a new `x` and `y` position. * If `y` is omitted, both `x` and `y` will be set to `x`. * @param {number} [x=0] - position of the point on the `x` axis * @param {number} [y=x] - position of the point on the `y` axis * @returns The point instance itself */ set(x?: number, y?: number): this; toString(): string; /** * A static Point object with `x` and `y` values of `0`. Can be used to avoid creating new objects multiple times. * @readonly */ static get shared(): Point; } interface TransformableObject { position: PointData; scale: PointData; pivot: PointData; skew: PointData; rotation: number; } /** * A fast matrix for 2D transformations. * ```js * | a | c | tx| * | b | d | ty| * | 0 | 0 | 1 | * ``` * @memberof maths */ export declare class Matrix { /** @default 1 */ a: number; /** @default 0 */ b: number; /** @default 0 */ c: number; /** @default 1 */ d: number; /** @default 0 */ tx: number; /** @default 0 */ ty: number; /** An array of the current matrix. Only populated when `toArray` is called */ array: Float32Array | null; /** * @param a - x scale * @param b - y skew * @param c - x skew * @param d - y scale * @param tx - x translation * @param ty - y translation */ constructor(a?: number, b?: number, c?: number, d?: number, tx?: number, ty?: number); /** * Creates a Matrix object based on the given array. The Element to Matrix mapping order is as follows: * * a = array[0] * b = array[1] * c = array[3] * d = array[4] * tx = array[2] * ty = array[5] * @param array - The array that the matrix will be populated from. */ fromArray(array: number[]): void; /** * Sets the matrix properties. * @param a - Matrix component * @param b - Matrix component * @param c - Matrix component * @param d - Matrix component * @param tx - Matrix component * @param ty - Matrix component * @returns This matrix. Good for chaining method calls. */ set(a: number, b: number, c: number, d: number, tx: number, ty: number): this; /** * Creates an array from the current Matrix object. 
 * @param transpose - Whether we need to transpose the matrix or not
 * @param [out=new Float32Array(9)] - If provided the array will be assigned to out
 * @returns The newly created array which contains the matrix
 */
toArray(transpose?: boolean, out?: Float32Array): Float32Array;
/**
 * Get a new position with the current transformation applied.
 * Can be used to go from a child's coordinate space to the world coordinate space. (e.g. rendering)
 * @param pos - The origin
 * @param {Point} [newPos] - The point that the new position is assigned to (allowed to be same as input)
 * @returns {Point} The new point, transformed through this matrix
 */
apply<P extends PointData = Point>(pos: PointData, newPos?: P): P;
/**
 * Get a new position with the inverse of the current transformation applied.
 * Can be used to go from the world coordinate space to a child's coordinate space. (e.g. input)
 * @param pos - The origin
 * @param {Point} [newPos] - The point that the new position is assigned to (allowed to be same as input)
 * @returns {Point} The new point, inverse-transformed through this matrix
 */
applyInverse<P extends PointData = Point>
(pos: PointData, newPos?: P): P; /** * Translates the matrix on the x and y. * @param x - How much to translate x by * @param y - How much to translate y by * @returns This matrix. Good for chaining method calls. */ translate(x: number, y: number): this; /** * Applies a scale transformation to the matrix. * @param x - The amount to scale horizontally * @param y - The amount to scale vertically * @returns This matrix. Good for chaining method calls. */ scale(x: number, y: number): this; /** * Applies a rotation transformation to the matrix. * @param angle - The angle in radians. * @returns This matrix. Good for chaining method calls. */ rotate(angle: number): this; /** * Appends the given Matrix to this Matrix. * @param matrix - The matrix to append. * @returns This matrix. Good for chaining method calls. */ append(matrix: Matrix): this; /** * Appends two matrix's and sets the result to this matrix. AB = A * B * @param a - The matrix to append. * @param b - The matrix to append. * @returns This matrix. Good for chaining method calls. */ appendFrom(a: Matrix, b: Matrix): this; /** * Sets the matrix based on all the available properties * @param x - Position on the x axis * @param y - Position on the y axis * @param pivotX - Pivot on the x axis * @param pivotY - Pivot on the y axis * @param scaleX - Scale on the x axis * @param scaleY - Scale on the y axis * @param rotation - Rotation in radians * @param skewX - Skew on the x axis * @param skewY - Skew on the y axis * @returns This matrix. Good for chaining method calls. */ setTransform(x: number, y: number, pivotX: number, pivotY: number, scaleX: number, scaleY: number, rotation: number, skewX: number, skewY: number): this; /** * Prepends the given Matrix to this Matrix. * @param matrix - The matrix to prepend * @returns This matrix. Good for chaining method calls. */ prepend(matrix: Matrix): this; /** * Decomposes the matrix (x, y, scaleX, scaleY, and rotation) and sets the properties on to a transform. * @param transform - The transform to apply the properties to. * @returns The transform with the newly applied properties */ decompose(transform: TransformableObject): TransformableObject; /** * Inverts this matrix * @returns This matrix. Good for chaining method calls. */ invert(): this; /** Checks if this matrix is an identity matrix */ isIdentity(): boolean; /** * Resets this Matrix to an identity (default) matrix. * @returns This matrix. Good for chaining method calls. */ identity(): this; /** * Creates a new Matrix object with the same values as this one. * @returns A copy of this matrix. Good for chaining method calls. */ clone(): Matrix; /** * Changes the values of the given matrix to be the same as the ones in this matrix * @param matrix - The matrix to copy to. * @returns The matrix given in parameter with its values updated. */ copyTo(matrix: Matrix): Matrix; /** * Changes the values of the matrix to be the same as the ones in given matrix * @param matrix - The matrix to copy from. * @returns this */ copyFrom(matrix: Matrix): this; /** * check to see if two matrices are the same * @param matrix - The matrix to compare to. */ equals(matrix: Matrix): boolean; toString(): string; /** * A default (identity) matrix. * * This is a shared object, if you want to modify it consider creating a new `Matrix` * @readonly */ static get IDENTITY(): Readonly; /** * A static Matrix that can be used to avoid creating new objects. * Will always ensure the matrix is reset to identity when requested. 
* Use this object for fast but temporary calculations, as it may be mutated later on. * This is a different object to the `IDENTITY` object and so can be modified without changing `IDENTITY`. * @readonly */ static get shared(): Matrix; } export interface ObservablePoint extends PixiMixins.ObservablePoint { } /** * Observer used to listen for observable point changes. * @memberof maths */ export interface Observer { /** Callback to call when the point has updated. */ _onUpdate: (point?: T) => void; } /** * The ObservablePoint object represents a location in a two-dimensional coordinate system, where `x` represents * the position on the horizontal axis and `y` represents the position on the vertical axis. * * An `ObservablePoint` is a point that triggers the `onUpdate` method on an observer when the point's position is changed. * @memberof maths */ export declare class ObservablePoint implements PointLike { /** @ignore */ _x: number; /** @ignore */ _y: number; /** This object used to call the `onUpdate` callback when the point changes. */ private readonly _observer; /** * Creates a new `ObservablePoint` * @param observer - Observer to pass to listen for change events. * @param {number} [x=0] - position of the point on the x axis * @param {number} [y=0] - position of the point on the y axis */ constructor(observer: Observer, x?: number, y?: number); /** * Creates a clone of this point. * @param observer - Optional observer to pass to the new observable point. * @returns a copy of this observable point */ clone(observer?: Observer): ObservablePoint; /** * Sets the point to a new `x` and `y` position. * If `y` is omitted, both `x` and `y` will be set to `x`. * @param {number} [x=0] - position of the point on the x axis * @param {number} [y=x] - position of the point on the y axis * @returns The observable point instance itself */ set(x?: number, y?: number): this; /** * Copies x and y from the given point (`p`) * @param p - The point to copy from. Can be any of type that is or extends `PointData` * @returns The observable point instance itself */ copyFrom(p: PointData): this; /** * Copies this point's x and y into that of the given point (`p`) * @param p - The point to copy to. Can be any of type that is or extends `PointData` * @returns The point (`p`) with values updated */ copyTo(p: T): T; /** * Accepts another point (`p`) and returns `true` if the given point is equal to this point * @param p - The point to check * @returns Returns `true` if both `x` and `y` are equal */ equals(p: PointData): boolean; toString(): string; /** Position of the observable point on the x axis. */ get x(): number; set x(value: number); /** Position of the observable point on the y axis. */ get y(): number; set y(value: number); } /** * Two Pi. * @static * @member {number} * @memberof maths */ export declare const PI_2: number; /** * Conversion factor for converting radians to degrees. * @static * @member {number} RAD_TO_DEG * @memberof maths */ export declare const RAD_TO_DEG: number; /** * Conversion factor for converting degrees to radians. * @static * @member {number} * @memberof maths */ export declare const DEG_TO_RAD: number; /** * Constants that identify shapes, mainly to prevent `instanceof` calls. * @memberof maths */ export type SHAPE_PRIMITIVE = "polygon" | "rectangle" | "circle" | "ellipse" | "triangle" | "roundedRectangle"; /** * A basic object to define a Pixi shape. 
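 * For example, the {@link maths.Rectangle} declared below implements this interface; a minimal sketch:
 * ```ts
 * import { Rectangle } from 'pixi.js';
 *
 * const shape = new Rectangle(0, 0, 100, 50);
 * shape.contains(10, 10);         // true - the point lies inside the shape
 * shape.strokeContains(0, 25, 4); // tests the point against a 4px-wide stroke
 * shape.getBounds().width;        // 100
 * ```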
* @memberof maths */ export interface ShapePrimitive { /** The type of the object, mainly used to avoid `instanceof` checks */ readonly type: SHAPE_PRIMITIVE; /** Checks whether the x and y coordinates passed to this function are contained within this ShapePrimitive. */ contains(x: number, y: number): boolean; /** Checks whether the x and y coordinates passed to this function are contained within the stroke of this shape */ strokeContains(x: number, y: number, strokeWidth: number): boolean; /** Creates a clone of this ShapePrimitive instance. */ clone(): ShapePrimitive; /** Copies the properties from another ShapePrimitive to this ShapePrimitive. */ copyFrom(source: ShapePrimitive): void; /** Copies the properties from this ShapePrimitive to another ShapePrimitive. */ copyTo(destination: ShapePrimitive): void; /** Returns the framing rectangle of the ShapePrimitive as a Rectangle object. */ getBounds(out?: Rectangle): Rectangle; /** The X coordinate of the shape */ readonly x: number; /** The Y coordinate of the shape */ readonly y: number; } export interface Rectangle extends PixiMixins.Rectangle { } /** * The `Rectangle` object is an area defined by its position, as indicated by its top-left corner * point (`x`, `y`) and by its `width` and its `height`. * * It also provides convenience methods to get and set the position and size of the rectangle such as * {@link maths.Rectangle#bottom|bottom}, {@link maths.Rectangle#right|right} and {@link maths.Rectangle#isEmpty|isEmpty}. * @memberof maths */ export declare class Rectangle implements ShapePrimitive { /** * The type of the object, mainly used to avoid `instanceof` checks * @default 'rectangle' */ readonly type: SHAPE_PRIMITIVE; /** * The X coordinate of the upper-left corner of the rectangle * @default 0 */ x: number; /** * The Y coordinate of the upper-left corner of the rectangle * @default 0 */ y: number; /** * The overall width of this rectangle * @default 0 */ width: number; /** * The overall height of this rectangle * @default 0 */ height: number; /** * @param x - The X coordinate of the upper-left corner of the rectangle * @param y - The Y coordinate of the upper-left corner of the rectangle * @param width - The overall width of the rectangle * @param height - The overall height of the rectangle */ constructor(x?: string | number, y?: string | number, width?: string | number, height?: string | number); /** Returns the left edge of the rectangle. */ get left(): number; /** Returns the right edge of the rectangle. */ get right(): number; /** Returns the top edge of the rectangle. */ get top(): number; /** Returns the bottom edge of the rectangle. */ get bottom(): number; /** Determines whether the Rectangle is empty. */ isEmpty(): boolean; /** A constant empty rectangle. This is a new object every time the property is accessed */ static get EMPTY(): Rectangle; /** * Creates a clone of this Rectangle * @returns a copy of the rectangle */ clone(): Rectangle; /** * Converts a Bounds object to a Rectangle object. * @param bounds - The bounds to copy and convert to a rectangle. * @returns Returns itself. */ copyFromBounds(bounds: Bounds): this; /** * Copies another rectangle to this one. * @param rectangle - The rectangle to copy from. * @returns Returns itself. */ copyFrom(rectangle: Rectangle): Rectangle; /** * Copies this rectangle to another one. * @param rectangle - The rectangle to copy to. * @returns Returns given parameter. 
*/ copyTo(rectangle: Rectangle): Rectangle; /** * Checks whether the x and y coordinates given are contained within this Rectangle * @param x - The X coordinate of the point to test * @param y - The Y coordinate of the point to test * @returns Whether the x/y coordinates are within this Rectangle */ contains(x: number, y: number): boolean; /** * Checks whether the x and y coordinates given are contained within this rectangle including the stroke. * @param x - The X coordinate of the point to test * @param y - The Y coordinate of the point to test * @param strokeWidth - The width of the line to check * @returns Whether the x/y coordinates are within this rectangle */ strokeContains(x: number, y: number, strokeWidth: number): boolean; /** * Determines whether the `other` Rectangle transformed by `transform` intersects with `this` Rectangle object. * Returns true only if the area of the intersection is >0, this means that Rectangles * sharing a side are not overlapping. Another side effect is that an arealess rectangle * (width or height equal to zero) can't intersect any other rectangle. * @param {Rectangle} other - The Rectangle to intersect with `this`. * @param {Matrix} transform - The transformation matrix of `other`. * @returns {boolean} A value of `true` if the transformed `other` Rectangle intersects with `this`; otherwise `false`. */ intersects(other: Rectangle, transform?: Matrix): boolean; /** * Pads the rectangle making it grow in all directions. * If paddingY is omitted, both paddingX and paddingY will be set to paddingX. * @param paddingX - The horizontal padding amount. * @param paddingY - The vertical padding amount. * @returns Returns itself. */ pad(paddingX?: number, paddingY?: number): this; /** * Fits this rectangle around the passed one. * @param rectangle - The rectangle to fit. * @returns Returns itself. */ fit(rectangle: Rectangle): this; /** * Enlarges rectangle that way its corners lie on grid * @param resolution - resolution * @param eps - precision * @returns Returns itself. */ ceil(resolution?: number, eps?: number): this; /** * Enlarges this rectangle to include the passed rectangle. * @param rectangle - The rectangle to include. * @returns Returns itself. */ enlarge(rectangle: Rectangle): this; /** * Returns the framing rectangle of the rectangle as a Rectangle object * @param out - optional rectangle to store the result * @returns The framing rectangle */ getBounds(out?: Rectangle): Rectangle; toString(): string; } /** * Simple bounds implementation instead of more ambiguous [number, number, number, number] * @memberof rendering */ export interface BoundsData { minX: number; minY: number; maxX: number; maxY: number; } /** * A representation of an AABB bounding box. * @memberof rendering */ export declare class Bounds { /** @default Infinity */ minX: number; /** @default Infinity */ minY: number; /** @default -Infinity */ maxX: number; /** @default -Infinity */ maxY: number; matrix: Matrix; private _rectangle; constructor(minX?: number, minY?: number, maxX?: number, maxY?: number); /** * Checks if bounds are empty. * @returns - True if empty. */ isEmpty(): boolean; /** The bounding rectangle of the bounds. */ get rectangle(): Rectangle; /** Clears the bounds and resets. */ clear(): this; /** * Sets the bounds. 
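 * A minimal sketch:
 * ```ts
 * import { Bounds } from 'pixi.js';
 *
 * const bounds = new Bounds();
 * bounds.set(0, 0, 100, 50); // left, top, right, bottom
 * bounds.width;  // 100
 * bounds.height; // 50
 * ```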
* @param x0 - left X of frame * @param y0 - top Y of frame * @param x1 - right X of frame * @param y1 - bottom Y of frame */ set(x0: number, y0: number, x1: number, y1: number): void; /** * Adds sprite frame * @param x0 - left X of frame * @param y0 - top Y of frame * @param x1 - right X of frame * @param y1 - bottom Y of frame * @param matrix */ addFrame(x0: number, y0: number, x1: number, y1: number, matrix?: Matrix): void; /** * Adds a rectangle to the bounds. * @param rect - The rectangle to be added. * @param matrix - The matrix to apply to the bounds. */ addRect(rect: Rectangle, matrix?: Matrix): void; /** * Adds other {@link Bounds}. * @param bounds - The Bounds to be added * @param matrix */ addBounds(bounds: BoundsData, matrix?: Matrix): void; /** * Adds other Bounds, masked with Bounds. * @param mask - The Bounds to be added. */ addBoundsMask(mask: Bounds): void; /** * Adds other Bounds, multiplied with matrix. * @param matrix - The matrix to apply to the bounds. */ applyMatrix(matrix: Matrix): void; /** * Resizes the bounds object to include the given rectangle. * @param rect - The rectangle to be included. */ fit(rect: Rectangle): this; /** * Resizes the bounds object to include the given bounds. * @param left - The left value of the bounds. * @param right - The right value of the bounds. * @param top - The top value of the bounds. * @param bottom - The bottom value of the bounds. */ fitBounds(left: number, right: number, top: number, bottom: number): this; /** * Pads bounds object, making it grow in all directions. * If paddingY is omitted, both paddingX and paddingY will be set to paddingX. * @param paddingX - The horizontal padding amount. * @param paddingY - The vertical padding amount. */ pad(paddingX: number, paddingY?: number): this; /** Ceils the bounds. */ ceil(): this; /** Clones the bounds. */ clone(): Bounds; /** * Scales the bounds by the given values * @param x - The X value to scale by. * @param y - The Y value to scale by. */ scale(x: number, y?: number): this; /** the x value of the bounds. */ get x(): number; set x(value: number); /** the y value of the bounds. */ get y(): number; set y(value: number); /** the width value of the bounds. */ get width(): number; set width(value: number); /** the height value of the bounds. */ get height(): number; set height(value: number); /** the left value of the bounds. */ get left(): number; /** the right value of the bounds. */ get right(): number; /** the top value of the bounds. */ get top(): number; /** the bottom value of the bounds. */ get bottom(): number; /** Is the bounds positive. */ get isPositive(): boolean; get isValid(): boolean; /** * Adds screen vertices from array * @param vertexData - calculated vertices * @param beginOffset - begin offset * @param endOffset - end offset, excluded * @param matrix */ addVertexData(vertexData: Float32Array, beginOffset: number, endOffset: number, matrix?: Matrix): void; /** * Checks if the point is contained within the bounds. * @param x - x coordinate * @param y - y coordinate */ containsPoint(x: number, y: number): boolean; toString(): string; } export interface ViewObserver { onViewUpdate: () => void; } /** * A view is something that is able to be rendered by the renderer. 
* @memberof scene */ export interface View { /** a unique id for this view */ readonly uid: number; /** whether or not this view should be batched */ batched: boolean; /** * an identifier that is used to identify the type of system that will be used to render this renderable * eg, 'sprite' will use the sprite system (based on the systems name */ readonly renderPipeId: string; /** this is an int because it is packed directly into an attribute in the shader */ _roundPixels: 0 | 1; /** @private */ _lastUsed: number; /** @private */ _lastInstructionTick: number; /** * Whether or not to round the x/y position of the object. * @type {boolean} */ get roundPixels(): boolean; /** if true, the view will have its position rounded to the nearest whole number */ set roundPixels(value: boolean); /** this is the AABB rectangle bounds of the view in local untransformed space. */ bounds: BoundsData; /** Adds the current bounds of this view to the supplied bounds */ addBounds: (bounds: Bounds) => void; /** Checks if the point is within the view */ containsPoint: (point: Point) => boolean; } export interface Renderable extends Container, View { } /** * An instruction that can be executed by the renderer * @memberof rendering */ export interface Instruction { /** a the id of the render pipe that can run this instruction */ renderPipeId: string; /** the name of the instruction */ action?: string; /** true if this instruction can be compiled into a WebGPU bundle */ canBundle: boolean; } /** * A set of instructions that can be executed by the renderer. * Basically wraps an array, but with some extra properties that help the renderer * to keep things nice and optimised. * * Note: * InstructionSet.instructions contains all the instructions, but does not resize (for performance). * So for the true length of the instructions you need to use InstructionSet.instructionSize * @memberof rendering */ export declare class InstructionSet { /** a unique id for this instruction set used through the renderer */ readonly uid: number; /** the array of instructions */ readonly instructions: Instruction[]; /** the actual size of the array (any instructions passed this should be ignored) */ instructionSize: number; /** allows for access to the render pipes of the renderer */ renderPipes: any; renderables: Renderable[]; tick: number; /** reset the instruction set so it can be reused set size back to 0 */ reset(): void; /** * Add an instruction to the set * @param instruction - add an instruction to the set */ add(instruction: Instruction): void; /** * Log the instructions to the console (for debugging) * @internal * @ignore */ log(): void; } /** * A RenderGroup is a class that is responsible for I generating a set of instructions that are used to render the * root container and its children. It also watches for any changes in that container or its children, * these changes are analysed and either the instruction set is rebuild or the instructions data is updated. 
* @memberof rendering */ export declare class RenderGroup implements Instruction { renderPipeId: string; root: Container; canBundle: boolean; renderGroupParent: RenderGroup; renderGroupChildren: RenderGroup[]; worldTransform: Matrix; worldColorAlpha: number; worldColor: number; worldAlpha: number; readonly childrenToUpdate: Record; updateTick: number; readonly childrenRenderablesToUpdate: { list: Container[]; index: number; }; structureDidChange: boolean; instructionSet: InstructionSet; private readonly _onRenderContainers; init(root: Container): void; reset(): void; get localTransform(): Matrix; addRenderGroupChild(renderGroupChild: RenderGroup): void; private _removeRenderGroupChild; addChild(child: Container): void; removeChild(child: Container): void; removeChildren(children: Container[]): void; onChildUpdate(child: Container): void; updateRenderable(container: Container): void; onChildViewUpdate(child: Container): void; get isRenderable(): boolean; /** * adding a container to the onRender list will make sure the user function * passed in to the user defined 'onRender` callBack * @param container - the container to add to the onRender list */ addOnRender(container: Container): void; removeOnRender(container: Container): void; runOnRender(): void; destroy(): void; getChildren(out?: Container[]): Container[]; private _getChildren; } /** * Defines a size with a width and a height. * @memberof maths */ export interface Size { /** The width. */ width: number; /** The height. */ height: number; } /** * Various blend modes supported by Pixi * @memberof filters */ export type BLEND_MODES = "inherit" | "normal" | "add" | "multiply" | "screen" | "darken" | "lighten" | "erase" | "color-dodge" | "color-burn" | "linear-burn" | "linear-dodge" | "linear-light" | "hard-light" | "soft-light" | "pin-light" | "difference" | "exclusion" | "overlay" | "saturation" | "color" | "luminosity" | "normal-npm" | "add-npm" | "screen-npm" | "none" | "subtract" | "divide" | "vivid-light" | "hard-mix" | "negation" | "min" | "max"; /** * The map of blend modes supported by Pixi * @memberof rendering */ export declare const BLEND_TO_NPM: { normal: string; add: string; screen: string; }; /** * The stencil operation to perform when using the stencil buffer * @memberof rendering */ export declare enum STENCIL_MODES { DISABLED = 0, RENDERING_MASK_ADD = 1, MASK_ACTIVE = 2, RENDERING_MASK_REMOVE = 3, NONE = 4 } /** * The culling mode to use. It can be either `none`, `front` or `back`. * @memberof rendering */ export type CULL_MODES = "none" | "back" | "front"; export type ArrayFixed = [ T, ...Array ] & { length: L; }; export type Dict = { [key: string]: T; }; export type Optional = Omit & Partial>; export interface MeasureMixinConstructor { width?: number; height?: number; } export interface MeasureMixin extends Required { getSize(out?: Size): Size; setSize(width: number, height?: number): void; setSize(value: Optional): void; getLocalBounds(bounds?: Bounds): Bounds; getBounds(skipUpdate?: boolean, bounds?: Bounds): Bounds; _localBoundsCacheData: LocalBoundsCacheData; _localBoundsCacheId: number; _setWidth(width: number, localWidth: number): void; _setHeight(height: number, localHeight: number): void; } interface LocalBoundsCacheData { data: number[]; index: number; didChange: boolean; localBounds: Bounds; } export declare const measureMixin: Partial; /** * Base destroy options. * @example * // Destroy the sprite and all its children. 
* sprite.destroy({ children: true }); * @memberof scene */ export interface BaseDestroyOptions { /** Destroy children recursively. */ children?: boolean; } /** * Options when destroying textures. Most of these use cases are internal. * ```js * // destroy the graphics context and its texture * graphicsContext.destroy({ texture: true }); * ``` * @memberof scene */ export interface TextureDestroyOptions { /** Destroy the texture as well. */ texture?: boolean; /** Destroy the texture source as well. */ textureSource?: boolean; } /** * Options when destroying a graphics context. * ```js * // destroy the graphics context and its texture * graphicsContext.destroy({ context: true, texture: true }); * ``` * @memberof scene */ export interface ContextDestroyOptions { /** Destroy the graphics context as well. */ context?: boolean; } /** * Options when destroying a text. * ```js * // destroy the text and its style * text.destroy({ style: true }); * ``` * @memberof scene */ export interface TextDestroyOptions { /** Destroy the text style as well. */ style?: boolean; } export type TypeOrBool = T | boolean; /** * Options for destroying a container. * @property {boolean} [children=false] - Destroy the children of the container as well. * @property {boolean} [texture=false] - Destroy the texture of the container's children. * @property {boolean} [textureSource=false] - Destroy the texture source of the container's children. * @property {boolean} [context=false] - Destroy the context of the container's children. * @property {boolean} [style=false] - Destroy the style of the container's children. * @memberof scene */ export type DestroyOptions = TypeOrBool; export type ContainerChild = Container; export interface ContainerEvents extends PixiMixins.ContainerEvents { added: [ container: Container ]; childAdded: [ child: C, container: Container, index: number ]; removed: [ container: Container ]; childRemoved: [ child: C, container: Container, index: number ]; destroyed: [ container: Container ]; } type AnyEvent = { [K: ({} & string) | ({} & symbol)]: any; }; export declare const UPDATE_COLOR = 1; export declare const UPDATE_BLEND = 2; export declare const UPDATE_VISIBLE = 4; export declare const UPDATE_TRANSFORM = 8; export interface UpdateTransformOptions { x: number; y: number; scaleX: number; scaleY: number; rotation: number; skewX: number; skewY: number; pivotX: number; pivotY: number; } /** * Constructor options used for `Container` instances. 
* ```js * const container = new Container({ * position: new Point(100, 200), * scale: new Point(2, 2), * rotation: Math.PI / 2, * }); * ``` * @memberof scene * @see scene.Container */ export interface ContainerOptions extends PixiMixins.ContainerOptions { /** @see scene.Container#isRenderGroup */ isRenderGroup?: boolean; /** @see scene.Container#blendMode */ blendMode?: BLEND_MODES; /** @see scene.Container#tint */ tint?: ColorSource; /** @see scene.Container#alpha */ alpha?: number; /** @see scene.Container#angle */ angle?: number; /** @see scene.Container#children */ children?: C[]; /** @see scene.Container#parent */ parent?: Container; /** @see scene.Container#renderable */ renderable?: boolean; /** @see scene.Container#rotation */ rotation?: number; /** @see scene.Container#scale */ scale?: PointData | number; /** @see scene.Container#pivot */ pivot?: PointData | number; /** @see scene.Container#position */ position?: PointData; /** @see scene.Container#skew */ skew?: PointData; /** @see scene.Container#visible */ visible?: boolean; /** @see scene.Container#x */ x?: number; /** @see scene.Container#y */ y?: number; /** @see scene.Container#boundArea */ boundsArea?: Rectangle; } export interface Container extends PixiMixins.Container, EventEmitter & AnyEvent> { } /** * Container is a general-purpose display object that holds children. It also adds built-in support for advanced * rendering features like masking and filtering. * * It is the base class of all display objects that act as a container for other objects, including Graphics * and Sprite. * *
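 * A minimal sketch (the asset path below is a placeholder):
 * ```ts
 * import { Container, Sprite } from 'pixi.js';
 *
 * const group = new Container();
 * const sprite = Sprite.from('bunny.png'); // placeholder asset
 *
 * group.addChild(sprite);    // the sprite now renders whenever `group` is rendered
 * group.removeChild(sprite); // and this detaches it again
 * ```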

 *
 * Transforms
 *
 * The [transform]{@link scene.Container#transform} of a display object describes the projection from its
 * local coordinate space to its parent's local coordinate space. The following properties are derived
 * from the transform:
 *
 * <table>
 *   <thead>
 *     <tr>
 *       <th>Property</th>
 *       <th>Description</th>
 *     </tr>
 *   </thead>
 *   <tbody>
 *     <tr>
 *       <td>[pivot]{@link scene.Container#pivot}</td>
 *       <td>
 *         Invariant under rotation, scaling, and skewing. The projection of the pivot into the parent's
 *         space is equal to position, regardless of the other three transformations. In other words,
 *         it is the center of rotation, scaling, and skewing.
 *       </td>
 *     </tr>
 *     <tr>
 *       <td>[position]{@link scene.Container#position}</td>
 *       <td>
 *         Translation. This is the position of the [pivot]{@link scene.Container#pivot} in the parent's
 *         local space. The default value of the pivot is the origin (0,0). If the top-left corner of your
 *         display object is (0,0) in its local space, then the position will be its top-left corner in
 *         the parent's local space.
 *       </td>
 *     </tr>
 *     <tr>
 *       <td>[scale]{@link scene.Container#scale}</td>
 *       <td>
 *         Scaling. This will stretch (or compress) the display object's projection. The scale factors
 *         are along the local coordinate axes. In other words, the display object is scaled before it is
 *         rotated or skewed. The center of scaling is the [pivot]{@link scene.Container#pivot}.
 *       </td>
 *     </tr>
 *     <tr>
 *       <td>[rotation]{@link scene.Container#rotation}</td>
 *       <td>Rotation. This will rotate the display object's projection by this angle (in radians).</td>
 *     </tr>
 *     <tr>
 *       <td>[skew]{@link scene.Container#skew}</td>
 *       <td>
 *         <p>Skewing. This can be used to deform a rectangular display object into a parallelogram.</p>
 *         <p>
 *           In PixiJS, skew has a slightly different behaviour than the conventional meaning. It can be
 *           thought of as the net rotation applied to the coordinate axes (separately). For example, if
 *           "skew.x" is ⍺ and "skew.y" is β, then the line x = 0 will be rotated by ⍺ (y = -x*cot⍺) and the
 *           line y = 0 will be rotated by β (y = x*tanβ). A line y = x*tanϴ (i.e. a line at angle ϴ to the
 *           x-axis in local-space) will be rotated by an angle between ⍺ and β.
 *         </p>
 *         <p>
 *           It can be observed that if skew is applied equally to both axes, then it will be equivalent to
 *           applying a rotation. Indeed, if "skew.x" = -ϴ and "skew.y" = ϴ, it will produce an equivalent
 *           of "rotation" = ϴ.
 *         </p>
 *         <p>
 *           Another interesting observation is that "skew.x", "skew.y" and rotation are commutative
 *           operations; indeed, rotation is essentially a careful combination of the two.
 *         </p>
 *       </td>
 *     </tr>
 *     <tr>
 *       <td>[angle]{@link scene.Container#angle}</td>
 *       <td>Rotation. This is an alias for [rotation]{@link scene.Container#rotation}, but in degrees.</td>
 *     </tr>
 *     <tr>
 *       <td>[x]{@link scene.Container#x}</td>
 *       <td>Translation. This is an alias for position.x!</td>
 *     </tr>
 *     <tr>
 *       <td>[y]{@link scene.Container#y}</td>
 *       <td>Translation. This is an alias for position.y!</td>
 *     </tr>
 *     <tr>
 *       <td>[width]{@link scene.Container#width}</td>
 *       <td>
 *         Implemented in [Container]{@link scene.Container}. Scaling. The width property calculates scale.x
 *         by dividing the "requested" width by the local bounding box width. It is indirectly an abstraction
 *         over scale.x, and there is no concept of user-defined width.
 *       </td>
 *     </tr>
 *     <tr>
 *       <td>[height]{@link scene.Container#height}</td>
 *       <td>
 *         Implemented in [Container]{@link scene.Container}. Scaling. The height property calculates scale.y
 *         by dividing the "requested" height by the local bounding box height. It is indirectly an abstraction
 *         over scale.y, and there is no concept of user-defined height.
 *       </td>
 *     </tr>
 *   </tbody>
 * </table>
* *
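 * A minimal sketch of the transform properties listed above:
 * ```ts
 * import { Container } from 'pixi.js';
 *
 * const container = new Container();
 * container.position.set(100, 50); // translation
 * container.rotation = Math.PI / 4; // rotation in radians
 * container.angle = 45;             // the same rotation, expressed in degrees
 * container.scale.set(2);           // scale both axes by 2
 * container.pivot.set(32, 32);      // rotate/scale/skew around (32, 32) in local space
 * ```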
* Alpha * * This alpha sets a display object's **relative opacity** w.r.t its parent. For example, if the alpha of a display * object is 0.5 and its parent's alpha is 0.5, then it will be rendered with 25% opacity (assuming alpha is not * applied on any ancestor further up the chain). *
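 * A minimal sketch of the 0.5 x 0.5 case described above (the asset path is a placeholder):
 * ```ts
 * import { Container, Sprite } from 'pixi.js';
 *
 * const parent = new Container();
 * const child = Sprite.from('bunny.png'); // placeholder asset
 * parent.addChild(child);
 *
 * parent.alpha = 0.5;
 * child.alpha = 0.5; // the child renders at an effective opacity of 0.25
 * ```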
* *
* Renderable vs Visible * * The `renderable` and `visible` properties can be used to prevent a display object from being rendered to the * screen. However, there is a subtle difference between the two. When using `renderable`, the transforms of the display * object (and its children subtree) will continue to be calculated. When using `visible`, the transforms will not * be calculated. * ```ts * import { BlurFilter, Container, Graphics, Sprite } from 'pixi.js'; * * const container = new Container(); * const sprite = Sprite.from('https://s3-us-west-2.amazonaws.com/s.cdpn.io/693612/IaUrttj.png'); * * sprite.width = 512; * sprite.height = 512; * * // Adds a sprite as a child to this container. As a result, the sprite will be rendered whenever the container * // is rendered. * container.addChild(sprite); * * // Blurs whatever is rendered by the container * container.filters = [new BlurFilter()]; * * // Only the contents within a circle at the center should be rendered onto the screen. * container.mask = new Graphics() * .beginFill(0xffffff) * .drawCircle(sprite.width / 2, sprite.height / 2, Math.min(sprite.width, sprite.height) / 2) * .endFill(); * ``` * *
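 * A minimal sketch of the difference:
 * ```ts
 * import { Container } from 'pixi.js';
 *
 * const container = new Container();
 * container.renderable = false; // not drawn, but its transforms keep updating
 * container.visible = false;    // not drawn, and its transforms are not calculated
 * ```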
* *
* RenderGroup * * In PixiJS v8, containers can be set to operate in 'render group mode', * transforming them into entities akin to a stage in traditional rendering paradigms. * A render group is a root renderable entity, similar to a container, * but it's rendered in a separate pass with its own unique set of rendering instructions. * This approach enhances rendering efficiency and organization, particularly in complex scenes. * * You can enable render group mode on any container using container.enableRenderGroup() * or by initializing a new container with the render group property set to true (new Container({isRenderGroup: true})). * The method you choose depends on your specific use case and setup requirements. * * An important aspect of PixiJS’s rendering process is the automatic treatment of rendered scenes as render groups. * This conversion streamlines the rendering process, but understanding when and how this happens is crucial * to fully leverage its benefits. * * One of the key advantages of using render groups is the performance efficiency in moving them. Since transformations * are applied at the GPU level, moving a render group, even one with complex and numerous children, * doesn't require recalculating the rendering instructions or performing transformations on each child. * This makes operations like panning a large game world incredibly efficient. * * However, it's crucial to note that render groups do not batch together. * This means that turning every container into a render group could actually slow things down, * as each render group is processed separately. It's best to use render groups judiciously, at a broader level, * rather than on a per-child basis. * This approach ensures you get the performance benefits without overburdening the rendering process. * * RenderGroups maintain their own set of rendering instructions, * ensuring that changes or updates within a render group don't affect the rendering * instructions of its parent or other render groups. * This isolation ensures more stable and predictable rendering behavior. * * Additionally, renderGroups can be nested, allowing for powerful options in organizing different aspects of your scene. * This feature is particularly beneficial for separating complex game graphics from UI elements, * enabling intricate and efficient scene management in complex applications. * * This means that Containers have 3 levels of matrix to be mindful of: * * 1. localTransform, this is the transform of the container based on its own properties * 2. groupTransform, this it the transform of the container relative to the renderGroup it belongs too * 3. worldTransform, this is the transform of the container relative to the Scene being rendered *
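 * A minimal sketch of the two ways to opt in described above:
 * ```ts
 * import { Container } from 'pixi.js';
 *
 * // Either enable render group mode at construction time...
 * const world = new Container({ isRenderGroup: true });
 *
 * // ...or switch it on for an existing container.
 * const ui = new Container();
 * ui.enableRenderGroup();
 *
 * // Moving a render group is cheap: its children are not re-processed on the CPU.
 * world.x += 100;
 * ```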
* @memberof scene */ export declare class Container extends EventEmitter & AnyEvent> { /** * Mixes all enumerable properties and methods from a source object to Container. * @param source - The source of properties and methods to mix in. */ static mixin(source: Dict): void; /** unique id for this container */ readonly uid: number; /** @private */ _updateFlags: number; /** @private */ renderGroup: RenderGroup; /** @private */ parentRenderGroup: RenderGroup; /** @private */ parentRenderGroupIndex: number; /** @private */ didChange: boolean; /** @private */ didViewUpdate: boolean; /** @private */ relativeRenderGroupDepth: number; /** * The array of children of this container. * @readonly */ children: C[]; /** The display object container that contains this display object. */ parent: Container; /** @private */ includeInBuild: boolean; /** @private */ measurable: boolean; /** @private */ isSimple: boolean; /** * @internal * @ignore */ updateTick: number; /** * Current transform of the object based on local factors: position, scale, other stuff. * @readonly */ localTransform: Matrix; /** * The relative group transform is a transform relative to the render group it belongs too. It will include all parent * transforms and up to the render group (think of it as kind of like a stage - but the stage can be nested). * If this container is is self a render group matrix will be relative to its parent render group * @readonly */ relativeGroupTransform: Matrix; /** * The group transform is a transform relative to the render group it belongs too. * If this container is render group then this will be an identity matrix. other wise it * will be the same as the relativeGroupTransform. * Use this value when actually rendering things to the screen * @readonly */ groupTransform: Matrix; private _worldTransform; /** If the object has been destroyed via destroy(). If true, it should not be used. */ destroyed: boolean; /** * The coordinate of the object relative to the local coordinates of the parent. * @internal * @ignore */ _position: ObservablePoint; /** * The scale factor of the object. * @internal * @ignore */ _scale: ObservablePoint; /** * The pivot point of the container that it rotates around. * @internal * @ignore */ _pivot: ObservablePoint; /** * The skew amount, on the x and y axis. * @internal * @ignore */ _skew: ObservablePoint; /** * The X-coordinate value of the normalized local X axis, * the first column of the local transformation matrix without a scale. * @internal * @ignore */ _cx: number; /** * The Y-coordinate value of the normalized local X axis, * the first column of the local transformation matrix without a scale. * @internal * @ignore */ _sx: number; /** * The X-coordinate value of the normalized local Y axis, * the second column of the local transformation matrix without a scale. * @internal * @ignore */ _cy: number; /** * The Y-coordinate value of the normalized local Y axis, * the second column of the local transformation matrix without a scale. * @internal * @ignore */ _sy: number; /** * The rotation amount. 
* @internal * @ignore */ private _rotation; localColor: number; localAlpha: number; groupAlpha: number; groupColor: number; groupColorAlpha: number; /** * @internal * @ignore */ localBlendMode: BLEND_MODES; /** * @internal * @ignore */ groupBlendMode: BLEND_MODES; /** * This property holds three bits: culled, visible, renderable * the third bit represents culling (0 = culled, 1 = not culled) 0b100 * the second bit represents visibility (0 = not visible, 1 = visible) 0b010 * the first bit represents renderable (0 = not renderable, 1 = renderable) 0b001 * @internal * @ignore */ localDisplayStatus: number; /** * @internal * @ignore */ globalDisplayStatus: number; readonly renderPipeId: string; /** * An optional bounds area for this container. Setting this rectangle will stop the renderer * from recursively measuring the bounds of each children and instead use this single boundArea. * This is great for optimisation! If for example you have a 1000 spinning particles and you know they all sit * within a specific bounds, then setting it will mean the renderer will not need to measure the * 1000 children to find the bounds. Instead it will just use the bounds you set. */ boundsArea: Rectangle; /** * A value that increments each time the containe is modified * eg children added, removed etc * @ignore */ _didContainerChangeTick: number; /** * A value that increments each time the container view is modified * eg texture swap, geometry change etc * @ignore */ _didViewChangeTick: number; /** * We now use the _didContainerChangeTick and _didViewChangeTick to track changes * @deprecated since 8.2.6 * @ignore */ set _didChangeId(value: number); get _didChangeId(): number; /** * property that tracks if the container transform has changed * @ignore */ private _didLocalTransformChangeId; constructor(options?: ContainerOptions); /** * Adds one or more children to the container. * * Multiple items can be added like so: `myContainer.addChild(thingOne, thingTwo, thingThree)` * @param {...Container} children - The Container(s) to add to the container * @returns {Container} - The first child that was added. */ addChild(...children: U): U[0]; /** * Removes one or more children from the container. * @param {...Container} children - The Container(s) to remove * @returns {Container} The first child that was removed. */ removeChild(...children: U): U[0]; /** @ignore */ _onUpdate(point?: ObservablePoint): void; set isRenderGroup(value: boolean); /** * Returns true if this container is a render group. * This means that it will be rendered as a separate pass, with its own set of instructions */ get isRenderGroup(): boolean; /** * Calling this enables a render group for this container. * This means it will be rendered as a separate set of instructions. * The transform of the container will also be handled on the GPU rather than the CPU. */ enableRenderGroup(): void; /** This will disable the render group for this container. */ disableRenderGroup(): void; /** @ignore */ _updateIsSimple(): void; /** * Current transform of the object based on world (parent) factors. * @readonly */ get worldTransform(): Matrix; /** * The position of the container on the x axis relative to the local coordinates of the parent. * An alias to position.x */ get x(): number; set x(value: number); /** * The position of the container on the y axis relative to the local coordinates of the parent. * An alias to position.y */ get y(): number; set y(value: number); /** * The coordinate of the object relative to the local coordinates of the parent. 
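 * A minimal sketch:
 * ```ts
 * import { Container } from 'pixi.js';
 *
 * const container = new Container();
 * container.position.set(100, 200);
 * container.position = { x: 100, y: 200 }; // any PointData-compatible object also works
 * ```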
* @since 4.0.0 */ get position(): ObservablePoint; set position(value: PointData); /** * The rotation of the object in radians. * 'rotation' and 'angle' have the same effect on a display object; rotation is in radians, angle is in degrees. */ get rotation(): number; set rotation(value: number); /** * The angle of the object in degrees. * 'rotation' and 'angle' have the same effect on a display object; rotation is in radians, angle is in degrees. */ get angle(): number; set angle(value: number); /** * The center of rotation, scaling, and skewing for this display object in its local space. The `position` * is the projection of `pivot` in the parent's local space. * * By default, the pivot is the origin (0, 0). * @since 4.0.0 */ get pivot(): ObservablePoint; set pivot(value: PointData | number); /** * The skew factor for the object in radians. * @since 4.0.0 */ get skew(): ObservablePoint; set skew(value: PointData); /** * The scale factors of this object along the local coordinate axes. * * The default scale is (1, 1). * @since 4.0.0 */ get scale(): ObservablePoint; set scale(value: PointData | number); /** * The width of the Container, setting this will actually modify the scale to achieve the value set. * @memberof scene.Container# */ get width(): number; set width(value: number); /** * The height of the Container, setting this will actually modify the scale to achieve the value set. * @memberof scene.Container# */ get height(): number; set height(value: number); /** * Retrieves the size of the container as a [Size]{@link Size} object. * This is faster than get the width and height separately. * @param out - Optional object to store the size in. * @returns - The size of the container. * @memberof scene.Container# */ getSize(out?: Size): Size; /** * Sets the size of the container to the specified width and height. * This is faster than setting the width and height separately. * @param value - This can be either a number or a [Size]{@link Size} object. * @param height - The height to set. Defaults to the value of `width` if not provided. * @memberof scene.Container# */ setSize(value: number | Optional, height?: number): void; /** Called when the skew or the rotation changes. */ private _updateSkew; /** * Updates the transform properties of the container (accepts partial values). * @param {object} opts - The options for updating the transform. * @param {number} opts.x - The x position of the container. * @param {number} opts.y - The y position of the container. * @param {number} opts.scaleX - The scale factor on the x-axis. * @param {number} opts.scaleY - The scale factor on the y-axis. * @param {number} opts.rotation - The rotation of the container, in radians. * @param {number} opts.skewX - The skew factor on the x-axis. * @param {number} opts.skewY - The skew factor on the y-axis. * @param {number} opts.pivotX - The x coordinate of the pivot point. * @param {number} opts.pivotY - The y coordinate of the pivot point. */ updateTransform(opts: Partial): this; /** * Updates the local transform using the given matrix. * @param matrix - The matrix to use for updating the transform. */ setFromMatrix(matrix: Matrix): void; /** Updates the local transform. */ updateLocalTransform(): void; set alpha(value: number); /** The opacity of the object. */ get alpha(): number; set tint(value: ColorSource); /** * The tint applied to the sprite. This is a hex value. * * A value of 0xFFFFFF will remove any tint effect. 
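 *
 * A minimal sketch of the transform helpers declared above (`updateTransform` accepts
 * partial values; unspecified components keep their current values):
 * ```ts
 * import { Container } from 'pixi.js';
 *
 * const panel = new Container();
 * panel.updateTransform({ x: 10, y: 20, rotation: 0.1, scaleX: 2, scaleY: 2 });
 * panel.pivot = { x: 8, y: 8 };
 * panel.alpha = 0.5;
 * ```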
* @default 0xFFFFFF */ get tint(): number; set blendMode(value: BLEND_MODES); /** * The blend mode to be applied to the sprite. Apply a value of `'normal'` to reset the blend mode. * @default 'normal' */ get blendMode(): BLEND_MODES; /** The visibility of the object. If false the object will not be drawn, and the transform will not be updated. */ get visible(): boolean; set visible(value: boolean); /** @ignore */ get culled(): boolean; /** @ignore */ set culled(value: boolean); /** Can this object be rendered, if false the object will not be drawn but the transform will still be updated. */ get renderable(): boolean; set renderable(value: boolean); /** Whether or not the object should be rendered. */ get isRenderable(): boolean; /** * Removes all internal references and listeners as well as removes children from the display list. * Do not use a Container after calling `destroy`. * @param options - Options parameter. A boolean will act as if all options * have been set to that value * @param {boolean} [options.children=false] - if set to true, all the children will have their destroy * method called as well. 'options' will be passed on to those calls. * @param {boolean} [options.texture=false] - Only used for children with textures e.g. Sprites. If options.children * is set to true it should destroy the texture of the child sprite * @param {boolean} [options.textureSource=false] - Only used for children with textures e.g. Sprites. * If options.children is set to true it should destroy the texture source of the child sprite * @param {boolean} [options.context=false] - Only used for children with graphicsContexts e.g. Graphics. * If options.children is set to true it should destroy the context of the child graphics */ destroy(options?: DestroyOptions): void; } /** * The type of the pointer event to listen for. * Can be any of the following: * - `auto` * - `none` * - `visiblePainted` * - `visibleFill` * - `visibleStroke` * - `visible` * - `painted` * - `fill` * - `stroke` * - `all` * - `inherit` * @memberof accessibility * @see https://developer.mozilla.org/en-US/docs/Web/CSS/pointer-events */ export type PointerEvents = "auto" | "none" | "visiblePainted" | "visibleFill" | "visibleStroke" | "visible" | "painted" | "fill" | "stroke" | "all" | "inherit"; /** * When `accessible` is enabled on any display object, these properties will affect its accessibility. * @memberof accessibility */ export interface AccessibleOptions { /** * Flag for if the object is accessible. If true AccessibilityManager will overlay a * shadow div with attributes set * @default false */ accessible: boolean; /** * Sets the title attribute of the shadow div * If accessibleTitle AND accessibleHint has not been this will default to 'container [tabIndex]' * @member {string} */ accessibleTitle: string | null; /** Sets the aria-label attribute of the shadow div */ accessibleHint: string | null; /** * @default 0 */ tabIndex: number; /** * Specify the type of div the accessible layer is. Screen readers treat the element differently * depending on this type. Defaults to button. * @default 'button' */ accessibleType: string; /** * Specify the pointer-events the accessible div will use * Defaults to auto. * @default 'auto' * @type {accessibility.PointerEvents} */ accessiblePointerEvents: PointerEvents; /** * Setting to false will prevent any children inside this container to * be accessible. Defaults to true. * @default true */ accessibleChildren: boolean; } /** * The Accessibility object is attached to the {@link Container}. 
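 *
 * A minimal sketch of these options, assuming the accessibility mixin is applied to
 * `Container` (as it is in the full pixi.js bundle):
 * ```ts
 * import { Container } from 'pixi.js';
 *
 * const button = new Container();
 * button.accessible = true;                // AccessibilityManager will overlay a shadow div
 * button.accessibleTitle = 'Play';         // title attribute of the shadow div
 * button.accessibleType = 'button';        // 'button' is already the default
 * button.accessiblePointerEvents = 'auto';
 * ```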
* @private */ export interface AccessibleTarget extends AccessibleOptions { _accessibleActive: boolean; _accessibleDiv: AccessibleHTMLElement | null; _renderId: number; } export interface AccessibleHTMLElement extends HTMLElement { type?: string; container?: Container; } /** * Default property values of accessible objects * used by {@link AccessibilitySystem}. * @private * @example * import { accessibleTarget } from 'pixi.js'; * * function MyObject() {} * Object.assign(MyObject.prototype, accessibleTarget); */ export declare const accessibilityTarget: AccessibleTarget; /** * `extensions` is a global object that holds all the extensions registered with PixiJS. * PixiJS uses a this extensions architecture a lot to make the library more modular and * flexible. * * For example, if you want to add load a new type of asset, you can register a new * {@link assets.LoaderParser} with the `extensions` object. * * ```js * import { extensions, ExtensionType } from 'pixi.js'; * * // create a custom asset loader * const customAssetLoader = { * extension: { * type: ExtensionType.LoadParser, * name: 'custom-asset-loader', * }, * test(url) { * // check if this new loader should be used... * }, * load(url) { * // load the asset... * }, * }; * * // add the custom asset loader to pixi * extensions.add(customAssetLoader); * ``` * * This would add the `customAssetLoader` to the list of available loaders that PixiJS can use. * * There are many different types of extensions, which are listed in {@link extensions.ExtensionType}. * @namespace extensions */ /** * Collection of valid extension types. * @memberof extensions */ export declare enum ExtensionType { /** extensions that are registered as Application plugins */ Application = "application", /** extensions that are registered as WebGL render pipes */ WebGLPipes = "webgl-pipes", /** extensions that are registered as WebGL render pipes adaptors */ WebGLPipesAdaptor = "webgl-pipes-adaptor", /** extensions that are registered as WebGL render systems */ WebGLSystem = "webgl-system", /** extensions that are registered as WebGPU render pipes */ WebGPUPipes = "webgpu-pipes", /** extensions that are registered as WebGPU render pipes adaptors */ WebGPUPipesAdaptor = "webgpu-pipes-adaptor", /** extensions that are registered as WebGPU render systems */ WebGPUSystem = "webgpu-system", /** extensions that are registered as Canvas render pipes */ CanvasSystem = "canvas-system", /** extensions that are registered as Canvas render pipes adaptors */ CanvasPipesAdaptor = "canvas-pipes-adaptor", /** extensions that are registered as Canvas render systems */ CanvasPipes = "canvas-pipes", /** extensions that combine the other Asset extensions */ Asset = "asset", /** extensions that are used to load assets through Assets */ LoadParser = "load-parser", /** extensions that are used to resolve asset urls through Assets */ ResolveParser = "resolve-parser", /** extensions that are used to handle how urls are cached by Assets */ CacheParser = "cache-parser", /** extensions that are used to add/remove available resources from Assets */ DetectionParser = "detection-parser", /** extensions that are registered with the MaskEffectManager */ MaskEffect = "mask-effect", /** A type of extension for creating a new advanced blend mode */ BlendMode = "blend-mode", /** A type of extension that will be used to auto detect a resource type */ TextureSource = "texture-source", /** A type of extension that will be used to auto detect an environment */ Environment = "environment", /** A type of extension 
for building and triangulating custom shapes used in graphics. */ ShapeBuilder = "shape-builder", /** A type of extension for creating custom batchers used in rendering. */ Batcher = "batcher" } /** * The metadata for an extension. * @memberof extensions * @ignore */ export interface ExtensionMetadataDetails { /** The extension type, can be multiple types */ type: ExtensionType | ExtensionType[]; /** Optional. Some plugins provide an API name/property, to make them more easily accessible */ name?: string; /** Optional, used for sorting the plugins in a particular order */ priority?: number; } /** * The metadata for an extension. * @memberof extensions */ export type ExtensionMetadata = ExtensionType | ExtensionMetadataDetails; /** * Format when registering an extension. Generally, the extension * should have these values as `extension` static property, * but you can override name or type by providing an object. * @memberof extensions */ interface ExtensionFormat { /** The extension type, can be multiple types */ type: ExtensionType | ExtensionType[]; /** Optional. Some plugins provide an API name/property, such as Renderer plugins */ name?: string; /** Optional, used for sorting the plugins in a particular order */ priority?: number; /** Reference to the plugin object/class */ ref: any; } /** * Extension format that is used internally for registrations. * @memberof extensions * @ignore */ interface StrictExtensionFormat extends ExtensionFormat { /** The extension type, always expressed as multiple, even if a single */ type: ExtensionType[]; } export type ExtensionHandler = (extension: StrictExtensionFormat) => void; /** * Get the priority for an extension. * @ignore * @param ext - Any extension * @param defaultPriority - Fallback priority if none is defined. * @returns The priority for the extension. * @memberof extensions */ export declare const normalizeExtensionPriority: (ext: ExtensionFormat | any, defaultPriority: number) => number; /** * Global registration of all PixiJS extensions. One-stop-shop for extensibility. * * Import the `extensions` object and use it to register new functionality via the described methods below. * ```js * import { extensions } from 'pixi.js'; * * // register a new extension * extensions.add(myExtension); * ``` * @property {Function} remove - Remove extensions from PixiJS. * @property {Function} add - Register new extensions with PixiJS. * @property {Function} handle - Internal method to handle extensions by name. * @property {Function} handleByMap - Handle a type, but using a map by `name` property. * @property {Function} handleByNamedList - Handle a type, but using a list of extensions with a `name` property. * @property {Function} handleByList - Handle a type, but using a list of extensions. * @memberof extensions */ export declare const extensions: { /** @ignore */ _addHandlers: Partial>; /** @ignore */ _removeHandlers: Partial>; /** @ignore */ _queue: Partial>; /** * Remove extensions from PixiJS. * @param extensions - Extensions to be removed. * @returns {extensions} For chaining. */ remove(...extensions: Array): any; /** * Register new extensions with PixiJS. * @param extensions - The spread of extensions to add to PixiJS. * @returns {extensions} For chaining. */ add(...extensions: Array): any; /** * Internal method to handle extensions by name. * @param type - The extension type. * @param onAdd - Function handler when extensions are added/registered {@link StrictExtensionFormat}. 
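 * For example, a sketch of an add/remove handler pair (the bookkeeping object is
 * purely illustrative; the real renderers install these handlers internally):
 * ```ts
 * import { extensions, ExtensionType } from 'pixi.js';
 *
 * const registered: Record<string, unknown> = {};
 *
 * extensions.handle(
 *     ExtensionType.WebGLSystem,
 *     (ext) => { registered[ext.name ?? 'unnamed'] = ext.ref; }, // onAdd
 *     (ext) => { delete registered[ext.name ?? 'unnamed']; },    // onRemove
 * );
 * ```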
* @param onRemove - Function handler when extensions are removed/unregistered {@link StrictExtensionFormat}. * @returns {extensions} For chaining. */ handle(type: ExtensionType, onAdd: ExtensionHandler, onRemove: ExtensionHandler): any; /** * Handle a type, but using a map by `name` property. * @param type - Type of extension to handle. * @param map - The object map of named extensions. * @returns {extensions} For chaining. */ handleByMap(type: ExtensionType, map: Record): any; /** * Handle a type, but using a list of extensions with a `name` property. * @param type - Type of extension to handle. * @param map - The array of named extensions. * @param defaultPriority - Fallback priority if none is defined. * @returns {extensions} For chaining. */ handleByNamedList(type: ExtensionType, map: { name: string; value: any; }[], defaultPriority?: number): any; /** * Handle a type, but using a list of extensions. * @param type - Type of extension to handle. * @param list - The list of extensions. * @param defaultPriority - The default priority to use if none is specified. * @returns {extensions} For chaining. */ handleByList(type: ExtensionType, list: any[], defaultPriority?: number): any; }; /** * Common interface for CanvasRenderingContext2D, OffscreenCanvasRenderingContext2D, and other custom canvas 2D context. * @memberof environment */ export interface ICanvasRenderingContext2D extends CanvasState, CanvasTransform, CanvasCompositing, CanvasImageSmoothing, CanvasFillStrokeStyles, CanvasShadowStyles, CanvasFilters, CanvasRect, CanvasDrawPath, CanvasText, CanvasDrawImage, CanvasImageData, CanvasPathDrawingStyles, Omit, CanvasPath { /** creates a pattern using the specified image and repetition. */ createPattern(image: CanvasImageSource | ICanvas, repetition: string | null): CanvasPattern | null; /** provides different ways to draw an image onto the canvas */ drawImage(image: CanvasImageSource | ICanvas, dx: number, dy: number): void; drawImage(image: CanvasImageSource | ICanvas, dx: number, dy: number, dw: number, dh: number): void; drawImage(image: CanvasImageSource | ICanvas, sx: number, sy: number, sw: number, sh: number, dx: number, dy: number, dw: number, dh: number): void; /** sets the horizontal spacing behavior between text characters. */ letterSpacing?: string; /** sets the horizontal spacing behavior between text characters. */ textLetterSpacing?: string; } export type ContextIds = "2d" | "bitmaprenderer" | "webgl" | "experimental-webgl" | "webgl2" | "experimental-webgl2" | "webgpu"; type PredefinedColorSpace$1 = "srgb" | "display-p3"; type RenderingContext$1 = ICanvasRenderingContext2D | ImageBitmapRenderingContext | WebGLRenderingContext | WebGL2RenderingContext; export interface ICanvasRenderingContext2DSettings { alpha?: boolean; colorSpace?: PredefinedColorSpace$1; desynchronized?: boolean; willReadFrequently?: boolean; } export type ContextSettings = ICanvasRenderingContext2DSettings | ImageBitmapRenderingContextSettings | WebGLContextAttributes; export interface ICanvasParentNode { /** Adds a node to the end of the list of children of the parent node. */ appendChild(element: HTMLElement): void; /** Removes a child node from the parent node. 
*/ removeChild(element: HTMLElement): void; removeChild(element: ICanvas): void; } export interface ICanvasStyle { width?: string; height?: string; cursor?: string; touchAction?: string; msTouchAction?: string; msContentZooming?: string; } export interface ICanvasRect { x: number; y: number; width: number; height: number; } export interface WebGLContextEventMap { "webglcontextlost": WebGLContextEvent; "webglcontextrestore": WebGLContextEvent; } /** * Common interface for HTMLCanvasElement, OffscreenCanvas, and other custom canvas classes. * @extends PixiMixins.ICanvas * @extends Partial * @memberof environment */ export interface ICanvas extends PixiMixins.ICanvas, Partial { /** Width of the canvas. */ width: number; /** Height of the canvas. */ height: number; /** * Get rendering context of the canvas. * @param {ContextIds} contextId - The identifier of the type of context to create. * @param {ContextSettings} options - The options for creating context. * @returns {RenderingContext | null} The created context, or null if contextId is not supported. */ getContext(contextId: "2d", options?: ICanvasRenderingContext2DSettings): ICanvasRenderingContext2D | null; getContext(contextId: "bitmaprenderer", options?: ImageBitmapRenderingContextSettings): ImageBitmapRenderingContext | null; getContext(contextId: "webgl" | "experimental-webgl", options?: WebGLContextAttributes): WebGLRenderingContext | null; getContext(contextId: "webgl2" | "experimental-webgl2", options?: WebGLContextAttributes): WebGL2RenderingContext | null; getContext(contextId: "webgpu"): GPUCanvasContext | null; getContext(contextId: ContextIds, options?: ContextSettings): RenderingContext$1 | null; /** * Get the content of the canvas as data URL. * @param {string} [type] - A string indicating the image format. The default type is `image/png`; * that type is also used if the given type isn't supported. * @param {string} [quality] - A number between 0 and 1 indicating the image quality to be used when * creating images using file formats that support lossy compression (such as `image/jpeg` or `image/webp`). * A user agent will use its default quality value if this option is not specified, or if the number * is outside the allowed range. * @returns {string} A string containing the requested data URL. */ toDataURL?(type?: string, quality?: number): string; /** * Creates a Blob from the content of the canvas. * @param {(blob: Blob | null) => void} callback - A callback function with the resulting `Blob` object * as a single argument. `null` may be passed if the image cannot be created for any reason. * @param {string} [type] - A string indicating the image format. The default type is `image/png`; * that type is also used if the given type isn't supported. * @param {string} [quality] - A number between 0 and 1 indicating the image quality to be used when * creating images using file formats that support lossy compression (such as `image/jpeg` or `image/webp`). * A user agent will use its default quality value if this option is not specified, or if the number * is outside the allowed range. * @returns {void} */ toBlob?(callback: (blob: Blob | null) => void, type?: string, quality?: number): void; /** * Get the content of the canvas as Blob. * @param {object} [options] - The options for creating Blob. * @param {string} [options.type] - A string indicating the image format. The default type is `image/png`; * that type is also used if the given type isn't supported. 
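 *
 * A small sketch of the overload set above; a DOM canvas is one implementation of
 * `ICanvas` (OffscreenCanvas is another), and the optional members are called defensively:
 * ```ts
 * import type { ICanvas } from 'pixi.js';
 *
 * const canvas: ICanvas = document.createElement('canvas');
 * canvas.width = 256;
 * canvas.height = 256;
 *
 * const ctx = canvas.getContext('2d', { alpha: false });
 * const dataUrl = canvas.toDataURL?.('image/png');
 * ```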
* @param {string} [options.quality] - A number between 0 and 1 indicating the image quality to be used when * creating images using file formats that support lossy compression (such as `image/jpeg` or `image/webp`). * A user agent will use its default quality value if this option is not specified, or if the number * is outside the allowed range. * @returns {Promise} A `Promise` returning a Blob object representing the image contained in the canvas. */ convertToBlob?(options?: { type?: string; quality?: number; }): Promise; /** * Adds the listener for the specified event. * @method * @param {string} type - The type of event to listen for. * @param {EventListenerOrEventListenerObject} listener - The callback to invoke when the event is fired. * @param {boolean | AddEventListenerOptions} options - The options for adding event listener. * @returns {void} */ addEventListener?: { (type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void; (type: K, listener: (this: ICanvas, ev: WebGLContextEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void; }; /** * Removes the listener for the specified event. * @method * @param {string} type - The type of event to listen for. * @param {EventListenerOrEventListenerObject} listener - The callback to invoke when the event is fired. * @param {boolean | EventListenerOptions} options - The options for removing event listener. * @returns {void} */ removeEventListener?: { (type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions): void; (type: K, listener: (this: ICanvas, ev: WebGLContextEventMap[K]) => any, options?: boolean | EventListenerOptions): void; }; /** * Dispatches a event. * @param {Event} event - The Event object to dispatch. Its Event.target property will be set to the current EventTarget. * @returns {boolean} Returns false if event is cancelable, and at least one of the event handlers which received event * called Event.preventDefault(). Otherwise true. */ dispatchEvent(event: Event): boolean; /** Parent node of the canvas. */ readonly parentNode?: ICanvasParentNode | null; /** Style of the canvas. */ readonly style?: ICanvasStyle; /** * Get the position and the size of the canvas. * @returns The smallest rectangle which contains the entire canvas. */ getBoundingClientRect?(): ICanvasRect; } interface EarcutStatic { /** * Triangulate an outline. * * @param vertices A flat array of vertice coordinates like [x0,y0, x1,y1, x2,y2, ...]. * @param holes An array of hole indices if any (e.g. [5, 8] for a 12-vertice input would mean one hole with vertices 5–7 and another with 8–11). * @param dimensions The number of coordinates per vertice in the input array (2 by default). * @return A flat array with each group of three numbers indexing a triangle in the `vertices` array. * @example earcut([10,0, 0,50, 60,60, 70,10]); // returns [1,0,3, 3,2,1] * @example with a hole: earcut([0,0, 100,0, 100,100, 0,100, 20,20, 80,20, 80,80, 20,80], [4]); // [3,0,4, 5,4,0, 3,4,7, 5,0,1, 2,3,7, 6,5,1, 2,7,6, 6,1,2] * @example with 3d coords: earcut([10,0,1, 0,50,2, 60,60,3, 70,10,4], null, 3); // [1,0,3, 3,2,1] */ (vertices: ArrayLike, holes?: ArrayLike, dimensions?: number): number[]; /** * Transforms multi-dimensional array (e.g. GeoJSON Polygon) into the format expected by earcut. * @example Transforming GeoJSON data. 
* const data = earcut.flatten(geojson.geometry.coordinates); * const triangles = earcut(data.vertices, data.holes, data.dimensions); * @example Transforming simple triangle with hole: * const data = earcut.flatten([[[0, 0], [100, 0], [0, 100]], [[10, 10], [0, 10], [10, 0]]]); * const triangles = earcut(data.vertices, data.holes, data.dimensions); * @param data Arrays of rings, with the first being the outline and the rest holes. A ring is an array points, each point being an array of numbers. */ flatten(data: ArrayLike>>): { vertices: number[]; holes: number[]; dimensions: number; }; /** * Returns the relative difference between the total area of triangles and the area of the input polygon. 0 means the triangulation is fully correct. * @param vertices same as earcut * @param holes same as earcut * @param dimensions same as earcut * @param triangles see return value of earcut * @example * const triangles = earcut(vertices, holes, dimensions); * const deviation = earcut.deviation(vertices, holes, dimensions, triangles); */ deviation(vertices: ArrayLike, holes: ArrayLike | undefined, dimensions: number, triangles: ArrayLike): number; default: EarcutStatic; } declare const exports$1: EarcutStatic; /** * SystemRunner is used internally by the renderers as an efficient way for systems to * be notified about what the renderer is up to during the rendering phase. * * ``` * import { SystemRunner } from 'pixi.js'; * * const myObject = { * loaded: new SystemRunner('loaded') * } * * const listener = { * loaded: function(){ * // thin * } * } * * myObject.loaded.add(listener); * * myObject.loaded.emit(); * ``` * * Or for handling calling the same function on many items * ``` * import { SystemRunner } from 'pixi.js'; * * const myGame = { * update: new SystemRunner('update') * } * * const gameObject = { * update: function(time){ * // update my gamey state * } * } * * myGame.update.add(gameObject); * * myGame.update.emit(time); * ``` * @memberof rendering */ export declare class SystemRunner { items: any[]; private _name; /** * @param name - The function name that will be executed on the listeners added to this Runner. */ constructor(name: string); /** * Dispatch/Broadcast Runner to all listeners added to the queue. * @param {...any} params - (optional) parameters to pass to each listener */ emit(a0?: unknown, a1?: unknown, a2?: unknown, a3?: unknown, a4?: unknown, a5?: unknown, a6?: unknown, a7?: unknown): this; /** * Add a listener to the Runner * * Runners do not need to have scope or functions passed to them. * All that is required is to pass the listening object and ensure that it has contains a function that has the same name * as the name provided to the Runner when it was created. * * Eg A listener passed to this Runner will require a 'complete' function. * * ``` * import { Runner } from 'pixi.js'; * * const complete = new Runner('complete'); * ``` * * The scope used will be the object itself. * @param {any} item - The object that will be listening. */ add(item: unknown): this; /** * Remove a single listener from the dispatch queue. * @param {any} item - The listener that you would like to remove. */ remove(item: unknown): this; /** * Check to see if the listener is already in the Runner * @param {any} item - The listener that you would like to check. */ contains(item: unknown): boolean; /** Remove all listeners from the Runner */ removeAll(): this; /** Remove all references, don't use after this. 
*/ destroy(): void; /** * `true` if there are no this Runner contains no listeners * @readonly */ get empty(): boolean; /** * The name of the runner. * @readonly */ get name(): string; } export declare enum CLEAR { NONE = 0, COLOR = 16384, STENCIL = 1024, DEPTH = 256, COLOR_DEPTH = 16640, COLOR_STENCIL = 17408, DEPTH_STENCIL = 1280, ALL = 17664 } /** Used for clearing render textures. true is the same as `ALL` false is the same as `NONE` */ export type CLEAR_OR_BOOL = CLEAR | boolean; /** * Options for the background system. * @property {ColorSource} [backgroundColor='black'] * The background color used to clear the canvas. See {@link ColorSource} for accepted color values. * @property {ColorSource} [background] - Alias for backgroundColor * @property {number} [backgroundAlpha=1] - * Transparency of the background color, value from `0` (fully transparent) to `1` (fully opaque). * @property {boolean} [clearBeforeRender=true] - Whether to clear the canvas before new render passes. * @memberof rendering */ export interface BackgroundSystemOptions { /** * The background color used to clear the canvas. See {@link ColorSource} for accepted color values. * @memberof rendering.SharedRendererOptions * @default 'black' */ backgroundColor: ColorSource; /** * Alias for backgroundColor * @memberof rendering.SharedRendererOptions */ background?: ColorSource; /** * Transparency of the background color, value from `0` (fully transparent) to `1` (fully opaque). * @memberof rendering.SharedRendererOptions * @default 1 */ backgroundAlpha: number; /** * Whether to clear the canvas before new render passes. * @memberof rendering.SharedRendererOptions * @default true */ clearBeforeRender: boolean; } /** * The background system manages the background color and alpha of the main view. * @memberof rendering */ export declare class BackgroundSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "background"; readonly priority: 0; }; /** default options used by the system */ static defaultOptions: BackgroundSystemOptions; /** * This sets if the CanvasRenderer will clear the canvas or not before the new render pass. * If the scene is NOT transparent PixiJS will use a canvas sized fillRect operation every * frame to set the canvas background color. If the scene is transparent PixiJS will use clearRect * to clear the canvas every frame. Disable this by setting this to false. For example, if * your game has a canvas filling background image you often don't need this set. */ clearBeforeRender: boolean; private readonly _backgroundColor; constructor(); /** * initiates the background system * @param options - the options for the background colors */ init(options: BackgroundSystemOptions): void; /** The background color to fill if not transparent */ get color(): Color; set color(value: ColorSource); /** The background color alpha. Setting this to 0 will make the canvas transparent. */ get alpha(): number; set alpha(value: number); /** The background color as an [R, G, B, A] array. */ get colorRgba(): RgbaArray; /** * destroys the background system * @internal * @ignore */ destroy(): void; } /** * A bind group is a collection of resources that are bound together for use by a shader. * They are essentially a wrapper for the WebGPU BindGroup class. But with the added bonus * that WebGL can also work with them. 
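 *
 * (Continuing from the background system declared above: a minimal sketch of the option
 * shape. In practice the renderer constructs and initializes the system for you.)
 * ```ts
 * import { BackgroundSystem } from 'pixi.js';
 *
 * const background = new BackgroundSystem();
 * background.init({
 *     backgroundColor: '#1d2230', // any ColorSource is accepted
 *     backgroundAlpha: 1,
 *     clearBeforeRender: true,
 * });
 * const current = background.color; // a Color instance reflecting '#1d2230'
 * ```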
* @see https://gpuweb.github.io/gpuweb/#dictdef-gpubindgroupdescriptor * @example * // Create a bind group with a single texture and sampler * const bindGroup = new BindGroup({ * uTexture: texture.source, * uTexture: texture.style, * }); * * Bind groups resources must implement the {@link BindResource} interface. * The following resources are supported: * - {@link TextureSource} * - {@link TextureStyle} * - {@link Buffer} * - {@link BufferResource} * - {@link UniformGroup} * * The keys in the bind group must correspond to the names of the resources in the GPU program. * * This bind group class will also watch for changes in its resources ensuring that the changes * are reflected in the WebGPU BindGroup. * @memberof rendering */ export declare class BindGroup { /** The resources that are bound together for use by a shader. */ resources: Record; /** * a key used internally to match it up to a WebGPU Bindgroup * @internal * @ignore */ _key: string; private _dirty; /** * Create a new instance eof the Bind Group. * @param resources - The resources that are bound together for use by a shader. */ constructor(resources?: Record); /** * Updates the key if its flagged as dirty. This is used internally to * match this bind group to a WebGPU BindGroup. * @internal * @ignore */ _updateKey(): void; /** * Set a resource at a given index. this function will * ensure that listeners will be removed from the current resource * and added to the new resource. * @param resource - The resource to set. * @param index - The index to set the resource at. */ setResource(resource: BindResource, index: number): void; /** * Returns the resource at the current specified index. * @param index - The index of the resource to get. * @returns - The resource at the specified index. */ getResource(index: number): BindResource; /** * Used internally to 'touch' each resource, to ensure that the GC * knows that all resources in this bind group are still being used. * @param tick - The current tick. * @internal * @ignore */ _touch(tick: number): void; /** Destroys this bind group and removes all listeners. */ destroy(): void; protected onResourceChange(resource: BindResource): void; } /** * an interface that allows a resource to be bound to the gpu in a bind group * @memberof rendering */ export interface BindResource { /** * The type of resource this is * @ignore */ _resourceType: string; /** * Unique id for this resource this can change and is used to link the gpu * @ignore */ _resourceId: number; _touched: number; /** * a boolean that indicates if the resource has been destroyed. * If true, the resource should not be used and any bind groups * that will release any references to this resource. * @ignore */ destroyed: boolean; /** * event dispatch whenever the underlying resource needs to change * this could be a texture or buffer that has been resized. * This is important as it allows the renderer to know that it needs to rebind the resource */ on?(event: "change", listenerFunction: (resource: BindResource) => void, listener: BindGroup): void; /** @todo */ off?(event: "change", listenerFunction: (resource: BindResource) => void, listener: BindGroup): void; } /** * Specifies the alpha composition mode for textures. * * - `no-premultiply-alpha`: Does not premultiply alpha. * - `premultiply-alpha-on-upload`: Premultiplies alpha on texture upload. * - `premultiplied-alpha`: Assumes the texture is already in premultiplied alpha format. 
* @typedef {'no-premultiply-alpha' | 'premultiply-alpha-on-upload' | 'premultiplied-alpha'} ALPHA_MODES */ export type ALPHA_MODES = "no-premultiply-alpha" | "premultiply-alpha-on-upload" | "premultiplied-alpha"; /** * Constants for multi-sampling antialiasing. * @see Framebuffer#multisample * @name MSAA_QUALITY * @static * @enum {number} * @property {number} NONE - No multisampling for this renderTexture * @property {number} LOW - Try 2 samples * @property {number} MEDIUM - Try 4 samples * @property {number} HIGH - Try 8 samples */ export declare enum MSAA_QUALITY { NONE = 0, LOW = 2, MEDIUM = 4, HIGH = 8 } export type TEXTURE_FORMATS = "r8unorm" | "r8snorm" | "r8uint" | "r8sint" | "r16uint" | "r16sint" | "r16float" | "rg8unorm" | "rg8snorm" | "rg8uint" | "rg8sint" | "r32uint" | "r32sint" | "r32float" | "rg16uint" | "rg16sint" | "rg16float" | "rgba8unorm" | "rgba8unorm-srgb" | "rgba8snorm" | "rgba8uint" | "rgba8sint" | "bgra8unorm" | "bgra8unorm-srgb" | "rgb9e5ufloat" | "rgb10a2unorm" | "rg11b10ufloat" | "rg32uint" | "rg32sint" | "rg32float" | "rgba16uint" | "rgba16sint" | "rgba16float" | "rgba32uint" | "rgba32sint" | "rgba32float" | "stencil8" | "depth16unorm" | "depth24plus" | "depth24plus-stencil8" | "depth32float" | "depth32float-stencil8" | "bc1-rgba-unorm" | "bc1-rgba-unorm-srgb" | "bc2-rgba-unorm" | "bc2-rgba-unorm-srgb" | "bc3-rgba-unorm" | "bc3-rgba-unorm-srgb" | "bc4-r-unorm" | "bc4-r-snorm" | "bc5-rg-unorm" | "bc5-rg-snorm" | "bc6h-rgb-ufloat" | "bc6h-rgb-float" | "bc7-rgba-unorm" | "bc7-rgba-unorm-srgb" | "etc2-rgb8unorm" | "etc2-rgb8unorm-srgb" | "etc2-rgb8a1unorm" | "etc2-rgb8a1unorm-srgb" | "etc2-rgba8unorm" | "etc2-rgba8unorm-srgb" | "eac-r11unorm" | "eac-r11snorm" | "eac-rg11unorm" | "eac-rg11snorm" | "astc-4x4-unorm" | "astc-4x4-unorm-srgb" | "astc-5x4-unorm" | "astc-5x4-unorm-srgb" | "astc-5x5-unorm" | "astc-5x5-unorm-srgb" | "astc-6x5-unorm" | "astc-6x5-unorm-srgb" | "astc-6x6-unorm" | "astc-6x6-unorm-srgb" | "astc-8x5-unorm" | "astc-8x5-unorm-srgb" | "astc-8x6-unorm" | "astc-8x6-unorm-srgb" | "astc-8x8-unorm" | "astc-8x8-unorm-srgb" | "astc-10x5-unorm" | "astc-10x5-unorm-srgb" | "astc-10x6-unorm" | "astc-10x6-unorm-srgb" | "astc-10x8-unorm" | "astc-10x8-unorm-srgb" | "astc-10x10-unorm" | "astc-10x10-unorm-srgb" | "astc-12x10-unorm" | "astc-12x10-unorm-srgb" | "astc-12x12-unorm" | "astc-12x12-unorm-srgb"; export type TEXTURE_VIEW_DIMENSIONS = "1d" | "2d" | "2d-array" | "cube" | "cube-array" | "3d"; export type TEXTURE_DIMENSIONS = "1d" | "2d" | "3d"; export type WRAP_MODE = /** * The texture uvs are clamped * @default 33071 */ "clamp-to-edge" /** * The texture uvs tile and repeat * @default 10497 */ | "repeat" /** * The texture uvs tile and repeat with mirroring * @default 33648 */ | "mirror-repeat"; export declare enum DEPRECATED_WRAP_MODES { CLAMP = "clamp-to-edge", REPEAT = "repeat", MIRRORED_REPEAT = "mirror-repeat" } /** @deprecated since 8.0.0 */ export declare const WRAP_MODES: typeof DEPRECATED_WRAP_MODES; /** * The scale modes that are supported by pixi. * * The {@link settings.SCALE_MODE} scale mode affects the default scaling mode of future operations. * It can be re-assigned to either LINEAR or NEAREST, depending upon suitability. 
* @static */ export type SCALE_MODE = /** Pixelating scaling */ "nearest" /** Smooth scaling */ | "linear"; export declare enum DEPRECATED_SCALE_MODES { NEAREST = "nearest", LINEAR = "linear" } /** * @deprecated since 8.0.0 */ export declare const SCALE_MODES: typeof DEPRECATED_SCALE_MODES; export type COMPARE_FUNCTION = "never" | "less" | "equal" | "less-equal" | "greater" | "not-equal" | "greater-equal" | "always"; export interface TextureStyleOptions extends Partial { /** setting this will set wrapModeU,wrapModeV and wrapModeW all at once! */ addressMode?: WRAP_MODE; /** specifies the {{GPUAddressMode|address modes}} for the texture width, height, and depth coordinates, respectively. */ addressModeU?: WRAP_MODE; /** specifies the {{GPUAddressMode|address modes}} for the texture width, height, and depth coordinates, respectively. */ addressModeV?: WRAP_MODE; /** Specifies the {{GPUAddressMode|address modes}} for the texture width, height, and depth coordinates, respectively. */ addressModeW?: WRAP_MODE; /** setting this will set magFilter,minFilter and mipmapFilter all at once! */ scaleMode?: SCALE_MODE; /** specifies the sampling behavior when the sample footprint is smaller than or equal to one texel. */ magFilter?: SCALE_MODE; /** specifies the sampling behavior when the sample footprint is larger than one texel. */ minFilter?: SCALE_MODE; /** specifies behavior for sampling between mipmap levels. */ mipmapFilter?: SCALE_MODE; /** specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. */ lodMinClamp?: number; /** Specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. */ lodMaxClamp?: number; /** * When provided the sampler will be a comparison sampler with the specified * {@link GPUCompareFunction}. * Note: Comparison samplers may use filtering, but the sampling results will be * implementation-dependent and may differ from the normal filtering rules. */ compare?: COMPARE_FUNCTION; /** * Specifies the maximum anisotropy value clamp used by the sampler. * Note: Most implementations support {@link GPUSamplerDescriptor#maxAnisotropy} values in range * between 1 and 16, inclusive. The used value of {@link GPUSamplerDescriptor#maxAnisotropy} will * be clamped to the maximum value that the platform supports. * * setting this to anything higher than 1 will set scale modes to 'linear' */ maxAnisotropy?: number; } /** * A texture style describes how a texture should be sampled by a shader. * @memberof rendering */ export declare class TextureStyle extends EventEmitter<{ change: TextureStyle; destroy: TextureStyle; }> implements BindResource { _resourceType: string; _touched: number; private _sharedResourceId; /** default options for the style */ static readonly defaultOptions: TextureStyleOptions; /** */ addressModeU?: WRAP_MODE; /** */ addressModeV?: WRAP_MODE; /** Specifies the {{GPUAddressMode|address modes}} for the texture width, height, and depth coordinates, respectively. */ addressModeW?: WRAP_MODE; /** Specifies the sampling behavior when the sample footprint is smaller than or equal to one texel. */ magFilter?: SCALE_MODE; /** Specifies the sampling behavior when the sample footprint is larger than one texel. */ minFilter?: SCALE_MODE; /** Specifies behavior for sampling between mipmap levels. */ mipmapFilter?: SCALE_MODE; /** */ lodMinClamp?: number; /** Specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. 
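 *
 * A minimal sketch of the option bag above (each shorthand fans out to the individual
 * properties, as noted in the member docs):
 * ```ts
 * import { TextureStyle } from 'pixi.js';
 *
 * const style = new TextureStyle({
 *     addressMode: 'repeat', // sets addressModeU/V/W in one go
 *     scaleMode: 'linear',   // sets magFilter/minFilter/mipmapFilter in one go
 *     maxAnisotropy: 8,      // clamped to what the platform supports
 * });
 * ```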
*/ lodMaxClamp?: number; /** * When provided the sampler will be a comparison sampler with the specified * {@link GPUCompareFunction}. * Note: Comparison samplers may use filtering, but the sampling results will be * implementation-dependent and may differ from the normal filtering rules. */ compare?: COMPARE_FUNCTION; /** * Specifies the maximum anisotropy value clamp used by the sampler. * Note: Most implementations support {@link GPUSamplerDescriptor#maxAnisotropy} values in range * between 1 and 16, inclusive. The used value of {@link GPUSamplerDescriptor#maxAnisotropy} will * be clamped to the maximum value that the platform supports. * @internal * @ignore */ _maxAnisotropy?: number; /** * Has the style been destroyed? * @readonly */ destroyed: boolean; /** * @param options - options for the style */ constructor(options?: TextureStyleOptions); set addressMode(value: WRAP_MODE); /** setting this will set wrapModeU,wrapModeV and wrapModeW all at once! */ get addressMode(): WRAP_MODE; set wrapMode(value: WRAP_MODE); get wrapMode(): WRAP_MODE; set scaleMode(value: SCALE_MODE); /** setting this will set magFilter,minFilter and mipmapFilter all at once! */ get scaleMode(): SCALE_MODE; /** Specifies the maximum anisotropy value clamp used by the sampler. */ set maxAnisotropy(value: number); get maxAnisotropy(): number; get _resourceId(): number; update(): void; private _generateResourceId; /** Destroys the style */ destroy(): void; } export interface CanvasSourceOptions extends TextureSourceOptions { /** should the canvas be resized to preserve its screen width and height regardless of the resolution of the renderer */ autoDensity?: boolean; /** if true, this canvas will be set up to be transparent where possible */ transparent?: boolean; } export declare class CanvasSource extends TextureSource { static extension: ExtensionMetadata; uploadMethodId: string; autoDensity: boolean; transparent: boolean; private _context2D; constructor(options: CanvasSourceOptions); resizeCanvas(): void; resize(width?: number, height?: number, resolution?: number): boolean; static test(resource: any): resource is ICanvas; /** * Returns the 2D rendering context for the canvas. * Caches the context after creating it. * @returns The 2D rendering context of the canvas. */ get context2D(): CanvasRenderingContext2D; } export type ImageResource = ImageBitmap | HTMLCanvasElement | OffscreenCanvas | ICanvas | VideoFrame | HTMLImageElement | HTMLVideoElement; export declare class ImageSource extends TextureSource { static extension: ExtensionMetadata; uploadMethodId: string; constructor(options: TextureSourceOptions); static test(resource: any): resource is ImageResource; } export type TextureResourceOrOptions = ImageResource | TextureSourceOptions | BufferSourceOptions | CanvasSourceOptions; /** * @param options * @deprecated since v8.2.0 * @see TextureSource.from */ export declare function autoDetectSource(options?: TextureResourceOrOptions): TextureSource; export declare function resourceToTexture(options?: TextureResourceOrOptions, skipCache?: boolean): Texture; /** * Helper function that creates a returns Texture based on the source you provide. * The source should be loaded and ready to go. If not its best to grab the asset using Assets. 
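 *
 * A minimal sketch (a canvas is one of the accepted, already-loaded source types):
 * ```ts
 * import { textureFrom } from 'pixi.js';
 *
 * const canvas = document.createElement('canvas');
 * canvas.width = canvas.height = 64;
 *
 * const texture = textureFrom(canvas);
 * ```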
* @param id - String or Source to create texture from * @param skipCache - Skip adding the texture to the cache * @returns The texture based on the Id provided */ export declare function textureFrom(id: TextureSourceLike, skipCache?: boolean): Texture; /** * options for creating a new TextureSource * @memberof rendering */ export interface TextureSourceOptions = any> extends TextureStyleOptions { /** * the resource that will be uploaded to the GPU. This is where we get our pixels from * eg an ImageBimt / Canvas / Video etc */ resource?: T; /** the pixel width of this texture source. This is the REAL pure number, not accounting resolution */ width?: number; /** the pixel height of this texture source. This is the REAL pure number, not accounting resolution */ height?: number; /** the resolution of the texture. */ resolution?: number; /** the format that the texture data has */ format?: TEXTURE_FORMATS; /** * Used by internal textures * @ignore */ sampleCount?: number; /** * Only really affects RenderTextures. * Should we use antialiasing for this texture. It will look better, but may impact performance as a * Blit operation will be required to resolve the texture. */ antialias?: boolean; /** how many dimensions does this texture have? currently v8 only supports 2d */ dimensions?: TEXTURE_DIMENSIONS; /** The number of mip levels to generate for this texture. this is overridden if autoGenerateMipmaps is true */ mipLevelCount?: number; /** * Should we auto generate mipmaps for this texture? This will automatically generate mipmaps * for this texture when uploading to the GPU. Mipmapped textures take up more memory, but * can look better when scaled down. * * For performance reasons, it is recommended to NOT use this with RenderTextures, as they are often updated every frame. * If you do, make sure to call `updateMipmaps` after you update the texture. */ autoGenerateMipmaps?: boolean; /** the alpha mode of the texture */ alphaMode?: ALPHA_MODES; /** optional label, can be used for debugging */ label?: string; /** If true, the Garbage Collector will unload this texture if it is not used after a period of time */ autoGarbageCollect?: boolean; } /** * A TextureSource stores the information that represents an image. * All textures have require TextureSource, which contains information about the source. * Therefore you can have many textures all using a single TextureSource (eg a sprite sheet) * * This is an class is extended depending on the source of the texture. * Eg if you are using an an image as your resource, then an ImageSource is used. * @memberof rendering * @typeParam T - The TextureSource's Resource type. */ export declare class TextureSource = any> extends EventEmitter<{ change: BindResource; update: TextureSource; unload: TextureSource; destroy: TextureSource; resize: TextureSource; styleChange: TextureSource; updateMipmaps: TextureSource; error: Error; }> implements BindResource { protected readonly options: TextureSourceOptions; /** The default options used when creating a new TextureSource. override these to add your own defaults */ static defaultOptions: TextureSourceOptions; /** unique id for this Texture source */ readonly uid: number; /** optional label, can be used for debugging */ label: string; /** * The resource type used by this TextureSource. This is used by the bind groups to determine * how to handle this resource. * @ignore * @internal */ readonly _resourceType = "textureSource"; /** * i unique resource id, used by the bind group systems. 
* This can change if the texture is resized or its resource changes */ _resourceId: number; /** * this is how the backends know how to upload this texture to the GPU * It changes depending on the resource type. Classes that extend TextureSource * should override this property. * @ignore * @internal */ uploadMethodId: string; _resolution: number; /** the pixel width of this texture source. This is the REAL pure number, not accounting resolution */ pixelWidth: number; /** the pixel height of this texture source. This is the REAL pure number, not accounting resolution */ pixelHeight: number; /** * the width of this texture source, accounting for resolution * eg pixelWidth 200, resolution 2, then width will be 100 */ width: number; /** * the height of this texture source, accounting for resolution * eg pixelHeight 200, resolution 2, then height will be 100 */ height: number; /** * the resource that will be uploaded to the GPU. This is where we get our pixels from * eg an ImageBimt / Canvas / Video etc */ resource: T; /** * The number of samples of a multisample texture. This is always 1 for non-multisample textures. * To enable multisample for a texture, set antialias to true * @internal * @ignore */ sampleCount: number; /** The number of mip levels to generate for this texture. this is overridden if autoGenerateMipmaps is true */ mipLevelCount: number; /** * Should we auto generate mipmaps for this texture? This will automatically generate mipmaps * for this texture when uploading to the GPU. Mipmapped textures take up more memory, but * can look better when scaled down. * * For performance reasons, it is recommended to NOT use this with RenderTextures, as they are often updated every frame. * If you do, make sure to call `updateMipmaps` after you update the texture. */ autoGenerateMipmaps: boolean; /** the format that the texture data has */ format: TEXTURE_FORMATS; /** how many dimensions does this texture have? currently v8 only supports 2d */ dimension: TEXTURE_DIMENSIONS; /** the alpha mode of the texture */ alphaMode: ALPHA_MODES; private _style; /** * Only really affects RenderTextures. * Should we use antialiasing for this texture. It will look better, but may impact performance as a * Blit operation will be required to resolve the texture. */ antialias: boolean; /** * Has the source been destroyed? * @readonly */ destroyed: boolean; /** * Used by automatic texture Garbage Collection, stores last GC tick when it was bound * @protected */ _touched: number; /** * Used by the batcher to build texture batches. faster to have the variable here! * @protected */ _batchTick: number; /** * A temporary batch location for the texture batching. Here for performance reasons only! * @protected */ _textureBindLocation: number; isPowerOfTwo: boolean; /** If true, the Garbage Collector will unload this texture if it is not used after a period of time */ autoGarbageCollect: boolean; /** * used internally to know where a texture came from. Usually assigned by the asset loader! * @ignore */ _sourceOrigin: string; /** * @param options - options for creating a new TextureSource */ constructor(options?: TextureSourceOptions); /** returns itself */ get source(): TextureSource; /** the style of the texture */ get style(): TextureStyle; set style(value: TextureStyle); /** setting this will set wrapModeU,wrapModeV and wrapModeW all at once! */ get addressMode(): WRAP_MODE; set addressMode(value: WRAP_MODE); /** setting this will set wrapModeU,wrapModeV and wrapModeW all at once! 
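 *
 * A minimal sketch of the shared option bag, shown here via the `CanvasSource` subclass
 * declared earlier (resolution 2 halves the logical size of the 128px canvas to 64):
 * ```ts
 * import { CanvasSource } from 'pixi.js';
 *
 * const canvas = document.createElement('canvas');
 * canvas.width = canvas.height = 128;
 *
 * const source = new CanvasSource({
 *     resource: canvas,
 *     resolution: 2,
 *     autoDensity: true,
 *     label: 'ui-canvas-source',
 * });
 * ```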
*/ get repeatMode(): WRAP_MODE; set repeatMode(value: WRAP_MODE); /** Specifies the sampling behavior when the sample footprint is smaller than or equal to one texel. */ get magFilter(): SCALE_MODE; set magFilter(value: SCALE_MODE); /** Specifies the sampling behavior when the sample footprint is larger than one texel. */ get minFilter(): SCALE_MODE; set minFilter(value: SCALE_MODE); /** Specifies behavior for sampling between mipmap levels. */ get mipmapFilter(): SCALE_MODE; set mipmapFilter(value: SCALE_MODE); /** Specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. */ get lodMinClamp(): number; set lodMinClamp(value: number); /** Specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. */ get lodMaxClamp(): number; set lodMaxClamp(value: number); private _onStyleChange; /** call this if you have modified the texture outside of the constructor */ update(): void; /** Destroys this texture source */ destroy(): void; /** * This will unload the Texture source from the GPU. This will free up the GPU memory * As soon as it is required fore rendering, it will be re-uploaded. */ unload(): void; /** the width of the resource. This is the REAL pure number, not accounting resolution */ get resourceWidth(): number; /** the height of the resource. This is the REAL pure number, not accounting resolution */ get resourceHeight(): number; /** * the resolution of the texture. Changing this number, will not change the number of pixels in the actual texture * but will the size of the texture when rendered. * * changing the resolution of this texture to 2 for example will make it appear twice as small when rendered (as pixel * density will have increased) */ get resolution(): number; set resolution(resolution: number); /** * Resize the texture, this is handy if you want to use the texture as a render texture * @param width - the new width of the texture * @param height - the new height of the texture * @param resolution - the new resolution of the texture * @returns - if the texture was resized */ resize(width?: number, height?: number, resolution?: number): boolean; /** * Lets the renderer know that this texture has been updated and its mipmaps should be re-generated. * This is only important for RenderTexture instances, as standard Texture instances will have their * mipmaps generated on upload. You should call this method after you make any change to the texture * * The reason for this is is can be quite expensive to update mipmaps for a texture. So by default, * We want you, the developer to specify when this action should happen. * * Generally you don't want to have mipmaps generated on Render targets that are changed every frame, */ updateMipmaps(): void; set wrapMode(value: WRAP_MODE); get wrapMode(): WRAP_MODE; set scaleMode(value: SCALE_MODE); /** setting this will set magFilter,minFilter and mipmapFilter all at once! */ get scaleMode(): SCALE_MODE; /** * Refresh check for isPowerOfTwo texture based on size * @private */ protected _refreshPOT(): void; static test(_resource: any): any; /** * A helper function that creates a new TextureSource based on the resource you provide. * @param resource - The resource to create the texture source from. */ static from: (resource: TextureResourceOrOptions) => TextureSource; } /** * Buffer usage flags. 
they can be combined using the bitwise OR operator * eg : BufferUsage.VERTEX | BufferUsage.INDEX * @memberof rendering */ export declare enum BufferUsage { /** * The buffer can be mapped for reading. (Example: calling mapAsync() with GPUMapMode.READ) * May only be combined with COPY_DST. */ MAP_READ = 1, /** * The buffer can be mapped for writing. (Example: calling mapAsync() with GPUMapMode.WRITE) * May only be combined with COPY_SRC. */ MAP_WRITE = 2, /** * The buffer can be used as the source of a copy operation. * (Examples: as the source argument of a copyBufferToBuffer() or copyBufferToTexture() call.) */ COPY_SRC = 4, /** * The buffer can be used as the destination of a copy or write operation. * (Examples: as the destination argument of a copyBufferToBuffer() or * copyTextureToBuffer() call, or as the target of a writeBuffer() call.) */ COPY_DST = 8, /** The buffer can be used as an index buffer. (Example: passed to setIndexBuffer().) */ INDEX = 16, /** The buffer can be used as a vertex buffer. (Example: passed to setVertexBuffer().) */ VERTEX = 32, /** * The buffer can be used as a uniform buffer. * (Example: as a bind group entry for a GPUBufferBindingLayout with a buffer.type of "uniform".) */ UNIFORM = 64, /** * The buffer can be used as a storage buffer. * (Example: as a bind group entry for a GPUBufferBindingLayout with a buffer.type of "storage" or "read-only-storage".) */ STORAGE = 128, /** * The buffer can be used as to store indirect command arguments. * (Examples: as the indirectBuffer argument of a drawIndirect() or dispatchWorkgroupsIndirect() call.) */ INDIRECT = 256, /** * The buffer can be used to capture query results. * (Example: as the destination argument of a resolveQuerySet() call.) */ QUERY_RESOLVE = 512, /** the buffer will not be updated frequently */ STATIC = 1024 } /** All the various typed arrays that exist in js */ export type TypedArray = Int8Array | Uint8Array | Int16Array | Uint16Array | Int32Array | Uint32Array | Uint8ClampedArray | Float32Array | Float64Array; /** Options for creating a buffer */ export interface BufferOptions { /** * the data to initialize the buffer with, this can be a typed array, * or a regular number array. If it is a number array, it will be converted to a Float32Array */ data?: TypedArray | number[]; /** the size of the buffer in bytes, if not supplied, it will be inferred from the data */ size?: number; /** the usage of the buffer, see {@link rendering.BufferUsage} */ usage: number; /** a label for the buffer, this is useful for debugging */ label?: string; /** * should the GPU buffer be shrunk when the data becomes smaller? * changing this will cause the buffer to be destroyed and a new one created on the GPU * this can be expensive, especially if the buffer is already big enough! * setting this to false will prevent the buffer from being shrunk. This will yield better performance * if you are constantly setting data that is changing size often. * @default true */ shrinkToFit?: boolean; } export interface BufferDescriptor { label?: string; size: GPUSize64; usage: BufferUsage; mappedAtCreation?: boolean; } /** * A wrapper for a WebGPU/WebGL Buffer. * In PixiJS, the Buffer class is used to manage the data that is sent to the GPU rendering pipeline. * It abstracts away the underlying GPU buffer and provides an interface for uploading typed arrays or other data to the GPU, * They are used in the following places: *

* 1. {@link Geometry} as attribute data or index data for geometry *
* 2. {@link UniformGroup} as an underlying buffer for uniform data *
* 3. {@link BufferResource} as an underlying part of a buffer used directly by the GPU program *
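 * For illustration only, a minimal sketch (not from the PixiJS docs) of a buffer intended for index data,
 * combining usage flags with the bitwise OR operator as described above:
 * ```ts
 * import { Buffer, BufferUsage } from 'pixi.js';
 *
 * const indexBuffer = new Buffer({
 *     data: new Uint32Array([0, 1, 2]),
 *     usage: BufferUsage.INDEX | BufferUsage.COPY_DST,
 * });
 * ```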
* * It is important to note that you must provide a usage type when creating a buffer. This is because * the underlying GPU buffer needs to know how it will be used. For example, if you are creating a buffer * to hold vertex data, you would use `BufferUsage.VERTEX`. This will tell the GPU that this buffer will be * used as a vertex buffer. This is important because it will affect how you can use the buffer. * * Buffers are updated by calling the {@link Buffer.update} method. This immediately updates the buffer on the GPU. * Be mindful of calling this more often than you need to. It is recommended to update buffers only when needed. * * In WebGPU, a GPU buffer cannot resized. This limitation is abstracted away, but know that resizing a buffer means * creating a brand new one and destroying the old, so it is best to limit this if possible. * @example * * const buffer = new Buffer({ * data: new Float32Array([1, 2, 3, 4]), * usage: BufferUsage.VERTEX, * }); * @memberof rendering */ declare class Buffer$1 extends EventEmitter<{ change: BindResource; update: Buffer$1; destroy: Buffer$1; }> implements BindResource { /** * emits when the underlying buffer has changed shape (i.e. resized) * letting the renderer know that it needs to discard the old buffer on the GPU and create a new one * @event change */ /** * emits when the underlying buffer data has been updated. letting the renderer know * that it needs to update the buffer on the GPU * @event update */ /** * emits when the buffer is destroyed. letting the renderer know that it needs to destroy the buffer on the GPU * @event destroy */ /** a unique id for this uniform group used through the renderer */ readonly uid: number; /** * a resource type, used to identify how to handle it when its in a bind group / shader resource * @internal * @ignore */ readonly _resourceType = "buffer"; /** * the resource id used internally by the renderer to build bind group keys * @internal * @ignore */ _resourceId: number; /** * used internally to know if a uniform group was used in the last render pass * @internal * @ignore */ _touched: number; /** * a description of the buffer and how it should be set up on the GPU * @internal * @ignore */ readonly descriptor: BufferDescriptor; /** * @internal * @ignore */ _updateID: number; /** * @internal * @ignore */ _updateSize: number; private _data; /** * should the GPU buffer be shrunk when the data becomes smaller? * changing this will cause the buffer to be destroyed and a new one created on the GPU * this can be expensive, especially if the buffer is already big enough! * setting this to false will prevent the buffer from being shrunk. This will yield better performance * if you are constantly setting data that is changing size often. * @default true */ shrinkToFit: boolean; /** * Has the buffer been destroyed? * @readonly */ destroyed: boolean; /** * Creates a new Buffer with the given options * @param options - the options for the buffer */ constructor(options: BufferOptions); /** the data in the buffer */ get data(): TypedArray; set data(value: TypedArray); /** whether the buffer is static or not */ get static(): boolean; set static(value: boolean); /** * Sets the data in the buffer to the given value. This will immediately update the buffer on the GPU. * If you only want to update a subset of the buffer, you can pass in the size of the data. * @param value - the data to set * @param size - the size of the data in bytes * @param syncGPU - should the buffer be updated on the GPU immediately? 
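 * @example
 * // A hedged, illustrative sketch (not part of the official docs): update only a prefix of the data.
 * // A Uint8Array is used here so the element count and the byte count coincide.
 * const bytes = new Uint8Array([255, 0, 0, 255, 0, 255, 0, 255]);
 * const buffer = new Buffer({ data: bytes, usage: BufferUsage.VERTEX | BufferUsage.COPY_DST });
 *
 * bytes.set([0, 0, 255, 255], 0);          // change the first four entries in place
 * buffer.setDataWithSize(bytes, 4, true);  // re-upload just that prefix to the GPU immediately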
*/ setDataWithSize(value: TypedArray, size: number, syncGPU: boolean): void; /** * updates the buffer on the GPU to reflect the data in the buffer. * By default it will update the entire buffer. If you only want to update a subset of the buffer, * you can pass in the size of the buffer to update. * @param sizeInBytes - the new size of the buffer in bytes */ update(sizeInBytes?: number): void; /** Destroys the buffer */ destroy(): void; } export interface BufferSourceOptions extends TextureSourceOptions { width: number; height: number; } export declare class BufferImageSource extends TextureSource { static extension: ExtensionMetadata; uploadMethodId: string; constructor(options: BufferSourceOptions); static test(resource: any): resource is TypedArray | ArrayBuffer; } /** * Class controls uv mapping from Texture normal space to BaseTexture normal space. * * Takes `trim` and `rotate` into account. May contain clamp settings for Meshes and TilingSprite. * * Can be used in Texture `uvMatrix` field, or separately, you can use different clamp settings on the same texture. * If you want to add support for texture region of certain feature or filter, that's what you're looking for. * * Takes track of Texture changes through `_lastTextureID` private field. * Use `update()` method call to track it from outside. * @see Texture * @see Mesh * @see TilingSprite * @memberof rendering */ export declare class TextureMatrix { /** * Matrix operation that converts texture region coords to texture coords * @readonly */ mapCoord: Matrix; /** * Changes frame clamping * Works with TilingSprite and Mesh * Change to 1.5 if you texture has repeated right and bottom lines, that leads to smoother borders * @default 0 */ clampOffset: number; /** * Changes frame clamping * Works with TilingSprite and Mesh * Change to -0.5 to add a pixel to the edge, recommended for transparent trimmed textures in atlas * @default 0.5 */ clampMargin: number; /** * Clamp region for normalized coords, left-top pixel center in xy , bottom-right in zw. * Calculated based on clampOffset. */ readonly uClampFrame: Float32Array; /** Normalized clamp offset. Calculated based on clampOffset. */ readonly uClampOffset: Float32Array; /** * Tracks Texture frame changes. * @ignore */ _updateID: number; /** * Tracks Texture frame changes. * @protected */ protected _textureID: number; protected _texture: Texture; /** * If texture size is the same as baseTexture. * @default false * @readonly */ isSimple: boolean; /** * @param texture - observed texture * @param clampMargin - Changes frame clamping, 0.5 by default. Use -0.5 for extra border. */ constructor(texture: Texture, clampMargin?: number); /** Texture property. */ get texture(): Texture; set texture(value: Texture); /** * Multiplies uvs array to transform * @param uvs - mesh uvs * @param [out=uvs] - output * @returns - output */ multiplyUvs(uvs: Float32Array, out?: Float32Array): Float32Array; /** * Updates matrices if texture was changed * @returns - whether or not it was updated */ update(): boolean; } /** * Stores the width of the non-scalable borders, for example when used with {@link scene.NineSlicePlane} texture. * @memberof rendering */ export interface TextureBorders { /** left border in pixels */ left: number; /** top border in pixels */ top: number; /** right border in pixels */ right: number; /** bottom border in pixels */ bottom: number; } /** * The UVs data structure for a texture. 
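 * For illustration (the corner ordering below is an assumption, not taken from the source docs): a
 * full-frame, un-rotated texture would typically map its corners as shown here.
 * ```ts
 * const fullFrame: UVs = {
 *     x0: 0, y0: 0, // top-left
 *     x1: 1, y1: 0, // top-right
 *     x2: 1, y2: 1, // bottom-right
 *     x3: 0, y3: 1, // bottom-left
 * };
 * ```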
* @memberof rendering */ export type UVs = { x0: number; y0: number; x1: number; y1: number; x2: number; y2: number; x3: number; y3: number; }; /** * The options that can be passed to a new Texture * @memberof rendering */ export interface TextureOptions { /** the underlying texture data that this texture will use */ source?: TextureSourceType; /** optional label, for debugging */ label?: string; /** The rectangle frame of the texture to show */ frame?: Rectangle; /** The area of original texture */ orig?: Rectangle; /** Trimmed rectangle of original texture */ trim?: Rectangle; /** Default anchor point used for sprite placement / rotation */ defaultAnchor?: { x: number; y: number; }; /** Default borders used for 9-slice scaling {@link NineSlicePlane}*/ defaultBorders?: TextureBorders; /** indicates how the texture was rotated by texture packer. See {@link groupD8} */ rotate?: number; /** set to true if you plan on modifying the uvs of this texture - can affect performance with high numbers of sprites*/ dynamic?: boolean; } export interface BindableTexture { source: TextureSource; } export type TextureSourceLike = TextureSource | TextureResourceOrOptions | string; /** * A texture stores the information that represents an image or part of an image. * * A texture must have a loaded resource passed to it to work. It does not contain any * loading mechanisms. * * The Assets class can be used to load an texture from a file. This is the recommended * way as it will handle the loading and caching for you. * * ```js * * const texture = await Assets.load('assets/image.png'); * * // once Assets has loaded the image it will be available via the from method * const sameTexture = Texture.from('assets/image.png'); * // another way to access the texture once loaded * const sameAgainTexture = Asset.get('assets/image.png'); * * const sprite1 = new Sprite(texture); * * ``` * * It cannot be added to the display list directly; instead use it as the texture for a Sprite. * If no frame is provided for a texture, then the whole image is used. * * You can directly create a texture from an image and then reuse it multiple times like this : * * ```js * import { Sprite, Texture } from 'pixi.js'; * * const texture = await Assets.load('assets/image.png'); * const sprite1 = new Sprite(texture); * const sprite2 = new Sprite(texture); * ``` * * If you didn't pass the texture frame to constructor, it enables `noFrame` mode: * it subscribes on baseTexture events, it automatically resizes at the same time as baseTexture. * @memberof rendering * @class */ export declare class Texture extends EventEmitter<{ update: Texture; destroy: Texture; }> implements BindableTexture { /** * Helper function that creates a returns Texture based on the source you provide. * The source should be loaded and ready to go. If not its best to grab the asset using Assets. * @param id - String or Source to create texture from * @param skipCache - Skip adding the texture to the cache * @returns The texture based on the Id provided */ static from: (id: TextureSourceLike, skipCache?: boolean) => Texture; /** label used for debugging */ label?: string; /** unique id for this texture */ readonly uid: number; /** * Has the texture been destroyed? 
* @readonly */ destroyed: boolean; _source: TextureSourceType; /** * Indicates whether the texture is rotated inside the atlas * set to 2 to compensate for texture packer rotation * set to 6 to compensate for spine packer rotation * can be used to rotate or mirror sprites * See {@link maths.groupD8} for explanation */ readonly rotate: number; /** A uvs object based on the given frame and the texture source */ readonly uvs: UVs; /** * Anchor point that is used as default if sprite is created with this texture. * Changing the `defaultAnchor` at a later point of time will not update Sprite's anchor point. * @default {0,0} */ readonly defaultAnchor?: { x: number; y: number; }; /** * Default width of the non-scalable border that is used if 9-slice plane is created with this texture. * @since 7.2.0 * @see scene.NineSliceSprite */ readonly defaultBorders?: TextureBorders; /** * This is the area of the BaseTexture image to actually copy to the Canvas / WebGL when rendering, * irrespective of the actual frame size or placement (which can be influenced by trimmed texture atlases) */ readonly frame: Rectangle; /** This is the area of original texture, before it was put in atlas. */ readonly orig: Rectangle; /** * This is the trimmed area of original texture, before it was put in atlas * Please call `updateUvs()` after you change coordinates of `trim` manually. */ readonly trim: Rectangle; /** * Does this Texture have any frame data assigned to it? * * This mode is enabled automatically if no frame was passed inside constructor. * * In this mode texture is subscribed to baseTexture events, and fires `update` on any change. * * Beware, after loading or resize of baseTexture event can fired two times! * If you want more control, subscribe on baseTexture itself. * @example * texture.on('update', () => {}); */ noFrame: boolean; /** * Set to true if you plan on modifying the uvs of this texture. * When this is the case, sprites and other objects using the texture will * make sure to listen for changes to the uvs and update their vertices accordingly. */ dynamic: boolean; private _textureMatrix; /** is it a texture? yes! used for type checking */ readonly isTexture = true; /** * @param {rendering.TextureOptions} options - Options for the texture */ constructor({ source, label, frame, orig, trim, defaultAnchor, defaultBorders, rotate, dynamic }?: TextureOptions); set source(value: TextureSourceType); /** the underlying source of the texture (equivalent of baseTexture in v7) */ get source(): TextureSourceType; /** returns a TextureMatrix instance for this texture. By default, that object is not created because its heavy. */ get textureMatrix(): TextureMatrix; /** The width of the Texture in pixels. */ get width(): number; /** The height of the Texture in pixels. */ get height(): number; /** Call this function when you have modified the frame of this texture. */ updateUvs(): void; /** * Destroys this texture * @param destroySource - Destroy the source when the texture is destroyed. */ destroy(destroySource?: boolean): void; /** call this if you have modified the `texture outside` of the constructor */ update(): void; /** @deprecated since 8.0.0 */ get baseTexture(): TextureSource; /** an Empty Texture used internally by the engine */ static EMPTY: Texture; /** a White texture used internally by the engine */ static WHITE: Texture; } /** * A render texture, extends `Texture`. 
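 * For illustration, a minimal sketch (assuming an existing `renderer` and `container`; the exact render
 * options may differ between versions):
 * ```ts
 * import { RenderTexture, Sprite } from 'pixi.js';
 *
 * const renderTexture = RenderTexture.create({ width: 256, height: 256 });
 *
 * // draw a container into the texture, then use it like any other texture
 * renderer.render({ container, target: renderTexture });
 * const sprite = new Sprite(renderTexture);
 * ```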
* @see {@link rendering.Texture} * @memberof rendering */ export declare class RenderTexture extends Texture { static create(options: TextureSourceOptions): RenderTexture; /** * Resizes the render texture. * @param width - The new width of the render texture. * @param height - The new height of the render texture. * @param resolution - The new resolution of the render texture. * @returns This texture. */ resize(width: number, height: number, resolution?: number): this; } export type GenerateTextureSourceOptions = Omit; /** * Options for generating a texture from a container. * @memberof rendering */ export type GenerateTextureOptions = { /** The container to generate the texture from */ target: Container; /** * The region of the container, that shall be rendered, * if no region is specified, defaults to the local bounds of the container. */ frame?: Rectangle; /** The resolution of the texture being generated. */ resolution?: number; /** The color used to clear the texture. */ clearColor?: ColorSource; /** Whether to enable anti-aliasing. This may affect performance. */ antialias?: boolean; /** The options passed to the texture source. */ textureSourceOptions?: GenerateTextureSourceOptions; }; /** * System that manages the generation of textures from the renderer * * * Do not instantiate these plugins directly. It is available from the `renderer.textureGenerator` property. * @memberof rendering */ export declare class GenerateTextureSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "textureGenerator"; }; private readonly _renderer; constructor(renderer: Renderer); /** * A Useful function that returns a texture of the display object that can then be used to create sprites * This can be quite useful if your container is complicated and needs to be reused multiple times. * @param {GenerateTextureOptions | Container} options - Generate texture options. * @param {Container} [options.container] - If not given, the renderer's resolution is used. * @param {Rectangle} options.region - The region of the container, that shall be rendered, * @param {number} [options.resolution] - The resolution of the texture being generated. * if no region is specified, defaults to the local bounds of the container. * @param {GenerateTextureSourceOptions} [options.textureSourceOptions] - Texture options for GPU. * @returns a shiny new texture of the container passed in */ generateTexture(options: GenerateTextureOptions | Container): RenderTexture; destroy(): void; } /** * An effect that can be applied to a container. This is used to create effects such as filters/masks etc. * @memberof rendering */ export interface Effect { pipe: string; priority: number; addBounds?(bounds: Bounds, skipUpdateTransform?: boolean): void; addLocalBounds?(bounds: Bounds, localRoot: Container): void; containsPoint?(point: PointData, hitTestFn: (container: Container, point: Point) => boolean): boolean; destroy(): void; } export interface EffectConstructor { new (options?: any): Effect; test?(options: any): boolean; } type TypedArray$1 = Float32Array | Uint32Array | Int32Array | Uint8Array; /** * Flexible wrapper around `ArrayBuffer` that also provides typed array views on demand. * @memberof utils */ export declare class ViewableBuffer { /** The size of the buffer in bytes. */ size: number; /** Underlying `ArrayBuffer` that holds all the data and is of capacity `this.size`. 
*/ rawBinaryData: ArrayBuffer; /** View on the raw binary data as a `Uint32Array`. */ uint32View: Uint32Array; /** View on the raw binary data as a `Float32Array`. */ float32View: Float32Array; uint16View: Uint16Array; private _int8View; private _uint8View; private _int16View; private _int32View; private _float64Array; private _bigUint64Array; /** * @param length - The size of the buffer in bytes. */ constructor(length: number); /** * @param arrayBuffer - The source array buffer. */ constructor(arrayBuffer: ArrayBuffer); /** View on the raw binary data as a `Int8Array`. */ get int8View(): Int8Array; /** View on the raw binary data as a `Uint8Array`. */ get uint8View(): Uint8Array; /** View on the raw binary data as a `Int16Array`. */ get int16View(): Int16Array; /** View on the raw binary data as a `Int32Array`. */ get int32View(): Int32Array; /** View on the raw binary data as a `Float64Array`. */ get float64View(): Float64Array; /** View on the raw binary data as a `BigUint64Array`. */ get bigUint64View(): BigUint64Array; /** * Returns the view of the given type. * @param type - One of `int8`, `uint8`, `int16`, * `uint16`, `int32`, `uint32`, and `float32`. * @returns - typed array of given type */ view(type: string): TypedArray$1; /** Destroys all buffer references. Do not use after calling this. */ destroy(): void; /** * Returns the size of the given type in bytes. * @param type - One of `int8`, `uint8`, `int16`, * `uint16`, `int32`, `uint32`, and `float32`. * @returns - size of the type in bytes */ static sizeOf(type: string): number; } /** * Used by the batcher to build texture batches. Holds list of textures and their respective locations. * @memberof rendering */ export declare class BatchTextureArray { /** Inside textures array. */ textures: TextureSource[]; /** Respective locations for textures. */ ids: Record; /** Number of filled elements. */ count: number; constructor(); /** Clear the textures and their locations. */ clear(): void; } /** * The different topology types supported by the renderer used to describe how the geometry should be renderer * @memberof rendering */ export type Topology = "point-list" | "line-list" | "line-strip" | "triangle-list" | "triangle-strip"; /** @deprecated since 8.0.0 */ export declare const DRAW_MODES: { POINTS: string; LINES: string; LINE_STRIP: string; TRIANGLES: string; TRIANGLE_STRIP: string; }; /** * The different types of vertex formats supported by the renderer * @memberof rendering */ export type VertexFormat = "uint8x2" | "uint8x4" | "sint8x2" | "sint8x4" | "unorm8x2" | "unorm8x4" | "snorm8x2" | "snorm8x4" | "uint16x2" | "uint16x4" | "sint16x2" | "sint16x4" | "unorm16x2" | "unorm16x4" | "snorm16x2" | "snorm16x4" | "float16x2" | "float16x4" | "float32" | "float32x2" | "float32x3" | "float32x4" | "uint32" | "uint32x2" | "uint32x3" | "uint32x4" | "sint32" | "sint32x2" | "sint32x3" | "sint32x4"; export type IndexBufferArray = Uint16Array | Uint32Array; /** * The attribute data for a geometries attributes * @memberof rendering */ export interface Attribute { /** the buffer that this attributes data belongs to */ buffer: Buffer$1; /** the format of the attribute */ format?: VertexFormat; /** the stride of the data in the buffer*/ stride?: number; /** the offset of the attribute from the buffer, defaults to 0 */ offset?: number; /** is this an instanced buffer? (defaults to false) */ instance?: boolean; /** the number of elements to be rendered. If not specified, all vertices after the starting vertex will be drawn. 
*/ size?: number; /** the type of attribute */ type?: number; /** * the starting vertex in the geometry to start drawing from. If not specified, * drawing will start from the first vertex. */ start?: number; /** * attribute divisor for instanced rendering. Note: this is a **WebGL-only** feature, the WebGPU renderer will * issue a warning if one of the attributes has divisor set. */ divisor?: number; } type AttributeOption = Omit & { buffer: Buffer$1 | TypedArray | number[]; } | Buffer$1 | TypedArray | number[]; export type AttributeOptions = Record; /** * the interface that describes the structure of the geometry * @memberof rendering */ export interface GeometryDescriptor { /** an optional label to easily identify the geometry */ label?: string; /** the attributes that make up the geometry */ attributes: AttributeOptions; /** optional index buffer for this geometry */ indexBuffer?: Buffer$1 | TypedArray | number[]; /** the topology of the geometry, defaults to 'triangle-list' */ topology?: Topology; instanceCount?: number; } /** * A Geometry is a low-level object that represents the structure of 2D shapes in terms of vertices and attributes. * It's a crucial component for rendering as it describes the shape and format of the data that will go through the shaders. * Essentially, a Geometry object holds the data you'd send to a GPU buffer. * * A geometry is basically made of two components: *
* Attributes: These are essentially arrays that define properties of the vertices like position, color, * texture coordinates, etc. They map directly to attributes in your vertex shaders. *
* Indices: An optional array that describes how the vertices are connected. * If not provided, vertices will be interpreted in the sequence they're given. * @example * * const geometry = new Geometry({ * attributes: { * aPosition: [ // add some positions * 0, 0, * 0, 100, * 100, 100, * 100, 0, * ], * aUv: [ // add some uvs * 0, 0, * 0, 1, * 1, 1, * 1, 0, * ] * } * }); * @memberof rendering * @class */ export declare class Geometry extends EventEmitter<{ update: Geometry; destroy: Geometry; }> { /** The topology of the geometry. */ topology: Topology; /** The unique id of the geometry. */ readonly uid: number; /** A record of the attributes of the geometry. */ readonly attributes: Record; /** The buffers that the attributes use */ readonly buffers: Buffer$1[]; /** The index buffer of the geometry */ readonly indexBuffer: Buffer$1; /** * the layout key will be generated by WebGPU all geometries that have the same structure * will have the same layout key. This is used to cache the pipeline layout * @internal * @ignore */ _layoutKey: number; /** the instance count of the geometry to draw */ instanceCount: number; private readonly _bounds; private _boundsDirty; /** * Create a new instance of a geometry * @param options - The options for the geometry. */ constructor(options: GeometryDescriptor); protected onBufferUpdate(): void; /** * Returns the requested attribute. * @param id - The name of the attribute required * @returns - The attribute requested. */ getAttribute(id: string): Attribute; /** * Returns the index buffer * @returns - The index buffer. */ getIndex(): Buffer$1; /** * Returns the requested buffer. * @param id - The name of the buffer required. * @returns - The buffer requested. */ getBuffer(id: string): Buffer$1; /** * Used to figure out how many vertices there are in this geometry * @returns the number of vertices in the geometry */ getSize(): number; /** Returns the bounds of the geometry. */ get bounds(): Bounds; /** * destroys the geometry. * @param destroyBuffers - destroy the buffers associated with this geometry */ destroy(destroyBuffers?: boolean): void; } export interface ExtractedAttributeData extends Omit { /** set where the shader location is for this attribute */ location?: number; } /** * returns the attribute data from the program * @private * @param {WebGLProgram} [program] - the WebGL program * @param {WebGLRenderingContext} [gl] - the WebGL context * @returns {object} the attribute data for this program */ export declare function extractAttributesFromGlProgram(program: WebGLProgram, gl: WebGLRenderingContextBase, sortAttributes?: boolean): Record; export interface GlAttributeData { type: string; size: number; location: number; name: string; } export interface GlUniformData { name: string; index: number; type: string; size: number; isArray: boolean; value: any; } export interface GlUniformBlockData { index: number; name: string; size: number; value?: TypedArray; } /** * The options for the gl program * @memberof rendering */ export interface GlProgramOptions { /** The fragment glsl shader source. */ fragment: string; /** The vertex glsl shader source. 
*/ vertex: string; /** the name of the program, defaults to 'pixi-program' */ name?: string; /** the preferred vertex precision for the shader, this may not be used if the device does not support it */ preferredVertexPrecision?: string; /** the preferred fragment precision for the shader, this may not be used if the device does not support it */ preferredFragmentPrecision?: string; } /** * A wrapper for a WebGL Program. You can create one and then pass it to a shader. * This will manage the WebGL program that is compiled and uploaded to the GPU. * * To get the most out of this class, you should be familiar with glsl shaders and how they work. * @see https://developer.mozilla.org/en-US/docs/Web/API/WebGLProgram * @example * * // Create a new program * const program = new GlProgram({ * vertex: '...', * fragment: '...', * }); * * * There are a few key things that pixi shader will do for you automatically: *
* - If no precision is provided in the shader, it will be injected into the program source for you. * This precision is taken from the options provided; if none is given, * the program will default to the defaultOptions. *
* - It will inject the program name into the shader source if none is provided. *
* - It will set the program version to 300 es. * * For optimal usage and best performance, its best to reuse programs as much as possible. * You should use the {@link GlProgram.from} helper function to create programs. * @class * @memberof rendering */ export declare class GlProgram { /** The default options used by the program. */ static defaultOptions: Partial; /** the fragment glsl shader source. */ readonly fragment?: string; /** the vertex glsl shader source */ readonly vertex?: string; /** * attribute data extracted from the program once created this happens when the program is used for the first time * @internal * @ignore */ _attributeData: Record; /** * uniform data extracted from the program once created this happens when the program is used for the first time * @internal * @ignore */ _uniformData: Record; /** * uniform data extracted from the program once created this happens when the program is used for the first time * @internal * @ignore */ _uniformBlockData: Record; /** details on how to use this program with transform feedback */ transformFeedbackVaryings?: { names: string[]; bufferMode: "separate" | "interleaved"; }; /** * the key that identifies the program via its source vertex + fragment * @internal * @ignore */ readonly _key: number; /** * Creates a shiny new GlProgram. Used by WebGL renderer. * @param options - The options for the program. */ constructor(options: GlProgramOptions); /** destroys the program */ destroy(): void; /** * Helper function that creates a program for a given source. * It will check the program cache if the program has already been created. * If it has that one will be returned, if not a new one will be created and cached. * @param options - The options for the program. * @returns A program using the same source */ static from(options: GlProgramOptions): GlProgram; } export interface StructsAndGroups { groups: { group: number; binding: number; name: string; isUniform: boolean; type: string; }[]; structs: { name: string; members: Record; }[]; } export declare function extractStructAndGroups(wgsl: string): StructsAndGroups; /** * a WebGPU descriptions of how the program is laid out * @see https://gpuweb.github.io/gpuweb/#gpupipelinelayout * @memberof rendering */ export type ProgramPipelineLayoutDescription = GPUBindGroupLayoutEntry[][]; /** * a map the maps names of uniforms to group indexes * @memberof rendering */ export type ProgramLayout = Record[]; /** * the program source * @memberof rendering */ export interface ProgramSource { /** The wgsl source code of the shader. */ source: string; /** The main function to run in this shader */ entryPoint?: string; } /** * The options for the gpu program * @memberof rendering */ export interface GpuProgramOptions { /** * the name of the program, this is added to the label of the GPU Program created * under the hood. Makes it much easier to debug! */ name?: string; /** The fragment glsl shader source. */ fragment?: ProgramSource; /** The vertex glsl shader source. */ vertex?: ProgramSource; /** The layout of the program. If not provided, it will be generated from the shader sources. */ layout?: ProgramLayout; /** The gpu layout of the program. If not provided, it will be generated from the shader sources. */ gpuLayout?: ProgramPipelineLayoutDescription; } /** * A wrapper for a WebGPU Program, specifically designed for the WebGPU renderer. * This class facilitates the creation and management of shader code that integrates with the WebGPU pipeline. 
* * To leverage the full capabilities of this class, familiarity with WGSL shaders is recommended. * @see https://gpuweb.github.io/gpuweb/#index * @example * * // Create a new program * const program = new GpuProgram({ * vertex: { * source: '...', * entryPoint: 'main', * }, * fragment:{ * source: '...', * entryPoint: 'main', * }, * }); * * * Note: Both fragment and vertex shader sources can coexist within a single WGSL source file * this can make things a bit simpler. * * For optimal usage and best performance, it help to reuse programs whenever possible. * The {@link GpuProgram.from} helper function is designed for this purpose, utilizing an * internal cache to efficiently manage and retrieve program instances. * By leveraging this function, you can significantly reduce overhead and enhance the performance of your rendering pipeline. * * An important distinction between WebGL and WebGPU regarding program data retrieval: * While WebGL allows extraction of program information directly from its compiled state, * WebGPU does not offer such a capability. Therefore, in the context of WebGPU, we're required * to manually extract the program layout information from the source code itself. * @memberof rendering */ export declare class GpuProgram { /** The fragment glsl shader source. */ readonly fragment?: ProgramSource; /** The vertex glsl shader source */ readonly vertex?: ProgramSource; /** * Mapping of uniform names to group indexes for organizing shader program uniforms. * Automatically generated from shader sources if not provided. * @example * // Assuming a shader with two uniforms, `u_time` and `u_resolution`, grouped respectively: * [ * { "u_time": 0 }, * { "u_resolution": 1 } * ] */ readonly layout: ProgramLayout; /** * Configuration for the WebGPU bind group layouts, detailing resource organization for the shader. * Generated from shader sources if not explicitly provided. * @example * // Assuming a shader program that requires two bind groups: * [ * // First bind group layout entries * [{ binding: 0, visibility: GPUShaderStage.VERTEX, type: "uniform-buffer" }], * // Second bind group layout entries * [{ binding: 1, visibility: GPUShaderStage.FRAGMENT, type: "sampler" }, * { binding: 2, visibility: GPUShaderStage.FRAGMENT, type: "sampled-texture" }] * ] */ readonly gpuLayout: ProgramPipelineLayoutDescription; /** * @internal * @ignore */ _layoutKey: number; /** * @internal * @ignore */ _attributeLocationsKey: number; /** the structs and groups extracted from the shader sources */ readonly structsAndGroups: StructsAndGroups; /** * the name of the program, this is added to the label of the GPU Program created under the hood. * Makes it much easier to debug! */ readonly name: string; private _attributeData; /** if true, the program will automatically assign global uniforms to group[0] */ autoAssignGlobalUniforms: boolean; /** if true, the program will automatically assign local uniforms to group[1] */ autoAssignLocalUniforms: boolean; /** * Create a new GpuProgram * @param options - The options for the gpu program */ constructor(options: GpuProgramOptions); private _generateProgramKey; get attributeData(): Record; /** destroys the program */ destroy(): void; /** * Helper function that creates a program for a given source. * It will check the program cache if the program has already been created. * If it has that one will be returned, if not a new one will be created and cached. * @param options - The options for the program. 
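 * @example
 * // A hedged sketch: the WGSL source and the entry point names below are placeholders, not real PixiJS values.
 * const wgslSource = '... your WGSL code containing vsMain and fsMain entry points ...';
 * const program = GpuProgram.from({
 *     name: 'my-program',
 *     vertex: { source: wgslSource, entryPoint: 'vsMain' },
 *     fragment: { source: wgslSource, entryPoint: 'fsMain' },
 * });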
* @returns A program using the same source */ static from(options: GpuProgramOptions): GpuProgram; } /** * A record of {@link BindGroup}'s used by the shader. * * `Record` * @memberof rendering */ export type ShaderGroups = Record; interface ShaderBase { /** The WebGL program used by the WebGL renderer. */ glProgram?: GlProgram; /** The WebGPU program used by the WebGPU renderer. */ gpuProgram?: GpuProgram; /** * A number that uses two bits on whether the shader is compatible with the WebGL renderer and/or the WebGPU renderer. * 0b00 - not compatible with either * 0b01 - compatible with WebGL * 0b10 - compatible with WebGPU * This is automatically set based on if a {@link GlProgram} or {@link GpuProgram} is provided. */ compatibleRenderers?: number; } export interface GlShaderWith extends ShaderBase { /** The WebGL program used by the WebGL renderer. */ glProgram: GlProgram; } export interface GpuShaderWith extends ShaderBase { /** The WebGPU program used by the WebGPU renderer. */ gpuProgram: GpuProgram; } export interface ShaderWithGroupsDescriptor { /** A record of {@link BindGroup}'s used by the shader. */ groups: ShaderGroups; /** an optional map of how to bind the groups. This is automatically generated by reading the WebGPU program */ groupMap?: Record>; } interface ShaderWithResourcesDescriptor { /** * A key value of uniform resources used by the shader. * Under the hood pixi will look at the provided shaders and figure out where * the resources are mapped. Its up to you to make sure the resource key * matches the uniform name in the webGPU program. WebGL is a little more forgiving! */ resources?: Record; } /** * A descriptor for a shader * @memberof rendering */ export type ShaderWith = GlShaderWith | GpuShaderWith; /** * A descriptor for a shader with groups. * @memberof rendering */ export type ShaderWithGroups = ShaderWithGroupsDescriptor & ShaderWith; export interface IShaderWithGroups extends ShaderWithGroupsDescriptor, ShaderBase { } /** * A descriptor for a shader with resources. This is an easier way to work with uniforms. * especially when you are not working with bind groups * @memberof rendering */ export type ShaderWithResources = ShaderWithResourcesDescriptor & ShaderWith; export interface IShaderWithResources extends ShaderWithResourcesDescriptor, ShaderBase { } export type ShaderDescriptor = ShaderWithGroups & ShaderWithResources; type GlShaderFromWith = { gpu?: GpuProgramOptions; gl: GlProgramOptions; }; type GpuShaderFromWith = { gpu: GpuProgramOptions; gl?: GlProgramOptions; }; export type ShaderFromGroups = (GlShaderFromWith | GpuShaderFromWith) & Omit; export type ShaderFromResources = (GlShaderFromWith | GpuShaderFromWith) & Omit; /** * The Shader class is an integral part of the PixiJS graphics pipeline. * Central to rendering in PixiJS are two key elements: A [shader] and a [geometry]. * The shader incorporates a {@link GlProgram} for WebGL or a {@link GpuProgram} for WebGPU, * instructing the respective technology on how to render the geometry. * * The primary goal of the Shader class is to offer a unified interface compatible with both WebGL and WebGPU. * When constructing a shader, you need to provide both a WebGL program and a WebGPU program due to the distinctions * between the two rendering engines. If only one is provided, the shader won't function with the omitted renderer. * * Both WebGL and WebGPU utilize the same resource object when passed into the shader. * Post-creation, the shader's interface remains consistent across both WebGL and WebGPU. 
* The sole distinction lies in whether a glProgram or a gpuProgram is employed. * * Modifying shader uniforms, which can encompass: * - TextureSampler {@link TextureStyle} * - TextureSource {@link TextureSource} * - UniformsGroups {@link UniformGroup} * @example * * const shader = new Shader({ * glProgram: glProgram, * gpuProgram: gpuProgram, * resources: { * uTexture: texture.source, * uSampler: texture.sampler, * uColor: [1, 0, 0, 1], * }, * }); * * // update the uniforms * shader.resources.uColor[1] = 1; * shader.resources.uTexture = texture2.source; * @class * @memberof rendering */ export declare class Shader extends EventEmitter<{ "destroy": Shader; }> { /** An instance of the GPU program used by the WebGPU renderer */ gpuProgram: GpuProgram; /** An instance of the GL program used by the WebGL renderer */ glProgram: GlProgram; /** * A number that uses two bits on whether the shader is compatible with the WebGL renderer and/or the WebGPU renderer. * 0b00 - not compatible with either * 0b01 - compatible with WebGL * 0b10 - compatible with WebGPU * This is automatically set based on if a {@link GlProgram} or {@link GpuProgram} is provided. */ readonly compatibleRenderers: number; /** */ groups: Record; /** A record of the resources used by the shader. */ resources: Record; /** * A record of the uniform groups and resources used by the shader. * This is used by WebGL renderer to sync uniform data. * @internal * @ignore */ _uniformBindMap: Record>; private readonly _ownedBindGroups; /** * Fired after rendering finishes. * @event rendering.Shader#destroy */ /** * There are two ways to create a shader. * one is to pass in resources which is a record of uniform groups and resources. * another is to pass in groups which is a record of {@link BindGroup}s. * this second method is really to make use of shared {@link BindGroup}s. * For most cases you will want to use resources as they are easier to work with. * USe Groups if you want to share {@link BindGroup}s between shaders. * you cannot mix and match - either use resources or groups. * @param {ShaderWithResourcesDescriptor} options - The options for the shader using ShaderWithResourcesDescriptor. */ constructor(options: ShaderWithResources); constructor(options: ShaderWithGroups); /** * Sometimes a resource group will be provided later (for example global uniforms) * In such cases, this method can be used to let the shader know about the group. * @param name - the name of the resource group * @param groupIndex - the index of the group (should match the webGPU shader group location) * @param bindIndex - the index of the bind point (should match the webGPU shader bind point) */ addResource(name: string, groupIndex: number, bindIndex: number): void; private _buildResourceAccessor; /** * Use to destroy the shader when its not longer needed. * It will destroy the resources and remove listeners. * @param destroyPrograms - if the programs should be destroyed as well. * Make sure its not being used by other shaders! */ destroy(destroyPrograms?: boolean): void; /** * A short hand function to create a shader based of a vertex and fragment shader. * @param options * @returns A shiny new PixiJS shader! */ static from(options: ShaderFromGroups): Shader; static from(options: ShaderFromResources): Shader; } export type BatchAction = "startBatch" | "renderBatch"; /** * A batch pool is used to store batches when they are not currently in use. 
* @memberof rendering */ export declare class Batch implements Instruction { renderPipeId: string; action: BatchAction; start: number; size: number; textures: BatchTextureArray; blendMode: BLEND_MODES; canBundle: boolean; /** * breaking rules slightly here in the name of performance.. * storing references to these bindgroups here is just faster for access! * keeps a reference to the GPU bind group to set when rendering this batch for WebGPU. Will be null is using WebGL. */ gpuBindGroup: GPUBindGroup; /** * breaking rules slightly here in the name of performance.. * storing references to these bindgroups here is just faster for access! * keeps a reference to the bind group to set when rendering this batch for WebGPU. Will be null if using WebGl. */ bindGroup: BindGroup; batcher: Batcher; destroy(): void; } /** * Represents an element that can be batched for rendering. * @interface * @memberof rendering */ export interface BatchableElement { /** * The name of the batcher to use. Must be registered. * @type {string} */ batcherName: string; /** * The texture to be used for rendering. * @type {Texture} */ texture: Texture; /** * The blend mode to be applied. * @type {BLEND_MODES} */ blendMode: BLEND_MODES; /** * The size of the index data. * @type {number} */ indexSize: number; /** * The size of the attribute data. * @type {number} */ attributeSize: number; /** * Whether the element should be packed as a quad for better performance. * @type {boolean} */ packAsQuad: boolean; /** * The texture ID, stored for efficient updating. * @type {number} * @private */ _textureId: number; /** * The starting position in the attribute buffer. * @type {number} * @private */ _attributeStart: number; /** * The starting position in the index buffer. * @type {number} * @private */ _indexStart: number; /** * Reference to the batcher. * @type {Batcher} * @private */ _batcher: Batcher; /** * Reference to the batch. * @type {Batch} * @private */ _batch: Batch; } /** * Represents a batchable quad element. * @extends BatchableElement * @memberof rendering */ export interface BatchableQuadElement extends BatchableElement { /** * Indicates that this element should be packed as a quad. * @type {true} */ packAsQuad: true; /** * The size of the attribute data for this quad element. * @type {4} */ attributeSize: 4; /** * The size of the index data for this quad element. * @type {6} */ indexSize: 6; /** * The bounds data for this quad element. * @type {BoundsData} */ bounds: BoundsData; } /** * Represents a batchable mesh element. * @extends BatchableElement * @memberof rendering */ export interface BatchableMeshElement extends BatchableElement { /** * The UV coordinates of the mesh. * @type {number[] | Float32Array} */ uvs: number[] | Float32Array; /** * The vertex positions of the mesh. * @type {number[] | Float32Array} */ positions: number[] | Float32Array; /** * The indices of the mesh. * @type {number[] | Uint16Array | Uint32Array} */ indices: number[] | Uint16Array | Uint32Array; /** * The offset in the index buffer. * @type {number} */ indexOffset: number; /** * The offset in the attribute buffer. * @type {number} */ attributeOffset: number; /** * Indicates that this element should not be packed as a quad. * @type {false} */ packAsQuad: false; } /** * The options for the batcher. * @memberof rendering */ export interface BatcherOptions { /** The maximum number of textures per batch. 
*/ maxTextures?: number; attributesInitialSize?: number; indicesInitialSize?: number; } /** * A batcher is used to batch together objects with the same texture. * It is an abstract class that must be extended. see DefaultBatcher for an example. * @memberof rendering */ export declare abstract class Batcher { static defaultOptions: Partial; /** unique id for this batcher */ readonly uid: number; /** The buffer containing attribute data for all elements in the batch. */ attributeBuffer: ViewableBuffer; /** The buffer containing index data for all elements in the batch. */ indexBuffer: IndexBufferArray; /** The current size of the attribute data in the batch. */ attributeSize: number; /** The current size of the index data in the batch. */ indexSize: number; /** The total number of elements currently in the batch. */ elementSize: number; /** The starting index of elements in the current batch. */ elementStart: number; /** Indicates whether the batch data has been modified and needs updating. */ dirty: boolean; /** The current index of the batch being processed. */ batchIndex: number; /** An array of all batches created during the current rendering process. */ batches: Batch[]; private _elements; private _batchIndexStart; private _batchIndexSize; /** The maximum number of textures per batch. */ readonly maxTextures: number; /** The name of the batcher. Must be implemented by subclasses. */ abstract name: string; /** The vertex size of the batcher. Must be implemented by subclasses. */ protected abstract vertexSize: number; /** The geometry used by this batcher. Must be implemented by subclasses. */ abstract geometry: Geometry; /** * The shader used by this batcher. Must be implemented by subclasses. * this can be shared by multiple batchers of the same type. */ abstract shader: Shader; /** * Packs the attributes of a BatchableMeshElement into the provided views. * Must be implemented by subclasses. * @param element - The BatchableMeshElement to pack. * @param float32View - The Float32Array view to pack into. * @param uint32View - The Uint32Array view to pack into. * @param index - The starting index in the views. * @param textureId - The texture ID to use. */ abstract packAttributes(element: BatchableMeshElement, float32View: Float32Array, uint32View: Uint32Array, index: number, textureId: number): void; /** * Packs the attributes of a BatchableQuadElement into the provided views. * Must be implemented by subclasses. * @param element - The BatchableQuadElement to pack. * @param float32View - The Float32Array view to pack into. * @param uint32View - The Uint32Array view to pack into. * @param index - The starting index in the views. * @param textureId - The texture ID to use. */ abstract packQuadAttributes(element: BatchableQuadElement, float32View: Float32Array, uint32View: Uint32Array, index: number, textureId: number): void; constructor(options?: BatcherOptions); begin(): void; add(batchableObject: BatchableElement): void; checkAndUpdateTexture(batchableObject: BatchableElement, texture: Texture): boolean; updateElement(batchableObject: BatchableElement): void; /** * breaks the batcher. This happens when a batch gets too big, * or we need to switch to a different type of rendering (a filter for example) * @param instructionSet */ break(instructionSet: InstructionSet): void; private _finishBatch; finish(instructionSet: InstructionSet): void; /** * Resizes the attribute buffer to the given size (1 = 1 float32) * @param size - the size in vertices to ensure (not bytes!) 
*/ ensureAttributeBuffer(size: number): void; /** * Resizes the index buffer to the given size (1 = 1 float32) * @param size - the size in vertices to ensure (not bytes!) */ ensureIndexBuffer(size: number): void; private _resizeAttributeBuffer; private _resizeIndexBuffer; packQuadIndex(indexBuffer: IndexBufferArray, index: number, indicesOffset: number): void; packIndex(element: BatchableMeshElement, indexBuffer: IndexBufferArray, index: number, indicesOffset: number): void; destroy(): void; } /** * An interface for a pipe that can be used to build instructions for the renderer. * InstructionPipes are specifically used to manage the state of the renderer. * For example, the BlendModePipe is used to set the blend mode of the renderer. * @memberof rendering */ export interface InstructionPipe { /** * called just before we execute the draw calls , this is where the pipes have an opportunity to * upload data to the GPU. This is only called if data changes. * @param instructionSet - the instruction set currently being built */ upload?: (instructionSet: InstructionSet) => void; /** * this is where the actual instruction is executed - eg make the draw call * activate a filter. Any instructions that have the same renderPipeId have their * execute method called * @param instruction - the instruction to execute */ execute?: (instruction: INSTRUCTION) => void; buildReset?: (instructionSet: InstructionSet) => void; buildStart?: (instructionSet: InstructionSet) => void; buildEnd?: (instructionSet: InstructionSet) => void; /** Called just after the render ends giving the RenderPipes a chance to do any cleanup */ renderEnd?: () => void; /** Called just before the render starts giving the RenderPipes a chance to do any setup */ renderStart?: () => void; /** * Used by the effect pipes push and pop effects to the renderer. A push effect allows * the renderer to change its state to support the effect. A pop effect allows the renderer * to return to its previous state. An example of this would be the filter effect. * @param effect - the effect to push * @param targetContainer - the container that the effect is being applied to * @param instructionSet - the instruction set currently being built */ push?: (effect: Effect, targetContainer: Container, instructionSet: InstructionSet) => void; /** * Used by effect pipes to pop effects from the renderer. * @param effect - the effect to pop * @param targetContainer - the container that the effect is being applied to * @param instructionSet - the instruction set currently being built */ pop?: (effect: Effect, targetContainer: Container, instructionSet: InstructionSet) => void; } /** * An interface for a pipe that can be used to build instructions for the renderer. * RenderPipes are specifically used to render Renderables like a Mesh. * @memberof rendering */ export interface RenderPipe { /** * This is where the renderable is added to the instruction set. This is called once per renderable. * For instance, a MeshRenderPipe could be used to enqueue a 'draw mesh' command * to the rendering instruction set, catering to the rendering of mesh geometry. * In more complex scenarios, such as the SpritePipe, this seamlessly coordinates * with a batchPipe to efficiently batch and add batch instructions to the instructions set * * Add is called when the instructions set is being built. 
* @param renderable - the renderable that needs to be rendered * @param instructionSet - the instruction set currently being built */ addRenderable: (renderable: RENDERABLE, instructionSet: InstructionSet) => void; /** * Called whenever a renderable has been been updated, eg its position has changed. * This is only called in the render loop if the instructions set is being reused * from the last frame. Otherwise addRenderable is called. * @param renderable - the renderable that needs to be rendered */ updateRenderable: (renderable: RENDERABLE) => void; /** * Called whenever a renderable is destroyed, often the pipes keep a webGL / webGPU specific representation * of the renderable that needs to be tidied up when the renderable is destroyed. * @param renderable - the renderable that needs to be rendered */ destroyRenderable: (renderable: RENDERABLE) => void; /** * This function is called when the renderer is determining if it can use the same instruction set again to * improve performance. If this function returns true, the renderer will rebuild the whole instruction set * for the scene. This is only called if the scene has not its changed its structure . * @param renderable * @returns {boolean} */ validateRenderable: (renderable: RENDERABLE) => boolean; } /** * An interface for a pipe that can be used to build instructions for the renderer. * BatchPipes are specifically used to build and render Batches. */ export interface BatchPipe { /** * Add a add a batchable object to the batch. * @param renderable - a batchable object that can be added to the batch * @param instructionSet - the instruction set currently being built */ addToBatch: (renderable: BatchableElement, instructionSet: InstructionSet) => void; /** * Forces the batch to break. This can happen if for example you need to render everything and then * change the render target. * @param instructionSet - the instruction set currently being built */ break: (instructionSet: InstructionSet) => void; } /** A helpful type that can be used to create a new RenderPipe, BatchPipe or InstructionPipe */ export interface PipeConstructor { new (renderer: Renderer, adaptor?: any): RenderPipe | BatchPipe | InstructionPipe; } /** * Options for creating a render target. * @memberof rendering */ export interface RenderTargetOptions { /** the width of the RenderTarget */ width?: number; /** the height of the RenderTarget */ height?: number; /** the resolution of the RenderTarget */ resolution?: number; /** an array of textures, or a number indicating how many color textures there should be */ colorTextures?: BindableTexture[] | number; /** should this render target have a stencil buffer? */ stencil?: boolean; /** should this render target have a depth buffer? */ depth?: boolean; /** a depth stencil texture that the depth and stencil outputs will be written to */ depthStencilTexture?: BindableTexture | boolean; /** should this render target be antialiased? */ antialias?: boolean; /** is this a root element, true if this is gl context owners render target */ isRoot?: boolean; } /** * A class that describes what the renderers are rendering to. * This can be as simple as a Texture, or as complex as a multi-texture, multi-sampled render target. * Support for stencil and depth buffers is also included. * * If you need something more complex than a Texture to render to, you should use this class. * Under the hood, all textures you render to have a RenderTarget created on their behalf. 
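 * For illustration, a minimal construction sketch based on the options listed above (the values are arbitrary):
 * ```ts
 * import { RenderTarget } from 'pixi.js';
 *
 * const renderTarget = new RenderTarget({
 *     width: 256,
 *     height: 256,
 *     colorTextures: 1,   // or pass an array of textures to write to
 *     stencil: true,
 * });
 * ```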
* @memberof rendering */ export declare class RenderTarget { /** The default options for a render target */ static defaultOptions: RenderTargetOptions; /** unique id for this render target */ readonly uid: number; /** * An array of textures that can be written to by the GPU - mostly this has one texture in Pixi, but you could * write to multiple if required! (eg deferred lighting) */ colorTextures: TextureSource[]; /** the stencil and depth buffer will right to this texture in WebGPU */ depthStencilTexture: TextureSource; /** if true, will ensure a stencil buffer is added. For WebGPU, this will automatically create a depthStencilTexture */ stencil: boolean; /** if true, will ensure a depth buffer is added. For WebGPU, this will automatically create a depthStencilTexture */ depth: boolean; dirtyId: number; isRoot: boolean; private readonly _size; /** if true, then when the render target is destroyed, it will destroy all the textures that were created for it. */ private readonly _managedColorTextures; /** * @param [descriptor] - Options for creating a render target. */ constructor(descriptor?: RenderTargetOptions); get size(): [ number, number ]; get width(): number; get height(): number; get pixelWidth(): number; get pixelHeight(): number; get resolution(): number; get colorTexture(): TextureSource; protected onSourceResize(source: TextureSource): void; /** * This will ensure a depthStencil texture is created for this render target. * Most likely called by the mask system to make sure we have stencil buffer added. * @internal * @ignore */ ensureDepthStencilTexture(): void; resize(width: number, height: number, resolution?: number, skipColorTexture?: boolean): void; destroy(): void; } /** * Represents a render target. * @memberof rendering * @ignore */ export declare class GlRenderTarget { width: number; height: number; msaa: boolean; framebuffer: WebGLFramebuffer; resolveTargetFramebuffer: WebGLFramebuffer; msaaRenderBuffer: WebGLRenderbuffer[]; depthStencilRenderBuffer: WebGLRenderbuffer; } /** * A class which holds the canvas contexts and textures for a render target. * @memberof rendering * @ignore */ export declare class GpuRenderTarget { contexts: GPUCanvasContext[]; msaaTextures: TextureSource[]; msaa: boolean; msaaSamples: number; width: number; height: number; descriptor: GPURenderPassDescriptor; } /** * A render surface is a texture, canvas, or render target * @memberof rendering * @see environment.ICanvas * @see rendering.Texture * @see rendering.RenderTarget */ export type RenderSurface = ICanvas | BindableTexture | RenderTarget; /** * An adaptor interface for RenderTargetSystem to support WebGL and WebGPU. * This is used internally by the renderer, and is not intended to be used directly. * @ignore */ export interface RenderTargetAdaptor { init( /** the renderer */ renderer: Renderer, /** the render target system */ renderTargetSystem: RenderTargetSystem): void; /** A function copies the contents of a render surface to a texture */ copyToTexture( /** the render surface to copy from */ sourceRenderSurfaceTexture: RenderTarget, /** the texture to copy to */ destinationTexture: Texture, /** the origin of the copy */ originSrc: { x: number; y: number; }, /** the size of the copy */ size: { width: number; height: number; }, /** the destination origin (top left to paste from!) 
*/ originDest?: { x: number; y: number; }): Texture; /** starts a render pass on the render target */ startRenderPass( /** the render target to start the render pass on */ renderTarget: RenderTarget, clear: CLEAR_OR_BOOL, /** the color to clear to */ clearColor?: RgbaArray, /** the viewport to use */ viewport?: Rectangle): void; /** clears the current render target to the specified color */ clear( /** the render target to clear */ renderTarget: RenderTarget, /** the clear mode to use. Can be true or a CLEAR number 'COLOR | DEPTH | STENCIL' 0b111 */ clear: CLEAR_OR_BOOL, /** the color to clear to */ clearColor?: RgbaArray, /** the viewport to use */ viewport?: Rectangle): void; /** finishes the current render pass */ finishRenderPass(renderTarget: RenderTarget): void; /** called after the render pass is finished */ postrender?(renderTarget: RenderTarget): void; /** * initializes a gpu render target. Both renderers use this function to initialize a gpu render target. * It is a different type of object depending on the renderer. */ initGpuRenderTarget( /** the render target to initialize */ renderTarget: RenderTarget): RENDER_TARGET; /** called when a render target is resized */ resizeGpuRenderTarget( /** the render target to resize */ renderTarget: RenderTarget): void; /** destroys the gpu render target */ destroyGpuRenderTarget( /** the render target to destroy */ gpuRenderTarget: RENDER_TARGET): void; } /** * A system that manages render targets. A render target is essentially a place where the shaders can color in the pixels. * The render target system is responsible for binding the render target to the renderer, and managing the viewport. * Render targets can be pushed and popped. * * To make it easier, you can also bind textures and canvases. This will automatically create a render target for you. * The render target itself is a lot more powerful than just a texture or canvas, * as it can have multiple textures attached to it. * It will also give you fine-grained control over the stencil buffer / depth texture. * @example * * ```js * * // create a render target * const renderTarget = new RenderTarget({ * colorTextures: [new TextureSource({ width: 100, height: 100 })], * }); * * // bind the render target * renderer.renderTarget.bind(renderTarget); * * // draw something! * ``` * @memberof rendering */ export declare class RenderTargetSystem implements System$1 { /** When rendering of a scene begins, this is where the root render surface is stored */ rootRenderTarget: RenderTarget; /** This is the root viewport for the render pass */ rootViewPort: Rectangle; /** A boolean that lets the dev know if the current render pass is rendering to the screen. Used by some plugins */ renderingToScreen: boolean; /** the current active render target */ renderTarget: RenderTarget; /** the current active render surface that the render target is created from */ renderSurface: RenderSurface; /** the current viewport that the gpu is using */ readonly viewport: Rectangle; /** * a runner that lets systems know if the active render target has changed.
* E.g. the Stencil System needs to know so it can manage the stencil buffer */ readonly onRenderTargetChange: SystemRunner; /** the projection matrix that is used by the shaders based on the active render target and the viewport */ readonly projectionMatrix: Matrix; /** the default clear color for render targets */ readonly defaultClearColor: RgbaArray; /** a reference to the adaptor that interfaces with WebGL / WebGPU */ readonly adaptor: RenderTargetAdaptor; /** * a hash that stores the render target for a given render surface. When you pass in a texture source, * a render target is created for it. This map stores them and makes it easy to retrieve the render target */ private readonly _renderSurfaceToRenderTargetHash; /** A hash that stores a gpu render target for a given render target. */ private _gpuRenderTargetHash; /** * A stack that stores the render target and frame that is currently being rendered to. * When push is called, the current render target is stored in this stack. * When pop is called, the previous render target is restored. */ private readonly _renderTargetStack; /** A reference to the renderer */ private readonly _renderer; constructor(renderer: Renderer); /** called when the dev wants to finish a render pass */ finishRenderPass(): void; /** * called when the renderer starts to render a scene. * @param options * @param options.target - the render target to render to * @param options.clear - the clear mode to use. Can be true or a CLEAR number 'COLOR | DEPTH | STENCIL' 0b111 * @param options.clearColor - the color to clear to * @param options.frame - the frame to render to */ renderStart({ target, clear, clearColor, frame }: { target: RenderSurface; clear: CLEAR_OR_BOOL; clearColor: RgbaArray; frame?: Rectangle; }): void; postrender(): void; /** * Binding a render surface! This is the main function of the render target system. * It will take the RenderSurface (which can be a texture, canvas, or render target) and bind it to the renderer. * Once bound, all draw calls will be rendered to the render surface. * * If a frame is not provided and the render surface is a texture, the frame of the texture will be used. * @param renderSurface - the render surface to bind * @param clear - the clear mode to use. Can be true or a CLEAR number 'COLOR | DEPTH | STENCIL' 0b111 * @param clearColor - the color to clear to * @param frame - the frame to render to * @returns the render target that was bound */ bind(renderSurface: RenderSurface, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray, frame?: Rectangle): RenderTarget; clear(target?: RenderSurface, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray): void; protected contextChange(): void; /** * Push a render surface to the renderer. This will bind the render surface to the renderer. * @param renderSurface - the render surface to push * @param clear - the clear mode to use. Can be true or a CLEAR number 'COLOR | DEPTH | STENCIL' 0b111 * @param clearColor - the color to clear to * @param frame - the frame to use when rendering to the render surface */ push(renderSurface: RenderSurface, clear?: CLEAR | boolean, clearColor?: RgbaArray, frame?: Rectangle): RenderTarget; /** Pops the current render target from the renderer and restores the previous render target. */ pop(): void; /** * Gets the render target from the provided render surface. E.g. if it's a texture, * it will return the render target for the texture. * If it's a render target, it will return the same render target.
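* * A hedged usage sketch (not from the original docs; assumes a `renderer` exposing this system as `renderer.renderTarget` and an existing `texture`, as in the class example above): * ```js * // passing a texture returns (and caches) the render target created for it * const renderTarget = renderer.renderTarget.getRenderTarget(texture); * * // passing a render target simply returns the same render target * renderer.renderTarget.getRenderTarget(renderTarget); // === renderTarget * ```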
* @param renderSurface - the render surface to get the render target for * @returns the render target for the render surface */ getRenderTarget(renderSurface: RenderSurface): RenderTarget; /** * Copies a render surface to another texture * @param sourceRenderSurfaceTexture - the render surface to copy from * @param destinationTexture - the texture to copy to * @param originSrc - the origin of the copy * @param originSrc.x - the x origin of the copy * @param originSrc.y - the y origin of the copy * @param size - the size of the copy * @param size.width - the width of the copy * @param size.height - the height of the copy * @param originDest - the destination origin (top left to paste from!) * @param originDest.x - the x origin of the paste * @param originDest.y - the y origin of the paste */ copyToTexture(sourceRenderSurfaceTexture: RenderTarget, destinationTexture: Texture, originSrc: { x: number; y: number; }, size: { width: number; height: number; }, originDest: { x: number; y: number; }): Texture; /** * ensures that we have a depth stencil buffer available to render to. * This is used by the mask system to make sure we have a stencil buffer. */ ensureDepthStencil(): void; /** nukes the render target system */ destroy(): void; private _initRenderTarget; getGpuRenderTarget(renderTarget: RenderTarget): RENDER_TARGET; } /** * Options passed to the ViewSystem * @memberof rendering * @property {number} [width=800] - The width of the screen. * @property {number} [height=600] - The height of the screen. * @property {ICanvas} [canvas] - The canvas to use as a view, optional. * @property {boolean} [autoDensity=false] - Resizes renderer view in CSS pixels to allow for resolutions other than 1. * @property {number} [resolution] - The resolution / device pixel ratio of the renderer. * @property {boolean} [antialias=false] - Whether to enable anti-aliasing. This may affect performance. * @property {boolean} [depth] - * Whether to ensure the main view can make use of the depth buffer. Always true for WebGL renderer. * @property {boolean} [multiView] - TODO: multiView * @property {number} [backgroundAlpha] - The alpha of the background. */ export interface ViewSystemOptions { /** * The width of the screen. * @default 800 * @memberof rendering.SharedRendererOptions */ width?: number; /** * The height of the screen. * @default 600 * @memberof rendering.SharedRendererOptions */ height?: number; /** * The canvas to use as a view, optional. * @memberof rendering.SharedRendererOptions */ canvas?: ICanvas; /** @deprecated */ view?: ICanvas; /** * Resizes renderer view in CSS pixels to allow for resolutions other than 1. * @memberof rendering.SharedRendererOptions */ autoDensity?: boolean; /** * The resolution / device pixel ratio of the renderer. * @memberof rendering.SharedRendererOptions */ resolution?: number; /** * Whether to enable anti-aliasing. This may affect performance. * @memberof rendering.SharedRendererOptions */ antialias?: boolean; /** * Whether to ensure the main view can make use of the depth buffer. Always true for WebGL renderer. * @memberof rendering.SharedRendererOptions */ depth?: boolean; /** * Transparency of the background color, value from `0` (fully transparent) to `1` (fully opaque). * @default 1 */ backgroundAlpha?: number; } export interface ViewSystemDestroyOptions { /** Whether to remove the view element from the DOM. Defaults to `false`. */ removeView?: boolean; } /** * The view system manages the main canvas that is attached to the DOM.
* Its main role is to hold the view reference and deal with how it is resized. * @memberof rendering */ export declare class ViewSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "view"; readonly priority: 0; }; /** The default options for the view system. */ static defaultOptions: ViewSystemOptions; /** The canvas element that everything is drawn to. */ canvas: ICanvas; /** The texture that is used to draw the canvas to the screen. */ texture: Texture; /** * Whether CSS dimensions of canvas view should be resized to screen dimensions automatically. * @member {boolean} */ get autoDensity(): boolean; set autoDensity(value: boolean); /** Whether to enable anti-aliasing. This may affect performance. */ antialias: boolean; /** * Measurements of the screen. (0, 0, screenWidth, screenHeight). * * It's safe to use as filterArea or hitArea for the whole stage. */ screen: Rectangle; /** The render target that the view is drawn to. */ renderTarget: RenderTarget; /** The resolution / device pixel ratio of the renderer. */ get resolution(): number; set resolution(value: number); /** * Initializes the view system * @param options - the options for the view */ init(options: ViewSystemOptions): void; /** * Resizes the screen and canvas to the specified dimensions. * @param desiredScreenWidth - The new width of the screen. * @param desiredScreenHeight - The new height of the screen. * @param resolution */ resize(desiredScreenWidth: number, desiredScreenHeight: number, resolution: number): void; /** * Destroys this System and optionally removes the canvas from the DOM. * @param {options | false} options - The options for destroying the view, or "false". * @param options.removeView - Whether to remove the view element from the DOM. Defaults to `false`. */ destroy(options?: TypeOrBool): void; } type RenderFunction = (renderer: Renderer) => void; /** * Options for the {@link scene.RenderContainer} constructor. * @memberof scene */ export interface RenderContainerOptions extends ContainerOptions { /** the optional custom render function if you want to inject the function via the constructor */ render?: RenderFunction; /** how to know if the custom render logic contains a point or not, used for interaction */ containsPoint?: (point: Point) => boolean; /** how to add the bounds of this object when measuring */ addBounds?: (bounds: BoundsData) => void; } /** * A container that allows for custom rendering logic. It essentially calls the render function each frame * and lets you define that logic yourself - the render could target the WebGL renderer, the WebGPU renderer, or even canvas. * It's up to you to define the logic.
* * This can be used in two ways, either by extending the class and overriding the render method, * or by passing a custom render function. * @example * ```js * import { RenderContainer } from 'pixi.js'; * * // extend the class * class MyRenderContainer extends RenderContainer * { * render(renderer) * { * renderer.clear({ * clearColor: 'green', // clear the screen to green when rendering this item * }); * } * } * * // or pass a custom render function * const renderContainer = new RenderContainer( * (renderer) => { * renderer.clear({ * clearColor: 'green', // clear the screen to green when rendering this item * }); * }); * ``` * @memberof scene * @extends scene.Container */ export declare class RenderContainer extends Container implements View, Instruction { batched: boolean; /** * Whether or not to round the x/y position of the object. * @type {boolean} */ roundPixels: boolean; _roundPixels: 0 | 1; _lastUsed: number; _lastInstructionTick: number; /** * The local bounds of the object. * @type {rendering.Bounds} */ bounds: Bounds; /** * Checks if the object contains the given point. * @param point - The point to check */ containsPoint: (point: Point) => boolean; /** * Adds the bounds of this object to the bounds object. * @param bounds - The output bounds object. */ addBounds: (bounds: Bounds) => void; canBundle: boolean; readonly renderPipeId: string; /** * @param options - The options for the container. */ constructor(options: RenderContainerOptions | RenderFunction); /** * An overridable function that can be used to render the object using the current renderer. * @param _renderer - The current renderer */ render(_renderer: Renderer): void; } /** * The CustomRenderPipe is a render pipe that allows for custom rendering logic for your renderable objects. * @example * import { RenderContainer } from 'pixi.js'; * * const renderContainer = new RenderContainer( * (renderer) => { * renderer.clear({ * clearColor: 'green', // clear the screen to green when rendering this item * }); * }); * @memberof rendering */ export declare class CustomRenderPipe implements InstructionPipe { static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "customRender"; }; private _renderer; constructor(renderer: Renderer); addRenderable(container: RenderContainer, instructionSet: InstructionSet): void; execute(container: RenderContainer): void; destroy(): void; } export declare class RenderGroupPipe implements InstructionPipe { static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "renderGroup"; }; private _renderer; constructor(renderer: Renderer); addRenderGroup(renderGroup: RenderGroup, instructionSet: InstructionSet): void; execute(renderGroup: RenderGroup): void; destroy(): void; } /** * A system that handles the rendering of render groups.
* @memberof rendering */ export declare class RenderGroupSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "renderGroup"; }; private readonly _renderer; constructor(renderer: Renderer); protected render({ container, transform }: { container: Container; transform: Matrix; }): void; destroy(): void; } /** * A ViewContainer is a type of container that represents a view. * This view can be a Sprite, a Graphics object, or any other object that can be rendered. * This class is abstract and should not be used directly. * @memberof scene */ export declare abstract class ViewContainer extends Container implements View { /** @private */ readonly renderPipeId: string; /** @private */ readonly canBundle = true; /** @private */ allowChildren: boolean; /** @private */ _roundPixels: 0 | 1; /** @private */ _lastUsed: number; /** @private */ _lastInstructionTick: number; protected _bounds: Bounds; protected _boundsDirty: boolean; /** * The local bounds of the view. * @type {rendering.Bounds} */ abstract get bounds(): BoundsData; /** @private */ abstract addBounds(bounds: Bounds): void; /** @private */ protected _updateBounds(): void; /** * Whether or not to round the x/y position of the sprite. * @type {boolean} */ get roundPixels(): boolean; set roundPixels(value: boolean); /** * Checks if the object contains the given point. * @param point - The point to check */ containsPoint(point: PointData): boolean; /** @private */ abstract batched: boolean; /** @private */ protected abstract onViewUpdate(): void; destroy(options?: DestroyOptions): void; } /** * Options for the {@link scene.Sprite} constructor. * @memberof scene */ export interface SpriteOptions extends ContainerOptions { /** The texture to use for the sprite. */ texture?: Texture; /** The anchor point of the sprite. */ anchor?: PointData | number; /** Whether or not to round the x/y position. */ roundPixels?: boolean; } /** * The Sprite object is one of the most important objects in PixiJS. It is a * drawing item that can be added to a scene and rendered to the screen. * * A sprite can be created directly from an image like this: * * ```js * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('assets/image.png'); * ``` * * The more efficient way to create sprites is using a {@link assets.Spritesheet}, * as swapping base textures when rendering to the screen is inefficient. * * ```js * import { Assets, Sprite } from 'pixi.js'; * * const sheet = await Assets.load('assets/spritesheet.json'); * const sprite = new Sprite(sheet.textures['image.png']); * ``` * @memberof scene * @extends scene.Container */ export declare class Sprite extends ViewContainer { /** * Helper function that creates a new sprite based on the source you provide. * The source can be - frame id, image, video, canvas element, video element, texture * @param source - Source to create texture from * @param [skipCache] - Whether to skip the cache or not * @returns The newly created sprite */ static from(source: Texture | TextureSourceLike, skipCache?: boolean): Sprite; readonly renderPipeId: string; batched: boolean; readonly _anchor: ObservablePoint; _texture: Texture; _didSpriteUpdate: boolean; private readonly _sourceBounds; private _sourceBoundsDirty; private _width; private _height; /** * @param options - The options for creating the sprite. 
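* @example * // a hedged sketch of the options form (`texture` is assumed to be an existing Texture) * const sprite = new Sprite({ * texture, * anchor: 0.5, // see the anchor documentation below * roundPixels: true, * });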
*/ constructor(options?: SpriteOptions | Texture); set texture(value: Texture); /** The texture that the sprite is using. */ get texture(): Texture; /** * The local bounds of the sprite. * @type {rendering.Bounds} */ get bounds(): Bounds; /** * The bounds of the sprite, taking the texture's trim into account. * @type {rendering.Bounds} */ get sourceBounds(): BoundsData; /** * Checks if the object contains the given point. * @param point - The point to check */ containsPoint(point: PointData): boolean; /** * Adds the bounds of this object to the bounds object. * @param bounds - The output bounds object. */ addBounds(bounds: Bounds): void; onViewUpdate(): void; protected _updateBounds(): void; private _updateSourceBounds; /** * Destroys this sprite renderable and optionally its texture. * @param options - Options parameter. A boolean will act as if all options * have been set to that value * @param {boolean} [options.texture=false] - Should it destroy the current texture of the renderable as well * @param {boolean} [options.textureSource=false] - Should it destroy the textureSource of the renderable as well */ destroy(options?: DestroyOptions): void; /** * The anchor sets the origin point of the sprite. The default value is taken from the {@link Texture} * and passed to the constructor. * * The default is `(0,0)`; this means the sprite's origin is the top left. * * Setting the anchor to `(0.5,0.5)` means the sprite's origin is centered. * * Setting the anchor to `(1,1)` would mean the sprite's origin point will be the bottom right corner. * * If you pass only a single parameter, it will set both x and y to the same value as shown in the example below. * @example * import { Sprite, Texture } from 'pixi.js'; * * const sprite = new Sprite({texture: Texture.WHITE}); * sprite.anchor.set(0.5); // This will set the origin to center. (0.5) is the same as (0.5, 0.5). */ get anchor(): ObservablePoint; set anchor(value: PointData | number); /** The width of the sprite, setting this will actually modify the scale to achieve the value set. */ get width(): number; set width(value: number); /** The height of the sprite, setting this will actually modify the scale to achieve the value set. */ get height(): number; set height(value: number); /** * Retrieves the size of the Sprite as a [Size]{@link Size} object. * This is faster than getting the width and height separately. * @param out - Optional object to store the size in. * @returns - The size of the Sprite. */ getSize(out?: Size): Size; /** * Sets the size of the Sprite to the specified width and height. * This is faster than setting the width and height separately. * @param value - This can be either a number or a [Size]{@link Size} object. * @param height - The height to set. Defaults to the value of `width` if not provided. */ setSize(value: number | Optional, height?: number): void; } export declare class SpritePipe implements RenderPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "sprite"; }; private _renderer; private _gpuSpriteHash; private readonly _destroyRenderableBound; constructor(renderer: Renderer); addRenderable(sprite: Sprite, instructionSet: InstructionSet): void; updateRenderable(sprite: Sprite): void; validateRenderable(sprite: Sprite): boolean; destroyRenderable(sprite: Sprite): void; private _updateBatchableSprite; private _getGpuSprite; private _initGPUSprite; destroy(): void; } /** The GPU object.
*/ interface GPU$1 { /** The GPU adapter */ adapter: GPUAdapter; /** The GPU device */ device: GPUDevice; } /** * Options for the WebGPU context. * @property {GpuPowerPreference} [powerPreference=default] - An optional hint indicating what configuration of GPU * is suitable for the WebGPU context, can be `'high-performance'` or `'low-power'`. * Setting to `'high-performance'` will prioritize rendering performance over power consumption, * while setting to `'low-power'` will prioritize power saving over rendering performance. * @property {boolean} [forceFallbackAdapter=false] - Force the use of the fallback adapter * @memberof rendering */ export interface GpuContextOptions { /** * An optional hint indicating what configuration of GPU is suitable for the WebGPU context, * can be `'high-performance'` or `'low-power'`. * Setting to `'high-performance'` will prioritize rendering performance over power consumption, * while setting to `'low-power'` will prioritize power saving over rendering performance. * @default undefined * @memberof rendering.WebGPUOptions */ powerPreference?: GpuPowerPreference; /** * Force the use of the fallback adapter * @default false * @memberof rendering.WebGPUOptions */ forceFallbackAdapter: boolean; } /** * System plugin to the renderer to manage the WebGPU device and adapter. * @class * @memberof rendering */ export declare class GpuDeviceSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "device"; }; /** The default options for the GpuDeviceSystem. */ static defaultOptions: GpuContextOptions; /** The GPU device */ gpu: GPU$1; private _renderer; private _initPromise; /** * @param {WebGPURenderer} renderer - The renderer this System works for. */ constructor(renderer: WebGPURenderer); init(options: GpuContextOptions): Promise; /** * Handle the context change event * @param gpu */ protected contextChange(gpu: GPU$1): void; /** * Helper that creates the WebGPU adapter and device for this system. * @param {object} options - The options for the WebGPU context * @returns the GPU adapter and device */ private _createDeviceAndAdaptor; destroy(): void; } /** * This manages the WebGPU bind groups. This is how data is bound to a shader when rendering. * @memberof rendering */ export declare class BindGroupSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "bindGroup"; }; private readonly _renderer; private _hash; private _gpu; constructor(renderer: WebGPURenderer); protected contextChange(gpu: GPU$1): void; getBindGroup(bindGroup: BindGroup, program: GpuProgram, groupIndex: number): GPUBindGroup; private _createBindGroup; destroy(): void; } /** * System plugin to the renderer to manage buffers.
* @memberof rendering */ export declare class GpuBufferSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "buffer"; }; protected CONTEXT_UID: number; private _gpuBuffers; private readonly _managedBuffers; private _gpu; protected contextChange(gpu: GPU$1): void; getGPUBuffer(buffer: Buffer$1): GPUBuffer; updateBuffer(buffer: Buffer$1): GPUBuffer; /** dispose all GPU resources of all managed buffers */ destroyAll(): void; createGPUBuffer(buffer: Buffer$1): GPUBuffer; protected onBufferChange(buffer: Buffer$1): void; /** * Disposes buffer * @param buffer - buffer with data */ protected onBufferDestroy(buffer: Buffer$1): void; destroy(): void; private _destroyBuffer; } /** * The system that handles color masking for the GPU. * @memberof rendering */ export declare class GpuColorMaskSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "colorMask"; }; private readonly _renderer; private _colorMaskCache; constructor(renderer: WebGPURenderer); setMask(colorMask: number): void; destroy(): void; } /** * This is a WebGL state, and is passed to {@link StateSystem}. * * Each mesh rendered may require WebGL to be in a different state. * For example you may want a different blend mode or to enable polygon offsets. * @memberof rendering */ export declare class State { /** * The data is a unique number based on the state's settings. * This lets us quickly compare states with a single number rather than looking * at all the individual settings. */ data: number; _blendModeId: number; private _blendMode; private _polygonOffset; constructor(); /** * Activates blending of the computed fragment color values. * @default true */ get blend(): boolean; set blend(value: boolean); /** * Activates adding an offset to depth values of polygon's fragments * @default false */ get offsets(): boolean; set offsets(value: boolean); /** The culling settings for this state: none - No culling; back - Back face culling; front - Front face culling */ set cullMode(value: CULL_MODES); get cullMode(): CULL_MODES; /** * Activates culling of polygons. * @default false */ get culling(): boolean; set culling(value: boolean); /** * Activates depth comparisons and updates to the depth buffer. * @default false */ get depthTest(): boolean; set depthTest(value: boolean); /** * Enables or disables writing to the depth buffer. * @default true */ get depthMask(): boolean; set depthMask(value: boolean); /** * Specifies whether or not front or back-facing polygons can be culled. * @default false */ get clockwiseFrontFace(): boolean; set clockwiseFrontFace(value: boolean); /** * The blend mode to be applied when this state is set. Apply a value of `normal` to reset the blend mode. * Setting this mode to anything other than NO_BLEND will automatically switch blending on. * @default 'normal' */ get blendMode(): BLEND_MODES; set blendMode(value: BLEND_MODES); /** * The polygon offset. Setting this property to anything other than 0 will automatically enable polygon offset fill. * @default 0 */ get polygonOffset(): number; set polygonOffset(value: number); toString(): string; /** * A quick way of getting an instance of a State that is configured for 2d rendering. * @returns a new State with values set for 2d rendering */ static for2d(): State; static default2d: State; } /** * The system that handles encoding commands for the GPU.
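* * For reference, a hedged sketch of how the State class declared above feeds into this system's `draw` call (the `encoder`, `geometry` and `shader` names are assumed; in practice the renderer drives this system itself): * ```js * const state = State.for2d(); // a State preconfigured for 2d rendering * state.blendMode = 'add'; // anything other than NO_BLEND switches blending on * * encoder.draw({ geometry, shader, state }); * ```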
* @memberof rendering */ export declare class GpuEncoderSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "encoder"; readonly priority: 1; }; commandEncoder: GPUCommandEncoder; renderPassEncoder: GPURenderPassEncoder; commandFinished: Promise; private _resolveCommandFinished; private _gpu; private _boundBindGroup; private _boundVertexBuffer; private _boundIndexBuffer; private _boundPipeline; private readonly _renderer; constructor(renderer: WebGPURenderer); renderStart(): void; beginRenderPass(gpuRenderTarget: GpuRenderTarget): void; endRenderPass(): void; setViewport(viewport: Rectangle): void; setPipelineFromGeometryProgramAndState(geometry: Geometry, program: GpuProgram, state: any, topology?: Topology): void; setPipeline(pipeline: GPURenderPipeline): void; private _setVertexBuffer; private _setIndexBuffer; resetBindGroup(index: number): void; setBindGroup(index: number, bindGroup: BindGroup, program: GpuProgram): void; setGeometry(geometry: Geometry, program: GpuProgram): void; private _setShaderBindGroups; private _syncBindGroup; draw(options: { geometry: Geometry; shader: Shader; state?: State; topology?: Topology; size?: number; start?: number; instanceCount?: number; skipSync?: boolean; }): void; finishRenderPass(): void; postrender(): void; restoreRenderPass(): void; private _clearCache; destroy(): void; protected contextChange(gpu: GPU$1): void; } /** * This manages the stencil buffer. Used primarily for masking * @memberof rendering */ export declare class GpuStencilSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "stencil"; }; private readonly _renderer; private _renderTargetStencilState; private _activeRenderTarget; constructor(renderer: WebGPURenderer); protected onRenderTargetChange(renderTarget: RenderTarget): void; setStencilMode(stencilMode: STENCIL_MODES, stencilReference: number): void; destroy(): void; } export declare const UNIFORM_TYPES_VALUES: readonly [ "f32", "i32", "vec2", "vec3", "vec4", "mat2x2", "mat3x3", "mat4x4", "mat3x2", "mat4x2", "mat2x3", "mat4x3", "mat2x4", "mat3x4" ]; /** useful for checking if a type is supported - a map of supported types with a true value. */ export declare const UNIFORM_TYPES_MAP: Record; export type UNIFORM_TYPES_SINGLE = typeof UNIFORM_TYPES_VALUES[number]; type OPTIONAL_SPACE = " " | ""; export type UNIFORM_TYPES_ARRAY = `array<${UNIFORM_TYPES_SINGLE},${OPTIONAL_SPACE}${number}>`; export type UNIFORM_TYPES = UNIFORM_TYPES_SINGLE | UNIFORM_TYPES_ARRAY; export interface UniformData { /** the value of the uniform, this could be any object - a parser will figure out how to write it to the buffer */ value: unknown; type: UNIFORM_TYPES; /** the size of the variable (eg 2 for vec2, 3 for vec3, 4 for vec4) */ size?: number; name?: string; } export interface UboElement { data: UniformData; offset: number; size: number; } export interface UboLayout { uboElements: UboElement[]; /** float32 size // TODO change to bytes */ size: number; } export type UniformsSyncCallback = (...args: any[]) => void; type FLOPS = T extends { value: infer V; } ? V : never; type ExtractUniformObject> = { [K in keyof T]: FLOPS; }; /** * Uniform group options * @memberof rendering */ export type UniformGroupOptions = { /** * if true the UniformGroup is handled as an Uniform buffer object. * This is the only way WebGPU can work with uniforms. WebGL2 can also use this. 
* So don't set to true if you want to use WebGPU :D */ ubo?: boolean; /** if true, then you are responsible for when the data is uploaded to the GPU by calling `update()` */ isStatic?: boolean; }; /** * Uniform group holds a uniform map and some IDs for work * * `UniformGroup` has two modes: * * 1: Normal mode * Normal mode will upload the uniforms with individual function calls as required. This is the default mode * for WebGL rendering. * * 2: Uniform buffer mode * This mode will treat the uniforms as a uniform buffer. You can pass in either a buffer that you manually handle, * or a generic object that PixiJS will automatically map to a buffer for you. * For maximum benefits, make UBO UniformGroups static, and only update them each frame. * This is the only way uniforms can be used with WebGPU. * * Rules of UBOs: * - UBOs only work with WebGL2, so make sure you have a fallback! * - Only floats are supported (including vec[2,3,4], mat[2,3,4]) * - Samplers cannot be used in UBOs (a GPU limitation) * - You must ensure that the object you pass in exactly matches the shader's UBO structure. * Otherwise, weirdness will ensue! * - The name of the UBO object added to the group must match exactly the name of the UBO in the shader. * * When declaring your uniform options, you must pass in the value and the type of the uniform. * The types correspond to the WebGPU types {@link UNIFORM_TYPES}. * Uniforms can be modified via the class's `uniforms` property. It will contain all the uniforms declared in the constructor. * * ```glsl * // UBO in shader: * uniform myCoolData { // Declaring a UBO... * mat4 uCoolMatrix; * float uFloatyMcFloatFace; * }; * ``` * * ```js * // A new Uniform Buffer Object... * const myCoolData = new UniformGroup({ * uCoolMatrix: {value: new Matrix(), type: 'mat4x4<f32>'}, * uFloatyMcFloatFace: {value: 23, type: 'f32'}, * }); * * // modify the data * myCoolData.uniforms.uFloatyMcFloatFace = 42; * // Build a shader... * const shader = Shader.from(srcVert, srcFrag, { * myCoolData // Name matches the UBO name in the shader. Will be processed accordingly. * }); * ``` * @memberof rendering */ export declare class UniformGroup implements BindResource { /** The default options used by the uniform group. */ static defaultOptions: UniformGroupOptions; /** used internally to know if a uniform group was used in the last render pass */ _touched: number; /** a unique id for this uniform group used through the renderer */ readonly uid: number; /** a resource type, used to identify how to handle it when it's in a bind group / shader resource */ _resourceType: string; /** the resource id used internally by the renderer to build bind group keys */ _resourceId: number; /** the structures of the uniform group */ uniformStructures: UNIFORMS; /** the uniforms as an easily accessible map of properties */ uniforms: ExtractUniformObject; /** true if it should be used as a uniform buffer object */ ubo: boolean; /** an underlying buffer that will be uploaded to the GPU when using this UniformGroup */ buffer?: Buffer$1; /** * if true, then you are responsible for when the data is uploaded to the GPU. * otherwise, the data is reuploaded each frame.
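* * A hedged sketch of the static case (`uTime` is an illustrative name; the types are those listed in {@link UNIFORM_TYPES}): * ```js * const group = new UniformGroup({ * uTime: { value: 0, type: 'f32' }, * }, { isStatic: true }); * * group.uniforms.uTime = 1; * group.update(); // needed here because isStatic is true; non-static groups re-upload each frame * ```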
*/ isStatic: boolean; /** used to identify if this is a uniform group */ readonly isUniformGroup = true; /** * used to flag if this Uniform group's data is different from what it has stored in its buffer / on the GPU * @internal * @ignore */ _dirtyId: number; /** * a signature generated for internal use * @internal * @ignore */ readonly _signature: number; readonly destroyed = false; /** * Create a new Uniform group * @param uniformStructures - The structures of the uniform group * @param options - The optional parameters of this uniform group */ constructor(uniformStructures: UNIFORMS, options?: UniformGroupOptions); /** Call this if you want the uniform group's data to be uploaded to the GPU. Only useful if `isStatic` is true. */ update(): void; } export interface UboAdaptor { createUboElements: (uniformData: UniformData[]) => UboLayout; generateUboSync: (uboElements: UboElement[]) => UniformsSyncCallback; } /** * System plugin to the renderer to manage uniform buffers. * @memberof rendering */ export declare class UboSystem implements System$1 { /** Cache of uniform buffer layouts and sync functions, so we don't have to re-create them */ private _syncFunctionHash; private readonly _adaptor; constructor(adaptor: UboAdaptor); /** * Overridable function, used by `pixi.js/unsafe-eval`, to silence * the error thrown if the platform doesn't support unsafe-eval. * @private */ private _systemCheck; ensureUniformGroup(uniformGroup: UniformGroup): void; getUniformGroupData(uniformGroup: UniformGroup): { layout: UboLayout; syncFunction: (uniforms: Record, data: Float32Array, offset: number) => void; }; private _initUniformGroup; private _generateUboSync; syncUniformGroup(uniformGroup: UniformGroup, data?: Float32Array, offset?: number): boolean; updateUniformGroup(uniformGroup: UniformGroup): boolean; destroy(): void; } /** * System plugin to the renderer to manage uniform buffers. With a WGSL twist! * @memberof rendering */ export declare class GpuUboSystem extends UboSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "ubo"; }; constructor(); } /** * A resource that can be bound to a bind group and used in a shader. * Whilst a buffer can be used as a resource, this class allows you to specify an offset and size of the buffer to use. * This is useful if you have a large buffer and only part of it is used in a shader. * * This resource will listen for changes on the underlying buffer and emit itself if the buffer changes shape. * @example * * const buffer = new Buffer({ * data: new Float32Array(1000), * usage: BufferUsage.UNIFORM, * }); * // Create a buffer resource that uses the first 100 bytes of a buffer * const bufferResource = new BufferResource({ * buffer, * offset: 0, * size: 100, * }); * @memberof rendering */ export declare class BufferResource extends EventEmitter<{ change: BindResource; }> implements BindResource { /** * emits when the underlying buffer has changed shape (i.e.
resized) * letting the renderer know that it needs to discard the old buffer on the GPU and create a new one * @event change */ /** a unique id for this uniform group used through the renderer */ readonly uid: number; /** * a resource type, used to identify how to handle it when its in a bind group / shader resource * @internal * @ignore */ readonly _resourceType = "bufferResource"; /** * used internally to know if a uniform group was used in the last render pass * @internal * @ignore */ _touched: number; /** * the resource id used internally by the renderer to build bind group keys * @internal * @ignore */ _resourceId: number; /** the underlying buffer that this resource is using */ buffer: Buffer$1; /** the offset of the buffer this resource is using. If not provided, then it will use the offset of the buffer. */ readonly offset: number; /** the size of the buffer this resource is using. If not provided, then it will use the size of the buffer. */ readonly size: number; /** * A cheeky hint to the GL renderer to let it know this is a BufferResource * @internal * @ignore */ readonly _bufferResource = true; /** * Has the Buffer resource been destroyed? * @readonly */ destroyed: boolean; /** * Create a new Buffer Resource. * @param options - The options for the buffer resource * @param options.buffer - The underlying buffer that this resource is using * @param options.offset - The offset of the buffer this resource is using. * If not provided, then it will use the offset of the buffer. * @param options.size - The size of the buffer this resource is using. * If not provided, then it will use the size of the buffer. */ constructor({ buffer, offset, size }: { buffer: Buffer$1; offset?: number; size?: number; }); protected onBufferChange(): void; /** * Destroys this resource. Make sure the underlying buffer is not used anywhere else * if you want to destroy it as well, or code will explode * @param destroyBuffer - Should the underlying buffer be destroyed as well? */ destroy(destroyBuffer?: boolean): void; } export declare class GpuUniformBatchPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUPipes ]; readonly name: "uniformBatch"; }; private _renderer; private _bindGroupHash; private readonly _batchBuffer; private _buffers; private _bindGroups; private _bufferResources; constructor(renderer: WebGPURenderer); renderEnd(): void; private _resetBindGroups; getUniformBindGroup(group: UniformGroup, duplicate: boolean): BindGroup; getUboResource(group: UniformGroup): BufferResource; getArrayBindGroup(data: Float32Array): BindGroup; getArrayBufferResource(data: Float32Array): BufferResource; private _getBufferResource; private _getBindGroup; private _uploadBindGroups; destroy(): void; } /** * A system that creates and manages the GPU pipelines. * * Caching Mechanism: At its core, the system employs a two-tiered caching strategy to minimize * the redundant creation of GPU pipelines (or "pipes"). This strategy is based on generating unique * keys that represent the state of the graphics settings and the specific requirements of the * item being rendered. By caching these pipelines, subsequent draw calls with identical configurations * can reuse existing pipelines instead of generating new ones. * * State Management: The system differentiates between "global" state properties (like color masks * and stencil masks, which do not change frequently) and properties that may vary between draw calls * (such as geometry, shaders, and blend modes). 
Unique keys are generated for both these categories * using getStateKey for global state and getGraphicsStateKey for draw-specific settings. These keys are * then used to cache the pipe. The next time we need a pipe we can check * the cache by first looking at the state cache and then the pipe cache. * @memberof rendering */ export declare class PipelineSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "pipeline"; }; private readonly _renderer; protected CONTEXT_UID: number; private _moduleCache; private _bufferLayoutsCache; private readonly _bindingNamesCache; private _pipeCache; private readonly _pipeStateCaches; private _gpu; private _stencilState; private _stencilMode; private _colorMask; private _multisampleCount; private _depthStencilAttachment; constructor(renderer: WebGPURenderer); protected contextChange(gpu: GPU$1): void; setMultisampleCount(multisampleCount: number): void; setRenderTarget(renderTarget: GpuRenderTarget): void; setColorMask(colorMask: number): void; setStencilMode(stencilMode: STENCIL_MODES): void; setPipeline(geometry: Geometry, program: GpuProgram, state: State, passEncoder: GPURenderPassEncoder): void; getPipeline(geometry: Geometry, program: GpuProgram, state: State, topology?: Topology): GPURenderPipeline; private _createPipeline; private _getModule; private _createModule; private _generateBufferKey; private _generateAttributeLocationsKey; /** * Returns a hash of buffer names mapped to bind locations. * This is used to bind the correct buffer to the correct location in the shader. * @param geometry - The geometry where to get the buffer names * @param program - The program where to get the buffer names * @returns An object of buffer names mapped to the bind location. */ getBufferNamesToBind(geometry: Geometry, program: GpuProgram): Record; private _createVertexBufferLayouts; private _updatePipeHash; destroy(): void; } /** * The WebGPU adaptor for the render target system. Allows the Render Target System to * be used with the WebGPU renderer * @memberof rendering * @ignore */ export declare class GpuRenderTargetAdaptor implements RenderTargetAdaptor { private _renderTargetSystem; private _renderer; init(renderer: WebGPURenderer, renderTargetSystem: RenderTargetSystem): void; copyToTexture(sourceRenderSurfaceTexture: RenderTarget, destinationTexture: Texture, originSrc: { x: number; y: number; }, size: { width: number; height: number; }, originDest: { x: number; y: number; }): Texture; startRenderPass(renderTarget: RenderTarget, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray, viewport?: Rectangle): void; finishRenderPass(): void; /** * returns the gpu texture for the first color texture in the render target * mainly used by the filter manager to copy the texture for blending * @param renderTarget * @returns a gpu texture */ private _getGpuColorTexture; getDescriptor(renderTarget: RenderTarget, clear: CLEAR_OR_BOOL, clearValue: RgbaArray): GPURenderPassDescriptor; clear(renderTarget: RenderTarget, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray, viewport?: Rectangle): void; initGpuRenderTarget(renderTarget: RenderTarget): GpuRenderTarget; destroyGpuRenderTarget(gpuRenderTarget: GpuRenderTarget): void; ensureDepthStencilTexture(renderTarget: RenderTarget): void; resizeGpuRenderTarget(renderTarget: RenderTarget): void; } /** * The WebGPU render target system.
Allows the Render Target System to be used with the WebGPU renderer * @memberof rendering */ export declare class GpuRenderTargetSystem extends RenderTargetSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "renderTarget"; }; adaptor: GpuRenderTargetAdaptor; constructor(renderer: WebGPURenderer); } /** * System plugin to the renderer to manage the shaders. * @memberof rendering */ export interface ShaderSystem extends System$1 { /** the maximum number of textures that can be bound to a shader */ readonly maxTextures: number; } export interface GPUProgramData { bindGroups: GPUBindGroupLayout[]; pipeline: GPUPipelineLayout; } /** * A system that manages the rendering of GpuPrograms. * @memberof rendering */ export declare class GpuShaderSystem implements ShaderSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "shader"; }; maxTextures: number; private _gpu; private readonly _gpuProgramData; protected contextChange(gpu: GPU$1): void; getProgramData(program: GpuProgram): GPUProgramData; private _createGPUProgramData; destroy(): void; } /** * System plugin to the renderer to manage the WebGPU state. * @memberof rendering */ export declare class GpuStateSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "state"; }; /** * State ID * @readonly */ stateId: number; /** * Polygon offset * @readonly */ polygonOffset: number; /** * Blend mode * @default 'none' * @readonly */ blendMode: BLEND_MODES; /** Whether current blend equation is different */ protected _blendEq: boolean; /** * The GPU context * @readonly */ protected gpu: GPU$1; /** * Default State * @readonly */ protected defaultState: State; constructor(); protected contextChange(gpu: GPU$1): void; /** * Gets the blend mode data for the current state * @param state - The state to get the blend mode from */ getColorTargets(state: State): GPUColorTargetState[]; destroy(): void; } export type GetPixelsOutput = { pixels: Uint8ClampedArray; width: number; height: number; }; export interface CanvasGenerator { generateCanvas(texture: Texture): ICanvas; getPixels(texture: Texture): GetPixelsOutput; } /** * The system that handles textures for the GPU.
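* * A hedged read-back sketch (assumes a WebGPURenderer exposing this system under its extension name `texture`, and an existing `texture`): * ```js * const { pixels, width, height } = renderer.texture.getPixels(texture); * const canvas = renderer.texture.generateCanvas(texture); * ```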
* @memberof rendering */ export declare class GpuTextureSystem implements System$1, CanvasGenerator { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "texture"; }; readonly managedTextures: TextureSource[]; protected CONTEXT_UID: number; private _gpuSources; private _gpuSamplers; private _bindGroupHash; private _textureViewHash; private readonly _uploads; private _gpu; private _mipmapGenerator?; private readonly _renderer; constructor(renderer: WebGPURenderer); protected contextChange(gpu: GPU$1): void; initSource(source: TextureSource): GPUTexture; protected onSourceUpdate(source: TextureSource): void; protected onSourceUnload(source: TextureSource): void; protected onUpdateMipmaps(source: TextureSource): void; protected onSourceDestroy(source: TextureSource): void; protected onSourceResize(source: TextureSource): void; private _initSampler; getGpuSampler(sampler: TextureStyle): GPUSampler; getGpuSource(source: TextureSource): GPUTexture; /** * this returns a bind group for a specific texture. The bind group contains * - the texture source * - the texture style * - the texture matrix * This is cached so the bind group should only be created once per texture. * @param texture - the texture you want the bindgroup for * @returns the bind group for the texture */ getTextureBindGroup(texture: Texture): BindGroup; private _createTextureBindGroup; getTextureView(texture: BindableTexture): GPUTextureView; private _createTextureView; generateCanvas(texture: Texture): ICanvas; getPixels(texture: Texture): GetPixelsOutput; destroy(): void; } interface System { extension: { name: string; }; defaultOptions?: any; new (...args: any): any; } type SystemsWithExtensionList = System[]; type InstanceType$1 any> = T extends new (...args: any) => infer R ? R : any; type NameType = T[number]["extension"]["name"]; export type ExtractSystemTypes = { [K in NameType]: InstanceType$1>; }; type NotUnknown = T extends unknown ? keyof T extends never ? never : T : T; type KnownProperties = { [K in keyof T as NotUnknown extends never ? never : K]: T[K]; }; type FlattenOptions = T extends { [K: string]: infer U; } ? U : never; type OptionsUnion = FlattenOptions>; type DefaultOptionsTypes = { [K in NameType]: Extract["defaultOptions"]; }; type SeparateOptions = KnownProperties>; type UnionToIntersection = (U extends any ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never; export type ExtractRendererOptions = UnionToIntersection>; export interface BatcherAdaptor { start(batchPipe: BatcherPipe, geometry: Geometry, shader: Shader): void; init?(batchPipe: BatcherPipe): void; execute(batchPipe: BatcherPipe, batch: Batch): void; contextChange?(): void; } /** * A pipe that groups elements into batches and sends them to the renderer. * * You can install new Batchers using ExtensionType.Batcher. Each render group will * have a default batcher and any required ones will be created on demand.
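* * A hedged sketch of installing a custom batcher through the extensions mechanism (`MyBatcher` is illustrative; the extension metadata mirrors the shape declared on DefaultBatcher below, and exact registration details may differ): * ```js * import { extensions, ExtensionType, DefaultBatcher } from 'pixi.js'; * * class MyBatcher extends DefaultBatcher { * static extension = { type: [ExtensionType.Batcher], name: 'my-batcher' }; * } * extensions.add(MyBatcher); * * // the pipe can then look the batcher up by name * BatcherPipe.getBatcher('my-batcher'); * ```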
* @memberof rendering */ export declare class BatcherPipe implements InstructionPipe, BatchPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "batch"; }; state: State; renderer: Renderer; private readonly _batchersByInstructionSet; private _adaptor; /** A record of all active batchers, keyed by their names */ private _activeBatches; /** The currently active batcher being used to batch elements */ private _activeBatch; static _availableBatchers: Record Batcher>; static getBatcher(name: string): Batcher; constructor(renderer: Renderer, adaptor: BatcherAdaptor); buildStart(instructionSet: InstructionSet): void; addToBatch(batchableObject: BatchableElement, instructionSet: InstructionSet): void; break(instructionSet: InstructionSet): void; buildEnd(instructionSet: InstructionSet): void; upload(instructionSet: InstructionSet): void; execute(batch: Batch): void; destroy(): void; } /** * A BatcherAdaptor that uses WebGL to render batches. * @memberof rendering * @ignore */ export declare class GlBatchAdaptor implements BatcherAdaptor { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipesAdaptor ]; readonly name: "batch"; }; private _didUpload; private readonly _tempState; init(batcherPipe: BatcherPipe): void; contextChange(): void; start(batchPipe: BatcherPipe, geometry: Geometry, shader: Shader): void; execute(batchPipe: BatcherPipe, batch: Batch): void; } export type GlRenderingContext = WebGL2RenderingContext; export declare function checkMaxIfStatementsInShader(maxIfs: number, gl: GlRenderingContext): number; /** * Returns the maximum number of textures that can be batched. This uses WebGL1's `MAX_TEXTURE_IMAGE_UNITS`. * The reason for this is that to get this info via WebGPU, we would need to create a context, which * would make this function async, and we want to avoid that. * @private * @returns {number} The maximum number of textures that can be batched */ export declare function getMaxTexturesPerBatch(): number; export declare function generateGPULayout(maxTextures: number): GPUBindGroupLayoutEntry[]; export declare function generateLayout(maxTextures: number): Record; export declare function getTextureBatchBindGroup(textures: TextureSource[], size: number): BindGroup; /** * A BatcherAdaptor that uses the GPU to render batches. * @memberof rendering * @ignore */ export declare class GpuBatchAdaptor implements BatcherAdaptor { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUPipesAdaptor ]; readonly name: "batch"; }; private _shader; private _geometry; start(batchPipe: BatcherPipe, geometry: Geometry, shader: Shader): void; execute(batchPipe: BatcherPipe, batch: Batch): void; } export declare class BatchGeometry extends Geometry { constructor(); } /** * Represents the common elements for default batch rendering. * This interface defines the properties that are used by the DefaultBatcher * to render elements efficiently in a batch. * @memberof rendering */ export interface DefaultBatchElements { /** * The color of the element that will be multiplied with the texture color. * This is typically represented as a 32-bit integer in RGBA format. */ color: number; /** * Determines whether the element should be rounded to the nearest pixel. * - 0: No rounding (default) * - 1: Round to nearest pixel * This can help with visual consistency, especially for pixel art styles.
*/ roundPixels: 0 | 1; /** * The transform matrix of the element. * This matrix represents the position, scale, rotation, and skew of the element. */ transform: Matrix; } /** * Represents a batchable quad element with default batch properties. * @memberof rendering */ export interface DefaultBatchableQuadElement extends BatchableQuadElement, DefaultBatchElements { } /** * Represents a batchable mesh element with default batch properties. * @memberof rendering */ export interface DefaultBatchableMeshElement extends BatchableMeshElement, DefaultBatchElements { } /** * The default batcher is used to batch quads and meshes. This batcher will batch the following elements: * - tints * - roundPixels * - texture * - transform * @memberof rendering */ export declare class DefaultBatcher extends Batcher { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.Batcher ]; readonly name: "default"; }; geometry: BatchGeometry; shader: Shader; name: "default"; /** The size of one attribute. 1 = 32 bit. x, y, u, v, color, textureIdAndRound -> total = 6 */ vertexSize: number; /** * Packs the attributes of a DefaultBatchableMeshElement into the provided views. * @param element - The DefaultBatchableMeshElement to pack. * @param float32View - The Float32Array view to pack into. * @param uint32View - The Uint32Array view to pack into. * @param index - The starting index in the views. * @param textureId - The texture ID to use. */ packAttributes(element: DefaultBatchableMeshElement, float32View: Float32Array, uint32View: Uint32Array, index: number, textureId: number): void; /** * Packs the attributes of a DefaultBatchableQuadElement into the provided views. * @param element - The DefaultBatchableQuadElement to pack. * @param float32View - The Float32Array view to pack into. * @param uint32View - The Uint32Array view to pack into. * @param index - The starting index in the views. * @param textureId - The texture ID to use. */ packQuadAttributes(element: DefaultBatchableQuadElement, float32View: Float32Array, uint32View: Uint32Array, index: number, textureId: number): void; } /** * DefaultShader is a specialized shader class designed for batch rendering. * It extends the base Shader class and provides functionality for handling * color, texture batching, and pixel rounding in both WebGL and WebGPU contexts. * * It is used by the default batcher. * @extends Shader * @memberof rendering */ export declare class DefaultShader extends Shader { constructor(maxTextures: number); } /** the vertex source code, as an object */ export type Vertex = { /** stick uniforms and functions in here; all headers will be compiled at the top of the shader */ header?: string; /** code will be added at the start of the shader */ start?: string; /** code will be run here before lighting happens */ main?: string; /** code here will modify anything before it is passed to the fragment shader */ end?: string; }; export type Fragment = { /** stick uniforms and functions in here; all headers will be compiled at the top of the shader */ header?: string; /** code will be added at the start of the shader */ start?: string; /** code will be run here before lighting happens */ main?: string; /** code here will modify anything before it is passed to the fragment shader */ end?: string; }; /** * HighShaderBit is a part of a shader. * It is used to compile HighShaders. * * Internally shaders are made up of many of these. * You can even write your own and compile them in.
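* * A hedged sketch of a hand-written GL bit (the names and GLSL snippet are illustrative, not a real PixiJS bit): * ```js * const myTintBitGl = { * name: 'my-tint-bit', * fragment: { * header: 'uniform vec3 uMyTint;', * main: 'outColor.rgb *= uMyTint;', * }, * }; * ``` * Such a bit could then be passed to {@link compileHighShaderGlProgram} alongside the built-in bits.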
*/ export interface HighShaderBit { /** used to make the shader easier to understand! */ name?: string; /** the snippets of vertex code */ vertex?: Vertex; /** the snippets of fragment code */ fragment?: Fragment; } /** Source code to compile a shader. This can be used directly by pixi and should be good to go! */ export interface HighShaderSource { fragment: string; vertex: string; } export declare function compileHighShaderGpuProgram({ bits, name }: { bits: HighShaderBit[]; name: string; }): GpuProgram; export declare function compileHighShaderGlProgram({ bits, name }: { bits: HighShaderBit[]; name: string; }): GlProgram; /** A HighShader template consists of vertex and fragment source */ export interface HighShaderTemplate { name?: string; fragment: string; vertex: string; } export interface CompileHighShaderOptions { template: HighShaderTemplate; bits: HighShaderBit[]; } /** * This function takes a HighShader template and some HighShaderBits and merges them into a shader source. * @param options * @param options.template * @param options.bits */ export declare function compileHighShader({ template, bits }: CompileHighShaderOptions): HighShaderSource; export declare function compileHighShaderGl({ template, bits }: CompileHighShaderOptions): HighShaderSource; /** * takes the HighFragment source parts and adds them to the hook hash * @param srcParts - the hash of hook arrays * @param parts - the code to inject into the hooks * @param name - optional name of the part to add */ export declare function addBits(srcParts: Record, parts: Record, name?: string): void; export declare const findHooksRx: RegExp; /** * takes a program string and returns a hash mapping the hooks to empty arrays * @param programSrc - the program containing hooks */ export declare function compileHooks(programSrc: string): Record; export declare function compileInputs(fragments: any[], template: string, sort?: boolean): string; export declare function compileOutputs(fragments: any[], template: string): string; /** * formats a shader so it's more pleasant to read! * @param shader - a glsl shader program source */ export declare function formatShader(shader: string): string; /** * takes a shader src and replaces any hooks with the HighFragment code.
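 * A minimal sketch of the idea (assuming the hook hash maps hook names to arrays of code
 * strings, as produced by {@link compileHooks}; the exact whitespace of the output may differ):
 * ```ts
 * const out = injectBits('void main(){ {{main}} }', { main: ['color *= 2.0;'] });
 * // out is roughly: 'void main(){ color *= 2.0; }'
 * ```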
* @param templateSrc - the program src template * @param fragmentParts - the fragments to inject */ export declare function injectBits(templateSrc: string, fragmentParts: Record): string; export declare const vertexGPUTemplate = "\n @in aPosition: vec2;\n @in aUV: vec2;\n\n @out @builtin(position) vPosition: vec4;\n @out vUV : vec2;\n @out vColor : vec4;\n\n {{header}}\n\n struct VSOutput {\n {{struct}}\n };\n\n @vertex\n fn main( {{in}} ) -> VSOutput {\n\n var worldTransformMatrix = globalUniforms.uWorldTransformMatrix;\n var modelMatrix = mat3x3(\n 1.0, 0.0, 0.0,\n 0.0, 1.0, 0.0,\n 0.0, 0.0, 1.0\n );\n var position = aPosition;\n var uv = aUV;\n\n {{start}}\n \n vColor = vec4(1., 1., 1., 1.);\n\n {{main}}\n\n vUV = uv;\n\n var modelViewProjectionMatrix = globalUniforms.uProjectionMatrix * worldTransformMatrix * modelMatrix;\n\n vPosition = vec4((modelViewProjectionMatrix * vec3(position, 1.0)).xy, 0.0, 1.0);\n \n vColor *= globalUniforms.uWorldColorAlpha;\n\n {{end}}\n\n {{return}}\n };\n"; export declare const fragmentGPUTemplate = "\n @in vUV : vec2;\n @in vColor : vec4;\n \n {{header}}\n\n @fragment\n fn main(\n {{in}}\n ) -> @location(0) vec4 {\n \n {{start}}\n\n var outColor:vec4;\n \n {{main}}\n \n var finalColor:vec4 = outColor * vColor;\n\n {{end}}\n\n return finalColor;\n };\n"; export declare const vertexGlTemplate = "\n in vec2 aPosition;\n in vec2 aUV;\n\n out vec4 vColor;\n out vec2 vUV;\n\n {{header}}\n\n void main(void){\n\n mat3 worldTransformMatrix = uWorldTransformMatrix;\n mat3 modelMatrix = mat3(\n 1.0, 0.0, 0.0,\n 0.0, 1.0, 0.0,\n 0.0, 0.0, 1.0\n );\n vec2 position = aPosition;\n vec2 uv = aUV;\n \n {{start}}\n \n vColor = vec4(1.);\n \n {{main}}\n \n vUV = uv;\n \n mat3 modelViewProjectionMatrix = uProjectionMatrix * worldTransformMatrix * modelMatrix;\n\n gl_Position = vec4((modelViewProjectionMatrix * vec3(position, 1.0)).xy, 0.0, 1.0);\n\n vColor *= uWorldColorAlpha;\n\n {{end}}\n }\n"; export declare const fragmentGlTemplate = "\n \n in vec4 vColor;\n in vec2 vUV;\n\n out vec4 finalColor;\n\n {{header}}\n\n void main(void) {\n \n {{start}}\n\n vec4 outColor;\n \n {{main}}\n \n finalColor = outColor * vColor;\n \n {{end}}\n }\n"; export declare const colorBit: { name: string; vertex: { header: string; main: string; }; }; export declare const colorBitGl: { name: string; vertex: { header: string; main: string; }; }; export declare function generateTextureBatchBit(maxTextures: number): HighShaderBit; export declare function generateTextureBatchBitGl(maxTextures: number): HighShaderBit; export declare const globalUniformsBit: { name: string; vertex: { header: string; }; }; export declare const globalUniformsUBOBitGl: { name: string; vertex: { header: string; }; }; export declare const globalUniformsBitGl: { name: string; vertex: { header: string; }; }; export declare const localUniformBit: { name: string; vertex: { header: string; main: string; end: string; }; }; export declare const localUniformBitGroup2: { vertex: { header: string; main: string; end: string; }; name: string; }; export declare const localUniformBitGl: { name: string; vertex: { header: string; main: string; end: string; }; }; export declare const roundPixelsBit: { name: string; vertex: { header: string; }; }; export declare const roundPixelsBitGl: { name: string; vertex: { header: string; }; }; export declare const textureBit: { name: string; vertex: { header: string; main: string; }; fragment: { header: string; main: string; }; }; export declare const textureBitGl: { name: string; vertex: { header: 
string; main: string; }; fragment: { header: string; main: string; }; }; /** * A generic class for managing a pool of items. * @template T The type of items in the pool. Must implement {@link utils.PoolItem}. * @memberof utils */ export declare class Pool { readonly _classType: PoolItemConstructor; private readonly _pool; private _count; private _index; /** * Constructs a new Pool. * @param ClassType - The constructor of the items in the pool. * @param {number} [initialSize] - The initial size of the pool. */ constructor(ClassType: PoolItemConstructor, initialSize?: number); /** * Prepopulates the pool with a given number of items. * @param total - The number of items to add to the pool. */ prepopulate(total: number): void; /** * Gets an item from the pool. Calls the item's `init` method if it exists. * If there are no items left in the pool, a new one will be created. * @param {unknown} [data] - Optional data to pass to the item's constructor. * @returns {T} The item from the pool. */ get(data?: unknown): T; /** * Returns an item to the pool. Calls the item's `reset` method if it exists. * @param {T} item - The item to return to the pool. */ return(item: T): void; /** * Gets the number of items in the pool. * @readonly * @member {number} */ get totalSize(): number; /** * Gets the number of items in the pool that are free to use without needing to create more. * @readonly * @member {number} */ get totalFree(): number; /** * Gets the number of items in the pool that are currently in use. * @readonly * @member {number} */ get totalUsed(): number; /** clears the pool - mainly used for debugging! */ clear(): void; } /** * An object that can be stored in a {@link utils.Pool}. * @memberof utils */ export type PoolItem = { init?: (data?: any) => void; reset?: () => void; [key: string]: any; }; /** * The constructor of an object that can be stored in a {@link utils.Pool}. * @typeParam K - The type of the object that can be stored in a {@link utils.Pool}. * @memberof utils */ export type PoolItemConstructor = new () => K; export declare class AlphaMask implements Effect, PoolItem { static extension: ExtensionMetadata; priority: number; mask: Container; pipe: string; renderMaskToTexture: boolean; constructor(options?: { mask: Container; }); init(mask: Container): void; reset(): void; addBounds(bounds: Bounds, skipUpdateTransform?: boolean): void; addLocalBounds(bounds: Bounds, localRoot: Container): void; containsPoint(point: Point, hitTestFn: (container: Container, point: Point) => boolean): boolean; destroy(): void; static test(mask: any): boolean; } type FilterAction = "pushFilter" | "popFilter"; /** * The filter pipeline is responsible for applying filters to scene items! * * KNOWN BUGS: * 1. Global bounds calculation is incorrect if it is used when flip-flopping filters. The maths can be found below, * e.g.: with filters [noiseFilter, blurFilter], noiseFilter will calculate the global bounds incorrectly. * * 2. RenderGroups do not work with filters. This is because the renderGroup matrix is not currently taken into account. * * Implementation notes: * 1. Gotcha - nesting filters that require blending will not work correctly. This creates a chicken-and-egg problem; * the complexity and performance required to do this is not worth it, I feel... but let's see if others agree! * * 2. Filters are designed to be changed on the fly; this means that changing filter information each frame will * not trigger an instruction rebuild. If you are constantly turning a filter on and off,
it is therefore better to set * `enabled` to true or false on the filter, or to set an empty array. * * 3. Need to look at perhaps antialiasing when flip-flopping filters. Really we should only need to antialias the FIRST * Texture we render to. The rest can be non-antialiased. This might help performance. * Currently we flip flop with an antialiased texture if antialiasing is enabled on the filter. */ export interface FilterInstruction extends Instruction { renderPipeId: "filter"; action: FilterAction; container?: Container; renderables?: Renderable[]; filterEffect: FilterEffect; } export interface FilterData { skip: boolean; enabledLength?: number; inputTexture: Texture; bounds: Bounds; blendRequired: boolean; container: Container; filterEffect: FilterEffect; previousRenderSurface: RenderSurface; backTexture?: Texture; } /** * System that manages the filter pipeline * @memberof rendering */ export declare class FilterSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "filter"; }; readonly renderer: Renderer; private _filterStackIndex; private _filterStack; private readonly _filterGlobalUniforms; private readonly _globalFilterBindGroup; private _activeFilterData; constructor(renderer: Renderer); /** * The back texture of the currently active filter. Requires the filter to have `blendRequired` set to true. * @readonly */ get activeBackTexture(): Texture | undefined; push(instruction: FilterInstruction): void; pop(): void; getBackTexture(lastRenderSurface: RenderTarget, bounds: Bounds, previousBounds?: Bounds): Texture>; applyFilter(filter: Filter, input: Texture, output: RenderSurface, clear: boolean): void; private _getFilterData; /** * Multiply _input normalized coordinates_ by this matrix to get _sprite texture normalized coordinates_. * * Use `outputMatrix * vTextureCoord` in the shader. * @param outputMatrix - The matrix to output to. * @param {Sprite} sprite - The sprite to map to. * @returns The mapped matrix. */ calculateSpriteMatrix(outputMatrix: Matrix, sprite: Sprite): Matrix; destroy?: () => void; } /** * Filters provide additional shading and post-processing effects to any display object and its children * they are attached to. * * You attach filters to a display object using its `filters` array property. * * ```js * import { Sprite, BlurFilter, HardMixBlend } from 'pixi.js'; * * const sprite = Sprite.from('myTexture.png'); * * // single filter * sprite.filters = new BlurFilter({ strength: 8 }); * * // or multiple filters * sprite.filters = [new BlurFilter({ strength: 8 }), new HardMixBlend()]; * ``` * * Pixi has a number of built-in filters which can be used in your game or application: * * - {@link filters.AlphaFilter} - Applies alpha to the display object and any of its children. * - {@link filters.BlurFilter} - Applies a Gaussian blur to the display object. * - {@link filters.BlurFilterPass} - Applies a blur pass to an object. * - {@link filters.ColorBurnBlend} - Blend mode to add color burn to display objects. * - {@link filters.ColorDodgeBlend} - Blend mode to add color dodge to display objects. * - {@link filters.ColorMatrixFilter} - Transform the color channels by matrix multiplication. * - {@link filters.DarkenBlend} - Blend mode to darken display objects. * - {@link filters.DisplacementFilter} - Applies a displacement map to distort an object. * - {@link filters.DivideBlend} - Blend mode to divide display objects.
* - {@link filters.HardMixBlend} - Blend mode to hard mix display objects. * - {@link filters.LinearBurnBlend} - Blend mode to add linear burn to display objects. * - {@link filters.LinearDodgeBlend} - Blend mode to add linear dodge to display objects. * - {@link filters.LinearLightBlend} - Blend mode to add linear light to display objects. * - {@link filters.NoiseFilter} - Applies random noise to an object. * - {@link filters.PinLightBlend} - Blend mode to add pin light to display objects. * - {@link filters.SubtractBlend} - Blend mode to subtract display objects. * *
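 * Most built-in filters also accept the common {@link filters.FilterOptions} described
 * below, alongside their own options. A sketch (`strength` is specific to `BlurFilter`;
 * whether a given filter forwards every common option is up to that filter):
 * ```ts
 * import { BlurFilter } from 'pixi.js';
 *
 * sprite.filters = [new BlurFilter({
 *     strength: 8,
 *     resolution: 0.5, // render the filter at half resolution for speed
 *     padding: 16,     // extra room so the blur is not clipped
 * })];
 * ```
 *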
* For more available filters, check out the * {@link https://pixijs.io/filters/docs/ pixi-filters} repository. * * You can also check out the awesome {@link https://pixijs.io/filters/examples/ Filter demo} to see * filters in action and combine them! * @namespace filters */ /** * The options to use when creating a new filter. * @memberof filters */ export interface FilterOptions { /** optional blend mode used by the filter when rendering (defaults to 'normal') */ blendMode?: BLEND_MODES; /** * the resolution the filter should be rendered at. The lower the resolution, the more performant * the filter will be, but the lower the quality of the output. (default 1) * If 'inherit', the resolution of the render target is used. * Consider lowering this for things like blur filters */ resolution?: number | "inherit"; /** * the number of pixels to pad the container with when applying the filter. For example a blur extends the * container out as it blurs, so padding is applied to ensure that extra detail is rendered as well * without clipping occurring. (default 0) */ padding?: number; /** * If true the filter will make use of antialiasing. Although it looks better this can have a performance impact. * If set to 'inherit', the filter will detect the antialiasing of the render target and change this automatically. * Definitely don't set this to true if the render target has antialiasing set to false, as it will antialias * but you won't see the difference. (default 'off') * * This can be a boolean or [FilterAntialias]{@link filters.FilterAntialias} string. */ antialias?: FilterAntialias | boolean; /** * If this is set to true, the filter system will grab a snapshot of the area being rendered * to and pass this into the shader. This is useful for blend modes that need to be aware of the pixels * they are rendering to. Only use if you need that data, otherwise it's an extra GPU copy you don't need! * (default false) */ blendRequired?: boolean; } /** Filter options mixed with shader resources. A filter needs a shader and some resources to work. */ export type FilterWithShader = FilterOptions & IShaderWithResources; /** * The antialiasing mode of the filter. This can be either: * - `on` - the filter is always antialiased regardless of the render target settings * - `off` - (default) the filter is never antialiased regardless of the render target settings * - `inherit` - the filter uses the antialias settings of the render target * @memberof filters */ export type FilterAntialias = "on" | "off" | "inherit"; /** * The Filter class is the base for all filter effects used in Pixi.js. * As it extends a shader, it requires that a glProgram is passed in to work with WebGL and a gpuProgram for WebGPU. * If you don't provide one, then the filter is skipped and just rendered as if it wasn't there for that renderer. * * A filter can be applied to anything that extends Container in Pixi.js, which also includes Sprites, Graphics etc. * * It's worth noting that, performance-wise, filters can be pretty expensive if used too much in a single scene. * The following happens under the hood when a filter is applied: * * 1. Break the current batch *
* 2. The target is measured using getGlobalBounds * (recursively go through all children and figure out how big the object is) *
* 3. Get the closest Po2 Textures from the texture pool *
* 4. Render the target to that texture *
* 5. Render that texture back to the main frame buffer as a quad using the filter's program. *
*
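 * As the notes below spell out, this cost is paid per filtered container, so prefer
 * filtering one parent container over filtering each child. A sketch (`sprites` is
 * illustrative):
 * ```ts
 * import { Container, BlurFilter } from 'pixi.js';
 *
 * const group = new Container();
 * for (const sprite of sprites) group.addChild(sprite);
 *
 * // one filter pass for the whole group, instead of one pass per sprite
 * group.filters = [new BlurFilter({ strength: 4 })];
 * ```
 *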
* Some filters (such as blur) require multiple passes too, which can result in an even bigger performance hit. So be careful! * It's not generally the complexity of the shader that is the bottleneck, * but all the framebuffer / shader switching that has to take place. * One filter applied to a container with many objects is MUCH faster than many filters applied to many objects. * @class * @memberof filters */ export declare class Filter extends Shader { /** * The default filter settings * @static */ static readonly defaultOptions: FilterOptions; /** * The padding of the filter. Some filters require extra space to breathe, such as a blur. * Increasing this will add extra width and height to the bounds of the object that the * filter is applied to. * @default 0 */ padding: number; /** * should the filter use antialiasing? * @default inherit */ antialias: FilterAntialias; /** If enabled is true the filter is applied, if false it is not. */ enabled: boolean; /** * The gpu state the filter requires to render. * @internal * @ignore */ _state: State; /** * The resolution of the filter. Setting this to be lower will lower the quality but * increase the performance of the filter. * @default 1 */ resolution: number | "inherit"; /** * Whether or not this filter requires the previous render texture for blending. * @default false */ blendRequired: boolean; /** * @param options - The optional parameters of this filter. */ constructor(options: FilterWithShader); /** * Applies the filter * @param filterManager - The renderer to retrieve the filter from * @param input - The input render target. * @param output - The target to output to. * @param clearMode - Should the output be cleared before rendering to it */ apply(filterManager: FilterSystem, input: Texture, output: RenderSurface, clearMode: boolean): void; /** * Get the blend mode of the filter. * @default "normal" */ get blendMode(): BLEND_MODES; /** Sets the blend mode of the filter. */ set blendMode(value: BLEND_MODES); /** * A shorthand function to create a filter based on a vertex and fragment shader src. * @param options * @returns A shiny new PixiJS filter! */ static from(options: FilterOptions & ShaderFromResources): Filter; } /** * A filter effect is an effect that can be applied to a container that involves applying special pixel effects * to that container as it is rendered. Used internally when the filters property is modified on a container */ export declare class FilterEffect implements Effect { /** read only filters array - to modify, set it again! */ filters: readonly Filter[]; /** * If specified, rather than calculating the bounds of the container that the filter * will apply to, we use this rect instead.
This is a local rect - so will have the containers transform * applied to it */ filterArea?: Rectangle; /** the pipe that knows how to handle this effect */ pipe: string; /** the priority of this effect */ priority: number; destroy(): void; } type MaskMode = "pushMaskBegin" | "pushMaskEnd" | "popMaskBegin" | "popMaskEnd"; declare class AlphaMaskEffect extends FilterEffect implements PoolItem { constructor(); get sprite(): Sprite; set sprite(value: Sprite); init: () => void; } export interface AlphaMaskInstruction extends Instruction { renderPipeId: "alphaMask"; action: MaskMode; mask: AlphaMask; maskedContainer: Container; renderMask: boolean; } export interface AlphaMaskData { filterEffect: AlphaMaskEffect; maskedContainer: Container; previousRenderTarget?: RenderTarget; filterTexture?: Texture; } export declare class AlphaMaskPipe implements InstructionPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "alphaMask"; }; private _renderer; private _activeMaskStage; constructor(renderer: Renderer); push(mask: Effect, maskedContainer: Container, instructionSet: InstructionSet): void; pop(mask: Effect, _maskedContainer: Container, instructionSet: InstructionSet): void; execute(instruction: AlphaMaskInstruction): void; destroy(): void; } export declare class ColorMask implements Effect, PoolItem { static extension: ExtensionMetadata; priority: number; mask: number; pipe: string; constructor(options: { mask: number; }); init(mask: number): void; destroy(): void; static test(mask: any): boolean; } export interface ColorMaskInstruction extends Instruction { renderPipeId: "colorMask"; colorMask: number; } export declare class ColorMaskPipe implements InstructionPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "colorMask"; }; private readonly _renderer; private _colorStack; private _colorStackIndex; private _currentColor; constructor(renderer: Renderer); buildStart(): void; push(mask: Effect, _container: Container, instructionSet: InstructionSet): void; pop(_mask: Effect, _container: Container, instructionSet: InstructionSet): void; execute(instruction: ColorMaskInstruction): void; destroy(): void; } interface MaskConversionTest { test: (item: any) => boolean; maskClass: new (item: any) => Effect & PoolItem; } export type MaskEffect = { mask: unknown; } & Effect; /** * A class that manages the conversion of masks to mask effects. 
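 * Although this manager is internal, the registration shape follows `MaskConversionTest`
 * above. A hedged sketch (`MyMask` and `MyMaskEffect` are illustrative):
 * ```ts
 * MaskEffectManager.add({
 *     test: (item) => item instanceof MyMask,
 *     maskClass: MyMaskEffect,
 * });
 * ```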
* @memberof rendering * @ignore */ export declare class MaskEffectManagerClass { /** * @private */ readonly _effectClasses: EffectConstructor[]; private readonly _tests; private _initialized; init(): void; add(test: MaskConversionTest): void; getMaskEffect(item: any): MaskEffect; returnMaskEffect(effect: Effect & PoolItem): void; } export declare const MaskEffectManager: MaskEffectManagerClass; export declare class ScissorMask implements Effect { priority: number; mask: Container; pipe: string; constructor(mask: Container); addBounds(bounds: Bounds, skipUpdateTransform?: boolean): void; addLocalBounds(bounds: Bounds, localRoot: Container): void; containsPoint(point: Point, hitTestFn: (container: Container, point: Point) => boolean): boolean; reset(): void; destroy(): void; } export declare class StencilMask implements Effect, PoolItem { static extension: ExtensionMetadata; priority: number; mask: Container; pipe: string; constructor(options: { mask: Container; }); init(mask: Container): void; reset(): void; addBounds(bounds: Bounds, skipUpdateTransform: boolean): void; addLocalBounds(bounds: Bounds, localRoot: Container): void; containsPoint(point: Point, hitTestFn: (container: Container, point: Point) => boolean): boolean; destroy(): void; static test(mask: any): boolean; } type MaskMode$1 = "pushMaskBegin" | "pushMaskEnd" | "popMaskBegin" | "popMaskEnd"; export interface StencilMaskInstruction extends Instruction { renderPipeId: "stencilMask"; action: MaskMode$1; mask: StencilMask; } export declare class StencilMaskPipe implements InstructionPipe { static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "stencilMask"; }; private _renderer; private _maskStackHash; private _maskHash; constructor(renderer: Renderer); push(mask: Effect, _container: Container, instructionSet: InstructionSet): void; pop(mask: Effect, _container: Container, instructionSet: InstructionSet): void; execute(instruction: StencilMaskInstruction): void; destroy(): void; } export declare function addMaskBounds(mask: Container, bounds: Bounds, skipUpdateTransform: boolean): void; export declare function addMaskLocalBounds(mask: Container, bounds: Bounds, localRoot: Container): void; export declare function getMatrixRelativeToParent(target: Container, root: Container, matrix: Matrix): Matrix; /** * Constants for various buffer types in Pixi * @see BUFFER_TYPE * @name BUFFER_TYPE * @static * @enum {number} * @property {number} ELEMENT_ARRAY_BUFFER - buffer type for using as an index buffer * @property {number} ARRAY_BUFFER - buffer type for using attribute data * @property {number} UNIFORM_BUFFER - the buffer type is for uniform buffer objects */ export declare enum BUFFER_TYPE { ELEMENT_ARRAY_BUFFER = 34963, ARRAY_BUFFER = 34962, UNIFORM_BUFFER = 35345 } export declare class GlBuffer { buffer: WebGLBuffer; updateID: number; byteLength: number; type: number; constructor(buffer: WebGLBuffer, type: BUFFER_TYPE); } /** * System plugin to the renderer to manage buffers. * * WebGL uses Buffers as a way to store objects to the GPU. * This system makes working with them a lot easier. * * Buffers are used in three main places in WebGL * - geometry information * - Uniform information (via uniform buffer objects - a WebGL 2 only feature) * - Transform feedback information. (WebGL 2 only feature) * * This system will handle the binding of buffers to the GPU as well as uploading * them. 
With this system, you never need to work directly with GPU buffers, but instead work with * the Buffer class. * @class * @memberof rendering */ export declare class GlBufferSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "buffer"; }; private _gl; private _gpuBuffers; /** Cache keeping track of the base bound buffer bases */ private readonly _boundBufferBases; private _renderer; /** * @param {Renderer} renderer - The renderer this System works for. */ constructor(renderer: WebGLRenderer); /** * @ignore */ destroy(): void; /** Sets up the renderer context and necessary buffers. */ protected contextChange(): void; getGlBuffer(buffer: Buffer$1): GlBuffer; /** * This binds specified buffer. On first run, it will create the webGL buffers for the context too * @param buffer - the buffer to bind to the renderer */ bind(buffer: Buffer$1): void; /** * Binds an uniform buffer to at the given index. * * A cache is used so a buffer will not be bound again if already bound. * @param buffer - the buffer to bind * @param index - the base index to bind it to. */ bindBufferBase(buffer: Buffer$1, index: number): void; /** * Binds a buffer whilst also binding its range. * This will make the buffer start from the offset supplied rather than 0 when it is read. * @param buffer - the buffer to bind * @param index - the base index to bind at, defaults to 0 * @param offset - the offset to bind at (this is blocks of 256). 0 = 0, 1 = 256, 2 = 512 etc */ bindBufferRange(buffer: Buffer$1, index?: number, offset?: number): void; /** * Will ensure the data in the buffer is uploaded to the GPU. * @param {Buffer} buffer - the buffer to update */ updateBuffer(buffer: Buffer$1): GlBuffer; /** dispose all WebGL resources of all managed buffers */ destroyAll(): void; /** * Disposes buffer * @param {Buffer} buffer - buffer with data * @param {boolean} [contextLost=false] - If context was lost, we suppress deleteVertexArray */ protected onBufferDestroy(buffer: Buffer$1, contextLost?: boolean): void; /** * creates and attaches a GLBuffer object tied to the current context. 
* @param buffer * @protected */ protected createGLBuffer(buffer: Buffer$1): GlBuffer; } interface WEBGL_compressed_texture_pvrtc$1 { COMPRESSED_RGB_PVRTC_4BPPV1_IMG: number; COMPRESSED_RGBA_PVRTC_4BPPV1_IMG: number; COMPRESSED_RGB_PVRTC_2BPPV1_IMG: number; COMPRESSED_RGBA_PVRTC_2BPPV1_IMG: number; } interface WEBGL_compressed_texture_etc$1 { COMPRESSED_R11_EAC: number; COMPRESSED_SIGNED_R11_EAC: number; COMPRESSED_RG11_EAC: number; COMPRESSED_SIGNED_RG11_EAC: number; COMPRESSED_RGB8_ETC2: number; COMPRESSED_RGBA8_ETC2_EAC: number; COMPRESSED_SRGB8_ETC2: number; COMPRESSED_SRGB8_ALPHA8_ETC2_EAC: number; COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2: number; COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2: number; } interface WEBGL_compressed_texture_etc1$1 { COMPRESSED_RGB_ETC1_WEBGL: number; } export interface WEBGL_compressed_texture_atc { COMPRESSED_RGB_ATC_WEBGL: number; COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL: number; COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL: number; } interface EXT_texture_compression_bptc$1 { COMPRESSED_RGBA_BPTC_UNORM_EXT: number; COMPRESSED_RGB_BPTC_SIGNED_FLOAT_EXT: number; COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_EXT: number; COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT: number; } interface EXT_texture_compression_rgtc$1 { COMPRESSED_RED_RGTC1_EXT: number; COMPRESSED_SIGNED_RED_RGTC1_EXT: number; COMPRESSED_RED_GREEN_RGTC2_EXT: number; COMPRESSED_SIGNED_RED_GREEN_RGTC2_EXT: number; } export interface WebGLExtensions { drawBuffers?: WEBGL_draw_buffers; depthTexture?: OES_texture_float; loseContext?: WEBGL_lose_context; vertexArrayObject?: OES_vertex_array_object; anisotropicFiltering?: EXT_texture_filter_anisotropic; uint32ElementIndex?: OES_element_index_uint; floatTexture?: OES_texture_float; floatTextureLinear?: OES_texture_float_linear; textureHalfFloat?: OES_texture_half_float; textureHalfFloatLinear?: OES_texture_half_float_linear; colorBufferFloat?: EXT_color_buffer_float; vertexAttribDivisorANGLE?: ANGLE_instanced_arrays; s3tc?: WEBGL_compressed_texture_s3tc; s3tc_sRGB?: WEBGL_compressed_texture_s3tc_srgb; etc?: WEBGL_compressed_texture_etc$1; etc1?: WEBGL_compressed_texture_etc1$1; pvrtc?: WEBGL_compressed_texture_pvrtc$1; atc?: WEBGL_compressed_texture_atc; astc?: WEBGL_compressed_texture_astc; bptc?: EXT_texture_compression_bptc$1; rgtc?: EXT_texture_compression_rgtc$1; srgb?: EXT_sRGB; } /** * Options for the context system. * @memberof rendering * @property {WebGL2RenderingContext | null} [context=null] - User-provided WebGL rendering context object. * @property {GpuPowerPreference} [powerPreference='default'] - An optional hint indicating what configuration * of GPU is suitable for the WebGL context, can be `'high-performance'` or `'low-power'`. Setting to `'high-performance'` * will prioritize rendering performance over power consumption, while setting to `'low-power'` will prioritize power saving * over rendering performance. * @property {boolean} [premultipliedAlpha=true] - Whether the compositor will assume the drawing buffer contains * colors with premultiplied alpha. * @property {boolean} [preserveDrawingBuffer=false] - Whether to enable drawing buffer preservation. * If enabled, the drawing buffer will preserve * its value until cleared or overwritten. Enable this if you need to call `toDataUrl` on the WebGL context. * @property {boolean} [antialias] - Whether to enable antialiasing. * @property {1 | 2} [preferWebGLVersion=2] - The preferred WebGL version to use. */ export interface ContextSystemOptions { /** * User-provided WebGL rendering context object. 
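 * For example, you can hand PixiJS a context you created yourself. A sketch (assumes these
 * options are forwarded through the WebGL renderer / `Application.init` options):
 * ```ts
 * import { Application } from 'pixi.js';
 *
 * const canvas = document.createElement('canvas');
 * const gl = canvas.getContext('webgl2');
 *
 * const app = new Application();
 * await app.init({ canvas, context: gl, preference: 'webgl' });
 * ```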
* @default null * @memberof rendering.SharedRendererOptions */ context: WebGL2RenderingContext | null; /** * An optional hint indicating what configuration of GPU is suitable for the WebGL context, * can be `'high-performance'` or `'low-power'`. * Setting to `'high-performance'` will prioritize rendering performance over power consumption, * while setting to `'low-power'` will prioritize power saving over rendering performance. * @memberof rendering.SharedRendererOptions * @default undefined */ powerPreference?: GpuPowerPreference; /** * Whether the compositor will assume the drawing buffer contains colors with premultiplied alpha. * @default true * @memberof rendering.SharedRendererOptions */ premultipliedAlpha: boolean; /** * Whether to enable drawing buffer preservation. If enabled, the drawing buffer will preserve * its value until cleared or overwritten. Enable this if you need to call `toDataUrl` on the WebGL context. * @default false * @memberof rendering.SharedRendererOptions */ preserveDrawingBuffer: boolean; antialias?: boolean; /** * The preferred WebGL version to use. * @default 2 * @memberof rendering.SharedRendererOptions */ preferWebGLVersion?: 1 | 2; /** * Whether to enable multi-view rendering. Set to true when rendering to multiple * canvases on the dom. * @default false * @memberof rendering.SharedRendererOptions */ multiView: boolean; } /** * System plugin to the renderer to manage the context * @memberof rendering */ export declare class GlContextSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "context"; }; /** The default options for the system. */ static defaultOptions: ContextSystemOptions; protected CONTEXT_UID: number; protected gl: WebGL2RenderingContext; /** * Features supported by current renderer. * @type {object} * @readonly */ supports: { /** Support for 32-bit indices buffer. */ uint32Indices: boolean; /** Support for UniformBufferObjects */ uniformBufferObject: boolean; /** Support for VertexArrayObjects */ vertexArrayObject: boolean; /** Support for SRGB texture format */ srgbTextures: boolean; /** Support for wrapping modes if a texture is non-power of two */ nonPowOf2wrapping: boolean; /** Support for MSAA (antialiasing of dynamic textures) */ msaa: boolean; /** Support for mipmaps if a texture is non-power of two */ nonPowOf2mipmaps: boolean; }; /** * Extensions available. * @type {object} * @readonly * @property {WEBGL_draw_buffers} drawBuffers - WebGL v1 extension * @property {WEBGL_depth_texture} depthTexture - WebGL v1 extension * @property {OES_texture_float} floatTexture - WebGL v1 extension * @property {WEBGL_lose_context} loseContext - WebGL v1 extension * @property {OES_vertex_array_object} vertexArrayObject - WebGL v1 extension * @property {EXT_texture_filter_anisotropic} anisotropicFiltering - WebGL v1 and v2 extension */ extensions: WebGLExtensions; webGLVersion: 1 | 2; /** * Whether to enable multi-view rendering. Set to true when rendering to multiple * canvases on the dom. * @default false */ multiView: boolean; /** * The canvas that the WebGL Context is rendering to. * This will be the view canvas. But if multiView is enabled, this canvas will not be attached to the DOM. * It will be rendered to and then copied to the target canvas. * @readonly */ canvas: ICanvas; private _renderer; private _contextLossForced; /** @param renderer - The renderer this System works for. 
*/ constructor(renderer: WebGLRenderer); /** * `true` if the context is lost * @readonly */ get isLost(): boolean; /** * Handles the context change event. * @param {WebGLRenderingContext} gl - New WebGL context. */ protected contextChange(gl: WebGL2RenderingContext): void; init(options: ContextSystemOptions): void; ensureCanvasSize(targetCanvas: ICanvas): void; /** * Initializes the context. * @protected * @param {WebGLRenderingContext} gl - WebGL context */ protected initFromContext(gl: WebGL2RenderingContext): void; /** * Initialize from context options * @protected * @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/getContext * @param preferWebGLVersion * @param {object} options - context attributes */ protected createContext(preferWebGLVersion: 1 | 2, options: WebGLContextAttributes): void; /** Auto-populate the {@link GlContextSystem.extensions extensions}. */ protected getExtensions(): void; /** * Handles a lost WebGL context * @param {WebGLContextEvent} event - The context lost event. */ protected handleContextLost(event: WebGLContextEvent): void; /** Handles a restored WebGL context. */ protected handleContextRestored(): void; destroy(): void; /** * This function can be called to force a WebGL context loss. * This will release all resources on the GPU. * Useful if you need to put Pixi to sleep and save some GPU memory. * * As soon as render is called, all resources will be created again. */ forceContextLoss(): void; /** * Validate context. * @param {WebGLRenderingContext} gl - Render context. */ protected validateContext(gl: WebGL2RenderingContext): void; } /** * System plugin to the renderer to manage geometry. * @memberof rendering */ export declare class GlGeometrySystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "geometry"; }; /** * `true` if we have the `*_vertex_array_object` extension. * @readonly */ hasVao: boolean; /** * `true` if we have the `ANGLE_instanced_arrays` extension. * @readonly */ hasInstance: boolean; protected gl: GlRenderingContext; protected _activeGeometry: Geometry; protected _activeVao: WebGLVertexArrayObject; protected _geometryVaoHash: Record>; /** Renderer that owns this {@link GeometrySystem}. */ private _renderer; /** @param renderer - The renderer this System works for. */ constructor(renderer: WebGLRenderer); /** Sets up the renderer context and necessary buffers. */ protected contextChange(): void; /** * Binds geometry so that it can be drawn, creating a VAO if required. * @param geometry - Instance of geometry to bind. * @param program - Instance of program to use the VAO for. */ bind(geometry?: Geometry, program?: GlProgram): void; /** Reset and unbind any active VAO and geometry. */ reset(): void; /** Update buffers of the currently bound geometry. */ updateBuffers(): void; /** * Check compatibility between a geometry and a program * @param geometry - Geometry instance. * @param program - Program instance. */ protected checkCompatibility(geometry: Geometry, program: GlProgram): void; /** * Takes a geometry and program and generates a unique signature for them. * @param geometry - To get signature from. * @param program - To test geometry against. * @returns - Unique signature of the geometry and program */ protected getSignature(geometry: Geometry, program: GlProgram): string; protected getVao(geometry: Geometry, program: GlProgram): WebGLVertexArrayObject; /** * Creates or gets a VAO with the same structure as the geometry and stores it on the geometry.
* If a VAO is created, it is bound automatically. We use a shader to infer what and how to set up the * attribute locations. * @param geometry - Instance of geometry to generate a VAO for. * @param program * @param _incRefCount - Increment refCount of all geometry buffers. */ protected initGeometryVao(geometry: Geometry, program: GlProgram, _incRefCount?: boolean): WebGLVertexArrayObject; /** * Disposes geometry. * @param geometry - Geometry with buffers. Only VAO will be disposed * @param [contextLost=false] - If context was lost, we suppress deleteVertexArray */ protected onGeometryDestroy(geometry: Geometry, contextLost?: boolean): void; /** * Dispose all WebGL resources of all managed geometries. * @param [contextLost=false] - If context was lost, we suppress `gl.delete` calls */ destroyAll(contextLost?: boolean): void; /** * Activate vertex array object. * @param geometry - Geometry instance. * @param program - Shader program instance. */ protected activateVao(geometry: Geometry, program: GlProgram): void; /** * Draws the currently bound geometry. * @param topology - The type of primitive to render. * @param size - The number of elements to be rendered. If not specified, all vertices after the * starting vertex will be drawn. * @param start - The starting vertex in the geometry to start drawing from. If not specified, * drawing will start from the first vertex. * @param instanceCount - The number of instances of the set of elements to execute. If not specified, * all instances will be drawn. */ draw(topology?: Topology, size?: number, start?: number, instanceCount?: number): this; /** Unbind/reset everything. */ protected unbind(): void; destroy(): void; } export declare function getGlTypeFromFormat(format: VertexFormat): number; /** * The options for the back buffer system. * @memberof rendering * @property {boolean} [useBackBuffer=false] - if true will use the back buffer where required * @property {boolean} [antialias=false] - if true will ensure the texture is antialiased */ export interface GlBackBufferOptions { /** * if true will use the back buffer where required * @default false * @memberof rendering.WebGLOptions */ useBackBuffer?: boolean; /** if true will ensure the texture is antialiased */ antialias?: boolean; } /** * For blend modes you need to know what pixels you are actually drawing to. For this to be possible in WebGL * we need to render to a texture and then present that texture to the screen. This system manages that process. * * As the main scene is rendered to a texture, it means we can sample it and copy its pixels, * something not possible on the main canvas. * * If antialiasing is set to true and useBackBuffer is set to true, then the back buffer will be antialiased, * and the main gl context will not. * * You only need to activate this back buffer if you are using a blend mode that requires it.
* * to activate is simple, you pass `useBackBuffer:true` to your render options * @memberof rendering */ export declare class GlBackBufferSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "backBuffer"; readonly priority: 1; }; /** default options for the back buffer system */ static defaultOptions: GlBackBufferOptions; /** if true, the back buffer is used */ useBackBuffer: boolean; private _backBufferTexture; private readonly _renderer; private _targetTexture; private _useBackBufferThisRender; private _antialias; private _state; private _bigTriangleShader; constructor(renderer: WebGLRenderer); init(options?: GlBackBufferOptions): void; /** * This is called before the RenderTargetSystem is started. This is where * we replace the target with the back buffer if required. * @param options - The options for this render. */ protected renderStart(options: RenderOptions): void; protected renderEnd(): void; private _presentBackBuffer; private _getBackBufferTexture; /** destroys the back buffer */ destroy(): void; } /** * The system that handles color masking for the WebGL. * @memberof rendering */ export declare class GlColorMaskSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "colorMask"; }; private readonly _renderer; private _colorMaskCache; constructor(renderer: WebGLRenderer); setMask(colorMask: number): void; destroy?: () => void; } /** * The system that handles encoding commands for the WebGL. * @memberof rendering */ export declare class GlEncoderSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "encoder"; }; readonly commandFinished: Promise; private readonly _renderer; constructor(renderer: WebGLRenderer); setGeometry(geometry: Geometry, shader?: Shader): void; finishRenderPass(): void; draw(options: { geometry: Geometry; shader: Shader; state?: State; topology?: Topology; size?: number; start?: number; instanceCount?: number; skipSync?: boolean; }): void; destroy(): void; } /** * This manages the stencil buffer. Used primarily for masking * @memberof rendering */ export declare class GlStencilSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "stencil"; }; private _gl; private readonly _stencilCache; private _renderTargetStencilState; private _stencilOpsMapping; private _comparisonFuncMapping; private _activeRenderTarget; constructor(renderer: WebGLRenderer); protected contextChange(gl: WebGLRenderingContext): void; protected onRenderTargetChange(renderTarget: RenderTarget): void; setStencilMode(stencilMode: STENCIL_MODES, stencilReference: number): void; destroy?: () => void; } /** * System plugin to the renderer to manage uniform buffers. But with an WGSL adaptor. * @memberof rendering */ export declare class GlUboSystem extends UboSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "ubo"; }; constructor(); } /** * The WebGL adaptor for the render target system. 
Allows the Render Target System to be used with the WebGL renderer * @memberof rendering * @ignore */ export declare class GlRenderTargetAdaptor implements RenderTargetAdaptor { private _renderTargetSystem; private _renderer; private _clearColorCache; private _viewPortCache; init(renderer: WebGLRenderer, renderTargetSystem: RenderTargetSystem): void; contextChange(): void; copyToTexture(sourceRenderSurfaceTexture: RenderTarget, destinationTexture: Texture, originSrc: { x: number; y: number; }, size: { width: number; height: number; }, originDest: { x: number; y: number; }): Texture>; startRenderPass(renderTarget: RenderTarget, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray, viewport?: Rectangle): void; finishRenderPass(renderTarget?: RenderTarget): void; initGpuRenderTarget(renderTarget: RenderTarget): GlRenderTarget; destroyGpuRenderTarget(gpuRenderTarget: GlRenderTarget): void; clear(_renderTarget: RenderTarget, clear: CLEAR_OR_BOOL, clearColor?: RgbaArray): void; resizeGpuRenderTarget(renderTarget: RenderTarget): void; private _initColor; private _resizeColor; private _initStencil; private _resizeStencil; postrender(renderTarget: RenderTarget): void; } /** * The WebGL adaptor for the render target system. Allows the Render Target System to be used with the WebGl renderer * @memberof rendering */ export declare class GlRenderTargetSystem extends RenderTargetSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "renderTarget"; }; adaptor: GlRenderTargetAdaptor; constructor(renderer: WebGLRenderer); } export type PRECISION = `highp` | `mediump` | `lowp`; /** * @private */ export declare class IGLUniformData { location: WebGLUniformLocation; value: number | boolean | Float32Array | Int32Array | Uint32Array | boolean[]; } /** * Helper class to create a WebGL Program * @private */ export declare class GlProgramData { /** The shader program. */ program: WebGLProgram; /** * Holds the uniform data which contains uniform locations * and current uniform values used for caching and preventing unneeded GPU commands. */ uniformData: Record; /** * UniformGroups holds the various upload functions for the shader. Each uniform group * and program have a unique upload function generated. */ uniformGroups: Record; /** A hash that stores where UBOs are bound to on the program. */ uniformBlockBindings: Record; /** A hash for lazily-generated uniform uploading functions. */ uniformSync: Record; /** * A place where dirty ticks are stored for groups * If a tick here does not match with the Higher level Programs tick, it means * we should re upload the data. */ uniformDirtyGroups: Record; /** * Makes a new Pixi program. * @param program - webgl program * @param uniformData - uniforms */ constructor(program: WebGLProgram, uniformData: { [key: string]: IGLUniformData; }); /** Destroys this program. */ destroy(): void; } export interface ShaderSyncData { textureCount: number; blockIndex: number; } export type ShaderSyncFunction = (renderer: WebGLRenderer, shader: Shader, syncData: ShaderSyncData) => void; /** * System plugin to the renderer to manage the shaders for WebGL. 
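 * A sketch of how a shader is bound through this system (assumes the system is exposed on
 * the WebGL renderer as `renderer.shader`, per its extension name):
 * ```ts
 * renderer.shader.bind(shader);       // bind and sync uniforms
 * renderer.shader.bind(shader, true); // skipSync = true: bind without syncing uniforms
 * ```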
* @memberof rendering */ export declare class GlShaderSystem implements ShaderSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "shader"; }; maxTextures: number; /** * @internal * @private */ _activeProgram: GlProgram; private _programDataHash; private readonly _renderer; _gl: WebGL2RenderingContext; private _maxBindings; private _nextIndex; private _boundUniformsIdsToIndexHash; private _boundIndexToUniformsHash; private _shaderSyncFunctions; constructor(renderer: WebGLRenderer); protected contextChange(gl: GlRenderingContext): void; /** * Changes the current shader to the one given in parameter. * @param shader - the new shader * @param skipSync - false if the shader should automatically sync its uniforms. * @returns the glProgram that belongs to the shader. */ bind(shader: Shader, skipSync?: boolean): void; /** * Updates the uniform group. * @param uniformGroup - the uniform group to update */ updateUniformGroup(uniformGroup: UniformGroup): void; /** * Binds a uniform block to the shader. * @param uniformGroup - the uniform group to bind * @param name - the name of the uniform block * @param index - the index of the uniform block */ bindUniformBlock(uniformGroup: UniformGroup | BufferResource, name: string, index?: number): void; private _setProgram; /** * @param program - the program to get the data for * @internal * @private */ _getProgramData(program: GlProgram): GlProgramData; private _createProgramData; destroy(): void; /** * Creates a function that can be executed that will sync the shader as efficiently as possible. * Overridden by the unsafe eval package if you don't want eval used in your project. * @param shader - the shader to generate the sync function for * @param shaderSystem - the shader system to use * @returns - the generated sync function * @ignore */ _generateShaderSync(shader: Shader, shaderSystem: GlShaderSystem): ShaderSyncFunction; } /** * Generates the a function that will efficiently sync shader resources with the GPU. * @param shader - The shader to generate the code for * @param shaderSystem - An instance of the shader system */ export declare function generateShaderSyncCode(shader: Shader, shaderSystem: GlShaderSystem): ShaderSyncFunction; /** * Automatically generates a uniform group that holds the texture samplers for a shader. * This is used mainly by the shaders that batch textures! * @param maxTextures - the number of textures that this uniform group will contain. * @returns a uniform group that holds the texture samplers. */ export declare function getBatchSamplersUniformGroup(maxTextures: number): UniformGroup; /** * System plugin to the renderer to manage shaders. * @memberof rendering */ export declare class GlUniformGroupSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "uniformGroup"; }; /** * The current WebGL rendering context. * @member {WebGLRenderingContext} */ protected gl: GlRenderingContext; /** Cache to holds the generated functions. Stored against UniformObjects unique signature. */ private _cache; private _renderer; private _uniformGroupSyncHash; /** @param renderer - The renderer this System works for. */ constructor(renderer: WebGLRenderer); protected contextChange(gl: GlRenderingContext): void; /** * Uploads the uniforms values to the currently bound shader. 
* @param group - the uniform values to be applied to the current shader * @param program * @param syncData * @param syncData.textureCount */ updateUniformGroup(group: UniformGroup, program: GlProgram, syncData: { textureCount: number; }): void; /** * Overridable by the pixi.js/unsafe-eval package to use static syncUniforms instead. * @param group * @param program */ private _getUniformSyncFunction; private _createUniformSyncFunction; private _generateUniformsSync; /** * Takes a uniform group and data and generates a unique signature for them. * @param group - The uniform group to get signature of * @param group.uniforms * @param uniformData - Uniform information generated by the shader * @param preFix * @returns Unique signature of the uniform group */ private _getSignature; /** Destroys this System and removes all its textures. */ destroy(): void; } export declare function migrateFragmentFromV7toV8(fragmentShader: string): string; /** * @private * @param {WebGLRenderingContext} gl - The current WebGL context * @param {number} type - the type, can be either VERTEX_SHADER or FRAGMENT_SHADER * @param {string} src - The shader source as a string. * @returns {WebGLShader} the shader */ export declare function compileShader(gl: WebGLRenderingContextBase, type: number, src: string): WebGLShader; /** * @method defaultValue * @param {string} type - Type of value * @param {number} size * @private */ export declare function defaultValue(type: string, size: number): number | Float32Array | Int32Array | Uint32Array | boolean | boolean[]; /** * This function looks at the attribute information provided to the geometry and attempts * to fill in any gaps. We do this by looking at the extracted data from the shader and * making best guesses. * * Most of the time users don't need to provide all the attribute info beyond the data itself, so we * can fill in the gaps for them. If you are using attributes in a more advanced way, * don't forget to add all the info at creation! * @param geometry - the geometry to ensure attributes for * @param extractedData - the extracted data from the shader */ export declare function ensureAttributes(geometry: Geometry, extractedData: Record): void; /** * generates a WebGL Program object from a high level Pixi Program. * @param gl - a rendering context on which to generate the program * @param program - the high level Pixi Program. * @private */ export declare function generateProgram(gl: GlRenderingContext, program: GlProgram): GlProgramData; export declare function getMaxFragmentPrecision(): PRECISION; /** * returns a little WebGL context to use for program inspection.
* @static * @private * @returns {WebGLRenderingContext} a gl context to test with */ export declare function getTestContext(): GlRenderingContext; /** * returns the uniform block data from the program * @private * @param program - the webgl program * @param gl - the WebGL context * @returns {object} the uniform data for this program */ export declare function getUboData(program: WebGLProgram, gl: WebGL2RenderingContext): Record; /** * returns the uniform data from the program * @private * @param program - the webgl program * @param gl - the WebGL context * @returns {object} the uniform data for this program */ export declare function getUniformData(program: WebGLProgram, gl: WebGLRenderingContextBase): { [key: string]: GlUniformData; }; /** * * logs out any program errors * @param gl - The current WebGL context * @param program - the WebGL program to display errors for * @param vertexShader - the vertex WebGL shader program * @param fragmentShader - the fragment WebGL shader program * @private */ export declare function logProgramError(gl: WebGLRenderingContext, program: WebGLProgram, vertexShader: WebGLShader, fragmentShader: WebGLShader): void; /** * @private * @method mapSize * @param {string} type */ export declare function mapSize(type: string): number; export declare function mapType(gl: any, type: number): string; export declare function mapGlToVertexFormat(gl: any, type: number): VertexFormat; export declare function addProgramDefines(src: string, isES300: boolean, isFragment?: boolean): string; interface EnsurePrecisionOptions { requestedVertexPrecision: PRECISION; requestedFragmentPrecision: PRECISION; maxSupportedVertexPrecision: PRECISION; maxSupportedFragmentPrecision: PRECISION; } /** * Sets the float precision on the shader, ensuring the device supports the requested precision. * If the precision is already present, it just ensures that the device is able to handle it.
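 * A sketch of the effect (values illustrative): a fragment source with no precision
 * statement gets one prepended, capped at the maximum the device supports.
 * ```ts
 * const out = ensurePrecision('void main() { gl_FragColor = vec4(1.0); }', {
 *     requestedVertexPrecision: 'highp',
 *     requestedFragmentPrecision: 'highp',
 *     maxSupportedVertexPrecision: 'highp',
 *     maxSupportedFragmentPrecision: 'mediump',
 * }, true);
 * // out should now start with something like: 'precision mediump float;'
 * ```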
* @param src * @param options * @param options.requestedVertexPrecision * @param options.requestedFragmentPrecision * @param options.maxSupportedVertexPrecision * @param options.maxSupportedFragmentPrecision * @param isFragment * @private */ export declare function ensurePrecision(src: string, options: EnsurePrecisionOptions, isFragment: boolean): string; export declare function insertVersion(src: string, isES300: boolean): string; export declare function setProgramName(src: string, { name }: { name: string; }, isFragment?: boolean): string; export declare function stripVersion(src: string, isES300: boolean): string; export declare const WGSL_TO_STD40_SIZE: Record; export declare function createUboElementsSTD40(uniformData: UniformData[]): UboLayout; export declare function createUboSyncFunctionSTD40(uboElements: UboElement[]): UniformsSyncCallback; /** * This generates a function that will sync an array to the uniform buffer * following the std140 layout * @param uboElement - the element to generate the array sync for * @param offsetToAdd - the offset to append at the start of the code * @returns - the generated code */ export declare function generateArraySyncSTD40(uboElement: UboElement, offsetToAdd: number): string; export declare function generateUniformsSync(group: UniformGroup, uniformData: Record): UniformsSyncCallback; export type ArraySetterFunction = (v: any, location: WebGLUniformLocation, gl: any) => void; export declare const UNIFORM_TO_SINGLE_SETTERS: Record; export declare const UNIFORM_TO_ARRAY_SETTERS: Record; /** * System plugin to the renderer to manage WebGL state machines * @memberof rendering */ export declare class GlStateSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "state"; }; /** * State ID * @readonly */ stateId: number; /** * Polygon offset * @readonly */ polygonOffset: number; /** * Blend mode * @default 'none' * @readonly */ blendMode: BLEND_MODES; /** Whether current blend equation is different */ protected _blendEq: boolean; /** * GL context * @member {WebGLRenderingContext} * @readonly */ protected gl: GlRenderingContext; protected blendModesMap: Record; /** * Collection of calls * @member {Function[]} */ protected readonly map: ((value: boolean) => void)[]; /** * Collection of check calls * @member {Function[]} */ protected readonly checks: ((system: this, state: State) => void)[]; /** * Default WebGL State * @readonly */ protected defaultState: State; constructor(); protected contextChange(gl: GlRenderingContext): void; /** * Sets the current state * @param {*} state - The state to set. */ set(state: State): void; /** * Sets the state, when previous state is unknown. * @param {*} state - The state to set */ forceState(state: State): void; /** * Sets whether to enable or disable blending. * @param value - Turn on or off WebGl blending. */ setBlend(value: boolean): void; /** * Sets whether to enable or disable polygon offset fill. * @param value - Turn on or off webgl polygon offset testing. */ setOffset(value: boolean): void; /** * Sets whether to enable or disable depth test. * @param value - Turn on or off webgl depth testing. */ setDepthTest(value: boolean): void; /** * Sets whether to enable or disable depth mask. * @param value - Turn on or off webgl depth mask. */ setDepthMask(value: boolean): void; /** * Sets whether to enable or disable cull face. * @param {boolean} value - Turn on or off webgl cull face. 
*/ setCullFace(value: boolean): void; /** * Sets the gl front face. * @param {boolean} value - true is clockwise and false is counter-clockwise */ setFrontFace(value: boolean): void; /** * Sets the blend mode. * @param {number} value - The blend mode to set to. */ setBlendMode(value: BLEND_MODES): void; /** * Sets the polygon offset. * @param {number} value - the polygon offset * @param {number} scale - the polygon offset scale */ setPolygonOffset(value: number, scale: number): void; /** Resets all the logic and disables the VAOs. */ reset(): void; /** * Checks to see which updates should be checked based on which settings have been activated. * * For example, if blend is enabled then we should check the blend modes each time the state is changed, * or if polygon fill is activated then we need to check if the polygon offset changes. * The idea is that we only check what we have to. * @param func - the checking function to add or remove * @param value - should the check function be added or removed. */ private _updateCheck; /** * A private little wrapper function that we call to check the blend mode. * @param system - the System to perform the state check on * @param state - the state that the blendMode will be pulled from */ private static _checkBlendMode; /** * A private little wrapper function that we call to check the polygon offset. * @param system - the System to perform the state check on * @param state - the state that the polygon offset will be pulled from */ private static _checkPolygonOffset; /** * @ignore */ destroy(): void; } /** * Maps WebGL blend modes to PixiJS blend modes. * @param gl * @returns {object} Map of WebGL blend modes to PixiJS blend modes. */ export declare function mapWebGLBlendModesToPixi(gl: GlRenderingContext): Record; /** * Various GL texture/resource formats. * @static * @name FORMATS * @enum {number} * @property {number} [RGBA=6408] - * @property {number} [RGB=6407] - * @property {number} [RG=33319] - * @property {number} [RED=6403] - * @property {number} [RGBA_INTEGER=36249] - * @property {number} [RGB_INTEGER=36248] - * @property {number} [RG_INTEGER=33320] - * @property {number} [RED_INTEGER=36244] - * @property {number} [ALPHA=6406] - * @property {number} [LUMINANCE=6409] - * @property {number} [LUMINANCE_ALPHA=6410] - * @property {number} [DEPTH_COMPONENT=6402] - * @property {number} [DEPTH_STENCIL=34041] - */ export declare enum GL_FORMATS { RGBA = 6408, RGB = 6407, RG = 33319, RED = 6403, RGBA_INTEGER = 36249, RGB_INTEGER = 36248, RG_INTEGER = 33320, RED_INTEGER = 36244, ALPHA = 6406, LUMINANCE = 6409, LUMINANCE_ALPHA = 6410, DEPTH_COMPONENT = 6402, DEPTH_STENCIL = 34041 } /** * Various GL target types.
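 *
 * These enums mirror the raw WebGL constant values, so they can be compared directly with a
 * context's constants (a small illustration, not part of the generated API):
 * ```ts
 * GL_TARGETS.TEXTURE_2D === WebGLRenderingContext.TEXTURE_2D; // true, both are 3553
 * GL_FORMATS.RGBA === WebGLRenderingContext.RGBA;             // true, both are 6408
 * ```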
* @static * @name TARGETS * @enum {number} * @property {number} [TEXTURE_2D=3553] - * @property {number} [TEXTURE_CUBE_MAP=34067] - * @property {number} [TEXTURE_2D_ARRAY=35866] - * @property {number} [TEXTURE_CUBE_MAP_POSITIVE_X=34069] - * @property {number} [TEXTURE_CUBE_MAP_NEGATIVE_X=34070] - * @property {number} [TEXTURE_CUBE_MAP_POSITIVE_Y=34071] - * @property {number} [TEXTURE_CUBE_MAP_NEGATIVE_Y=34072] - * @property {number} [TEXTURE_CUBE_MAP_POSITIVE_Z=34073] - * @property {number} [TEXTURE_CUBE_MAP_NEGATIVE_Z=34074] - */ export declare enum GL_TARGETS { TEXTURE_2D = 3553, TEXTURE_CUBE_MAP = 34067, TEXTURE_2D_ARRAY = 35866, TEXTURE_CUBE_MAP_POSITIVE_X = 34069, TEXTURE_CUBE_MAP_NEGATIVE_X = 34070, TEXTURE_CUBE_MAP_POSITIVE_Y = 34071, TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072, TEXTURE_CUBE_MAP_POSITIVE_Z = 34073, TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074 } /** * The wrap modes that are supported by pixi. * * The {@link settings.WRAP_MODE} wrap mode affects the default wrapping mode of future operations. * It can be re-assigned to either CLAMP or REPEAT, depending upon suitability. * If the texture is non power of two then clamp will be used regardless as WebGL can * only use REPEAT if the texture is po2. * * This property only affects WebGL. * @name WRAP_MODES * @static * @enum {number} * @property {number} CLAMP - The textures uvs are clamped * @property {number} REPEAT - The texture uvs tile and repeat * @property {number} MIRRORED_REPEAT - The texture uvs tile and repeat with mirroring */ export declare enum GL_WRAP_MODES { CLAMP = 33071, REPEAT = 10497, MIRRORED_REPEAT = 33648 } export declare enum GL_TYPES { /** * 8 bits per channel for gl.RGBA * @default 5121 */ UNSIGNED_BYTE = 5121, /** * @default 5123 */ UNSIGNED_SHORT = 5123, /** * 5 red bits, 6 green bits, 5 blue bits. * @default 33635 */ UNSIGNED_SHORT_5_6_5 = 33635, /** * 4 red bits, 4 green bits, 4 blue bits, 4 alpha bits. * @default 32819 */ UNSIGNED_SHORT_4_4_4_4 = 32819, /** * 5 red bits, 5 green bits, 5 blue bits, 1 alpha bit. * @default 32820 */ UNSIGNED_SHORT_5_5_5_1 = 32820, /** * @default 5125 */ UNSIGNED_INT = 5125, /** * @default 35899 */ UNSIGNED_INT_10F_11F_11F_REV = 35899, /** * @default 33640 */ UNSIGNED_INT_2_10_10_10_REV = 33640, /** * @default 34042 */ UNSIGNED_INT_24_8 = 34042, /** * @default 35902 */ UNSIGNED_INT_5_9_9_9_REV = 35902, /** * @default 5120 */ BYTE = 5120, /** * @default 5122 */ SHORT = 5122, /** * @default 5124 */ INT = 5124, /** * @default 5126 */ FLOAT = 5126, /** * @default 36269 */ FLOAT_32_UNSIGNED_INT_24_8_REV = 36269, /** * @default 36193 */ HALF_FLOAT = 36193 } /** * Internal texture for WebGL context * @memberof rendering * @ignore */ export declare class GlTexture { target: GL_TARGETS; /** The WebGL texture. */ texture: WebGLTexture; /** Width of texture that was used in texImage2D. */ width: number; /** Height of texture that was used in texImage2D. */ height: number; /** Whether mip levels has to be generated. */ mipmap: boolean; /** Type copied from texture source. */ type: number; /** Type copied from texture source. */ internalFormat: number; /** Type of sampler corresponding to this texture. See {@link SAMPLER_TYPES} */ samplerType: number; format: GL_FORMATS; constructor(texture: WebGLTexture); } /** * The system for managing textures in WebGL. 
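 *
 * A hedged usage sketch (assumes the WebGL renderer exposes this system as `renderer.texture`,
 * per the `name: "texture"` extension metadata below):
 * ```ts
 * // bind a texture to texture unit 0 before issuing a draw call
 * renderer.texture.bind(texture, 0);
 * // read the texture's pixels back from the GPU
 * const output = renderer.texture.getPixels(texture);
 * ```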
* @memberof rendering */ export declare class GlTextureSystem implements System$1, CanvasGenerator { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "texture"; }; readonly managedTextures: TextureSource[]; private readonly _renderer; private _glTextures; private _glSamplers; private _boundTextures; private _activeTextureLocation; private _boundSamplers; private readonly _uploads; private _gl; private _mapFormatToInternalFormat; private _mapFormatToType; private _mapFormatToFormat; private readonly _useSeparateSamplers; constructor(renderer: WebGLRenderer); protected contextChange(gl: GlRenderingContext): void; initSource(source: TextureSource): void; bind(texture: BindableTexture, location?: number): void; bindSource(source: TextureSource, location?: number): void; private _bindSampler; unbind(texture: BindableTexture): void; private _activateLocation; private _initSource; protected onStyleChange(source: TextureSource): void; protected updateStyle(source: TextureSource, firstCreation: boolean): void; protected onSourceUnload(source: TextureSource): void; protected onSourceUpdate(source: TextureSource): void; protected onUpdateMipmaps(source: TextureSource, bind?: boolean): void; protected onSourceDestroy(source: TextureSource): void; private _initSampler; private _getGlSampler; getGlSource(source: TextureSource): GlTexture; generateCanvas(texture: Texture): ICanvas; getPixels(texture: Texture): GetPixelsOutput; destroy(): void; } export interface GLTextureUploader { id: string; upload(source: TextureSource, glTexture: GlTexture, gl: GlRenderingContext, webGLVersion: number): void; } export declare const glUploadBufferImageResource: GLTextureUploader; export declare const glUploadCompressedTextureResource: GLTextureUploader; export declare const glUploadImageResource: GLTextureUploader; export declare const glUploadVideoResource: GLTextureUploader; export declare function applyStyleParams(style: TextureStyle, gl: WebGL2RenderingContext, mipmaps: boolean, anisotropicExt: EXT_texture_filter_anisotropic, glFunctionName: "samplerParameteri" | "texParameteri", firstParam: 3553 | WebGLSampler, forceClamp: boolean, /** if true we can skip setting certain values if the values is the same as the default gl values */ firstCreation: boolean): void; export declare function getSupportedGlCompressedTextureFormats(): TEXTURE_FORMATS[]; /** * Returns a lookup table that maps each type-format pair to a compatible internal format. * @function mapTypeAndFormatToInternalFormat * @private * @param {WebGLRenderingContext} gl - The rendering context. * @returns Lookup table. */ export declare function mapFormatToGlFormat(gl: GlRenderingContext): Record; /** * Returns a lookup table that maps each type-format pair to a compatible internal format. * @function mapTypeAndFormatToInternalFormat * @private * @param gl - The rendering context. * @param extensions - The WebGL extensions. * @returns Lookup table. */ export declare function mapFormatToGlInternalFormat(gl: GlRenderingContext, extensions: WebGLExtensions): Record; /** * Returns a lookup table that maps each type-format pair to a compatible internal format. * @function mapTypeAndFormatToInternalFormat * @private * @param {WebGLRenderingContext} gl - The rendering context. * @returns Lookup table. 
*/ export declare function mapFormatToGlType(gl: GlRenderingContext): Record; export declare const scaleModeToGlFilter: { linear: number; nearest: number; }; export declare const mipmapScaleModeToGlFilter: { linear: { linear: number; nearest: number; }; nearest: { linear: number; nearest: number; }; }; export declare const wrapModeToGlAddress: { "clamp-to-edge": number; repeat: number; "mirror-repeat": number; }; export declare const compareModeToGlCompare: { never: number; less: number; equal: number; "less-equal": number; greater: number; "not-equal": number; "greater-equal": number; always: number; }; export declare function unpremultiplyAlpha(pixels: Uint8Array | Uint8ClampedArray): void; export declare function GpuReadBuffer(buffer: Buffer$1, renderer: WebGPURenderer): void; export declare class UboBatch { private _buffer; data: Float32Array; private readonly _minUniformOffsetAlignment; byteIndex: number; constructor({ minUniformOffsetAlignment }: { minUniformOffsetAlignment: number; }); clear(): void; addEmptyGroup(size: number): number; addGroup(array: Float32Array): number; destroy(): void; } export declare function calculateProjection(pm: Matrix, x: number, y: number, width: number, height: number, flipY: boolean): Matrix; export declare const WGSL_ALIGN_SIZE_DATA: Record; export declare function createUboElementsWGSL(uniformData: UniformData[]): UboLayout; export declare function createUboSyncFunctionWGSL(uboElements: UboElement[]): UniformsSyncCallback; export declare function extractAttributesFromGpuProgram({ source, entryPoint }: ProgramSource): Record; /** * This generates a function that will sync an array to the uniform buffer * following the wgsl layout * @param uboElement - the element to generate the array sync for * @param offsetToAdd - the offset to append at the start of the code * @returns - the generated code */ export declare function generateArraySyncWGSL(uboElement: UboElement, offsetToAdd: number): string; export declare function generateGpuLayoutGroups({ groups }: StructsAndGroups): ProgramPipelineLayoutDescription; export declare function generateLayoutHash({ groups }: StructsAndGroups): ProgramLayout; export declare function removeStructAndGroupDuplicates(vertexStructsAndGroups: StructsAndGroups, fragmentStructsAndGroups: StructsAndGroups): { structs: { name: string; members: Record; }[]; groups: { group: number; binding: number; name: string; isUniform: boolean; type: string; }[]; }; export declare const GpuBlendModesToPixi: Partial>; export interface StencilState { stencilWriteMask?: number; stencilReadMask?: number; stencilFront?: { compare: "always" | "equal"; passOp: "increment-clamp" | "decrement-clamp" | "keep"; }; stencilBack?: { compare: "always" | "equal"; passOp: "increment-clamp" | "decrement-clamp" | "keep"; }; } export declare const GpuStencilModesToPixi: StencilState[]; export interface GpuTextureUploader { type: string; upload(source: T, gpuTexture: GPUTexture, gpu: GPU$1): void; } export declare const gpuUploadBufferImageResource: GpuTextureUploader; export declare class CompressedSource extends TextureSource { readonly uploadMethodId = "compressed"; constructor(options: TextureSourceOptions); } export declare const blockDataMap: Record; export declare const gpuUploadCompressedTextureResource: GpuTextureUploader; export declare const gpuUploadImageResource: GpuTextureUploader>; type VideoResource = HTMLVideoElement; /** * Options for video sources. 
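 *
 * A brief illustration of the option bag, using only the fields documented below
 * (the values are hypothetical; the import assumes the type is re-exported from the package entry point):
 * ```ts
 * import type { VideoSourceOptions } from 'pixi.js';
 *
 * const options: VideoSourceOptions = {
 *     autoPlay: true,
 *     loop: true,
 *     muted: true,
 *     updateFPS: 30, // cap texture updates at ~30fps
 * };
 * ```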
* @memberof rendering */ export interface VideoSourceOptions extends TextureSourceOptions { /** If true, the video will start loading immediately. */ autoLoad?: boolean; /** If true, the video will start playing as soon as it is loaded. */ autoPlay?: boolean; /** The number of times a second to update the texture from the video. Leave at 0 to update at every render. */ updateFPS?: number; /** If true, the video will be loaded with the `crossorigin` attribute. */ crossorigin?: boolean | string; /** If true, the video will loop when it ends. */ loop?: boolean; /** If true, the video will be muted. */ muted?: boolean; /** If true, the video will play inline. */ playsinline?: boolean; /** If true, the video will be preloaded. */ preload?: boolean; /** The time in milliseconds to wait for the video to preload before timing out. */ preloadTimeoutMs?: number; /** The alpha mode of the video. */ alphaMode?: ALPHA_MODES; } export interface VideoResourceOptionsElement { src: string; mime: string; } /** * A source for video-based textures. * @memberof rendering */ export declare class VideoSource extends TextureSource { static extension: ExtensionMetadata; /** The default options for video sources. */ static defaultOptions: VideoSourceOptions; /** Whether or not the video is ready to play. */ isReady: boolean; /** The upload method for this texture. */ uploadMethodId: string; /** * When set to true will automatically play videos used by this texture once * they are loaded. If false, it will not modify the playing state. * @default true */ protected autoPlay: boolean; /** * `true` to use Ticker.shared to auto update the base texture. * @default true */ private _autoUpdate; /** * `true` if the instance is currently connected to Ticker.shared to auto update the base texture. * @default false */ private _isConnectedToTicker; /** * Promise when loading. * @default null */ private _load; private _msToNextUpdate; private _preloadTimeout; /** Callback when completed with load. */ private _resolve; private _reject; private _updateFPS; private _videoFrameRequestCallbackHandle; constructor(options: VideoSourceOptions); /** Update the video frame if the source is not destroyed and meets certain conditions. */ protected updateFrame(): void; /** Callback to update the video frame and potentially request the next frame update. */ private _videoFrameRequestCallback; /** * Checks if the resource has valid dimensions. * @returns {boolean} True if width and height are set, otherwise false. */ get isValid(): boolean; /** * Start preloading the video resource. * @returns {Promise} Handle the validate event */ load(): Promise; /** * Handle video error events. * @param event - The error event */ private _onError; /** * Checks if the underlying source is playing. * @returns True if playing. */ private _isSourcePlaying; /** * Checks if the underlying source is ready for playing. * @returns True if ready. */ private _isSourceReady; /** Runs the update loop when the video is ready to play. */ private _onPlayStart; /** Stops the update loop when a pause event is triggered. */ private _onPlayStop; /** Handles behavior when the video completes seeking to the current playback position. */ private _onSeeked; private _onCanPlay; private _onCanPlayThrough; /** Fired when the video is loaded and ready to play. */ private _mediaReady; /** Cleans up resources and event listeners associated with this texture. */ destroy(): void; /** Should the base texture automatically update itself, set to true by default. 
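 *
 * For example (a small sketch; `videoSource` stands for any `VideoSource` instance):
 * ```ts
 * videoSource.autoUpdate = false; // stop automatic texture updates
 * videoSource.updateFPS = 24;     // or keep them, but throttle to ~24fps
 * ```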
*/ get autoUpdate(): boolean; set autoUpdate(value: boolean); /** * How many times a second to update the texture from the video. * Leave at 0 to update at every render. * A lower fps can help performance, as updating the texture at 60fps on a 30fps video may not be efficient. */ get updateFPS(): number; set updateFPS(value: number); /** * Configures the updating mechanism based on the current state and settings. * * This method decides between using the browser's native video frame callback or a custom ticker * for updating the video frame. It ensures optimal performance and responsiveness * based on the video's state, playback status, and the desired frames-per-second setting. * * - If `_autoUpdate` is enabled and the video source is playing: * - It will prefer the native video frame callback if available and no specific FPS is set. * - Otherwise, it will use a custom ticker for manual updates. * - If `_autoUpdate` is disabled or the video isn't playing, any active update mechanisms are halted. */ private _configureAutoUpdate; /** * Map of video MIME types that can't be directly derived from file extensions. * @readonly */ static MIME_TYPES: Dict; static test(resource: any): resource is VideoResource; } export declare const gpuUploadVideoResource: GpuTextureUploader; export declare function getSupportedGPUCompressedTextureFormats(): Promise; /** * A class which generates mipmaps for a GPUTexture. * Thanks to @toji for the original implementation * https://github.com/toji/web-texture-tool/blob/main/src/webgpu-mipmap-generator.js * @memberof rendering * @ignore */ export declare class GpuMipmapGenerator { device: GPUDevice; sampler: GPUSampler; pipelines: Record; mipmapShaderModule: any; constructor(device: GPUDevice); private _getMipmapPipeline; /** * Generates mipmaps for the given GPUTexture from the data in level 0. * @param {module:External.GPUTexture} texture - Texture to generate mipmaps for. * @returns {module:External.GPUTexture} - The originally passed texture */ generateMipmap(texture: GPUTexture): GPUTexture; } interface AdvancedBlendInstruction extends Instruction { renderPipeId: "blendMode"; blendMode: BLEND_MODES; activeBlend: Renderable[]; } /** * This Pipe handles the blend mode switching of the renderer. * It will insert instructions into the {@link renderers.InstructionSet} to switch the blend mode according to the * blend modes of the scene graph. * * This pipe is where we handle advanced blend modes. Advanced blend modes essentially wrap the renderables * in a filter that applies the blend mode. * * You only need to use this class if you are building your own render instruction set rather than letting PixiJS build * the instruction set for you by traversing the scene graph. * @memberof rendering */ export declare class BlendModePipe implements InstructionPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "blendMode"; }; private _renderer; private _renderableList; private _activeBlendMode; private _isAdvanced; private _filterHash; constructor(renderer: Renderer); /** * This ensures that a blendMode switch is added to the instruction set if the blend mode has changed.
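 *
 * A hedged sketch of how a custom instruction builder might call this (the pipe is assumed to be
 * reachable as `renderer.renderPipes.blendMode`, per the `name: "blendMode"` extension metadata above):
 * ```ts
 * renderer.renderPipes.blendMode.setBlendMode(renderable, 'multiply', instructionSet);
 * ```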
* @param renderable - The renderable we are adding to the instruction set * @param blendMode - The blend mode of the renderable * @param instructionSet - The instruction set we are adding to */ setBlendMode(renderable: Renderable, blendMode: BLEND_MODES, instructionSet: InstructionSet): void; private _beginAdvancedBlendMode; private _endAdvancedBlendMode; /** * called when the instruction build process is starting this will reset internally to the default blend mode * @internal * @ignore */ buildStart(): void; /** * called when the instruction build process is finished, ensuring that if there is an advanced blend mode * active, we add the final render instructions added to the instruction set * @param instructionSet - The instruction set we are adding to * @internal * @ignore */ buildEnd(instructionSet: InstructionSet): void; /** * @internal * @ignore */ destroy(): void; } /** * Copies from one buffer to another. * This is an optimised function that will use `Float64Array` window. * This means it can copy twice as fast! * @param sourceBuffer - the array buffer to copy from * @param destinationBuffer - the array buffer to copy to * @private */ export declare function fastCopy(sourceBuffer: ArrayBuffer, destinationBuffer: ArrayBuffer): void; declare const imageTypes: { png: string; jpg: string; webp: string; }; type Formats = keyof typeof imageTypes; /** * Options for creating an image from a renderer. * @memberof rendering */ export interface ImageOptions { /** The format of the image. */ format?: Formats; /** The quality of the image. */ quality?: number; } /** * Options for extracting content from a renderer. * @memberof rendering */ export interface BaseExtractOptions { /** The target to extract. */ target: Container | Texture; /** The region of the target to extract. */ frame?: Rectangle; /** The resolution of the extracted content. */ resolution?: number; /** The color used to clear the extracted content. */ clearColor?: ColorSource; /** Whether to enable anti-aliasing. This may affect performance. */ antialias?: boolean; } /** * Options for extracting an HTMLImage from the renderer. * @memberof rendering */ export type ExtractImageOptions = BaseExtractOptions & ImageOptions; /** * Options for extracting and downloading content from a renderer. * @memberof rendering */ export type ExtractDownloadOptions = BaseExtractOptions & { /** The filename to use when downloading the content. */ filename: string; }; /** * Options for extracting content from a renderer. * @memberof rendering */ export type ExtractOptions = BaseExtractOptions | ExtractImageOptions | ExtractDownloadOptions; /** * This class provides renderer-specific plugins for exporting content from a renderer. * For instance, these plugins can be used for saving an Image, Canvas element or for exporting the raw image data (pixels). * * Do not instantiate these plugins directly. It is available from the `renderer.extract` property. 
* @example * import { Application, Graphics } from 'pixi.js'; * * // Create a new application (extract will be auto-added to renderer) * const app = new Application(); * await app.init(); * * // Draw a red circle * const graphics = new Graphics() * .circle(0, 0, 50) * .fill(0xFF0000); * * // Render the graphics as an HTMLImageElement * const image = await app.renderer.extract.image(graphics); * document.body.appendChild(image); * @memberof rendering */ export declare class ExtractSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "extract"; }; /** Default options for creating an image. */ static defaultImageOptions: ImageOptions; private _renderer; /** @param renderer - The renderer this System works for. */ constructor(renderer: Renderer); private _normalizeOptions; /** * Will return an HTML Image of the target * @param options - The options for creating the image, or the target to extract * @returns - HTML Image of the target */ image(options: ExtractImageOptions | Container | Texture): Promise; /** * Will return a base64 encoded string of this target. It works by calling * `Extract.canvas` and then running toDataURL on that. * @param options - The options for creating the image, or the target to extract */ base64(options: ExtractImageOptions | Container | Texture): Promise; /** * Creates a Canvas element, renders this target to it and then returns it. * @param options - The options for creating the canvas, or the target to extract * @returns - A Canvas element with the texture rendered on. */ canvas(options: ExtractOptions | Container | Texture): ICanvas; /** * Will return a one-dimensional array containing the pixel data of the entire texture in RGBA * order, with integer values between 0 and 255 (inclusive). * @param options - The options for extracting the image, or the target to extract * @returns - One-dimensional array containing the pixel data of the entire texture */ pixels(options: ExtractOptions | Container | Texture): GetPixelsOutput; /** * Will return a texture of the target * @param options - The options for creating the texture, or the target to extract * @returns - A texture of the target */ texture(options: ExtractOptions | Container | Texture): Texture; /** * Will extract an HTML Image of the target and download it * @param options - The options for downloading and extracting the image, or the target to extract */ download(options: ExtractDownloadOptions | Container | Texture): void; /** * Logs the target to the console as an image. This is a useful way to debug what's happening in the renderer. * @param options - The options for logging the image, or the target to log */ log(options: (ExtractOptions & { width?: number; }) | Container | Texture): void; destroy(): void; } /** * Takes a vertices array and a matrix and transforms the vertices based on the matrix.
* This output is written to the uvs array. * @param vertices - the vertices to calculate uvs from * @param verticesStride - the stride of the vertices * @param verticesOffset - the offset of the vertices * @param uvs - the uvs to fill * @param uvsOffset - the offset of the uvs * @param uvsStride - the stride of the uvs * @param size - the size of the vertices * @param matrix - the matrix to apply to the uvs * @memberof rendering */ export declare function buildUvs(vertices: number[], verticesStride: number, verticesOffset: number, uvs: number[], uvsOffset: number, uvsStride: number, size: number, matrix?: Matrix): void; export declare function buildSimpleUvs(uvs: number[], uvsOffset: number, uvsStride: number, size: number): void; /** * Converts something into a buffer. If it is already a buffer it will pass it through; * if it is a number array it will convert it to a Float32Array before being passed into a buffer. * The buffer will be created with the correct usage flags for geometry attributes. * @param buffer - a buffer, typed array, or number array * @param index - is this an index buffer? * @returns a buffer * @memberof rendering */ export declare function ensureIsBuffer(buffer: Buffer$1 | TypedArray | number[], index: boolean): Buffer$1; export declare function getAttributeInfoFromFormat(format: VertexFormat): { size: number; stride: number; normalised: boolean; }; /** * Gets the 2D bounds of a geometry, based on a specific attribute. * @param geometry - Geometry to measure * @param attributeId - AttributeId that contains the x,y data * @param bounds - Bounds to store the result in * @returns the bounds */ export declare function getGeometryBounds(geometry: Geometry, attributeId: string, bounds: Bounds): Bounds; /** * Transforms the vertices in an array with the given matrix. * @param vertices - the vertices to transform * @param m - the matrix to apply to the vertices * @param offset - the offset of the vertices (defaults to 0) * @param stride - the stride of the vertices (defaults to 2) * @param size - the size of the vertices (defaults to vertices.length / stride - offset) * @memberof rendering */ export declare function transformVertices(vertices: number[], m: Matrix, offset?: number, stride?: number, size?: number): void; export type GlobalUniformGroup = UniformGroup<{ uProjectionMatrix: { value: Matrix; type: "mat3x3"; }; uWorldTransformMatrix: { value: Matrix; type: "mat3x3"; }; uWorldColorAlpha: { value: Float32Array; type: "vec4"; }; uResolution: { value: number[]; type: "vec2"; }; }>; export interface GlobalUniformOptions { size?: number[]; projectionMatrix?: Matrix; worldTransformMatrix?: Matrix; worldColor?: number; offset?: PointData; } export interface GlobalUniformData { projectionMatrix: Matrix; worldTransformMatrix: Matrix; worldColor: number; resolution: number[]; offset: PointData; bindGroup: BindGroup; } export interface GlobalUniformRenderer { renderTarget: GlRenderTargetSystem | GpuRenderTargetSystem; renderPipes: Renderer["renderPipes"]; ubo: UboSystem; type: RendererType; } /** * System plugin to the renderer to manage global uniforms for the renderer.
* @memberof rendering */ export declare class GlobalUniformSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "globalUniforms"; }; private readonly _renderer; private _stackIndex; private _globalUniformDataStack; private readonly _uniformsPool; private readonly _activeUniforms; private readonly _bindGroupPool; private readonly _activeBindGroups; private _currentGlobalUniformData; constructor(renderer: GlobalUniformRenderer); reset(): void; start(options: GlobalUniformOptions): void; bind({ size, projectionMatrix, worldTransformMatrix, worldColor, offset, }: GlobalUniformOptions): void; push(options: GlobalUniformOptions): void; pop(): void; get bindGroup(): BindGroup; get uniformGroup(): UniformGroup; private _createUniforms; destroy(): void; } /** * Checks if the render target is viewable on the screen * Basically, is it a canvas element and is that canvas element in the DOM * @param renderTarget - the render target to check * @returns true if the render target is viewable on the screen */ export declare function isRenderingToScreen(renderTarget: RenderTarget): boolean; /** * Takes a Texture source and a normalised frame * and returns a viewport for that frame. * @param viewport - The viewport rectangle to set. * @param source - The source to get the pixel width and height from. * @param frame - The frame to get the viewport from. * @returns the passed in viewport. */ export declare function viewportFromFrame(viewport: Rectangle, source: TextureSource, frame?: Rectangle): Rectangle; /** * The SchedulerSystem manages scheduled tasks with specific intervals. * @memberof rendering */ export declare class SchedulerSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "scheduler"; readonly priority: 0; }; private readonly _tasks; /** Initializes the scheduler system and starts the ticker. */ init(): void; /** * Schedules a repeating task. * @param func - The function to execute. * @param duration - The interval duration in milliseconds. * @returns The unique identifier for the scheduled task. */ repeat(func: (elapsed: number) => void, duration: number): number; /** * Cancels a scheduled task. * @param id - The unique identifier of the task to cancel. */ cancel(id: number): void; /** * Updates and executes the scheduled tasks. * @private */ private _update; /** * Destroys the scheduler system and removes all tasks. 
* @internal * @ignore */ destroy(): void; } export declare enum ShaderStage { VERTEX = 1, FRAGMENT = 2, COMPUTE = 4 } export declare function createUboSyncFunction(uboElements: UboElement[], parserCode: "uboWgsl" | "uboStd40", arrayGenerationFunction: (uboElement: UboElement, offsetToAdd: number) => string, singleSettersMap: Record): UniformsSyncCallback; /** * @method defaultValue * @param {string} type - Type of value * @param {number} size * @private */ export declare function getDefaultUniformValue(type: string, size: number): number | Float32Array | Int32Array | Uint32Array | boolean | boolean[]; export declare const uboSyncFunctionsSTD40: Record; export declare const uboSyncFunctionsWGSL: Record; export interface UniformParserDefinition { type: UNIFORM_TYPES; test(data: UniformData): boolean; ubo?: string; uboWgsl?: string; uboStd40?: string; uniform?: string; } export declare const uniformParsers: UniformParserDefinition[]; /** * Options for the startup system. * @property {boolean} [hello=false] - Whether to log the version and type information of renderer to console. * @memberof rendering */ export interface HelloSystemOptions { /** * Whether to log the version and type information of renderer to console. * @memberof rendering.SharedRendererOptions * @default false */ hello: boolean; } /** * A simple system responsible for initiating the renderer. * @memberof rendering */ export declare class HelloSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "hello"; readonly priority: -2; }; /** The default options for the system. */ static defaultOptions: HelloSystemOptions; private readonly _renderer; constructor(renderer: Renderer); /** * It all starts here! This initiates every system, passing in the options for any system by name. * @param options - the config for the renderer and all its systems */ init(options: HelloSystemOptions): void; } /** * Adjusts a blend mode for the current alpha mode. Returns the blend mode that works with that format. * eg 'normal' blend mode will return 'normal-npm' when rendering with premultiplied alpha. * and 'normal' if the texture is already premultiplied (the default) * @param blendMode - The blend mode to get the adjusted blend mode for. * @param textureSource - The texture to test the format of. * @returns - the blend mode that should be used to render this texture correctly based on its alphaMode */ export declare function getAdjustedBlendModeBlend(blendMode: BLEND_MODES, textureSource: TextureSource): BLEND_MODES; export interface CanvasAndContext { canvas: ICanvas; context: ICanvasRenderingContext2D; } /** * Texture pool, used by FilterSystem and plugins. * * Stores collection of temporary pow2 or screen-sized renderTextures * * If you use custom RenderTexturePool for your filters, you can use methods * `getFilterTexture` and `returnFilterTexture` same as in * @name CanvasPool * @memberof rendering */ export declare class CanvasPoolClass { canvasOptions: ICanvasRenderingContext2DSettings; /** * Allow renderTextures of the same size as screen, not just pow2 * * Automatically sets to true after `setScreenSize` * @default false */ enableFullScreen: boolean; private _canvasPool; constructor(canvasOptions?: ICanvasRenderingContext2DSettings); /** * Creates texture with params that were specified in pool constructor. * @param pixelWidth - Width of texture in pixels. 
* @param pixelHeight - Height of texture in pixels. */ private _createCanvasAndContext; /** * Gets a Power-of-Two render texture or fullScreen texture * @param minWidth - The minimum width of the render texture. * @param minHeight - The minimum height of the render texture. * @param resolution - The resolution of the render texture. * @returns The new render texture. */ getOptimalCanvasAndContext(minWidth: number, minHeight: number, resolution?: number): CanvasAndContext; /** * Place a render texture back into the pool. * @param canvasAndContext */ returnCanvasAndContext(canvasAndContext: CanvasAndContext): void; clear(): void; } export declare const CanvasPool: CanvasPoolClass; /** * Options for the {@link RenderableGCSystem}. * @memberof rendering * @property {boolean} [renderableGCActive=true] - If set to true, this will enable the garbage collector on the renderables. * @property {number} [renderableGCMaxUnusedTime=60000] - * The maximum idle time before a renderable's resources are released by garbage collection. * @property {number} [renderableGCFrequency=60000] - Time between two garbage collections. */ export interface RenderableGCSystemOptions { /** * If set to true, this will enable the garbage collector on the GPU. * @default true * @memberof rendering.SharedRendererOptions */ renderableGCActive: boolean; /** * The maximum idle frames before a texture is destroyed by garbage collection. * @default 60 * 60 * @memberof rendering.SharedRendererOptions */ renderableGCMaxUnusedTime: number; /** * Frames between two garbage collections. * @default 600 * @memberof rendering.SharedRendererOptions */ renderableGCFrequency: number; } /** * System plugin to the renderer to manage renderable garbage collection. When rendering, * the renderer will assign resources to each renderable. This could be for example * a batchable Sprite, or a text texture. If the renderable is not used for a certain amount of time * its resources will be tidied up by its render pipe. * @memberof rendering */ export declare class RenderableGCSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "renderableGC"; }; /** default options for the renderableGCSystem */ static defaultOptions: RenderableGCSystemOptions; /** * Maximum idle frames before a texture is destroyed by garbage collection. * @see renderableGCSystem.defaultMaxIdle */ maxUnusedTime: number; private _renderer; private readonly _managedRenderables; private _handler; private _frequency; private _now; /** @param renderer - The renderer this System works for. */ constructor(renderer: Renderer); init(options: RenderableGCSystemOptions): void; get enabled(): boolean; set enabled(value: boolean); prerender(): void; addRenderable(renderable: Renderable, instructionSet: InstructionSet): void; /** Runs the scheduled garbage collection */ run(): void; destroy(): void; private _removeRenderable; } /** * Options for the {@link TextureGCSystem}. * @memberof rendering * @property {boolean} [textureGCActive=true] - If set to true, this will enable the garbage collector on the GPU. * @property {number} [textureGCMaxIdle=60 * 60] - * The maximum idle frames before a texture is destroyed by garbage collection. * @property {number} [textureGCCheckCountMax=600] - Frames between two garbage collections. */ export interface TextureGCSystemOptions { /** * If set to true, this will enable the garbage collector on the GPU.
* @default true * @memberof rendering.SharedRendererOptions */ textureGCActive: boolean; /** * @deprecated since 8.3.0 * @see {@link TextureGCSystem.textureGCMaxIdle} * @memberof rendering.SharedRendererOptions */ textureGCAMaxIdle: number; /** * The maximum idle frames before a texture is destroyed by garbage collection. * @default 60 * 60 * @memberof rendering.SharedRendererOptions */ textureGCMaxIdle: number; /** * Frames between two garbage collections. * @default 600 * @memberof rendering.SharedRendererOptions */ textureGCCheckCountMax: number; } /** * System plugin to the renderer to manage texture garbage collection on the GPU, * ensuring that it does not get clogged up with textures that are no longer being used. * @memberof rendering */ export declare class TextureGCSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "textureGC"; }; /** default options for the TextureGCSystem */ static defaultOptions: TextureGCSystemOptions; /** * Frame count since started. * @readonly */ count: number; /** * Frame count since last garbage collection. * @readonly */ checkCount: number; /** * Maximum idle frames before a texture is destroyed by garbage collection. * @see TextureGCSystem.defaultMaxIdle */ maxIdle: number; /** * Frames between two garbage collections. * @see TextureGCSystem.defaultCheckCountMax */ checkCountMax: number; /** * Current garbage collection mode. * @see TextureGCSystem.defaultMode */ active: boolean; private _renderer; /** @param renderer - The renderer this System works for. */ constructor(renderer: Renderer); init(options: TextureGCSystemOptions): void; /** * Checks to see when the last time a texture was used. * If the texture has not been used for a specified amount of time, it will be removed from the GPU. */ protected postrender(): void; /** * Checks to see when the last time a texture was used. * If the texture has not been used for a specified amount of time, it will be removed from the GPU. */ run(): void; destroy(): void; } /** * Texture pool, used by FilterSystem and plugins. * * Stores collection of temporary pow2 or screen-sized renderTextures * * If you use custom RenderTexturePool for your filters, you can use methods * `getFilterTexture` and `returnFilterTexture` same as in default pool * @memberof rendering * @name TexturePool */ export declare class TexturePoolClass { /** The default options for texture pool */ textureOptions: TextureSourceOptions; /** * Allow renderTextures of the same size as screen, not just pow2 * * Automatically sets to true after `setScreenSize` * @default false */ enableFullScreen: boolean; private _texturePool; private _poolKeyHash; /** * @param textureOptions - options that will be passed to BaseRenderTexture constructor * @param {SCALE_MODE} [textureOptions.scaleMode] - See {@link SCALE_MODE} for possible values. */ constructor(textureOptions?: TextureSourceOptions); /** * Creates texture with params that were specified in pool constructor. * @param pixelWidth - Width of texture in pixels. * @param pixelHeight - Height of texture in pixels. * @param antialias */ createTexture(pixelWidth: number, pixelHeight: number, antialias: boolean): Texture; /** * Gets a Power-of-Two render texture or fullScreen texture * @param frameWidth - The minimum width of the render texture. * @param frameHeight - The minimum height of the render texture. * @param resolution - The resolution of the render texture. 
* @param antialias * @returns The new render texture. */ getOptimalTexture(frameWidth: number, frameHeight: number, resolution: number, antialias: boolean): Texture; /** * Gets extra texture of the same size as input renderTexture * @param texture - The texture to check what size it is. * @param antialias - Whether to use antialias. * @returns A texture that is a power of two */ getSameSizeTexture(texture: Texture, antialias?: boolean): Texture>; /** * Place a render texture back into the pool. * @param renderTexture - The renderTexture to free */ returnTexture(renderTexture: Texture): void; /** * Clears the pool. * @param destroyTextures - Destroy all stored textures. */ clear(destroyTextures?: boolean): void; } export declare const TexturePool: TexturePoolClass; /** * Stores a texture's frame in UV coordinates, in * which everything lies in the rectangle `[(0,0), (1,0), * (1,1), (0,1)]`. * * | Corner | Coordinates | * |--------------|-------------| * | Top-Left | `(x0,y0)` | * | Top-Right | `(x1,y1)` | * | Bottom-Right | `(x2,y2)` | * | Bottom-Left | `(x3,y3)` | * @protected * @memberof rendering */ export declare class TextureUvs { /** X-component of top-left corner `(x0,y0)`. */ x0: number; /** Y-component of top-left corner `(x0,y0)`. */ y0: number; /** X-component of top-right corner `(x1,y1)`. */ x1: number; /** Y-component of top-right corner `(x1,y1)`. */ y1: number; /** X-component of bottom-right corner `(x2,y2)`. */ x2: number; /** Y-component of bottom-right corner `(x2,y2)`. */ y2: number; /** X-component of bottom-left corner `(x3,y3)`. */ x3: number; /** Y-component of bottom-right corner `(x3,y3)`. */ y3: number; uvsFloat32: Float32Array; constructor(); /** * Sets the texture Uvs based on the given frame information. * @protected * @param frame - The frame of the texture * @param baseFrame - The base frame of the texture * @param rotate - Rotation of frame, see {@link groupD8} */ set(frame: Rectangle, baseFrame: Size, rotate: number): void; toString(): string; } export declare function generateUID(): number; export declare function getCanvasTexture(canvas: ICanvas, options?: CanvasSourceOptions): Texture; export declare function hasCachedCanvasTexture(canvas: ICanvas): boolean; export declare function getSupportedCompressedTextureFormats(): Promise; export declare const nonCompressedFormats: TEXTURE_FORMATS[]; export declare function getSupportedTextureFormats(): Promise; export declare function createIdFromString(value: string, groupId: string): number; export declare function parseFunctionBody(fn: (...args: any[]) => any): string; declare const DefaultWebGPUSystems: (typeof BackgroundSystem | typeof GenerateTextureSystem | typeof GlobalUniformSystem | typeof HelloSystem | typeof ViewSystem | typeof RenderGroupSystem | typeof TextureGCSystem | typeof ExtractSystem | typeof RendererInitHook | typeof RenderableGCSystem | typeof SchedulerSystem | typeof GpuUboSystem | typeof GpuEncoderSystem | typeof GpuDeviceSystem | typeof GpuBufferSystem | typeof GpuTextureSystem | typeof GpuRenderTargetSystem | typeof GpuShaderSystem | typeof GpuStateSystem | typeof PipelineSystem | typeof GpuColorMaskSystem | typeof GpuStencilSystem | typeof BindGroupSystem)[]; declare const DefaultWebGPUPipes: (typeof BlendModePipe | typeof BatcherPipe | typeof SpritePipe | typeof RenderGroupPipe | typeof AlphaMaskPipe | typeof StencilMaskPipe | typeof ColorMaskPipe | typeof CustomRenderPipe | typeof GpuUniformBatchPipe)[]; type WebGPUSystems = ExtractSystemTypes & PixiMixins.RendererSystems & 
PixiMixins.WebGPUSystems; export type WebGPUPipes = ExtractSystemTypes & PixiMixins.RendererPipes & PixiMixins.WebGPUPipes; /** * Options for WebGPURenderer. * @memberof rendering */ export interface WebGPUOptions extends SharedRendererOptions, ExtractRendererOptions, PixiMixins.WebGPUOptions { } export interface WebGPURenderer extends AbstractRenderer, WebGPUSystems { } /** * The WebGPU PixiJS Renderer. This renderer allows you to use the next-generation graphics API, WebGPU. * ```ts * // Create a new renderer * const renderer = new WebGPURenderer(); * await renderer.init(); * * // Add the renderer to the stage * document.body.appendChild(renderer.canvas); * * // Create a new stage * const stage = new Container(); * * // Render the stage * renderer.render(stage); * ``` * * You can use {@link rendering.autoDetectRenderer} to create a renderer that will automatically detect the best * renderer for the environment. * ```ts * // Create a new renderer * const renderer = await rendering.autoDetectRenderer(); * ``` * * The renderer is composed of systems that manage specific tasks. The following systems are added by default * whenever you create a WebGPU renderer: * * | WebGPU Core Systems | Systems that are specific to the WebGL renderer | * | ---------------------------------------- | ----------------------------------------------------------------------------- | * | {@link rendering.GpuUboSystem} | This manages WebGPU uniform buffer objects feature for shaders | * | {@link rendering.GpuEncoderSystem} | This manages the WebGPU command encoder | * | {@link rendering.GpuDeviceSystem} | This manages the WebGPU Device and its extensions | * | {@link rendering.GpuBufferSystem} | This manages buffers and their GPU resources, keeps everything in sync | * | {@link rendering.GpuTextureSystem} | This manages textures and their GPU resources, keeps everything in sync | * | {@link rendering.GpuRenderTargetSystem} | This manages what we render too. For example the screen, or another texture | * | {@link rendering.GpuShaderSystem} | This manages shaders, programs that run on the GPU to output lovely pixels | * | {@link rendering.GpuStateSystem} | This manages the state of the WebGPU Pipelines. eg the various flags that can be set blend modes / depthTesting etc | * | {@link rendering.PipelineSystem} | This manages the WebGPU pipelines, used for rendering | * | {@link rendering.GpuColorMaskSystem} | This manages the color mask. Used for color masking | * | {@link rendering.GpuStencilSystem} | This manages the stencil buffer. Used primarily for masking | * | {@link rendering.BindGroupSystem} | This manages the WebGPU bind groups. this is how data is bound to a shader when rendering | * * The breadth of the API surface provided by the renderer is contained within these systems. * @memberof rendering * @property {rendering.GpuUboSystem} ubo - UboSystem instance. * @property {rendering.GpuEncoderSystem} encoder - EncoderSystem instance. * @property {rendering.GpuDeviceSystem} device - DeviceSystem instance. * @property {rendering.GpuBufferSystem} buffer - BufferSystem instance. * @property {rendering.GpuTextureSystem} texture - TextureSystem instance. * @property {rendering.GpuRenderTargetSystem} renderTarget - RenderTargetSystem instance. * @property {rendering.GpuShaderSystem} shader - ShaderSystem instance. * @property {rendering.GpuStateSystem} state - StateSystem instance. * @property {rendering.PipelineSystem} pipeline - PipelineSystem instance. 
* @property {rendering.GpuColorMaskSystem} colorMask - ColorMaskSystem instance. * @property {rendering.GpuStencilSystem} stencil - StencilSystem instance. * @property {rendering.BindGroupSystem} bindGroup - BindGroupSystem instance. * @extends rendering.AbstractRenderer */ export declare class WebGPURenderer extends AbstractRenderer implements WebGPUSystems { /** The WebGPU Device. */ gpu: GPU$1; constructor(); } /** * Options for {@link rendering.autoDetectRenderer}. * @memberof rendering */ export interface AutoDetectOptions extends RendererOptions { /** The preferred renderer type. WebGPU is recommended as its generally faster than WebGL. */ preference?: "webgl" | "webgpu"; /** Optional WebGPUOptions to pass only to WebGPU renderer. */ webgpu?: Partial; /** Optional WebGLOptions to pass only to the WebGL renderer */ webgl?: Partial; } /** * Automatically determines the most appropriate renderer for the current environment. * * The function will prioritize the WebGL renderer as it is the most tested safe API to use. * In the near future as WebGPU becomes more stable and ubiquitous, it will be prioritized over WebGL. * * The selected renderer's code is then dynamically imported to optimize * performance and minimize the initial bundle size. * * To maximize the benefits of dynamic imports, it's recommended to use a modern bundler * that supports code splitting. This will place the renderer code in a separate chunk, * which is loaded only when needed. * @example * * // create a renderer * const renderer = await autoDetectRenderer({ * width: 800, * height: 600, * antialias: true, * }); * * // custom for each renderer * const renderer = await autoDetectRenderer({ * width: 800, * height: 600, * webgpu:{ * antialias: true, * backgroundColor: 'red' * }, * webgl:{ * antialias: true, * backgroundColor: 'green' * } * }); * @param options - A partial configuration object based on the `AutoDetectOptions` type. * @returns A Promise that resolves to an instance of the selected renderer. * @memberof rendering */ export declare function autoDetectRenderer(options: Partial): Promise; /** * The app module provides a set of classes to use as a starting point when building applications. * * * * ```js * import { Application } from 'pixi.js'; * * const app = new Application(); * * await app.init(); * * // don't forget to add the canvas to the DOM * document.body.appendChild(app.canvas); * ``` * @namespace app */ /** * Any plugin that's usable for Application should contain these methods. * @example * import { ApplicationPlugin } from 'pixi.js'; * * const plugin: ApplicationPlugin = { * init: (options: Partial) => * { * // handle init here, use app options if needed * }, * destroy: () => * { * // handle destruction code here * } * } * @memberof app * @see {@link app.ApplicationOptions} * @ignore */ export interface ApplicationPlugin { /** * Called when Application is constructed, scoped to Application instance. * Passes in `options` as the only argument, which are Application `init()` options. * @param {object} options - Application options. */ init(options: Partial): void; /** Called when destroying Application, scoped to Application instance. */ destroy(): void; } /** * Application options supplied to the {@link app.Application#init} method. 
* @memberof app * @example * import { Application } from 'pixi.js'; * * const app = new Application(); * * await app.init({ * autoStart: false, * resizeTo: window, * sharedTicker: true, * }); */ export interface ApplicationOptions extends AutoDetectOptions, PixiMixins.ApplicationOptions { } export interface Application extends PixiMixins.Application { } /** * Convenience class to create a new PixiJS application. * * This class automatically creates the renderer, ticker and root container. * @example * import { Application, Sprite } from 'pixi.js'; * * // Create the application * const app = new Application(); * * await app.init({ width: 800, height: 600 }); * * // Add the view to the DOM * document.body.appendChild(app.canvas); * * // ex, add display objects * app.stage.addChild(Sprite.from('something.png')); * @memberof app */ export declare class Application { /** * Collection of installed plugins. * @alias _plugins */ static _plugins: ApplicationPlugin[]; /** The root display container that's rendered. */ stage: Container; /** * WebGL renderer if available, otherwise CanvasRenderer. * @member {rendering.Renderer} */ renderer: R; /** Create new Application instance */ constructor(); /** @deprecated since 8.0.0 */ constructor(options?: Partial); /** * @param options - The optional application and renderer parameters. */ init(options?: Partial): Promise; /** Render the current stage. */ render(): void; /** * Reference to the renderer's canvas element. * @readonly * @member {HTMLCanvasElement} */ get canvas(): R["canvas"]; /** * Reference to the renderer's canvas element. * @member {HTMLCanvasElement} * @deprecated since 8.0.0 */ get view(): R["canvas"]; /** * Reference to the renderer's screen rectangle. Its safe to use as `filterArea` or `hitArea` for the whole screen. * @readonly */ get screen(): Rectangle; /** * Destroys the application and all of its resources. * @param {object|boolean}[rendererDestroyOptions=false] - The options for destroying the renderer. * @param {boolean}[rendererDestroyOptions.removeView=false] - Removes the Canvas element from the DOM. * @param {object|boolean} [options=false] - The options for destroying the stage. * @param {boolean} [options.children=false] - If set to true, all the children will have their destroy method * called as well. `options` will be passed on to those calls. * @param {boolean} [options.texture=false] - Only used for children with textures e.g. Sprites. * If options.children is set to true, * it should destroy the texture of the child sprite. * @param {boolean} [options.textureSource=false] - Only used for children with textures e.g. Sprites. * If options.children is set to true, * it should destroy the texture source of the child sprite. * @param {boolean} [options.context=false] - Only used for children with graphicsContexts e.g. Graphics. * If options.children is set to true, * it should destroy the context of the child graphics. */ destroy(rendererDestroyOptions?: RendererDestroyOptions, options?: DestroyOptions): void; } declare global { var __PIXI_APP_INIT__: undefined | ((arg: Application | Renderer, version: string) => void); var __PIXI_RENDERER_INIT__: undefined | ((arg: Application | Renderer, version: string) => void); } /** * Calls global __PIXI_APP_INIT__ hook with the application instance, after the application is initialized. 
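 *
 * For example, a devtool could register the hook before any application is created
 * (a sketch based on the global declaration above):
 * ```ts
 * globalThis.__PIXI_APP_INIT__ = (appOrRenderer, version) =>
 * {
 *     console.log(`PixiJS ${version} initialized`, appOrRenderer);
 * };
 * ```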
* @memberof app */ export declare class ApplicationInitHook { /** @ignore */ static extension: ExtensionMetadata; static init(): void; static destroy(): void; } /** * Calls global __PIXI_RENDERER_INIT__ hook with the renderer instance, after the renderer is initialized. * @memberof rendering */ export declare class RendererInitHook implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "initHook"; readonly priority: -10; }; private _renderer; constructor(renderer: Renderer); init(): void; destroy(): void; } export declare const SharedSystems: (typeof BackgroundSystem | typeof GenerateTextureSystem | typeof GlobalUniformSystem | typeof HelloSystem | typeof ViewSystem | typeof RenderGroupSystem | typeof TextureGCSystem | typeof ExtractSystem | typeof RendererInitHook | typeof RenderableGCSystem | typeof SchedulerSystem)[]; export declare const SharedRenderPipes: (typeof BlendModePipe | typeof BatcherPipe | typeof SpritePipe | typeof RenderGroupPipe | typeof AlphaMaskPipe | typeof StencilMaskPipe | typeof ColorMaskPipe | typeof CustomRenderPipe)[]; /** * Options for the shared systems of a renderer. * @memberof rendering */ export interface SharedRendererOptions extends ExtractRendererOptions, PixiMixins.RendererOptions { /** * Whether to stop PixiJS from dynamically importing default extensions for the renderer. * It is false by default, and means PixiJS will load all the default extensions, based * on the environment e.g browser/webworker. * If you set this to true, then you will need to manually import the systems and extensions you need. * * e.g. * ```js * import 'accessibility'; * import 'app'; * import 'events'; * import 'spritesheet'; * import 'graphics'; * import 'mesh'; * import 'text'; * import 'text-bitmap'; * import 'text-html'; * import { autoDetectRenderer } from 'pixi.js'; * * const renderer = await autoDetectRenderer({ * width: 800, * height: 600, * skipExtensionImports: true, * }); * ``` * @default false */ skipExtensionImports?: boolean; /** * @default true * @deprecated since 8.1.6 * @see `skipExtensionImports` */ manageImports?: boolean; } export interface RendererConfig { type: number; name: string; runners?: string[]; systems: { name: string; value: SystemConstructor; }[]; renderPipes: { name: string; value: PipeConstructor; }[]; renderPipeAdaptors: { name: string; value: any; }[]; } /** * The options for rendering a view. * @memberof rendering */ export interface RenderOptions extends ClearOptions { /** The container to render. */ container: Container; /** the transform to apply to the container. */ transform?: Matrix; } /** * The options for clearing the render target. * @memberof rendering */ export interface ClearOptions { /** * The render target to render. if this target is a canvas and you are using the WebGL renderer, * please ensure you have set `multiView` to `true` on renderer. */ target?: RenderSurface; /** The color to clear with. */ clearColor?: ColorSource; /** The clear mode to use. */ clear?: CLEAR_OR_BOOL; } export type RendererDestroyOptions = TypeOrBool; declare const defaultRunners: readonly [ "init", "destroy", "contextChange", "resolutionChange", "reset", "renderEnd", "renderStart", "render", "update", "postrender", "prerender" ]; type DefaultRunners = typeof defaultRunners[number]; type Runners = { [key in DefaultRunners]: SystemRunner; } & { [K: ({} & string) | ({} & symbol)]: SystemRunner; }; /** * The base class for a PixiJS Renderer. 
It contains the shared logic for all renderers. * * You should not use this class directly, but instead use {@link rendering.WebGLRenderer} * or {@link rendering.WebGPURenderer}. * Alternatively, you can also use {@link rendering.autoDetectRenderer} if you want us to * determine the best renderer for you. * * The renderer is composed of systems that manage specific tasks. The following systems are added by default * whenever you create a renderer: * * * | Generic Systems | Systems that manage functionality that all renderer types share | * | ------------------------------------ | ----------------------------------------------------------------------------- | * | {@link rendering.ViewSystem} | This manages the main view of the renderer, usually a Canvas | * | {@link rendering.BackgroundSystem} | This manages the main view's background color and alpha | * | {@link events.EventSystem} | This manages UI events. | * | {@link accessibility.AccessibilitySystem} | This manages accessibility features. Requires `import 'pixi.js/accessibility'`| * * | Core Systems | Provide an optimised, easy to use API to work with WebGL/WebGPU | * | ------------------------------------ | ----------------------------------------------------------------------------- | * | {@link rendering.RenderGroupSystem} | This manages what we are rendering to (e.g. a canvas or a texture) | * | {@link rendering.GlobalUniformSystem} | This manages the global uniforms that are shared with shaders on the GPU. | * | {@link rendering.TextureGCSystem} | This will automatically remove textures from the GPU if they are not used. | * * | PixiJS High-Level Systems | Set of specific systems designed to work with PixiJS objects | * | ------------------------------------ | ----------------------------------------------------------------------------- | * | {@link rendering.HelloSystem} | Says hello, by printing the PixiJS version to the console log (along with the renderer type) | * | {@link rendering.GenerateTextureSystem} | This adds the ability to generate textures from any Container | * | {@link rendering.FilterSystem} | This manages the filtering pipeline for post-processing effects. | * | {@link rendering.PrepareSystem} | This manages uploading assets to the GPU. Requires `import 'pixi.js/prepare'`| * | {@link rendering.ExtractSystem} | This extracts image data from display objects. | * * The breadth of the API surface provided by the renderer is contained within these systems. * @abstract * @memberof rendering * @property {rendering.HelloSystem} hello - HelloSystem instance. * @property {rendering.RenderGroupSystem} renderGroup - RenderGroupSystem instance. * @property {rendering.TextureGCSystem} textureGC - TextureGCSystem instance. * @property {rendering.FilterSystem} filter - FilterSystem instance. * @property {rendering.GlobalUniformSystem} globalUniforms - GlobalUniformSystem instance. * @property {rendering.TextureSystem} texture - TextureSystem instance. * @property {rendering.EventSystem} events - EventSystem instance. * @property {rendering.ExtractSystem} extract - ExtractSystem instance. Requires `import 'pixi.js/extract'`. * @property {rendering.PrepareSystem} prepare - PrepareSystem instance. Requires `import 'pixi.js/prepare'`. * @property {rendering.AccessibilitySystem} accessibility - AccessibilitySystem instance. Requires `import 'pixi.js/accessibility'`.
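 * * A minimal usage sketch (not part of the original documentation above): it only relies on members declared on this class - `init`, `canvas`, `render`, `resize` - together with a concrete renderer; the sizes and colour are illustrative. * @example * import { Container, WebGLRenderer } from 'pixi.js'; * * const renderer = new WebGLRenderer(); * await renderer.init({ width: 800, height: 600 }); * document.body.appendChild(renderer.canvas); * * const stage = new Container(); * // Render the stage, clearing to a background colour first (clearColor comes from ClearOptions). * renderer.render({ container: stage, clearColor: '#1e1e1e' }); * * // Resize later if the layout changes. * renderer.resize(1024, 768);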
 */ export declare class AbstractRenderer extends EventEmitter<{ resize: [ screenWidth: number, screenHeight: number, resolution: number ]; }> { /** The default options for the renderer. */ static defaultOptions: { /** * Default resolution / device pixel ratio of the renderer. * @default 1 */ resolution: number; /** * Should the `failIfMajorPerformanceCaveat` flag be enabled as a context option used in the `isWebGLSupported` * function. If set to true, a WebGL renderer can fail to be created if the browser thinks there could be * performance issues when using WebGL. * * In PixiJS v6 this has changed from true to false by default, to allow WebGL to work in as many * scenarios as possible. However, some users may have a poor experience, for example, if a user has a gpu or * driver version blacklisted by the * browser. * * If your application requires high performance rendering, you may wish to set this to true. * We recommend one of two options if you decide to set this flag to true: * * 1: Use the Canvas renderer as a fallback in case high performance WebGL is * not supported. * * 2: Call `isWebGLSupported` (which is found in the utils package) in your code before attempting to create a * PixiJS renderer, and show an error message to the user if the function returns false, explaining that their * device & browser combination does not support high performance WebGL. * This is a much better strategy than trying to create a PixiJS renderer and finding that it then fails. * @default false */ failIfMajorPerformanceCaveat: boolean; /** * Should round pixels be forced when rendering? * @default false */ roundPixels: boolean; }; readonly type: number; /** The name of the renderer. */ readonly name: string; _roundPixels: 0 | 1; readonly runners: Runners; readonly renderPipes: PIPES; /** The view system manages the main canvas that is attached to the DOM */ view: ViewSystem; /** The background system manages the background color and alpha of the main view. */ background: BackgroundSystem; /** System that manages the generation of textures from the renderer */ textureGenerator: GenerateTextureSystem; protected _initOptions: OPTIONS; protected config: RendererConfig; private _systemsHash; private _lastObjectRendered; /** * Set up a system with a collection of SystemClasses and runners. * Systems are attached dynamically to this class when added. * @param config - the config for the system manager */ constructor(config: RendererConfig); /** * Initialize the renderer. * @param options - The options to use to create the renderer. */ init(options?: Partial): Promise; /** * Renders the object to its view. * @param options - The options to render with. * @param options.container - The container to render. * @param [options.target] - The target to render to. */ render(options: RenderOptions | Container): void; /** @deprecated since 8.0.0 */ render(container: Container, options: { renderTexture: any; }): void; /** * Resizes the WebGL view to the specified width and height. * @param desiredScreenWidth - The desired width of the screen. * @param desiredScreenHeight - The desired height of the screen. * @param resolution - The resolution / device pixel ratio of the renderer. */ resize(desiredScreenWidth: number, desiredScreenHeight: number, resolution?: number): void; clear(options?: ClearOptions): void; /** The resolution / device pixel ratio of the renderer. */ get resolution(): number; set resolution(value: number); /** * Same as view.width, actual number of pixels in the canvas by horizontal.
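 * * As a rough sketch of the relationship with `resolution` (assuming the canvas size is the screen size multiplied by the resolution; the numbers are purely illustrative): * @example * renderer.resize(400, 300, 2); * renderer.width;  // 800 device pixels * renderer.height; // 600 device pixels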
 * @member {number} * @readonly * @default 800 */ get width(): number; /** * Same as view.height, actual number of pixels in the canvas by vertical. * @default 600 */ get height(): number; /** * The canvas element that everything is drawn to. * @type {environment.ICanvas} */ get canvas(): CANVAS; /** * The last object rendered by the renderer. Useful for other plugins like interaction managers. * @readonly */ get lastObjectRendered(): Container; /** * Flag indicating whether we are rendering to the screen vs a render texture. * @readonly * @default true */ get renderingToScreen(): boolean; /** * Measurements of the screen. (0, 0, screenWidth, screenHeight). * * It's safe to use as filterArea or hitArea for the whole stage. */ get screen(): Rectangle; /** * Create a set of runners based on a collection of ids * @param runnerIds - the runner ids to add */ private _addRunners; private _addSystems; /** * Add a new system to the renderer. * @param ClassRef - Class reference * @param name - Property name for system, if not specified * will use a static `name` property on the class itself. This * name will be assigned as a property on the Renderer, so make * sure it doesn't collide with properties on Renderer. * @returns Return instance of renderer */ private _addSystem; private _addPipes; destroy(options?: RendererDestroyOptions): void; /** * Generate a texture from a container. * @param options - options or container target to use when generating the texture * @returns a texture */ generateTexture(options: GenerateTextureOptions | Container): Texture; /** * Whether the renderer will round coordinates to whole pixels when rendering. * Can be overridden on a per scene item basis. */ get roundPixels(): boolean; /** * Overridable function by `pixi.js/unsafe-eval` to silence * the error thrown if the platform doesn't support unsafe-eval. * @private * @ignore */ _unsafeEvalCheck(): void; } declare const DefaultWebGLSystems: (typeof BackgroundSystem | typeof GenerateTextureSystem | typeof GlobalUniformSystem | typeof HelloSystem | typeof ViewSystem | typeof RenderGroupSystem | typeof TextureGCSystem | typeof ExtractSystem | typeof RendererInitHook | typeof RenderableGCSystem | typeof SchedulerSystem | typeof GlUboSystem | typeof GlBackBufferSystem | typeof GlContextSystem | typeof GlBufferSystem | typeof GlTextureSystem | typeof GlRenderTargetSystem | typeof GlGeometrySystem | typeof GlUniformGroupSystem | typeof GlShaderSystem | typeof GlEncoderSystem | typeof GlStateSystem | typeof GlStencilSystem | typeof GlColorMaskSystem)[]; declare const DefaultWebGLPipes: (typeof BlendModePipe | typeof BatcherPipe | typeof SpritePipe | typeof RenderGroupPipe | typeof AlphaMaskPipe | typeof StencilMaskPipe | typeof ColorMaskPipe | typeof CustomRenderPipe)[]; type WebGLSystems = ExtractSystemTypes & PixiMixins.RendererSystems & PixiMixins.WebGLSystems; /** The render pipes used by the WebGL renderer. */ export type WebGLPipes = ExtractSystemTypes & PixiMixins.RendererPipes & PixiMixins.WebGLPipes; /** * Options for WebGLRenderer. * @memberof rendering */ export interface WebGLOptions extends SharedRendererOptions, ExtractRendererOptions, PixiMixins.WebGLOptions { } /** * The default WebGL renderer, uses WebGL2 contexts. * @memberof rendering */ export interface WebGLRenderer extends AbstractRenderer, WebGLSystems { } /** * The WebGL PixiJS Renderer. This renderer allows you to use the most common graphics API, WebGL (and WebGL2).
 * * ```ts * // Create a new renderer * const renderer = new WebGLRenderer(); * await renderer.init(); * * // Add the renderer's canvas to the DOM * document.body.appendChild(renderer.canvas); * * // Create a new stage * const stage = new Container(); * * // Render the stage * renderer.render(stage); * ``` * * You can use {@link rendering.autoDetectRenderer} to create a renderer that will automatically detect the best * renderer for the environment. * * * ```ts * // Create a new renderer * const renderer = await rendering.autoDetectRenderer({ * preference: 'webgl', * }); * ``` * * The renderer is composed of systems that manage specific tasks. The following systems are added by default * whenever you create a WebGL renderer: * * | WebGL Core Systems | Systems that are specific to the WebGL renderer | * | ------------------------------------------- | ----------------------------------------------------------------------------- | * | {@link rendering.GlUboSystem} | This manages the WebGL2 uniform buffer objects feature for shaders | * | {@link rendering.GlBackBufferSystem} | This manages the back buffer, used so that we can read pixels back from the screen | * | {@link rendering.GlContextSystem} | This manages the WebGL context and its extensions | * | {@link rendering.GlBufferSystem} | This manages buffers and their GPU resources, keeps everything in sync | * | {@link rendering.GlTextureSystem} | This manages textures and their GPU resources, keeps everything in sync | * | {@link rendering.GlRenderTargetSystem} | This manages what we render to. For example, the screen or another texture | * | {@link rendering.GlGeometrySystem} | This manages geometry, used for drawing meshes via the GPU | * | {@link rendering.GlUniformGroupSystem} | This manages uniform groups, syncing shader properties with the GPU | * | {@link rendering.GlShaderSystem} | This manages shaders, programs that run on the GPU to output lovely pixels | * | {@link rendering.GlEncoderSystem} | This manages encoders, a WebGPU paradigm; use it to draw a mesh + shader | * | {@link rendering.GlStateSystem} | This manages the state of the WebGL context, e.g. the various flags that can be set: blend modes / depth testing etc. | * | {@link rendering.GlStencilSystem} | This manages the stencil buffer. Used primarily for masking | * | {@link rendering.GlColorMaskSystem} | This manages the color mask. Used for color masking | * * The breadth of the API surface provided by the renderer is contained within these systems. * @memberof rendering * @property {rendering.GlUboSystem} ubo - UboSystem instance. * @property {rendering.GlBackBufferSystem} backBuffer - BackBufferSystem instance. * @property {rendering.GlContextSystem} context - ContextSystem instance. * @property {rendering.GlBufferSystem} buffer - BufferSystem instance. * @property {rendering.GlTextureSystem} texture - TextureSystem instance. * @property {rendering.GlRenderTargetSystem} renderTarget - RenderTargetSystem instance. * @property {rendering.GlGeometrySystem} geometry - GeometrySystem instance. * @property {rendering.GlUniformGroupSystem} uniformGroup - UniformGroupSystem instance. * @property {rendering.GlShaderSystem} shader - ShaderSystem instance. * @property {rendering.GlEncoderSystem} encoder - EncoderSystem instance. * @property {rendering.GlStateSystem} state - StateSystem instance. * @property {rendering.GlStencilSystem} stencil - StencilSystem instance. * @property {rendering.GlColorMaskSystem} colorMask - ColorMaskSystem instance.
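 * * A small sketch of reaching the WebGL-specific pieces listed above once the renderer is initialised (the property names are taken from this documentation; the usage itself is illustrative): * @example * const renderer = new WebGLRenderer(); * await renderer.init(); * * renderer.gl;      // the underlying WebGL2 rendering context * renderer.context; // the GlContextSystem managing that context and its extensions * renderer.stencil; // the GlStencilSystem, used primarily for masking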
 * @extends rendering.AbstractRenderer */ export declare class WebGLRenderer extends AbstractRenderer implements WebGLSystems { gl: GlRenderingContext; constructor(); } /** * A generic renderer. * @memberof rendering * @extends rendering.WebGLRenderer * @extends rendering.WebGPURenderer */ export type Renderer = WebGLRenderer | WebGPURenderer; export type RenderPipes = WebGLPipes | WebGPUPipes; /** * @extends rendering.WebGLOptions * @extends rendering.WebGPUOptions */ export interface RendererOptions extends WebGLOptions, WebGPUOptions { } /** * Ids for the different render types. * The idea is that you can use bitwise operations to filter whether or not you want to do something * in a certain render type. * Filters, for example, can be compatible with both WebGL and WebGPU but not compatible with canvas. * So internally, if it works with both, we set filter.compatibleRenderers = RendererType.WEBGL | RendererType.WEBGPU; * if it only works with WebGL, we set filter.compatibleRenderers = RendererType.WEBGL. * */ export declare enum RendererType { WEBGL = 1, WEBGPU = 2, BOTH = 3 } export type GpuPowerPreference = "low-power" | "high-performance"; interface System$1 { init?: (options: INIT_OPTIONS) => void; /** Generic destroy method to be overridden by the subclass */ destroy?: (options?: DESTROY_OPTIONS) => void; } export interface SystemConstructor { new (renderer: Renderer): System$1; } /** * The result of the {@link utils.isMobile} function. * @ignore * @memberof utils */ export type isMobileResult = { /** * Whether the device is an Apple device. * @memberof utils.isMobile */ apple: { phone: boolean; ipod: boolean; tablet: boolean; universal: boolean; device: boolean; }; /** * Whether the device is an Amazon device. * @memberof utils.isMobile */ amazon: { phone: boolean; tablet: boolean; device: boolean; }; /** * Whether the device is an Android device. * @memberof utils.isMobile */ android: { phone: boolean; tablet: boolean; device: boolean; }; /** * Whether the device is a Windows device. * @memberof utils.isMobile */ windows: { phone: boolean; tablet: boolean; device: boolean; }; /** * Whether the device matches another specific device or browser (BlackBerry, Opera, Firefox, Chrome). * @memberof utils.isMobile */ other: { blackberry: boolean; blackberry10: boolean; opera: boolean; firefox: boolean; chrome: boolean; device: boolean; }; /** * Whether the device is a phone device. * @memberof utils.isMobile */ phone: boolean; /** * Whether the device is a tablet device. * @memberof utils.isMobile */ tablet: boolean; /** * Whether the device is any kind of device. * @memberof utils.isMobile */ any: boolean; }; /** * Detects whether the device is mobile and what type of mobile device it is. * ```js * import { isMobile } from 'pixi.js'; * * if (isMobile.apple.tablet) { * // The device is an Apple tablet device. * } * ``` * @memberof utils */ export declare const isMobile: isMobileResult; /** @ignore */ export interface AccessibilityOptions { /** Setting this to true will visually show the divs. */ debug?: boolean; } /** * The Accessibility system recreates the ability to tab and have content read by screen readers. * This is very important as it can possibly help people with disabilities access PixiJS content. * * A Container can be made accessible just like it can be made interactive. This manager will map the * events as if the mouse was being used, minimizing the effort required to implement.
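 * * A minimal sketch of opting a display object into the accessibility layer (this assumes the accessibility mixin properties, such as `accessible`, that this module adds to Container; the asset name is illustrative): * @example * import 'pixi.js/accessibility'; * import { Sprite } from 'pixi.js'; * * const button = Sprite.from('button.png'); * button.eventMode = 'static'; // make it interactive * button.accessible = true;    // expose it to tab navigation and screen readers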
* * An instance of this class is automatically created by default, and can be found at `renderer.accessibility` * @memberof accessibility */ export declare class AccessibilitySystem implements System$1 { private readonly _mobileInfo; /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "accessibility"; }; /** Setting this to true will visually show the divs. */ debug: boolean; /** * The renderer this accessibility manager works for. * @type {WebGLRenderer|WebGPURenderer} */ private _renderer; /** Internal variable, see isActive getter. */ private _isActive; /** Internal variable, see isMobileAccessibility getter. */ private _isMobileAccessibility; /** Button element for handling touch hooks. */ private _hookDiv; /** This is the dom element that will sit over the PixiJS element. This is where the div overlays will go. */ private _div; /** A simple pool for storing divs. */ private _pool; /** This is a tick used to check if an object is no longer being rendered. */ private _renderId; /** The array of currently active accessible items. */ private _children; /** Count to throttle div updates on android devices. */ private _androidUpdateCount; /** The frequency to update the div elements. */ private readonly _androidUpdateFrequency; /** * @param {WebGLRenderer|WebGPURenderer} renderer - A reference to the current renderer */ constructor(renderer: Renderer, _mobileInfo?: isMobileResult); /** * Value of `true` if accessibility is currently active and accessibility layers are showing. * @member {boolean} * @readonly */ get isActive(): boolean; /** * Value of `true` if accessibility is enabled for touch devices. * @member {boolean} * @readonly */ get isMobileAccessibility(): boolean; get hookDiv(): HTMLElement; /** * Creates the touch hooks. * @private */ private _createTouchHook; /** * Destroys the touch hooks. * @private */ private _destroyTouchHook; /** * Activating will cause the Accessibility layer to be shown. * This is called when a user presses the tab key. * @private */ private _activate; /** * Deactivating will cause the Accessibility layer to be hidden. * This is called when a user moves the mouse. * @private */ private _deactivate; /** * This recursive function will run through the scene graph and add any new accessible objects to the DOM layer. * @private * @param {Container} container - The Container to check. */ private _updateAccessibleObjects; /** * Runner init called, view is available at this point. * @ignore */ init(options?: AccessibilityOptions): void; /** * Runner postrender was called, ensure that all divs are mapped correctly to their Containers. * Only fires while active. * @ignore */ postrender(): void; /** * private function that will visually add the information to the * accessibility div * @param {HTMLElement} div - */ private _updateDebugHTML; /** * Adjust the hit area based on the bounds of a display object * @param {Rectangle} hitArea - Bounds of the child */ private _capHitArea; /** * Adds a Container to the accessibility manager * @private * @param {Container} container - The child to make accessible. */ private _addChild; /** * Dispatch events with the EventSystem. * @param e * @param type * @private */ private _dispatchEvent; /** * Maps the div button press to pixi's EventSystem (click) * @private * @param {MouseEvent} e - The click event. */ private _onClick; /** * Maps the div focus events to pixi's EventSystem (mouseover) * @private * @param {FocusEvent} e - The focus event. 
*/ private _onFocus; /** * Maps the div focus events to pixi's EventSystem (mouseout) * @private * @param {FocusEvent} e - The focusout event. */ private _onFocusOut; /** * Is called when a key is pressed * @private * @param {KeyboardEvent} e - The keydown event. */ private _onKeyDown; /** * Is called when the mouse moves across the renderer element * @private * @param {MouseEvent} e - The mouse event. */ private _onMouseMove; /** Destroys the accessibility manager */ destroy(): void; } declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-interface interface Container extends Partial { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface ContainerOptions extends Partial { } interface RendererSystems { accessibility: AccessibilitySystem; } } } /** * A callback which can be added to a ticker. * ```js * ticker.add(() => { * // do something every frame * }); * ``` * @memberof ticker */ export type TickerCallback = (this: T, ticker: Ticker) => any; /** * {@link ticker.Ticker|Tickers} provide periodic callbacks based on the system clock. * Your game update logic will generally be run in response to a tick once per frame. * You can have multiple tickers in use at one time. * ```js * import { Ticker } from 'pixi.js'; * * const callback = (ticker: Ticker) => { * // do something on the next animation frame * }; * * // create a ticker * const ticker = new Ticker(); * * // register the callback and start the ticker * ticker.add(callback); * ticker.start(); * ``` * * You can always use the {@link ticker.Ticker.shared|shared} ticker that Pixi renders with by default. * ```js * Ticker.shared.add(callback); * ``` * @namespace ticker */ /** * A Ticker class that runs an update loop that other objects listen to. * * This class is composed around listeners meant for execution on the next requested animation frame. * Animation frames are requested only when necessary, e.g. When the ticker is started and the emitter has listeners. * @class * @memberof ticker */ export declare class Ticker { /** * Target frames per millisecond. * @static */ static targetFPMS: number; /** The private shared ticker instance */ private static _shared; /** The private system ticker instance */ private static _system; /** * Whether or not this ticker should invoke the method * {@link ticker.Ticker#start|start} automatically when a listener is added. */ autoStart: boolean; /** * Scalar time value from last frame to this frame. * This value is capped by setting {@link ticker.Ticker#minFPS|minFPS} * and is scaled with {@link ticker.Ticker#speed|speed}. * **Note:** The cap may be exceeded by scaling. */ deltaTime: number; /** * Scalar time elapsed in milliseconds from last frame to this frame. * This value is capped by setting {@link ticker.Ticker#minFPS|minFPS} * and is scaled with {@link ticker.Ticker#speed|speed}. * **Note:** The cap may be exceeded by scaling. * If the platform supports DOMHighResTimeStamp, * this value will have a precision of 1 µs. * Defaults to target frame time * @default 16.66 */ deltaMS: number; /** * Time elapsed in milliseconds from last frame to this frame. * Opposed to what the scalar {@link ticker.Ticker#deltaTime|deltaTime} * is based, this value is neither capped nor scaled. * If the platform supports DOMHighResTimeStamp, * this value will have a precision of 1 µs. * Defaults to target frame time * @default 16.66 */ elapsedMS: number; /** * The last time {@link ticker.Ticker#update|update} was invoked. 
 * This value is also reset internally outside of invoking * update, but only when a new animation frame is requested. * If the platform supports DOMHighResTimeStamp, * this value will have a precision of 1 µs. */ lastTime: number; /** * Factor of current {@link ticker.Ticker#deltaTime|deltaTime}. * @example * // Scales ticker.deltaTime to what would be * // the equivalent of approximately 120 FPS * ticker.speed = 2; */ speed: number; /** * Whether or not this ticker has been started. * `true` if {@link ticker.Ticker#start|start} has been called. * `false` if {@link ticker.Ticker#stop|stop} has been called. * While `false`, this value may change to `true` in the * event of {@link ticker.Ticker#autoStart|autoStart} being `true` * and a listener being added. */ started: boolean; /** The first listener. All new listeners added are chained on this. */ private _head; /** Internal current frame request ID */ private _requestId; /** * Internal value managed by minFPS property setter and getter. * This is the maximum allowed milliseconds between updates. */ private _maxElapsedMS; /** * Internal value managed by minFPS property setter and getter. * This is the minimum allowed milliseconds between updates. */ private _minElapsedMS; /** If true, this ticker is protected and cannot be destroyed. */ private _protected; /** The last time a keyframe was executed. Maintains a relatively fixed interval with the previous value. */ private _lastFrame; /** * Internal tick method bound to the ticker instance. * This is because, in early 2015, Function.bind * was still 60% slower in high performance scenarios. * Also separating frame requests from the update method * so listeners may be called at any time and with * any animation API, just invoke ticker.update(time). * @param time - Time since last tick. */ private readonly _tick; constructor(); /** * Conditionally requests a new animation frame. * If a frame has not already been requested, and if the internal * emitter has listeners, a new frame is requested. * @private */ private _requestIfNeeded; /** * Conditionally cancels a pending animation frame. * @private */ private _cancelIfNeeded; /** * Conditionally requests a new animation frame. * If the ticker has been started it checks if a frame has not already * been requested, and if the internal emitter has listeners. If these * conditions are met, a new frame is requested. If the ticker has not * been started, but autoStart is `true`, then the ticker starts now, * and continues with the previous conditions to request a new frame. * @private */ private _startIfPossible; /** * Register a handler for tick events. Calls continuously unless * it is removed or the ticker is stopped. * @param fn - The listener function to be added for updates * @param context - The listener context * @param {number} [priority=UPDATE_PRIORITY.NORMAL] - The priority for emitting * @returns This instance of a ticker */ add(fn: TickerCallback, context?: T, priority?: number): this; /** * Add a handler for the tick event which is only executed once. * @param fn - The listener function to be added for one update * @param context - The listener context * @param {number} [priority=UPDATE_PRIORITY.NORMAL] - The priority for emitting * @returns This instance of a ticker */ addOnce(fn: TickerCallback, context?: T, priority?: number): this; /** * Internally adds the event handler so that it can be sorted by priority. * Priority allows certain handlers (user, AnimatedSprite, Interaction) to be run * before the rendering. * @private * @param listener - Current listener being added.
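 * * A short sketch of priorities in practice (UPDATE_PRIORITY is the constant referenced above; the handler bodies are illustrative): * @example * import { Ticker, UPDATE_PRIORITY } from 'pixi.js'; * * const ticker = new Ticker(); * ticker.add((t) => stepPhysics(t.deltaMS), undefined, UPDATE_PRIORITY.HIGH); // runs first * ticker.add((t) => draw(t.deltaTime));                                       // UPDATE_PRIORITY.NORMAL by default * ticker.start();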
* @returns This instance of a ticker */ private _addListener; /** * Removes any handlers matching the function and context parameters. * If no handlers are left after removing, then it cancels the animation frame. * @param fn - The listener function to be removed * @param context - The listener context to be removed * @returns This instance of a ticker */ remove(fn: TickerCallback, context?: T): this; /** * The number of listeners on this ticker, calculated by walking through linked list * @readonly * @member {number} */ get count(): number; /** Starts the ticker. If the ticker has listeners a new animation frame is requested at this point. */ start(): void; /** Stops the ticker. If the ticker has requested an animation frame it is canceled at this point. */ stop(): void; /** Destroy the ticker and don't use after this. Calling this method removes all references to internal events. */ destroy(): void; /** * Triggers an update. An update entails setting the * current {@link ticker.Ticker#elapsedMS|elapsedMS}, * the current {@link ticker.Ticker#deltaTime|deltaTime}, * invoking all listeners with current deltaTime, * and then finally setting {@link ticker.Ticker#lastTime|lastTime} * with the value of currentTime that was provided. * This method will be called automatically by animation * frame callbacks if the ticker instance has been started * and listeners are added. * @param {number} [currentTime=performance.now()] - the current time of execution */ update(currentTime?: number): void; /** * The frames per second at which this ticker is running. * The default is approximately 60 in most modern browsers. * **Note:** This does not factor in the value of * {@link ticker.Ticker#speed|speed}, which is specific * to scaling {@link ticker.Ticker#deltaTime|deltaTime}. * @member {number} * @readonly */ get FPS(): number; /** * Manages the maximum amount of milliseconds allowed to * elapse between invoking {@link ticker.Ticker#update|update}. * This value is used to cap {@link ticker.Ticker#deltaTime|deltaTime}, * but does not effect the measured value of {@link ticker.Ticker#FPS|FPS}. * When setting this property it is clamped to a value between * `0` and `Ticker.targetFPMS * 1000`. * @member {number} * @default 10 */ get minFPS(): number; set minFPS(fps: number); /** * Manages the minimum amount of milliseconds required to * elapse between invoking {@link ticker.Ticker#update|update}. * This will effect the measured value of {@link ticker.Ticker#FPS|FPS}. * If it is set to `0`, then there is no limit; PixiJS will render as many frames as it can. * Otherwise it will be at least `minFPS` * @member {number} * @default 0 */ get maxFPS(): number; set maxFPS(fps: number); /** * The shared ticker instance used by {@link AnimatedSprite} and by * {@link VideoResource} to update animation frames / video textures. * * It may also be used by {@link Application} if created with the `sharedTicker` option property set to true. * * The property {@link ticker.Ticker#autoStart|autoStart} is set to `true` for this instance. * Please follow the examples for usage, including how to opt-out of auto-starting the shared ticker. * @example * import { Ticker } from 'pixi.js'; * * const ticker = Ticker.shared; * // Set this to prevent starting this ticker when listeners are added. * // By default this is true only for the Ticker.shared instance. * ticker.autoStart = false; * * // FYI, call this to ensure the ticker is stopped. It should be stopped * // if you have not attempted to render anything yet. 
 * ticker.stop(); * * // Call this when you are ready for a running shared ticker. * ticker.start(); * @example * import { autoDetectRenderer, Container, Ticker } from 'pixi.js'; * * // You may use the shared ticker to render... * const renderer = await autoDetectRenderer(); * const stage = new Container(); * document.body.appendChild(renderer.canvas); * const ticker = Ticker.shared; * ticker.add((time) => renderer.render(stage)); * * // Or you can just update it manually. * ticker.autoStart = false; * ticker.stop(); * const animate = (time) => { * ticker.update(time); * renderer.render(stage); * requestAnimationFrame(animate); * }; * animate(performance.now()); * @member {ticker.Ticker} * @readonly * @static */ static get shared(): Ticker; /** * The system ticker instance used by {@link BasePrepare} for core timing * functionality that shouldn't usually need to be paused, unlike the `shared` * ticker, which drives visual animations and rendering and which you may want to pause. * * The property {@link ticker.Ticker#autoStart|autoStart} is set to `true` for this instance. * @member {ticker.Ticker} * @readonly * @static */ static get system(): Ticker; } type ResizeableRenderer = Pick; /** * Application options for the {@link app.ResizePlugin}. * @memberof app * @property {Window|HTMLElement} [resizeTo=window] - Element to automatically resize the renderer to. */ export interface ResizePluginOptions { /** * Element to automatically resize the renderer to. * @memberof app.ApplicationOptions */ resizeTo?: Window | HTMLElement; } /** * Middleware for Application's resize functionality. * * Adds the following methods to {@link app.Application}: * * {@link app.Application#resizeTo} * * {@link app.Application#resize} * * {@link app.Application#queueResize} * * {@link app.Application#cancelResize} * @example * import { extensions, ResizePlugin } from 'pixi.js'; * * extensions.add(ResizePlugin); * @memberof app */ export declare class ResizePlugin { /** @ignore */ static extension: ExtensionMetadata; static resizeTo: Window | HTMLElement; static resize: () => void; static renderer: ResizeableRenderer; static queueResize: () => void; static render: () => void; private static _resizeId; private static _resizeTo; private static _cancelResize; /** * Initialize the plugin with scope of application instance * @static * @private * @param {object} [options] - See application options */ static init(options: ResizePluginOptions): void; /** * Clean up the ticker, scoped to application * @static * @private */ static destroy(): void; } /** * Application options for the {@link app.TickerPlugin}. * @memberof app * @property {boolean} [autoStart=true] - Automatically starts the rendering after the construction. * **Note**: Setting this parameter to `false` does NOT stop the shared ticker even if you set * `options.sharedTicker` to `true` in case that it is already started. Stop it yourself if needed. * @property {boolean} [sharedTicker=false] - Set to `true` to use `Ticker.shared`, `false` to create a new ticker. * If set to `false`, you cannot register a handler to occur before anything that runs on the shared ticker. * The system ticker will always run before both the shared ticker and the app ticker. */ export interface TickerPluginOptions { /** * Automatically starts the rendering after the construction. * **Note**: Setting this parameter to `false` does NOT stop the shared ticker even if you set * `options.sharedTicker` to `true` in case that it is already started. Stop it yourself if needed.
 * @memberof app.ApplicationOptions * @default true */ autoStart?: boolean; /** * Set to `true` to use `Ticker.shared`, `false` to create a new ticker. * If set to `false`, you cannot register a handler to occur before anything that runs on the shared ticker. * The system ticker will always run before both the shared ticker and the app ticker. * @memberof app.ApplicationOptions * @default false */ sharedTicker?: boolean; } /** * Middleware for Application's {@link ticker.Ticker} functionality. * * Adds the following methods to {@link app.Application}: * * {@link app.Application#start} * * {@link app.Application#stop} * * {@link app.Application#ticker} * @example * import { extensions, TickerPlugin } from 'pixi.js'; * * extensions.add(TickerPlugin); * @memberof app */ export declare class TickerPlugin { /** @ignore */ static extension: ExtensionMetadata; static start: () => void; static stop: () => void; private static _ticker; static ticker: Ticker; /** * Initialize the plugin with scope of application instance * @static * @private * @param {object} [options] - See application options */ static init(options?: PixiMixins.ApplicationOptions): void; /** * Clean up the ticker, scoped to application. * @static * @private */ static destroy(): void; } declare global { namespace PixiMixins { // Extend the Application interface with resize and ticker functionalities interface Application { resizeTo: Window | HTMLElement; resize(): void; queueResize(): void; cancelResize(): void; ticker: Ticker; stop(): void; start(): void; } // Combine ResizePluginOptions and TickerPluginOptions into ApplicationOptions interface ApplicationOptions extends ResizePluginOptions, TickerPluginOptions { } } } declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-interface interface AssetsPreferences { } } } export interface CullingMixinConstructor { /** * If set, this shape is used for culling instead of the bounds of this object. * It can improve the culling performance of objects with many children. * The culling area is defined in local space. * @memberof scene.Container# */ cullArea: Rectangle; /** * If set to true, this object will not be rendered when its bounds are out of frame. * * Culling has no effect on whether updateTransform is called. * @default false * @memberof scene.Container# */ cullable: boolean; /** * Determines if the children of the container can be culled. * Setting this to false allows PixiJS to bypass a recursive culling function, * which can help to optimize very complex scenes. * @default true * @memberof scene.Container# */ cullableChildren: boolean; } export declare const cullingMixin: CullingMixinConstructor; declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-interface interface Container extends Partial { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface ContainerOptions extends Partial { } } } /** * A {@link FederatedEvent} for wheel events. * @memberof events */ export declare class FederatedWheelEvent extends FederatedMouseEvent implements WheelEvent { /** * The units of `deltaX`, `deltaY`, and `deltaZ`. This is one of `DOM_DELTA_LINE`, * `DOM_DELTA_PAGE`, `DOM_DELTA_PIXEL`. */ deltaMode: number; /** Horizontal scroll amount */ deltaX: number; /** Vertical scroll amount */ deltaY: number; /** z-axis scroll amount. */ deltaZ: number; /** Units specified in pixels. */ static readonly DOM_DELTA_PIXEL = 0; /** Units specified in pixels.
*/ readonly DOM_DELTA_PIXEL = 0; /** Units specified in lines. */ static readonly DOM_DELTA_LINE = 1; /** Units specified in lines. */ readonly DOM_DELTA_LINE = 1; /** Units specified in pages. */ static readonly DOM_DELTA_PAGE = 2; /** Units specified in pages. */ readonly DOM_DELTA_PAGE = 2; } /** * The tracking data for each pointer held in the state of an {@link EventBoundary}. * * ```ts * pressTargetsByButton: { * [id: number]: Container[]; * }; * clicksByButton: { * [id: number]: { * clickCount: number; * target: Container; * timeStamp: number; * }; * }; * overTargets: Container[]; * ``` * @typedef {object} TrackingData * @property {Record.} pressTargetsByButton - The pressed containers' * propagation paths by each button of the pointer. * @property {Record.} clicksByButton - Holds clicking data for each button of the pointer. * @property {Container[]} overTargets - The Container propagation path over which the pointer is hovering. * @memberof events */ export type TrackingData = { pressTargetsByButton: { [id: number]: Container[]; }; clicksByButton: { [id: number]: { clickCount: number; target: Container; timeStamp: number; }; }; overTargets: Container[]; }; type EmitterListener = { fn(...args: any[]): any; context: any; once: boolean; }; /** * Internal storage of event listeners in EventEmitter. * @ignore */ export type EmitterListeners = Record; /** * The type of cursor to use when the mouse pointer is hovering over. * @see https://developer.mozilla.org/en-US/docs/Web/CSS/cursor * * Can be any valid CSS cursor value: * `auto`, `default`, `none`, `context-menu`, `help`, `pointer`, `progress`, * `wait`, `cell`, `crosshair`, `text`, `verticaltext`, `alias`, `copy`, `move`, * `nodrop`, `notallowed`, `eresize`, `nresize`, `neresize`, `nwresize`, `sresize`, * `seresize`, `swresize`, `wresize`, `nsresize`, `ewresize`, `neswresize`, `colresize`, * `nwseresize`, `rowresize`, `allscroll`, `zoomin`, `zoomout`, `grab`, `grabbing` * @memberof events */ export type Cursor = "auto" | "default" | "none" | "context-menu" | "help" | "pointer" | "progress" | "wait" | "cell" | "crosshair" | "text" | "vertical-text" | "alias" | "copy" | "move" | "no-drop" | "not-allowed" | "e-resize" | "n-resize" | "ne-resize" | "nw-resize" | "s-resize" | "se-resize" | "sw-resize" | "w-resize" | "ns-resize" | "ew-resize" | "nesw-resize" | "col-resize" | "nwse-resize" | "row-resize" | "all-scroll" | "zoom-in" | "zoom-out" | "grab" | "grabbing"; /** * The hit area specifies the area for which pointer events should be captured by this event target. * @memberof events */ export interface IHitArea { /** * Checks if the x and y coordinates given are contained within this hit area. * @returns Whether the x and y coordinates are contained within this hit area. */ contains(x: number, y: number): boolean; } /** * Function type for handlers, e.g., onclick * @memberof events */ export type FederatedEventHandler = (event: T) => void; /** * The type of interaction a Container can be. * This is the {@link scene.Container#eventMode|Container.eventMode} property of any {@link scene.Container}. * * Can be one of the following: * - `'none'`: Ignores all interaction events, even on its children. * - `'passive'`: **(default)** Does not emit events and ignores all hit testing on itself and non-interactive children. * Interactive children will still emit events. * - `'auto'`: Does not emit events but is hit tested if parent is interactive. Same as `interactive = false` in v7 * - `'static'`: Emit events and is hit tested. 
Same as `interaction = true` in v7 * - `'dynamic'`: Emits events and is hit tested but will also receive mock interaction events fired from a ticker to * allow for interaction when the mouse isn't moving * * `none` and `passive` are useful for optimizing interaction events on objects as it reduces the number of hit tests * PixiJS has to do. `auto` is useful for when you want to recreate how the DOM handles interaction events with * `pointer-events: auto`. * @since 7.2.0 * @memberof events */ export type EventMode = "none" | "passive" | "auto" | "static" | "dynamic"; /** * The properties available for any interactive object. * @memberof events */ export interface FederatedOptions { /** The cursor preferred when the mouse pointer is hovering over. */ cursor?: Cursor | string; /** The mode of interaction for this object */ eventMode?: EventMode; /** Whether this event target should fire UI events. */ interactive?: boolean; /** Whether this event target has any children that need UI events. This can be used optimize event propagation. */ interactiveChildren?: boolean; /** The hit-area specifies the area for which pointer events should be captured by this event target. */ hitArea?: IHitArea | null; /** Handler for 'click' event */ onclick?: FederatedEventHandler | null; /** Handler for 'mousedown' event */ onmousedown?: FederatedEventHandler | null; /** Handler for 'mouseenter' event */ onmouseenter?: FederatedEventHandler | null; /** Handler for 'mouseleave' event */ onmouseleave?: FederatedEventHandler | null; /** Handler for 'mousemove' event */ onmousemove?: FederatedEventHandler | null; /** Handler for 'globalmousemove' event */ onglobalmousemove?: FederatedEventHandler | null; /** Handler for 'mouseout' event */ onmouseout?: FederatedEventHandler | null; /** Handler for 'mouseover' event */ onmouseover?: FederatedEventHandler | null; /** Handler for 'mouseup' event */ onmouseup?: FederatedEventHandler | null; /** Handler for 'mouseupoutside' event */ onmouseupoutside?: FederatedEventHandler | null; /** Handler for 'pointercancel' event */ onpointercancel?: FederatedEventHandler | null; /** Handler for 'pointerdown' event */ onpointerdown?: FederatedEventHandler | null; /** Handler for 'pointerenter' event */ onpointerenter?: FederatedEventHandler | null; /** Handler for 'pointerleave' event */ onpointerleave?: FederatedEventHandler | null; /** Handler for 'pointermove' event */ onpointermove?: FederatedEventHandler | null; /** Handler for 'globalpointermove' event */ onglobalpointermove?: FederatedEventHandler | null; /** Handler for 'pointerout' event */ onpointerout?: FederatedEventHandler | null; /** Handler for 'pointerover' event */ onpointerover?: FederatedEventHandler | null; /** Handler for 'pointertap' event */ onpointertap?: FederatedEventHandler | null; /** Handler for 'pointerup' event */ onpointerup?: FederatedEventHandler | null; /** Handler for 'pointerupoutside' event */ onpointerupoutside?: FederatedEventHandler | null; /** Handler for 'rightclick' event */ onrightclick?: FederatedEventHandler | null; /** Handler for 'rightdown' event */ onrightdown?: FederatedEventHandler | null; /** Handler for 'rightup' event */ onrightup?: FederatedEventHandler | null; /** Handler for 'rightupoutside' event */ onrightupoutside?: FederatedEventHandler | null; /** Handler for 'tap' event */ ontap?: FederatedEventHandler | null; /** Handler for 'touchcancel' event */ ontouchcancel?: FederatedEventHandler | null; /** Handler for 'touchend' event */ ontouchend?: FederatedEventHandler | 
null; /** Handler for 'touchendoutside' event */ ontouchendoutside?: FederatedEventHandler | null; /** Handler for 'touchmove' event */ ontouchmove?: FederatedEventHandler | null; /** Handler for 'globaltouchmove' event */ onglobaltouchmove?: FederatedEventHandler | null; /** Handler for 'touchstart' event */ ontouchstart?: FederatedEventHandler | null; /** Handler for 'wheel' event */ onwheel?: FederatedEventHandler | null; } type AddListenerOptions = boolean | AddEventListenerOptions; type RemoveListenerOptions = boolean | EventListenerOptions; export interface IFederatedContainer extends FederatedOptions { /** The parent of this event target. */ readonly parent?: Container; /** The children of this event target. */ readonly children?: ReadonlyArray; _internalEventMode: EventMode; /** Returns true if the Container has interactive 'static' or 'dynamic' */ isInteractive: () => boolean; addEventListener(type: K, listener: (e: AllFederatedEventMap[K]) => any, options?: AddListenerOptions): void; addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: AddListenerOptions): void; removeEventListener(type: K, listener: (e: AllFederatedEventMap[K]) => any, options?: RemoveListenerOptions): void; removeEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: RemoveListenerOptions): void; dispatchEvent(e: FederatedEvent): boolean; } export declare const FederatedContainer: IFederatedContainer; /** * Event boundaries are "barriers" where events coming from an upstream scene are modified before downstream propagation. * * ## Root event boundary * * The {@link EventSystem#rootBoundary rootBoundary} handles events coming from the <canvas />. * {@link EventSystem} handles the normalization from native {@link https://dom.spec.whatwg.org/#event Events} * into {@link FederatedEvent FederatedEvents}. The rootBoundary then does the hit-testing and event dispatch * for the upstream normalized event. * * ## Additional event boundaries * * An additional event boundary may be desired within an application's scene graph. For example, if a portion of the scene * is flat with many children at one level - a spatial hash may be needed to accelerate hit testing. In this scenario, the * container can be detached from the scene and glued using a custom event boundary. * * ```ts * import { Container } from 'pixi.js'; * import { EventBoundary } from 'pixi.js'; * import { SpatialHash } from 'pixi-spatial-hash'; * * class HashedHitTestingEventBoundary extends EventBoundary * { * private spatialHash: SpatialHash; * * constructor(scene: Container, spatialHash: SpatialHash) * { * super(scene); * this.spatialHash = spatialHash; * } * * hitTestRecursive(...) * { * // TODO: If target === this.rootTarget, then use spatial hash to get a * // list of possible children that match the given (x,y) coordinates. * } * } * * class VastScene extends Container * { * protected eventBoundary: EventBoundary; * protected scene: Container; * protected spatialHash: SpatialHash; * * constructor() * { * super(); * this.scene = new Container(); * this.spatialHash = new SpatialHash(); * this.eventBoundary = new HashedHitTestingEventBoundary(this.scene, this.spatialHash); * * // Populate this.scene with a ton of children, while updating this.spatialHash * } * } * ``` * @memberof events */ export declare class EventBoundary { /** * The root event-target residing below the event boundary. * All events are dispatched trickling down and bubbling up to this `rootTarget`.
*/ rootTarget: Container; /** * Emits events after they were dispatched into the scene graph. * * This can be used for global events listening, regardless of the scene graph being used. It should * not be used by interactive libraries for normal use. * * Special events that do not bubble all the way to the root target are not emitted from here, * e.g. pointerenter, pointerleave, click. */ dispatch: EventEmitter; /** The cursor preferred by the event targets underneath this boundary. */ cursor: Cursor | string; /** * This flag would emit `pointermove`, `touchmove`, and `mousemove` events on all Containers. * * The `moveOnAll` semantics mirror those of earlier versions of PixiJS. This was disabled in favor of * the Pointer Event API's approach. */ moveOnAll: boolean; /** Enables the global move events. `globalpointermove`, `globaltouchmove`, and `globalmousemove` */ enableGlobalMoveEvents: boolean; /** * Maps event types to forwarding handles for them. * * {@link EventBoundary EventBoundary} provides mapping for "pointerdown", "pointermove", * "pointerout", "pointerleave", "pointerover", "pointerup", and "pointerupoutside" by default. * @see EventBoundary#addEventMapping */ protected mappingTable: Record void; priority: number; }>>; /** * State object for mapping methods. * @see EventBoundary#trackingData */ protected mappingState: Record; /** * The event pool maps event constructors to an free pool of instances of those specific events. * @see EventBoundary#allocateEvent * @see EventBoundary#freeEvent */ protected eventPool: Map; /** Every interactive element gathered from the scene. Only used in `pointermove` */ private readonly _allInteractiveElements; /** Every element that passed the hit test. Only used in `pointermove` */ private _hitElements; /** Whether or not to collect all the interactive elements from the scene. Enabled in `pointermove` */ private _isPointerMoveEvent; /** * @param rootTarget - The holder of the event boundary. */ constructor(rootTarget?: Container); /** * Adds an event mapping for the event `type` handled by `fn`. * * Event mappings can be used to implement additional or custom events. They take an event * coming from the upstream scene (or directly from the {@link EventSystem}) and dispatch new downstream events * generally trickling down and bubbling up to {@link EventBoundary.rootTarget this.rootTarget}. * * To modify the semantics of existing events, the built-in mapping methods of EventBoundary should be overridden * instead. * @param type - The type of upstream event to map. * @param fn - The mapping method. The context of this function must be bound manually, if desired. */ addEventMapping(type: string, fn: (e: FederatedEvent) => void): void; /** * Dispatches the given event * @param e - The event to dispatch. * @param type - The type of event to dispatch. Defaults to `e.type`. */ dispatchEvent(e: FederatedEvent, type?: string): void; /** * Maps the given upstream event through the event boundary and propagates it downstream. * @param e - The event to map. */ mapEvent(e: FederatedEvent): void; /** * Finds the Container that is the target of a event at the given coordinates. * * The passed (x,y) coordinates are in the world space above this event boundary. * @param x - The x coordinate of the event. * @param y - The y coordinate of the event. */ hitTest(x: number, y: number): Container; /** * Propagate the passed event from from {@link EventBoundary.rootTarget this.rootTarget} to its * target {@code e.target}. * @param e - The event to propagate. 
* @param type - The type of event to propagate. Defaults to `e.type`. */ propagate(e: FederatedEvent, type?: string): void; /** * Emits the event {@code e} to all interactive containers. The event is propagated in the bubbling phase always. * * This is used in the `globalpointermove` event. * @param e - The emitted event. * @param type - The listeners to notify. * @param targets - The targets to notify. */ all(e: FederatedEvent, type?: string | string[], targets?: Container[]): void; /** * Finds the propagation path from {@link EventBoundary.rootTarget rootTarget} to the passed * {@code target}. The last element in the path is {@code target}. * @param target - The target to find the propagation path to. */ propagationPath(target: Container): Container[]; protected hitTestMoveRecursive(currentTarget: Container, eventMode: EventMode, location: Point, testFn: (object: Container, pt: Point) => boolean, pruneFn: (object: Container, pt: Point) => boolean, ignore?: boolean): Container[]; /** * Recursive implementation for {@link EventBoundary.hitTest hitTest}. * @param currentTarget - The Container that is to be hit tested. * @param eventMode - The event mode for the `currentTarget` or one of its parents. * @param location - The location that is being tested for overlap. * @param testFn - Callback that determines whether the target passes hit testing. This callback * can assume that `pruneFn` failed to prune the container. * @param pruneFn - Callback that determiness whether the target and all of its children * cannot pass the hit test. It is used as a preliminary optimization to prune entire subtrees * of the scene graph. * @returns An array holding the hit testing target and all its ancestors in order. The first element * is the target itself and the last is {@link EventBoundary.rootTarget rootTarget}. This is the opposite * order w.r.t. the propagation path. If no hit testing target is found, null is returned. */ protected hitTestRecursive(currentTarget: Container, eventMode: EventMode, location: Point, testFn: (object: Container, pt: Point) => boolean, pruneFn: (object: Container, pt: Point) => boolean): Container[]; private _isInteractive; private _interactivePrune; /** * Checks whether the container or any of its children cannot pass the hit test at all. * * {@link EventBoundary}'s implementation uses the {@link Container.hitArea hitArea} * and {@link Container._maskEffect} for pruning. * @param container - The container to prune. * @param location - The location to test for overlap. */ protected hitPruneFn(container: Container, location: Point): boolean; /** * Checks whether the container passes hit testing for the given location. * @param container - The container to test. * @param location - The location to test for overlap. * @returns - Whether `container` passes hit testing for `location`. */ protected hitTestFn(container: Container, location: Point): boolean; /** * Notify all the listeners to the event's `currentTarget`. * * If the `currentTarget` contains the property `on`, then it is called here, * simulating the behavior from version 6.x and prior. * @param e - The event passed to the target. * @param type - The type of event to notify. Defaults to `e.type`. */ protected notifyTarget(e: FederatedEvent, type?: string): void; /** * Maps the upstream `pointerdown` events to a downstream `pointerdown` event. * * `touchstart`, `rightdown`, `mousedown` events are also dispatched for specific pointer types. * @param from - The upstream `pointerdown` event. 
*/ protected mapPointerDown(from: FederatedEvent): void; /** * Maps the upstream `pointermove` to downstream `pointerout`, `pointerover`, and `pointermove` events, in that order. * * The tracking data for the specific pointer has an updated `overTarget`. `mouseout`, `mouseover`, * `mousemove`, and `touchmove` events are fired as well for specific pointer types. * @param from - The upstream `pointermove` event. */ protected mapPointerMove(from: FederatedEvent): void; /** * Maps the upstream `pointerover` to downstream `pointerover` and `pointerenter` events, in that order. * * The tracking data for the specific pointer gets a new `overTarget`. * @param from - The upstream `pointerover` event. */ protected mapPointerOver(from: FederatedEvent): void; /** * Maps the upstream `pointerout` to downstream `pointerout`, `pointerleave` events, in that order. * * The tracking data for the specific pointer is cleared of a `overTarget`. * @param from - The upstream `pointerout` event. */ protected mapPointerOut(from: FederatedEvent): void; /** * Maps the upstream `pointerup` event to downstream `pointerup`, `pointerupoutside`, * and `click`/`rightclick`/`pointertap` events, in that order. * * The `pointerupoutside` event bubbles from the original `pointerdown` target to the most specific * ancestor of the `pointerdown` and `pointerup` targets, which is also the `click` event's target. `touchend`, * `rightup`, `mouseup`, `touchendoutside`, `rightupoutside`, `mouseupoutside`, and `tap` are fired as well for * specific pointer types. * @param from - The upstream `pointerup` event. */ protected mapPointerUp(from: FederatedEvent): void; /** * Maps the upstream `pointerupoutside` event to a downstream `pointerupoutside` event, bubbling from the original * `pointerdown` target to `rootTarget`. * * (The most specific ancestor of the `pointerdown` event and the `pointerup` event must the * `{@link EventBoundary}'s root because the `pointerup` event occurred outside of the boundary.) * * `touchendoutside`, `mouseupoutside`, and `rightupoutside` events are fired as well for specific pointer * types. The tracking data for the specific pointer is cleared of a `pressTarget`. * @param from - The upstream `pointerupoutside` event. */ protected mapPointerUpOutside(from: FederatedEvent): void; /** * Maps the upstream `wheel` event to a downstream `wheel` event. * @param from - The upstream `wheel` event. */ protected mapWheel(from: FederatedEvent): void; /** * Finds the most specific event-target in the given propagation path that is still mounted in the scene graph. * * This is used to find the correct `pointerup` and `pointerout` target in the case that the original `pointerdown` * or `pointerover` target was unmounted from the scene graph. * @param propagationPath - The propagation path was valid in the past. * @returns - The most specific event-target still mounted at the same location in the scene graph. */ protected findMountedTarget(propagationPath: Container[]): Container; /** * Creates an event whose {@code originalEvent} is {@code from}, with an optional `type` and `target` override. * * The event is allocated using {@link EventBoundary#allocateEvent this.allocateEvent}. * @param from - The {@code originalEvent} for the returned event. * @param [type=from.type] - The type of the returned event. * @param target - The target of the returned event. 
*/ protected createPointerEvent(from: FederatedPointerEvent, type?: string, target?: Container): FederatedPointerEvent; /** * Creates a wheel event whose {@code originalEvent} is {@code from}. * * The event is allocated using {@link EventBoundary#allocateEvent this.allocateEvent}. * @param from - The upstream wheel event. */ protected createWheelEvent(from: FederatedWheelEvent): FederatedWheelEvent; /** * Clones the event {@code from}, with an optional {@code type} override. * * The event is allocated using {@link EventBoundary#allocateEvent this.allocateEvent}. * @param from - The event to clone. * @param [type=from.type] - The type of the returned event. */ protected clonePointerEvent(from: FederatedPointerEvent, type?: string): FederatedPointerEvent; /** * Copies wheel {@link FederatedWheelEvent} data from {@code from} into {@code to}. * * The following properties are copied: * + deltaMode * + deltaX * + deltaY * + deltaZ * @param from - The event to copy data from. * @param to - The event to copy data into. */ protected copyWheelData(from: FederatedWheelEvent, to: FederatedWheelEvent): void; /** * Copies pointer {@link FederatedPointerEvent} data from {@code from} into {@code to}. * * The following properties are copied: * + pointerId * + width * + height * + isPrimary * + pointerType * + pressure * + tangentialPressure * + tiltX * + tiltY * @param from - The event to copy data from. * @param to - The event to copy data into. */ protected copyPointerData(from: FederatedEvent, to: FederatedEvent): void; /** * Copies mouse {@link FederatedMouseEvent} data from {@code from} to {@code to}. * * The following properties are copied: * + altKey * + button * + buttons * + clientX * + clientY * + metaKey * + movementX * + movementY * + pageX * + pageY * + x * + y * + screen * + shiftKey * + global * @param from - The event to copy data from. * @param to - The event to copy data into. */ protected copyMouseData(from: FederatedEvent, to: FederatedEvent): void; /** * Copies base {@link FederatedEvent} data from {@code from} into {@code to}. * * The following properties are copied: * + isTrusted * + srcElement * + timeStamp * + type * @param from - The event to copy data from. * @param to - The event to copy data into. */ protected copyData(from: FederatedEvent, to: FederatedEvent): void; /** * @param id - The pointer ID. * @returns The tracking data stored for the given pointer. If no data exists, a blank * state will be created. */ protected trackingData(id: number): TrackingData; /** * Allocate a specific type of event from {@link EventBoundary#eventPool this.eventPool}. * * This allocation is constructor-agnostic, as long as it only takes one argument - this event * boundary. * @param constructor - The event's constructor. */ protected allocateEvent(constructor: { new (boundary: EventBoundary): T; }): T; /** * Frees the event and puts it back into the event pool. * * It is illegal to reuse the event until it is allocated again, using `this.allocateEvent`. * * It is also advised that events not allocated from {@link EventBoundary#allocateEvent this.allocateEvent} * not be freed. This is because of the possibility that the same event is freed twice, which can cause * it to be allocated twice & result in overwriting. * @param event - The event to be freed. * @throws Error if the event is managed by another event boundary. */ protected freeEvent(event: T): void; /** * Similar to {@link EventEmitter.emit}, except it stops if the `propagationImmediatelyStopped` flag * is set on the event. 
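*
* For example (a minimal sketch; `sprite` is assumed to be an interactive container):
* ```ts
* sprite.on('pointerdown', (e) => e.stopImmediatePropagation());
* sprite.on('pointerdown', () => console.log('never reached'));
* ```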
* @param e - The event to call each listener with. * @param type - The event key. */ private _notifyListeners; } /** * A PixiJS compatible {@code Touch} event. * @memberof events */ export interface PixiTouch extends Touch { button: number; buttons: number; isPrimary: boolean; width: number; height: number; tiltX: number; tiltY: number; pointerType: string; pointerId: number; pressure: number; twist: number; tangentialPressure: number; layerX: number; layerY: number; offsetX: number; offsetY: number; isNormalized: boolean; type: string; } /** * A DOM-compatible synthetic event implementation that is "forwarded" on behalf of an original * FederatedEvent or native {@link https://dom.spec.whatwg.org/#event Event}. * @typeParam N - The type of native event held. * @memberof events */ export declare class FederatedEvent implements UIEvent { /** Flags whether this event bubbles. This will take effect only if it is set before propagation. */ bubbles: boolean; /** @deprecated since 7.0.0 */ cancelBubble: boolean; /** * Flags whether this event can be canceled using {@link FederatedEvent.preventDefault}. This is always * false (for now). */ readonly cancelable = false; /** * Flag added for compatibility with DOM {@code Event}. It is not used in the Federated Events * API. * @see https://dom.spec.whatwg.org/#dom-event-composed */ readonly composed = false; /** The listeners of the event target that are being notified. */ currentTarget: Container; /** Flags whether the default response of the user agent was prevented through this event. */ defaultPrevented: boolean; /** * The propagation phase. * @default {@link FederatedEvent.NONE} */ eventPhase: number; /** Flags whether this is a user-trusted event. */ isTrusted: boolean; /** @deprecated since 7.0.0 */ returnValue: boolean; /** @deprecated since 7.0.0 */ srcElement: EventTarget; /** The event target that this will be dispatched to. */ target: Container; /** The timestamp of when the event was created. */ timeStamp: number; /** The type of event, e.g. {@code "mouseup"}. */ type: string; /** The native event that caused the foremost original event. */ nativeEvent: N; /** The original event that caused this event, if any. */ originalEvent: FederatedEvent; /** Flags whether propagation was stopped. */ propagationStopped: boolean; /** Flags whether propagation was immediately stopped. */ propagationImmediatelyStopped: boolean; /** The composed path of the event's propagation. The {@code target} is at the end. */ path: Container[]; /** The {@link EventBoundary} that manages this event. Null for root events. */ readonly manager: EventBoundary; /** Event-specific detail */ detail: number; /** The global Window object. */ view: WindowProxy; /** * Not supported. * @deprecated since 7.0.0 */ which: number; /** The coordinates of the event relative to the nearest DOM layer. This is a non-standard property. */ layer: Point; /** @readonly */ get layerX(): number; /** @readonly */ get layerY(): number; /** The coordinates of the event relative to the DOM document. This is a non-standard property. */ page: Point; /** @readonly */ get pageX(): number; /** @readonly */ get pageY(): number; /** * @param manager - The event boundary which manages this event. Propagation can only occur * within the boundary's jurisdiction. */ constructor(manager: EventBoundary); /** * Fallback for the deprecated {@code InteractionEvent.data}. * @deprecated since 7.0.0 */ get data(): this; /** The propagation path for this event. Alias for {@link EventBoundary.propagationPath}. 
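*
* For example (a minimal sketch; `sprite` is assumed to be an interactive container):
* ```ts
* sprite.on('pointertap', (e) => {
*     // Containers from the boundary's root down to the event target.
*     console.log(e.composedPath().length);
* });
* ```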
*/ composedPath(): Container[]; /** * Unimplemented method included for implementing the DOM interface {@code Event}. It will throw an {@code Error}. * @deprecated * @param _type * @param _bubbles * @param _cancelable */ initEvent(_type: string, _bubbles?: boolean, _cancelable?: boolean): void; /** * Unimplemented method included for implementing the DOM interface {@code UIEvent}. It will throw an {@code Error}. * @deprecated * @param _typeArg * @param _bubblesArg * @param _cancelableArg * @param _viewArg * @param _detailArg */ initUIEvent(_typeArg: string, _bubblesArg?: boolean, _cancelableArg?: boolean, _viewArg?: Window | null, _detailArg?: number): void; /** Prevent default behavior of PixiJS and the user agent. */ preventDefault(): void; /** * Stop this event from propagating to any additional listeners, including on the * {@link FederatedEventTarget.currentTarget currentTarget} and also the following * event targets on the propagation path. */ stopImmediatePropagation(): void; /** * Stop this event from propagating to the next {@link FederatedEventTarget}. The rest of the listeners * on the {@link FederatedEventTarget.currentTarget currentTarget} will still be notified. */ stopPropagation(): void; readonly NONE = 0; readonly CAPTURING_PHASE = 1; readonly AT_TARGET = 2; readonly BUBBLING_PHASE = 3; } /** * A {@link FederatedEvent} for mouse events. * @memberof events */ export declare class FederatedMouseEvent extends FederatedEvent implements MouseEvent { /** Whether the "alt" key was pressed when this mouse event occurred. */ altKey: boolean; /** The specific button that was pressed in this mouse event. */ button: number; /** The buttons depressed when this event occurred. */ buttons: number; /** Whether the "control" key was pressed when this mouse event occurred. */ ctrlKey: boolean; /** Whether the "meta" key was pressed when this mouse event occurred. */ metaKey: boolean; /** This is currently not implemented in the Federated Events API. */ relatedTarget: EventTarget; /** Whether the "shift" key was pressed when this mouse event occurred. */ shiftKey: boolean; /** The coordinates of the mouse event relative to the canvas. */ client: Point; /** @readonly */ get clientX(): number; /** @readonly */ get clientY(): number; /** * Alias for {@link FederatedMouseEvent.clientX this.clientX}. * @readonly */ get x(): number; /** * Alias for {@link FederatedMouseEvent.clientY this.clientY}. * @readonly */ get y(): number; /** The number of clicks that occurred within 200ms of each other. */ detail: number; /** The movement in this pointer relative to the last `mousemove` event. */ movement: Point; /** @readonly */ get movementX(): number; /** @readonly */ get movementY(): number; /** The offset of the pointer coordinates w.r.t. target Container in world space. This is not supported at the moment. */ offset: Point; /** @readonly */ get offsetX(): number; /** @readonly */ get offsetY(): number; /** The pointer coordinates in world space. */ global: Point; /** @readonly */ get globalX(): number; /** @readonly */ get globalY(): number; /** * The pointer coordinates in the renderer's {@link Renderer.screen screen}. This has slightly * different semantics than native PointerEvent screenX/screenY. */ screen: Point; /** * The pointer coordinates in the renderer's screen. Alias for {@code screen.x}. * @readonly */ get screenX(): number; /** * The pointer coordinates in the renderer's screen. Alias for {@code screen.y}. 
* @readonly */ get screenY(): number; /** * This will return the local coordinates of the specified container for this InteractionData * @param {Container} container - The Container that you would like the local * coords of * @param {PointData} point - A Point object in which to store the value, optional (otherwise * will create a new point) * @param {PointData} globalPos - A Point object containing your custom global coords, optional * (otherwise will use the current global coords) * @returns - A point containing the coordinates of the InteractionData position relative * to the Container */ getLocalPosition
(container: Container, point?: P, globalPos?: PointData): P; /** * Whether the modifier key was pressed when this event natively occurred. * @param key - The modifier key. */ getModifierState(key: string): boolean; /** * Not supported. * @param _typeArg * @param _canBubbleArg * @param _cancelableArg * @param _viewArg * @param _detailArg * @param _screenXArg * @param _screenYArg * @param _clientXArg * @param _clientYArg * @param _ctrlKeyArg * @param _altKeyArg * @param _shiftKeyArg * @param _metaKeyArg * @param _buttonArg * @param _relatedTargetArg * @deprecated since 7.0.0 */ initMouseEvent(_typeArg: string, _canBubbleArg: boolean, _cancelableArg: boolean, _viewArg: Window, _detailArg: number, _screenXArg: number, _screenYArg: number, _clientXArg: number, _clientYArg: number, _ctrlKeyArg: boolean, _altKeyArg: boolean, _shiftKeyArg: boolean, _metaKeyArg: boolean, _buttonArg: number, _relatedTargetArg: EventTarget): void; } /** * A {@link FederatedEvent} for pointer events. * @memberof events */ export declare class FederatedPointerEvent extends FederatedMouseEvent implements PointerEvent { /** * The unique identifier of the pointer. * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/pointerId} */ pointerId: number; /** * The width of the pointer's contact along the x-axis, measured in CSS pixels. * radiusX of TouchEvents will be represented by this value. * @see https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/width */ width: number; /** * The height of the pointer's contact along the y-axis, measured in CSS pixels. * radiusY of TouchEvents will be represented by this value. * @see https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/height */ height: number; /** * Indicates whether or not the pointer device that created the event is the primary pointer. * @see https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/isPrimary */ isPrimary: boolean; /** * The type of pointer that triggered the event. * @see https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/pointerType */ pointerType: string; /** * Pressure applied by the pointing device during the event. * A Touch's force property will be represented by this value. * @see https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/pressure */ pressure: number; /** * Barrel pressure on a stylus pointer. * @see https://w3c.github.io/pointerevents/#pointerevent-interface */ tangentialPressure: number; /** * The angle, in degrees, between the pointer device and the screen. * @see https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/tiltX */ tiltX: number; /** * The angle, in degrees, between the pointer device and the screen. * @see https://developer.mozilla.org/en-US/docs/Web/API/PointerEvent/tiltY */ tiltY: number; /** * Twist of a stylus pointer. * @see https://w3c.github.io/pointerevents/#pointerevent-interface */ twist: number; /** The number of clicks that occurred within 200ms of each other. 
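*
* For example (a minimal sketch; treats two taps in quick succession as a double-tap):
* ```ts
* sprite.on('pointertap', (e) => {
*     if (e.detail === 2) console.log('double tap');
* });
* ```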
*/ detail: number; getCoalescedEvents(): PointerEvent[]; getPredictedEvents(): PointerEvent[]; } export type FederatedEventMap = { click: FederatedPointerEvent; mousedown: FederatedPointerEvent; mouseenter: FederatedPointerEvent; mouseleave: FederatedPointerEvent; mousemove: FederatedPointerEvent; mouseout: FederatedPointerEvent; mouseover: FederatedPointerEvent; mouseup: FederatedPointerEvent; mouseupoutside: FederatedPointerEvent; pointercancel: FederatedPointerEvent; pointerdown: FederatedPointerEvent; pointerenter: FederatedPointerEvent; pointerleave: FederatedPointerEvent; pointermove: FederatedPointerEvent; pointerout: FederatedPointerEvent; pointerover: FederatedPointerEvent; pointertap: FederatedPointerEvent; pointerup: FederatedPointerEvent; pointerupoutside: FederatedPointerEvent; rightclick: FederatedPointerEvent; rightdown: FederatedPointerEvent; rightup: FederatedPointerEvent; rightupoutside: FederatedPointerEvent; tap: FederatedPointerEvent; touchcancel: FederatedPointerEvent; touchend: FederatedPointerEvent; touchendoutside: FederatedPointerEvent; touchmove: FederatedPointerEvent; touchstart: FederatedPointerEvent; wheel: FederatedWheelEvent; }; export type GlobalFederatedEventMap = { globalmousemove: FederatedPointerEvent; globalpointermove: FederatedPointerEvent; globaltouchmove: FederatedPointerEvent; }; export type AllFederatedEventMap = FederatedEventMap & GlobalFederatedEventMap; export type FederatedEventEmitterTypes = { [K in keyof FederatedEventMap as K | `${K}capture`]: [ event: FederatedEventMap[K] ]; } & { [K in keyof GlobalFederatedEventMap]: [ event: GlobalFederatedEventMap[K] ]; }; /** @ignore */ export interface EventSystemOptions { /** * The default event mode for all display objects. * Only relevant when the events system is included (as it is in the **pixi.js** and **pixi.js-legacy** bundles); otherwise it is ignored. */ eventMode?: EventMode; /** * The event features that are enabled by the EventSystem. * Only relevant when the events system is included (as it is in the **pixi.js** and **pixi.js-legacy** bundles); otherwise it is ignored. * @example * const app = new Application({ * view: canvas, * events: { * move: true, * globalMove: false, * click: true, * wheel: true, * }, * }); */ eventFeatures?: Partial<EventSystemFeatures>; } /** * The event features that are enabled by the EventSystem. * Only relevant when the events system is included (as it is in the **pixi.js** and **pixi.js-legacy** bundles); otherwise it is ignored. * @since 7.2.0 * @memberof events */ export interface EventSystemFeatures { /** * Enables pointer events associated with pointer movement: * - `pointermove` / `mousemove` / `touchmove` * - `pointerout` / `mouseout` * - `pointerover` / `mouseover` */ move: boolean; /** * Enables global pointer move events: * - `globalpointermove` * - `globalmousemove` * - `globaltouchmove` */ globalMove: boolean; /** * Enables pointer events associated with clicking: * - `pointerup` / `mouseup` / `touchend` / `rightup` * - `pointerupoutside` / `mouseupoutside` / `touchendoutside` / `rightupoutside` * - `pointerdown` / `mousedown` / `touchstart` / `rightdown` * - `click` / `tap` */ click: boolean; /** Enables wheel events. */ wheel: boolean; } /** * The system for handling UI events. * @memberof events */ export declare class EventSystem implements System$1 { /** @ignore */ static extension: ExtensionMetadata; /** * The event features that are enabled by the EventSystem. * Only relevant when the events system is included (as it is in the **pixi.js** and **pixi.js-legacy** bundles); otherwise it is ignored. 
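*
* For example (a minimal sketch; adjusts the defaults used by event systems created afterwards):
* ```ts
* EventSystem.defaultEventFeatures.globalMove = false;
* EventSystem.defaultEventFeatures.wheel = true;
* ```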
* @since 7.2.0 */ static defaultEventFeatures: EventSystemFeatures; private static _defaultEventMode; /** * The default interaction mode for all display objects. * @see Container.eventMode * @type {EventMode} * @readonly * @since 7.2.0 */ static get defaultEventMode(): EventMode; /** * The {@link EventBoundary} for the stage. * * The {@link EventBoundary#rootTarget rootTarget} of this root boundary is automatically set to * the last rendered object before any event processing is initiated. This means the main scene * needs to be rendered at least once before UI events will start propagating. * * The root boundary should only be changed during initialization. Otherwise, any state held by the * event boundary may be lost (like hovered & pressed Containers). */ readonly rootBoundary: EventBoundary; /** Does the device support touch events https://www.w3.org/TR/touch-events/ */ readonly supportsTouchEvents: boolean; /** Does the device support pointer events https://www.w3.org/Submission/pointer-events/ */ readonly supportsPointerEvents: boolean; /** * Should default browser actions automatically be prevented. * Does not apply to pointer events for backwards compatibility: * `preventDefault` on pointer events stops mouse events from firing. * Thus, for every pointer event, there will always be either a mouse or touch event alongside it. * @default true */ autoPreventDefault: boolean; /** * Dictionary of how different cursor modes are handled. Strings are handled as CSS cursor * values, objects are handled as dictionaries of CSS values for {@code domElement}, * and functions are called instead of changing the CSS. * Default CSS cursor values are provided for 'default' and 'pointer' modes. */ cursorStyles: Record<string, string | ((mode: string) => void) | CSSStyleDeclaration>; /** * The DOM element to which the root event listeners are bound. This is automatically set to * the renderer's {@link Renderer#view view}. */ domElement: HTMLElement; /** The resolution used to convert between the DOM client space and world space. */ resolution: number; /** The renderer managing this {@link EventSystem}. */ renderer: Renderer; /** * The event features that are enabled by the EventSystem. * Only relevant when the events system is included (as it is in the **pixi.js** and **pixi.js-legacy** bundles); otherwise it is ignored. * @since 7.2.0 * @example * const app = new Application() * app.renderer.events.features.globalMove = false * * // to override all features use Object.assign * Object.assign(app.renderer.events.features, { * move: false, * globalMove: false, * click: false, * wheel: false, * }) */ readonly features: EventSystemFeatures; private _currentCursor; private readonly _rootPointerEvent; private readonly _rootWheelEvent; private _eventsAdded; /** * @param {Renderer} renderer */ constructor(renderer: Renderer); /** * Runner init called, view is available at this point. * @ignore */ init(options: EventSystemOptions): void; /** * Handle changing resolution. * @ignore */ resolutionChange(resolution: number): void; /** Destroys all event listeners and detaches the renderer. */ destroy(): void; /** * Sets the current cursor mode, handling any callbacks or CSS style changes. * @param mode - cursor mode, a key from the cursorStyles dictionary */ setCursor(mode: string): void; /** * The global pointer event. * Useful for getting the pointer position without listening to events. * @since 7.2.0 */ get pointer(): Readonly<FederatedPointerEvent>; /** * Event handler for pointer down events on {@link EventSystem#domElement this.domElement}. * @param nativeEvent - The native mouse/pointer/touch event. 
*/ private _onPointerDown; /** * Event handler for pointer move events on {@link EventSystem#domElement this.domElement}. * @param nativeEvent - The native mouse/pointer/touch events. */ private _onPointerMove; /** * Event handler for pointer up events on {@link EventSystem#domElement this.domElement}. * @param nativeEvent - The native mouse/pointer/touch event. */ private _onPointerUp; /** * Event handler for pointer over & out events on {@link EventSystem#domElement this.domElement}. * @param nativeEvent - The native mouse/pointer/touch event. */ private _onPointerOverOut; /** * Passive handler for `wheel` events on {@link EventSystem.domElement this.domElement}. * @param nativeEvent - The native wheel event. */ protected onWheel(nativeEvent: WheelEvent): void; /** * Sets the {@link EventSystem#domElement domElement} and binds event listeners. * * To deregister the current DOM element without setting a new one, pass {@code null}. * @param element - The new DOM element. */ setTargetElement(element: HTMLElement): void; /** Register event listeners on {@link Renderer#domElement this.domElement}. */ private _addEvents; /** Unregister event listeners on {@link EventSystem#domElement this.domElement}. */ private _removeEvents; /** * Maps x and y coords from a DOM object correctly onto the PixiJS view. The * resulting value is stored in the point. This takes into account the fact that the DOM * element could be scaled and positioned anywhere on the screen. * @param {PointData} point - the point that the result will be stored in * @param {number} x - the x coord of the position to map * @param {number} y - the y coord of the position to map */ mapPositionToPoint(point: PointData, x: number, y: number): void; /** * Ensures that the original event object contains all data that a regular pointer event would have. * @param event - The original event data from a touch or mouse event * @returns An array containing a single normalized pointer event, in the case of a pointer * or mouse event, or multiple normalized pointer events if there are multiple changed touches */ private _normalizeToPointerData; /** * Normalizes the native {@link https://w3c.github.io/uievents/#interface-wheelevent WheelEvent}. * * The returned {@link FederatedWheelEvent} is a shared instance. It will not persist across * multiple native wheel events. * @param nativeEvent - The native wheel event that occurred on the canvas. * @returns A federated wheel event. */ protected normalizeWheelEvent(nativeEvent: WheelEvent): FederatedWheelEvent; /** * Normalizes the `nativeEvent` into a federated {@link FederatedPointerEvent}. * @param event * @param nativeEvent */ private _bootstrapEvent; /** * Transfers base & mouse event data from the {@code nativeEvent} to the federated event. * @param event * @param nativeEvent */ private _transferMouseData; } declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-interface interface Container extends IFederatedContainer { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface ContainerOptions extends FederatedOptions { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface ContainerEvents extends FederatedEventEmitterTypes { } interface RendererOptions { /** * The default event mode for all display objects. * @since 7.2.0 */ eventMode?: EventMode; /** * The event features that are enabled by the EventSystem. 
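*
* For example (a minimal sketch; assumes these options are forwarded through `Application.init`):
* ```ts
* import { Application } from 'pixi.js';
*
* const app = new Application();
* await app.init({
*     eventMode: 'passive',
*     eventFeatures: { move: true, globalMove: false, click: true, wheel: true },
* });
* ```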
* @since 7.2.0 */ eventFeatures?: EventSystemOptions["eventFeatures"]; } interface RendererSystems { events: EventSystem; } } } export declare class FilterPipe implements InstructionPipe { static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "filter"; }; private _renderer; constructor(renderer: Renderer); push(filterEffect: Effect, container: Container, instructionSet: InstructionSet): void; pop(_filterEffect: Effect, _container: Container, instructionSet: InstructionSet): void; execute(instruction: FilterInstruction): void; destroy(): void; } declare global { namespace PixiMixins { interface RendererSystems { filter: FilterSystem; } interface RendererPipes { filter: FilterPipe; } } } /* eslint-disable max-len */ declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-interface interface Point extends Vector2Math { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface ObservablePoint extends Vector2Math { } interface Rectangle { containsRect(other: Rectangle): boolean; equals(other: Rectangle): boolean; intersection(other: Rectangle): Rectangle; intersection(other: Rectangle, outRect: T): T; union(other: Rectangle): Rectangle; union(other: Rectangle, outRect: T): T; } } interface Vector2Math { add(other: PointData): Point; add(other: PointData, outPoint: T): T; subtract(other: PointData): Point; subtract(other: PointData, outPoint: T): T; multiply(other: PointData): Point; multiply(other: PointData, outPoint: T): T; // divide(other: import('../maths/point/PointData').PointData): import('../maths/point/Point').Point; // divide(other: import('../maths/point/PointData').PointData, outPoint: T): T; multiplyScalar(scalar: number): Point; multiplyScalar(scalar: number, outPoint: T): T; dot(other: PointData): number; cross(other: PointData): number; normalize(): Point; normalize(outPoint: T): T; magnitude(): number; magnitudeSquared(): number; project(onto: PointData): Point; project(onto: PointData, outPoint: T): T; reflect(normal: PointData): Point; reflect(normal: PointData, outPoint: T): T; } } /** * Typed and cleaned up version of: * https://stackoverflow.com/questions/44855794/html5-canvas-triangle-with-rounded-corners/44856925#44856925 * @param g - Graphics to be drawn on. * @param points - Corners of the shape to draw. Minimum length is 3. * @param radius - Corners default radius. * @ignore */ export declare function roundedShapeArc(g: ShapePath, points: RoundedPoint[], radius: number): void; export type RoundedPoint = PointData & { radius?: number; }; /** * Typed and cleaned up version of: * https://stackoverflow.com/questions/44855794/html5-canvas-triangle-with-rounded-corners/56214413#56214413 * @param g - Graphics to be drawn on. * @param points - Corners of the shape to draw. Minimum length is 3. * @param radius - Corners default radius. * @ignore */ export declare function roundedShapeQuadraticCurve(g: ShapePath, points: RoundedPoint[], radius: number, smoothness?: number): void; /** * The `ShapePath` class acts as a bridge between high-level drawing commands * and the lower-level `GraphicsContext` rendering engine. * It translates drawing commands, such as those for creating lines, arcs, ellipses, rectangles, and complex polygons, into a * format that can be efficiently processed by a `GraphicsContext`. This includes handling path starts, * ends, and transformations for shapes. 
* * It is used internally by `GraphicsPath` to build up the path. * @memberof scene */ export declare class ShapePath { /** The list of shape primitives that make up the path. */ shapePrimitives: { shape: ShapePrimitive; transform?: Matrix; }[]; private _currentPoly; private readonly _graphicsPath2D; private readonly _bounds; constructor(graphicsPath2D: GraphicsPath); /** * Sets the starting point for a new sub-path. Any subsequent drawing commands are considered part of this path. * @param x - The x-coordinate for the starting point. * @param y - The y-coordinate for the starting point. * @returns The instance of the current object for chaining. */ moveTo(x: number, y: number): this; /** * Connects the current point to a new point with a straight line. This method updates the current path. * @param x - The x-coordinate of the new point to connect to. * @param y - The y-coordinate of the new point to connect to. * @returns The instance of the current object for chaining. */ lineTo(x: number, y: number): this; /** * Adds an arc to the path. The arc is centered at (x, y) * position with radius `radius` starting at `startAngle` and ending at `endAngle`. * @param x - The x-coordinate of the arc's center. * @param y - The y-coordinate of the arc's center. * @param radius - The radius of the arc. * @param startAngle - The starting angle of the arc, in radians. * @param endAngle - The ending angle of the arc, in radians. * @param counterclockwise - Specifies whether the arc should be drawn in the anticlockwise direction. False by default. * @returns The instance of the current object for chaining. */ arc(x: number, y: number, radius: number, startAngle: number, endAngle: number, counterclockwise: boolean): this; /** * Adds an arc to the path with the arc tangent to the line joining two specified points. * The arc radius is specified by `radius`. * @param x1 - The x-coordinate of the first point. * @param y1 - The y-coordinate of the first point. * @param x2 - The x-coordinate of the second point. * @param y2 - The y-coordinate of the second point. * @param radius - The radius of the arc. * @returns The instance of the current object for chaining. */ arcTo(x1: number, y1: number, x2: number, y2: number, radius: number): this; /** * Adds an SVG-style arc to the path, allowing for elliptical arcs based on the SVG spec. * @param rx - The x-radius of the ellipse. * @param ry - The y-radius of the ellipse. * @param xAxisRotation - The rotation of the ellipse's x-axis relative * to the x-axis of the coordinate system, in degrees. * @param largeArcFlag - Determines if the arc should be greater than or less than 180 degrees. * @param sweepFlag - Determines if the arc should be swept in a positive angle direction. * @param x - The x-coordinate of the arc's end point. * @param y - The y-coordinate of the arc's end point. * @returns The instance of the current object for chaining. */ arcToSvg(rx: number, ry: number, xAxisRotation: number, largeArcFlag: number, sweepFlag: number, x: number, y: number): this; /** * Adds a cubic Bezier curve to the path. * It requires three points: the first two are control points and the third one is the end point. * The starting point is the last point in the current path. * @param cp1x - The x-coordinate of the first control point. * @param cp1y - The y-coordinate of the first control point. * @param cp2x - The x-coordinate of the second control point. * @param cp2y - The y-coordinate of the second control point. * @param x - The x-coordinate of the end point. 
* @param y - The y-coordinate of the end point. * @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ bezierCurveTo(cp1x: number, cp1y: number, cp2x: number, cp2y: number, x: number, y: number, smoothness?: number): this; /** * Adds a quadratic curve to the path. It requires two points: the control point and the end point. * The starting point is the last point in the current path. * @param cp1x - The x-coordinate of the control point. * @param cp1y - The y-coordinate of the control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. * @param smoothing - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ quadraticCurveTo(cp1x: number, cp1y: number, x: number, y: number, smoothing?: number): this; /** * Closes the current path by drawing a straight line back to the start. * If the shape is already closed or there are no points in the path, this method does nothing. * @returns The instance of the current object for chaining. */ closePath(): this; /** * Adds another path to the current path. This method allows for the combination of multiple paths into one. * @param path - The `GraphicsPath` object representing the path to add. * @param transform - An optional `Matrix` object to apply a transformation to the path before adding it. * @returns The instance of the current object for chaining. */ addPath(path: GraphicsPath, transform?: Matrix): this; /** * Finalizes the drawing of the current path. Optionally, it can close the path. * @param closePath - A boolean indicating whether to close the path after finishing. False by default. */ finish(closePath?: boolean): void; /** * Draws a rectangle shape. This method adds a new rectangle path to the current drawing. * @param x - The x-coordinate of the top-left corner of the rectangle. * @param y - The y-coordinate of the top-left corner of the rectangle. * @param w - The width of the rectangle. * @param h - The height of the rectangle. * @param transform - An optional `Matrix` object to apply a transformation to the rectangle. * @returns The instance of the current object for chaining. */ rect(x: number, y: number, w: number, h: number, transform?: Matrix): this; /** * Draws a circle shape. This method adds a new circle path to the current drawing. * @param x - The x-coordinate of the center of the circle. * @param y - The y-coordinate of the center of the circle. * @param radius - The radius of the circle. * @param transform - An optional `Matrix` object to apply a transformation to the circle. * @returns The instance of the current object for chaining. */ circle(x: number, y: number, radius: number, transform?: Matrix): this; /** * Draws a polygon shape. This method allows for the creation of complex polygons by specifying a sequence of points. * @param points - An array of numbers, or an array of PointData objects, e.g. [{x,y}, {x,y}, {x,y}], * representing the x and y coordinates of the polygon's vertices, in sequence. * @param close - A boolean indicating whether to close the polygon path. True by default. * @param transform - An optional `Matrix` object to apply a transformation to the polygon. * @returns The instance of the current object for chaining. */ poly(points: number[] | PointData[], close?: boolean, transform?: Matrix): this; /** * Draws a regular polygon with a specified number of sides. All sides and angles are equal. 
* @param x - The x-coordinate of the center of the polygon. * @param y - The y-coordinate of the center of the polygon. * @param radius - The radius of the circumscribed circle of the polygon. * @param sides - The number of sides of the polygon. Must be 3 or more. * @param rotation - The rotation angle of the polygon, in radians. Zero by default. * @param transform - An optional `Matrix` object to apply a transformation to the polygon. * @returns The instance of the current object for chaining. */ regularPoly(x: number, y: number, radius: number, sides: number, rotation?: number, transform?: Matrix): this; /** * Draws a polygon with rounded corners. * Similar to `regularPoly` but with the ability to round the corners of the polygon. * @param x - The x-coordinate of the center of the polygon. * @param y - The y-coordinate of the center of the polygon. * @param radius - The radius of the circumscribed circle of the polygon. * @param sides - The number of sides of the polygon. Must be 3 or more. * @param corner - The radius of the rounding of the corners. * @param rotation - The rotation angle of the polygon, in radians. Zero by default. * @param smoothness - Optional parameter to adjust the smoothness of the rounding. * @returns The instance of the current object for chaining. */ roundPoly(x: number, y: number, radius: number, sides: number, corner: number, rotation?: number, smoothness?: number): this; /** * Draws a shape with rounded corners. This function supports custom radius for each corner of the shape. * Optionally, corners can be rounded using a quadratic curve instead of an arc, providing a different aesthetic. * @param points - An array of `RoundedPoint` representing the corners of the shape to draw. * A minimum of 3 points is required. * @param radius - The default radius for the corners. * This radius is applied to all corners unless overridden in `points`. * @param useQuadratic - If set to true, rounded corners are drawn using a quadraticCurve * method instead of an arc method. Defaults to false. * @param smoothness - Specifies the smoothness of the curve when `useQuadratic` is true. * Higher values make the curve smoother. * @returns The instance of the current object for chaining. */ roundShape(points: RoundedPoint[], radius: number, useQuadratic?: boolean, smoothness?: number): this; /** * Draws a rectangle with fillet corners. This is much like a rounded rectangle; * however, it also supports negative numbers for the corner radius. * @param x - The x-coordinate of the upper-left corner of the rect * @param y - The y-coordinate of the upper-left corner of the rect * @param width - Width of rect * @param height - Height of rect * @param fillet - Accepts negative or positive values */ filletRect(x: number, y: number, width: number, height: number, fillet: number): this; /** * Draws a rectangle with chamfered corners. These are angled corners. * @param x - The x-coordinate of the upper-left corner of the rect * @param y - The y-coordinate of the upper-left corner of the rect * @param width - Width of rect * @param height - Height of rect * @param chamfer - Non-zero real number, size of corner cutout * @param transform - An optional `Matrix` object to apply a transformation to the rectangle */ chamferRect(x: number, y: number, width: number, height: number, chamfer: number, transform?: Matrix): this; /** * Draws an ellipse at the specified location and with the given x and y radii. * An optional transformation can be applied, allowing for rotation, scaling, and translation. * @param x - The x-coordinate of the center of the ellipse. * @param y - The y-coordinate of the center of the ellipse. * @param radiusX - The horizontal radius of the ellipse. 
* @param radiusY - The vertical radius of the ellipse. * @param transform - An optional `Matrix` object to apply a transformation to the ellipse. This can include rotations. * @returns The instance of the current object for chaining. */ ellipse(x: number, y: number, radiusX: number, radiusY: number, transform?: Matrix): this; /** * Draws a rectangle with rounded corners. * The corner radius can be specified to determine how rounded the corners should be. * An optional transformation can be applied, which allows for rotation, scaling, and translation of the rectangle. * @param x - The x-coordinate of the top-left corner of the rectangle. * @param y - The y-coordinate of the top-left corner of the rectangle. * @param w - The width of the rectangle. * @param h - The height of the rectangle. * @param radius - The radius of the rectangle's corners. If not specified, corners will be sharp. * @param transform - An optional `Matrix` object to apply a transformation to the rectangle. * @returns The instance of the current object for chaining. */ roundRect(x: number, y: number, w: number, h: number, radius?: number, transform?: Matrix): this; /** * Draws a given shape on the canvas. * This is a generic method that can draw any type of shape specified by the `ShapePrimitive` parameter. * An optional transformation matrix can be applied to the shape, allowing for complex transformations. * @param shape - The shape to draw, defined as a `ShapePrimitive` object. * @param matrix - An optional `Matrix` for transforming the shape. This can include rotations, * scaling, and translations. * @returns The instance of the current object for chaining. */ drawShape(shape: ShapePrimitive, matrix?: Matrix): this; /** * Starts a new polygon path from the specified starting point. * This method initializes a new polygon or ends the current one if it exists. * @param x - The x-coordinate of the starting point of the new polygon. * @param y - The y-coordinate of the starting point of the new polygon. * @returns The instance of the current object for chaining. */ startPoly(x: number, y: number): this; /** * Ends the current polygon path. If `closePath` is set to true, * the path is closed by connecting the last point to the first one. * This method finalizes the current polygon and prepares it for drawing or adding to the shape primitives. * @param closePath - A boolean indicating whether to close the polygon by connecting the last point * back to the starting point. False by default. * @returns The instance of the current object for chaining. */ endPoly(closePath?: boolean): this; private _ensurePoly; /** Builds the path. */ buildPath(): void; /** Gets the bounds of the path. */ get bounds(): Bounds; } export interface PathInstruction { action: "moveTo" | "lineTo" | "quadraticCurveTo" | "bezierCurveTo" | "arc" | "closePath" | "addPath" | "arcTo" | "ellipse" | "rect" | "roundRect" | "arcToSvg" | "poly" | "circle" | "regularPoly" | "roundPoly" | "roundShape" | "filletRect" | "chamferRect"; data: any[]; } /** * The `GraphicsPath` class is designed to represent a graphical path consisting of multiple drawing instructions. * This class serves as a collection of drawing commands that can be executed to render shapes and paths on a canvas or * similar graphical context. It supports high-level drawing operations like lines, arcs, curves, and more, enabling * complex graphic constructions with relative ease. 
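*
* For example (a minimal sketch; instructions can be chained, or seeded from an SVG path string):
* ```ts
* import { GraphicsPath } from 'pixi.js';
*
* const path = new GraphicsPath()
*     .moveTo(0, 0)
*     .lineTo(100, 0)
*     .quadraticCurveTo(150, 50, 100, 100)
*     .closePath();
*
* const fromSvg = new GraphicsPath('M 0 0 L 100 0 L 100 100 Z');
* ```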
*/ export declare class GraphicsPath { instructions: PathInstruction[]; /** unique id for this graphics path */ readonly uid: number; private _dirty; private _shapePath; /** * Provides access to the internal shape path, ensuring it is up-to-date with the current instructions. * @returns The `ShapePath` instance associated with this `GraphicsPath`. */ get shapePath(): ShapePath; /** * Creates a `GraphicsPath` instance optionally from an SVG path string or an array of `PathInstruction`. * @param instructions - An SVG path string or an array of `PathInstruction` objects. */ constructor(instructions?: string | PathInstruction[]); /** * Adds another `GraphicsPath` to this path, optionally applying a transformation. * @param path - The `GraphicsPath` to add. * @param transform - An optional transformation to apply to the added path. * @returns The instance of the current object for chaining. */ addPath(path: GraphicsPath, transform?: Matrix): this; /** * Adds an arc to the path. The arc is centered at (x, y) * position with radius `radius` starting at `startAngle` and ending at `endAngle`. * @param x - The x-coordinate of the arc's center. * @param y - The y-coordinate of the arc's center. * @param radius - The radius of the arc. * @param startAngle - The starting angle of the arc, in radians. * @param endAngle - The ending angle of the arc, in radians. * @param counterclockwise - Specifies whether the arc should be drawn in the anticlockwise direction. False by default. * @returns The instance of the current object for chaining. */ arc(x: number, y: number, radius: number, startAngle: number, endAngle: number, counterclockwise?: boolean): this; /** * Adds an arc to the path with the arc tangent to the line joining two specified points. * The arc radius is specified by `radius`. * @param x1 - The x-coordinate of the first point. * @param y1 - The y-coordinate of the first point. * @param x2 - The x-coordinate of the second point. * @param y2 - The y-coordinate of the second point. * @param radius - The radius of the arc. * @returns The instance of the current object for chaining. */ arcTo(x1: number, y1: number, x2: number, y2: number, radius: number): this; /** * Adds an SVG-style arc to the path, allowing for elliptical arcs based on the SVG spec. * @param rx - The x-radius of the ellipse. * @param ry - The y-radius of the ellipse. * @param xAxisRotation - The rotation of the ellipse's x-axis relative * to the x-axis of the coordinate system, in degrees. * @param largeArcFlag - Determines if the arc should be greater than or less than 180 degrees. * @param sweepFlag - Determines if the arc should be swept in a positive angle direction. * @param x - The x-coordinate of the arc's end point. * @param y - The y-coordinate of the arc's end point. * @returns The instance of the current object for chaining. */ arcToSvg(rx: number, ry: number, xAxisRotation: number, largeArcFlag: number, sweepFlag: number, x: number, y: number): this; /** * Adds a cubic Bezier curve to the path. * It requires three points: the first two are control points and the third one is the end point. * The starting point is the last point in the current path. * @param cp1x - The x-coordinate of the first control point. * @param cp1y - The y-coordinate of the first control point. * @param cp2x - The x-coordinate of the second control point. * @param cp2y - The y-coordinate of the second control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. 
* @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ bezierCurveTo(cp1x: number, cp1y: number, cp2x: number, cp2y: number, x: number, y: number, smoothness?: number): this; /** * Adds a cubic Bezier curve to the path. * It requires two points: the second control point and the end point. The first control point is assumed to be * the reflection of the second control point of the previous curve relative to the current point. * The starting point is the last point in the current path. * @param cp2x - The x-coordinate of the second control point. * @param cp2y - The y-coordinate of the second control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. * @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ bezierCurveToShort(cp2x: number, cp2y: number, x: number, y: number, smoothness?: number): this; /** * Closes the current path by drawing a straight line back to the start. * If the shape is already closed or there are no points in the path, this method does nothing. * @returns The instance of the current object for chaining. */ closePath(): this; /** * Draws an ellipse at the specified location and with the given x and y radii. * An optional transformation can be applied, allowing for rotation, scaling, and translation. * @param x - The x-coordinate of the center of the ellipse. * @param y - The y-coordinate of the center of the ellipse. * @param radiusX - The horizontal radius of the ellipse. * @param radiusY - The vertical radius of the ellipse. * @param matrix - An optional `Matrix` object to apply a transformation to the ellipse. This can include rotations. * @returns The instance of the current object for chaining. */ ellipse(x: number, y: number, radiusX: number, radiusY: number, matrix?: Matrix): this; /** * Connects the current point to a new point with a straight line. This method updates the current path. * @param x - The x-coordinate of the new point to connect to. * @param y - The y-coordinate of the new point to connect to. * @returns The instance of the current object for chaining. */ lineTo(x: number, y: number): this; /** * Sets the starting point for a new sub-path. Any subsequent drawing commands are considered part of this path. * @param x - The x-coordinate for the starting point. * @param y - The y-coordinate for the starting point. * @returns The instance of the current object for chaining. */ moveTo(x: number, y: number): this; /** * Adds a quadratic curve to the path. It requires two points: the control point and the end point. * The starting point is the last point in the current path. * @param cpx - The x-coordinate of the control point. * @param cpy - The y-coordinate of the control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. * @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ quadraticCurveTo(cpx: number, cpy: number, x: number, y: number, smoothness?: number): this; /** * Adds a quadratic curve to the path. It uses the previous point as the control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. * @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. 
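*
* For example (a minimal sketch; the short form omits the control point, deriving it as described above):
* ```ts
* const wave = new GraphicsPath()
*     .moveTo(0, 0)
*     .quadraticCurveTo(25, 50, 50, 0)
*     .quadraticCurveToShort(100, 0);
* ```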
*/ quadraticCurveToShort(x: number, y: number, smoothness?: number): this; /** * Draws a rectangle shape. This method adds a new rectangle path to the current drawing. * @param x - The x-coordinate of the top-left corner of the rectangle. * @param y - The y-coordinate of the top-left corner of the rectangle. * @param w - The width of the rectangle. * @param h - The height of the rectangle. * @param transform - An optional `Matrix` object to apply a transformation to the rectangle. * @returns The instance of the current object for chaining. */ rect(x: number, y: number, w: number, h: number, transform?: Matrix): this; /** * Draws a circle shape. This method adds a new circle path to the current drawing. * @param x - The x-coordinate of the center of the circle. * @param y - The y-coordinate of the center of the circle. * @param radius - The radius of the circle. * @param transform - An optional `Matrix` object to apply a transformation to the circle. * @returns The instance of the current object for chaining. */ circle(x: number, y: number, radius: number, transform?: Matrix): this; /** * Draws a rectangle with rounded corners. * The corner radius can be specified to determine how rounded the corners should be. * An optional transformation can be applied, which allows for rotation, scaling, and translation of the rectangle. * @param x - The x-coordinate of the top-left corner of the rectangle. * @param y - The y-coordinate of the top-left corner of the rectangle. * @param w - The width of the rectangle. * @param h - The height of the rectangle. * @param radius - The radius of the rectangle's corners. If not specified, corners will be sharp. * @param transform - An optional `Matrix` object to apply a transformation to the rectangle. * @returns The instance of the current object for chaining. */ roundRect(x: number, y: number, w: number, h: number, radius?: number, transform?: Matrix): this; /** * Draws a polygon shape by specifying a sequence of points. This method allows for the creation of complex polygons, * which can be both open and closed. An optional transformation can be applied, enabling the polygon to be scaled, * rotated, or translated as needed. * @param points - An array of numbers representing the x and y coordinates of the polygon's vertices, in sequence. * @param close - A boolean indicating whether to close the polygon path. True by default. * @param transform - An optional `Matrix` object to apply a transformation to the polygon. * @returns The instance of the current object for chaining further drawing commands. */ poly(points: number[] | PointData[], close?: boolean, transform?: Matrix): this; /** * Draws a regular polygon with a specified number of sides. All sides and angles are equal. * @param x - The x-coordinate of the center of the polygon. * @param y - The y-coordinate of the center of the polygon. * @param radius - The radius of the circumscribed circle of the polygon. * @param sides - The number of sides of the polygon. Must be 3 or more. * @param rotation - The rotation angle of the polygon, in radians. Zero by default. * @param transform - An optional `Matrix` object to apply a transformation to the polygon. * @returns The instance of the current object for chaining. */ regularPoly(x: number, y: number, radius: number, sides: number, rotation?: number, transform?: Matrix): this; /** * Draws a polygon with rounded corners. * Similar to `regularPoly` but with the ability to round the corners of the polygon. 
* @param x - The x-coordinate of the center of the polygon. * @param y - The y-coordinate of the center of the polygon. * @param radius - The radius of the circumscribed circle of the polygon. * @param sides - The number of sides of the polygon. Must be 3 or more. * @param corner - The radius of the rounding of the corners. * @param rotation - The rotation angle of the polygon, in radians. Zero by default. * @returns The instance of the current object for chaining. */ roundPoly(x: number, y: number, radius: number, sides: number, corner: number, rotation?: number): this; /** * Draws a shape with rounded corners. This function supports custom radius for each corner of the shape. * Optionally, corners can be rounded using a quadratic curve instead of an arc, providing a different aesthetic. * @param points - An array of `RoundedPoint` representing the corners of the shape to draw. * A minimum of 3 points is required. * @param radius - The default radius for the corners. * This radius is applied to all corners unless overridden in `points`. * @param useQuadratic - If set to true, rounded corners are drawn using a quadraticCurve * method instead of an arc method. Defaults to false. * @param smoothness - Specifies the smoothness of the curve when `useQuadratic` is true. * Higher values make the curve smoother. * @returns The instance of the current object for chaining. */ roundShape(points: RoundedPoint[], radius: number, useQuadratic?: boolean, smoothness?: number): this; /** * Draws a rectangle with fillet corners. This is much like a rounded rectangle; * however, it also supports negative numbers for the corner radius. * @param x - The x-coordinate of the upper-left corner of the rect * @param y - The y-coordinate of the upper-left corner of the rect * @param width - Width of rect * @param height - Height of rect * @param fillet - Accepts negative or positive values */ filletRect(x: number, y: number, width: number, height: number, fillet: number): this; /** * Draws a rectangle with chamfered corners. These are angled corners. * @param x - The x-coordinate of the upper-left corner of the rect * @param y - The y-coordinate of the upper-left corner of the rect * @param width - Width of rect * @param height - Height of rect * @param chamfer - Non-zero real number, size of corner cutout * @param transform - An optional `Matrix` object to apply a transformation to the rectangle */ chamferRect(x: number, y: number, width: number, height: number, chamfer: number, transform?: Matrix): this; /** * Draws a star shape centered at a specified location. This method allows for the creation * of stars with a variable number of points, outer radius, optional inner radius, and rotation. * The star is drawn as a closed polygon with alternating outer and inner vertices to create the star's points. * An optional transformation can be applied to scale, rotate, or translate the star as needed. * @param x - The x-coordinate of the center of the star. * @param y - The y-coordinate of the center of the star. * @param points - The number of points of the star. * @param radius - The outer radius of the star (distance from the center to the outer points). * @param innerRadius - Optional. The inner radius of the star * (distance from the center to the inner points between the outer points). * If not provided, defaults to half of the `radius`. * @param rotation - Optional. The rotation of the star in radians, where 0 is aligned with the y-axis. * Defaults to 0, meaning one point is directly upward. * @param transform - An optional `Matrix` object to apply a transformation to the star. * This can include rotations, scaling, and translations. 
* @returns The instance of the current object for chaining further drawing commands. */ star(x: number, y: number, points: number, radius: number, innerRadius?: number, rotation?: number, transform?: Matrix): this; /** * Creates a copy of the current `GraphicsPath` instance. This method supports both shallow and deep cloning. * A shallow clone copies the reference of the instructions array, while a deep clone creates a new array and * copies each instruction individually, ensuring that modifications to the instructions of the cloned `GraphicsPath` * do not affect the original `GraphicsPath` and vice versa. * @param deep - A boolean flag indicating whether the clone should be deep. * @returns A new `GraphicsPath` instance that is a clone of the current instance. */ clone(deep?: boolean): GraphicsPath; clear(): this; /** * Applies a transformation matrix to all drawing instructions within the `GraphicsPath`. * This method enables the modification of the path's geometry according to the provided * transformation matrix, which can include translations, rotations, scaling, and skewing. * * Each drawing instruction in the path is updated to reflect the transformation, * ensuring the visual representation of the path is consistent with the applied matrix. * * Note: The transformation is applied directly to the coordinates and control points of the drawing instructions, * not to the path as a whole. This means the transformation's effects are baked into the individual instructions, * allowing for fine-grained control over the path's appearance. * @param matrix - A `Matrix` object representing the transformation to apply. * @returns The instance of the current object for chaining further operations. */ transform(matrix: Matrix): this; get bounds(): Bounds; /** * Retrieves the last point from the current drawing instructions in the `GraphicsPath`. * This method is useful for operations that depend on the path's current endpoint, * such as connecting subsequent shapes or paths. It supports various drawing instructions, * ensuring the last point's position is accurately determined regardless of the path's complexity. * * If the last instruction is a `closePath`, the method iterates backward through the instructions * until it finds an actionable instruction that defines a point (e.g., `moveTo`, `lineTo`, * `quadraticCurveTo`, etc.). For compound paths added via `addPath`, it recursively retrieves * the last point from the nested path. * @param out - A `Point` object where the last point's coordinates will be stored. * This object is modified directly to contain the result. * @returns The `Point` object containing the last point's coordinates. 
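*
* For example (a minimal sketch; reuses a `Point` to read back the path's current endpoint):
* ```ts
* import { GraphicsPath, Point } from 'pixi.js';
*
* const path = new GraphicsPath().moveTo(10, 10).lineTo(50, 80);
* const end = path.getLastPoint(new Point()); // expected to be (50, 80)
* ```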
*/ getLastPoint(out: Point): Point; } export type LineCap = "butt" | "round" | "square"; export type LineJoin = "round" | "bevel" | "miter"; export declare const closePointEps = 0.0001; export declare const curveEps = 0.0001; export type GradientType = "linear" | "radial"; export interface LinearGradientFillStyle { x0: number; y0: number; x1: number; y1: number; colors: number[]; stops: number[]; } export declare class FillGradient implements CanvasGradient { static defaultTextureSize: number; /** unique id for this fill gradient */ readonly uid: number; readonly type: GradientType; x0: number; y0: number; x1: number; y1: number; texture: Texture; transform: Matrix; gradientStops: Array<{ offset: number; color: string; }>; private _styleKey; constructor(x0: number, y0: number, x1: number, y1: number); addColorStop(offset: number, color: ColorSource): this; buildLinearGradient(): void; get styleKey(): string; } export type PatternRepetition = "repeat" | "repeat-x" | "repeat-y" | "no-repeat"; export declare class FillPattern implements CanvasPattern { /** unique id for this fill pattern */ readonly uid: number; texture: Texture; transform: Matrix; private _styleKey; constructor(texture: Texture, repetition?: PatternRepetition); setTransform(transform?: Matrix): void; get styleKey(): string; } /** * A fill style object. * @memberof scene */ export interface FillStyle { /** The color to use for the fill. */ color?: ColorSource; /** The alpha value to use for the fill. */ alpha?: number; /** The texture to use for the fill. */ texture?: Texture | null; /** The matrix to apply. */ matrix?: Matrix | null; /** The fill pattern to use. */ fill?: FillPattern | FillGradient | null; } /** * A stroke attribute object, used to define properties for a stroke. * @memberof scene */ export interface StrokeAttributes { /** The width of the stroke. */ width?: number; /** The alignment of the stroke. */ alignment?: number; /** The line cap style to use. */ cap?: LineCap; /** The line join style to use. */ join?: LineJoin; /** The miter limit to use. */ miterLimit?: number; } /** * A stroke style object. 
* @memberof scene */ export interface StrokeStyle extends FillStyle, StrokeAttributes { } /** * These can be directly used as a fill or a stroke * ```ts * graphics.fill(0xff0000); * graphics.fill(new FillPattern(texture)); * graphics.fill(new FillGradient(0, 0, 200, 0)); * graphics.fill({ * color: 0xff0000, * alpha: 0.5, * texture: null, * matrix: null, * }); * graphics.fill({ * fill: new FillPattern(texture), * }); * graphics.fill({ * fill: new FillGradient(0, 0, 200, 0), * }); * ``` * @memberof scene */ export type FillInput = ColorSource | FillGradient | FillPattern | FillStyle; /** * These can be directly used as a stroke * ```ts * graphics.stroke(0xff0000); * graphics.stroke(new FillPattern(texture)); * graphics.stroke(new FillGradient(0, 0, 200, 0)); * graphics.stroke({ * color: 0xff0000, * width: 1, * alignment: 0.5, * }); * graphics.stroke({ * fill: new FillPattern(texture), * width: 1, * alignment: 0.5, * }); * graphics.stroke({ * fill: new FillGradient(0, 0, 200, 0), * width: 1, * alignment: 0.5, * }); * ``` * @memberof scene */ export type StrokeInput = ColorSource | FillGradient | FillPattern | StrokeStyle; export type ConvertedFillStyle = Omit<FillStyle, "color"> & { color: number; }; export type ConvertedStrokeStyle = ConvertedFillStyle & Required<StrokeAttributes>; /** * @deprecated since v8.1.6 * @see scene.FillInput */ export type FillStyleInputs = ColorSource | FillGradient | FillPattern | FillStyle | ConvertedFillStyle | StrokeStyle | ConvertedStrokeStyle; export type BatchMode = "auto" | "batch" | "no-batch"; export interface FillInstruction { action: "fill" | "cut"; data: { style: ConvertedFillStyle; path: GraphicsPath; hole?: GraphicsPath; }; } export interface StrokeInstruction { action: "stroke"; data: { style: ConvertedStrokeStyle; path: GraphicsPath; hole?: GraphicsPath; }; } export interface TextureInstruction { action: "texture"; data: { image: Texture; dx: number; dy: number; dw: number; dh: number; transform: Matrix; alpha: number; style: number; }; } export type GraphicsInstructions = FillInstruction | StrokeInstruction | TextureInstruction; /** * The GraphicsContext class allows for the creation of lightweight objects that contain instructions for drawing shapes and paths. * It is used internally by the Graphics class to draw shapes and paths, and can also be used directly and shared between Graphics objects. * * This sharing of a `GraphicsContext` means that the intensive task of converting graphics instructions into GPU-ready geometry is done once, and the results are reused, * much like sprites reusing textures. * @memberof scene */ export declare class GraphicsContext extends EventEmitter<{ update: GraphicsContext; destroy: GraphicsContext; }> { /** The default fill style to use when none is provided. */ static defaultFillStyle: ConvertedFillStyle; /** The default stroke style to use when none is provided. */ static defaultStrokeStyle: ConvertedStrokeStyle; /** unique id for this graphics context */ readonly uid: number; dirty: boolean; batchMode: BatchMode; instructions: GraphicsInstructions[]; customShader?: Shader; private _activePath; private _transform; private _fillStyle; private _strokeStyle; private _stateStack; private _tick; private _bounds; private _boundsDirty; /** * Creates a new GraphicsContext object that is a clone of this instance, copying all properties, * including the current drawing state, transformations, styles, and instructions. * @returns A new GraphicsContext instance with the same properties and state as this one.
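* @example
* // A minimal sketch of sharing a context and cloning it for an independent copy.
* // The shape and color are arbitrary placeholders.
* import { Graphics, GraphicsContext } from 'pixi.js';
*
* const template = new GraphicsContext().rect(0, 0, 100, 100).fill(0xff0000);
* const shared = new Graphics(template);               // reuses the same geometry
* const independent = new Graphics(template.clone());  // safe to modify separately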
*/ clone(): GraphicsContext; /** * The current fill style of the graphics context. This can be a color, gradient, pattern, or a more complex style defined by a FillStyle object. */ get fillStyle(): ConvertedFillStyle; set fillStyle(value: FillInput); /** * The current stroke style of the graphics context. Similar to fill styles, stroke styles can encompass colors, gradients, patterns, or more detailed configurations via a StrokeStyle object. */ get strokeStyle(): ConvertedStrokeStyle; set strokeStyle(value: FillInput); /** * Sets the current fill style of the graphics context. The fill style can be a color, gradient, * pattern, or a more complex style defined by a FillStyle object. * @param style - The fill style to apply. This can be a simple color, a gradient or pattern object, * or a FillStyle or ConvertedFillStyle object. * @returns The instance of the current GraphicsContext for method chaining. */ setFillStyle(style: FillInput): this; /** * Sets the current stroke style of the graphics context. Similar to fill styles, stroke styles can * encompass colors, gradients, patterns, or more detailed configurations via a StrokeStyle object. * @param style - The stroke style to apply. Can be defined as a color, a gradient or pattern, * or a StrokeStyle or ConvertedStrokeStyle object. * @returns The instance of the current GraphicsContext for method chaining. */ setStrokeStyle(style: StrokeInput): this; /** * Adds a texture to the graphics context. This method supports multiple overloads for specifying the texture, tint, and dimensions. * If only a texture is provided, it uses the texture's width and height for drawing. Additional parameters allow for specifying * a tint color, and custom dimensions for the texture drawing area. * @param texture - The Texture object to use. * @param tint - (Optional) A ColorSource to tint the texture. If not provided, defaults to white (0xFFFFFF). * @param dx - (Optional) The x-coordinate in the destination canvas at which to place the top-left corner of the source image. * @param dy - (Optional) The y-coordinate in the destination canvas at which to place the top-left corner of the source image. * @param dw - (Optional) The width of the rectangle within the source image to draw onto the destination canvas. If not provided, uses the texture's frame width. * @param dh - (Optional) The height of the rectangle within the source image to draw onto the destination canvas. If not provided, uses the texture's frame height. * @returns The instance of the current GraphicsContext for method chaining. */ texture(texture: Texture): this; texture(texture: Texture, tint: ColorSource): this; texture(texture: Texture, tint: ColorSource, dx: number, dy: number): this; texture(texture: Texture, tint: ColorSource, dx: number, dy: number, dw: number, dh: number): this; /** * Resets the current path. Any previous path and its commands are discarded and a new path is * started. This is typically called before beginning a new shape or series of drawing commands. * @returns The instance of the current GraphicsContext for method chaining. */ beginPath(): this; /** * Fills the current or given path with the current fill style. This method can optionally take * a color and alpha for a simple fill, or a more complex FillInput object for advanced fills. * @param style - (Optional) The style to fill the path with. Can be a color, gradient, pattern, or a complex style object. If omitted, uses the current fill style. * @returns The instance of the current GraphicsContext for method chaining. 
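* @example
* // A small usage sketch; the rectangle and style values are placeholders.
* import { GraphicsContext } from 'pixi.js';
*
* const context = new GraphicsContext();
* context.beginPath().rect(0, 0, 50, 50).fill({ color: 0x00ff00, alpha: 0.5 });
* context.rect(60, 0, 50, 50).fill('blue'); // simple color fills also work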
*/ fill(style?: FillInput): this; /** @deprecated 8.0.0 */ fill(color: ColorSource, alpha: number): this; private _initNextPathLocation; /** * Strokes the current path with the current stroke style. This method can take an optional * FillInput parameter to define the stroke's appearance, including its color, width, and other properties. * @param style - (Optional) The stroke style to apply. Can be defined as a simple color or a more complex style object. If omitted, uses the current stroke style. * @returns The instance of the current GraphicsContext for method chaining. */ stroke(style?: StrokeInput): this; /** * Applies a cutout to the last drawn shape. This is used to create holes or complex shapes by * subtracting a path from the previously drawn path. If a hole is not completely in a shape, it will * fail to cut correctly! * @returns The instance of the current GraphicsContext for method chaining. */ cut(): this; /** * Adds an arc to the current path, which is centered at (x, y) with the specified radius, * starting and ending angles, and direction. * @param x - The x-coordinate of the arc's center. * @param y - The y-coordinate of the arc's center. * @param radius - The arc's radius. * @param startAngle - The starting angle, in radians. * @param endAngle - The ending angle, in radians. * @param counterclockwise - (Optional) Specifies whether the arc is drawn counterclockwise (true) or clockwise (false). Defaults to false. * @returns The instance of the current GraphicsContext for method chaining. */ arc(x: number, y: number, radius: number, startAngle: number, endAngle: number, counterclockwise?: boolean): this; /** * Adds an arc to the current path with the given control points and radius, connected to the previous point * by a straight line if necessary. * @param x1 - The x-coordinate of the first control point. * @param y1 - The y-coordinate of the first control point. * @param x2 - The x-coordinate of the second control point. * @param y2 - The y-coordinate of the second control point. * @param radius - The arc's radius. * @returns The instance of the current GraphicsContext for method chaining. */ arcTo(x1: number, y1: number, x2: number, y2: number, radius: number): this; /** * Adds an SVG-style arc to the path, allowing for elliptical arcs based on the SVG spec. * @param rx - The x-radius of the ellipse. * @param ry - The y-radius of the ellipse. * @param xAxisRotation - The rotation of the ellipse's x-axis relative * to the x-axis of the coordinate system, in degrees. * @param largeArcFlag - Determines if the arc should be greater than or less than 180 degrees. * @param sweepFlag - Determines if the arc should be swept in a positive angle direction. * @param x - The x-coordinate of the arc's end point. * @param y - The y-coordinate of the arc's end point. * @returns The instance of the current object for chaining. */ arcToSvg(rx: number, ry: number, xAxisRotation: number, largeArcFlag: number, sweepFlag: number, x: number, y: number): this; /** * Adds a cubic Bezier curve to the path. * It requires three points: the first two are control points and the third one is the end point. * The starting point is the last point in the current path. * @param cp1x - The x-coordinate of the first control point. * @param cp1y - The y-coordinate of the first control point. * @param cp2x - The x-coordinate of the second control point. * @param cp2y - The y-coordinate of the second control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. 
* @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ bezierCurveTo(cp1x: number, cp1y: number, cp2x: number, cp2y: number, x: number, y: number, smoothness?: number): this; /** * Closes the current path by drawing a straight line back to the start. * If the shape is already closed or there are no points in the path, this method does nothing. * @returns The instance of the current object for chaining. */ closePath(): this; /** * Draws an ellipse at the specified location and with the given x and y radii. * An optional transformation can be applied, allowing for rotation, scaling, and translation. * @param x - The x-coordinate of the center of the ellipse. * @param y - The y-coordinate of the center of the ellipse. * @param radiusX - The horizontal radius of the ellipse. * @param radiusY - The vertical radius of the ellipse. * @returns The instance of the current object for chaining. */ ellipse(x: number, y: number, radiusX: number, radiusY: number): this; /** * Draws a circle shape. This method adds a new circle path to the current drawing. * @param x - The x-coordinate of the center of the circle. * @param y - The y-coordinate of the center of the circle. * @param radius - The radius of the circle. * @returns The instance of the current object for chaining. */ circle(x: number, y: number, radius: number): this; /** * Adds another `GraphicsPath` to this path, optionally applying a transformation. * @param path - The `GraphicsPath` to add. * @returns The instance of the current object for chaining. */ path(path: GraphicsPath): this; /** * Connects the current point to a new point with a straight line. This method updates the current path. * @param x - The x-coordinate of the new point to connect to. * @param y - The y-coordinate of the new point to connect to. * @returns The instance of the current object for chaining. */ lineTo(x: number, y: number): this; /** * Sets the starting point for a new sub-path. Any subsequent drawing commands are considered part of this path. * @param x - The x-coordinate for the starting point. * @param y - The y-coordinate for the starting point. * @returns The instance of the current object for chaining. */ moveTo(x: number, y: number): this; /** * Adds a quadratic curve to the path. It requires two points: the control point and the end point. * The starting point is the last point in the current path. * @param cpx - The x-coordinate of the control point. * @param cpy - The y-coordinate of the control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. * @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ quadraticCurveTo(cpx: number, cpy: number, x: number, y: number, smoothness?: number): this; /** * Draws a rectangle shape. This method adds a new rectangle path to the current drawing. * @param x - The x-coordinate of the top-left corner of the rectangle. * @param y - The y-coordinate of the top-left corner of the rectangle. * @param w - The width of the rectangle. * @param h - The height of the rectangle. * @returns The instance of the current object for chaining. */ rect(x: number, y: number, w: number, h: number): this; /** * Draws a rectangle with rounded corners. * The corner radius can be specified to determine how rounded the corners should be. 
* An optional transformation can be applied, which allows for rotation, scaling, and translation of the rectangle. * @param x - The x-coordinate of the top-left corner of the rectangle. * @param y - The y-coordinate of the top-left corner of the rectangle. * @param w - The width of the rectangle. * @param h - The height of the rectangle. * @param radius - The radius of the rectangle's corners. If not specified, corners will be sharp. * @returns The instance of the current object for chaining. */ roundRect(x: number, y: number, w: number, h: number, radius?: number): this; /** * Draws a polygon shape by specifying a sequence of points. This method allows for the creation of complex polygons, * which can be both open and closed. An optional transformation can be applied, enabling the polygon to be scaled, * rotated, or translated as needed. * @param points - An array of numbers, or an array of PointData objects eg [{x,y}, {x,y}, {x,y}] * representing the x and y coordinates, of the polygon's vertices, in sequence. * @param close - A boolean indicating whether to close the polygon path. True by default. */ poly(points: number[] | PointData[], close?: boolean): this; /** * Draws a regular polygon with a specified number of sides. All sides and angles are equal. * @param x - The x-coordinate of the center of the polygon. * @param y - The y-coordinate of the center of the polygon. * @param radius - The radius of the circumscribed circle of the polygon. * @param sides - The number of sides of the polygon. Must be 3 or more. * @param rotation - The rotation angle of the polygon, in radians. Zero by default. * @param transform - An optional `Matrix` object to apply a transformation to the polygon. * @returns The instance of the current object for chaining. */ regularPoly(x: number, y: number, radius: number, sides: number, rotation?: number, transform?: Matrix): this; /** * Draws a polygon with rounded corners. * Similar to `regularPoly` but with the ability to round the corners of the polygon. * @param x - The x-coordinate of the center of the polygon. * @param y - The y-coordinate of the center of the polygon. * @param radius - The radius of the circumscribed circle of the polygon. * @param sides - The number of sides of the polygon. Must be 3 or more. * @param corner - The radius of the rounding of the corners. * @param rotation - The rotation angle of the polygon, in radians. Zero by default. * @returns The instance of the current object for chaining. */ roundPoly(x: number, y: number, radius: number, sides: number, corner: number, rotation?: number): this; /** * Draws a shape with rounded corners. This function supports custom radius for each corner of the shape. * Optionally, corners can be rounded using a quadratic curve instead of an arc, providing a different aesthetic. * @param points - An array of `RoundedPoint` representing the corners of the shape to draw. * A minimum of 3 points is required. * @param radius - The default radius for the corners. * This radius is applied to all corners unless overridden in `points`. * @param useQuadratic - If set to true, rounded corners are drawn using a quadraticCurve * method instead of an arc method. Defaults to false. * @param smoothness - Specifies the smoothness of the curve when `useQuadratic` is true. * Higher values make the curve smoother. * @returns The instance of the current object for chaining. 
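* @example
* // A small sketch, assuming each `RoundedPoint` is an `{ x, y, radius? }` object;
* // the coordinates and radii below are arbitrary.
* import { GraphicsContext } from 'pixi.js';
*
* const context = new GraphicsContext();
* context.roundShape(
*     [{ x: 0, y: 0 }, { x: 100, y: 0, radius: 30 }, { x: 50, y: 80 }],
*     10, // default corner radius for points that do not set their own `radius`
* ).fill(0xffcc00);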
*/ roundShape(points: RoundedPoint[], radius: number, useQuadratic?: boolean, smoothness?: number): this; /** * Draws a rectangle with fillet corners. This is much like a rounded rectangle, * however it supports negative numbers as well for the corner radius. * @param x - The x-coordinate of the upper left corner of the rect * @param y - The y-coordinate of the upper left corner of the rect * @param width - Width of rect * @param height - Height of rect * @param fillet - The corner radius; accepts negative or positive values */ filletRect(x: number, y: number, width: number, height: number, fillet: number): this; /** * Draws a rectangle with chamfer corners. These are angled corners. * @param x - The x-coordinate of the upper left corner of the rect * @param y - The y-coordinate of the upper left corner of the rect * @param width - Width of rect * @param height - Height of rect * @param chamfer - non-zero real number, size of corner cutout * @param transform - An optional `Matrix` object to apply a transformation to the rectangle. */ chamferRect(x: number, y: number, width: number, height: number, chamfer: number, transform?: Matrix): this; /** * Draws a star shape centered at a specified location. This method allows for the creation * of stars with a variable number of points, outer radius, optional inner radius, and rotation. * The star is drawn as a closed polygon with alternating outer and inner vertices to create the star's points. * An optional transformation can be applied to scale, rotate, or translate the star as needed. * @param x - The x-coordinate of the center of the star. * @param y - The y-coordinate of the center of the star. * @param points - The number of points of the star. * @param radius - The outer radius of the star (distance from the center to the outer points). * @param innerRadius - Optional. The inner radius of the star * (distance from the center to the inner points between the outer points). * If not provided, defaults to half of the `radius`. * @param rotation - Optional. The rotation of the star in radians, where 0 is aligned with the y-axis. * Defaults to 0, meaning one point is directly upward. * @returns The instance of the current object for chaining further drawing commands. */ star(x: number, y: number, points: number, radius: number, innerRadius?: number, rotation?: number): this; /** * Parses and renders an SVG string into the graphics context. This allows for complex shapes and paths * defined in SVG format to be drawn within the graphics context. * @param svg - The SVG string to be parsed and rendered. */ svg(svg: string): this; /** * Restores the most recently saved graphics state by popping the top of the graphics state stack. * This includes transformations, fill styles, and stroke styles. */ restore(): this; /** Saves the current graphics state, including transformations, fill styles, and stroke styles, onto a stack. */ save(): this; /** * Returns the current transformation matrix of the graphics context. * @returns The current transformation matrix. */ getTransform(): Matrix; /** * Resets the current transformation matrix to the identity matrix, effectively removing any transformations (rotation, scaling, translation) previously applied. * @returns The instance of the current GraphicsContext for method chaining. */ resetTransform(): this; /** * Applies a rotation transformation to the graphics context around the current origin. * @param angle - The angle of rotation in radians. * @returns The instance of the current GraphicsContext for method chaining. */ rotate(angle: number): this; /** * Applies a scaling transformation to the graphics context, scaling drawings by x horizontally and by y vertically.
* @param x - The scale factor in the horizontal direction. * @param y - (Optional) The scale factor in the vertical direction. If not specified, the x value is used for both directions. * @returns The instance of the current GraphicsContext for method chaining. */ scale(x: number, y?: number): this; /** * Sets the current transformation matrix of the graphics context to the specified matrix or values. * This replaces the current transformation matrix. * @param a - The value for the a property of the matrix, or a Matrix object to use directly. * @param b - The value for the b property of the matrix. * @param c - The value for the c property of the matrix. * @param d - The value for the d property of the matrix. * @param dx - The value for the tx (translate x) property of the matrix. * @param dy - The value for the ty (translate y) property of the matrix. * @returns The instance of the current GraphicsContext for method chaining. */ setTransform(transform: Matrix): this; setTransform(a: number, b: number, c: number, d: number, dx: number, dy: number): this; /** * Applies the specified transformation matrix to the current graphics context by multiplying the current matrix with the specified matrix. * @param a - The value for the a property of the matrix, or a Matrix object to use directly. * @param b - The value for the b property of the matrix. * @param c - The value for the c property of the matrix. * @param d - The value for the d property of the matrix. * @param dx - The value for the tx (translate x) property of the matrix. * @param dy - The value for the ty (translate y) property of the matrix. * @returns The instance of the current GraphicsContext for method chaining. */ transform(transform: Matrix): this; transform(a: number, b: number, c: number, d: number, dx: number, dy: number): this; /** * Applies a translation transformation to the graphics context, moving the origin by the specified amounts. * @param x - The amount to translate in the horizontal direction. * @param y - (Optional) The amount to translate in the vertical direction. If not specified, the x value is used for both directions. * @returns The instance of the current GraphicsContext for method chaining. */ translate(x: number, y?: number): this; /** * Clears all drawing commands from the graphics context, effectively resetting it. This includes clearing the path, * and optionally resetting transformations to the identity matrix. * @returns The instance of the current GraphicsContext for method chaining. */ clear(): this; protected onUpdate(): void; /** The bounds of the graphic shape. */ get bounds(): Bounds; /** * Check to see if a point is contained within this geometry. * @param point - Point to check if it's contained. * @returns {boolean} `true` if the point is contained within the geometry. */ containsPoint(point: PointData): boolean; /** * Destroys the GraphicsContext object. * @param options - Options parameter. A boolean will act as if all options * have been set to that value * @param {boolean} [options.texture=false] - Should it destroy the current texture of the fill/stroke style? * @param {boolean} [options.textureSource=false] - Should it destroy the texture source of the fill/stroke style?
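* @example
* // A usage sketch, assuming `context` is an existing GraphicsContext.
* // Either destroy just the context...
* context.destroy();
* // ...or also release the textures referenced by its fill/stroke styles:
* context.destroy({ texture: true, textureSource: true });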
*/ destroy(options?: TypeOrBool): void; } export type TextStyleAlign = "left" | "center" | "right" | "justify"; export type TextStyleFill = string | string[] | number | number[] | CanvasGradient | CanvasPattern; export type TextStyleFontStyle = "normal" | "italic" | "oblique"; export type TextStyleFontVariant = "normal" | "small-caps"; export type TextStyleFontWeight = "normal" | "bold" | "bolder" | "lighter" | "100" | "200" | "300" | "400" | "500" | "600" | "700" | "800" | "900"; export type TextStyleLineJoin = "miter" | "round" | "bevel"; export type TextStyleTextBaseline = "alphabetic" | "top" | "hanging" | "middle" | "ideographic" | "bottom"; export type TextStyleWhiteSpace = "normal" | "pre" | "pre-line"; /** * A collection of text related classes. * @namespace text */ /** * A drop shadow effect. * @memberof text */ export type TextDropShadow = { /** Set alpha for the drop shadow */ alpha: number; /** Set a angle of the drop shadow */ angle: number; /** Set a shadow blur radius */ blur: number; /** A fill style to be used on the e.g., 'red', '#00FF00' */ color: ColorSource; /** Set a distance of the drop shadow */ distance: number; }; /** * Constructor options used for `TextStyle` instances. * ```js * const textStyle = new TextStyle({ * fontSize: 12, * fill: 'black', * }); * ``` * @see {@link text.TextStyle} * @memberof text */ export interface TextStyleOptions { /** * Alignment for multiline text, does not affect single line text * @type {'left'|'center'|'right'|'justify'} */ align?: TextStyleAlign; /** Indicates if lines can be wrapped within words, it needs `wordWrap` to be set to `true` */ breakWords?: boolean; /** Set a drop shadow for the text */ dropShadow?: boolean | Partial; /** * A canvas fillstyle that will be used on the text e.g., 'red', '#00FF00'. * Can be an array to create a gradient, e.g., `['#000000','#FFFFFF']` * {@link https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/fillStyle|MDN} * @type {string|string[]|number|number[]|CanvasGradient|CanvasPattern} */ fill?: FillInput; /** The font family, can be a single font name, or a list of names where the first is the preferred font. */ fontFamily?: string | string[]; /** The font size (as a number it converts to px, but as a string, equivalents are '26px','20pt','160%' or '1.6em') */ fontSize?: number | string; /** * The font style. * @type {'normal'|'italic'|'oblique'} */ fontStyle?: TextStyleFontStyle; /** * The font variant. * @type {'normal'|'small-caps'} */ fontVariant?: TextStyleFontVariant; /** * The font weight. * @type {'normal'|'bold'|'bolder'|'lighter'|'100'|'200'|'300'|'400'|'500'|'600'|'700'|'800'|'900'} */ fontWeight?: TextStyleFontWeight; /** The height of the line, a number that represents the vertical space that a letter uses. */ leading?: number; /** The amount of spacing between letters, default is 0 */ letterSpacing?: number; /** The line height, a number that represents the vertical space that a letter uses */ lineHeight?: number; /** * Occasionally some fonts are cropped. Adding some padding will prevent this from * happening by adding padding to all sides of the text. */ padding?: number; /** A canvas fillstyle that will be used on the text stroke, e.g., 'blue', '#FCFF00' */ stroke?: StrokeInput; /** * The baseline of the text that is rendered. 
* @type {'alphabetic'|'top'|'hanging'|'middle'|'ideographic'|'bottom'} */ textBaseline?: TextStyleTextBaseline; trim?: boolean; /** * Determines whether newlines & spaces are collapsed or preserved "normal" * (collapse, collapse), "pre" (preserve, preserve) | "pre-line" (preserve, * collapse). It needs wordWrap to be set to true. * @type {'normal'|'pre'|'pre-line'} */ whiteSpace?: TextStyleWhiteSpace; /** Indicates if word wrap should be used */ wordWrap?: boolean; /** The width at which text will wrap, it needs wordWrap to be set to true */ wordWrapWidth?: number; } /** * A TextStyle Object contains information to decorate a Text objects. * * An instance can be shared between multiple Text objects; then changing the style will update all text objects using it. * @memberof text * @example * import { TextStyle } from 'pixi.js'; * const style = new TextStyle({ * fontFamily: ['Helvetica', 'Arial', 'sans-serif'], * fontSize: 36, * }); */ export declare class TextStyle extends EventEmitter<{ update: TextDropShadow; }> { /** The default drop shadow settings */ static defaultDropShadow: TextDropShadow; /** The default text style settings */ static defaultTextStyle: TextStyleOptions; _fill: ConvertedFillStyle; private _originalFill; _stroke: ConvertedStrokeStyle; private _originalStroke; private _dropShadow; private _fontFamily; private _fontSize; private _fontStyle; private _fontVariant; private _fontWeight; private _breakWords; private _align; private _leading; private _letterSpacing; private _lineHeight; private _textBaseline; private _whiteSpace; private _wordWrap; private _wordWrapWidth; private _padding; protected _styleKey: string; private _trim; constructor(style?: Partial); /** * Alignment for multiline text, does not affect single line text. * @member {'left'|'center'|'right'|'justify'} */ get align(): TextStyleAlign; set align(value: TextStyleAlign); /** Indicates if lines can be wrapped within words, it needs wordWrap to be set to true. */ get breakWords(): boolean; set breakWords(value: boolean); /** Set a drop shadow for the text. */ get dropShadow(): TextDropShadow; set dropShadow(value: boolean | TextDropShadow); /** The font family, can be a single font name, or a list of names where the first is the preferred font. */ get fontFamily(): string | string[]; set fontFamily(value: string | string[]); /** The font size (as a number it converts to px, but as a string, equivalents are '26px','20pt','160%' or '1.6em') */ get fontSize(): number; set fontSize(value: string | number); /** * The font style. * @member {'normal'|'italic'|'oblique'} */ get fontStyle(): TextStyleFontStyle; set fontStyle(value: TextStyleFontStyle); /** * The font variant. * @member {'normal'|'small-caps'} */ get fontVariant(): TextStyleFontVariant; set fontVariant(value: TextStyleFontVariant); /** * The font weight. * @member {'normal'|'bold'|'bolder'|'lighter'|'100'|'200'|'300'|'400'|'500'|'600'|'700'|'800'|'900'} */ get fontWeight(): TextStyleFontWeight; set fontWeight(value: TextStyleFontWeight); /** The space between lines. */ get leading(): number; set leading(value: number); /** The amount of spacing between letters, default is 0. */ get letterSpacing(): number; set letterSpacing(value: number); /** The line height, a number that represents the vertical space that a letter uses. */ get lineHeight(): number; set lineHeight(value: number); /** * Occasionally some fonts are cropped. Adding some padding will prevent this from happening * by adding padding to all sides of the text. 
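* A short sketch of working around cropped glyphs (the values are arbitrary):
* @example
* import { TextStyle } from 'pixi.js';
*
* const style = new TextStyle({ fontFamily: 'Arial', fontSize: 36 });
* style.padding = 4; // adds 4px of headroom on every side of the rendered text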
*/ get padding(): number; set padding(value: number); /** Trim transparent borders. This is an expensive operation so only use this if you have to! */ get trim(): boolean; set trim(value: boolean); /** * The baseline of the text that is rendered. * @member {'alphabetic'|'top'|'hanging'|'middle'|'ideographic'|'bottom'} */ get textBaseline(): TextStyleTextBaseline; set textBaseline(value: TextStyleTextBaseline); /** * How newlines and spaces should be handled. * Default is 'pre' (preserve, preserve). * * value | New lines | Spaces * --- | --- | --- * 'normal' | Collapse | Collapse * 'pre' | Preserve | Preserve * 'pre-line' | Preserve | Collapse * @member {'normal'|'pre'|'pre-line'} */ get whiteSpace(): TextStyleWhiteSpace; set whiteSpace(value: TextStyleWhiteSpace); /** Indicates if word wrap should be used. */ get wordWrap(): boolean; set wordWrap(value: boolean); /** The width at which text will wrap, it needs wordWrap to be set to true. */ get wordWrapWidth(): number; set wordWrapWidth(value: number); /** A fillstyle that will be used on the text e.g., 'red', '#00FF00'. */ get fill(): FillInput; set fill(value: FillInput); /** A fillstyle that will be used on the text stroke, e.g., 'blue', '#FCFF00'. */ get stroke(): StrokeInput; set stroke(value: StrokeInput); protected _generateKey(): string; update(): void; /** Resets all properties to the default values */ reset(): void; get styleKey(): string; /** * Creates a new TextStyle object with the same values as this one. * @returns New cloned TextStyle object */ clone(): TextStyle; /** * Destroys this text style. * @param options - Options parameter. A boolean will act as if all options * have been set to that value * @param {boolean} [options.texture=false] - Should it destroy the texture of the this style * @param {boolean} [options.textureSource=false] - Should it destroy the textureSource of the this style */ destroy(options?: TypeOrBool): void; private _createProxy; private _isFillStyle; } /** * Options for HTML text style, extends {@link TextStyle}. * @memberof text * @extends text.TextStyleOptions * @property {string[]} [cssOverrides] - CSS style(s) to add. * @property {Record} [tagStyles] - Tag styles. */ export interface HTMLTextStyleOptions extends Omit { cssOverrides?: string[]; tagStyles?: Record; } /** * A TextStyle object rendered by the HTMLTextSystem. * @memberof text */ export declare class HTMLTextStyle extends TextStyle { private _cssOverrides; private _cssStyle; /** * List of styles per tag. * @example * new HTMLText({ * text:'Red,Blue,Green', * style:{ * fontFamily: 'DM Sans', * fill: 'white', * fontSize:100, * tagStyles:{ * red:{ * fill:'red', * }, * blue:{ * fill:'blue', * }, * green:{ * fill:'green', * } * } * } * ); */ tagStyles: Record; constructor(options?: HTMLTextStyleOptions); /** List of style overrides that will be applied to the HTML text. */ set cssOverrides(value: string | string[]); get cssOverrides(): string[]; protected _generateKey(): string; update(): void; /** * Creates a new HTMLTextStyle object with the same values as this one. * @returns New cloned HTMLTextStyle object */ clone(): HTMLTextStyle; get cssStyle(): string; /** * Add a style override, this can be any CSS property * it will override any built-in style. This is the * property and the value as a string (e.g., `color: red`). * This will override any other internal style. * @param {string} value - CSS style(s) to add. 
* @example * style.addOverride('background-color: red'); */ addOverride(...value: string[]): void; /** * Remove any overrides that match the value. * @param {string} value - CSS style to remove. * @example * style.removeOverride('background-color: red'); */ removeOverride(...value: string[]): void; set fill(value: FillInput); set stroke(value: StrokeInput); } /** * A string or number that can be used as text. * @memberof text */ export type TextString = string | number | { toString: () => string; }; /** * A union of all text styles, including HTML, Bitmap and Canvas text styles. * @memberof text * @see text.TextStyle * @see text.HTMLTextStyle */ export type AnyTextStyle = TextStyle | HTMLTextStyle; /** * A union of all text style options, including HTML, Bitmap and Canvas text style options. * @memberof text * @see text.TextStyleOptions * @see text.HTMLTextStyleOptions */ export type AnyTextStyleOptions = TextStyleOptions | HTMLTextStyleOptions; /** * Options for the {@link scene.Text} class. * @example * const text = new Text({ * text: 'Hello Pixi!', * style: { * fontFamily: 'Arial', * fontSize: 24, * fill: 0xff1010, * align: 'center', * } * }); * @memberof text */ export interface TextOptions extends ContainerOptions { /** The anchor point of the text. */ anchor?: PointData | number; /** The copy for the text object. To split a line you can use '\n'. */ text?: TextString; /** The resolution of the text. */ resolution?: number; /** * The text style * @type { * text.TextStyle | * Partial | * text.TextStyleOptions | * text.HTMLTextStyle | * Partial | * text.HTMLTextStyleOptions * } */ style?: TEXT_STYLE | TEXT_STYLE_OPTIONS; /** Whether or not to round the x/y position. */ roundPixels?: boolean; } /** * An abstract Text class, used by all text type in Pixi. This includes Canvas, HTML, and Bitmap Text. * @see scene.Text * @see scene.BitmapText * @see scene.HTMLText * @memberof scene */ export declare abstract class AbstractText extends ViewContainer implements View { batched: boolean; _anchor: ObservablePoint; _resolution: number; _autoResolution: boolean; _style: TEXT_STYLE; _didTextUpdate: boolean; protected _text: string; private readonly _styleClass; constructor(options: TextOptions, styleClass: new (options: TEXT_STYLE_OPTIONS) => TEXT_STYLE); /** * The anchor sets the origin point of the text. * The default is `(0,0)`, this means the text's origin is the top left. * * Setting the anchor to `(0.5,0.5)` means the text's origin is centered. * * Setting the anchor to `(1,1)` would mean the text's origin point will be the bottom right corner. * * If you pass only single parameter, it will set both x and y to the same value as shown in the example below. * @example * import { Text } from 'pixi.js'; * * const text = new Text('hello world'); * text.anchor.set(0.5); // This will set the origin to center. (0.5) is same as (0.5, 0.5). */ get anchor(): ObservablePoint; set anchor(value: PointData | number); /** Set the copy for the text object. To split a line you can use '\n'. */ set text(value: TextString); get text(): string; /** * The resolution / device pixel ratio of the canvas. * @default 1 */ set resolution(value: number); get resolution(): number; get style(): TEXT_STYLE; /** * Set the style of the text. * * Set up an event listener to listen for changes on the style object and mark the text as dirty. * * If setting the `style` can also be partial {@link AnyTextStyleOptions}. 
* @type { * text.TextStyle | * Partial | * text.TextStyleOptions | * text.HTMLTextStyle | * Partial | * text.HTMLTextStyleOptions * } */ set style(style: TEXT_STYLE | Partial | TEXT_STYLE_OPTIONS); /** * The local bounds of the Text. * @type {rendering.Bounds} */ get bounds(): Bounds; /** The width of the text, setting this will actually modify the scale to achieve the value set. */ get width(): number; set width(value: number); /** The height of the text, setting this will actually modify the scale to achieve the value set. */ get height(): number; set height(value: number); /** * Retrieves the size of the Text as a [Size]{@link Size} object. * This is faster than getting the width and height separately. * @param out - Optional object to store the size in. * @returns - The size of the Text. */ getSize(out?: Size): Size; /** * Sets the size of the Text to the specified width and height. * This is faster than setting the width and height separately. * @param value - This can be either a number or a [Size]{@link Size} object. * @param height - The height to set. Defaults to the value of `width` if not provided. */ setSize(value: number | Optional, height?: number): void; /** * Adds the bounds of this text to the bounds object. * @param bounds - The output bounds object. */ addBounds(bounds: Bounds): void; /** * Checks if the text contains the given point. * @param point - The point to check */ containsPoint(point: PointData): boolean; onViewUpdate(): void; _getKey(): string; protected abstract _updateBounds(): void; /** * Destroys this text renderable and optionally its style texture. * @param options - Options parameter. A boolean will act as if all options * have been set to that value * @param {boolean} [options.texture=false] - Should it destroy the texture of the text style * @param {boolean} [options.textureSource=false] - Should it destroy the textureSource of the text style * @param {boolean} [options.style=false] - Should it destroy the style of the text */ destroy(options?: DestroyOptions): void; } export declare function ensureOptions(args: any[], name: string): TextOptions; /** * A Text Object will create a line or multiple lines of text. * * To split a line you can use '\n' in your text string, or, on the `style` object, * change its `wordWrap` property to true and give the `wordWrapWidth` property a value. * * The primary advantage of this class over BitmapText is that you have great control over the style of the text, * which you can change at runtime. * * The primary disadvantage is that each piece of text has its own texture, which can use more memory. * When text changes, this texture has to be re-generated and re-uploaded to the GPU, taking up time. * @example * import { Text } from 'pixi.js'; * * const text = new Text({ * text: 'Hello Pixi!', * style: { * fontFamily: 'Arial', * fontSize: 24, * fill: 0xff1010, * align: 'center', * } * }); * @memberof scene */ declare class Text$1 extends AbstractText implements View { readonly renderPipeId: string; /** * @param {text.TextOptions} options - The options of the text.
*/ constructor(options?: TextOptions); /** @deprecated since 8.0.0 */ constructor(text?: TextString, options?: Partial); protected _updateBounds(): void; } /** The accepted types to pass to the prepare system */ export type PrepareSourceItem = Container | TextureSource | Texture | GraphicsContext; /** The valid types resolved to the queue ready for upload */ export type PrepareQueueItem = TextureSource | Text$1 | GraphicsContext; /** * Part of the prepare system. Responsible for uploading all the items to the GPU. * This class provides the base functionality and handles processing the queue asynchronously. * @memberof rendering */ export declare abstract class PrepareBase { /** The number of uploads to process per frame */ static uploadsPerFrame: number; /** Reference to the renderer */ protected renderer: Renderer; /** The queue to process over a async timer */ protected queue: PrepareQueueItem[]; /** Collection of callbacks to call when the uploads are finished */ protected resolves: ((value: void | PromiseLike) => void)[]; /** Timeout id for next processing call */ protected timeout?: number; /** * @param {rendering.Renderer} renderer - A reference to the current renderer */ constructor(renderer: Renderer); /** Resolve the given resource type and return an item for the queue */ protected abstract resolveQueueItem(source: PrepareSourceItem, queue: PrepareQueueItem[]): void; protected abstract uploadQueueItem(item: PrepareQueueItem): void; /** * Return a copy of the queue * @returns {PrepareQueueItem[]} The queue */ getQueue(): PrepareQueueItem[]; /** * Add a textures or graphics resource to the queue * @param {PrepareSourceItem | PrepareSourceItem[]} resource */ add(resource: PrepareSourceItem | PrepareSourceItem[]): this; /** * Recursively add a container and its children to the queue * @param {Container} container - The container to add to the queue */ private _addContainer; /** * Upload all the textures and graphics to the GPU (optionally add more resources to the queue first) * @param {PrepareSourceItem | PrepareSourceItem[] | undefined} resource */ upload(resource?: PrepareSourceItem | PrepareSourceItem[]): Promise; /** eliminate duplicates before processing */ dedupeQueue(): void; /** called per frame by the ticker, defer processing to next tick */ private readonly _tick; /** process the queue up to max item limit per frame */ private readonly _processQueue; /** Call all the resolve callbacks */ private _resolve; } declare global { namespace PixiMixins { interface RendererSystems { prepare: PrepareBase; } } } declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-interface interface ICanvas { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface RendererOptions { resolution?: number; failIfMajorPerformanceCaveat?: boolean; roundPixels?: boolean; } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface WebGLOptions { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface WebGPUOptions { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface RendererSystems { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface WebGLSystems { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface WebGPUSystems { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface CanvasSystems { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface RendererPipes { } // 
eslint-disable-next-line @typescript-eslint/no-empty-interface interface WebGLPipes { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface WebGPUPipes { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface CanvasPipes { } } } /** * Constructor options used for `Graphics` instances. * ```js * const graphics = new Graphics({ * fillStyle: { color: 0xff0000, alpha: 0.5 }, * strokeStyle: { color: 0x00ff00, width: 2 }, * }); * ``` * @see {@link scene.Graphics} * @memberof scene */ export interface GraphicsOptions extends ContainerOptions { /** The GraphicsContext to use, useful for reuse and optimisation */ context?: GraphicsContext; /** Whether or not to round the x/y position. */ roundPixels?: boolean; } /** * The Graphics class is primarily used to render primitive shapes such as lines, circles and * rectangles to the display, and to color and fill them. However, you can also use a Graphics * object to build a list of primitives to use as a mask, or as a complex hitArea. * @memberof scene * @extends scene.Container */ export declare class Graphics extends ViewContainer implements Instruction { readonly renderPipeId: string; batched: boolean; _didGraphicsUpdate: boolean; private _context; private readonly _ownedContext; /** * @param options - Options for the Graphics. */ constructor(options?: GraphicsOptions | GraphicsContext); set context(context: GraphicsContext); get context(): GraphicsContext; /** * The local bounds of the graphic. * @type {rendering.Bounds} */ get bounds(): Bounds; /** * Adds the bounds of this object to the bounds object. * @param bounds - The output bounds object. */ addBounds(bounds: Bounds): void; /** * Checks if the object contains the given point. * @param point - The point to check */ containsPoint(point: PointData): boolean; protected onViewUpdate(): void; /** * Destroys this graphics renderable and optionally its context. * @param options - Options parameter. A boolean will act as if all options * * If the context was created by this graphics and `destroy(false)` or `destroy()` is called * then the context will still be destroyed. * * If you want to explicitly not destroy this context that this graphics created, * then you should pass destroy({ context: false }) * * If the context was passed in as an argument to the constructor then it will not be destroyed * @param {boolean} [options.texture=false] - Should destroy the texture of the graphics context * @param {boolean} [options.textureSource=false] - Should destroy the texture source of the graphics context * @param {boolean} [options.context=false] - Should destroy the context */ destroy(options?: DestroyOptions): void; private _callContextMethod; /** * Sets the current fill style of the graphics context. The fill style can be a color, gradient, * pattern, or a more complex style defined by a FillStyle object. * @param {FillInput} args - The fill style to apply. This can be a simple color, a gradient or * pattern object, or a FillStyle or ConvertedFillStyle object. * @returns The instance of the current GraphicsContext for method chaining. */ setFillStyle(...args: Parameters): this; /** * Sets the current stroke style of the graphics context. Similar to fill styles, stroke styles can * encompass colors, gradients, patterns, or more detailed configurations via a StrokeStyle object. * @param {StrokeInput} args - The stroke style to apply. Can be defined as a color, a gradient or pattern, * or a StrokeStyle or ConvertedStrokeStyle object. 
* @returns The instance of the current GraphicsContext for method chaining. */ setStrokeStyle(...args: Parameters): this; /** * Fills the current or given path with the current fill style. This method can optionally take * a color and alpha for a simple fill, or a more complex FillStyle object for advanced fills. * @param {FillInput} style - (Optional) The style to fill the path with. Can be a color, gradient, pattern, or a * complex style object. If omitted, uses the current fill style. * @returns The instance of the current GraphicsContext for method chaining. */ fill(style?: FillInput): this; /** @deprecated 8.0.0 */ fill(color: ColorSource, alpha?: number): this; /** * Strokes the current path with the current stroke style. This method can take an optional * FillStyle parameter to define the stroke's appearance, including its color, width, and other properties. * @param {FillStyle} args - (Optional) The stroke style to apply. Can be defined as a simple color or a more * complex style object. If omitted, uses the current stroke style. * @returns The instance of the current GraphicsContext for method chaining. */ stroke(...args: Parameters): this; /** * Adds a texture to the graphics context. This method supports multiple overloads for specifying the texture, * tint, and dimensions. If only a texture is provided, it uses the texture's width and height for drawing. * Additional parameters allow for specifying a tint color, and custom dimensions for the texture drawing area. * @param texture - The Texture object to use. * @param tint - (Optional) A ColorSource to tint the texture. If not provided, defaults to white (0xFFFFFF). * @param dx - (Optional) The x-coordinate in the destination canvas at which to place the top-left corner of * the source image. * @param dy - (Optional) The y-coordinate in the destination canvas at which to place the top-left corner of * the source image. * @param dw - (Optional) The width of the rectangle within the source image to draw onto the destination canvas. * If not provided, uses the texture's frame width. * @param dh - (Optional) The height of the rectangle within the source image to draw onto the destination canvas. * If not provided, uses the texture's frame height. * @returns The instance of the current GraphicsContext for method chaining. */ texture(texture: Texture, tint?: ColorSource, dx?: number, dy?: number, dw?: number, dh?: number): this; texture(texture: Texture): this; /** * Resets the current path. Any previous path and its commands are discarded and a new path is * started. This is typically called before beginning a new shape or series of drawing commands. * @returns The instance of the current GraphicsContext for method chaining. */ beginPath(): this; /** * Applies a cutout to the last drawn shape. This is used to create holes or complex shapes by * subtracting a path from the previously drawn path. If a hole is not completely in a shape, it will * fail to cut correctly! */ cut(): this; /** * Adds an arc to the current path, which is centered at (x, y) with the specified radius, * starting and ending angles, and direction. * @param x - The x-coordinate of the arc's center. * @param y - The y-coordinate of the arc's center. * @param radius - The arc's radius. * @param startAngle - The starting angle, in radians. * @param endAngle - The ending angle, in radians. * @param counterclockwise - (Optional) Specifies whether the arc is drawn counterclockwise (true) or clockwise * (false). Defaults to false. 
* @returns The instance of the current GraphicsContext for method chaining. */ arc(x: number, y: number, radius: number, startAngle: number, endAngle: number, counterclockwise?: boolean): this; /** * Adds an arc to the current path with the given control points and radius, connected to the previous point * by a straight line if necessary. * @param x1 - The x-coordinate of the first control point. * @param y1 - The y-coordinate of the first control point. * @param x2 - The x-coordinate of the second control point. * @param y2 - The y-coordinate of the second control point. * @param radius - The arc's radius. * @returns The instance of the current GraphicsContext for method chaining. */ arcTo(x1: number, y1: number, x2: number, y2: number, radius: number): this; /** * Adds an SVG-style arc to the path, allowing for elliptical arcs based on the SVG spec. * @param rx - The x-radius of the ellipse. * @param ry - The y-radius of the ellipse. * @param xAxisRotation - The rotation of the ellipse's x-axis relative * to the x-axis of the coordinate system, in degrees. * @param largeArcFlag - Determines if the arc should be greater than or less than 180 degrees. * @param sweepFlag - Determines if the arc should be swept in a positive angle direction. * @param x - The x-coordinate of the arc's end point. * @param y - The y-coordinate of the arc's end point. * @returns The instance of the current object for chaining. */ arcToSvg(rx: number, ry: number, xAxisRotation: number, largeArcFlag: number, sweepFlag: number, x: number, y: number): this; /** * Adds a cubic Bezier curve to the path. * It requires three points: the first two are control points and the third one is the end point. * The starting point is the last point in the current path. * @param cp1x - The x-coordinate of the first control point. * @param cp1y - The y-coordinate of the first control point. * @param cp2x - The x-coordinate of the second control point. * @param cp2y - The y-coordinate of the second control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. * @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ bezierCurveTo(cp1x: number, cp1y: number, cp2x: number, cp2y: number, x: number, y: number, smoothness?: number): this; /** * Closes the current path by drawing a straight line back to the start. * If the shape is already closed or there are no points in the path, this method does nothing. * @returns The instance of the current object for chaining. */ closePath(): this; /** * Draws an ellipse at the specified location and with the given x and y radii. * An optional transformation can be applied, allowing for rotation, scaling, and translation. * @param x - The x-coordinate of the center of the ellipse. * @param y - The y-coordinate of the center of the ellipse. * @param radiusX - The horizontal radius of the ellipse. * @param radiusY - The vertical radius of the ellipse. * @returns The instance of the current object for chaining. */ ellipse(x: number, y: number, radiusX: number, radiusY: number): this; /** * Draws a circle shape. This method adds a new circle path to the current drawing. * @param x - The x-coordinate of the center of the circle. * @param y - The y-coordinate of the center of the circle. * @param radius - The radius of the circle. * @returns The instance of the current object for chaining. 
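* @example
* // A minimal sketch; the position, radius, and color are placeholders.
* import { Graphics } from 'pixi.js';
*
* const shape = new Graphics();
* shape.circle(100, 100, 50).fill('red');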
*/ circle(x: number, y: number, radius: number): this; /** * Adds another `GraphicsPath` to this path, optionally applying a transformation. * @param path - The `GraphicsPath` to add. * @returns The instance of the current object for chaining. */ path(path: GraphicsPath): this; /** * Connects the current point to a new point with a straight line. This method updates the current path. * @param x - The x-coordinate of the new point to connect to. * @param y - The y-coordinate of the new point to connect to. * @returns The instance of the current object for chaining. */ lineTo(x: number, y: number): this; /** * Sets the starting point for a new sub-path. Any subsequent drawing commands are considered part of this path. * @param x - The x-coordinate for the starting point. * @param y - The y-coordinate for the starting point. * @returns The instance of the current object for chaining. */ moveTo(x: number, y: number): this; /** * Adds a quadratic curve to the path. It requires two points: the control point and the end point. * The starting point is the last point in the current path. * @param cpx - The x-coordinate of the control point. * @param cpy - The y-coordinate of the control point. * @param x - The x-coordinate of the end point. * @param y - The y-coordinate of the end point. * @param smoothness - Optional parameter to adjust the smoothness of the curve. * @returns The instance of the current object for chaining. */ quadraticCurveTo(cpx: number, cpy: number, x: number, y: number, smoothness?: number): this; /** * Draws a rectangle shape. This method adds a new rectangle path to the current drawing. * @param x - The x-coordinate of the top-left corner of the rectangle. * @param y - The y-coordinate of the top-left corner of the rectangle. * @param w - The width of the rectangle. * @param h - The height of the rectangle. * @returns The instance of the current object for chaining. */ rect(x: number, y: number, w: number, h: number): this; /** * Draws a rectangle with rounded corners. * The corner radius can be specified to determine how rounded the corners should be. * An optional transformation can be applied, which allows for rotation, scaling, and translation of the rectangle. * @param x - The x-coordinate of the top-left corner of the rectangle. * @param y - The y-coordinate of the top-left corner of the rectangle. * @param w - The width of the rectangle. * @param h - The height of the rectangle. * @param radius - The radius of the rectangle's corners. If not specified, corners will be sharp. * @returns The instance of the current object for chaining. */ roundRect(x: number, y: number, w: number, h: number, radius?: number): this; /** * Draws a polygon shape by specifying a sequence of points. This method allows for the creation of complex polygons, * which can be both open and closed. An optional transformation can be applied, enabling the polygon to be scaled, * rotated, or translated as needed. * @param points - An array of numbers, or an array of PointData objects eg [{x,y}, {x,y}, {x,y}] * representing the x and y coordinates, of the polygon's vertices, in sequence. * @param close - A boolean indicating whether to close the polygon path. True by default. * @returns The instance of the current object for chaining further drawing commands. */ poly(points: number[] | PointData[], close?: boolean): this; /** * Draws a regular polygon with a specified number of sides. All sides and angles are equal. * @param x - The x-coordinate of the center of the polygon. 
* @param y - The y-coordinate of the center of the polygon. * @param radius - The radius of the circumscribed circle of the polygon. * @param sides - The number of sides of the polygon. Must be 3 or more. * @param rotation - The rotation angle of the polygon, in radians. Zero by default. * @param transform - An optional `Matrix` object to apply a transformation to the polygon. * @returns The instance of the current object for chaining. */ regularPoly(x: number, y: number, radius: number, sides: number, rotation?: number, transform?: Matrix): this; /** * Draws a polygon with rounded corners. * Similar to `regularPoly` but with the ability to round the corners of the polygon. * @param x - The x-coordinate of the center of the polygon. * @param y - The y-coordinate of the center of the polygon. * @param radius - The radius of the circumscribed circle of the polygon. * @param sides - The number of sides of the polygon. Must be 3 or more. * @param corner - The radius of the rounding of the corners. * @param rotation - The rotation angle of the polygon, in radians. Zero by default. * @returns The instance of the current object for chaining. */ roundPoly(x: number, y: number, radius: number, sides: number, corner: number, rotation?: number): this; /** * Draws a shape with rounded corners. This function supports custom radius for each corner of the shape. * Optionally, corners can be rounded using a quadratic curve instead of an arc, providing a different aesthetic. * @param points - An array of `RoundedPoint` representing the corners of the shape to draw. * A minimum of 3 points is required. * @param radius - The default radius for the corners. * This radius is applied to all corners unless overridden in `points`. * @param useQuadratic - If set to true, rounded corners are drawn using a quadraticCurve * method instead of an arc method. Defaults to false. * @param smoothness - Specifies the smoothness of the curve when `useQuadratic` is true. * Higher values make the curve smoother. * @returns The instance of the current object for chaining. */ roundShape(points: RoundedPoint[], radius: number, useQuadratic?: boolean, smoothness?: number): this; /** * Draws a rectangle with fillet corners. This is much like a rounded rectangle, * however it supports negative numbers as well for the corner radius. * @param x - The x-coordinate of the upper-left corner of the rect * @param y - The y-coordinate of the upper-left corner of the rect * @param width - Width of rect * @param height - Height of rect * @param fillet - The corner radius; accepts negative or positive values * @returns The instance of the current object for chaining. */ filletRect(x: number, y: number, width: number, height: number, fillet: number): this; /** * Draws a rectangle with chamfer corners. These are angled corners. * @param x - The x-coordinate of the upper-left corner of the rect * @param y - The y-coordinate of the upper-left corner of the rect * @param width - Width of rect * @param height - Height of rect * @param chamfer - A non-zero real number, the size of the corner cutout * @param transform - An optional `Matrix` to apply a transformation to the rectangle * @returns The instance of the current object for chaining. */ chamferRect(x: number, y: number, width: number, height: number, chamfer: number, transform?: Matrix): this; /** * Draws a star shape centered at a specified location. This method allows for the creation * of stars with a variable number of points, outer radius, optional inner radius, and rotation. * The star is drawn as a closed polygon with alternating outer and inner vertices to create the star's points. * An optional transformation can be applied to scale, rotate, or translate the star as needed. * @param x - The x-coordinate of the center of the star. * @param y - The y-coordinate of the center of the star.
* @param points - The number of points of the star. * @param radius - The outer radius of the star (distance from the center to the outer points). * @param innerRadius - Optional. The inner radius of the star * (distance from the center to the inner points between the outer points). * If not provided, defaults to half of the `radius`. * @param rotation - Optional. The rotation of the star in radians, where 0 is aligned with the y-axis. * Defaults to 0, meaning one point is directly upward. * @returns The instance of the current object for chaining further drawing commands. */ star(x: number, y: number, points: number, radius: number, innerRadius?: number, rotation?: number): this; /** * Parses and renders an SVG string into the graphics context. This allows for complex shapes and paths * defined in SVG format to be drawn within the graphics context. * @param svg - The SVG string to be parsed and rendered. */ svg(svg: string): this; /** * Restores the most recently saved graphics state by popping the top of the graphics state stack. * This includes transformations, fill styles, and stroke styles. */ restore(): this; /** Saves the current graphics state, including transformations, fill styles, and stroke styles, onto a stack. */ save(): this; /** * Returns the current transformation matrix of the graphics context. * @returns The current transformation matrix. */ getTransform(): Matrix; /** * Resets the current transformation matrix to the identity matrix, effectively removing * any transformations (rotation, scaling, translation) previously applied. * @returns The instance of the current GraphicsContext for method chaining. */ resetTransform(): this; /** * Applies a rotation transformation to the graphics context around the current origin. * @param angle - The angle of rotation in radians. * @returns The instance of the current GraphicsContext for method chaining. */ rotateTransform(angle: number): this; /** * Applies a scaling transformation to the graphics context, scaling drawings by x horizontally and by y vertically. * @param x - The scale factor in the horizontal direction. * @param y - (Optional) The scale factor in the vertical direction. * If not specified, the x value is used for both directions. * @returns The instance of the current GraphicsContext for method chaining. */ scaleTransform(x: number, y?: number): this; /** * Sets the current transformation matrix of the graphics context to the specified matrix or values. * This replaces the current transformation matrix. * @param a - The value for the a property of the matrix, or a Matrix object to use directly. * @param b - The value for the b property of the matrix. * @param c - The value for the c property of the matrix. * @param d - The value for the d property of the matrix. * @param dx - The value for the tx (translate x) property of the matrix. * @param dy - The value for the ty (translate y) property of the matrix. * @returns The instance of the current GraphicsContext for method chaining. */ setTransform(transform: Matrix): this; setTransform(a: number, b: number, c: number, d: number, dx: number, dy: number): this; setTransform(a: number | Matrix, b?: number, c?: number, d?: number, dx?: number, dy?: number): this; /** * Applies the specified transformation matrix to the current graphics context by multiplying * the current matrix with the specified matrix. * @param a - The value for the a property of the matrix, or a Matrix object to use directly. * @param b - The value for the b property of the matrix. 
* @param c - The value for the c property of the matrix. * @param d - The value for the d property of the matrix. * @param dx - The value for the tx (translate x) property of the matrix. * @param dy - The value for the ty (translate y) property of the matrix. * @returns The instance of the current GraphicsContext for method chaining. */ transform(transform: Matrix): this; transform(a: number, b: number, c: number, d: number, dx: number, dy: number): this; transform(a: number | Matrix, b?: number, c?: number, d?: number, dx?: number, dy?: number): this; /** * Applies a translation transformation to the graphics context, moving the origin by the specified amounts. * @param x - The amount to translate in the horizontal direction. * @param y - (Optional) The amount to translate in the vertical direction. If not specified, * the x value is used for both directions. * @returns The instance of the current GraphicsContext for method chaining. */ translateTransform(x: number, y?: number): this; /** * Clears all drawing commands from the graphics context, effectively resetting it. This includes clearing the path, * and optionally resetting transformations to the identity matrix. * @returns The instance of the current GraphicsContext for method chaining. */ clear(): this; /** * The fill style to use. * @type {ConvertedFillStyle} */ get fillStyle(): GraphicsContext["fillStyle"]; set fillStyle(value: FillInput); /** * The stroke style to use. * @type {ConvertedStrokeStyle} */ get strokeStyle(): GraphicsContext["strokeStyle"]; set strokeStyle(value: StrokeStyle); /** * Creates a new Graphics object. * Note that only the context of the object is cloned, not its transform (position,scale,etc) * @param deep - Whether to create a deep clone of the graphics object. If false, the context * will be shared between the two objects (default false). If true, the context will be * cloned (recommended if you need to modify the context in any way). * @returns - A clone of the graphics object */ clone(deep?: boolean): Graphics; /** * @param width * @param color * @param alpha * @deprecated since 8.0.0 Use {@link Graphics#setStrokeStyle} instead */ lineStyle(width?: number, color?: ColorSource, alpha?: number): this; /** * @param color * @param alpha * @deprecated since 8.0.0 Use {@link Graphics#fill} instead */ beginFill(color: ColorSource, alpha?: number): this; /** * @deprecated since 8.0.0 Use {@link Graphics#fill} instead */ endFill(): this; /** * @param {...any} args * @deprecated since 8.0.0 Use {@link Graphics#circle} instead */ drawCircle(...args: Parameters): this; /** * @param {...any} args * @deprecated since 8.0.0 Use {@link Graphics#ellipse} instead */ drawEllipse(...args: Parameters): this; /** * @param {...any} args * @deprecated since 8.0.0 Use {@link Graphics#poly} instead */ drawPolygon(...args: Parameters): this; /** * @param {...any} args * @deprecated since 8.0.0 Use {@link Graphics#rect} instead */ drawRect(...args: Parameters): this; /** * @param {...any} args * @deprecated since 8.0.0 Use {@link Graphics#roundRect} instead */ drawRoundedRect(...args: Parameters): this; /** * @param {...any} args * @deprecated since 8.0.0 Use {@link Graphics#star} instead */ drawStar(...args: Parameters): this; } /** * A batchable graphics object. 
* @ignore */ export declare class BatchableGraphics implements DefaultBatchableMeshElement { readonly packAsQuad = false; batcherName: string; texture: Texture; renderable: Graphics; indexOffset: number; indexSize: number; attributeOffset: number; attributeSize: number; baseColor: number; alpha: number; applyTransform: boolean; roundPixels: 0 | 1; _indexStart: number; _textureId: number; _attributeStart: number; _batcher: Batcher; _batch: Batch; geometryData: { vertices: number[]; uvs: number[]; indices: number[]; }; get uvs(): number[]; get positions(): number[]; get indices(): number[]; get blendMode(): BLEND_MODES; get color(): number; get transform(): Matrix; copyTo(gpuBuffer: BatchableGraphics): void; reset(): void; } interface GeometryData { vertices: number[]; uvs: number[]; indices: number[]; } /** * A class that holds batchable graphics data for a GraphicsContext. * @memberof rendering * @ignore */ export declare class GpuGraphicsContext { isBatchable: boolean; context: GraphicsContext; batches: BatchableGraphics[]; geometryData: GeometryData; graphicsData: GraphicsContextRenderData; } /** * A class that holds the render data for a GraphicsContext. * @memberof rendering * @ignore */ export declare class GraphicsContextRenderData { batcher: Batcher; instructions: InstructionSet; init(): void; /** * @deprecated since version 8.0.0 * Use `batcher.geometry` instead. * @see {Batcher#geometry} */ get geometry(): Geometry; } /** * Options for the GraphicsContextSystem. * @memberof rendering */ export interface GraphicsContextSystemOptions { /** A value from 0 to 1 that controls the smoothness of bezier curves (the higher the smoother) */ bezierSmoothness?: number; } /** * A system that manages the rendering of GraphicsContexts. * @memberof rendering */ export declare class GraphicsContextSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "graphicsContext"; }; /** The default options for the GraphicsContextSystem. 
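* 
* A hedged sketch of two ways to adjust the option declared above; both rely only on the
* types in this file (`GraphicsContextSystemOptions` and the `RendererOptions` mixin):
* ```ts
* GraphicsContextSystem.defaultOptions.bezierSmoothness = 0.8; // smoother curves for all contexts
* // or, hypothetically, per application at init time:
* // await app.init({ bezierSmoothness: 0.8 });
* ```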
*/ static readonly defaultOptions: GraphicsContextSystemOptions; private _gpuContextHash; private _graphicsDataContextHash; /** * Runner init called, update the default options * @ignore */ init(options?: GraphicsContextSystemOptions): void; getContextRenderData(context: GraphicsContext): GraphicsContextRenderData; updateGpuContext(context: GraphicsContext): GpuGraphicsContext; getGpuContext(context: GraphicsContext): GpuGraphicsContext; private _initContextRenderData; private _initContext; protected onGraphicsContextDestroy(context: GraphicsContext): void; private _cleanGraphicsContextData; destroy(): void; } export interface GraphicsAdaptor { shader: Shader; init(): void; execute(graphicsPipe: GraphicsPipe, renderable: Graphics): void; destroy(): void; } export interface GraphicsSystem { graphicsContext: GraphicsContextSystem; renderPipes: { batch: BatchPipe; }; _roundPixels: 0 | 1; } export declare class GraphicsPipe implements RenderPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "graphics"; }; renderer: GraphicsSystem; state: State; private _graphicsBatchesHash; private _adaptor; private readonly _destroyRenderableBound; constructor(renderer: GraphicsSystem, adaptor: GraphicsAdaptor); validateRenderable(graphics: Graphics): boolean; addRenderable(graphics: Graphics, instructionSet: InstructionSet): void; updateRenderable(graphics: Graphics): void; destroyRenderable(graphics: Graphics): void; execute(graphics: Graphics): void; private _rebuild; private _addToBatcher; private _getBatchesForRenderable; private _initBatchesForRenderable; private _removeBatchForRenderable; destroy(): void; } declare global { namespace PixiMixins { interface RendererSystems { graphicsContext: GraphicsContextSystem; } interface RendererPipes { graphics: GraphicsPipe; } interface RendererOptions { /** * A value from 0 to 1 that controls the smoothness of bezier curves (the higher the smoother) * @default 0.5 */ bezierSmoothness: number; } } } /** * Options for the mesh geometry. * @memberof scene */ export interface MeshGeometryOptions { /** The positions of the mesh. */ positions?: Float32Array; /** The UVs of the mesh. */ uvs?: Float32Array; /** The indices of the mesh. */ indices?: Uint32Array; /** The topology of the mesh. */ topology?: Topology; /** Whether to shrink the buffers to fit the data. */ shrinkBuffersToFit?: boolean; } /** * A geometry used to batch multiple meshes with the same texture. * @memberof scene */ export declare class MeshGeometry extends Geometry { static defaultOptions: MeshGeometryOptions; batchMode: BatchMode; /** * @param {scene.MeshGeometryOptions} options - The options of the mesh geometry. */ constructor(options: MeshGeometryOptions); /** @deprecated since 8.0.0 */ constructor(positions: Float32Array, uvs: Float32Array, indices: Uint32Array); /** The positions of the mesh. */ get positions(): Float32Array; set positions(value: Float32Array); /** The UVs of the mesh. */ get uvs(): Float32Array; set uvs(value: Float32Array); /** The indices of the mesh. */ get indices(): Uint32Array; set indices(value: Uint32Array); } export interface TextureShader extends Shader { texture: Texture; } /** * Constructor options used for `Mesh` instances. 
Extends {@link scene.MeshViewOptions} * ```js * const mesh = new Mesh({ * texture: Texture.from('assets/image.png'), * geometry: new PlaneGeometry(), * shader: Shader.from(VERTEX, FRAGMENT), * }); * ``` * @see {@link scene.Mesh} * @see {@link scene.MeshViewOptions} * @memberof scene */ /** * @memberof scene */ export interface MeshOptions extends ContainerOptions { /** * Includes vertex positions, face indices, colors, UVs, and * custom attributes within buffers, reducing the cost of passing all * this data to the GPU. Can be shared between multiple Mesh objects. */ geometry: GEOMETRY; /** * Represents the vertex and fragment shaders that processes the geometry and runs on the GPU. * Can be shared between multiple Mesh objects. */ shader?: SHADER | null; /** The state of WebGL required to render the mesh. */ state?: State; /** The texture that the Mesh uses. Null for non-MeshMaterial shaders */ texture?: Texture; /** Whether or not to round the x/y position. */ roundPixels?: boolean; } /** * Base mesh class. * * This class empowers you to have maximum flexibility to render any kind of WebGL/WebGPU visuals you can think of. * This class assumes a certain level of WebGL/WebGPU knowledge. * If you know a bit this should abstract enough away to make your life easier! * * Pretty much ALL WebGL/WebGPU can be broken down into the following: * - Geometry - The structure and data for the mesh. This can include anything from positions, uvs, normals, colors etc.. * - Shader - This is the shader that PixiJS will render the geometry with (attributes in the shader must match the geometry) * - State - This is the state of WebGL required to render the mesh. * * Through a combination of the above elements you can render anything you want, 2D or 3D! * @memberof scene */ export declare class Mesh extends ViewContainer implements View, Instruction { readonly renderPipeId: string; state: State; /** @ignore */ _texture: Texture; /** @ignore */ _geometry: GEOMETRY; /** @ignore */ _shader: SHADER | null; /** * @param {scene.MeshOptions} options - options for the mesh instance */ constructor(options: MeshOptions); /** @deprecated since 8.0.0 */ constructor(geometry: GEOMETRY, shader: SHADER, state?: State, drawMode?: Topology); /** Alias for {@link scene.Mesh#shader}. */ get material(): SHADER; /** * Represents the vertex and fragment shaders that processes the geometry and runs on the GPU. * Can be shared between multiple Mesh objects. */ set shader(value: SHADER | null); get shader(): SHADER | null; /** * Includes vertex positions, face indices, colors, UVs, and * custom attributes within buffers, reducing the cost of passing all * this data to the GPU. Can be shared between multiple Mesh objects. */ set geometry(value: GEOMETRY); get geometry(): GEOMETRY; /** The texture that the Mesh uses. Null for non-MeshMaterial shaders */ set texture(value: Texture); get texture(): Texture; get batched(): boolean; /** * The local bounds of the mesh. * @type {rendering.Bounds} */ get bounds(): Bounds; /** * Adds the bounds of this object to the bounds object. * @param bounds - The output bounds object. */ addBounds(bounds: Bounds): void; /** * Checks if the object contains the given point. * @param point - The point to check */ containsPoint(point: PointData): boolean; /** @ignore */ onViewUpdate(): void; /** * Destroys this sprite renderable and optionally its texture. * @param options - Options parameter. 
A boolean will act as if all options * have been set to that value * @param {boolean} [options.texture=false] - Should it destroy the current texture of the renderable as well * @param {boolean} [options.textureSource=false] - Should it destroy the textureSource of the renderable as well */ destroy(options?: DestroyOptions): void; } export interface MeshAdaptor { init(): void; execute(meshPipe: MeshPipe, mesh: Mesh): void; destroy(): void; } export declare class MeshPipe implements RenderPipe, InstructionPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "mesh"; }; localUniforms: UniformGroup<{ uTransformMatrix: { value: Matrix; type: "mat3x3"; }; uColor: { value: Float32Array; type: "vec4"; }; uRound: { value: number; type: "f32"; }; }>; localUniformsBindGroup: BindGroup; renderer: Renderer; private _meshDataHash; private _gpuBatchableMeshHash; private _adaptor; private readonly _destroyRenderableBound; constructor(renderer: Renderer, adaptor: MeshAdaptor); validateRenderable(mesh: Mesh): boolean; addRenderable(mesh: Mesh, instructionSet: InstructionSet): void; updateRenderable(mesh: Mesh): void; destroyRenderable(mesh: Mesh): void; execute(mesh: Mesh): void; private _getMeshData; private _initMeshData; private _getBatchableMesh; private _initBatchableMesh; destroy(): void; } declare global { namespace PixiMixins { interface RendererPipes { mesh: MeshPipe; } } } export interface ChildrenHelperMixin { allowChildren: boolean; addChild(...children: U): U[0]; removeChild(...children: U): U[0]; removeChildren(beginIndex?: number, endIndex?: number): C[]; removeChildAt(index: number): U; getChildAt(index: number): U; setChildIndex(child: C, index: number): void; getChildIndex(child: C): number; addChildAt(child: U, index: number): U; swapChildren(child: U, child2: U): void; removeFromParent(): void; reparentChild(...child: U): U[0]; reparentChildAt(child: U, index: number): U; } export declare const childrenHelperMixin: Partial; export interface EffectsMixinConstructor { mask?: number | Container | null; filters?: Filter | Filter[]; } export interface EffectsMixin extends Required { _maskEffect?: MaskEffect; _filterEffect?: FilterEffect; filterArea?: Rectangle; effects?: Effect[]; addEffect(effect: Effect): void; removeEffect(effect: Effect): void; } export declare const effectsMixin: Partial; export interface FindMixinConstructor { label?: string; } export interface FindMixin extends Required { /** * @deprecated since 8.0.0 * @see Container#label */ name: string; getChildByName(label: RegExp | string, deep?: boolean): Container | null; getChildByLabel(label: RegExp | string, deep?: boolean): Container | null; getChildrenByLabel(label: RegExp | string, deep?: boolean, out?: Container[]): Container[]; } export declare const findMixin: Partial; export interface OnRenderMixinConstructor { onRender?: (() => void | null); } export interface OnRenderMixin extends Required { _onRender: (() => void) | null; } export declare const onRenderMixin: Partial; export interface SortMixinConstructor { zIndex?: number; sortDirty?: boolean; sortableChildren?: boolean; } export interface SortMixin extends Required { _zIndex: number; sortChildren: () => void; depthOfChildModified: () => void; } export declare const sortMixin: Partial; export interface ToLocalGlobalMixin { getGlobalPosition(point?: Point, skipUpdate?: boolean): Point; toGlobal
<P extends PointData = Point>(position: PointData, point?: P, skipUpdate?: boolean): P; toLocal<P extends PointData = Point>
(position: PointData, from?: Container, point?: P, skipUpdate?: boolean): P; } export declare const toLocalGlobalMixin: Partial; declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-interface interface Container extends ChildrenHelperMixin, ToLocalGlobalMixin, OnRenderMixin, MeasureMixin, EffectsMixin, FindMixin, SortMixin { } // eslint-disable-next-line @typescript-eslint/no-empty-interface interface ContainerOptions extends OnRenderMixinConstructor, MeasureMixinConstructor, EffectsMixinConstructor, FindMixinConstructor, SortMixinConstructor { } } } /** * Options for the {@link utils.Transform} constructor. * @memberof utils.Transform */ export interface TransformOptions { /** The matrix to use. */ matrix?: Matrix; /** The observer to use. */ observer?: { _onUpdate: (transform: Transform) => void; }; } /** * The Transform class facilitates the manipulation of a 2D transformation matrix through * user-friendly properties: position, scale, rotation, skew, and pivot. * @memberof utils */ export declare class Transform { /** * The local transformation matrix. * @internal * @private */ _matrix: Matrix; /** The coordinate of the object relative to the local coordinates of the parent. */ position: ObservablePoint; /** The scale factor of the object. */ scale: ObservablePoint; /** The pivot point of the container that it rotates around. */ pivot: ObservablePoint; /** The skew amount, on the x and y axis. */ skew: ObservablePoint; /** The rotation amount. */ protected _rotation: number; /** * The X-coordinate value of the normalized local X axis, * the first column of the local transformation matrix without a scale. */ protected _cx: number; /** * The Y-coordinate value of the normalized local X axis, * the first column of the local transformation matrix without a scale. */ protected _sx: number; /** * The X-coordinate value of the normalized local Y axis, * the second column of the local transformation matrix without a scale. */ protected _cy: number; /** * The Y-coordinate value of the normalized local Y axis, * the second column of the local transformation matrix without a scale. */ protected _sy: number; protected dirty: boolean; protected observer: Observer; /** * @param options - Options for the transform. * @param options.matrix - The matrix to use. * @param options.observer - The observer to use. */ constructor({ matrix, observer }?: TransformOptions); /** * This matrix is computed by combining this Transforms position, scale, rotation, skew, and pivot * properties into a single matrix. * @readonly */ get matrix(): Matrix; /** * Called when a value changes. * @param point * @internal * @private */ _onUpdate(point?: ObservablePoint): void; /** Called when the skew or the rotation changes. */ protected updateSkew(): void; toString(): string; /** * Decomposes a matrix and sets the transforms properties based on it. * @param matrix - The matrix to decompose */ setFromMatrix(matrix: Matrix): void; /** The rotation of the object in radians. */ get rotation(): number; set rotation(value: number); } /** * Constructor options used for `TilingSprite` instances. 
Extends {@link scene.TilingSpriteViewOptions} * ```js * const tilingSprite = new TilingSprite({ * texture: Texture.from('assets/image.png'), * width: 100, * height: 100, * tilePosition: { x: 100, y: 100 }, * tileScale: { x: 2, y: 2 }, * }); * ``` * @see {@link scene.TilingSprite} * @see {@link scene.TilingSpriteViewOptions} * @memberof scene */ export interface TilingSpriteOptions extends ContainerOptions { /** * The anchor point of the sprite * @default {x: 0, y: 0} */ anchor?: PointData; /** * The offset of the image that is being tiled. * @default {x: 0, y: 0} */ tilePosition?: PointData; /** * Scaling of the image that is being tiled. * @default {x: 1, y: 1} */ tileScale?: PointData; /** * The rotation of the image that is being tiled. * @default 0 */ tileRotation?: number; /** * The texture to use for the sprite. * @default Texture.WHITE */ texture?: Texture; /** * The width of the tiling sprite. # * @default 256 */ width?: number; /** * The height of the tiling sprite. * @default 256 */ height?: number; /** * @todo * @default false */ applyAnchorToTexture?: boolean; /** Whether or not to round the x/y position. */ roundPixels?: boolean; } /** * A tiling sprite is a fast way of rendering a tiling image. * @example * const tilingSprite = new TilingSprite({ * texture: Texture.from('assets/image.png'), * width: 100, * height: 100, * }); * * tilingSprite.tilePosition.x = 100; * tilingSprite.tilePosition.y = 100; * * app.stage.addChild(tilingSprite); * @memberof scene * @extends scene.Container */ export declare class TilingSprite extends ViewContainer implements View, Instruction { /** * Creates a new tiling sprite. * @param source - The source to create the texture from. * @param options - The options for creating the tiling sprite. * @returns A new tiling sprite. */ static from(source: Texture | string, options?: TilingSpriteOptions): TilingSprite; /** default options for the TilingSprite */ static defaultOptions: TilingSpriteOptions; readonly renderPipeId: string; readonly batched = true; _anchor: ObservablePoint; _tileTransform: Transform; _texture: Texture; _applyAnchorToTexture: boolean; _didTilingSpriteUpdate: boolean; private _width; private _height; /** * @param {rendering.Texture | scene.TilingSpriteOptions} options - The options for creating the tiling sprite. */ constructor(options?: Texture | TilingSpriteOptions); /** @deprecated since 8.0.0 */ constructor(texture: Texture, width: number, height: number); /** * Changes frame clamping in corresponding textureMatrix * Change to -0.5 to add a pixel to the edge, recommended for transparent trimmed textures in atlas * @default 0.5 * @member {number} */ get clampMargin(): number; set clampMargin(value: number); /** * The anchor sets the origin point of the sprite. The default value is taken from the {@link Texture} * and passed to the constructor. * * The default is `(0,0)`, this means the sprite's origin is the top left. * * Setting the anchor to `(0.5,0.5)` means the sprite's origin is centered. * * Setting the anchor to `(1,1)` would mean the sprite's origin point will be the bottom right corner. * * If you pass only single parameter, it will set both x and y to the same value as shown in the example below. * @example * import { TilingSprite } from 'pixi.js'; * * const sprite = new TilingSprite({texture: Texture.WHITE}); * sprite.anchor.set(0.5); // This will set the origin to center. (0.5) is same as (0.5, 0.5). 
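* 
* // A further illustrative sketch (not from the original docs), reusing `sprite` from above:
* sprite.anchor.set(1, 1); // origin at the bottom-right corner
* sprite.tilePosition.set(10, 10); // offset the tiled image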
*/ get anchor(): ObservablePoint; set anchor(value: PointData | number); /** The offset of the image that is being tiled. */ get tilePosition(): ObservablePoint; set tilePosition(value: PointData); /** The scaling of the image that is being tiled. */ get tileScale(): ObservablePoint; set tileScale(value: PointData | number); set tileRotation(value: number); /** The rotation of the image that is being tiled. */ get tileRotation(): number; /** The transform of the image that is being tiled. */ get tileTransform(): Transform; /** * The local bounds of the sprite. * @type {rendering.Bounds} */ get bounds(): Bounds; set texture(value: Texture); /** The texture that the sprite is using. */ get texture(): Texture; /** The width of the tiling area. */ set width(value: number); get width(): number; set height(value: number); /** The height of the tiling area. */ get height(): number; /** * Sets the size of the TilingSprite to the specified width and height. * This is faster than setting the width and height separately. * @param value - This can be either a number or a [Size]{@link Size} object. * @param height - The height to set. Defaults to the value of `width` if not provided. */ setSize(value: number | Optional, height?: number): void; /** * Retrieves the size of the TilingSprite as a [Size]{@link Size} object. * This is faster than get the width and height separately. * @param out - Optional object to store the size in. * @returns - The size of the TilingSprite. */ getSize(out?: Size): Size; protected _updateBounds(): void; /** * Adds the bounds of this object to the bounds object. * @param bounds - The output bounds object. */ addBounds(bounds: Bounds): void; /** * Checks if the object contains the given point. * @param point - The point to check */ containsPoint(point: PointData): boolean; onViewUpdate(): void; /** * Destroys this sprite renderable and optionally its texture. * @param options - Options parameter. A boolean will act as if all options * have been set to that value * @param {boolean} [options.texture=false] - Should it destroy the current texture of the renderable as well * @param {boolean} [options.textureSource=false] - Should it destroy the textureSource of the renderable as well */ destroy(options?: DestroyOptions): void; } export declare class TilingSpritePipe implements RenderPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "tilingSprite"; }; private _renderer; private readonly _state; private readonly _tilingSpriteDataHash; private readonly _destroyRenderableBound; constructor(renderer: Renderer); validateRenderable(renderable: TilingSprite): boolean; addRenderable(tilingSprite: TilingSprite, instructionSet: InstructionSet): void; execute(tilingSprite: TilingSprite): void; updateRenderable(tilingSprite: TilingSprite): void; destroyRenderable(tilingSprite: TilingSprite): void; private _getTilingSpriteData; private _initTilingSpriteData; private _updateBatchableMesh; destroy(): void; private _updateCanBatch; } declare global { namespace PixiMixins { interface RendererPipes { tilingSprite: TilingSpritePipe; } } } /** * A BitmapText Object will create a line or multiple lines of text. * * To split a line you can use '\n' in your text string, or, on the `style` object, * change its `wordWrap` property to true and and give the `wordWrapWidth` property a value. * * The text is created using a bitmap font (a sprite sheet of characters). 
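* 
* For illustration only (font and size values are arbitrary), the two line-splitting options mentioned above look like this:
* ```ts
* new BitmapText({ text: 'line one\nline two', style: { fontFamily: 'Arial', fontSize: 24 } });
* new BitmapText({ text: 'this text wraps automatically', style: { fontFamily: 'Arial', fontSize: 24, wordWrap: true, wordWrapWidth: 200 } });
* ```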
* * The primary advantage of this render mode over `text` is that all of your textures are pre-generated and loaded, * meaning that rendering is fast, and changing text is much faster than Text. * * The primary disadvantage is that supporting character sets other than latin, such as CJK languages, * may be impractical due to the number of characters. * * Pre-loaded BitmapFonts: * * * PixiJS enables the loading of BitmapFonts through its Asset Manager, supporting both XML and FNT formats. * Additionally, PixiJS is compatible with MSDF (Multi-channel Signed Distance Field) and SDF (Signed Distance Field) fonts. * These advanced font types allow for scaling without quality degradation and must be created with specific tools, * such as the one available at https://msdf-bmfont.donmccurdy.com/. * * Dynamically Generated BitmapFonts: * * * PixiJS also offers the capability to generate BitmapFonts dynamically. This means that fonts are created in real-time * based on specified styles, eliminating the need for pre-loading. This process is initiated simply by assigning a style * to a BitmapText object, which then automatically generates the required font. * * However, dynamically generating a large number of fonts may lead to significant memory use. To prevent this, * PixiJS smartly attempts to reuse fonts that closely match the desired style parameters. For instance, if a text style * requires a font size of 80 but a similar font of size 100 has already been generated, PixiJS will scale the existing * font to fit the new requirement, rather than creating a new font from scratch. * * For those who prefer to manage BitmapFonts manually, PixiJS provides the BitmapFont.install method. This method * allows for the pre-generation and preparation of fonts, making them readily available for use by specifying the * fontFamily in your text styling. * * This approach ensures efficient font management within PixiJS, balancing between dynamic generation for flexibility * and manual management for optimized performance. * @example * import { BitmapText, BitmapFont } from 'pixi.js'; * * // generate a dynamic font behind the scenes: * const text = new BitmapText({ * text: 'Hello Pixi!', * style: { * fontFamily: 'Arial', * fontSize: 24, * fill: 0xff1010, * align: 'center', * } * }); * * // pre install * BitmapFont.install({ * name: 'myFont', * style:{ * fontFamily: 'Arial', * } * }) * * // new bitmap text with preinstalled font * const text = new BitmapText({ * text: 'Hello Pixi!', * style: { * fontFamily: 'myFont', * fontSize: 24, * fill: 0xff1010, * align: 'center', * } * } * * // load a font from an xml file * const font = await Assets.load('path/to/myLoadedFont.fnt'); * * // new bitmap text with loaded font * const text = new BitmapText({ * text: 'Hello Pixi!', * style: { * fontFamily: 'myLoadedFont', // the name of the font in the fnt file * fontSize: 24, * fill: 0xff1010, * align: 'center', * } * } * @memberof scene */ export declare class BitmapText extends AbstractText implements View { readonly renderPipeId: string; /** * **Note:** Our docs parser struggles to properly understand the constructor signature. * This is the correct signature. * ```ts * new BitmapText(options?: TextOptions); * ``` * @param { text.TextOptions } options - The options of the bitmap text. 
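* @example
* // Hedged sketch: updating `.text` at runtime is where BitmapText shines (see the class notes above);
* // the `.text` accessor is assumed from the AbstractText base class.
* const score = new BitmapText({ text: 'score: 0', style: { fontFamily: 'Arial', fontSize: 24 } });
* score.text = 'score: 1'; // cheap update, the glyph textures are already generated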
*/ constructor(options?: TextOptions); /** @deprecated since 8.0.0 */ constructor(text?: TextString, options?: Partial); protected _updateBounds(): void; } export declare class BitmapTextPipe implements RenderPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "bitmapText"; }; private _renderer; private _gpuBitmapText; private readonly _destroyRenderableBound; constructor(renderer: Renderer); validateRenderable(bitmapText: BitmapText): boolean; addRenderable(bitmapText: BitmapText, instructionSet: InstructionSet): void; destroyRenderable(bitmapText: BitmapText): void; private _destroyRenderableByUid; updateRenderable(bitmapText: BitmapText): void; private _updateContext; private _getGpuBitmapText; initGpuText(bitmapText: BitmapText): Graphics; private _updateDistanceField; destroy(): void; } declare global { namespace PixiMixins { interface RendererPipes { bitmapText: BitmapTextPipe; } } } /** * Constructor options used for `HTMLText` instances. * @property {string} [text=''] - The string that you would like the text to display. * @property {text.HTMLTextStyle | text.HTMLTextStyleOptions} [style] - The style of the text. * @memberof text */ export type HTMLTextOptions = TextOptions; /** * A HTMLText Object will create a line or multiple lines of text. * * To split a line you can use '\n' in your text string, or, on the `style` object, * change its `wordWrap` property to true and give the `wordWrapWidth` property a value. * * HTMLText uses an SVG foreignObject to render HTML text. * * * The primary advantages of this render mode are: * * - Supports [HTML tags](https://developer.mozilla.org/en-US/docs/Learn/HTML/Introduction_to_HTML/HTML_text_fundamentals) * for styling such as `<strong>`, or `<em>`, as well as `<span style="">` * * - Better support for emojis and other HTML layout features, better compatibility with CSS * line-height and letter-spacing. * * * The primary disadvantages are: * - Unlike `text`, `html` rendering will vary slightly between platforms and browsers. * `html` uses SVG/DOM to render text and not Context2D's fillText like `text`. * * - Performance and memory usage are on par with `text` (that is to say, slow and heavy) * * - Only works with browsers that support `<foreignObject>`. * @example * import { HTMLText } from 'pixi.js'; * * const text = new HTMLText({ * text: 'Hello Pixi!', * style: { * fontFamily: 'Arial', * fontSize: 24, * fill: 0xff1010, * align: 'center', * } * }); * @memberof scene */ export declare class HTMLText extends AbstractText implements View { readonly renderPipeId: string; /** * @param {text.HTMLTextOptions} options - The options of the html text.
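* @example
* // Hedged sketch: HTML markup inside the string is the main reason to reach for HTMLText;
* // the tags and style values shown here are examples only.
* const fancy = new HTMLText({
*     text: 'Hello <strong>Pixi</strong>, <em>nice</em> to meet you!',
*     style: { fontFamily: 'Arial', fontSize: 24, fill: 0xffffff },
* });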
*/ constructor(options?: HTMLTextOptions); /** @deprecated since 8.0.0 */ constructor(text?: TextString, options?: Partial); protected _updateBounds(): void; } export interface FontCSSStyleOptions { fontFamily: string | string[]; fontWeight: string; fontStyle: string; } /** * This will take a font url and a style and return a css string that can be injected into a style tag * This will contain inlined base64 font and the font family information * @param style - the style to generate the css for * @param url - The url to load the font from * @returns - The css string */ export declare function loadFontCSS(style: FontCSSStyleOptions, url: string): Promise; /** * System plugin to the renderer to manage HTMLText * @memberof rendering */ export declare class HTMLTextSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "htmlText"; }; static defaultFontOptions: FontCSSStyleOptions; private _activeTextures; /** * WebGPU has a cors issue when uploading an image that is an SVGImage * To get around this we need to create a canvas draw the image to it and upload that instead. * Bit of a shame.. but no other work around just yet! */ private readonly _createCanvas; private readonly _renderer; constructor(renderer: Renderer); getTexture(options: HTMLTextOptions): Promise; getManagedTexture(text: string, resolution: number, style: HTMLTextStyle, textKey: string): Promise; private _buildTexturePromise; private _increaseReferenceCount; decreaseReferenceCount(textKey: string): void; private _cleanUp; getReferenceCount(textKey: string): number; destroy(): void; } /** * A batchable sprite object. * @ignore */ export declare class BatchableSprite implements DefaultBatchableQuadElement { batcherName: string; readonly attributeSize = 4; readonly indexSize = 6; readonly packAsQuad = true; transform: Matrix; renderable: ViewContainer; texture: Texture; bounds: BoundsData; roundPixels: 0 | 1; _indexStart: number; _textureId: number; _attributeStart: number; _batcher: Batcher; _batch: Batch; get blendMode(): BLEND_MODES; get color(): number; reset(): void; } export declare class HTMLTextPipe implements RenderPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "htmlText"; }; private _renderer; private _gpuText; private readonly _destroyRenderableBound; constructor(renderer: Renderer); resolutionChange(): void; validateRenderable(htmlText: HTMLText): boolean; addRenderable(htmlText: HTMLText, instructionSet: InstructionSet): void; updateRenderable(htmlText: HTMLText): void; destroyRenderable(htmlText: HTMLText): void; private _destroyRenderableById; private _updateText; private _updateGpuText; private _getGpuText; initGpuText(htmlText: HTMLText): { textureNeedsUploading: boolean; generatingTexture: boolean; texture: Texture>; currentKey: string; batchableSprite: BatchableSprite; }; destroy(): void; } declare global { namespace PixiMixins { interface RendererSystems { htmlText: HTMLTextSystem; } interface RendererPipes { htmlText: HTMLTextPipe; } } } interface CanvasAndContext$1 { canvas: ICanvas; context: ICanvasRenderingContext2D; } /** * System plugin to the renderer to manage canvas text. 
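* 
* A minimal usage sketch (assumption: a running renderer that exposes this system as
* `renderer.canvasText`, as registered in the PixiMixins below):
* ```ts
* const texture = renderer.canvasText.getTexture({ text: 'cached label', style: { fill: 'white' } });
* const label = new Sprite(texture); // reuse the generated texture like any other sprite texture
* ```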
* @memberof rendering */ export declare class CanvasTextSystem implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "canvasText"; }; private _activeTextures; private readonly _renderer; constructor(_renderer: Renderer); getTextureSize(text: string, resolution: number, style: TextStyle): { width: number; height: number; }; /** * This is a function that will create a texture from a text string, style and resolution. * Useful if you want to make a texture of your text and use it for various other pixi things! * @param options - The options of the text that will be used to generate the texture. * @param options.text - the text to render * @param options.style - the style of the text * @param options.resolution - the resolution of the texture * @returns the newly created texture */ /** @deprecated since 8.0.0 */ getTexture(text: string, resolution: number, style: TextStyle, textKey: string): Texture; getTexture(options: TextOptions): Texture; createTextureAndCanvas(options: { text: string; style: TextStyle; resolution?: number; }): { texture: Texture>; canvasAndContext: CanvasAndContext; }; getManagedTexture(text: Text$1): Texture>; private _increaseReferenceCount; decreaseReferenceCount(textKey: string): void; getReferenceCount(textKey: string): number; /** * Renders text to its canvas, and updates its texture. * * By default this is used internally to ensure the texture is correct before rendering, * but it can also be called externally, for example to 'pre-generate' a texture from a piece of text * that can then be shared across multiple Sprites. * @param text * @param style * @param resolution * @param canvasAndContext */ renderTextToCanvas(text: string, style: TextStyle, resolution: number, canvasAndContext: CanvasAndContext$1): void; /** * Render the text with letter-spacing. * @param text - The text to draw * @param style * @param canvasAndContext * @param x - Horizontal position to draw the text * @param y - Vertical position to draw the text * @param isStroke - Is this drawing for the outside stroke of the * text?
If not, it's for the inside fill */ private _drawLetterSpacing; destroy(): void; } export declare class CanvasTextPipe implements RenderPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "text"; }; private _renderer; private _gpuText; private readonly _destroyRenderableBound; constructor(renderer: Renderer); resolutionChange(): void; validateRenderable(text: Text$1): boolean; addRenderable(text: Text$1, instructionSet: InstructionSet): void; updateRenderable(text: Text$1): void; destroyRenderable(text: Text$1): void; private _destroyRenderableById; private _updateText; private _updateGpuText; private _getGpuText; initGpuText(text: Text$1): { texture: Texture>; currentKey: string; batchableSprite: BatchableSprite; }; destroy(): void; } declare global { namespace PixiMixins { interface RendererSystems { canvasText: CanvasTextSystem; } interface RendererPipes { text: CanvasTextPipe; } } } export interface BlendModeFilterOptions { source?: string; gpu?: { functions?: string; main?: string; }; gl?: { functions?: string; main?: string; }; } export declare class BlendModeFilter extends Filter { constructor(options: BlendModeFilterOptions); } /** * Available as `container.blendMode = 'color'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'color' * @memberof filters */ export declare class ColorBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Looks at the color information in each channel and darkens the base color to * reflect the blend color by increasing the contrast between the two. * * Available as `container.blendMode = 'color-burn'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'color-burn' * @memberof filters */ export declare class ColorBurnBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Looks at the color information in each channel and brightens the base color to reflect the blend color by decreasing contrast between the two. * Available as `container.blendMode = 'color-dodge'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'color-dodge' * @memberof filters */ export declare class ColorDodgeBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Uses each color channel to select the darker of the following two values; base or blend color * Available as `container.blendMode = 'darken'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'darken' * @memberof filters */ export declare class DarkenBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'difference'` after importing `pixi.js/advanced-blend-modes`. 
* @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'difference' * @memberof filters */ export declare class DifferenceBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Divides the blend from the base color using each color channel * Available as `container.blendMode = 'divide'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'divide' * @memberof filters */ export declare class DivideBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'exclusion'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'exclusion' * @memberof filters */ export declare class ExclusionBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'hard-light'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'hard-light' * @memberof filters */ export declare class HardLightBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Hard defines each of the color channel values of the blend color to the RGB values of the base color. * If the sum of a channel is 255, it receives a value of 255; if less than 255, a value of 0. * * Available as `container.blendMode = 'hard-mix'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'hard-mix' * @memberof filters */ export declare class HardMixBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'lighten'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'lighten' * @memberof filters */ export declare class LightenBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Looks at the color information in each channel and darkens the base color to * reflect the blend color by increasing the contrast between the two. * * Available as `container.blendMode = 'linear-burn'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'linear-burn' * @memberof filters */ export declare class LinearBurnBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Looks at the color information in each channel and brightens the base color to reflect the blend color by decreasing contrast between the two. * Available as `container.blendMode = 'linear-dodge'` after importing `pixi.js/advanced-blend-modes`. 
* @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'linear-dodge' * @memberof filters */ export declare class LinearDodgeBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Increase or decrease brightness by burning or dodging color values, based on the blend color * Available as `container.blendMode = 'linear-light'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'linear-light' * @memberof filters */ export declare class LinearLightBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'luminosity'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'luminosity' * @memberof filters */ export declare class LuminosityBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'negation'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'negation' */ export declare class NegationBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'overlay'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'overlay' */ export declare class OverlayBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Replaces colors based on the blend color. * Available as `container.blendMode = 'pin-light'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'pin-light' * @memberof filters */ export declare class PinLightBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'saturation'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'saturation' */ export declare class SaturationBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'soft-light'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'soft-light' */ export declare class SoftLightBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Subtracts the blend from the base color using each color channel * Available as `container.blendMode = 'subtract'` after importing `pixi.js/advanced-blend-modes`. 
* @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'subtract' * @memberof filters */ export declare class SubtractBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * Available as `container.blendMode = 'vivid-light'` after importing `pixi.js/advanced-blend-modes`. * @example * import 'pixi.js/advanced-blend-modes'; * import { Sprite } from 'pixi.js'; * * const sprite = Sprite.from('something.png'); * sprite.blendMode = 'vivid-light' */ export declare class VividLightBlend extends BlendModeFilter { /** @ignore */ static extension: ExtensionMetadata; constructor(); } /** * For every asset that is cached, it will call the parsers test function * the flow is as follows: * * 1. `cacheParser.test()`: Test the asset. * 2. `cacheParser.getCacheableAssets()`: If the test passes call the getCacheableAssets function with the asset * * Useful if you want to add more than just a raw asset to the cache * (for example a spritesheet will want to make all its sub textures easily accessible in the cache) * @memberof assets */ export interface CacheParser { /** The extension type of this cache parser */ extension?: ExtensionMetadata; /** A config to adjust the parser */ config?: Record; /** * Gets called by the cache when a dev caches an asset * @param asset - the asset to test */ test: (asset: T) => boolean; /** * If the test passes, this function is called to get the cacheable assets * an example may be that a spritesheet object will return all the sub textures it has so they can * be cached. * @param keys - The keys to cache the assets under * @param asset - The asset to get the cacheable assets from * @returns A key-value pair of cacheable assets */ getCacheableAssets: (keys: string[], asset: T) => Record; } /** * Format detection is useful for detecting feature support on the current platform. * @memberof assets */ export interface FormatDetectionParser { /** Should be ExtensionType.DetectionParser */ extension?: ExtensionMetadata; /** Browser/platform feature detection supported if return true */ test: () => Promise; /** * Add formats (file extensions) to the existing list of formats. * Return an new array with added formats, do not mutate the formats argument. * @returns {Promise} - Promise that resolves to the new formats array. */ add: (formats: string[]) => Promise; /** * Remove formats (file extensions) from the list of supported formats. * This is used when uninstalling this DetectionParser. * Return an new array with filtered formats, do not mutate the formats argument. * @returns {Promise} - Promise that resolves to the new formats array. */ remove: (formats: string[]) => Promise; } export type ArrayOr = T | T[]; /** * Names of the parsers that are built into PixiJS. * Can be any of the following defaults: * - `loadJson` * - `loadSVG` * - `loadTextures` * - `loadTxt` * - `loadVideo` * - `loadWebFont` * or a custom parser name. * @memberof assets */ export type LoadParserName = "loadJson" | "loadSVG" | "loadTextures" | "loadTxt" | "loadVideo" | "loadWebFont" | string; /** * A fully resolved asset, with all the information needed to load it. 
* @memberof assets */ export interface ResolvedAsset { /** Aliases associated with asset */ alias?: string[]; /** The URL or relative path to the asset */ src?: string; /** Optional data */ data?: T; /** Format, usually the file extension */ format?: string; /** An override that will ensure that the asset is loaded with a specific parser */ loadParser?: LoadParserName; } /** * A fully resolved src, Glob patterns will not work here, and the src will be resolved to a single file. * @memberof assets * @property {string} src - The URL or relative path to the asset * @property {string} format - Format, usually the file extension * @property {string} loadParser - An override that will ensure that the asset is loaded with a specific parser * @property {any} data - Optional data */ export type ResolvedSrc = Pick; /** * A valid asset src. This can be a string, or a [ResolvedSrc]{@link assets.ResolvedSrc}, * or an array of either. * @memberof assets */ export type AssetSrc = ArrayOr | (ArrayOr & { [key: string]: any; }); /** * An asset that has not been resolved yet. * @memberof assets */ export type UnresolvedAsset = Pick, "data" | "format" | "loadParser"> & { /** Aliases associated with asset */ alias?: ArrayOr; /** The URL or relative path to the asset */ src?: AssetSrc; [key: string]: any; }; /** * Structure of a bundle found in a {@link assets.AssetsManifest Manifest} file * @memberof assets */ export interface AssetsBundle { /** The name of the bundle */ name: string; /** The assets in the bundle */ assets: UnresolvedAsset[] | Record | UnresolvedAsset>; } /** * The expected format of a manifest. This could be auto generated or hand made * @memberof assets */ export interface AssetsManifest { /** array of bundles */ bundles: AssetsBundle[]; } /** * A promise and parser pair * @memberof assets */ export interface PromiseAndParser { /** the promise that is loading the asset */ promise: Promise; /** the parser that is loading the asset */ parser: LoaderParser; } /** * The Loader is responsible for loading all assets, such as images, spritesheets, audio files, etc. * It does not do anything clever with URLs - it just loads stuff! * Behind the scenes all things are cached using promises. This means it's impossible to load an asset more than once. * Through the use of LoaderParsers, the loader can understand how to load any kind of file! * * It is not intended that this class is created by developers - its part of the Asset class * This is the second major system of PixiJS' main Assets class * @memberof assets */ export declare class Loader { private readonly _parsers; private _parserHash; private _parsersValidated; /** * All loader parsers registered * @type {assets.LoaderParser[]} */ parsers: LoaderParser>[]; /** Cache loading promises that ae currently active */ promiseCache: Record; /** function used for testing */ reset(): void; /** * Used internally to generate a promise for the asset to be loaded. * @param url - The URL to be loaded * @param data - any custom additional information relevant to the asset being loaded * @returns - a promise that will resolve to an Asset for example a Texture of a JSON object */ private _getLoadPromiseAndParser; /** * Loads one or more assets using the parsers added to the Loader. * @example * // Single asset: * const asset = await Loader.load('cool.png'); * console.log(asset); * * // Multiple assets: * const assets = await Loader.load(['cool.png', 'cooler.png']); * console.log(assets); * @param assetsToLoadIn - urls that you want to load, or a single one! 
* @param onProgress - For multiple asset loading only, an optional function that is called * when progress on asset loading is made. The function is passed a single parameter, `progress`, * which represents the percentage (0.0 - 1.0) of the assets loaded. Do not use this function * to detect when assets are complete and available, instead use the Promise returned by this function. */ load(assetsToLoadIn: string | ResolvedAsset, onProgress?: (progress: number) => void): Promise; load(assetsToLoadIn: string[] | ResolvedAsset[], onProgress?: (progress: number) => void): Promise>; /** * Unloads one or more assets. Any unloaded assets will be destroyed, freeing up memory for your app. * The parser that created the asset, will be the one that unloads it. * @example * // Single asset: * const asset = await Loader.load('cool.png'); * * await Loader.unload('cool.png'); * * console.log(asset.destroyed); // true * @param assetsToUnloadIn - urls that you want to unload, or a single one! */ unload(assetsToUnloadIn: string | string[] | ResolvedAsset | ResolvedAsset[]): Promise; /** validates our parsers, right now it only checks for name conflicts but we can add more here as required! */ private _validateParsers; } /** * The extension priority for loader parsers. * Helpful when managing multiple parsers that share the same extension test. * The higher priority parsers will be checked first. * @enum {number} */ export declare enum LoaderParserPriority { /** Generic parsers: txt, json, webfonts */ Low = 0, /** PixiJS assets with generic extensions: spritesheets, bitmapfonts */ Normal = 1, /** Specific texture types: svg, png, ktx, dds, basis */ High = 2 } /** A more verbose version of the LoaderParser, allowing you to set the loaded, parsed, and unloaded asset separately */ export interface LoaderParserAdvanced> { /** Should be ExtensionType.LoaderParser */ extension?: ExtensionMetadata; /** A config to adjust the parser */ config?: CONFIG; /** The name of the parser (this can be used when specifying loadParser in a ResolvedAsset) */ name: string; /** * Each URL to load will be tested here, * if the test is passed the assets are loaded using the load function below. * Good place to test for things like file extensions! * @param url - The URL to test * @param resolvedAsset - Any custom additional information relevant to the asset being loaded * @param loader - The loader instance */ test?: (url: string, resolvedAsset?: ResolvedAsset, loader?: Loader) => boolean; /** * This is the promise that loads the URL provided * resolves with a loaded asset if returned by the parser. * @param url - The URL to load * @param resolvedAsset - Any custom additional information relevant to the asset being loaded * @param loader - The loader instance */ load?: (url: string, resolvedAsset?: ResolvedAsset, loader?: Loader) => Promise; /** * This function is used to test if the parse function should be run on the asset * If this returns true then parse is called with the asset * @param asset - The loaded asset data * @param resolvedAsset - Any custom additional information relevant to the asset being loaded * @param loader - The loader instance */ testParse?: (asset: ASSET, resolvedAsset?: ResolvedAsset, loader?: Loader) => Promise; /** * Gets called on the asset it testParse passes. 
Useful to convert a raw asset into something more useful * @param asset - The loaded asset data * @param resolvedAsset - Any custom additional information relevant to the asset being loaded * @param loader - The loader instance */ parse?: (asset: ASSET, resolvedAsset?: ResolvedAsset, loader?: Loader) => Promise; /** * If an asset is parsed using this parser, the unload function will be called when the user requests an asset * to be unloaded. This is useful for things like sounds or textures that can be unloaded from memory. * @param asset - The asset to unload/destroy * @param resolvedAsset - Any custom additional information relevant to the asset being loaded * @param loader - The loader instance */ unload?: (asset: UNLOAD_ASSET, resolvedAsset?: ResolvedAsset, loader?: Loader) => Promise | void; } /** * The interface to define a loader parser *(all functions are optional)*. * * When you create a `parser` object, the flow for every asset loaded is: * * 1. `parser.test()` - Each URL to load will be tested here; if the test is passed, the asset is * loaded using the load function below. Good place to test for things like file extensions! * 2. `parser.load()` - This is the promise that loads the URL provided and resolves with the loaded asset * returned by the parser. * 3. `parser.testParse()` - This function is used to test if the parse function should be run on the * asset. If this returns true, then parse is called with the asset. * 4. `parser.parse()` - Gets called on the asset if testParse passes. Useful to convert a raw asset * into something more useful. * *
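 * For example, a bare-bones parser for a hypothetical `.csv` text format might look roughly like
 * the sketch below (the `.csv` handling and the `loadCsv` name are illustrative only, not part of PixiJS):
 * ```js
 * import { extensions, ExtensionType } from 'pixi.js';
 *
 * const loadCsv = {
 *     name: 'loadCsv',
 *     extension: { type: ExtensionType.LoadParser },
 *     // 1. only handle urls that end in `.csv`
 *     test: (url) => url.endsWith('.csv'),
 *     // 2. fetch the raw text
 *     load: async (url) => (await fetch(url)).text(),
 *     // 3. only parse string payloads
 *     testParse: async (asset) => typeof asset === 'string',
 *     // 4. turn the raw text into rows/columns
 *     parse: async (asset) => asset.split('\n').map((row) => row.split(',')),
 * };
 *
 * extensions.add(loadCsv);
 * ```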
* Some loaders may only be used for parsing, some only for loading, and some for both! * @memberof assets */ export interface LoaderParser> extends LoaderParserAdvanced { } /** * A prefer order lets the resolver know which assets to prefer depending on the various parameters passed to it. * @memberof assets */ export interface PreferOrder { /** the importance order of the params */ priority?: string[]; params: { [key: string]: any; }; } /** * Format for url parser, will test a string and if it pass will then parse it, turning it into an ResolvedAsset * @memberof assets */ export interface ResolveURLParser { extension?: ExtensionMetadata; /** A config to adjust the parser */ config?: Record; /** the test to perform on the url to determine if it should be parsed */ test: (url: string) => boolean; /** the function that will convert the url into an object */ parse: (value: string) => ResolvedAsset & { [key: string]: any; }; } /** * A more verbose version of the AssetExtension, * allowing you to set the cached, loaded, parsed, and unloaded asset separately * @memberof assets */ export interface AssetExtensionAdvanced { /** The type of extension */ extension: ExtensionType.Asset; /** the asset loader */ loader?: LoaderParserAdvanced; /** the asset resolve parser */ resolver?: Partial; /** the asset cache parser */ cache?: Partial>; /** the asset format detection parser */ detection?: Partial; } /** * This developer convenience object allows developers to group * together the various asset parsers into a single object. * @example * import { AssetExtension, extensions } from 'pixi.js'; * * // create the CacheParser * const cache = { * test(asset: item): boolean { * // Gets called by the cache when a dev caches an asset * }, * getCacheableAssets(keys: string[], asset: item): Record { * // If the test passes, this function is called to get the cacheable assets * // an example may be that a spritesheet object will return all the sub textures it has so they can * // be cached. * }, * }; * * // create the ResolveURLParser * const resolver = { * test(value: string): boolean { * // the test to perform on the url to determine if it should be parsed * }, * parse(value: string): ResolvedAsset { * // the function that will convert the url into an object * }, * }; * * // create the LoaderParser * const loader = { * name: 'itemLoader', * extension: { * type: ExtensionType.LoadParser, * }, * async testParse(asset: any, options: ResolvedAsset) { * // This function is used to test if the parse function should be run on the asset * }, * async parse(asset: any, options: ResolvedAsset, loader: Loader) { * // Gets called on the asset it testParse passes. Useful to convert a raw asset into something more useful * }, * unload(item: any) { * // If an asset is parsed using this parser, the unload function will be called when the user requests an asset * // to be unloaded. This is useful for things like sounds or textures that can be unloaded from memory * }, * }; * * // put it all together and create the AssetExtension * extensions.add({ * extension: ExtensionType.Asset, * cache, * resolver, * loader, * } * @memberof assets */ export interface AssetExtension extends AssetExtensionAdvanced { } declare class CacheClass { private readonly _parsers; private readonly _cache; private readonly _cacheMap; /** Clear all entries. 
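 * A small usage sketch, assuming the cache is reached through the global `Assets.cache` declared later in this file:
 * @example
 * import { Assets } from 'pixi.js';
 *
 * // Intended for tests / teardown only: drops every cached entry
 * Assets.cache.reset();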
*/ reset(): void; /** * Check if the key exists * @param key - The key to check */ has(key: any): boolean; /** * Fetch entry by key * @param key - The key of the entry to get */ get(key: any): T; /** * Set a value by key or keys name * @param key - The key or keys to set * @param value - The value to store in the cache or from which cacheable assets will be derived. */ set(key: any | any[], value: unknown): void; /** * Remove entry by key * * This function will also remove any associated alias from the cache also. * @param key - The key of the entry to remove */ remove(key: any): void; /** All loader parsers registered */ get parsers(): CacheParser[]; } declare const Cache$1: CacheClass; /** * Configuration for the [loadTextures]{@link assets.loadTextures} plugin. * @see assets.loadTextures * @memberof assets */ export interface LoadTextureConfig { /** * When set to `true`, loading and decoding images will happen with Worker thread, * if available on the browser. This is much more performant as network requests * and decoding can be expensive on the CPU. However, not all environments support * Workers, in some cases it can be helpful to disable by setting to `false`. * @default true */ preferWorkers: boolean; /** * When set to `true`, loading and decoding images will happen with `createImageBitmap`, * otherwise it will use `new Image()`. * @default true */ preferCreateImageBitmap: boolean; /** * The crossOrigin value to use for images when `preferCreateImageBitmap` is `false`. * @default 'anonymous' */ crossOrigin: HTMLImageElement["crossOrigin"]; } /** * Returns a promise that resolves an ImageBitmaps. * This function is designed to be used by a worker. * Part of WorkerManager! * @param url - The image to load an image bitmap for * @ignore */ export declare function loadImageBitmap(url: string, asset?: ResolvedAsset>): Promise; /** * A simple plugin to load our textures! * This makes use of imageBitmaps where available. * We load the `ImageBitmap` on a different thread using workers if possible. * We can then use the `ImageBitmap` as a source for a Pixi texture * * You can customize the behavior of this loader by setting the `config` property. * Which can be found [here]{@link assets.LoadTextureConfig} * ```js * // Set the config * import { loadTextures } from 'pixi.js'; * * loadTextures.config = { * // If true we will use a worker to load the ImageBitmap * preferWorkers: true, * // If false we will use new Image() instead of createImageBitmap, * // we'll also disable the use of workers as it requires createImageBitmap * preferCreateImageBitmap: true, * crossOrigin: 'anonymous', * }; * ``` * @memberof assets */ export declare const loadTextures: LoaderParser; /** * Options for how the resolver deals with generating bundle ids * @memberof assets */ export interface BundleIdentifierOptions { /** The character that is used to connect the bundleId and the assetId when generating a bundle asset id key */ connector?: string; /** * A function that generates a bundle asset id key from a bundleId and an assetId * @param bundleId - the bundleId * @param assetId - the assetId * @returns the bundle asset id key */ createBundleAssetId?: (bundleId: string, assetId: string) => string; /** * A function that generates an assetId from a bundle asset id key. 
This is the reverse of generateBundleAssetId * @param bundleId - the bundleId * @param assetBundleId - the bundle asset id key * @returns the assetId */ extractAssetIdFromBundle?: (bundleId: string, assetBundleId: string) => string; } /** * A class that is responsible for resolving mapping asset URLs to keys. * At its most basic it can be used for Aliases: * * ```js * resolver.add('foo', 'bar'); * resolver.resolveUrl('foo') // => 'bar' * ``` * * It can also be used to resolve the most appropriate asset for a given URL: * * ```js * resolver.prefer({ * params: { * format: 'webp', * resolution: 2, * } * }); * * resolver.add('foo', ['bar@2x.webp', 'bar@2x.png', 'bar.webp', 'bar.png']); * * resolver.resolveUrl('foo') // => 'bar@2x.webp' * ``` * Other features include: * - Ability to process a manifest file to get the correct understanding of how to resolve all assets * - Ability to add custom parsers for specific file types * - Ability to add custom prefer rules * * This class only cares about the URL, not the loading of the asset itself. * * It is not intended that this class is created by developers - its part of the Asset class * This is the third major system of PixiJS' main Assets class * @memberof assets */ export declare class Resolver { /** * The prefix that denotes a URL is for a retina asset. * @static * @name RETINA_PREFIX * @type {RegExp} * @default /@([0-9\.]+)x/ * @example `@2x` */ static RETINA_PREFIX: RegExp; private readonly _defaultBundleIdentifierOptions; /** The character that is used to connect the bundleId and the assetId when generating a bundle asset id key */ private _bundleIdConnector; /** * A function that generates a bundle asset id key from a bundleId and an assetId * @param bundleId - the bundleId * @param assetId - the assetId * @returns the bundle asset id key */ private _createBundleAssetId; /** * A function that generates an assetId from a bundle asset id key. This is the reverse of generateBundleAssetId * @param bundleId - the bundleId * @param assetBundleId - the bundle asset id key * @returns the assetId */ private _extractAssetIdFromBundle; private _assetMap; private _preferredOrder; private readonly _parsers; private _resolverHash; private _rootPath; private _basePath; private _manifest; private _bundles; private _defaultSearchParams; /** * Override how the resolver deals with generating bundle ids. * must be called before any bundles are added * @param bundleIdentifier - the bundle identifier options */ setBundleIdentifier(bundleIdentifier: BundleIdentifierOptions): void; /** * Let the resolver know which assets you prefer to use when resolving assets. * Multiple prefer user defined rules can be added. * @example * resolver.prefer({ * // first look for something with the correct format, and then then correct resolution * priority: ['format', 'resolution'], * params:{ * format:'webp', // prefer webp images * resolution: 2, // prefer a resolution of 2 * } * }) * resolver.add('foo', ['bar@2x.webp', 'bar@2x.png', 'bar.webp', 'bar.png']); * resolver.resolveUrl('foo') // => 'bar@2x.webp' * @param preferOrders - the prefer options */ prefer(...preferOrders: PreferOrder[]): void; /** * Set the base path to prepend to all urls when resolving * @example * resolver.basePath = 'https://home.com/'; * resolver.add('foo', 'bar.ong'); * resolver.resolveUrl('foo', 'bar.png'); // => 'https://home.com/bar.png' * @param basePath - the base path to use */ set basePath(basePath: string); get basePath(): string; /** * Set the root path for root-relative URLs. 
By default the `basePath`'s root is used. If no `basePath` is set, then the * default value for browsers is `window.location.origin` * @example * // Application hosted on https://home.com/some-path/index.html * resolver.basePath = 'https://home.com/some-path/'; * resolver.rootPath = 'https://home.com/'; * resolver.add('foo', '/bar.png'); * resolver.resolveUrl('foo', '/bar.png'); // => 'https://home.com/bar.png' * @param rootPath - the root path to use */ set rootPath(rootPath: string); get rootPath(): string; /** * All the active URL parsers that help the parser to extract information and create * an asset object-based on parsing the URL itself. * * Can be added using the extensions API * @example * resolver.add('foo', [ * { * resolution: 2, * format: 'png', * src: 'image@2x.png', * }, * { * resolution:1, * format:'png', * src: 'image.png', * }, * ]); * * // With a url parser the information such as resolution and file format could extracted from the url itself: * extensions.add({ * extension: ExtensionType.ResolveParser, * test: loadTextures.test, // test if url ends in an image * parse: (value: string) => * ({ * resolution: parseFloat(Resolver.RETINA_PREFIX.exec(value)?.[1] ?? '1'), * format: value.split('.').pop(), * src: value, * }), * }); * * // Now resolution and format can be extracted from the url * resolver.add('foo', [ * 'image@2x.png', * 'image.png', * ]); */ get parsers(): ResolveURLParser[]; /** Used for testing, this resets the resolver to its initial state */ reset(): void; /** * Sets the default URL search parameters for the URL resolver. The urls can be specified as a string or an object. * @param searchParams - the default url parameters to append when resolving urls */ setDefaultSearchParams(searchParams: string | Record): void; /** * Returns the aliases for a given asset * @param asset - the asset to get the aliases for */ getAlias(asset: UnresolvedAsset): string[]; /** * Add a manifest to the asset resolver. This is a nice way to add all the asset information in one go. * generally a manifest would be built using a tool. * @param manifest - the manifest to add to the resolver */ addManifest(manifest: AssetsManifest): void; /** * This adds a bundle of assets in one go so that you can resolve them as a group. * For example you could add a bundle for each screen in you pixi app * @example * resolver.addBundle('animals', [ * { alias: 'bunny', src: 'bunny.png' }, * { alias: 'chicken', src: 'chicken.png' }, * { alias: 'thumper', src: 'thumper.png' }, * ]); * // or * resolver.addBundle('animals', { * bunny: 'bunny.png', * chicken: 'chicken.png', * thumper: 'thumper.png', * }); * * const resolvedAssets = await resolver.resolveBundle('animals'); * @param bundleId - The id of the bundle to add * @param assets - A record of the asset or assets that will be chosen from when loading via the specified key */ addBundle(bundleId: string, assets: AssetsBundle["assets"]): void; /** * Tells the resolver what keys are associated with witch asset. 
* The most important thing the resolver does * @example * // Single key, single asset: * resolver.add({alias: 'foo', src: 'bar.png'); * resolver.resolveUrl('foo') // => 'bar.png' * * // Multiple keys, single asset: * resolver.add({alias: ['foo', 'boo'], src: 'bar.png'}); * resolver.resolveUrl('foo') // => 'bar.png' * resolver.resolveUrl('boo') // => 'bar.png' * * // Multiple keys, multiple assets: * resolver.add({alias: ['foo', 'boo'], src: ['bar.png', 'bar.webp']}); * resolver.resolveUrl('foo') // => 'bar.png' * * // Add custom data attached to the resolver * Resolver.add({ * alias: 'bunnyBooBooSmooth', * src: 'bunny{png,webp}', * data: { scaleMode:SCALE_MODES.NEAREST }, // Base texture options * }); * * resolver.resolve('bunnyBooBooSmooth') // => { src: 'bunny.png', data: { scaleMode: SCALE_MODES.NEAREST } } * @param aliases - the UnresolvedAsset or array of UnresolvedAssets to add to the resolver */ add(aliases: ArrayOr): void; /** * If the resolver has had a manifest set via setManifest, this will return the assets urls for * a given bundleId or bundleIds. * @example * // Manifest Example * const manifest = { * bundles: [ * { * name: 'load-screen', * assets: [ * { * alias: 'background', * src: 'sunset.png', * }, * { * alias: 'bar', * src: 'load-bar.{png,webp}', * }, * ], * }, * { * name: 'game-screen', * assets: [ * { * alias: 'character', * src: 'robot.png', * }, * { * alias: 'enemy', * src: 'bad-guy.png', * }, * ], * }, * ] * }; * * resolver.setManifest(manifest); * const resolved = resolver.resolveBundle('load-screen'); * @param bundleIds - The bundle ids to resolve * @returns All the bundles assets or a hash of assets for each bundle specified */ resolveBundle(bundleIds: ArrayOr): Record | Record>; /** * Does exactly what resolve does, but returns just the URL rather than the whole asset object * @param key - The key or keys to resolve * @returns - The URLs associated with the key(s) */ resolveUrl(key: ArrayOr): string | Record; /** * Resolves each key in the list to an asset object. * Another key function of the resolver! After adding all the various key/asset pairs. this will run the logic * of finding which asset to return based on any preferences set using the `prefer` function * by default the same key passed in will be returned if nothing is matched by the resolver. * @example * resolver.add('boo', 'bunny.png'); * * resolver.resolve('boo') // => { src: 'bunny.png' } * * // Will return the same string as no key was added for this value.. * resolver.resolve('another-thing.png') // => { src: 'another-thing.png' } * @param keys - key or keys to resolve * @returns - the resolve asset or a hash of resolve assets for each key specified */ resolve(keys: string): ResolvedAsset; resolve(keys: string[]): Record; /** * Checks if an asset with a given key exists in the resolver * @param key - The key of the asset */ hasKey(key: string): boolean; /** * Checks if a bundle with the given key exists in the resolver * @param key - The key of the bundle */ hasBundle(key: string): boolean; /** * Internal function for figuring out what prefer criteria an asset should use. * @param assets */ private _getPreferredOrder; /** * Appends the default url parameters to the url * @param url - The url to append the default parameters to * @returns - The url with the default parameters appended */ private _appendDefaultSearchParams; private _buildResolvedAsset; } export declare function getUrlExtension(url: string): string; /** * Configuration for the [loadSVG]{@link assets.loadSVG} plugin. 
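 *
 * A configuration sketch, assuming the same `config` mechanism documented for the texture loader
 * ({@link assets.loadTextures}) also applies to the SVG loader; treat this as an assumption rather
 * than a guarantee:
 * ```js
 * import { loadSvg } from 'pixi.js';
 *
 * // Hypothetical: adjust the SVG loader defaults before any SVG assets are loaded
 * loadSvg.config = {
 *     crossOrigin: 'anonymous',
 *     parseAsGraphicsContext: false, // see the field documentation below
 * };
 * ```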
* @see assets.loadSVG * @memberof assets */ export interface LoadSVGConfig { /** * The crossOrigin value to use for loading the SVG as an image. * @default 'anonymous' */ crossOrigin: HTMLImageElement["crossOrigin"]; /** * When set to `true`, loading and decoding images will happen with `new Image()`, * @default false */ parseAsGraphicsContext: boolean; } /** * A simple loader plugin for loading json data * @memberof assets */ export declare const loadSvg: LoaderParser; /** * Callback for when progress on asset loading is made. * The function is passed a single parameter, `progress`, which represents the percentage (0.0 - 1.0) * of the assets loaded. * @memberof assets * @callback ProgressCallback * @param {number} progress - The percentage (0.0 - 1.0) of the assets loaded. * @returns {void} * @example * (progress) => console.log(progress * 100 + '%') */ export type ProgressCallback = (progress: number) => void; /** * Extensible preferences that can be used, for instance, when configuring loaders. * @since 7.2.0 * @memberof assets */ export interface AssetsPreferences extends LoadTextureConfig, LoadSVGConfig, PixiMixins.AssetsPreferences { } /** * Initialization options object for the Assets Class. * @memberof assets */ export interface AssetInitOptions { /** a base path for any assets loaded */ basePath?: string; /** a default URL parameter string to append to all assets loaded */ defaultSearchParams?: string | Record; /** * a manifest to tell the asset loader upfront what all your assets are * this can be the manifest object itself, or a URL to the manifest. */ manifest?: string | AssetsManifest; /** * optional preferences for which textures preferences you have when resolving assets * for example you might set the resolution to 0.5 if the user is on a rubbish old phone * or you might set the resolution to 2 if the user is on a retina display */ texturePreference?: { /** the resolution order you prefer, can be an array (priority order - first is preferred) or a single resolutions */ resolution?: number | number[]; /** * the formats you prefer, by default this will be: * ['avif', 'webp', 'png', 'jpg', 'jpeg', 'webm', 'mp4', 'm4v', 'ogv'] */ format?: ArrayOr; }; /** * If true, don't attempt to detect whether browser has preferred formats available. * May result in increased performance as it skips detection step. */ skipDetections?: boolean; /** advanced - override how bundlesIds are generated */ bundleIdentifier?: BundleIdentifierOptions; /** Optional loader preferences */ preferences?: Partial; } /** * A one stop shop for all Pixi resource management! * Super modern and easy to use, with enough flexibility to customize and do what you need! * @namespace assets * * Use the singleton class [Assets]{@link assets.Assets} to easily load and manage all your assets. * * ```typescript * import { Assets, Texture } from 'pixi.js'; * * const bunnyTexture = await Assets.load('bunny.png'); * const sprite = new Sprite(bunnyTexture); * ``` * * Check out the sections below for more information on how to deal with assets. * *

* * Asset Loading * * Do not be afraid to load things multiple times - under the hood, it will **NEVER** load anything more than once. * * *For example:* * * ```js * import { Assets } from 'pixi.js'; * * promise1 = Assets.load('bunny.png') * promise2 = Assets.load('bunny.png') * * // promise1 === promise2 * ``` * * Here both promises will be the same. Once resolved... Forever resolved! It makes for really easy resource management! * * Out of the box Pixi supports the following files: * - Textures (**_avif_**, **_webp_**, **_png_**, **_jpg_**, **_gif_**, **_svg_**) via {@link assets.loadTextures}, {@link assets.loadSvg} * - Video Textures (**_mp4_**, **_m4v_**, **_webm_**, **_ogg_**, **_ogv_**, **_h264_**, **_avi_**, **_mov_**) via {@link assets.loadVideoTextures} * - Sprite sheets (**_json_**) via {@link assets.spritesheetAsset} * - Bitmap fonts (**_xml_**, **_fnt_**, **_txt_**) via {@link assets.loadBitmapFont} * - Web fonts (**_ttf_**, **_woff_**, **_woff2_**) via {@link assets.loadWebFont} * - JSON files (**_json_**) via {@link assets.loadJson} * - Text Files (**_txt_**) via {@link assets.loadTxt} *
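 *
 * For example, each of these is picked up by the matching built-in parser with no extra setup
 * (file names are illustrative):
 * ```js
 * import { Assets } from 'pixi.js';
 *
 * const texture = await Assets.load('hero.png');        // loadTextures -> Texture
 * const settings = await Assets.load('settings.json');  // loadJson -> parsed object
 * const credits = await Assets.load('credits.txt');     // loadTxt -> string
 * ```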
* More types can be added fairly easily by creating additional {@link assets.LoaderParser LoaderParsers}. *
* *
* * Textures * * - Textures are loaded as ImageBitmap on a worker thread where possible, leading to much less janky load + parse times. * - By default, we will prefer to load AVIF and WebP image files if you specify them. * But if the browser doesn't support AVIF or WebP we will fall back to png and jpg. * - Textures can also be accessed via `Texture.from()` (see {@link core.from|Texture.from}) * and now use this asset manager under the hood! * - Don't worry if you set preferences for textures that don't exist * (for example, if you prefer 2x resolution images but only 1x is available for that texture, * the Assets manager will pick that up as a fallback automatically). * * #### Sprite sheets * - It's hard to know what resolution a sprite sheet is without loading it first; to address this, * there is a naming convention we have added that lets Pixi understand the image format and resolution * of the spritesheet via its file name: `my-spritesheet{resolution}.{imageFormat}.json` *

* For example: * - `my-spritesheet@2x.webp.json` *// 2x resolution, WebP sprite sheet* * - `my-spritesheet@0.5x.png.json` *// 0.5x resolution, png sprite sheet* * - This is optional! You can just load a sprite sheet as normal. * This is only useful if you have a bunch of different res / formatted spritesheets. *
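 *
 * Loading such a sheet is the same as loading any other asset; the resolution and format hints
 * are carried entirely by the file name (which is illustrative here):
 * ```js
 * import { Assets } from 'pixi.js';
 *
 * const sheet = await Assets.load('my-spritesheet@2x.webp.json');
 * ```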
* *
* * Fonts * * Web fonts will be loaded with all weights. * It is possible to load only specific weights by doing the following: * * ```js * import { Assets } from 'pixi.js'; * * // Load specific weights... * await Assets.load({ * data: { * weights: ['normal'], // Only loads the 'normal' weight * }, * src: `outfit.woff2`, * }); * * // Load everything... * await Assets.load(`outfit.woff2`); * ``` *
* *
* * Background Loading * * Background loading will load stuff for you passively behind the scenes. To minimize jank, * it will only load one asset at a time. As soon as a developer calls `Assets.load(...)` the * background loader is paused and requested assets are loaded as a priority. * Don't worry if something is in there that's already loaded, it will just get skipped! * * You still need to call `Assets.load(...)` to get an asset that has been loaded in the background. * It's just that this promise will resolve instantly if the asset * has already been loaded. *
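 *
 * A small sketch (file names are illustrative):
 * ```js
 * import { Assets } from 'pixi.js';
 *
 * // Start loading passively, one asset at a time
 * Assets.backgroundLoad(['level-2.json', 'boss.png']);
 *
 * // Later: resolves immediately if the background loader already got to it
 * const bossTexture = await Assets.load('boss.png');
 * ```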
* *
* * Manifest and Bundles * * - {@link assets.AssetsManifest Manifest} is a descriptor that contains a list of all assets and their properties. * - {@link assets.AssetsBundle Bundles} are a way to group assets together. * * ```js * import { Assets } from 'pixi.js'; * * // Manifest Example * const manifest = { * bundles: [ * { * name: 'load-screen', * assets: [ * { * alias: 'background', * src: 'sunset.png', * }, * { * alias: 'bar', * src: 'load-bar.{png,webp}', * }, * ], * }, * { * name: 'game-screen', * assets: [ * { * alias: 'character', * src: 'robot.png', * }, * { * alias: 'enemy', * src: 'bad-guy.png', * }, * ], * }, * ] * }; * * await Assets.init({ manifest }); * * // Load a bundle... * loadScreenAssets = await Assets.loadBundle('load-screen'); * // Load another bundle... * gameScreenAssets = await Assets.loadBundle('game-screen'); * ``` *
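 *
 * When a screen is torn down, its bundle can be released again (see `Assets.unloadBundle` below):
 * ```js
 * await Assets.unloadBundle('game-screen');
 * ```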
*/ /** * The global Assets class, it's a singleton so you don't need to instantiate it. * * **The `Assets` class has four main responsibilities:** * 1. Allows users to map URLs to keys and resolve them according to the user's browser capabilities * 2. Loads the resources and transforms them into assets that developers understand. * 3. Caches the assets and provides a way to access them. * 4. Allow developers to unload assets and clear the cache. * * * **It also has a few advanced features:** * 1. Allows developers to provide a {@link assets.Manifest} upfront of all assets and help manage * them via {@link assets.AssetsBundles Bundles}. * 2. Allows users to background load assets. Shortening (or eliminating) load times and improving UX. With this feature, * in-game loading bars can be a thing of the past! * @example * import { Assets } from 'pixi.js'; * * const bunny = await Assets.load('bunny.png'); * @memberof assets * @class Assets */ export declare class AssetsClass { /** the resolver to map various urls */ resolver: Resolver; /** The loader, loads stuff! */ loader: Loader; /** * The global cache of all assets within PixiJS * @type {assets.Cache} */ cache: typeof Cache$1; /** takes care of loading assets in the background */ private readonly _backgroundLoader; private readonly _detections; private _initialized; constructor(); /** * Best practice is to call this function before any loading commences * Initiating is the best time to add any customization to the way things are loaded. * * you do not need to call this for the Assets class to work, only if you want to set any initial properties * @param options - options to initialize the Assets manager with */ init(options?: AssetInitOptions): Promise; /** * Allows you to specify how to resolve any assets load requests. * There are a few ways to add things here as shown below: * @example * import { Assets } from 'pixi.js'; * * // Simple * Assets.add({alias: 'bunnyBooBoo', src: 'bunny.png'}); * const bunny = await Assets.load('bunnyBooBoo'); * * // Multiple keys: * Assets.add({alias: ['burger', 'chicken'], src: 'bunny.png'}); * * const bunny = await Assets.load('burger'); * const bunny2 = await Assets.load('chicken'); * * // passing options to to the object * Assets.add({ * alias: 'bunnyBooBooSmooth', * src: 'bunny{png,webp}', * data: { scaleMode: SCALE_MODES.NEAREST }, // Base texture options * }); * * // Multiple assets * * // The following all do the same thing: * * Assets.add({alias: 'bunnyBooBoo', src: 'bunny{png,webp}'}); * * Assets.add({ * alias: 'bunnyBooBoo', * src: [ * 'bunny.png', * 'bunny.webp', * ], * }); * * const bunny = await Assets.load('bunnyBooBoo'); // Will try to load WebP if available * @param assets - the unresolved assets to add to the resolver */ add(assets: (ArrayOr)): void; /** * Loads your assets! You pass in a key or URL and it will return a promise that * resolves to the loaded asset. If multiple assets a requested, it will return a hash of assets. * * Don't worry about loading things multiple times, behind the scenes assets are only ever loaded * once and the same promise reused behind the scenes so you can safely call this function multiple * times with the same key and it will always return the same asset. 
* @example * import { Assets } from 'pixi.js'; * * // Load a URL: * const myImageTexture = await Assets.load('http://some.url.com/image.png'); // => returns a texture * * Assets.add('thumper', 'bunny.png'); * Assets.add('chicko', 'chicken.png'); * * // Load multiple assets: * const textures = await Assets.load(['thumper', 'chicko']); // => {thumper: Texture, chicko: Texture} * @param urls - the urls to load * @param onProgress - optional function that is called when progress on asset loading is made. * The function is passed a single parameter, `progress`, which represents the percentage * (0.0 - 1.0) of the assets loaded. * @returns - the assets that were loaded, either a single asset or a hash of assets */ load(urls: string | UnresolvedAsset, onProgress?: ProgressCallback): Promise; load(urls: string[] | UnresolvedAsset[], onProgress?: ProgressCallback): Promise>; /** * This adds a bundle of assets in one go so that you can load them as a group. * For example you could add a bundle for each screen in you pixi app * @example * import { Assets } from 'pixi.js'; * * Assets.addBundle('animals', [ * { alias: 'bunny', src: 'bunny.png' }, * { alias: 'chicken', src: 'chicken.png' }, * { alias: 'thumper', src: 'thumper.png' }, * ]); * // or * Assets.addBundle('animals', { * bunny: 'bunny.png', * chicken: 'chicken.png', * thumper: 'thumper.png', * }); * * const assets = await Assets.loadBundle('animals'); * @param bundleId - the id of the bundle to add * @param assets - a record of the asset or assets that will be chosen from when loading via the specified key */ addBundle(bundleId: string, assets: AssetsBundle["assets"]): void; /** * Bundles are a way to load multiple assets at once. * If a manifest has been provided to the init function then you can load a bundle, or bundles. * you can also add bundles via `addBundle` * @example * import { Assets } from 'pixi.js'; * * // Manifest Example * const manifest = { * bundles: [ * { * name: 'load-screen', * assets: [ * { * alias: 'background', * src: 'sunset.png', * }, * { * alias: 'bar', * src: 'load-bar.{png,webp}', * }, * ], * }, * { * name: 'game-screen', * assets: [ * { * alias: 'character', * src: 'robot.png', * }, * { * alias: 'enemy', * src: 'bad-guy.png', * }, * ], * }, * ] * }; * * await Assets.init({ manifest }); * * // Load a bundle... * loadScreenAssets = await Assets.loadBundle('load-screen'); * // Load another bundle... * gameScreenAssets = await Assets.loadBundle('game-screen'); * @param bundleIds - the bundle id or ids to load * @param onProgress - Optional function that is called when progress on asset loading is made. * The function is passed a single parameter, `progress`, which represents the percentage (0.0 - 1.0) * of the assets loaded. Do not use this function to detect when assets are complete and available, * instead use the Promise returned by this function. * @returns all the bundles assets or a hash of assets for each bundle specified */ loadBundle(bundleIds: ArrayOr, onProgress?: ProgressCallback): Promise; /** * Initiate a background load of some assets. It will passively begin to load these assets in the background. * So when you actually come to loading them you will get a promise that resolves to the loaded assets immediately * * An example of this might be that you would background load game assets after your initial load. 
* then when you got to actually load your game screen assets when a player goes to the game - the loading * would already have stared or may even be complete, saving you having to show an interim load bar. * @example * import { Assets } from 'pixi.js'; * * Assets.backgroundLoad('bunny.png'); * * // later on in your app... * await Assets.loadBundle('bunny.png'); // Will resolve quicker as loading may have completed! * @param urls - the url / urls you want to background load */ backgroundLoad(urls: ArrayOr): Promise; /** * Initiate a background of a bundle, works exactly like backgroundLoad but for bundles. * this can only be used if the loader has been initiated with a manifest * @example * import { Assets } from 'pixi.js'; * * await Assets.init({ * manifest: { * bundles: [ * { * name: 'load-screen', * assets: [...], * }, * ... * ], * }, * }); * * Assets.backgroundLoadBundle('load-screen'); * * // Later on in your app... * await Assets.loadBundle('load-screen'); // Will resolve quicker as loading may have completed! * @param bundleIds - the bundleId / bundleIds you want to background load */ backgroundLoadBundle(bundleIds: ArrayOr): Promise; /** * Only intended for development purposes. * This will wipe the resolver and caches. * You will need to reinitialize the Asset */ reset(): void; /** * Instantly gets an asset already loaded from the cache. If the asset has not yet been loaded, * it will return undefined. So it's on you! When in doubt just use `Assets.load` instead. * (Remember, the loader will never load things more than once!) * @param keys - The key or keys for the assets that you want to access * @returns - The assets or hash of assets requested */ get(keys: string): T; get(keys: string[]): Record; /** * helper function to map resolved assets back to loaded assets * @param resolveResults - the resolve results from the resolver * @param onProgress - the progress callback */ private _mapLoadToResolve; /** * Unload an asset or assets. As the Assets class is responsible for creating the assets via the `load` function * this will make sure to destroy any assets and release them from memory. * Once unloaded, you will need to load the asset again. * * Use this to help manage assets if you find that you have a large app and you want to free up memory. * * - it's up to you as the developer to make sure that textures are not actively being used when you unload them, * Pixi won't break but you will end up with missing assets. Not a good look for the user! * @example * import { Assets } from 'pixi.js'; * * // Load a URL: * const myImageTexture = await Assets.load('http://some.url.com/image.png'); // => returns a texture * * await Assets.unload('http://some.url.com/image.png') * * // myImageTexture will be destroyed now. * * // Unload multiple assets: * const textures = await Assets.unload(['thumper', 'chicko']); * @param urls - the urls to unload */ unload(urls: ArrayOr | ResolvedAsset | ResolvedAsset[]): Promise; /** * Bundles are a way to manage multiple assets at once. * this will unload all files in a bundle. * * once a bundle has been unloaded, you need to load it again to have access to the assets. * @example * import { Assets } from 'pixi.js'; * * Assets.addBundle({ * 'thumper': 'http://some.url.com/thumper.png', * }) * * const assets = await Assets.loadBundle('thumper'); * * // Now to unload... 
* * await Assets.unloadBundle('thumper'); * * // All assets in the assets object will now have been destroyed and purged from the cache * @param bundleIds - the bundle id or ids to unload */ unloadBundle(bundleIds: ArrayOr): Promise; private _unloadFromResolved; /** * Detects the supported formats for the browser, and returns an array of supported formats, respecting * the users preferred formats order. * @param options - the options to use when detecting formats * @param options.preferredFormats - the preferred formats to use * @param options.skipDetections - if we should skip the detections altogether * @param options.detections - the detections to use * @returns - the detected formats */ private _detectFormats; /** All the detection parsers currently added to the Assets class. */ get detections(): FormatDetectionParser[]; /** * General setter for preferences. This is a helper function to set preferences on all parsers. * @param preferences - the preferences to set */ setPreferences(preferences: Partial): void; } export declare const Assets: AssetsClass; /** * Quietly Loads assets in the background. * @memberof assets */ export declare class BackgroundLoader { /** Whether or not the loader should continue loading. */ private _isActive; /** Assets to load. */ private readonly _assetList; /** Whether or not the loader is loading. */ private _isLoading; /** Number of assets to load at a time. */ private readonly _maxConcurrent; /** Should the loader log to the console. */ verbose: boolean; private readonly _loader; /** * @param loader * @param verbose - should the loader log to the console */ constructor(loader: Loader, verbose?: boolean); /** * Adds an array of assets to load. * @param assetUrls - assets to load */ add(assetUrls: ResolvedAsset[]): void; /** * Loads the next set of assets. Will try to load as many assets as it can at the same time. * * The max assets it will try to load at one time will be 4. */ private _next; /** * Activate/Deactivate the loading. If set to true then it will immediately continue to load the next asset. * @returns whether the class is active */ get active(): boolean; set active(value: boolean); } /** * Returns an object of textures from an array of textures to be cached * @memberof assets */ export declare const cacheTextureArray: CacheParser; /** * Detects if the browser supports the AVIF image format. * @memberof assets */ export declare const detectAvif: FormatDetectionParser; /** * Adds some default image formats to the detection parser * @memberof assets */ export declare const detectDefaults: FormatDetectionParser; /** * Detects if the browser supports the MP4 video format. * @memberof assets */ export declare const detectMp4: FormatDetectionParser; /** * Detects if the browser supports the OGV video format. * @memberof assets */ export declare const detectOgv: FormatDetectionParser; /** * Detects if the browser supports the WebM video format. * @memberof assets */ export declare const detectWebm: FormatDetectionParser; /** * Detects if the browser supports the WebP image format. 
* @memberof assets */ export declare const detectWebp: FormatDetectionParser; export declare function testImageFormat(imageData: string): Promise; export declare function testVideoFormat(mimeType: string): boolean; /** * A simple loader plugin for loading json data * @memberof assets */ export declare const loadJson: { extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; }; name: string; test(url: string): boolean; load(url: string): Promise; }; /** * A simple loader plugin for loading text data * @memberof assets */ export declare const loadTxt: { name: string; extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; name: string; }; test(url: string): boolean; load(url: string): Promise; }; /** * Data for loading a font * @memberof assets */ export type LoadFontData = { /** Font family name */ family: string; /** A set of optional descriptors passed as an object. It can contain any of the descriptors available for @font-face: */ display: string; /** * The featureSettings property of the FontFace interface retrieves or sets infrequently used * font features that are not available from a font's variant properties. */ featureSettings: string; /** The stretch property of the FontFace interface retrieves or sets how the font stretches. */ stretch: string; /** The style property of the FontFace interface retrieves or sets the font's style. */ style: string; /** * The unicodeRange property of the FontFace interface retrieves or sets the range of * unicode code points encompassing the font. */ unicodeRange: string; /** The variant property of the FontFace interface programmatically retrieves or sets font variant values. */ variant: string; /** The weight property of the FontFace interface retrieves or sets the weight of the font. */ weights: string[]; }; /** * Return font face name from a file name * Ex.: 'fonts/titan-one.woff' turns into 'Titan One' * @param url - File url * @memberof assets */ export declare function getFontFamilyName(url: string): string; /** * A loader plugin for handling web fonts * @example * import { Assets } from 'pixi.js'; * * Assets.load({ * alias: 'font', * src: 'fonts/titan-one.woff', * data: { * family: 'Titan One', * weights: ['normal', 'bold'], * } * }) * @memberof assets */ export declare const loadWebFont: { extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; }; name: string; test(url: string): boolean; load(url: string, options?: ResolvedAsset): Promise; unload(font: FontFace | FontFace[]): void; }; /** * Set cross origin based detecting the url and the crossorigin * @param element - Element to apply crossOrigin * @param url - URL to check * @param crossorigin - Cross origin value to use * @memberof assets */ export declare function crossOrigin(element: HTMLImageElement | HTMLVideoElement, url: string, crossorigin?: boolean | string): void; /** * Preload a video element * @param element - Video element to preload */ export declare function preloadVideo(element: HTMLVideoElement): Promise; /** * Sets the `crossOrigin` property for this resource based on if the url * for this resource is cross-origin. If crossOrigin was manually set, this * function does nothing. * Nipped from the resource loader! * @ignore * @param url - The url to test. * @param {object} [loc=window.location] - The location object to test against. * @returns The crossOrigin value to use (or empty string for none). 
* @memberof assets */ export declare function determineCrossOrigin(url: string, loc?: Location): string; /** * A simple plugin to load video textures. * * You can pass VideoSource options to the loader via the .data property of the asset descriptor * when using Asset.load(). * ```js * // Set the data * const texture = await Assets.load({ * src: './assets/city.mp4', * data: { * preload: true, * autoPlay: true, * }, * }); * ``` * @memberof assets */ export declare const loadVideoTextures: { name: string; extension: { type: ExtensionType.LoadParser; name: string; }; test(url: string): boolean; load(url: string, asset: ResolvedAsset, loader: Loader): Promise; unload(texture: Texture): void; }; /** * Creates a texture from a source and adds it to the cache. * @param source - source of the texture * @param loader - loader * @param url - url of the texture * @ignore */ export declare function createTexture(source: TextureSource, loader: Loader, url: string): Texture>; declare class WorkerManagerClass { worker: Worker; private _resolveHash; private readonly _workerPool; private readonly _queue; private _initialized; private _createdWorkers; private _isImageBitmapSupported?; constructor(); isImageBitmapSupported(): Promise; loadImageBitmap(src: string, asset?: ResolvedAsset>): Promise; private _initWorkers; private _getWorker; private _returnWorker; private _complete; private _run; private _next; } export declare const WorkerManager: WorkerManagerClass; /** * A parser that will resolve a json urls resolution for spritesheets * e.g. `assets/spritesheet@1x.json` * @memberof assets */ export declare const resolveJsonUrl: { extension: { type: ExtensionType.ResolveParser; priority: number; name: string; }; test: (value: string) => boolean; parse: (value: string) => { resolution: number; format: string; src: string; }; }; /** * A parser that will resolve a texture url * @memberof assets */ export declare const resolveTextureUrl: { extension: { type: ExtensionType.ResolveParser; name: string; }; test: (url: string, resolvedAsset?: ResolvedAsset>, loader?: Loader) => boolean; parse: (value: string) => { resolution: number; format: string; src: string; }; }; export declare function checkDataUrl(url: string, mimes: string | string[]): boolean; export declare function checkExtension(url: string, extension: string | string[]): boolean; export declare const convertToList: (input: string | T | (string | T)[], transform?: (input: string) => T, forceTransform?: boolean) => T[]; /** * Copies the search params from one url to another * @param targetUrl - the url to copy the search params to * @param sourceUrl - the url container the search params we want to copy * @returns the url with the search params copied */ export declare const copySearchParams: (targetUrl: string, sourceUrl: string) => string; /** * Creates a list of all possible combinations of the given strings. * @example * const out2 = createStringVariations('name is {chicken,wolf,sheep}'); * console.log(out2); // [ 'name is chicken', 'name is wolf', 'name is sheep' ] * @param string - The string to process */ export declare function createStringVariations(string: string): string[]; /** * Checks if the given value is an array. * @param item - The item to test */ export declare const isSingleItem: (item: unknown) => boolean; export declare const detectBasis: FormatDetectionParser; /** Loads KTX textures! 
*/ export declare const loadBasis: { extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; name: string; }; name: string; test(url: string): boolean; load(url: string, _asset: ResolvedAsset, loader: Loader): Promise; unload(texture: Texture | Texture[]): void; }; export type BASISModuleCreator = (config: { locateFile: (file: string) => string; }) => { then: (result: (libktx: BASISModule) => void) => void; }; export type BasisTextureConstructor = new (data: Uint8Array) => BasisTexture; export interface BASISModule { initializeBasis(): void; BasisFile: BasisTextureConstructor; } export interface BasisTexture { getNumImages(): number; getNumLevels(imageIndex: number): number; startTranscoding(): boolean; getImageWidth(imageIndex: number, levelIndex: number): number; getImageHeight(imageIndex: number, levelIndex: number): number; getImageTranscodedSizeInBytes(imageIndex: number, levelIndex: number, format: number): number; transcodeImage(buffer: Uint8Array, imageIndex: number, levelIndex: number, format: number, unused: number, getAlphaForOpaqueFormats: number): boolean; } export declare function createLevelBuffers(basisTexture: BasisTexture, basisTranscoderFormat: number): Uint8Array[]; export declare function gpuFormatToBasisTranscoderFormat(transcoderFormat: string): number; export declare const basisTranscoderUrls: { jsUrl: string; wasmUrl: string; }; export declare function setBasisTranscoderPath(config: Partial): void; export declare function loadBasisOnWorker(url: string, supportedTextures: TEXTURE_FORMATS[]): Promise; /** * @see https://docs.microsoft.com/en-us/windows/win32/api/dxgiformat/ne-dxgiformat-dxgi_format * This is way over-blown for us! Lend us a hand, and remove the ones that aren't used (but set the remaining * ones to their correct value) * @ignore */ export declare enum DXGI_FORMAT { DXGI_FORMAT_UNKNOWN = 0, DXGI_FORMAT_R32G32B32A32_TYPELESS = 1, DXGI_FORMAT_R32G32B32A32_FLOAT = 2, DXGI_FORMAT_R32G32B32A32_UINT = 3, DXGI_FORMAT_R32G32B32A32_SINT = 4, DXGI_FORMAT_R32G32B32_TYPELESS = 5, DXGI_FORMAT_R32G32B32_FLOAT = 6, DXGI_FORMAT_R32G32B32_UINT = 7, DXGI_FORMAT_R32G32B32_SINT = 8, DXGI_FORMAT_R16G16B16A16_TYPELESS = 9, DXGI_FORMAT_R16G16B16A16_FLOAT = 10, DXGI_FORMAT_R16G16B16A16_UNORM = 11, DXGI_FORMAT_R16G16B16A16_UINT = 12, DXGI_FORMAT_R16G16B16A16_SNORM = 13, DXGI_FORMAT_R16G16B16A16_SINT = 14, DXGI_FORMAT_R32G32_TYPELESS = 15, DXGI_FORMAT_R32G32_FLOAT = 16, DXGI_FORMAT_R32G32_UINT = 17, DXGI_FORMAT_R32G32_SINT = 18, DXGI_FORMAT_R32G8X24_TYPELESS = 19, DXGI_FORMAT_D32_FLOAT_S8X24_UINT = 20, DXGI_FORMAT_R32_FLOAT_X8X24_TYPELESS = 21, DXGI_FORMAT_X32_TYPELESS_G8X24_UINT = 22, DXGI_FORMAT_R10G10B10A2_TYPELESS = 23, DXGI_FORMAT_R10G10B10A2_UNORM = 24, DXGI_FORMAT_R10G10B10A2_UINT = 25, DXGI_FORMAT_R11G11B10_FLOAT = 26, DXGI_FORMAT_R8G8B8A8_TYPELESS = 27, DXGI_FORMAT_R8G8B8A8_UNORM = 28, DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = 29, DXGI_FORMAT_R8G8B8A8_UINT = 30, DXGI_FORMAT_R8G8B8A8_SNORM = 31, DXGI_FORMAT_R8G8B8A8_SINT = 32, DXGI_FORMAT_R16G16_TYPELESS = 33, DXGI_FORMAT_R16G16_FLOAT = 34, DXGI_FORMAT_R16G16_UNORM = 35, DXGI_FORMAT_R16G16_UINT = 36, DXGI_FORMAT_R16G16_SNORM = 37, DXGI_FORMAT_R16G16_SINT = 38, DXGI_FORMAT_R32_TYPELESS = 39, DXGI_FORMAT_D32_FLOAT = 40, DXGI_FORMAT_R32_FLOAT = 41, DXGI_FORMAT_R32_UINT = 42, DXGI_FORMAT_R32_SINT = 43, DXGI_FORMAT_R24G8_TYPELESS = 44, DXGI_FORMAT_D24_UNORM_S8_UINT = 45, DXGI_FORMAT_R24_UNORM_X8_TYPELESS = 46, DXGI_FORMAT_X24_TYPELESS_G8_UINT = 47, DXGI_FORMAT_R8G8_TYPELESS = 48, DXGI_FORMAT_R8G8_UNORM = 49, 
DXGI_FORMAT_R8G8_UINT = 50, DXGI_FORMAT_R8G8_SNORM = 51, DXGI_FORMAT_R8G8_SINT = 52, DXGI_FORMAT_R16_TYPELESS = 53, DXGI_FORMAT_R16_FLOAT = 54, DXGI_FORMAT_D16_UNORM = 55, DXGI_FORMAT_R16_UNORM = 56, DXGI_FORMAT_R16_UINT = 57, DXGI_FORMAT_R16_SNORM = 58, DXGI_FORMAT_R16_SINT = 59, DXGI_FORMAT_R8_TYPELESS = 60, DXGI_FORMAT_R8_UNORM = 61, DXGI_FORMAT_R8_UINT = 62, DXGI_FORMAT_R8_SNORM = 63, DXGI_FORMAT_R8_SINT = 64, DXGI_FORMAT_A8_UNORM = 65, DXGI_FORMAT_R1_UNORM = 66, DXGI_FORMAT_R9G9B9E5_SHAREDEXP = 67, DXGI_FORMAT_R8G8_B8G8_UNORM = 68, DXGI_FORMAT_G8R8_G8B8_UNORM = 69, DXGI_FORMAT_BC1_TYPELESS = 70, DXGI_FORMAT_BC1_UNORM = 71, DXGI_FORMAT_BC1_UNORM_SRGB = 72, DXGI_FORMAT_BC2_TYPELESS = 73, DXGI_FORMAT_BC2_UNORM = 74, DXGI_FORMAT_BC2_UNORM_SRGB = 75, DXGI_FORMAT_BC3_TYPELESS = 76, DXGI_FORMAT_BC3_UNORM = 77, DXGI_FORMAT_BC3_UNORM_SRGB = 78, DXGI_FORMAT_BC4_TYPELESS = 79, DXGI_FORMAT_BC4_UNORM = 80, DXGI_FORMAT_BC4_SNORM = 81, DXGI_FORMAT_BC5_TYPELESS = 82, DXGI_FORMAT_BC5_UNORM = 83, DXGI_FORMAT_BC5_SNORM = 84, DXGI_FORMAT_B5G6R5_UNORM = 85, DXGI_FORMAT_B5G5R5A1_UNORM = 86, DXGI_FORMAT_B8G8R8A8_UNORM = 87, DXGI_FORMAT_B8G8R8X8_UNORM = 88, DXGI_FORMAT_R10G10B10_XR_BIAS_A2_UNORM = 89, DXGI_FORMAT_B8G8R8A8_TYPELESS = 90, DXGI_FORMAT_B8G8R8A8_UNORM_SRGB = 91, DXGI_FORMAT_B8G8R8X8_TYPELESS = 92, DXGI_FORMAT_B8G8R8X8_UNORM_SRGB = 93, DXGI_FORMAT_BC6H_TYPELESS = 94, DXGI_FORMAT_BC6H_UF16 = 95, DXGI_FORMAT_BC6H_SF16 = 96, DXGI_FORMAT_BC7_TYPELESS = 97, DXGI_FORMAT_BC7_UNORM = 98, DXGI_FORMAT_BC7_UNORM_SRGB = 99, DXGI_FORMAT_AYUV = 100, DXGI_FORMAT_Y410 = 101, DXGI_FORMAT_Y416 = 102, DXGI_FORMAT_NV12 = 103, DXGI_FORMAT_P010 = 104, DXGI_FORMAT_P016 = 105, DXGI_FORMAT_420_OPAQUE = 106, DXGI_FORMAT_YUY2 = 107, DXGI_FORMAT_Y210 = 108, DXGI_FORMAT_Y216 = 109, DXGI_FORMAT_NV11 = 110, DXGI_FORMAT_AI44 = 111, DXGI_FORMAT_IA44 = 112, DXGI_FORMAT_P8 = 113, DXGI_FORMAT_A8P8 = 114, DXGI_FORMAT_B4G4R4A4_UNORM = 115, DXGI_FORMAT_P208 = 116, DXGI_FORMAT_V208 = 117, DXGI_FORMAT_V408 = 118, DXGI_FORMAT_SAMPLER_FEEDBACK_MIN_MIP_OPAQUE = 119, DXGI_FORMAT_SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE = 120, DXGI_FORMAT_FORCE_UINT = 121 } /** * Possible values of the field {@link DDS_DX10_FIELDS.RESOURCE_DIMENSION} * @ignore */ export declare enum D3D10_RESOURCE_DIMENSION { DDS_DIMENSION_TEXTURE1D = 2, DDS_DIMENSION_TEXTURE2D = 3, DDS_DIMENSION_TEXTURE3D = 6 } export declare enum D3DFMT { UNKNOWN = 0, R8G8B8 = 20, A8R8G8B8 = 21, X8R8G8B8 = 22, R5G6B5 = 23, X1R5G5B5 = 24, A1R5G5B5 = 25, A4R4G4B4 = 26, R3G3B2 = 27, A8 = 28, A8R3G3B2 = 29, X4R4G4B4 = 30, A2B10G10R10 = 31, A8B8G8R8 = 32, X8B8G8R8 = 33, G16R16 = 34, A2R10G10B10 = 35, A16B16G16R16 = 36, A8P8 = 40, P8 = 41, L8 = 50, A8L8 = 51, A4L4 = 52, V8U8 = 60, L6V5U5 = 61, X8L8V8U8 = 62, Q8W8V8U8 = 63, V16U16 = 64, A2W10V10U10 = 67, Q16W16V16U16 = 110, R16F = 111, G16R16F = 112, A16B16G16R16F = 113, R32F = 114, G32R32F = 115, A32B32G32R32F = 116, UYVY, R8G8_B8G8, YUY2, D3DFMT_G8R8_G8B8, DXT1, DXT2, DXT3, DXT4, DXT5, ATI1, AT1N, ATI2, AT2N, BC4U, BC4S, BC5U, BC5S, DX10 } /** * Maps `FOURCC_*` formats to {@link TEXTURE_FORMATS}. 
* https://en.wikipedia.org/wiki/S3_Texture_Compression#S3TC_format_comparison * https://github.com/microsoft/DirectXTex/blob/main/DDSTextureLoader/DDSTextureLoader11.cpp * @ignore */ export declare const FOURCC_TO_TEXTURE_FORMAT: { [id: number]: TEXTURE_FORMATS; }; /** * Maps {@link DXGI_FORMAT} to {@link TEXTURE_FORMATS} * @ignore */ export declare const DXGI_TO_TEXTURE_FORMAT: { [id: number]: TEXTURE_FORMATS; }; export declare const DDS: { MAGIC_VALUE: number; MAGIC_SIZE: number; HEADER_SIZE: number; HEADER_DX10_SIZE: number; PIXEL_FORMAT_FLAGS: { ALPHAPIXELS: number; ALPHA: number; FOURCC: number; RGB: number; RGBA: number; YUV: number; LUMINANCE: number; LUMINANCEA: number; }; RESOURCE_MISC_TEXTURECUBE: number; HEADER_FIELDS: { MAGIC: number; SIZE: number; FLAGS: number; HEIGHT: number; WIDTH: number; MIPMAP_COUNT: number; PIXEL_FORMAT: number; PF_FLAGS: number; FOURCC: number; RGB_BITCOUNT: number; R_BIT_MASK: number; G_BIT_MASK: number; B_BIT_MASK: number; A_BIT_MASK: number; }; HEADER_DX10_FIELDS: { DXGI_FORMAT: number; RESOURCE_DIMENSION: number; MISC_FLAG: number; ARRAY_SIZE: number; MISC_FLAGS2: number; }; DXGI_FORMAT: typeof DXGI_FORMAT; D3D10_RESOURCE_DIMENSION: typeof D3D10_RESOURCE_DIMENSION; D3DFMT: typeof D3DFMT; }; export declare const TEXTURE_FORMAT_BLOCK_SIZE: Record; /** Loads KTX textures! */ export declare const loadDDS: { extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; name: string; }; name: string; test(url: string): boolean; load(url: string, _asset: ResolvedAsset, loader: Loader): Promise; unload(texture: Texture | Texture[]): void; }; export declare function parseDDS(arrayBuffer: ArrayBuffer, supportedFormats: TEXTURE_FORMATS[]): TextureSourceOptions; /** Loads KTX textures! */ export declare const loadKTX: { extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; name: string; }; name: string; test(url: string): boolean; load(url: string, _asset: ResolvedAsset, loader: Loader): Promise; unload(texture: Texture | Texture[]): void; }; export declare function parseKTX(arrayBuffer: ArrayBuffer, supportedFormats: TEXTURE_FORMATS[]): TextureSourceOptions; export declare enum GL_INTERNAL_FORMAT { RGBA8_SNORM = 36759, RGBA = 6408, RGBA8UI = 36220, SRGB8_ALPHA8 = 35907, RGBA8I = 36238, RGBA8 = 32856, COMPRESSED_RGB_S3TC_DXT1_EXT = 33776, COMPRESSED_RGBA_S3TC_DXT1_EXT = 33777, COMPRESSED_RGBA_S3TC_DXT3_EXT = 33778, COMPRESSED_RGBA_S3TC_DXT5_EXT = 33779, COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT = 35917, COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT = 35918, COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT = 35919, COMPRESSED_SRGB_S3TC_DXT1_EXT = 35916, COMPRESSED_RED_RGTC1_EXT = 36283, COMPRESSED_SIGNED_RED_RGTC1_EXT = 36284, COMPRESSED_RED_GREEN_RGTC2_EXT = 36285, COMPRESSED_SIGNED_RED_GREEN_RGTC2_EXT = 36286, COMPRESSED_R11_EAC = 37488, COMPRESSED_SIGNED_R11_EAC = 37489, COMPRESSED_RG11_EAC = 37490, COMPRESSED_SIGNED_RG11_EAC = 37491, COMPRESSED_RGB8_ETC2 = 37492, COMPRESSED_RGBA8_ETC2_EAC = 37496, COMPRESSED_SRGB8_ETC2 = 37493, COMPRESSED_SRGB8_ALPHA8_ETC2_EAC = 37497, COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 = 37494, COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 = 37495, COMPRESSED_RGBA_ASTC_4x4_KHR = 37808, COMPRESSED_RGBA_ASTC_5x4_KHR = 37809, COMPRESSED_RGBA_ASTC_5x5_KHR = 37810, COMPRESSED_RGBA_ASTC_6x5_KHR = 37811, COMPRESSED_RGBA_ASTC_6x6_KHR = 37812, COMPRESSED_RGBA_ASTC_8x5_KHR = 37813, COMPRESSED_RGBA_ASTC_8x6_KHR = 37814, COMPRESSED_RGBA_ASTC_8x8_KHR = 37815, COMPRESSED_RGBA_ASTC_10x5_KHR = 37816, COMPRESSED_RGBA_ASTC_10x6_KHR = 37817, 
COMPRESSED_RGBA_ASTC_10x8_KHR = 37818, COMPRESSED_RGBA_ASTC_10x10_KHR = 37819, COMPRESSED_RGBA_ASTC_12x10_KHR = 37820, COMPRESSED_RGBA_ASTC_12x12_KHR = 37821, COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR = 37840, COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR = 37841, COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR = 37842, COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR = 37843, COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR = 37844, COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR = 37845, COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR = 37846, COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR = 37847, COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR = 37848, COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR = 37849, COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR = 37850, COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR = 37851, COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR = 37852, COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR = 37853, COMPRESSED_RGBA_BPTC_UNORM_EXT = 36492, COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT = 36493, COMPRESSED_RGB_BPTC_SIGNED_FLOAT_EXT = 36494, COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_EXT = 36495 } export declare const KTX: { FILE_HEADER_SIZE: number; FILE_IDENTIFIER: number[]; FORMATS_TO_COMPONENTS: { [id: number]: number; }; INTERNAL_FORMAT_TO_BYTES_PER_PIXEL: { [id: number]: number; }; INTERNAL_FORMAT_TO_TEXTURE_FORMATS: { [id: number]: TEXTURE_FORMATS; }; FIELDS: { FILE_IDENTIFIER: number; ENDIANNESS: number; GL_TYPE: number; GL_TYPE_SIZE: number; GL_FORMAT: number; GL_INTERNAL_FORMAT: number; GL_BASE_INTERNAL_FORMAT: number; PIXEL_WIDTH: number; PIXEL_HEIGHT: number; PIXEL_DEPTH: number; NUMBER_OF_ARRAY_ELEMENTS: number; NUMBER_OF_FACES: number; NUMBER_OF_MIPMAP_LEVELS: number; BYTES_OF_KEY_VALUE_DATA: number; }; TYPES_TO_BYTES_PER_COMPONENT: { [id: number]: number; }; TYPES_TO_BYTES_PER_PIXEL: { [id: number]: number; }; ENDIANNESS: number; }; /** Loads KTX2 textures! 
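* @example
* // Illustrative usage sketch (the asset path here is hypothetical): KTX2 textures are
* // loaded through Assets; this parser is selected by the `.ktx2` file extension.
* import { Assets } from 'pixi.js';
*
* const texture = await Assets.load('images/photo.ktx2');
*
* // If you self-host the transcoder files, point the loader at them first (optional):
* // setKTXTranscoderPath({ jsUrl: '...', wasmUrl: '...' });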
*/ export declare const loadKTX2: { extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; name: string; }; name: string; test(url: string): boolean; load(url: string, _asset: ResolvedAsset, loader: Loader): Promise; unload(texture: Texture | Texture[]): Promise; }; export type LIBKTXModuleCreator = (config: { locateFile: (file: string) => string; }) => { then: (result: (libktx: LIBKTXModule) => void) => void; }; export interface KTXTexture { getImageData(level: number, layer: number, face: number): Uint8Array; glInternalformat: number; vkFormat: number; classId: number; numLevels: number; baseHeight: number; baseWidth: number; transcodeBasis(transcodeFormat: any, arg1: number): unknown; needsTranscoding: boolean; } export interface LIBKTXModule { ErrorCode: any; TranscodeTarget: any; ktxTexture: new (data: Uint8Array) => KTXTexture; } export type COMPRESSED_TEXTURE_FORMATS = TEXTURE_FORMATS | "rgb8unorm" | "rgb8unorm-srgb"; export declare function convertFormatIfRequired(textureOptions: TextureSourceOptions): void; export declare function createLevelBuffersFromKTX(ktxTexture: KTXTexture): Uint8Array[]; export declare function getTextureFormatFromKTXTexture(ktxTexture: KTXTexture): COMPRESSED_TEXTURE_FORMATS; export declare function glFormatToGPUFormat(glInternalFormat: number): COMPRESSED_TEXTURE_FORMATS; export declare function gpuFormatToKTXBasisTranscoderFormat(transcoderFormat: string): string; export declare const ktxTranscoderUrls: { jsUrl: string; wasmUrl: string; }; export declare function setKTXTranscoderPath(config: Partial): void; export declare function vkFormatToGPUFormat(vkFormat: number): COMPRESSED_TEXTURE_FORMATS; export declare function loadKTX2onWorker(url: string, supportedTextures: TEXTURE_FORMATS[]): Promise; export declare const detectCompressed: FormatDetectionParser; export declare const validFormats: string[]; export declare const resolveCompressedTextureUrl: { extension: ExtensionType.ResolveParser; test: (value: string) => boolean; parse: (value: string) => { resolution: number; format: string; src: string; }; }; type RectangleLike = { x: number; y: number; width: number; height: number; }; /** * The Culler class is responsible for managing and culling containers. * * * Culled containers will not be rendered, and their children will not be processed. This can be useful for * performance optimization when dealing with large scenes. * @example * import { Culler, Container } from 'pixi.js'; * * const culler = new Culler(); * const stage = new Container(); * * ... set up stage ... * * culler.cull(stage, { x: 0, y: 0, width: 800, height: 600 }); * renderer.render(stage); * @memberof scene */ export declare class Culler { /** * Culls the children of a specific container based on the given view. This will also cull items that are not * being explicitly managed by the culler. * @param container - The container to cull. * @param view - The view rectangle. * @param skipUpdateTransform - Whether to skip updating the transform. */ cull(container: Container, view: RectangleLike, skipUpdateTransform?: boolean): void; private _cullRecursive; /** A shared instance of the Culler class. */ static shared: Culler; } /** * An {@link app.Application} plugin that will automatically cull your stage using the renderers screen size. 
* @example * import { extensions, CullerPlugin } from 'pixi.js'; * * extensions.add(CullerPlugin); * @memberof app * @see {@link scene.Culler} */ export declare class CullerPlugin { /** @ignore */ static extension: ExtensionMetadata; static renderer: Renderer; static stage: Container; static render: () => void; private static _renderRef; static init(): void; static destroy(): void; } /** * PixiJS supports multiple environments including browsers, Web Workers, and Node.js. * The environment is auto-detected by default using the {@link environment.autoDetectEnvironment} function. * * The {@link environment.Adapter} interface provides a way to abstract away the differences between * these environments. PixiJS uses the {@link environment.BrowserAdapter} by default. * * However you can manually set the environment using the {@link environment.DOMAdapter} singleton, for example to * use Pixi within a WebWorker. * ```js * import { DOMAdapter, WebWorkerAdapter } from 'pixi.js'; * * // WebWorkerAdapter is an implementation of the Adapter interface * DOMAdapter.set(WebWorkerAdapter); * * // use the adapter to create a canvas (in this case an OffscreenCanvas) * DOMAdapter.get().createCanvas(800, 600); * ``` * @namespace environment */ /** * This interface describes all the DOM dependent calls that Pixi makes throughout its codebase. * Implementations of this interface can be used to make sure Pixi will work in any environment, * such as browser, Web Workers, and Node.js. * @memberof environment */ export interface Adapter { /** Returns a canvas object that can be used to create a webgl context. */ createCanvas: (width?: number, height?: number) => ICanvas; /** Returns a 2D rendering context. */ getCanvasRenderingContext2D: () => { prototype: ICanvasRenderingContext2D; }; /** Returns a WebGL rendering context. */ getWebGLRenderingContext: () => typeof WebGLRenderingContext; /** Returns a partial implementation of the browsers window.navigator */ getNavigator: () => { userAgent: string; gpu: GPU | null; }; /** Returns the current base URL For browser environments this is either the document.baseURI or window.location.href */ getBaseUrl: () => string; /** Return the font face set if available */ getFontFaceSet: () => FontFaceSet | null; /** Returns a Response object that has been fetched from the given URL. */ fetch: (url: RequestInfo, options?: RequestInit) => Promise; /** Returns Document object that has been parsed from the given XML string. */ parseXML: (xml: string) => Document; } /** * The DOMAdapter is a singleton that allows PixiJS to perform DOM operations, such as creating a canvas. * This allows PixiJS to be used in any environment, such as a web browser, Web Worker, or Node.js. * It uses the {@link environment.Adapter} interface to abstract away the differences between these environments * and uses the {@link environment.BrowserAdapter} by default. * * It has two methods: `get():Adapter` and `set(adapter: Adapter)`. * * Defaults to the {@link environment.BrowserAdapter}. * @example * import { DOMAdapter, WebWorkerAdapter } from 'pixi.js'; * * // WebWorkerAdapter is an implementation of the Adapter interface * DOMAdapter.set(WebWorkerAdapter); * * // use the adapter to create a canvas (in this case an OffscreenCanvas) * DOMAdapter.get().createCanvas(800, 600); * @memberof environment */ export declare const DOMAdapter: { /** * Returns the current adapter. * @returns {environment.Adapter} The current adapter. */ get(): Adapter; /** * Sets the current adapter. 
* @param adapter - The new adapter. */ set(adapter: Adapter): void; }; /** * Automatically detects the environment and loads the appropriate extensions. * @param skip - whether to skip loading the default extensions */ export declare function loadEnvironmentExtensions(skip: boolean): Promise; /** * @param add - whether to add the default imports to the bundle * @deprecated since 8.1.6. Use `loadEnvironmentExtensions` instead */ export declare function autoDetectEnvironment(add: boolean): Promise; /** * This is an implementation of the {@link environment.Adapter} interface. * It can be used to make Pixi work in the browser. * @memberof environment * @property {Function} createCanvas - Creates a canvas element of the given size. * This canvas is created using the browser's native canvas element. * @property {Function} getCanvasRenderingContext2D - Returns a 2D rendering context. * @property {Function} getWebGLRenderingContext - Returns a WebGL rendering context. * @property {Function} getNavigator - Returns browsers window.navigator * @property {Function} getBaseUrl - Returns the current base URL for browser environments this is either * the document.baseURI or window.location.href * @property {Function} getFontFaceSet - Return the font face set if available * @property {Function} fetch - Returns a Response object that has been fetched from the given URL. * @property {Function} parseXML - Returns Document object that has been parsed from the given XML string. */ export declare const BrowserAdapter: Adapter; /** * Extension for the browser environment. * @memberof environment */ export declare const browserExt: { extension: { type: ExtensionType; name: string; priority: number; }; test: () => boolean; load: () => Promise; }; /** * This is an implementation of the {@link environment.Adapter} interface. * It can be used to make Pixi work in a Web Worker. * @memberof environment * @property {Function} createCanvas - Creates a canvas element of the given size using the browser's native OffscreenCanvas. * @property {Function} getCanvasRenderingContext2D - Returns a 2D rendering context. * @property {Function} getWebGLRenderingContext - Returns a WebGL rendering context. * @property {Function} getNavigator - Returns browsers window.navigator * @property {Function} getBaseUrl - Returns the current base URL of the worker, which is globalThis.location.href * @property {Function} getFontFaceSet - Return the font face set if available * @property {Function} fetch - Returns a Response object that has been fetched from the given URL. * @property {Function} parseXML - Returns Document object that has been parsed from the given XML string. * @memberof environment */ export declare const WebWorkerAdapter: Adapter; /** * Extension for the webworker environment. * @memberof environment */ export declare const webworkerExt: { extension: { type: ExtensionType; name: string; priority: number; }; test: () => boolean; load: () => Promise; }; /** * A simplified shape of an interactive object for the `eventTarget` property of a {@link FederatedEvent} * @memberof events * @deprecated since 8.1.4 */ export interface FederatedEventTarget extends EventEmitter, EventTarget, Required { /** The parent of this event target. */ readonly parent?: FederatedEventTarget; /** The children of this event target. 
*/ readonly children?: ReadonlyArray; _internalEventMode: EventMode; /** Returns true if the Container has interactive 'static' or 'dynamic' */ isInteractive: () => boolean; /** Remove all listeners, or those of the specified event. */ removeAllListeners(event?: string | symbol): this; } declare class EventsTickerClass { /** The event system. */ events: EventSystem; /** The DOM element to listen to events on. */ domElement: HTMLElement; /** The frequency that fake events will be fired. */ interactionFrequency: number; private _deltaTime; private _didMove; private _tickerAdded; private _pauseUpdate; /** * Initializes the event ticker. * @param events - The event system. */ init(events: EventSystem): void; /** Whether to pause the update checks or not. */ get pauseUpdate(): boolean; set pauseUpdate(paused: boolean); /** Adds the ticker listener. */ addTickerListener(): void; /** Removes the ticker listener. */ removeTickerListener(): void; /** Sets flag to not fire extra events when the user has already moved there mouse */ pointerMoved(): void; /** Updates the state of interactive objects. */ private _update; /** * Updates the state of interactive objects if at least {@link interactionFrequency} * milliseconds have passed since the last invocation. * * Invoked by a throttled ticker update from {@link Ticker.system}. * @param ticker - The throttled ticker. */ private _tickerUpdate; } export declare const EventsTicker: EventsTickerClass; /** A helper object containing the hsl shader code for both glsl */ export declare const hslgl = "\n\tfloat getLuminosity(vec3 c) {\n\t\treturn 0.3 * c.r + 0.59 * c.g + 0.11 * c.b;\n\t}\n\n\tvec3 setLuminosity(vec3 c, float lum) {\n\t\tfloat modLum = lum - getLuminosity(c);\n\t\tvec3 color = c.rgb + vec3(modLum);\n\n\t\t// clip back into legal range\n\t\tmodLum = getLuminosity(color);\n\t\tvec3 modLumVec = vec3(modLum);\n\n\t\tfloat cMin = min(color.r, min(color.g, color.b));\n\t\tfloat cMax = max(color.r, max(color.g, color.b));\n\n\t\tif(cMin < 0.0) {\n\t\t\tcolor = mix(modLumVec, color, modLum / (modLum - cMin));\n\t\t}\n\n\t\tif(cMax > 1.0) {\n\t\t\tcolor = mix(modLumVec, color, (1.0 - modLum) / (cMax - modLum));\n\t\t}\n\n\t\treturn color;\n\t}\n\n\tfloat getSaturation(vec3 c) {\n\t\treturn max(c.r, max(c.g, c.b)) - min(c.r, min(c.g, c.b));\n\t}\n\n\tvec3 setSaturationMinMidMax(vec3 cSorted, float s) {\n\t\tvec3 colorSorted = cSorted;\n\n\t\tif(colorSorted.z > colorSorted.x) {\n\t\t\tcolorSorted.y = (((colorSorted.y - colorSorted.x) * s) / (colorSorted.z - colorSorted.x));\n\t\t\tcolorSorted.z = s;\n\t\t}\n\t\telse {\n\t\t\tcolorSorted.y = 0.0;\n\t\t\tcolorSorted.z = 0.0;\n\t\t}\n\n\t\tcolorSorted.x = 0.0;\n\n\t\treturn colorSorted;\n\t}\n\n\tvec3 setSaturation(vec3 c, float s) {\n\t\tvec3 color = c;\n\n\t\tif(color.r <= color.g && color.r <= color.b) {\n\t\t\tif(color.g <= color.b) {\n\t\t\t\tcolor = setSaturationMinMidMax(color.rgb, s).rgb;\n\t\t\t}\n\t\t\telse {\n\t\t\t\tcolor = setSaturationMinMidMax(color.rbg, s).rbg;\n\t\t\t}\n\t\t}\n\t\telse if(color.g <= color.r && color.g <= color.b) {\n\t\t\tif(color.r <= color.b) {\n\t\t\t\tcolor = setSaturationMinMidMax(color.grb, s).grb;\n\t\t\t}\n\t\t\telse {\n\t\t\t\tcolor = setSaturationMinMidMax(color.gbr, s).gbr;\n\t\t\t}\n\t\t}\n\t\telse {\n\t\t\t// Using bgr for both fixes part of hue\n\t\t\tif(color.r <= color.g) {\n\t\t\t\tcolor = setSaturationMinMidMax(color.brg, s).brg;\n\t\t\t}\n\t\t\telse {\n\t\t\t\tcolor = setSaturationMinMidMax(color.bgr, s).bgr;\n\t\t\t}\n\t\t}\n\n\t\treturn color;\n\t}\n "; 
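/*
 * Illustrative note (a hedged sketch, not part of the generated API surface): `hslgl` above and
 * `hslgpu` below are plain shader-source strings, so they can be prepended to a custom shader's
 * source before it is compiled. The fragment body below is hypothetical.
 * ```js
 * import { hslgl } from 'pixi.js';
 *
 * const fragment = `${hslgl}
 * void main() {
 *     // ... call getLuminosity() / setLuminosity() / setSaturation() here ...
 * }`;
 * ```
 */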
/** A helper object containing the hsl shader code for wgsl */ export declare const hslgpu = "\n\tfn getLuminosity(c: vec3) -> f32\n\t{\n\t\treturn 0.3*c.r + 0.59*c.g + 0.11*c.b;\n\t}\n\n\tfn setLuminosity(c: vec3, lum: f32) -> vec3\n\t{\n\t\tvar modLum: f32 = lum - getLuminosity(c);\n\t\tvar color: vec3 = c.rgb + modLum;\n\n\t\t// clip back into legal range\n\t\tmodLum = getLuminosity(color);\n\t\tlet modLumVec = vec3(modLum);\n\n\t\tlet cMin: f32 = min(color.r, min(color.g, color.b));\n\t\tlet cMax: f32 = max(color.r, max(color.g, color.b));\n\n\t\tif(cMin < 0.0)\n\t\t{\n\t\t\tcolor = mix(modLumVec, color, modLum / (modLum - cMin));\n\t\t}\n\n\t\tif(cMax > 1.0)\n\t\t{\n\t\t\tcolor = mix(modLumVec, color, (1 - modLum) / (cMax - modLum));\n\t\t}\n\n\t\treturn color;\n\t}\n\n\tfn getSaturation(c: vec3) -> f32\n\t{\n\t\treturn max(c.r, max(c.g, c.b)) - min(c.r, min(c.g, c.b));\n\t}\n\n\tfn setSaturationMinMidMax(cSorted: vec3, s: f32) -> vec3\n\t{\n\t\tvar colorSorted = cSorted;\n\n\t\tif(colorSorted.z > colorSorted.x)\n\t\t{\n\t\t\tcolorSorted.y = (((colorSorted.y - colorSorted.x) * s) / (colorSorted.z - colorSorted.x));\n\t\t\tcolorSorted.z = s;\n\t\t}\n\t\telse\n\t\t{\n\t\t\tcolorSorted.y = 0;\n\t\t\tcolorSorted.z = 0;\n\t\t}\n\n\t\tcolorSorted.x = 0;\n\n\t\treturn colorSorted;\n\t}\n\n\tfn setSaturation(c: vec3, s: f32) -> vec3\n\t{\n\t\tvar color = c;\n\n\t\tif (color.r <= color.g && color.r <= color.b)\n\t\t{\n\t\t\tif (color.g <= color.b)\n\t\t\t{\n\t\t\t\tcolor = vec3(setSaturationMinMidMax(color.rgb, s)).rgb;\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tcolor = vec3(setSaturationMinMidMax(color.rbg, s)).rbg;\n\t\t\t}\n\t\t}\n\t\telse if (color.g <= color.r && color.g <= color.b)\n\t\t{\n\t\t\tif (color.r <= color.b)\n\t\t\t{\n\t\t\t\tcolor = vec3(setSaturationMinMidMax(color.grb, s)).grb;\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tcolor = vec3(setSaturationMinMidMax(color.gbr, s)).gbr;\n\t\t\t}\n\t\t}\n\t\telse\n\t\t{\n\t\t\t// Using bgr for both fixes part of hue\n\t\t\tif (color.r <= color.g)\n\t\t\t{\n\t\t\t\tcolor = vec3(setSaturationMinMidMax(color.brg, s)).brg;\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tcolor = vec3(setSaturationMinMidMax(color.bgr, s)).bgr;\n\t\t\t}\n\t\t}\n\n\t\treturn color;\n\t}\n\t"; /** * Options for AlphaFilter * @memberof filters */ export interface AlphaFilterOptions extends FilterOptions { /** * Amount of alpha from 0 to 1, where 0 is transparent * @default 1 */ alpha: number; } /** * Simplest filter - applies alpha. * * Use this instead of Container's alpha property to avoid visual layering of individual elements. * AlphaFilter applies alpha evenly across the entire display object and any opaque elements it contains. * If elements are not opaque, they will blend with each other anyway. * * Very handy if you want to use common features of all filters: * * 1. Assign a blendMode to this filter, blend all elements inside display object with background. * * 2. To use clipping in display coordinates, assign a filterArea to the same container that has this filter. * @memberof filters */ export declare class AlphaFilter extends Filter { /** Default filter options */ static readonly defaultOptions: AlphaFilterOptions; constructor(options?: AlphaFilterOptions); /** * Coefficient for alpha multiplication * @default 1 */ get alpha(): number; set alpha(value: number); } /** * Options for BlurFilterPass * @memberof filters */ export interface BlurFilterPassOptions extends BlurFilterOptions { /** Do pass along the x-axis (`true`) or y-axis (`false`). 
*/ horizontal: boolean; } /** * The BlurFilterPass applies a horizontal or vertical Gaussian blur to an object. * @memberof filters */ export declare class BlurFilterPass extends Filter { /** Default blur filter pass options */ static defaultOptions: Partial; /** Do pass along the x-axis (`true`) or y-axis (`false`). */ horizontal: boolean; /** The number of passes to run the filter. */ passes: number; /** The strength of the blur filter. */ strength: number; private _quality; private readonly _uniforms; /** * @param options * @param options.horizontal - Do pass along the x-axis (`true`) or y-axis (`false`). * @param options.strength - The strength of the blur filter. * @param options.quality - The quality of the blur filter. * @param options.kernelSize - The kernelSize of the blur filter. Options: 5, 7, 9, 11, 13, 15. */ constructor(options: BlurFilterPassOptions); /** * Applies the filter. * @param filterManager - The manager. * @param input - The input target. * @param output - The output target. * @param clearMode - How to clear */ apply(filterManager: FilterSystem, input: Texture, output: RenderSurface, clearMode: boolean): void; /** * Sets the strength of the blur. * @default 16 */ get blur(): number; set blur(value: number); /** * Sets the quality of the blur by modifying the number of passes. More passes means higher * quality blurring but lower performance. * @default 4 */ get quality(): number; set quality(value: number); } /** * Options for BlurFilter * @memberof filters */ export interface BlurFilterOptions extends FilterOptions { /** * The strength of the blur filter. * @default 8 */ strength?: number; /** * The horizontal strength of the blur. * @default 8 */ strengthX?: number; /** * The vertical strength of the blur. * @default 8 */ strengthY?: number; /** * The quality of the blur filter. * @default 4 */ quality?: number; /** * The kernelSize of the blur filter. Options: 5, 7, 9, 11, 13, 15. * @default 5 */ kernelSize?: number; } /** * The BlurFilter applies a Gaussian blur to an object. * * The strength of the blur can be set for the x-axis and y-axis separately. * @memberof filters */ export declare class BlurFilter extends Filter { /** Default blur filter options */ static defaultOptions: Partial; /** The horizontal blur filter */ blurXFilter: BlurFilterPass; /** The vertical blur filter */ blurYFilter: BlurFilterPass; private _repeatEdgePixels; /** * @param {filters.BlurFilterOptions} options - The options of the blur filter. */ constructor(options?: BlurFilterOptions); /** @deprecated since 8.0.0 */ constructor(strength?: number, quality?: number, resolution?: number | null, kernelSize?: number); /** * Applies the filter. * @param filterManager - The manager. * @param input - The input target. * @param output - The output target. * @param clearMode - How to clear */ apply(filterManager: FilterSystem, input: Texture, output: RenderSurface, clearMode: boolean): void; protected updatePadding(): void; /** * Sets the strength of both the blurX and blurY properties simultaneously * @default 8 */ get strength(): number; set strength(value: number); /** * Sets the number of passes for blur. More passes means higher quality blurring.
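* @example
* // Hedged usage sketch: trade quality (number of passes) against performance.
* // `sprite` stands in for any display object.
* const blur = new BlurFilter({ strength: 8, quality: 6 });
* sprite.filters = [blur];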
* @default 1 */ get quality(): number; set quality(value: number); /** * Sets the strength of horizontal blur * @default 8 */ get strengthX(): number; set strengthX(value: number); /** * Sets the strength of the vertical blur * @default 8 */ get strengthY(): number; set strengthY(value: number); /** * Sets the strength of both the blurX and blurY properties simultaneously * @default 2 * @deprecated since 8.3.0 * @see BlurFilter.strength */ get blur(): number; set blur(value: number); /** * Sets the strength of the blurX property * @default 2 * @deprecated since 8.3.0 * @see BlurFilter.strengthX */ get blurX(): number; set blurX(value: number); /** * Sets the strength of the blurY property * @default 2 * @deprecated since 8.3.0 * @see BlurFilter.strengthY */ get blurY(): number; set blurY(value: number); /** * If set to true the edge of the target will be clamped * @default false */ get repeatEdgePixels(): boolean; set repeatEdgePixels(value: boolean); } export interface IGAUSSIAN_VALUES { [x: number]: number[]; } export declare const GAUSSIAN_VALUES: IGAUSSIAN_VALUES; export declare function generateBlurFragSource(kernelSize: number): string; export declare function generateBlurGlProgram(horizontal: boolean, kernelSize: number): GlProgram; export declare function generateBlurVertSource(kernelSize: number, x: boolean): string; export declare function generateBlurProgram(horizontal: boolean, kernelSize: number): GpuProgram; /** * 5x4 matrix for transforming RGBA color and alpha * @memberof filters */ export type ColorMatrix = ArrayFixed; /** * The ColorMatrixFilter class lets you apply a 5x4 matrix transformation on the RGBA * color and alpha values of every pixel on your container to produce a result * with a new set of RGBA color and alpha values. It's pretty powerful! * * ```js * let colorMatrix = new filters.ColorMatrixFilter(); * container.filters = [colorMatrix]; * colorMatrix.contrast(2); * ``` * @author Clément Chenebault * @memberof filters */ export declare class ColorMatrixFilter extends Filter { constructor(options?: FilterOptions); /** * Transforms current matrix and set the new one * @param {number[]} matrix - 5x4 matrix * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ private _loadMatrix; /** * Multiplies two mat5's * @private * @param out - 5x4 matrix the receiving matrix * @param a - 5x4 matrix the first operand * @param b - 5x4 matrix the second operand * @returns {number[]} 5x4 matrix */ private _multiply; /** * Create a Float32 Array and normalize the offset component to 0-1 * @param {number[]} matrix - 5x4 matrix * @returns {number[]} 5x4 matrix with all values between 0-1 */ private _colorMatrix; /** * Adjusts brightness * @param b - value of the brightness (0-1, where 0 is black) * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ brightness(b: number, multiply: boolean): void; /** * Sets each channel on the diagonal of the color matrix. * This can be used to achieve a tinting effect on Containers similar to the tint field of some * display objects like Sprite, Text, Graphics, and Mesh. * @param color - Color of the tint. This is a hex value. * @param multiply - if true, current matrix and matrix are multiplied. 
If false, * just set the current matrix with @param matrix */ tint(color: ColorSource, multiply?: boolean): void; /** * Set the matrices in grey scales * @param scale - value of the grey (0-1, where 0 is black) * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ greyscale(scale: number, multiply: boolean): void; /** * for our american friends! * @param scale * @param multiply */ grayscale(scale: number, multiply: boolean): void; /** * Set the black and white matrice. * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ blackAndWhite(multiply: boolean): void; /** * Set the hue property of the color * @param rotation - in degrees * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ hue(rotation: number, multiply: boolean): void; /** * Set the contrast matrix, increase the separation between dark and bright * Increase contrast : shadows darker and highlights brighter * Decrease contrast : bring the shadows up and the highlights down * @param amount - value of the contrast (0-1) * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ contrast(amount: number, multiply: boolean): void; /** * Set the saturation matrix, increase the separation between colors * Increase saturation : increase contrast, brightness, and sharpness * @param amount - The saturation amount (0-1) * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ saturate(amount?: number, multiply?: boolean): void; /** Desaturate image (remove color) Call the saturate function */ desaturate(): void; /** * Negative image (inverse of classic rgb matrix) * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ negative(multiply: boolean): void; /** * Sepia image * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ sepia(multiply: boolean): void; /** * Color motion picture process invented in 1916 (thanks Dominic Szablewski) * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ technicolor(multiply: boolean): void; /** * Polaroid filter * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ polaroid(multiply: boolean): void; /** * Filter who transforms : Red -> Blue and Blue -> Red * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ toBGR(multiply: boolean): void; /** * Color reversal film introduced by Eastman Kodak in 1935. (thanks Dominic Szablewski) * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ kodachrome(multiply: boolean): void; /** * Brown delicious browni filter (thanks Dominic Szablewski) * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ browni(multiply: boolean): void; /** * Vintage filter (thanks Dominic Szablewski) * @param multiply - if true, current matrix and matrix are multiplied. 
If false, * just set the current matrix with @param matrix */ vintage(multiply: boolean): void; /** * We don't know exactly what it does, kind of a gradient map, but funny to play with! * @param desaturation - Tone values. * @param toned - Tone values. * @param lightColor - Tone values, example: `0xFFE580` * @param darkColor - Tone values, example: `0xFFE580` * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ colorTone(desaturation: number, toned: number, lightColor: ColorSource, darkColor: ColorSource, multiply: boolean): void; /** * Night effect * @param intensity - The intensity of the night effect. * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ night(intensity: number, multiply: boolean): void; /** * Predator effect * * Erase the current matrix by setting a new independent one * @param amount - how much the predator feels his future victim * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ predator(amount: number, multiply: boolean): void; /** * LSD effect * * Multiply the current matrix * @param multiply - if true, current matrix and matrix are multiplied. If false, * just set the current matrix with @param matrix */ lsd(multiply: boolean): void; /** Erase the current matrix by setting the default one. */ reset(): void; /** * The matrix of the color matrix filter * @member {number[]} * @default [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0] */ get matrix(): ColorMatrix; set matrix(value: ColorMatrix); /** * The opacity value to use when mixing the original and resultant colors. * * When the value is 0, the original color is used without modification. * When the value is 1, the result color is used. * When in the range (0, 1) the color is interpolated between the original and result by this amount. * @default 1 */ get alpha(): number; set alpha(value: number); } /** * Options for DisplacementFilter * @memberof filters */ export interface DisplacementFilterOptions extends FilterOptions { /** The texture used for the displacement map. */ sprite: Sprite; /** The scale of the displacement. */ scale?: number | PointData; } /** * The DisplacementFilter uses the texture of a Sprite as a displacement map, offsetting the pixels * of the filtered object to create warp and ripple style distortion effects. * @memberof filters */ export declare class DisplacementFilter extends Filter { private readonly _sprite; /** * **Note:** Our docs parser struggles to properly understand the constructor signature. * This is the correct signature. * ```ts * new DisplacementFilter(options?: DisplacementFilterOptions); * ``` * @param options - The options for the filter. * @param options.sprite - The texture used for the displacement map. * @param options.scale - The scale of the displacement. */ constructor(options: Sprite | DisplacementFilterOptions); constructor(sprite: Sprite, scale?: number | PointData); /** * Applies the filter. * @param filterManager - The manager. * @param input - The input target. * @param output - The output target. * @param clearMode - How to clear
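* @example
* // Hedged usage sketch: `mapSprite` is a hypothetical Sprite whose texture is used as the
* // displacement map.
* const displacement = new DisplacementFilter({ sprite: mapSprite, scale: 50 });
* container.filters = [displacement];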
*/ apply(filterManager: FilterSystem, input: Texture, output: Texture, clearMode: boolean): void; /** scaleX, scaleY for displacements */ get scale(): Point; } /** * Options for NoiseFilter * @memberof filters */ export interface NoiseFilterOptions extends FilterOptions { /** The amount of noise to apply, this value should be in the range (0, 1]. */ noise?: number; /** A seed value to apply to the random noise generation. `Math.random()` is a good value to use. */ seed?: number; } /** * A Noise effect filter. * * original filter: https://github.com/evanw/glfx.js/blob/master/src/filters/adjust/noise.js * @memberof filters * @author Vico @vicocotea */ export declare class NoiseFilter extends Filter { static readonly defaultOptions: NoiseFilterOptions; /** * @param options - The options of the noise filter. */ constructor(options?: NoiseFilterOptions); /** * The amount of noise to apply, this value should be in the range (0, 1]. * @default 0.5 */ get noise(): number; set noise(value: number); /** A seed value to apply to the random noise generation. `Math.random()` is a good value to use. */ get seed(): number; set seed(value: number); } export interface MaskFilterOptions extends FilterOptions { sprite: Sprite; scale?: number | { x: number; y: number; }; } export declare class MaskFilter extends Filter { sprite: Sprite; private readonly _textureMatrix; constructor(options: MaskFilterOptions); apply(filterManager: FilterSystem, input: Texture, output: Texture, clearMode: boolean): void; } type GD8Symmetry = number; /** * @typedef {number} GD8Symmetry * @see groupD8 */ /** * Implements the dihedral group D8, which is similar to * [group D4]{@link http://mathworld.wolfram.com/DihedralGroupD4.html}; * D8 is the same but with diagonals, and it is used for texture * rotations. * * The directions the U- and V- axes after rotation * of an angle of `a: GD8Constant` are the vectors `(uX(a), uY(a))` * and `(vX(a), vY(a))`. These aren't necessarily unit vectors. * * **Origin:**
* This is the small part of gameofbombs.com portal system. It works. * @see maths.groupD8.E * @see maths.groupD8.SE * @see maths.groupD8.S * @see maths.groupD8.SW * @see maths.groupD8.W * @see maths.groupD8.NW * @see maths.groupD8.N * @see maths.groupD8.NE * @author Ivan @ivanpopelyshev * @namespace maths.groupD8 */ export declare const groupD8: { /** * | Rotation | Direction | * |----------|-----------| * | 0° | East | * @memberof maths.groupD8 * @constant {GD8Symmetry} */ E: number; /** * | Rotation | Direction | * |----------|-----------| * | 45°↻ | Southeast | * @memberof maths.groupD8 * @constant {GD8Symmetry} */ SE: number; /** * | Rotation | Direction | * |----------|-----------| * | 90°↻ | South | * @memberof maths.groupD8 * @constant {GD8Symmetry} */ S: number; /** * | Rotation | Direction | * |----------|-----------| * | 135°↻ | Southwest | * @memberof maths.groupD8 * @constant {GD8Symmetry} */ SW: number; /** * | Rotation | Direction | * |----------|-----------| * | 180° | West | * @memberof maths.groupD8 * @constant {GD8Symmetry} */ W: number; /** * | Rotation | Direction | * |-------------|--------------| * | -135°/225°↻ | Northwest | * @memberof maths.groupD8 * @constant {GD8Symmetry} */ NW: number; /** * | Rotation | Direction | * |-------------|--------------| * | -90°/270°↻ | North | * @memberof maths.groupD8 * @constant {GD8Symmetry} */ N: number; /** * | Rotation | Direction | * |-------------|--------------| * | -45°/315°↻ | Northeast | * @memberof maths.groupD8 * @constant {GD8Symmetry} */ NE: number; /** * Reflection about Y-axis. * @memberof maths.groupD8 * @constant {GD8Symmetry} */ MIRROR_VERTICAL: number; /** * Reflection about the main diagonal. * @memberof maths.groupD8 * @constant {GD8Symmetry} */ MAIN_DIAGONAL: number; /** * Reflection about X-axis. * @memberof maths.groupD8 * @constant {GD8Symmetry} */ MIRROR_HORIZONTAL: number; /** * Reflection about reverse diagonal. * @memberof maths.groupD8 * @constant {GD8Symmetry} */ REVERSE_DIAGONAL: number; /** * @memberof maths.groupD8 * @param {GD8Symmetry} ind - sprite rotation angle. * @returns {GD8Symmetry} The X-component of the U-axis * after rotating the axes. */ uX: (ind: GD8Symmetry) => GD8Symmetry; /** * @memberof maths.groupD8 * @param {GD8Symmetry} ind - sprite rotation angle. * @returns {GD8Symmetry} The Y-component of the U-axis * after rotating the axes. */ uY: (ind: GD8Symmetry) => GD8Symmetry; /** * @memberof maths.groupD8 * @param {GD8Symmetry} ind - sprite rotation angle. * @returns {GD8Symmetry} The X-component of the V-axis * after rotating the axes. */ vX: (ind: GD8Symmetry) => GD8Symmetry; /** * @memberof maths.groupD8 * @param {GD8Symmetry} ind - sprite rotation angle. * @returns {GD8Symmetry} The Y-component of the V-axis * after rotating the axes. */ vY: (ind: GD8Symmetry) => GD8Symmetry; /** * @memberof maths.groupD8 * @param {GD8Symmetry} rotation - symmetry whose opposite * is needed. Only rotations have opposite symmetries while * reflections don't. * @returns {GD8Symmetry} The opposite symmetry of `rotation` */ inv: (rotation: GD8Symmetry) => GD8Symmetry; /** * Composes the two D8 operations. 
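* For example, reading from the Cayley table below, composing two quarter-turns yields a
* half-turn: `groupD8.add(groupD8.S, groupD8.S) === groupD8.W`.
*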
* * Taking `^` as reflection: * * | | E=0 | S=2 | W=4 | N=6 | E^=8 | S^=10 | W^=12 | N^=14 | * |-------|-----|-----|-----|-----|------|-------|-------|-------| * | E=0 | E | S | W | N | E^ | S^ | W^ | N^ | * | S=2 | S | W | N | E | S^ | W^ | N^ | E^ | * | W=4 | W | N | E | S | W^ | N^ | E^ | S^ | * | N=6 | N | E | S | W | N^ | E^ | S^ | W^ | * | E^=8 | E^ | N^ | W^ | S^ | E | N | W | S | * | S^=10 | S^ | E^ | N^ | W^ | S | E | N | W | * | W^=12 | W^ | S^ | E^ | N^ | W | S | E | N | * | N^=14 | N^ | W^ | S^ | E^ | N | W | S | E | * * [This is a Cayley table]{@link https://en.wikipedia.org/wiki/Cayley_table} * @memberof maths.groupD8 * @param {GD8Symmetry} rotationSecond - Second operation, which * is the row in the above cayley table. * @param {GD8Symmetry} rotationFirst - First operation, which * is the column in the above cayley table. * @returns {GD8Symmetry} Composed operation */ add: (rotationSecond: GD8Symmetry, rotationFirst: GD8Symmetry) => GD8Symmetry; /** * Reverse of `add`. * @memberof maths.groupD8 * @param {GD8Symmetry} rotationSecond - Second operation * @param {GD8Symmetry} rotationFirst - First operation * @returns {GD8Symmetry} Result */ sub: (rotationSecond: GD8Symmetry, rotationFirst: GD8Symmetry) => GD8Symmetry; /** * Adds 180 degrees to rotation, which is a commutative * operation. * @memberof maths.groupD8 * @param {number} rotation - The number to rotate. * @returns {number} Rotated number */ rotate180: (rotation: number) => number; /** * Checks if the rotation angle is vertical, i.e. south * or north. It doesn't work for reflections. * @memberof maths.groupD8 * @param {GD8Symmetry} rotation - The number to check. * @returns {boolean} Whether or not the direction is vertical */ isVertical: (rotation: GD8Symmetry) => boolean; /** * Approximates the vector `V(dx,dy)` into one of the * eight directions provided by `groupD8`. * @memberof maths.groupD8 * @param {number} dx - X-component of the vector * @param {number} dy - Y-component of the vector * @returns {GD8Symmetry} Approximation of the vector into * one of the eight symmetries. */ byDirection: (dx: number, dy: number) => GD8Symmetry; /** * Helps sprite to compensate texture packer rotation. * @memberof maths.groupD8 * @param {Matrix} matrix - sprite world matrix * @param {GD8Symmetry} rotation - The rotation factor to use. * @param {number} tx - sprite anchoring * @param {number} ty - sprite anchoring */ matrixAppendRotationInv: (matrix: Matrix, rotation: GD8Symmetry, tx?: number, ty?: number) => void; }; /** * Rounds to next power of two. * @function nextPow2 * @param {number} v - input value * @returns {number} - next rounded power of two * @memberof maths */ export declare function nextPow2(v: number): number; /** * Checks if a number is a power of two. * @function isPow2 * @param {number} v - input value * @returns {boolean} `true` if value is power of two * @memberof maths */ export declare function isPow2(v: number): boolean; /** * Computes ceil of log base 2 * @function log2 * @param {number} v - input value * @returns {number} logarithm base 2 * @memberof maths */ export declare function log2(v: number): number; export declare function squaredDistanceToLineSegment(x: number, y: number, x1: number, y1: number, x2: number, y2: number): number; /** * Check if a point is inside a triangle. 
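* @example
* // Illustrative check: the point (5, 5) lies inside the right triangle with
* // vertices (0, 0), (20, 0) and (0, 20).
* pointInTriangle(5, 5, 0, 0, 20, 0, 0, 20); // true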
* @param px - x coordinate of the point * @param py - y coordinate of the point * @param x1 - x coordinate of the first vertex of the triangle * @param y1 - y coordinate of the first vertex of the triangle * @param x2 - x coordinate of the second vertex of the triangle * @param y2 - y coordinate of the second vertex of the triangle * @param x3 - x coordinate of the third vertex of the triangle * @param y3 - y coordinate of the third vertex of the triangle * @returns `true` if the point is inside the triangle, `false` otherwise */ export declare function pointInTriangle(px: number, py: number, x1: number, y1: number, x2: number, y2: number, x3: number, y3: number): boolean; /** * The Circle object is used to help draw graphics and can also be used to specify a hit area for containers. * @memberof maths */ export declare class Circle implements ShapePrimitive { /** * The X coordinate of the center of this circle * @default 0 */ x: number; /** * The Y coordinate of the center of this circle * @default 0 */ y: number; /** * The radius of the circle * @default 0 */ radius: number; /** * The type of the object, mainly used to avoid `instanceof` checks * @default 'circle' */ readonly type: SHAPE_PRIMITIVE; /** * @param x - The X coordinate of the center of this circle * @param y - The Y coordinate of the center of this circle * @param radius - The radius of the circle */ constructor(x?: number, y?: number, radius?: number); /** * Creates a clone of this Circle instance * @returns A copy of the Circle */ clone(): Circle; /** * Checks whether the x and y coordinates given are contained within this circle * @param x - The X coordinate of the point to test * @param y - The Y coordinate of the point to test * @returns Whether the x/y coordinates are within this Circle */ contains(x: number, y: number): boolean; /** * Checks whether the x and y coordinates given are contained within this circle including the stroke. * @param x - The X coordinate of the point to test * @param y - The Y coordinate of the point to test * @param width - The width of the line to check * @returns Whether the x/y coordinates are within this Circle */ strokeContains(x: number, y: number, width: number): boolean; /** * Returns the framing rectangle of the circle as a Rectangle object * @param out * @returns The framing rectangle */ getBounds(out?: Rectangle): Rectangle; /** * Copies another circle to this one. * @param circle - The circle to copy from. * @returns Returns itself. */ copyFrom(circle: Circle): this; /** * Copies this circle to another one. * @param circle - The circle to copy to. * @returns Returns given parameter. */ copyTo(circle: Circle): Circle; toString(): string; } /** * The Ellipse object is used to help draw graphics and can also be used to specify a hit area for containers. 
* ```js * import { Ellipse } from 'pixi.js'; * * const ellipse = new Ellipse(0, 0, 20, 10); // 40x20 rectangle * const isPointInEllipse = ellipse.contains(0, 0); // true * ``` * @memberof maths */ export declare class Ellipse implements ShapePrimitive { /** * The X coordinate of the center of this ellipse * @default 0 */ x: number; /** * The Y coordinate of the center of this ellipse * @default 0 */ y: number; /** * The half width of this ellipse * @default 0 */ halfWidth: number; /** * The half height of this ellipse * @default 0 */ halfHeight: number; /** * The type of the object, mainly used to avoid `instanceof` checks * @default 'ellipse' */ readonly type = "ellipse"; /** * @param x - The X coordinate of the center of this ellipse * @param y - The Y coordinate of the center of this ellipse * @param halfWidth - The half width of this ellipse * @param halfHeight - The half height of this ellipse */ constructor(x?: number, y?: number, halfWidth?: number, halfHeight?: number); /** * Creates a clone of this Ellipse instance * @returns {Ellipse} A copy of the ellipse */ clone(): Ellipse; /** * Checks whether the x and y coordinates given are contained within this ellipse * @param x - The X coordinate of the point to test * @param y - The Y coordinate of the point to test * @returns Whether the x/y coords are within this ellipse */ contains(x: number, y: number): boolean; /** * Checks whether the x and y coordinates given are contained within this ellipse including stroke * @param x - The X coordinate of the point to test * @param y - The Y coordinate of the point to test * @param width * @returns Whether the x/y coords are within this ellipse */ strokeContains(x: number, y: number, width: number): boolean; /** * Returns the framing rectangle of the ellipse as a Rectangle object * @param out * @returns The framing rectangle */ getBounds(out?: Rectangle): Rectangle; /** * Copies another ellipse to this one. * @param ellipse - The ellipse to copy from. * @returns Returns itself. */ copyFrom(ellipse: Ellipse): this; /** * Copies this ellipse to another one. * @param ellipse - The ellipse to copy to. * @returns Returns given parameter. */ copyTo(ellipse: Ellipse): Ellipse; toString(): string; } /** * A class to define a shape via user defined coordinates. * * * `Polygon` can accept the following different constructor arguments: * - An array of `Point` objects * - An array of coordinate pairs * * * These can be passed as a single array, or as a sequence of arguments. * ```js * import { Polygon } from 'pixi.js'; * * // create a polygon object from an array of points, or an array of coordinate pairs * const polygon1 = new Polygon([ new Point(0, 0), new Point(0, 100), new Point(100, 100) ]); * const polygon2 = new Polygon([ 0, 0, 0, 100, 100, 100 ]); * * // or create a polygon object from a sequence of points, or coordinate pairs * const polygon3 = new Polygon(new Point(0, 0), new Point(0, 100), new Point(100, 100)); * const polygon4 = new Polygon(0, 0, 0, 100, 100, 100); * ``` * @memberof maths */ export declare class Polygon implements ShapePrimitive { /** An array of the points of this polygon. */ points: number[]; /** `false` after moveTo, `true` after `closePath`. In all other cases it is `true`. */ closePath: boolean; /** * The type of the object, mainly used to avoid `instanceof` checks * @default 'polygon' */ readonly type: SHAPE_PRIMITIVE; constructor(points: PointData[] | number[]); constructor(...points: PointData[] | number[]); /** * Creates a clone of this polygon. 
* @returns - A copy of the polygon. */ clone(): Polygon; /** * Checks whether the x and y coordinates passed to this function are contained within this polygon. * @param x - The X coordinate of the point to test. * @param y - The Y coordinate of the point to test. * @returns - Whether the x/y coordinates are within this polygon. */ contains(x: number, y: number): boolean; /** * Checks whether the x and y coordinates given are contained within this polygon including the stroke. * @param x - The X coordinate of the point to test * @param y - The Y coordinate of the point to test * @param strokeWidth - The width of the line to check * @returns Whether the x/y coordinates are within this polygon */ strokeContains(x: number, y: number, strokeWidth: number): boolean; /** * Returns the framing rectangle of the polygon as a Rectangle object * @param out - optional rectangle to store the result * @returns The framing rectangle */ getBounds(out?: Rectangle): Rectangle; /** * Copies another polygon to this one. * @param polygon - The polygon to copy from. * @returns Returns itself. */ copyFrom(polygon: Polygon): this; /** * Copies this polygon to another one. * @param polygon - The polygon to copy to. * @returns Returns given parameter. */ copyTo(polygon: Polygon): Polygon; toString(): string; /** * Get the last X coordinate of the polygon * @readonly */ get lastX(): number; /** * Get the last Y coordinate of the polygon * @readonly */ get lastY(): number; /** * Get the first X coordinate of the polygon * @readonly */ get x(): number; /** * Get the first Y coordinate of the polygon * @readonly */ get y(): number; } /** * The `RoundedRectangle` object is an area defined by its position, as indicated by its top-left corner * point (`x`, `y`) and by its `width` and its `height`, including a `radius` property that * defines the radius of the rounded corners. * @memberof maths */ export declare class RoundedRectangle implements ShapePrimitive { /** * The X coordinate of the upper-left corner of the rounded rectangle * @default 0 */ x: number; /** * The Y coordinate of the upper-left corner of the rounded rectangle * @default 0 */ y: number; /** * The overall width of this rounded rectangle * @default 0 */ width: number; /** * The overall height of this rounded rectangle * @default 0 */ height: number; /** * Controls the radius of the rounded corners * @default 20 */ radius: number; /** * The type of the object, mainly used to avoid `instanceof` checks * @default 'roundedRectangle' */ readonly type = "roundedRectangle"; /** * @param x - The X coordinate of the upper-left corner of the rounded rectangle * @param y - The Y coordinate of the upper-left corner of the rounded rectangle * @param width - The overall width of this rounded rectangle * @param height - The overall height of this rounded rectangle * @param radius - Controls the radius of the rounded corners */ constructor(x?: number, y?: number, width?: number, height?: number, radius?: number); /** * Returns the framing rectangle of the rounded rectangle as a Rectangle object * @param out - optional rectangle to store the result * @returns The framing rectangle */ getBounds(out?: Rectangle): Rectangle; /** * Creates a clone of this Rounded Rectangle. * @returns - A copy of the rounded rectangle. */ clone(): RoundedRectangle; /** * Copies another rectangle to this one. * @param rectangle - The rectangle to copy from. * @returns Returns itself. */ copyFrom(rectangle: RoundedRectangle): this; /** * Copies this rectangle to another one. 
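* @example
* // Hedged sketch: copy one rounded rectangle's values into another.
* const source = new RoundedRectangle(0, 0, 100, 50, 10);
* const target = new RoundedRectangle();
* source.copyTo(target); // target now has the same position, size and radius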
* @param rectangle - The rectangle to copy to. * @returns Returns given parameter. */ copyTo(rectangle: RoundedRectangle): RoundedRectangle; /** * Checks whether the x and y coordinates given are contained within this Rounded Rectangle * @param x - The X coordinate of the point to test. * @param y - The Y coordinate of the point to test. * @returns - Whether the x/y coordinates are within this Rounded Rectangle. */ contains(x: number, y: number): boolean; /** * Checks whether the x and y coordinates given are contained within this rectangle including the stroke. * @param pX - The X coordinate of the point to test * @param pY - The Y coordinate of the point to test * @param strokeWidth - The width of the line to check * @returns Whether the x/y coordinates are within this rectangle */ strokeContains(pX: number, pY: number, strokeWidth: number): boolean; toString(): string; } /** * A class to define a shape of a triangle via user defined coordinates. * * Create a `Triangle` object with the `x`, `y`, `x2`, `y2`, `x3`, `y3` properties. * * ```js * import { Triangle } from 'pixi.js'; * * const triangle = new Triangle(0, 0, 100, 0, 50, 50); * ``` * @memberof maths */ export declare class Triangle implements ShapePrimitive { /** * The type of the object, mainly used to avoid `instanceof` checks * @default 'triangle' */ readonly type: SHAPE_PRIMITIVE; /** * The X coord of the first point. * @default 0 */ x: number; /** * The Y coord of the first point. * @default 0 */ y: number; /** * The X coord of the second point. * @default 0 */ x2: number; /** * The Y coord of the second point. * @default 0 */ y2: number; /** * The X coord of the third point. * @default 0 */ x3: number; /** * The Y coord of the third point. * @default 0 */ y3: number; /** * @param x - The X coord of the first point. * @param y - The Y coord of the first point. * @param x2 - The X coord of the second point. * @param y2 - The Y coord of the second point. * @param x3 - The X coord of the third point. * @param y3 - The Y coord of the third point. */ constructor(x?: number, y?: number, x2?: number, y2?: number, x3?: number, y3?: number); /** * Checks whether the x and y coordinates given are contained within this triangle * @param x - The X coordinate of the point to test * @param y - The Y coordinate of the point to test * @returns Whether the x/y coordinates are within this Triangle */ contains(x: number, y: number): boolean; /** * Checks whether the x and y coordinates given are contained within this triangle including the stroke. * @param pointX - The X coordinate of the point to test * @param pointY - The Y coordinate of the point to test * @param strokeWidth - The width of the line to check * @returns Whether the x/y coordinates are within this triangle */ strokeContains(pointX: number, pointY: number, strokeWidth: number): boolean; /** * Creates a clone of this Triangle * @returns a copy of the triangle */ clone(): ShapePrimitive; /** * Copies another triangle to this one. * @param triangle - The triangle to copy from. * @returns Returns itself. */ copyFrom(triangle: Triangle): this; /** * Copies this triangle to another one. * @param triangle - The triangle to copy to. * @returns Returns given parameter. */ copyTo(triangle: Triangle): Triangle; /** * Returns the framing rectangle of the triangle as a Rectangle object * @param out - optional rectangle to store the result * @returns The framing rectangle */ getBounds(out?: Rectangle): Rectangle; } /** * Part of the prepare system. 
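The shape primitives above (`Ellipse`, `Polygon`, `RoundedRectangle`, `Triangle`) all share the `ShapePrimitive` surface of `contains`, `strokeContains`, `getBounds` and `clone`. A minimal hit-testing sketch that relies only on the members declared above:

```ts
import { Ellipse, Polygon, Rectangle, Triangle } from 'pixi.js';

const ellipse = new Ellipse(0, 0, 20, 10);             // centre (0, 0), half-width 20, half-height 10
const polygon = new Polygon([0, 0, 0, 100, 100, 100]);
const triangle = new Triangle(0, 0, 100, 0, 50, 50);

// Point-in-shape tests
console.log(ellipse.contains(5, 5));                   // true: (5/20)^2 + (5/10)^2 < 1
console.log(triangle.contains(50, 25));                // point inside the triangle

// Hit-test against a 4px-wide stroke around the polygon's outline
console.log(polygon.strokeContains(0, 50, 4));

// Axis-aligned bounding box, written into a reusable Rectangle
const bounds = new Rectangle();
console.log(ellipse.getBounds(bounds));                // x: -20, y: -10, width: 40, height: 20
```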
Responsible for uploading all the items to the GPU. * This class extends the base functionality and resolves given resource items ready for the queue. * @memberof rendering */ export declare abstract class PrepareQueue extends PrepareBase { /** * Resolve the given resource type and return an item for the queue * @param source * @param queue */ protected resolveQueueItem(source: PrepareSourceItem, queue: PrepareQueueItem[]): void; /** * Resolve the given container and return an item for the queue * @param container * @param queue */ protected resolveContainerQueueItem(container: Container, queue: PrepareQueueItem[]): void; /** * Resolve the given graphics context and return an item for the queue * @param graphicsContext */ protected resolveGraphicsContextQueueItem(graphicsContext: GraphicsContext): PrepareQueueItem | null; } /** * Part of the prepare system. Responsible for uploading all the items to the GPU. * This class extends the resolver functionality and uploads the given queue items. * @memberof rendering */ export declare abstract class PrepareUpload extends PrepareQueue { /** * Upload the given queue item * @param item */ protected uploadQueueItem(item: PrepareQueueItem): void; protected uploadTextureSource(textureSource: TextureSource): void; protected uploadText(_text: Text$1): void; protected uploadBitmapText(_text: BitmapText): void; protected uploadHTMLText(_text: HTMLText): void; /** * Upload the given graphics context * @param graphicsContext */ protected uploadGraphicsContext(graphicsContext: GraphicsContext): void; } /** * The prepare system provides renderer-specific plugins for pre-rendering DisplayObjects. This is useful for * asynchronously preparing and uploading to the GPU assets, textures, graphics waiting to be displayed. * * Do not instantiate this plugin directly. It is available from the `renderer.prepare` property. * @example * import 'pixi.js/prepare'; * import { Application, Graphics } from 'pixi.js'; * * // Create a new application (prepare will be auto-added to renderer) * const app = new Application(); * await app.init(); * document.body.appendChild(app.canvas); * * // Don't start rendering right away * app.stop(); * * // Create a display object * const rect = new Graphics() * .rect(40, 40, 200, 200) * .fill(0x00ff00); * * // Add to the stage * app.stage.addChild(rect); * * // Don't start rendering until the graphic is uploaded to the GPU * app.renderer.prepare.upload(app.stage, () => { * app.start(); * }); * @memberof rendering */ export declare class PrepareSystem extends PrepareUpload implements System$1 { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "prepare"; }; /** Destroys the plugin, don't use after this. */ destroy(): void; } /** * Does exactly the same as getGlobalBounds, but instead makes use of transforming AABBs * of the various children within the scene graph. This is much faster, but less accurate. * * The result will never be smaller - only ever slightly larger (in most cases, it will be the same). * @param target - The target container to get the bounds from * @param bounds - The output bounds object. * @returns The bounds. 
*/ export declare function getFastGlobalBounds(target: Container, bounds: Bounds): Bounds; export declare function _getGlobalBoundsRecursive(target: Container, bounds: Bounds): void; export declare function getGlobalBounds(target: Container, skipUpdateTransform: boolean, bounds: Bounds): Bounds; export declare function _getGlobalBounds(target: Container, bounds: Bounds, parentTransform: Matrix, skipUpdateTransform: boolean): void; export declare function updateTransformBackwards(target: Container, parentTransform: Matrix): Matrix; export declare function getLocalBounds(target: Container, bounds: Bounds, relativeMatrix?: Matrix): Bounds; export declare function getParent(target: Container, root: Container, matrix: Matrix): void; export declare function getGlobalRenderableBounds(renderables: Renderable[], bounds: Bounds): Bounds; type MatrixPoolItem = Matrix & PoolItem; type BoundsPoolItem = Bounds & PoolItem; export declare const matrixPool: Pool; export declare const boundsPool: Pool; /** * Assigns properties from one object to another, using an optional object of property names to ignore. * @param target - The target object to assign properties to. * @param options - The object to assign properties from. * @param ignore - An object of property names to ignore ({ propToIgnore: true }). */ export declare function assignWithIgnore>(target: T, options: T, ignore?: Record): void; /** * @param renderGroup * @param renderPipes * @deprecated since 8.3.0 */ export declare function buildInstructions(renderGroup: RenderGroup, renderPipes: RenderPipes): void; export declare function buildInstructions(renderGroup: RenderGroup, renderer: Renderer): void; /** * @param container * @param instructionSet * @param renderer * @deprecated since 8.3.0 */ export declare function collectAllRenderables(container: Container, instructionSet: InstructionSet, renderer: RenderPipes): void; export declare function collectAllRenderables(container: Container, instructionSet: InstructionSet, renderer: Renderer): void; /** * This function will crawl through the container and essentially check if the children have changed. * * The function checkChildrenDidChange recursively checks if any child in a Container * or its children has changed. It does this by comparing a generated changeId for each * child against a stored value in previousData. * The changeId is a combination of the child's uid and _didChangeId, bitwise manipulated for uniqueness. * If a change is detected, it updates previousData and sets didChange to true. * The function returns a boolean indicating if any change was detected in the entire hierarchy of children. * @param container - the container to check for changes * @param previousData - the previous data from the last check made * @param previousData.data - the data array * @param previousData.index - the index of the data array * @param previousData.didChange - did the data change */ export declare function checkChildrenDidChange(container: Container, previousData: { data: number[]; index: number; didChange: boolean; }): boolean; /** * Nulls all slots in an array from a certain index. * Assumes that when a null item is hit, the rest are also null, * which will be the case where this is used! 
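A usage sketch for `checkChildrenDidChange`, based only on the signature and behaviour described above. It assumes the helper is importable from the package entry point (it is declared as an export in this bundle) and that `index`/`didChange` are reset before each traversal; both assumptions should be verified against your build.

```ts
import { Container, checkChildrenDidChange } from 'pixi.js'; // assumption: re-exported from the main entry

// Reuse the same record between calls so the stored changeIds can be compared.
const previousData = { data: [] as number[], index: 0, didChange: false };

function didSceneChange(stage: Container): boolean
{
    // Assumption: the traversal starts writing at index 0 each time.
    previousData.index = 0;
    previousData.didChange = false;

    return checkChildrenDidChange(stage, previousData);
}
```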
* @param list - the array to clean * @param index - the index to start from */ export declare function clearList(list: Array, index?: number): void; export declare function collectRenderGroups(renderGroup: RenderGroup, out?: RenderGroup[]): RenderGroup[]; /** * Returns a new object with all properties from the input object that have defined values. * @template T - The type of the input object. * @param {T} obj - The input object. * @returns {T} - A new object with only the defined properties from the input object. * @memberof utils * @ignore */ export declare function definedProps>(obj: T): T; export declare function executeInstructions(renderGroup: RenderGroup, renderer: RenderPipes): void; export declare function mixColors(localBGRColor: number, parentBGRColor: number): number; export declare function mixStandardAnd32BitColors(localColorRGB: number, localAlpha: number, parentColor: number): number; export declare function mixHexColors(color1: number, color2: number, ratio: number): number; export declare function multiplyHexColors(color1: number, color2: number): number; export declare function updateLocalTransform(lt: Matrix, container: Container): void; export declare function updateRenderGroupTransforms(renderGroup: RenderGroup, updateChildRenderGroups?: boolean): void; export declare function updateRenderGroupTransform(renderGroup: RenderGroup): void; export declare function updateTransformAndChildren(container: Container, updateTick: number, updateFlags: number): void; export declare function updateWorldTransform(local: Matrix, parent: Matrix, world: Matrix): void; export declare function validateRenderables(renderGroup: RenderGroup, renderPipes: RenderPipes): boolean; /** * A GraphicsAdaptor that uses WebGL to render graphics. * @memberof rendering * @ignore */ export declare class GlGraphicsAdaptor implements GraphicsAdaptor { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipesAdaptor ]; readonly name: "graphics"; }; shader: Shader; init(): void; execute(graphicsPipe: GraphicsPipe, renderable: Graphics): void; destroy(): void; } export declare function colorToUniform(rgb: number, alpha: number, out: Float32Array, offset: number): void; export declare function color32BitToUniform(abgr: number, out: Float32Array, offset: number): void; /** * A GraphicsAdaptor that uses the GPU to render graphics. * @memberof rendering * @ignore */ export declare class GpuGraphicsAdaptor implements GraphicsAdaptor { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUPipesAdaptor ]; readonly name: "graphics"; }; shader: Shader; init(): void; execute(graphicsPipe: GraphicsPipe, renderable: Graphics): void; destroy(): void; } export declare function buildAdaptiveBezier(points: number[], sX: number, sY: number, cp1x: number, cp1y: number, cp2x: number, cp2y: number, eX: number, eY: number, smoothness?: number): number[]; export declare function buildAdaptiveQuadratic(points: number[], sX: number, sY: number, cp1x: number, cp1y: number, eX: number, eY: number, smoothness?: number): number[]; export declare function buildArc(points: number[], x: number, y: number, radius: number, start: number, end: number, clockwise: boolean, steps?: number): void; /** * The arcTo() method creates an arc/curve between two tangents on the canvas. * * "borrowed" from https://code.google.com/p/fxcanvas/ - thanks google! 
* @param points * @param x1 * @param y1 * @param x2 * @param y2 * @param radius */ export declare function buildArcTo(points: number[], x1: number, y1: number, x2: number, y2: number, radius: number): void; export declare function buildArcToSvg(points: number[], px: number, py: number, cx: number, cy: number, rx: number, ry: number, xAxisRotation?: number, largeArcFlag?: number, sweepFlag?: number): void; export interface ShapeBuildCommand { extension: ExtensionMetadataDetails; build(shape: T, points: number[]): void; triangulate(points: number[], vertices: number[], verticesStride: number, verticesOffset: number, indices: number[], indicesOffset: number): void; } type RoundedShape = Circle | Ellipse | RoundedRectangle; /** * Builds a circle to draw * * Ignored from docs since it is not directly exposed. * @ignore * @private */ export declare const buildCircle: ShapeBuildCommand; export declare const buildEllipse: { extension: { name: string; type: ExtensionType | ExtensionType[]; priority?: number; }; build(shape: RoundedShape, points: number[]): void; triangulate(points: number[], vertices: number[], verticesStride: number, verticesOffset: number, indices: number[], indicesOffset: number): void; }; export declare const buildRoundedRectangle: { extension: { name: string; type: ExtensionType | ExtensionType[]; priority?: number; }; build(shape: RoundedShape, points: number[]): void; triangulate(points: number[], vertices: number[], verticesStride: number, verticesOffset: number, indices: number[], indicesOffset: number): void; }; /** * Builds a line to draw using the polygon method. * @param points * @param lineStyle * @param flipAlignment * @param closed * @param vertices * @param _verticesStride * @param _verticesOffset * @param indices * @param _indicesOffset */ export declare function buildLine(points: number[], lineStyle: StrokeAttributes, flipAlignment: boolean, closed: boolean, vertices: number[], _verticesStride: number, _verticesOffset: number, indices: number[], _indicesOffset: number): void; /** * Builds a polygon to draw * * Ignored from docs since it is not directly exposed. * @ignore * @private */ export declare const buildPolygon: ShapeBuildCommand; /** * Builds a rectangle to draw * * Ignored from docs since it is not directly exposed. * @ignore * @private */ export declare const buildRectangle: ShapeBuildCommand; /** * Builds a triangle to draw * * Ignored from docs since it is not directly exposed. * @ignore * @private */ export declare const buildTriangle: ShapeBuildCommand; export declare function SVGParser(svg: string | SVGElement | SVGSVGElement, graphicsContext?: GraphicsContext): GraphicsContext; export declare function SVGToGraphicsPath(svgPath: string, path: GraphicsPath): GraphicsPath; export declare const shapeBuilders: Record; export declare function buildContextBatches(context: GraphicsContext, gpuContext: GpuGraphicsContext): void; export interface GeometryPathOptions { /** the path to build the geometry from */ path: GraphicsPath; /** a `Matrix` that can be used to modify the texture UVs of the path being built */ textureMatrix?: Matrix; /** an optional `MeshGeometry` to write to instead of creating a new one */ out?: MeshGeometry; } /** * When building a mesh, it helps to leverage the simple API we have in `GraphicsPath`, as it can often be easier * to define the geometry in a more human-readable way. This function takes a `GraphicsPath` and returns a `MeshGeometry`. 
* @example * ```ts * * const path = new GraphicsPath() * .drawRect(0, 0, 100, 100) * * const geometry:MeshGeometry = buildGeometryFromPath(path); * * const mesh = new Mesh({geometry}); * * ``` * You can also pass in a Matrix to transform the uvs as by default you may want to control how they are set up. * @param options - either a `GraphicsPath` or `GeometryPathOptions` * @returns a new `MeshGeometry` instance build from the path */ export declare function buildGeometryFromPath(options: GraphicsPath | GeometryPathOptions): MeshGeometry; /** * Converts a value to a fill style, we do this as PixiJS has a number of ways to define a fill style * They can be a direct color, a texture, a gradient, or an object with these values in them * This function will take any of these input types and convert them into a single object * that PixiJS can understand and use internally. * @param value - The value to convert to a fill style * @param defaultStyle - The default fill style to use * @private */ export declare function toFillStyle(value: T, defaultStyle: ConvertedFillStyle): ConvertedFillStyle; /** * Converts a value to a stroke style, similar to `toFillStyle` but for strokes * @param value - The value to convert to a stroke style * @param defaultStyle - The default stroke style to use * @private */ export declare function toStrokeStyle(value: StrokeInput, defaultStyle: ConvertedStrokeStyle): ConvertedStrokeStyle; export declare function getOrientationOfPoints(points: number[]): number; export declare function triangulateWithHoles(points: number[], holes: number[], vertices: number[], verticesStride: number, verticesOffset: number, indices: number[], indicesOffset: number): void; /** * Constructor options used for `PlaneGeometry` instances. * ```js * const planeGeometry = new PlaneGeometry({ * width: 100, * height: 100, * verticesX: 10, * verticesY: 10, * }); * ``` * @see {@link scene.PlaneGeometry} * @memberof scene */ export interface PlaneGeometryOptions { /** Width of plane */ width?: number; /** Height of plane */ height?: number; /** Number of vertices on x-axis */ verticesX?: number; /** Number of vertices on y-axis */ verticesY?: number; } /** * The PlaneGeometry allows you to draw a 2d plane * @memberof scene */ export declare class PlaneGeometry extends MeshGeometry { static defaultOptions: PlaneGeometryOptions & MeshGeometryOptions; /** The number of vertices on x-axis */ verticesX: number; /** The number of vertices on y-axis */ verticesY: number; /** The width of plane */ width: number; /** The height of plane */ height: number; /** * @param {PlaneGeometryOptions} options - Options to be applied to plane geometry */ constructor(options: PlaneGeometryOptions); /** @deprecated since 8.0.0 */ constructor(width?: number, height?: number, verticesX?: number, verticesY?: number); /** * Refreshes plane coordinates * @param options - Options to be applied to plane geometry */ build(options: PlaneGeometryOptions): void; } /** * Constructor options used for `PerspectivePlaneGeometry` instances. * @memberof scene */ export interface PerspectivePlaneGeometryOptions extends PlaneGeometryOptions { /** The width of the plane */ width: number; /** The height of the plane */ height: number; } /** * A PerspectivePlaneGeometry allows you to draw a 2d plane with perspective. Where ever you move the corners * the texture will be projected to look like it is in 3d space. Great for mapping a 2D mesh into a 3D scene. 
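Complementing the example above, the `GeometryPathOptions` object form declared earlier lets you pass a texture matrix along with the path. A small sketch, assuming `buildGeometryFromPath` and `GraphicsPath` are importable from the package entry as declared in this bundle:

```ts
import { GraphicsPath, Matrix, Mesh, Texture, buildGeometryFromPath } from 'pixi.js';

// Describe the outline with the GraphicsPath API...
const path = new GraphicsPath().rect(0, 0, 100, 100);

// ...then turn it into a MeshGeometry, adjusting the generated UVs via the
// optional textureMatrix (the 0.5 scale here is purely illustrative).
const geometry = buildGeometryFromPath({
    path,
    textureMatrix: new Matrix().scale(0.5, 0.5),
});

const mesh = new Mesh({ geometry, texture: Texture.WHITE });
```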
* * IMPORTANT: This is not a full 3D mesh, it is a 2D mesh with a perspective projection applied to it :) * * ```js * const perspectivePlaneGeometry = new PerspectivePlaneGeometry({ * width: 100, * height: 100, * verticesX: 10, * verticesY: 10, * }); * ``` * @see {@link scene.PerspectivePlaneGeometry} * @memberof scene */ export declare class PerspectivePlaneGeometry extends PlaneGeometry { /** The corner points of the quad. You can modify these directly; if you do, make sure to call `updateProjection`. */ corners: [ number, number, number, number, number, number, number, number ]; private readonly _projectionMatrix; /** * @param options - Options to be applied to PerspectivePlaneGeometry * @param options.width - The width of the plane * @param options.height - The height of the plane * @param options.verticesX - The amount of vertices on the x axis * @param options.verticesY - The amount of vertices on the y axis */ constructor(options: PerspectivePlaneGeometryOptions); /** * Will set the corners of the quad to the given coordinates, * calculating the perspective so it looks correct! * @param x0 - x coordinate of the first corner * @param y0 - y coordinate of the first corner * @param x1 - x coordinate of the second corner * @param y1 - y coordinate of the second corner * @param x2 - x coordinate of the third corner * @param y2 - y coordinate of the third corner * @param x3 - x coordinate of the fourth corner * @param y3 - y coordinate of the fourth corner */ setCorners(x0: number, y0: number, x1: number, y1: number, x2: number, y2: number, x3: number, y3: number): void; /** Update the projection matrix based on the corners */ updateProjection(): void; } /** * Constructor options used for `MeshPlane` instances. * ```js * const meshPlane = new MeshPlane({ * texture: Texture.from('snake.png'), * verticesX: 20, * verticesY: 20, * }); * ``` * @see {@link scene.MeshPlane} * @memberof scene */ export interface MeshPlaneOptions extends Omit { /** The texture to use on the plane. */ texture: Texture; /** The number of vertices in the x-axis */ verticesX?: number; /** The number of vertices in the y-axis */ verticesY?: number; } /** * The MeshPlane allows you to draw a texture across several points and then manipulate these points. * @example * import { MeshPlane, Texture } from 'pixi.js'; * * const plane = new MeshPlane({ texture: Texture.from('snake.png'), verticesX: 20, verticesY: 20 }); * @memberof scene */ export declare class MeshPlane extends Mesh { /** The geometry is automatically updated when the texture size changes. */ autoResize: boolean; protected _textureID: number; /** * @param options - Options to be applied to MeshPlane */ constructor(options: MeshPlaneOptions); /** * Method used for overrides, to do something when the texture frame changes. * Meshes based on plane can override it and change more details based on texture. */ textureUpdated(): void; set texture(value: Texture); /** The texture of the MeshPlane */ get texture(): Texture; /** * Destroys this sprite renderable and optionally its texture. * @param options - Options parameter. 
A boolean will act as if all options * have been set to that value * @param {boolean} [options.texture=false] - Should it destroy the current texture of the renderable as well * @param {boolean} [options.textureSource=false] - Should it destroy the textureSource of the renderable as well */ destroy(options?: DestroyOptions): void; } /** * Constructor options used for `PerspectiveMesh` instances. * ```js * const meshPlane = new PerspectiveMesh({ * texture: Texture.from('snake.png'), * verticesX: 20, * verticesY: 20, * x0: 0, * y0: 0, * x1: 100, * y1: 0, * x2: 100, * y2: 100, * x3: 0, * y3: 100 * }); * ``` * @see {@link scene.PerspectiveMesh} * @memberof scene */ export interface PerspectivePlaneOptions extends MeshPlaneOptions { /** top left corner x value */ x0?: number; /** top left corner y value */ y0?: number; /** top right corner x value */ x1?: number; /** top right corner y value */ y1?: number; /** bottom right corner x value */ x2?: number; /** bottom right corner y value */ y2?: number; /** bottom left corner x value */ x3?: number; /** bottom left corner y value */ y3?: number; } /** * A perspective mesh that allows you to draw a 2d plane with perspective. Wherever you move the corners, * the texture will be projected to look like it is in 3d space. Great for mapping a 2D mesh into a 3D scene. * * The calculation is done at the UV level. This means that the more vertices you have, the smoother * the perspective will be. If you have a low number of vertices you may see the texture stretch. Too many vertices * could be slower. It is a balance between performance and quality! We leave that to you to decide. * * IMPORTANT: This is not a full 3D mesh, it is a 2D mesh with a perspective projection applied to it :) * @example * ```js * const meshPlane = new PerspectiveMesh({ * texture: Texture.from('snake.png'), * verticesX: 20, * verticesY: 20, * x0: 0, * y0: 0, * x1: 100, * y1: 0, * x2: 100, * y2: 100, * x3: 0, * y3: 100 * }); * ``` * @see {@link scene.PerspectiveMesh} * @memberof scene */ export declare class PerspectiveMesh extends Mesh { /** default options for the mesh */ static defaultOptions: PerspectivePlaneOptions; /** * @param options - Options to be applied to PerspectiveMesh */ constructor(options: PerspectivePlaneOptions); /** Update the geometry when the texture is updated */ protected textureUpdated(): void; set texture(value: Texture); /** The texture that the mesh uses */ get texture(): Texture; /** * Set the corners of the quad to the given coordinates. * The mesh will then calculate the perspective so it looks correct! 
* @param x0 - x coordinate of the first corner * @param y0 - y coordinate of the first corner * @param x1 - x coordinate of the second corner * @param y1 - y coordinate of the second corner * @param x2 - x coordinate of the third corner * @param y2 - y coordinate of the third corner * @param x3 - x coordinate of the fourth corner * @param y3 - y coordinate of the fourth corner */ setCorners(x0: number, y0: number, x1: number, y1: number, x2: number, y2: number, x3: number, y3: number): void; } /** * Apply a projective transformation to a plane geometry * @param width - The width of the plane * @param height - The height of the plane * @param geometry - The plane geometry to apply the transformation to * @param transformationMatrix - The transformation matrix to apply */ export declare function applyProjectiveTransformationToPlane(width: number, height: number, geometry: PlaneGeometry, transformationMatrix: ArrayFixed): void; type Matrix3x3 = ArrayFixed; /** * Compute a 2D projection matrix * @param out - The matrix to store the result in * @param x1s - The x coordinate of the first source point * @param y1s - The y coordinate of the first source point * @param x1d - The x coordinate of the first destination point * @param y1d - The y coordinate of the first destination point * @param x2s - The x coordinate of the second source point * @param y2s - The y coordinate of the second source point * @param x2d - The x coordinate of the second destination point * @param y2d - The y coordinate of the second destination point * @param x3s - The x coordinate of the third source point * @param y3s - The y coordinate of the third source point * @param x3d - The x coordinate of the third destination point * @param y3d - The y coordinate of the third destination point * @param x4s - The x coordinate of the fourth source point * @param y4s - The y coordinate of the fourth source point * @param x4d - The x coordinate of the fourth destination point * @param y4d - The y coordinate of the fourth destination point * @returns - The computed 2D projection matrix * @private */ export declare function compute2DProjection(out: Matrix3x3, x1s: number, y1s: number, x1d: number, y1d: number, x2s: number, y2s: number, x2d: number, y2d: number, x3s: number, y3s: number, x3d: number, y3d: number, x4s: number, y4s: number, x4d: number, y4d: number): Matrix3x3; /** * Constructor options used for `MeshRope` instances. * ```js * const meshRope = new MeshRope({ * texture: Texture.from('snake.png'), * points: [new Point(0, 0), new Point(100, 0)], * textureScale: 0, * }); * ``` * @see {@link scene.MeshRope} * @memberof scene */ export interface MeshRopeOptions extends Omit { /** The texture to use on the rope. */ texture: Texture; /** An array of points that determine the rope. */ points: PointData[]; /** * Rope texture scale, if zero then the rope texture is stretched. * Positive values scale rope texture * keeping its aspect ratio. You can reduce alpha channel artifacts by providing a larger texture * and downsampling here. If set to zero, texture will be stretched instead. 
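A short sketch of driving the perspective through `setCorners`, using only the options and methods declared above (`Texture.WHITE` stands in for a real texture):

```ts
import { PerspectiveMesh, Texture } from 'pixi.js';

const card = new PerspectiveMesh({
    texture: Texture.WHITE, // placeholder; use a loaded texture in practice
    verticesX: 20,          // more vertices = smoother perspective, higher cost
    verticesY: 20,
});

// Pull the right edge inwards to fake a vanishing point on the right side.
card.setCorners(
    0, 0,       // top-left
    180, 30,    // top-right
    180, 170,   // bottom-right
    0, 200,     // bottom-left
);
```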
*/ textureScale?: number; } /** * The rope allows you to draw a texture across several points and then manipulate these points. * @example * import { MeshRope, Point, Texture } from 'pixi.js'; * * const points = []; * for (let i = 0; i < 20; i++) { * points.push(new Point(i * 50, 0)); * } * const rope = new MeshRope({ texture: Texture.from('snake.png'), points }); * @memberof scene */ export declare class MeshRope extends Mesh { static defaultOptions: Partial; /** Re-calculate vertices from the rope points each frame. */ autoUpdate: boolean; /** * Note: The wrap mode of the texture is set to REPEAT if `textureScale` is positive. * @param options * @param options.texture - The texture to use on the rope. * @param options.points - An array of {@link math.Point} objects to construct this rope. * @param {number} options.textureScale - Optional. Positive values scale rope texture * keeping its aspect ratio. You can reduce alpha channel artifacts by providing a larger texture * and downsampling here. If set to zero, texture will be stretched instead. */ constructor(options: MeshRopeOptions); private _render; } /** * Options for the simple mesh. * @memberof scene */ export interface SimpleMeshOptions extends Omit { /** The texture to use */ texture: Texture; /** if you want to specify the vertices */ vertices?: Float32Array; /** if you want to specify the uvs */ uvs?: Float32Array; /** if you want to specify the indices */ indices?: Uint32Array; /** the topology, can be any of the Topology values */ topology?: Topology; } /** * The Simple Mesh class mimics Mesh in PixiJS, providing easy-to-use constructor arguments. * For more robust customization, use {@link scene.Mesh}. * @memberof scene */ export declare class MeshSimple extends Mesh { /** Upload vertices buffer each frame. */ autoUpdate: boolean; /** * @param options - Options to be used for construction */ constructor(options: SimpleMeshOptions); /** * Collection of vertices data. * @type {Float32Array} */ get vertices(): TypedArray; set vertices(value: TypedArray); private _render; } /** * Constructor options used for `RopeGeometry` instances. * ```js * const ropeGeometry = new RopeGeometry({ * points: [new Point(0, 0), new Point(100, 0)], * width: 10, * textureScale: 0, * }); * ``` * @see {@link scene.RopeGeometry} * @memberof scene */ export interface RopeGeometryOptions { /** The width (i.e., thickness) of the rope. */ width?: number; /** An array of points that determine the rope. */ points?: PointData[]; /** * Rope texture scale, if zero then the rope texture is stretched. * By default the rope texture will be stretched to match * rope length. If textureScale is positive this value will be treated as a scaling * factor and the texture will preserve its aspect ratio instead. To create a tiling rope * set baseTexture.wrapMode to 'repeat' and use a power of two texture, * then set textureScale=1 to keep the original texture pixel size. * In order to reduce alpha channel artifacts provide a larger texture and downsample - * i.e. set textureScale=0.5 to scale it down twice. */ textureScale?: number; } /** * RopeGeometry allows you to draw a geometry across several points and then manipulate these points. * @example * import { Point, RopeGeometry } from 'pixi.js'; * * const points = []; * for (let i = 0; i < 20; i++) { * points.push(new Point(i * 50, 0)); * } * const ropeGeometry = new RopeGeometry({ width: 100, points }); * @memberof scene */ export declare class RopeGeometry extends MeshGeometry { /** Default options for RopeGeometry constructor. 
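Because `autoUpdate` re-reads the rope points every frame, animating a `MeshRope` is just a matter of mutating the same `Point` instances from a ticker. A sketch using only the options declared above (the texture is a placeholder):

```ts
import { MeshRope, Point, Texture, Ticker } from 'pixi.js';

const points = Array.from({ length: 20 }, (_, i) => new Point(i * 50, 0));
const rope = new MeshRope({ texture: Texture.WHITE, points });

let time = 0;

Ticker.shared.add((ticker) =>
{
    time += ticker.deltaTime * 0.1;

    // Wiggle the control points; autoUpdate (default true) re-uploads the vertices.
    points.forEach((point, i) => { point.y = Math.sin(i * 0.5 + time) * 20; });
});
```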
*/ static defaultOptions: RopeGeometryOptions & MeshGeometryOptions; /** An array of points that determine the rope. */ points: PointData[]; /** Rope texture scale, if zero then the rope texture is stretched. */ readonly textureScale: number; /** * The width (i.e., thickness) of the rope. * @readonly */ _width: number; /** * @param options - Options to be applied to rope geometry */ constructor(options: RopeGeometryOptions); /** * The width (i.e., thickness) of the rope. * @readonly */ get width(): number; /** Refreshes Rope indices and uvs */ private _build; /** Refreshes vertices of the Rope mesh */ updateVertices(): void; /** Refreshes Rope indices and uvs */ update(): void; } /** * A MeshAdaptor that uses WebGL to render meshes. * @memberof rendering * @ignore */ export declare class GlMeshAdaptor implements MeshAdaptor { static extension: { readonly type: readonly [ ExtensionType.WebGLPipesAdaptor ]; readonly name: "mesh"; }; private _shader; init(): void; execute(meshPipe: MeshPipe, mesh: Mesh): void; destroy(): void; } /** * The WebGPU adaptor for the mesh system. Allows the Mesh System to be used with the WebGPU renderer * @memberof rendering * @ignore */ export declare class GpuMeshAdapter implements MeshAdaptor { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUPipesAdaptor ]; readonly name: "mesh"; }; private _shader; init(): void; execute(meshPipe: MeshPipe, mesh: Mesh): void; destroy(): void; } /** * A batchable mesh object. * @ignore */ export declare class BatchableMesh implements DefaultBatchableMeshElement { batcherName: string; readonly packAsQuad = false; location: number; renderable: ViewContainer; indexOffset: number; attributeOffset: number; texture: Texture; geometry: MeshGeometry; transform: Matrix; roundPixels: 0 | 1; _attributeStart: number; _batcher: Batcher; _batch: Batch; _indexStart: number; _textureId: number; private _transformedUvs; private _uvUpdateId; private _textureMatrixUpdateId; get blendMode(): BLEND_MODES; reset(): void; get uvs(): Float32Array; get positions(): Float32Array; get indices(): Uint32Array; get color(): number; get groupTransform(): Matrix; get attributeSize(): number; get indexSize(): number; } export declare function getTextureDefaultMatrix(texture: Texture, out: Matrix): Matrix; export type AnimatedSpriteFrames = Texture[] | FrameObject[]; /** * Constructor options used for `AnimatedSprite` instances. * @see {@link scene.AnimatedSprite} * @memberof scene */ export interface AnimatedSpriteOptions extends SpriteOptions { /** An array of {@link Texture} or frame objects that make up the animation. */ textures: AnimatedSpriteFrames; /** Whether to use Ticker.shared to auto update animation time. */ autoUpdate?: boolean; } /** * An AnimatedSprite is a simple way to display an animation depicted by a list of textures. 
* * ```js * import { AnimatedSprite, Texture } from 'pixi.js'; * * const alienImages = [ * 'image_sequence_01.png', * 'image_sequence_02.png', * 'image_sequence_03.png', * 'image_sequence_04.png', * ]; * const textureArray = []; * * for (let i = 0; i < 4; i++) * { * const texture = Texture.from(alienImages[i]); * textureArray.push(texture); * } * * const animatedSprite = new AnimatedSprite(textureArray); * ``` * * The more efficient and simpler way to create an animated sprite is using a {@link Spritesheet} * containing the animation definitions: * @example * import { AnimatedSprite, Assets } from 'pixi.js'; * * const sheet = await Assets.load('assets/spritesheet.json'); * animatedSprite = new AnimatedSprite(sheet.animations['image_sequence']); * @memberof scene */ export declare class AnimatedSprite extends Sprite { /** * The speed that the AnimatedSprite will play at. Higher is faster, lower is slower. * @default 1 */ animationSpeed: number; /** * Whether or not the animate sprite repeats after playing. * @default true */ loop: boolean; /** * Update anchor to [Texture's defaultAnchor]{@link Texture#defaultAnchor} when frame changes. * * Useful with [sprite sheet animations]{@link Spritesheet#animations} created with tools. * Changing anchor for each frame allows to pin sprite origin to certain moving feature * of the frame (e.g. left foot). * * Note: Enabling this will override any previously set `anchor` on each frame change. * @default false */ updateAnchor: boolean; /** * User-assigned function to call when an AnimatedSprite finishes playing. * @example * animation.onComplete = () => { * // Finished! * }; */ onComplete?: () => void; /** * User-assigned function to call when an AnimatedSprite changes which texture is being rendered. * @example * animation.onFrameChange = () => { * // Updated! * }; */ onFrameChange?: (currentFrame: number) => void; /** * User-assigned function to call when `loop` is true, and an AnimatedSprite is played and * loops around to start again. * @example * animation.onLoop = () => { * // Looped! * }; */ onLoop?: () => void; private _playing; private _textures; private _durations; /** * `true` uses Ticker.shared to auto update animation time. * @default true */ private _autoUpdate; /** * `true` if the instance is currently connected to Ticker.shared to auto update animation time. * @default false */ private _isConnectedToTicker; /** Elapsed time since animation has been started, used internally to display current texture. */ private _currentTime; /** The texture index that was displayed last time. */ private _previousFrame; /** * @param frames - Collection of textures or frames to use. * @param autoUpdate - Whether to use Ticker.shared to auto update animation time. */ constructor(frames: AnimatedSpriteFrames, autoUpdate?: boolean); /** * @param options - The options for the AnimatedSprite. */ constructor(options: AnimatedSpriteOptions); /** Stops the AnimatedSprite. */ stop(): void; /** Plays the AnimatedSprite. */ play(): void; /** * Stops the AnimatedSprite and goes to a specific frame. * @param frameNumber - Frame index to stop at. */ gotoAndStop(frameNumber: number): void; /** * Goes to a specific frame and begins playing the AnimatedSprite. * @param frameNumber - Frame index to start at. */ gotoAndPlay(frameNumber: number): void; /** * Updates the object transform for rendering. * @param ticker - the ticker to use to update the object. */ update(ticker: Ticker): void; /** Updates the displayed texture to match the current frame index. 
*/ private _updateTexture; /** Stops the AnimatedSprite and destroys it. */ destroy(): void; /** * A short hand way of creating an AnimatedSprite from an array of frame ids. * @param frames - The array of frames ids the AnimatedSprite will use as its texture frames. * @returns - The new animated sprite with the specified frames. */ static fromFrames(frames: string[]): AnimatedSprite; /** * A short hand way of creating an AnimatedSprite from an array of image ids. * @param images - The array of image urls the AnimatedSprite will use as its texture frames. * @returns The new animate sprite with the specified images as frames. */ static fromImages(images: string[]): AnimatedSprite; /** * The total number of frames in the AnimatedSprite. This is the same as number of textures * assigned to the AnimatedSprite. * @readonly * @default 0 */ get totalFrames(): number; /** The array of textures used for this AnimatedSprite. */ get textures(): AnimatedSpriteFrames; set textures(value: AnimatedSpriteFrames); /** The AnimatedSprite's current frame index. */ get currentFrame(): number; set currentFrame(value: number); /** * Indicates if the AnimatedSprite is currently playing. * @readonly */ get playing(): boolean; /** Whether to use Ticker.shared to auto update animation time. */ get autoUpdate(): boolean; set autoUpdate(value: boolean); } /** * A reference to a frame in an {@link scene.AnimatedSprite} * @memberof scene */ export interface FrameObject { /** The {@link Texture} of the frame. */ texture: Texture; /** The duration of the frame, in milliseconds. */ time: number; } /** * Options for the NineSliceGeometry. * @memberof scene */ export interface NineSliceGeometryOptions { /** The width of the NineSlicePlane, setting this will actually modify the vertices and UV's of this plane. */ width?: number; /** The height of the NineSlicePlane, setting this will actually modify the vertices and UV's of this plane. */ height?: number; /** The original width of the texture */ originalWidth?: number; /** The original height of the texture */ originalHeight?: number; /** The width of the left column. */ leftWidth?: number; /** The height of the top row. */ topHeight?: number; /** The width of the right column. */ rightWidth?: number; /** The height of the bottom row. */ bottomHeight?: number; } /** * The NineSliceGeometry class allows you to create a NineSlicePlane object. * @memberof scene */ export declare class NineSliceGeometry extends PlaneGeometry { /** The default options for the NineSliceGeometry. */ static defaultOptions: NineSliceGeometryOptions; _leftWidth: number; _rightWidth: number; _topHeight: number; _bottomHeight: number; private _originalWidth; private _originalHeight; constructor(options?: NineSliceGeometryOptions); /** * Updates the NineSliceGeometry with the options. * @param options - The options of the NineSliceGeometry. */ update(options: NineSliceGeometryOptions): void; /** Updates the positions of the vertices. */ updatePositions(): void; /** Updates the UVs of the vertices. */ updateUvs(): void; } /** * Constructor options used for `NineSliceSprite` instances. * ```js * const nineSliceSprite = new NineSliceSprite({ * texture: Texture.from('button.png'), * leftWidth: 20, * topHeight: 20, * rightWidth: 20, * bottomHeight: 20, * }); * ``` * @see {@link scene.NineSliceSprite} * @memberof scene */ export interface NineSliceSpriteOptions extends ContainerOptions { /** The texture to use on the NineSliceSprite. 
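The `FrameObject` form above allows per-frame timing. A sketch combining it with the `AnimatedSpriteOptions` constructor and the `onComplete` callback (the texture names are placeholders for frames already loaded, e.g. via `Assets.load`):

```ts
import { AnimatedSprite, Texture } from 'pixi.js';

// Each frame carries its own duration in milliseconds.
const frames = [
    { texture: Texture.from('walk_01.png'), time: 120 },
    { texture: Texture.from('walk_02.png'), time: 80 },
    { texture: Texture.from('walk_03.png'), time: 200 },
];

const anim = new AnimatedSprite({ textures: frames, autoUpdate: true });

anim.loop = false;
anim.onComplete = () => console.log('finished');
anim.play();
```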
*/ texture: Texture; /** Width of the left vertical bar (A) */ leftWidth?: number; /** Height of the top horizontal bar (C) */ topHeight?: number; /** Width of the right vertical bar (B) */ rightWidth?: number; /** Height of the bottom horizontal bar (D) */ bottomHeight?: number; /** Width of the NineSliceSprite, setting this will actually modify the vertices and not the UV's of this plane. */ width?: number; /** Height of the NineSliceSprite, setting this will actually modify the vertices and not UV's of this plane. */ height?: number; /** Whether or not to round the x/y position. */ roundPixels?: boolean; } /** * The NineSliceSprite allows you to stretch a texture using 9-slice scaling. The corners will remain unscaled (useful * for buttons with rounded corners for example) and the other areas will be scaled horizontally and/or vertically * * <pre>
 *      A                          B
 *    +---+----------------------+---+
 *  C | 1 |          2           | 3 |
 *    +---+----------------------+---+
 *    |   |                      |   |
 *    | 4 |          5           | 6 |
 *    |   |                      |   |
 *    +---+----------------------+---+
 *  D | 7 |          8           | 9 |
 *    +---+----------------------+---+
 *  When changing this objects width and/or height:
 *     areas 1 3 7 and 9 will remain unscaled.
 *     areas 2 and 8 will be stretched horizontally
 *     areas 4 and 6 will be stretched vertically
 *     area 5 will be stretched both horizontally and vertically
 * 
* @example * import { NineSliceSprite, Texture } from 'pixi.js'; * * const plane9 = new NineSliceSprite({ * texture: Texture.from('BoxWithRoundedCorners.png'), * leftWidth: 15, * topHeight: 15, * rightWidth: 15, * bottomHeight: 15, * }); * @memberof scene */ export declare class NineSliceSprite extends ViewContainer implements View { /** The default options, used to override the initial values of any options passed in the constructor. */ static defaultOptions: NineSliceSpriteOptions; readonly renderPipeId: string; _texture: Texture; batched: boolean; private _leftWidth; private _topHeight; private _rightWidth; private _bottomHeight; private _width; private _height; _didSpriteUpdate: boolean; /** * @param {scene.NineSliceSpriteOptions|Texture} options - Options to use * @param options.texture - The texture to use on the NineSliceSprite. * @param options.leftWidth - Width of the left vertical bar (A) * @param options.topHeight - Height of the top horizontal bar (C) * @param options.rightWidth - Width of the right vertical bar (B) * @param options.bottomHeight - Height of the bottom horizontal bar (D) * @param options.width - Width of the NineSliceSprite, * setting this will actually modify the vertices and not the UV's of this plane. * @param options.height - Height of the NineSliceSprite, * setting this will actually modify the vertices and not UV's of this plane. */ constructor(options: NineSliceSpriteOptions | Texture); /** The local bounds of the view. */ get bounds(): BoundsData; /** The width of the NineSliceSprite, setting this will actually modify the vertices and UV's of this plane. */ get width(): number; set width(value: number); /** The height of the NineSliceSprite, setting this will actually modify the vertices and UV's of this plane. */ get height(): number; set height(value: number); /** * Sets the size of the NineSliceSprite to the specified width and height. * Setting this will actually modify the vertices and UV's of this plane. * This is faster than setting the width and height separately. * @param value - This can be either a number or a [Size]{@link Size} object. * @param height - The height to set. Defaults to the value of `width` if not provided. */ setSize(value: number | Optional, height?: number): void; /** * Retrieves the size of the NineSliceSprite as a [Size]{@link Size} object. * This is faster than getting the width and height separately. * @param out - Optional object to store the size in. * @returns - The size of the NineSliceSprite. */ getSize(out?: Size): Size; /** The width of the left column (A) of the NineSliceSprite. */ get leftWidth(): number; set leftWidth(value: number); /** The height of the top row (C) of the NineSliceSprite. */ get topHeight(): number; set topHeight(value: number); /** The width of the right column (B) of the NineSliceSprite. */ get rightWidth(): number; set rightWidth(value: number); /** The height of the bottom row (D) of the NineSliceSprite. */ get bottomHeight(): number; set bottomHeight(value: number); /** The texture that the NineSliceSprite is using. */ get texture(): Texture; set texture(value: Texture); /** The original width of the texture */ get originalWidth(): number; /** The original height of the texture */ get originalHeight(): number; protected onViewUpdate(): void; /** * Adds the bounds of this object to the bounds object. * @param bounds - The output bounds object. */ addBounds(bounds: Bounds): void; /** * Destroys this sprite renderable and optionally its texture. * @param options - Options parameter. 
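A usage sketch matching the `NineSliceSpriteOptions` shape and the 9-slice diagram above; resizing only stretches the middle areas while the 15px corners stay crisp ('button.png' is a placeholder for a loaded texture):

```ts
import { NineSliceSprite, Texture } from 'pixi.js';

const button = new NineSliceSprite({
    texture: Texture.from('button.png'), // placeholder for an already-loaded texture
    leftWidth: 15,
    topHeight: 15,
    rightWidth: 15,
    bottomHeight: 15,
});

// Areas 1, 3, 7 and 9 keep their 15px size; 2, 4, 5, 6 and 8 stretch.
button.setSize(300, 80);
```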
A boolean will act as if all options * have been set to that value * @param {boolean} [options.texture=false] - Should it destroy the current texture of the renderable as well * @param {boolean} [options.textureSource=false] - Should it destroy the textureSource of the renderable as well */ destroy(options?: DestroyOptions): void; } /** * Please use the `NineSliceSprite` class instead. * @deprecated since 8.0.0 * @memberof scene */ export declare class NineSlicePlane extends NineSliceSprite { constructor(options: NineSliceSpriteOptions | Texture); /** @deprecated since 8.0.0 */ constructor(texture: Texture, leftWidth: number, topHeight: number, rightWidth: number, bottomHeight: number); } export declare class NineSliceSpritePipe implements RenderPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "nineSliceSprite"; }; private readonly _renderer; private readonly _gpuSpriteHash; private readonly _destroyRenderableBound; constructor(renderer: Renderer); addRenderable(sprite: NineSliceSprite, instructionSet: InstructionSet): void; updateRenderable(sprite: NineSliceSprite): void; validateRenderable(sprite: NineSliceSprite): boolean; destroyRenderable(sprite: NineSliceSprite): void; private _updateBatchableSprite; private _getGpuSprite; private _initGPUSprite; destroy(): void; } export declare const tilingBit: { name: string; vertex: { header: string; main: string; }; fragment: { header: string; main: string; }; }; export declare const tilingBitGl: { name: string; vertex: { header: string; main: string; }; fragment: { header: string; main: string; }; }; export declare class TilingSpriteShader extends Shader { constructor(); updateUniforms(width: number, height: number, matrix: Matrix, anchorX: number, anchorY: number, texture: Texture): void; } export declare function applyMatrix(array: TypedArray, stride: number, offset: number, matrix: Matrix): void; export declare class QuadGeometry extends MeshGeometry { constructor(); } export declare function setPositions(tilingSprite: TilingSprite, positions: Float32Array): void; export declare function setUvs(tilingSprite: TilingSprite, uvs: Float32Array): void; /** * Measurements of a font: its ascent, descent and font size. * @memberof text * @typedef {object} FontMetrics * @property {number} ascent - Font ascent * @property {number} descent - Font descent * @property {number} fontSize - Font size */ export interface FontMetrics { ascent: number; descent: number; fontSize: number; } /** * The CanvasTextMetrics object represents the measurement of a block of text with a specified style. * @example * import { CanvasTextMetrics, TextStyle } from 'pixi.js'; * * const style = new TextStyle({ * fontFamily: 'Arial', * fontSize: 24, * fill: 0xff1010, * align: 'center', * }); * const textMetrics = CanvasTextMetrics.measureText('Your text', style); * @memberof text */ export declare class CanvasTextMetrics { /** The text that was measured. */ text: string; /** The style that was measured. */ style: TextStyle; /** The measured width of the text. */ width: number; /** The measured height of the text. */ height: number; /** An array of lines of the text broken by new lines and wrapping if specified in style. */ lines: string[]; /** An array of the line widths for each line matched to `lines`. */ lineWidths: number[]; /** The measured line height for this style. */ lineHeight: number; /** The maximum line width for all measured lines. 
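A sketch of reading the per-line results of `CanvasTextMetrics.measureText`, using only the fields declared above:

```ts
import { CanvasTextMetrics, TextStyle } from 'pixi.js';

const style = new TextStyle({
    fontFamily: 'Arial',
    fontSize: 24,
    wordWrap: true,
    wordWrapWidth: 200,
});

const metrics = CanvasTextMetrics.measureText('The quick brown fox jumps over the lazy dog', style);

// Each wrapped line and its measured width, plus the shared line height.
metrics.lines.forEach((line, i) =>
{
    console.log(`"${line}" -> ${metrics.lineWidths[i]}px (line height: ${metrics.lineHeight}px)`);
});
```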
*/ maxLineWidth: number; /** The font properties object from TextMetrics.measureFont. */ fontProperties: FontMetrics; /** * String used to calculate font metrics. * These characters are all tall to help calculate the height required for text. */ static METRICS_STRING: string; /** Baseline symbol used to calculate font metrics. */ static BASELINE_SYMBOL: string; /** Baseline multiplier used to calculate font metrics. */ static BASELINE_MULTIPLIER: number; /** Height multiplier for setting height of canvas to calculate font metrics. */ static HEIGHT_MULTIPLIER: number; /** * A Unicode "character", or "grapheme cluster", can be composed of multiple Unicode code points, * such as letters with diacritical marks (e.g. `'\u0065\u0301'`, letter e with acute) * or emojis with modifiers (e.g. `'\uD83E\uDDD1\u200D\uD83D\uDCBB'`, technologist). * The new `Intl.Segmenter` API in ES2022 can split the string into grapheme clusters correctly. If it is not available, * PixiJS will fall back to the iterator of String, which can only split the string into code points. * If you want to get full functionality in environments that don't support `Intl.Segmenter` (such as Firefox), * you can use other libraries such as [grapheme-splitter]{@link https://www.npmjs.com/package/grapheme-splitter} * or [graphemer]{@link https://www.npmjs.com/package/graphemer} to create a polyfill. Since these libraries can be * relatively large in size to handle various Unicode grapheme clusters properly, PixiJS won't use them directly. */ static graphemeSegmenter: (s: string) => string[]; static _experimentalLetterSpacingSupported?: boolean; /** * Checking that we can use modern canvas 2D API. * * Note: This is an unstable API, Chrome < 94 uses `textLetterSpacing`, later versions use `letterSpacing`. * @see TextMetrics.experimentalLetterSpacing * @see https://developer.mozilla.org/en-US/docs/Web/API/ICanvasRenderingContext2D/letterSpacing * @see https://developer.chrome.com/origintrials/#/view_trial/3585991203293757441 */ static get experimentalLetterSpacingSupported(): boolean; /** * New rendering behavior for letter-spacing which uses Chrome's new native API. This will * lead to more accurate letter-spacing results because it does not try to manually draw * each character. However, this Chrome API is experimental and may not serve all cases yet. * @see TextMetrics.experimentalLetterSpacingSupported */ static experimentalLetterSpacing: boolean; /** Cache of {@see TextMetrics.FontMetrics} objects. */ private static _fonts; /** Cache of new line chars. */ private static readonly _newlines; /** Cache of breaking spaces. 
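Following on from the `graphemeSegmenter` note above, a sketch of installing one of the suggested polyfills when `Intl.Segmenter` is unavailable. `graphemer`'s `splitGraphemes` API is assumed from that package's documentation:

```ts
import Graphemer from 'graphemer'; // one of the polyfill libraries suggested above
import { CanvasTextMetrics } from 'pixi.js';

// Only install the fallback where the native segmenter is missing.
if (typeof Intl === 'undefined' || !('Segmenter' in Intl))
{
    const splitter = new Graphemer();

    CanvasTextMetrics.graphemeSegmenter = (s: string) => splitter.splitGraphemes(s);
}
```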
*/ private static readonly _breakingSpaces; private static __canvas; private static __context; private static readonly _measurementCache; /** * @param text - the text that was measured * @param style - the style that was measured * @param width - the measured width of the text * @param height - the measured height of the text * @param lines - an array of the lines of text broken by new lines and wrapping if specified in style * @param lineWidths - an array of the line widths for each line matched to `lines` * @param lineHeight - the measured line height for this style * @param maxLineWidth - the maximum line width for all measured lines * @param {FontMetrics} fontProperties - the font properties object from TextMetrics.measureFont */ constructor(text: string, style: TextStyle, width: number, height: number, lines: string[], lineWidths: number[], lineHeight: number, maxLineWidth: number, fontProperties: FontMetrics); /** * Measures the supplied string of text and returns a Rectangle. * @param text - The text to measure. * @param style - The text style to use for measuring * @param canvas - optional specification of the canvas to use for measuring. * @param wordWrap * @returns Measured width and height of the text. */ static measureText(text: string, style: TextStyle, canvas?: ICanvas, wordWrap?: boolean): CanvasTextMetrics; private static _measureText; /** * Applies newlines to a string to have it optimally fit into the horizontal * bounds set by the Text object's wordWrapWidth property. * @param text - String to apply word wrapping to * @param style - the style to use when wrapping * @param canvas - optional specification of the canvas to use for measuring. * @returns New string with new lines applied where required */ private static _wordWrap; /** * Convenience function for logging each line added during the wordWrap method. * @param line - The line of text to add * @param newLine - Add new line character to end * @returns A formatted line */ private static _addLine; /** * Gets & sets the widths of calculated characters in a cache object * @param key - The key * @param letterSpacing - The letter spacing * @param cache - The cache * @param context - The canvas context * @returns The from cache. */ private static _getFromCache; /** * Determines whether we should collapse breaking spaces. * @param whiteSpace - The TextStyle property whiteSpace * @returns Should collapse */ private static _collapseSpaces; /** * Determines whether we should collapse newLine chars. * @param whiteSpace - The white space * @returns should collapse */ private static _collapseNewlines; /** * Trims breaking whitespaces from string. * @param text - The text * @returns Trimmed string */ private static _trimRight; /** * Determines if char is a newline. * @param char - The character * @returns True if newline, False otherwise. */ private static _isNewline; /** * Determines if char is a breaking whitespace. * * It allows one to determine whether char should be a breaking whitespace * For example certain characters in CJK langs or numbers. * It must return a boolean. * @param char - The character * @param [_nextChar] - The next character * @returns True if whitespace, False otherwise. */ static isBreakingSpace(char: string, _nextChar?: string): boolean; /** * Splits a string into words, breaking-spaces and newLine characters * @param text - The text * @returns A tokenized array */ private static _tokenize; /** * Overridable helper method used internally by TextMetrics, exposed to allow customizing the class's behavior. 
* * It allows one to customise which words should break * Examples are if the token is CJK or numbers. * It must return a boolean. * @param _token - The token * @param breakWords - The style attr break words * @returns Whether to break word or not */ static canBreakWords(_token: string, breakWords: boolean): boolean; /** * Overridable helper method used internally by TextMetrics, exposed to allow customizing the class's behavior. * * It allows one to determine whether a pair of characters * should be broken by newlines * For example certain characters in CJK langs or numbers. * It must return a boolean. * @param _char - The character * @param _nextChar - The next character * @param _token - The token/word the characters are from * @param _index - The index in the token of the char * @param _breakWords - The style attr break words * @returns whether to break word or not */ static canBreakChars(_char: string, _nextChar: string, _token: string, _index: number, _breakWords: boolean): boolean; /** * Overridable helper method used internally by TextMetrics, exposed to allow customizing the class's behavior. * * It is called when a token (usually a word) has to be split into separate pieces * in order to determine the point to break a word. * It must return an array of characters. * @param token - The token to split * @returns The characters of the token * @see CanvasTextMetrics.graphemeSegmenter */ static wordWrapSplit(token: string): string[]; /** * Calculates the ascent, descent and fontSize of a given font-style * @param font - String representing the style of the font * @returns Font properties object */ static measureFont(font: string): FontMetrics; /** * Clear font metrics in metrics cache. * @param {string} [font] - font name. If font name not set then clear cache for all fonts. */ static clearMetrics(font?: string): void; /** * Cached canvas element for measuring text * TODO: this should be private, but isn't because of backward compat, will fix later. * @ignore */ static get _canvas(): ICanvas; /** * TODO: this should be private, but isn't because of backward compat, will fix later. * @ignore */ static get _context(): ICanvasRenderingContext2D; } /** @memberof text */ export interface CharData { /** Unique id of character */ id: number; /** x-offset to apply when rendering character */ xOffset: number; /** y-offset to apply when rendering character. */ yOffset: number; /** Advancement to apply to next character. */ xAdvance: number; /** The kerning values for this character. */ kerning: Record; /** The texture of the character. */ texture?: Texture; } /** * The raw data of a character in a bitmap font. * @memberof text */ export interface RawCharData extends Omit { /** The page of the font texture that the character is on. */ page: number; /** The x position of the character in the page. */ x: number; /** The y position of the character in the page. */ y: number; /** The width of the character in the page. */ width: number; /** The height of the character in the page. */ height: number; /** The letter of the character. */ letter: string; } /** * The raw data of a bitmap font. * @memberof text */ export interface BitmapFontData { /** The offset of the font face from the baseline. */ baseLineOffset: number; /** The map of characters by character code. */ chars: Record; /** The map of base page textures (i.e., sheets of glyphs). */ pages: { /** Unique id for bitmap texture */ id: number; /** File name */ file: string; }[]; /** The line-height of the font face in pixels. 
*/ lineHeight: number; /** The size of the font face in pixels. */ fontSize: number; /** The name of the font face. */ fontFamily: string; /** The range and type of the distance field for this font. */ distanceField?: { /** Type of distance field */ type: "sdf" | "msdf" | "none"; /** Range of the distance field in pixels */ range: number; }; } interface BitmapFontEvents { destroy: [ Type ]; } /** * An abstract representation of a bitmap font. * @memberof text */ export declare abstract class AbstractBitmapFont extends EventEmitter> implements Omit { /** The map of characters by character code. */ readonly chars: Record; /** * The line-height of the font face in pixels. * @type {number} */ readonly lineHeight: BitmapFontData["lineHeight"]; /** * The name of the font face. * @type {string} */ readonly fontFamily: BitmapFontData["fontFamily"]; /** The metrics of the font face. */ readonly fontMetrics: FontMetrics; /** * The offset of the font face from the baseline. * @type {number} */ readonly baseLineOffset: BitmapFontData["baseLineOffset"]; /** The range and type of the distance field for this font. */ readonly distanceField: BitmapFontData["distanceField"]; /** The map of base page textures (i.e., sheets of glyphs). */ readonly pages: { texture: Texture; }[]; /** Whether the fill for this font should be applied as a tint to the text. */ applyFillAsTint: boolean; /** The size of the font face in pixels. */ readonly baseMeasurementFontSize: number; protected baseRenderedFontSize: number; /** * The name of the font face. * @deprecated since 8.0.0 Use `fontFamily` instead. */ get font(): BitmapFontData["fontFamily"]; /** * The map of base page textures (i.e., sheets of glyphs). * @deprecated since 8.0.0 Use `pages` instead. */ get pageTextures(): AbstractBitmapFont["pages"]; /** * The size of the font face in pixels. * @deprecated since 8.0.0 Use `fontMetrics.fontSize` instead. */ get size(): BitmapFontData["fontSize"]; /** * The range of the distance field in pixels. * @deprecated since 8.0.0 Use `distanceField.range` instead. */ get distanceFieldRange(): NonNullable["range"]; /** * The kind of distance field for this font or "none". * @deprecated since 8.0.0 Use `distanceField.type` instead. */ get distanceFieldType(): NonNullable["type"]; destroy(destroyTextures?: boolean): void; } /** * Internal data format used to convert to BitmapFontData.
* @private */ export interface BitmapFontRawData { info: { face: string; size: string; }[]; common: { lineHeight: string; base: string; }[]; page: { id: string; file: string; }[]; chars: { count: number; }[]; char: { id: string; page: string; xoffset: string; yoffset: string; xadvance: string; x: string; y: string; width: string; height: string; letter?: string; char?: string; }[]; kernings?: { count: number; }[]; kerning?: { first: string; second: string; amount: string; }[]; distanceField?: { fieldType: "sdf" | "msdf" | "none"; distanceRange: string; }[]; } export declare const bitmapFontTextParser: { test(data: string | XMLDocument | BitmapFontData): boolean; parse(txt: string): BitmapFontData; }; export declare const bitmapFontXMLParser: { test(data: string | XMLDocument | BitmapFontData): boolean; parse(xml: Document): BitmapFontData; }; export declare const bitmapFontXMLStringParser: { test(data: string | XMLDocument | BitmapFontData): boolean; parse(data: string): BitmapFontData; }; export interface BitmapTextLayoutData { width: number; height: number; scale: number; offsetY: number; lines: { width: number; charPositions: number[]; chars: string[]; spaceWidth: number; spacesIndex: number[]; }[]; } export declare function getBitmapTextLayout(chars: string[], style: TextStyle, font: AbstractBitmapFont, trimEnd: boolean): BitmapTextLayoutData; /** * * The options for installing a new BitmapFont. Once installed, the font will be available for use in BitmapText. * It can be accessed via the `fontFamily` property of the TextStyle. * * Installing a new BitmapFont will create the characters provided for the font and store them in the cache. * But don't worry: if a character is requested that hasn't been generated yet, it will be created on the fly. * @memberof text */ export interface BitmapFontInstallOptions { /** The name of the font; this is the name you use in the `fontFamily` of a text style to access this font. */ name?: string; /** * Characters included in the font set. You can also use ranges. * For example, `[['a', 'z'], ['A', 'Z'], "!@#$%^&*()~{}[] "]`. * Don't forget to include spaces ' ' in your character set! * @default BitmapFont.ALPHANUMERIC */ chars?: string | (string | string[])[]; /** * Render resolution for glyphs. * @default 1 */ resolution?: number; /** * Padding between glyphs on the texture atlas. Lower values could mean more visual artifacts * and bleeding from other glyphs; larger values increase the space required on the texture. * @default 4 */ padding?: number; /** * Skip generation of kerning information for the BitmapFont. * If true, this could potentially increase performance, but may impact the rendered text appearance. * @default false */ skipKerning?: boolean; /** Style options to render the BitmapFont with. */ style?: TextStyle | TextStyleOptions; } declare class BitmapFontManagerClass { /** * This character set includes all the letters in the alphabet (both lower- and upper-case). * @type {string[][]} * @example * BitmapFont.from('ExampleFont', style, { chars: BitmapFont.ALPHA }) */ readonly ALPHA: (string | string[])[]; /** * This character set includes all decimal digits (from 0 to 9). * @type {string[][]} * @example * BitmapFont.from('ExampleFont', style, { chars: BitmapFont.NUMERIC }) */ readonly NUMERIC: string[][]; /** * This character set is the union of `BitmapFont.ALPHA` and `BitmapFont.NUMERIC`. * @type {string[][]} */ readonly ALPHANUMERIC: (string | string[])[]; /** * This character set consists of the entire ASCII table.
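 * A usage sketch in the spirit of the `ALPHA`/`NUMERIC` examples above (the font name and
 * `style` object here are placeholders):
 * @example
 * BitmapFontManager.install({ name: 'ExampleFont', style, chars: BitmapFontManager.ASCII });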
* @member {string[][]} * @see http://www.asciitable.com/ */ readonly ASCII: string[][]; /** Default options for installing a new BitmapFont. */ defaultOptions: Omit; /** * Get a font for the specified text and style. * @param text - The text to get the font for * @param style - The style to use */ getFont(text: string, style: TextStyle): BitmapFont; /** * Get the layout of a text for the specified style. * @param text - The text to get the layout for * @param style - The style to use * @param trimEnd - Whether to ignore whitespaces at the end of each line */ getLayout(text: string, style: TextStyle, trimEnd?: boolean): BitmapTextLayoutData; /** * Measure the text using the specified style. * @param text - The text to measure * @param style - The style to use * @param trimEnd - Whether to ignore whitespaces at the end of each line */ measureText(text: string, style: TextStyle, trimEnd?: boolean): { width: number; height: number; scale: number; offsetY: number; }; /** * Generates a bitmap-font for the given style and character set * @param options - Setup options for font generation. * @returns Font generated by style options. * @example * import { BitmapFontManager, BitmapText } from 'pixi.js'; * * BitmapFontManager.install('TitleFont', { * fontFamily: 'Arial', * fontSize: 12, * strokeThickness: 2, * fill: 'purple', * }); * * const title = new BitmapText({ text: 'This is the title', fontFamily: 'TitleFont' }); */ install(options: BitmapFontInstallOptions): BitmapFont; /** @deprecated since 7.0.0 */ install(name: string, style?: TextStyle | TextStyleOptions, options?: BitmapFontInstallOptions): BitmapFont; /** * Uninstalls a bitmap font from the cache. * @param {string} name - The name of the bitmap font to uninstall. */ uninstall(name: string): void; } export declare const BitmapFontManager: BitmapFontManagerClass; /** * Options for creating a BitmapFont. * @memberof text */ export interface BitmapFontOptions { data: BitmapFontData; textures: Texture[]; } /** * A BitmapFont object represents a particular font face, size, and style. * @memberof text */ export declare class BitmapFont extends AbstractBitmapFont { /** the url of the font */ url?: string; constructor(options: BitmapFontOptions, url?: string); /** Destroys the BitmapFont object. */ destroy(): void; /** * Generates a bitmap-font for the given style and character set * @param options - Setup options for font generation. * @returns Font generated by style options. * @example * import { BitmapFont, BitmapText } from 'pixi.js'; * * BitmapFont.install('TitleFont', { * fontFamily: 'Arial', * fontSize: 12, * strokeThickness: 2, * fill: 'purple', * }); * * const title = new BitmapText({ text: 'This is the title', fontFamily: 'TitleFont' }); */ static install(options: BitmapFontInstallOptions): void; /** * Uninstalls a bitmap font from the cache. * @param {string} name - The name of the bitmap font to uninstall. */ static uninstall(name: string): void; } /** simple loader plugin for loading in bitmap fonts! 
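 * As a rough usage sketch (the URL here is a placeholder), a font file loaded through
 * `Assets` ends up cached and ready for `BitmapText`:
 * @example
 * import { Assets, BitmapText } from 'pixi.js';
 *
 * const font = await Assets.load('fonts/my-font.xml');
 * const title = new BitmapText({ text: 'Hello', style: { fontFamily: font.fontFamily } });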
*/ export declare const bitmapFontCachePlugin: { extension: { type: ExtensionType.CacheParser; name: string; }; test: (asset: BitmapFont) => boolean; getCacheableAssets(keys: string[], asset: BitmapFont): Record; }; export declare const loadBitmapFont: { extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; }; name: string; test(url: string): boolean; testParse(data: string): Promise; parse(asset: string, data: ResolvedAsset, loader: Loader): Promise; load(url: string, _options: ResolvedAsset): Promise; unload(bitmapFont: BitmapFont, _resolvedAsset: ResolvedAsset, loader: Loader): Promise; }; export interface DynamicBitmapFontOptions { style: TextStyle; skipKerning?: boolean; resolution?: number; padding?: number; overrideFill?: boolean; overrideSize?: boolean; textureSize?: number; mipmap?: boolean; } /** * A BitmapFont that generates its glyphs dynamically. * @memberof text * @ignore */ export declare class DynamicBitmapFont extends AbstractBitmapFont { static defaultOptions: DynamicBitmapFontOptions; /** * this is a resolution modifier for the font size.. * texture resolution will also be used to scale texture according to its font size also */ resolution: number; /** The pages of the font. */ readonly pages: { canvasAndContext?: CanvasAndContext; texture: Texture; }[]; private readonly _padding; private readonly _measureCache; private _currentChars; private _currentX; private _currentY; private _currentPageIndex; private readonly _style; private readonly _skipKerning; private readonly _textureSize; private readonly _mipmap; /** * @param options - The options for the dynamic bitmap font. */ constructor(options: DynamicBitmapFontOptions); ensureCharacters(chars: string): void; /** * @deprecated since 8.0.0 * The map of base page textures (i.e., sheets of glyphs). */ get pageTextures(): DynamicBitmapFont["pages"]; private _applyKerning; private _nextPage; private _setupContext; private _drawGlyph; destroy(): void; } /** * Processes the passed character set data and returns a flattened array of all the characters. * * Ignored because not directly exposed. * @ignore * @param {string | string[] | string[][] } chars * @returns {string[]} the flattened array of characters */ export declare function resolveCharacters(chars: string | (string | string[])[]): string[]; export declare const nssvg = "http://www.w3.org/2000/svg"; export declare const nsxhtml = "http://www.w3.org/1999/xhtml"; export declare class HTMLTextRenderData { svgRoot: SVGSVGElement; foreignObject: SVGForeignObjectElement; domElement: HTMLElement; styleElement: HTMLElement; image: HTMLImageElement; canvasAndContext?: CanvasAndContext; constructor(); } /** * Extracts font families from text. It will extract font families from the style, tagStyles and any font families * embedded in the text. It should also strip out duplicates as it goes. 
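 * A rough sketch (the tag name and families are arbitrary; the exact result depends on the style):
 * @example
 * const style = new HTMLTextStyle({ fontFamily: 'Arial', tagStyles: { em: { fontFamily: 'Georgia' } } });
 * extractFontFamilies('Hello <em>world</em>', style); // e.g. ['Arial', 'Georgia']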
* @param text - The text to extract font families from * @param style - The style to extract font families from * @returns {string[]} - The font families as an array of strings */ export declare function extractFontFamilies(text: string, style: HTMLTextStyle): string[]; export declare const FontStylePromiseCache: Map>; /** * takes the font families and returns a css string that can be injected into a style tag * It will contain the font families and the font urls encoded as base64 * @param fontFamilies - The font families to load * @param style - The FontCSSStyleOptions to load the font with (used for the first font family) * @param defaultOptions - The default options to load the font with (used for the rest of the font families) * @param defaultOptions.fontWeight - The default font weight * @param defaultOptions.fontStyle - The default font style * @returns - The css string */ export declare function getFontCss(fontFamilies: string[], style: FontCSSStyleOptions, defaultOptions: { fontWeight: string; fontStyle: string; }): Promise; /** * takes all the data and returns a svg url string can be loaded by an image element * @param text - The text to measure * @param style - The style to use * @param resolution - The resolution to use * @param fontCSS - The font css to use * @param htmlTextData - The HTMLTextRenderData to write the SVG to * @returns - The SVG as a url string */ export declare function getSVGUrl(text: string, style: HTMLTextStyle, resolution: number, fontCSS: string, htmlTextData: HTMLTextRenderData): string; /** * This function converts an image to a canvas, and returns the canvas. * It is used to convert images to canvases to work around a CORS issue where WebGPU cannot * upload an SVGImage to a texture. * * It uses the CanvasPool to get an optimal canvas and context, and then draws the image onto it. * This canvas is immediately returned to the CanvasPool for reuse, so use the result straight away! * (eg upload it to the GPU!) * @param image - The image to convert to a canvas. * @param resolution - The resolution of the canvas. */ export declare function getTemporaryCanvasFromImage(image: HTMLImageElement, resolution: number): HTMLCanvasElement; /** * Resolves a font url to a base64 string * @param url - The url to load the font from * @returns - The font as a base64 string */ export declare function loadFontAsBase64(url: string): Promise; /** * This function loads an SVG image into an HTMLImageElement. * The image can then be uploaded as texture to the GPU. * iOS has a bug where embedded fonts are not available immediately after the image loads, * so we wait an arbitrary amount of time before resolving the promise. * @param image - The image to load the SVG into * @param url - The url to load the SVG from * @param delay - Whether to delay the load * @returns - A promise that resolves when the image has loaded */ export declare function loadSVGImage(image: HTMLImageElement, url: string, delay: boolean): Promise; /** * Measures the HTML text without actually generating an image. * This is used to calculate the size of the text. * @param text - The text to measure * @param style - The style to use * @param fontStyleCSS - The font css to use * @param htmlTextRenderData - The HTMLTextRenderData to write the SVG to * @returns - The size of the text */ export declare function measureHtmlText(text: string, style: HTMLTextStyle, fontStyleCSS?: string, htmlTextRenderData?: HTMLTextRenderData): Size; /** * Internally converts all of the style properties into CSS equivalents. 
* @param style * @returns The CSS style string, for setting `style` property of root HTMLElement. */ export declare function textStyleToCSS(style: HTMLTextStyle): string; /** * Generates a font style string to use for `TextMetrics.measureFont()`. * @param style * @returns Font style string, for passing to `TextMetrics.measureFont()` */ export declare function fontStringFromTextStyle(style: TextStyle): string; export declare function getCanvasFillStyle(fillStyle: ConvertedFillStyle, context: ICanvasRenderingContext2D): string | CanvasGradient | CanvasPattern; export declare class SdfShader extends Shader { constructor(); } export declare const localUniformMSDFBit: { name: string; vertex: { header: string; main: string; end: string; }; fragment: { header: string; main: string; }; }; export declare const localUniformMSDFBitGl: { name: string; vertex: { header: string; main: string; end: string; }; fragment: { header: string; main: string; }; }; export declare const mSDFBit: { name: string; fragment: { header: string; }; }; export declare const mSDFBitGl: { name: string; fragment: { header: string; }; }; /** * converts the style input into the correct type of TextStyle * either HTMLTextStyle or TextStyle based on the renderMode. * @param renderMode - The render mode to use * @param style - The style to use * @returns - The style class */ export declare function ensureTextStyle(renderMode: string, style: TextStyle | HTMLTextStyle | TextStyleOptions | HTMLTextStyleOptions): TextStyle; /** * Generates a unique key for the text style. * @param style - The style to generate a key for. * @returns the key for the style. */ export declare function generateTextStyleKey(style: TextStyle): string; /** * Takes an image and creates a texture from it, using a power of 2 texture from the texture pool. * Remember to return the texture when you don't need it any more! * @param image - The image to create a texture from * @param width - the frame width of the texture * @param height - the frame height of the texture * @param resolution - The resolution of the texture * @returns - The texture */ export declare function getPo2TextureFromSource(image: HTMLImageElement | HTMLCanvasElement | ICanvas, width: number, height: number, resolution: number): Texture; /** * Represents the JSON data for a spritesheet atlas. * @memberof assets */ export interface SpritesheetFrameData { /** The frame rectangle of the texture. */ frame: { x: number; y: number; w: number; h: number; }; /** Whether the texture is trimmed. */ trimmed?: boolean; /** Whether the texture is rotated. */ rotated?: boolean; /** The source size of the texture. */ sourceSize?: { w: number; h: number; }; /** The sprite source size. */ spriteSourceSize?: { h?: number; w?: number; x: number; y: number; }; /** The anchor point of the texture. */ anchor?: PointData; /** The 9-slice borders of the texture. */ borders?: TextureBorders; } /** * Atlas format. * @memberof assets */ export interface SpritesheetData { /** The frames of the atlas. */ frames: Dict; /** The animations of the atlas. */ animations?: Dict; /** The meta data of the atlas. 
*/ meta: { app?: string; format?: string; frameTags?: { from: number; name: string; to: number; direction: string; }[]; image?: string; layers?: { blendMode: string; name: string; opacity: number; }[]; scale: number | string; size?: { h: number; w: number; }; slices?: { color: string; name: string; keys: { frame: number; bounds: { x: number; y: number; w: number; h: number; }; }[]; }[]; related_multi_packs?: string[]; version?: string; }; } /** * Utility class for maintaining reference to a collection * of Textures on a single Spritesheet. * * To access a sprite sheet from your code you may pass its JSON data file to Pixi's loader: * * ```js * import { Assets } from 'pixi.js'; * * const sheet = await Assets.load('images/spritesheet.json'); * ``` * * Alternately, you may circumvent the loader by instantiating the Spritesheet directly: * * ```js * import { Spritesheet } from 'pixi.js'; * * const sheet = new Spritesheet(texture, spritesheetData); * await sheet.parse(); * console.log('Spritesheet ready to use!'); * ``` * * With the `sheet.textures` you can create Sprite objects, and `sheet.animations` can be used to create an AnimatedSprite. * * Here's an example of a sprite sheet JSON data file: * ```json * { * "frames": { * "enemy1.png": * { * "frame": {"x":103,"y":1,"w":32,"h":32}, * "spriteSourceSize": {"x":0,"y":0,"w":32,"h":32}, * "sourceSize": {"w":32,"h":32}, * "anchor": {"x":16,"y":16} * }, * "enemy2.png": * { * "frame": {"x":103,"y":35,"w":32,"h":32}, * "spriteSourceSize": {"x":0,"y":0,"w":32,"h":32}, * "sourceSize": {"w":32,"h":32}, * "anchor": {"x":16,"y":16} * }, * "button.png": * { * "frame": {"x":1,"y":1,"w":100,"h":100}, * "spriteSourceSize": {"x":0,"y":0,"w":100,"h":100}, * "sourceSize": {"w":100,"h":100}, * "anchor": {"x":0,"y":0}, * "borders": {"left":35,"top":35,"right":35,"bottom":35} * } * }, * * "animations": { * "enemy": ["enemy1.png","enemy2.png"] * }, * * "meta": { * "image": "sheet.png", * "format": "RGBA8888", * "size": {"w":136,"h":102}, * "scale": "1" * } * } * ``` * Sprite sheets can be packed using tools like {@link https://codeandweb.com/texturepacker|TexturePacker}, * {@link https://renderhjs.net/shoebox/|Shoebox} or {@link https://github.com/krzysztof-o/spritesheet.js|Spritesheet.js}. * Default anchor points (see {@link Texture#defaultAnchor}), default 9-slice borders * (see {@link Texture#defaultBorders}) and grouping of animation sprites are currently only * supported by TexturePacker. * * Alternative ways for loading spritesheet image if you need more control: * * ```js * import { Assets } from 'pixi.js'; * * const sheetTexture = await Assets.load('images/spritesheet.png'); * Assets.add({ * alias: 'atlas', * src: 'images/spritesheet.json', * data: {texture: sheetTexture} // using of preloaded texture * }); * const sheet = await Assets.load('atlas') * ``` * * or: * * ```js * import { Assets } from 'pixi.js'; * * Assets.add({ * alias: 'atlas', * src: 'images/spritesheet.json', * data: {imageFilename: 'my-spritesheet.2x.avif'} // using of custom filename located in "images/my-spritesheet.2x.avif" * }); * const sheet = await Assets.load('atlas') * ``` * @memberof assets */ export declare class Spritesheet { /** The maximum number of Textures to build per process. */ static readonly BATCH_SIZE = 1000; /** For multi-packed spritesheets, this contains a reference to all the other spritesheets it depends on. */ linkedSheets: Spritesheet[]; /** Reference to the source texture. */ textureSource: TextureSource; /** * A map containing all textures of the sprite sheet. 
* Can be used to create a {@link Sprite|Sprite}: * @example * import { Sprite } from 'pixi.js'; * * new Sprite(sheet.textures['image.png']); */ textures: Record; /** * A map containing the textures for each animation. * Can be used to create an {@link AnimatedSprite|AnimatedSprite}: * @example * import { AnimatedSprite } from 'pixi.js'; * * new AnimatedSprite(sheet.animations['anim_name']); */ animations: Record, Texture[]>; /** * Reference to the original JSON data. * @type {object} */ data: S; /** The resolution of the spritesheet. */ resolution: number; /** * Reference to the original source image from the Loader. This reference is retained so we * can destroy the Texture later on. It is never used internally. */ private _texture; /** * Map of spritesheet frames. * @type {object} */ private _frames; /** Collection of frame names. */ private _frameKeys; /** Current batch index being processed. */ private _batchIndex; /** * Callback when parse is completed. * @type {Function} */ private _callback; /** * @param texture - Reference to the source BaseTexture object. * @param {object} data - Spritesheet image data. */ constructor(texture: BindableTexture, data: S); /** * Parses the spritesheet from the loaded data. This is done asynchronously * to prevent creating too many Textures within a single process. */ parse(): Promise>; /** * Processes a batch of frames. * @param initialFrameIndex - The index of the frame to start at. */ private _processFrames; /** Parse animations config. */ private _processAnimations; /** The parse has completed. */ private _parseComplete; /** Begin the next batch of textures. */ private _nextBatch; /** * Destroys the Spritesheet. Do not use it after this. * @param {boolean} [destroyBase=false] - Whether to destroy the base texture as well */ destroy(destroyBase?: boolean): void; } export interface SpriteSheetJson extends SpritesheetData { meta: { image: string; scale: string; related_multi_packs?: string[]; }; } /** * Asset extension for loading spritesheets. * @example * import { Assets } from 'pixi.js'; * * Assets.load({ * alias: 'spritesheet', * src: 'path/to/spritesheet.json', * data: { * ignoreMultiPack: true, * } * }) * @type {AssetExtension} * @memberof assets */ export declare const spritesheetAsset: { extension: ExtensionType.Asset; /** Handles the caching of the related Spritesheet Textures. */ cache: { test: (asset: Spritesheet) => boolean; getCacheableAssets: (keys: string[], asset: Spritesheet) => Record; }; /** Resolve the resolution of the asset. */ resolver: { extension: { type: ExtensionType.ResolveParser; name: string; }; test: (value: string) => boolean; parse: (value: string) => { resolution: number; format: string; src: string; }; }; /** * Loader plugin that parses sprite sheets! * Once the JSON has been loaded, this checks whether it is spritesheet data. * If it is, we load the spritesheet's image and parse the data into a Spritesheet. * All textures in the sprite sheet are then added to the cache. */ loader: { name: string; extension: { type: ExtensionType.LoadParser; priority: LoaderParserPriority; name: string; }; testParse(asset: SpriteSheetJson, options: ResolvedAsset): Promise; parse(asset: SpriteSheetJson, options: ResolvedAsset<{ texture?: Texture; imageFilename?: string; ignoreMultiPack?: boolean; }>, loader?: Loader): Promise; unload(spritesheet: Spritesheet, _resolvedAsset: ResolvedAsset, loader: Loader): Promise; }; }; /** * Represents the update priorities used by internal Pixi classes when registered with * the {@link ticker.Ticker} object.
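 * For example, a handler can be registered on the shared ticker with an explicit priority
 * (a minimal sketch; the callback body is a placeholder):
 * ```ts
 * import { Ticker, UPDATE_PRIORITY } from 'pixi.js';
 *
 * Ticker.shared.add((ticker) =>
 * {
 *     // runs every frame before the NORMAL-priority handlers
 * }, undefined, UPDATE_PRIORITY.HIGH);
 * ```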
Higher priority items are updated first and lower * priority items, such as rendering, should go later. * @static * @enum {number} * @memberof ticker */ export declare enum UPDATE_PRIORITY { /** * Highest priority used for interaction events in {@link EventSystem} * @default 50 */ INTERACTION = 50, /** * High priority updating, used by {@link AnimatedSprite} * @default 25 */ HIGH = 25, /** * Default priority for ticker events, see {@link Ticker#add}. * @default 0 */ NORMAL = 0, /** * Low priority used for {@link Application} rendering. * @default -25 */ LOW = -25, /** * Lowest priority used for {@link BasePrepare} utility. * @default -50 */ UTILITY = -50 } /** * Internal class for handling the priority sorting of ticker handlers. * @private * @class */ export declare class TickerListener { /** The current priority. */ priority: number; /** The next item in the chain. */ next: TickerListener; /** The previous item in the chain. */ previous: TickerListener; /** The handler function to execute. */ private _fn; /** The calling context to execute with. */ private _context; /** If this should only execute once. */ private readonly _once; /** `true` if this listener has been destroyed already. */ private _destroyed; /** * Constructor * @private * @param fn - The listener function to be added for one update * @param context - The listener context * @param priority - The priority for emitting * @param once - If the handler should fire once */ constructor(fn: TickerCallback, context?: T, priority?: number, once?: boolean); /** * Simple compare function to figure out if a function and context match. * @param fn - The listener function to be added for one update * @param context - The listener context * @returns `true` if the listener matches the arguments */ match(fn: TickerCallback, context?: any): boolean; /** * Emit by calling the current function. * @param ticker - The ticker emitting. * @returns The next listener */ emit(ticker: Ticker): TickerListener; /** * Connect to the list. * @param previous - Input node, previous listener */ connect(previous: TickerListener): void; /** * Destroy and don't use after this. * @param hard - `true` to remove the `next` reference; this * is considered a hard destroy. A soft destroy maintains the `next` reference. * @returns The listener to redirect while emitting or removing. */ destroy(hard?: boolean): TickerListener; } /** * Helper for detecting the correct alpha mode for video textures. * For some reason, some browsers/devices/WebGL implementations premultiply the alpha * of a video up front and then a second time if `UNPACK_PREMULTIPLY_ALPHA_WEBGL` * is true. So the video is premultiplied twice if the alpha mode is `UNPACK`. * In this case we need the alpha mode to be `PMA`. This function detects * the upload behavior by uploading a white 2x2 webm with 50% alpha * without `UNPACK_PREMULTIPLY_ALPHA_WEBGL` and then checking whether * the uploaded pixels are premultiplied. * @memberof utils * @function detectVideoAlphaMode * @returns {Promise} The correct alpha mode for video textures. */ export declare function detectVideoAlphaMode(): Promise; export declare function isSafari(): boolean; /** * Helper for checking for WebGL support. * @param failIfMajorPerformanceCaveat - Whether to fail if there is a major performance caveat; defaults to false. * @memberof utils * @function isWebGLSupported * @returns {boolean} Is WebGL supported. */ export declare function isWebGLSupported(failIfMajorPerformanceCaveat?: boolean): boolean; /** * Helper for checking for WebGPU support.
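 * A minimal sketch of using it (together with `isWebGLSupported`) to pick a renderer preference
 * up front; the fallback choice here is just an example:
 * ```ts
 * import { Application, isWebGLSupported, isWebGPUSupported } from 'pixi.js';
 *
 * const preference = (await isWebGPUSupported()) ? 'webgpu'
 *     : (isWebGLSupported() ? 'webgl' : undefined);
 *
 * const app = new Application();
 * await app.init({ preference });
 * ```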
* @param options - The options for requesting a GPU adapter. * @memberof utils * @function isWebGPUSupported * @returns Is WebGPU supported. */ export declare function isWebGPUSupported(options?: GPURequestAdapterOptions): Promise; /** * Not all platforms allow generating function code (e.g., `new Function`). * This provides the platform-level detection. * @private * @returns {boolean} `true` if `new Function` is supported. */ export declare function unsafeEvalSupported(): boolean; /** * Measures the bounds of a canvas' visible (non-transparent) pixels. * @param canvas - The canvas to measure. * @param resolution - The resolution of the canvas. * @returns The bounding box of the canvas' visible pixels. * @since 7.1.0 * @memberof utils */ export declare function getCanvasBoundingBox(canvas: ICanvas, resolution?: number): Rectangle; /** * Regexp for data URI. * Based on: {@link https://github.com/ragingwind/data-uri-regex} * @static * @type {RegExp} * @default /(?:^data:image\/([\w+]+);(?:[\w=]+|charset=[\w-]+)?(?:;base64)?,)/i * @example * import { DATA_URI } from 'pixi.js'; * * DATA_URI.test('data:image/png;base64,foobar'); // => true * @memberof utils */ export declare const DATA_URI: RegExp; /** The current version of PixiJS. This is automatically replaced by the build process. */ export declare const VERSION = "$_VERSION"; /** * Removes items from a JavaScript array without generating garbage. * @function removeItems * @memberof utils * @param {Array} arr - Array to remove elements from * @param {number} startIdx - Starting index * @param {number} removeCount - How many to remove */ export declare function removeItems(arr: any[], startIdx: number, removeCount: number): void; type UIDNames = "default" | "resource" | "texture" | "textureSource" | "textureResource" | "batcher" | "graphicsContext" | "graphicsView" | "graphicsPath" | "fillGradient" | "fillPattern" | "meshView" | "renderable" | "buffer" | "bufferResource" | "geometry" | "instructionSet" | "renderTarget" | "uniform" | "spriteView" | "textView" | "tilingSpriteView"; /** * Gets the next unique identifier. * @param name - The name of the identifier. * @function uid * @returns {number} The next unique identifier to use. * @memberof utils */ export declare function uid(name?: UIDNames): number; /** Resets the next unique identifier to 0. This is used for some tests; don't touch or things WILL explode :) */ export declare function resetUids(): void; export declare function updateQuadBounds(bounds: BoundsData, anchor: ObservablePoint, texture: Texture, padding: number): void; /** * Deprecation name for version 8.0.0 * @ignore */ export declare const v8_0_0 = "8.0.0"; export declare const v8_3_4 = "8.3.4"; /** * Helper for warning developers about deprecated features & settings. * A stack trace for warnings is given; useful for tracking down where * deprecated methods/properties/classes are being used within the code. * @memberof utils * @ignore * @function deprecation * @param {string} version - The version where the feature became deprecated * @param {string} message - Message should include what is deprecated, where, and the new solution * @param {number} [ignoreDepth=3] - The number of steps to ignore at the top of the error stack; * this is mostly to ignore internal deprecation calls. */ export declare function deprecation(version: string, message: string, ignoreDepth?: number): void; /** * Logs a texture to the console as a base64 image. * This can be very useful for debugging issues with rendering.
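 * A quick sketch of typical usage while debugging (`sprite` and `renderer` are whatever you
 * already have at hand):
 * @example
 * await logDebugTexture(sprite.texture, renderer, 200);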
* @param texture - The texture to log * @param renderer - The renderer to use * @param size - The size of the texture to log in the console * @ignore */ export declare function logDebugTexture(texture: Texture, renderer: Renderer, size?: number): Promise; export declare function logScene(container: Container, depth?: number, data?: { color?: string; }): void; export declare function logRenderGroupScene(renderGroup: RenderGroup, depth?: number, data?: { index: number; color?: string; }): void; /** * Logs a PixiJS warning message to the console. Stops logging after 500 warnings have been logged. * @param args - The warning message(s) to log * @returns {void} * @memberof utils * @ignore */ export declare function warn(...args: any[]): void; export declare const NOOP: () => void; /** * get the resolution / device pixel ratio of an asset by looking for the prefix * used by spritesheets and image urls * @memberof utils * @function getResolutionOfUrl * @param {string} url - the image path * @param {number} [defaultValue=1] - the defaultValue if no filename prefix is set. * @returns {number} resolution / device pixel ratio of an asset */ export declare function getResolutionOfUrl(url: string, defaultValue?: number): number; /** * Path utilities for working with URLs and file paths in a cross-platform way. * All paths that are passed in will become normalized to have posix separators. * ```js * import { path } from 'pixi.js'; * * path.normalize('http://www.example.com/foo/bar/../baz'); // http://www.example.com/foo/baz * ``` * @memberof utils */ export interface Path { /** * Converts a path to posix format. * @param path - The path to convert to posix */ toPosix: (path: string) => string; /** * Checks if the path is a URL e.g. http://, https:// * @param path - The path to check */ isUrl: (path: string) => boolean; /** * Checks if the path is a data URL * @param path - The path to check */ isDataUrl: (path: string) => boolean; /** * Checks if the path is a blob URL * @param path - The path to check */ isBlobUrl: (path: string) => boolean; /** * Checks if the path has a protocol e.g. http://, https://, file:///, data:, blob:, C:/ * This will return true for windows file paths * @param path - The path to check */ hasProtocol: (path: string) => boolean; /** * Returns the protocol of the path e.g. http://, https://, file:///, data:, blob:, C:/ * @param path - The path to get the protocol from */ getProtocol: (path: string) => string; /** * Converts URL to an absolute path. * When loading from a Web Worker, we must use absolute paths. * If the URL is already absolute we return it as is * If it's not, we convert it * @param url - The URL to test * @param customBaseUrl - The base URL to use * @param customRootUrl - The root URL to use */ toAbsolute: (url: string, baseUrl?: string, rootUrl?: string) => string; /** * Normalizes the given path, resolving '..' and '.' segments * @param path - The path to normalize */ normalize: (path: string) => string; /** * Determines if path is an absolute path. * Absolute paths can be urls, data urls, or paths on disk * @param path - The path to test */ isAbsolute: (path: string) => boolean; /** * Joins all given path segments together using the platform-specific separator as a delimiter, * then normalizes the resulting path * @param segments - The segments of the path to join */ join: (...paths: string[]) => string; /** * Returns the directory name of a path * @param path - The path to parse */ dirname: (path: string) => string; /** * Returns the root of the path e.g. 
/, C:/, file:///, http://domain.com/ * @param path - The path to parse */ rootname: (path: string) => string; /** * Returns the last portion of a path * @param path - The path to test * @param ext - Optional extension to remove */ basename: (path: string, ext?: string) => string; /** * Returns the extension of the path, from the last occurrence of the . (period) character to end of string in the last * portion of the path. If there is no . in the last portion of the path, or if there are no . characters other than * the first character of the basename of path, an empty string is returned. * @param path - The path to parse */ extname: (path: string) => string; /** * Parses a path into an object containing the 'root', `dir`, `base`, `ext`, and `name` properties. * @param path - The path to parse */ parse: (path: string) => { root?: string; dir?: string; base?: string; ext?: string; name?: string; }; sep: string; delimiter: string; joinExtensions: string[]; } /** * Path utilities for working with URLs and file paths in a cross-platform way. * All paths that are passed in will become normalized to have posix separators. * ```js * import { path } from 'pixi.js'; * * path.normalize('http://www.example.com/foo/bar/../baz'); // http://www.example.com/foo/baz * ``` * @see {@link utils.Path} * @memberof utils */ export declare const path: Path; /** * A type alias for a constructor of a Pool. * @template T The type of items in the pool. Must extend PoolItem. * @memberof utils */ export type PoolConstructor = new () => Pool; /** * A group of pools that can be used to store objects of different types. * @memberof utils */ export declare class PoolGroupClass { /** * A map to store the pools by their class type. * @private */ private readonly _poolsByClass; /** * Prepopulates a specific pool with a given number of items. * @template T The type of items in the pool. Must extend PoolItem. * @param {PoolItemConstructor} Class - The constructor of the items in the pool. * @param {number} total - The number of items to add to the pool. */ prepopulate(Class: PoolItemConstructor, total: number): void; /** * Gets an item from a specific pool. * @template T The type of items in the pool. Must extend PoolItem. * @param {PoolItemConstructor} Class - The constructor of the items in the pool. * @param {unknown} [data] - Optional data to pass to the item's constructor. * @returns {T} The item from the pool. */ get(Class: PoolItemConstructor, data?: unknown): T; /** * Returns an item to its respective pool. * @param {PoolItem} item - The item to return to the pool. */ return(item: PoolItem): void; /** * Gets a specific pool based on the class type. * @template T The type of items in the pool. Must extend PoolItem. * @param {PoolItemConstructor} ClassType - The constructor of the items in the pool. * @returns {Pool} The pool of the given class type. */ getPool(ClassType: PoolItemConstructor): Pool; /** gets the usage stats of each pool in the system */ stats(): Record; } export declare const BigPool: PoolGroupClass; /** * Prints out the version and renderer information for this running instance of PixiJS. * @param type - The name of the renderer this instance is using. 
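 * @example
 * // Illustrative only - the string is just the renderer name you want reported.
 * sayHello('WebGL');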
* @returns {void} */ export declare function sayHello(type: string): void; export { Buffer$1 as Buffer, Cache$1 as Cache, EXT_texture_compression_bptc$1 as EXT_texture_compression_bptc, EXT_texture_compression_rgtc$1 as EXT_texture_compression_rgtc, ExtensionFormat as ExtensionFormatLoose, GPU$1 as GPU, PredefinedColorSpace$1 as PredefinedColorSpace, RenderingContext$1 as RenderingContext, StrictExtensionFormat as ExtensionFormat, System$1 as System, Text$1 as Text, WEBGL_compressed_texture_etc$1 as WEBGL_compressed_texture_etc, WEBGL_compressed_texture_etc1$1 as WEBGL_compressed_texture_etc1, WEBGL_compressed_texture_pvrtc$1 as WEBGL_compressed_texture_pvrtc, exports$1 as earcut, }; export as namespace PIXI; export {};