var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); var __publicField = (obj, key, value) => { __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; // dist/index.js var dist_exports = {}; __export(dist_exports, { Accessor: () => Accessor, WEBGLBuffer: () => WEBGLBuffer, WEBGLCommandEncoder: () => WEBGLCommandEncoder, WEBGLFramebuffer: () => WEBGLFramebuffer, WEBGLRenderPass: () => WEBGLRenderPass, WEBGLRenderPipeline: () => WEBGLRenderPipeline, WEBGLResource: () => WebGLResource, WEBGLSampler: () => WEBGLSampler, WEBGLShader: () => WEBGLShader, WEBGLTexture: () => WEBGLTexture, WEBGLTransformFeedback: () => WEBGLTransformFeedback, WEBGLVertexArray: () => WEBGLVertexArray, WebGLCanvasContext: () => WebGLCanvasContext, WebGLDevice: () => WebGLDevice, WebGLResource: () => WebGLResource, _TEXTURE_FORMATS: () => TEXTURE_FORMATS, _WEBGLRenderbuffer: () => WEBGLRenderbuffer, convertGLToTextureFormat: () => convertGLToTextureFormat, getGLParameters: () => getGLParameters, getShaderLayout: () => getShaderLayout, popContextState: () => popContextState, pushContextState: () => pushContextState, resetGLParameters: () => resetGLParameters, setDeviceParameters: () => setDeviceParameters, setGLParameters: () => setGLParameters, trackContextState: () => trackContextState, withDeviceParameters: () => withDeviceParameters, withGLParameters: () => withGLParameters }); module.exports = __toCommonJS(dist_exports); // dist/adapter/webgl-device.js var import_core27 = require("@luma.gl/core"); // dist/context/state-tracker/track-context-state.js var import_core = require("@luma.gl/core"); // dist/context/parameters/webgl-parameter-tables.js var import_constants = require("@luma.gl/constants"); var GL_PARAMETER_DEFAULTS = { [3042]: false, [32773]: new Float32Array([0, 0, 0, 0]), [32777]: 32774, [34877]: 32774, [32969]: 1, [32968]: 0, [32971]: 1, [32970]: 0, [3106]: new Float32Array([0, 0, 0, 0]), // TBD [3107]: [true, true, true, true], [2884]: false, [2885]: 1029, [2929]: false, [2931]: 1, [2932]: 513, [2928]: new Float32Array([0, 1]), // TBD [2930]: true, [3024]: true, [35725]: null, // FRAMEBUFFER_BINDING and DRAW_FRAMEBUFFER_BINDING(WebGL2) refer same state. [36006]: null, [36007]: null, [34229]: null, [34964]: null, [2886]: 2305, [33170]: 4352, [2849]: 1, [32823]: false, [32824]: 0, [10752]: 0, [32926]: false, [32928]: false, [32938]: 1, [32939]: false, [3089]: false, // Note: Dynamic value. 
If scissor test enabled we expect users to set correct scissor box [3088]: new Int32Array([0, 0, 1024, 1024]), [2960]: false, [2961]: 0, [2968]: 4294967295, [36005]: 4294967295, [2962]: 519, [2967]: 0, [2963]: 4294967295, [34816]: 519, [36003]: 0, [36004]: 4294967295, [2964]: 7680, [2965]: 7680, [2966]: 7680, [34817]: 7680, [34818]: 7680, [34819]: 7680, // Dynamic value: We use [0, 0, 1024, 1024] as default, but usually this is updated in each frame. [2978]: [0, 0, 1024, 1024], [36389]: null, [36662]: null, [36663]: null, [35053]: null, [35055]: null, [35723]: 4352, [36010]: null, [35977]: false, [3333]: 4, [3317]: 4, [37440]: false, [37441]: false, [37443]: 37444, [3330]: 0, [3332]: 0, [3331]: 0, [3314]: 0, [32878]: 0, [3316]: 0, [3315]: 0, [32877]: 0 }; var enable = (gl, value, key) => value ? gl.enable(key) : gl.disable(key); var hint = (gl, value, key) => gl.hint(key, value); var pixelStorei = (gl, value, key) => gl.pixelStorei(key, value); var bindFramebuffer = (gl, value, key) => { const target = key === 36006 ? 36009 : 36008; return gl.bindFramebuffer(target, value); }; var bindBuffer = (gl, value, key) => { const bindingMap = { [34964]: 34962, [36662]: 36662, [36663]: 36663, [35053]: 35051, [35055]: 35052 }; const glTarget = bindingMap[key]; gl.bindBuffer(glTarget, value); }; function isArray(array) { return Array.isArray(array) || ArrayBuffer.isView(array) && !(array instanceof DataView); } var GL_PARAMETER_SETTERS = { [3042]: enable, [32773]: (gl, value) => gl.blendColor(...value), [32777]: "blendEquation", [34877]: "blendEquation", [32969]: "blendFunc", [32968]: "blendFunc", [32971]: "blendFunc", [32970]: "blendFunc", [3106]: (gl, value) => gl.clearColor(...value), [3107]: (gl, value) => gl.colorMask(...value), [2884]: enable, [2885]: (gl, value) => gl.cullFace(value), [2929]: enable, [2931]: (gl, value) => gl.clearDepth(value), [2932]: (gl, value) => gl.depthFunc(value), [2928]: (gl, value) => gl.depthRange(...value), [2930]: (gl, value) => gl.depthMask(value), [3024]: enable, [35723]: hint, [35725]: (gl, value) => gl.useProgram(value), [36007]: (gl, value) => gl.bindRenderbuffer(36161, value), [36389]: (gl, value) => { var _a; return (_a = gl.bindTransformFeedback) == null ? void 0 : _a.call(gl, 36386, value); }, [34229]: (gl, value) => gl.bindVertexArray(value), // NOTE: FRAMEBUFFER_BINDING and DRAW_FRAMEBUFFER_BINDING(WebGL2) refer same state. 
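// Usage sketch (not from the library source; assumes `gl` is a WebGL2RenderingContext
// obtained elsewhere). setGLParameters(), defined further down in this module, consumes
// the table below: numeric GL parameter keys dispatch to the per-parameter setters,
// named keys ("blend", "viewport", ...) dispatch to the function-style setters, and
// string-valued entries ("blendEquation", "stencilOpFront", ...) are composite setters
// resolved via GL_COMPOSITE_PARAMETER_SETTERS (these need the cache installed by
// trackContextState()).
//
//   setGLParameters(gl, {
//     blend: true,               // -> gl.enable(GL.BLEND)
//     blendFunc: [770, 771],     // GL.SRC_ALPHA, GL.ONE_MINUS_SRC_ALPHA
//     [2929]: true,              // numeric key GL.DEPTH_TEST -> gl.enable()
//     viewport: [0, 0, 800, 600] // -> gl.viewport(0, 0, 800, 600)
//   });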
[36006]: bindFramebuffer, [36010]: bindFramebuffer, // Buffers [34964]: bindBuffer, [36662]: bindBuffer, [36663]: bindBuffer, [35053]: bindBuffer, [35055]: bindBuffer, [2886]: (gl, value) => gl.frontFace(value), [33170]: hint, [2849]: (gl, value) => gl.lineWidth(value), [32823]: enable, [32824]: "polygonOffset", [10752]: "polygonOffset", [35977]: enable, [32926]: enable, [32928]: enable, [32938]: "sampleCoverage", [32939]: "sampleCoverage", [3089]: enable, [3088]: (gl, value) => gl.scissor(...value), [2960]: enable, [2961]: (gl, value) => gl.clearStencil(value), [2968]: (gl, value) => gl.stencilMaskSeparate(1028, value), [36005]: (gl, value) => gl.stencilMaskSeparate(1029, value), [2962]: "stencilFuncFront", [2967]: "stencilFuncFront", [2963]: "stencilFuncFront", [34816]: "stencilFuncBack", [36003]: "stencilFuncBack", [36004]: "stencilFuncBack", [2964]: "stencilOpFront", [2965]: "stencilOpFront", [2966]: "stencilOpFront", [34817]: "stencilOpBack", [34818]: "stencilOpBack", [34819]: "stencilOpBack", [2978]: (gl, value) => gl.viewport(...value), // WEBGL2 EXTENSIONS // EXT_depth_clamp https://registry.khronos.org/webgl/extensions/EXT_depth_clamp/ [34383]: enable, // WEBGL_provoking_vertex https://registry.khronos.org/webgl/extensions/WEBGL_provoking_vertex/ // [GL.PROVOKING_VERTEX_WEBL]: TODO - extension function needed // WEBGL_polygon_mode https://registry.khronos.org/webgl/extensions/WEBGL_polygon_mode/ // POLYGON_MODE_WEBGL TODO - extension function needed [10754]: enable, // WEBGL_clip_cull_distance https://registry.khronos.org/webgl/extensions/WEBGL_clip_cull_distance/ [12288]: enable, [12289]: enable, [12290]: enable, [12291]: enable, [12292]: enable, [12293]: enable, [12294]: enable, [12295]: enable, // PIXEL PACK/UNPACK MODES [3333]: pixelStorei, [3317]: pixelStorei, [37440]: pixelStorei, [37441]: pixelStorei, [37443]: pixelStorei, [3330]: pixelStorei, [3332]: pixelStorei, [3331]: pixelStorei, [3314]: pixelStorei, [32878]: pixelStorei, [3316]: pixelStorei, [3315]: pixelStorei, [32877]: pixelStorei, // Function-style setters framebuffer: (gl, framebuffer) => { const handle = framebuffer && "handle" in framebuffer ? framebuffer.handle : framebuffer; return gl.bindFramebuffer(36160, handle); }, blend: (gl, value) => value ? gl.enable(3042) : gl.disable(3042), blendColor: (gl, value) => gl.blendColor(...value), blendEquation: (gl, args) => { const separateModes = typeof args === "number" ? [args, args] : args; gl.blendEquationSeparate(...separateModes); }, blendFunc: (gl, args) => { const separateFuncs = (args == null ? void 0 : args.length) === 2 ? [...args, ...args] : args; gl.blendFuncSeparate(...separateFuncs); }, clearColor: (gl, value) => gl.clearColor(...value), clearDepth: (gl, value) => gl.clearDepth(value), clearStencil: (gl, value) => gl.clearStencil(value), colorMask: (gl, value) => gl.colorMask(...value), cull: (gl, value) => value ? gl.enable(2884) : gl.disable(2884), cullFace: (gl, value) => gl.cullFace(value), depthTest: (gl, value) => value ? gl.enable(2929) : gl.disable(2929), depthFunc: (gl, value) => gl.depthFunc(value), depthMask: (gl, value) => gl.depthMask(value), depthRange: (gl, value) => gl.depthRange(...value), dither: (gl, value) => value ? gl.enable(3024) : gl.disable(3024), derivativeHint: (gl, value) => { gl.hint(35723, value); }, frontFace: (gl, value) => gl.frontFace(value), mipmapHint: (gl, value) => gl.hint(33170, value), lineWidth: (gl, value) => gl.lineWidth(value), polygonOffsetFill: (gl, value) => value ? 
gl.enable(32823) : gl.disable(32823), polygonOffset: (gl, value) => gl.polygonOffset(...value), sampleCoverage: (gl, value) => gl.sampleCoverage(...value), scissorTest: (gl, value) => value ? gl.enable(3089) : gl.disable(3089), scissor: (gl, value) => gl.scissor(...value), stencilTest: (gl, value) => value ? gl.enable(2960) : gl.disable(2960), stencilMask: (gl, value) => { value = isArray(value) ? value : [value, value]; const [mask, backMask] = value; gl.stencilMaskSeparate(1028, mask); gl.stencilMaskSeparate(1029, backMask); }, stencilFunc: (gl, args) => { args = isArray(args) && args.length === 3 ? [...args, ...args] : args; const [func, ref, mask, backFunc, backRef, backMask] = args; gl.stencilFuncSeparate(1028, func, ref, mask); gl.stencilFuncSeparate(1029, backFunc, backRef, backMask); }, stencilOp: (gl, args) => { args = isArray(args) && args.length === 3 ? [...args, ...args] : args; const [sfail, dpfail, dppass, backSfail, backDpfail, backDppass] = args; gl.stencilOpSeparate(1028, sfail, dpfail, dppass); gl.stencilOpSeparate(1029, backSfail, backDpfail, backDppass); }, viewport: (gl, value) => gl.viewport(...value) }; function getValue(glEnum, values, cache) { return values[glEnum] !== void 0 ? values[glEnum] : cache[glEnum]; } var GL_COMPOSITE_PARAMETER_SETTERS = { blendEquation: (gl, values, cache) => gl.blendEquationSeparate(getValue(32777, values, cache), getValue(34877, values, cache)), blendFunc: (gl, values, cache) => gl.blendFuncSeparate(getValue(32969, values, cache), getValue(32968, values, cache), getValue(32971, values, cache), getValue(32970, values, cache)), polygonOffset: (gl, values, cache) => gl.polygonOffset(getValue(32824, values, cache), getValue(10752, values, cache)), sampleCoverage: (gl, values, cache) => gl.sampleCoverage(getValue(32938, values, cache), getValue(32939, values, cache)), stencilFuncFront: (gl, values, cache) => gl.stencilFuncSeparate(1028, getValue(2962, values, cache), getValue(2967, values, cache), getValue(2963, values, cache)), stencilFuncBack: (gl, values, cache) => gl.stencilFuncSeparate(1029, getValue(34816, values, cache), getValue(36003, values, cache), getValue(36004, values, cache)), stencilOpFront: (gl, values, cache) => gl.stencilOpSeparate(1028, getValue(2964, values, cache), getValue(2965, values, cache), getValue(2966, values, cache)), stencilOpBack: (gl, values, cache) => gl.stencilOpSeparate(1029, getValue(34817, values, cache), getValue(34818, values, cache), getValue(34819, values, cache)) }; var GL_HOOKED_SETTERS = { // GENERIC SETTERS enable: (update, capability) => update({ [capability]: true }), disable: (update, capability) => update({ [capability]: false }), pixelStorei: (update, pname, value) => update({ [pname]: value }), hint: (update, pname, hint2) => update({ [pname]: hint2 }), // SPECIFIC SETTERS useProgram: (update, value) => update({ [35725]: value }), bindRenderbuffer: (update, target, value) => update({ [36007]: value }), bindTransformFeedback: (update, target, value) => update({ [36389]: value }), bindVertexArray: (update, value) => update({ [34229]: value }), bindFramebuffer: (update, target, framebuffer) => { switch (target) { case 36160: return update({ [36006]: framebuffer, [36010]: framebuffer }); case 36009: return update({ [36006]: framebuffer }); case 36008: return update({ [36010]: framebuffer }); default: return null; } }, bindBuffer: (update, target, buffer) => { const pname = { [34962]: [34964], [36662]: [36662], [36663]: [36663], [35051]: [35053], [35052]: [35055] }[target]; if (pname) { return 
update({ [pname]: buffer }); } return { valueChanged: true }; }, blendColor: (update, r, g, b, a) => update({ [32773]: new Float32Array([r, g, b, a]) }), blendEquation: (update, mode) => update({ [32777]: mode, [34877]: mode }), blendEquationSeparate: (update, modeRGB, modeAlpha) => update({ [32777]: modeRGB, [34877]: modeAlpha }), blendFunc: (update, src, dst) => update({ [32969]: src, [32968]: dst, [32971]: src, [32970]: dst }), blendFuncSeparate: (update, srcRGB, dstRGB, srcAlpha, dstAlpha) => update({ [32969]: srcRGB, [32968]: dstRGB, [32971]: srcAlpha, [32970]: dstAlpha }), clearColor: (update, r, g, b, a) => update({ [3106]: new Float32Array([r, g, b, a]) }), clearDepth: (update, depth) => update({ [2931]: depth }), clearStencil: (update, s) => update({ [2961]: s }), colorMask: (update, r, g, b, a) => update({ [3107]: [r, g, b, a] }), cullFace: (update, mode) => update({ [2885]: mode }), depthFunc: (update, func) => update({ [2932]: func }), depthRange: (update, zNear, zFar) => update({ [2928]: new Float32Array([zNear, zFar]) }), depthMask: (update, mask) => update({ [2930]: mask }), frontFace: (update, face) => update({ [2886]: face }), lineWidth: (update, width) => update({ [2849]: width }), polygonOffset: (update, factor, units) => update({ [32824]: factor, [10752]: units }), sampleCoverage: (update, value, invert) => update({ [32938]: value, [32939]: invert }), scissor: (update, x, y, width, height) => update({ [3088]: new Int32Array([x, y, width, height]) }), stencilMask: (update, mask) => update({ [2968]: mask, [36005]: mask }), stencilMaskSeparate: (update, face, mask) => update({ [face === 1028 ? 2968 : 36005]: mask }), stencilFunc: (update, func, ref, mask) => update({ [2962]: func, [2967]: ref, [2963]: mask, [34816]: func, [36003]: ref, [36004]: mask }), stencilFuncSeparate: (update, face, func, ref, mask) => update({ [face === 1028 ? 2962 : 34816]: func, [face === 1028 ? 2967 : 36003]: ref, [face === 1028 ? 2963 : 36004]: mask }), stencilOp: (update, fail, zfail, zpass) => update({ [2964]: fail, [2965]: zfail, [2966]: zpass, [34817]: fail, [34818]: zfail, [34819]: zpass }), stencilOpSeparate: (update, face, fail, zfail, zpass) => update({ [face === 1028 ? 2964 : 34817]: fail, [face === 1028 ? 2965 : 34818]: zfail, [face === 1028 ? 
2966 : 34819]: zpass }), viewport: (update, x, y, width, height) => update({ [2978]: [x, y, width, height] }) }; var isEnabled = (gl, key) => gl.isEnabled(key); var GL_PARAMETER_GETTERS = { [3042]: isEnabled, [2884]: isEnabled, [2929]: isEnabled, [3024]: isEnabled, [32823]: isEnabled, [32926]: isEnabled, [32928]: isEnabled, [3089]: isEnabled, [2960]: isEnabled, [35977]: isEnabled }; var NON_CACHE_PARAMETERS = /* @__PURE__ */ new Set([ 34016, 36388, 36387, 35983, 35368, 34965, 35739, 35738, 3074, 34853, 34854, 34855, 34856, 34857, 34858, 34859, 34860, 34861, 34862, 34863, 34864, 34865, 34866, 34867, 34868, 35097, 32873, 35869, 32874, 34068 ]); // dist/context/parameters/unified-parameter-api.js function setGLParameters(gl, parameters) { if (isObjectEmpty(parameters)) { return; } const compositeSetters = {}; for (const key in parameters) { const glConstant = Number(key); const setter = GL_PARAMETER_SETTERS[key]; if (setter) { if (typeof setter === "string") { compositeSetters[setter] = true; } else { setter(gl, parameters[key], glConstant); } } } const cache = gl.state && gl.state.cache; if (cache) { for (const key in compositeSetters) { const compositeSetter = GL_COMPOSITE_PARAMETER_SETTERS[key]; compositeSetter(gl, parameters, cache); } } } function getGLParameters(gl, parameters = GL_PARAMETER_DEFAULTS) { if (typeof parameters === "number") { const key = parameters; const getter = GL_PARAMETER_GETTERS[key]; return getter ? getter(gl, key) : gl.getParameter(key); } const parameterKeys = Array.isArray(parameters) ? parameters : Object.keys(parameters); const state = {}; for (const key of parameterKeys) { const getter = GL_PARAMETER_GETTERS[key]; state[key] = getter ? getter(gl, Number(key)) : gl.getParameter(Number(key)); } return state; } function resetGLParameters(gl) { setGLParameters(gl, GL_PARAMETER_DEFAULTS); } function isObjectEmpty(object) { for (const key in object) { return false; } return true; } // dist/context/state-tracker/deep-array-equal.js function deepArrayEqual(x, y) { if (x === y) { return true; } const isArrayX = Array.isArray(x) || ArrayBuffer.isView(x); const isArrayY = Array.isArray(y) || ArrayBuffer.isView(y); if (isArrayX && isArrayY && x.length === y.length) { for (let i = 0; i < x.length; ++i) { if (x[i] !== y[i]) { return false; } } return true; } return false; } // dist/context/state-tracker/track-context-state.js var GLState = class { gl; program = null; stateStack = []; enable = true; cache; log; constructor(gl, { copyState = false, // Copy cache from params (slow) or initialize from WebGL defaults (fast) log: log9 = () => { } // Logging function, called when gl parameter change calls are actually issued } = {}) { this.gl = gl; this.cache = copyState ? getGLParameters(gl) : Object.assign({}, GL_PARAMETER_DEFAULTS); this.log = log9; this._updateCache = this._updateCache.bind(this); Object.seal(this); } push(values = {}) { this.stateStack.push({}); } pop() { (0, import_core.assert)(this.stateStack.length > 0); const oldValues = this.stateStack[this.stateStack.length - 1]; setGLParameters(this.gl, oldValues); this.stateStack.pop(); } /** // interceptor for context set functions - update our cache and our stack // values (Object) - the key values for this setter * @param values * @returns */ _updateCache(values) { let valueChanged = false; let oldValue; const oldValues = this.stateStack.length > 0 ? 
this.stateStack[this.stateStack.length - 1] : null; for (const key in values) { (0, import_core.assert)(key !== void 0); const value = values[key]; const cached = this.cache[key]; if (!deepArrayEqual(value, cached)) { valueChanged = true; oldValue = cached; if (oldValues && !(key in oldValues)) { oldValues[key] = cached; } this.cache[key] = value; } } return { valueChanged, oldValue }; } }; function getContextState(gl) { return gl.state; } function trackContextState(gl, options) { const { enable: enable2 = true, copyState } = options; (0, import_core.assert)(copyState !== void 0); if (!gl.state) { gl.state = new GLState(gl, { copyState }); installProgramSpy(gl); for (const key in GL_HOOKED_SETTERS) { const setter = GL_HOOKED_SETTERS[key]; installSetterSpy(gl, key, setter); } installGetterOverride(gl, "getParameter"); installGetterOverride(gl, "isEnabled"); } const glState = getContextState(gl); glState.enable = enable2; return gl; } function pushContextState(gl) { let glState = getContextState(gl); if (!glState) { trackContextState(gl, { copyState: false }); glState = getContextState(gl); } glState.push(); } function popContextState(gl) { const glState = getContextState(gl); (0, import_core.assert)(glState); glState.pop(); } function installGetterOverride(gl, functionName) { const originalGetterFunc = gl[functionName].bind(gl); gl[functionName] = function get(pname) { if (pname === void 0 || NON_CACHE_PARAMETERS.has(pname)) { return originalGetterFunc(pname); } const glState = getContextState(gl); if (!(pname in glState.cache)) { glState.cache[pname] = originalGetterFunc(pname); } return glState.enable ? ( // Call the getter the params so that it can e.g. serve from a cache glState.cache[pname] ) : ( // Optionally call the original function to do a "hard" query from the WebGL2RenderingContext originalGetterFunc(pname) ); }; Object.defineProperty(gl[functionName], "name", { value: `${functionName}-from-cache`, configurable: false }); } function installSetterSpy(gl, functionName, setter) { if (!gl[functionName]) { return; } const originalSetterFunc = gl[functionName].bind(gl); gl[functionName] = function set(...params) { const glState = getContextState(gl); const { valueChanged, oldValue } = setter(glState._updateCache, ...params); if (valueChanged) { originalSetterFunc(...params); } return oldValue; }; Object.defineProperty(gl[functionName], "name", { value: `${functionName}-to-cache`, configurable: false }); } function installProgramSpy(gl) { const originalUseProgram = gl.useProgram.bind(gl); gl.useProgram = function useProgramLuma(handle) { const glState = getContextState(gl); if (glState.program !== handle) { originalUseProgram(handle); glState.program = handle; } }; } // dist/context/helpers/create-browser-context.js var DEFAULT_CONTEXT_PROPS = { powerPreference: "high-performance", // After all, most apps are using WebGL for performance reasons // eslint-disable-next-line no-console onContextLost: () => console.error("WebGL context lost"), // eslint-disable-next-line no-console onContextRestored: () => console.info("WebGL context restored") }; function createBrowserContext(canvas, props) { props = { ...DEFAULT_CONTEXT_PROPS, ...props }; let errorMessage = null; const onCreateError = (error) => errorMessage = error.statusMessage || errorMessage; canvas.addEventListener("webglcontextcreationerror", onCreateError, false); let gl = null; gl ||= canvas.getContext("webgl2", props); canvas.removeEventListener("webglcontextcreationerror", onCreateError, false); if (!gl) { throw new 
Error(`Failed to create WebGL context: ${errorMessage || "Unknown error"}`); } if (props.onContextLost) { const { onContextLost } = props; canvas.addEventListener("webglcontextlost", (event) => onContextLost(event), false); } if (props.onContextRestored) { const { onContextRestored } = props; canvas.addEventListener("webglcontextrestored", (event) => onContextRestored(event), false); } return gl; } // dist/adapter/device-helpers/webgl-device-info.js var import_constants2 = require("@luma.gl/constants"); // dist/context/helpers/webgl-extensions.js function getWebGLExtension(gl, name, extensions) { if (extensions[name] === void 0) { extensions[name] = gl.getExtension(name) || null; } return extensions[name]; } // dist/adapter/device-helpers/webgl-device-info.js function getDeviceInfo(gl, extensions) { const vendorMasked = gl.getParameter(7936); const rendererMasked = gl.getParameter(7937); getWebGLExtension(gl, "WEBGL_debug_renderer_info", extensions); const ext = extensions.WEBGL_debug_renderer_info; const vendorUnmasked = gl.getParameter(ext ? ext.UNMASKED_VENDOR_WEBGL : 7936); const rendererUnmasked = gl.getParameter(ext ? ext.UNMASKED_RENDERER_WEBGL : 7937); const vendor = vendorUnmasked || vendorMasked; const renderer = rendererUnmasked || rendererMasked; const version = gl.getParameter(7938); const gpu = identifyGPUVendor(vendor, renderer); const gpuBackend = identifyGPUBackend(vendor, renderer); const gpuType = identifyGPUType(vendor, renderer); const shadingLanguage = "glsl"; const shadingLanguageVersion = 300; return { type: "webgl", gpu, gpuType, gpuBackend, vendor, renderer, version, shadingLanguage, shadingLanguageVersion }; } function identifyGPUVendor(vendor, renderer) { if (/NVIDIA/i.exec(vendor) || /NVIDIA/i.exec(renderer)) { return "nvidia"; } if (/INTEL/i.exec(vendor) || /INTEL/i.exec(renderer)) { return "intel"; } if (/Apple/i.exec(vendor) || /Apple/i.exec(renderer)) { return "apple"; } if (/AMD/i.exec(vendor) || /AMD/i.exec(renderer) || /ATI/i.exec(vendor) || /ATI/i.exec(renderer)) { return "amd"; } if (/SwiftShader/i.exec(vendor) || /SwiftShader/i.exec(renderer)) { return "software"; } return "unknown"; } function identifyGPUBackend(vendor, renderer) { if (/Metal/i.exec(vendor) || /Metal/i.exec(renderer)) { return "metal"; } if (/ANGLE/i.exec(vendor) || /ANGLE/i.exec(renderer)) { return "opengl"; } return "unknown"; } function identifyGPUType(vendor, renderer) { if (/SwiftShader/i.exec(vendor) || /SwiftShader/i.exec(renderer)) { return "cpu"; } const gpuVendor = identifyGPUVendor(vendor, renderer); switch (gpuVendor) { case "intel": return "integrated"; case "software": return "cpu"; case "unknown": return "unknown"; default: return "discrete"; } } // dist/adapter/device-helpers/webgl-device-features.js var import_core3 = require("@luma.gl/core"); // dist/adapter/converters/texture-formats.js var import_core2 = require("@luma.gl/core"); var import_constants4 = require("@luma.gl/constants"); // dist/adapter/converters/vertex-formats.js var import_constants3 = require("@luma.gl/constants"); function getGLFromVertexType(dataType) { switch (dataType) { case "uint8": return 5121; case "sint8": return 5120; case "unorm8": return 5121; case "snorm8": return 5120; case "uint16": return 5123; case "sint16": return 5122; case "unorm16": return 5123; case "snorm16": return 5122; case "uint32": return 5125; case "sint32": return 5124; case "float16": return 5131; case "float32": return 5126; } throw new Error(String(dataType)); } // dist/adapter/converters/texture-formats.js var 
texture_compression_bc = "texture-compression-bc"; var texture_compression_astc = "texture-compression-astc"; var texture_compression_etc2 = "texture-compression-etc2"; var texture_compression_etc1_webgl = "texture-compression-etc1-webgl"; var texture_compression_pvrtc_webgl = "texture-compression-pvrtc-webgl"; var texture_compression_atc_webgl = "texture-compression-atc-webgl"; var float32_renderable = "float32-renderable-webgl"; var float16_renderable = "float16-renderable-webgl"; var rgb9e5ufloat_renderable = "rgb9e5ufloat_renderable-webgl"; var snorm8_renderable = "snorm8-renderable-webgl"; var norm16_renderable = "norm16-renderable-webgl"; var snorm16_renderable = "snorm16-renderable-webgl"; var float32_filterable = "float32-filterable"; var float16_filterable = "float16-filterable-webgl"; var X_S3TC = "WEBGL_compressed_texture_s3tc"; var X_S3TC_SRGB = "WEBGL_compressed_texture_s3tc_srgb"; var X_RGTC = "EXT_texture_compression_rgtc"; var X_BPTC = "EXT_texture_compression_bptc"; var X_ETC2 = "WEBGL_compressed_texture_etc"; var X_ASTC = "WEBGL_compressed_texture_astc"; var X_ETC1 = "WEBGL_compressed_texture_etc1"; var X_PVRTC = "WEBGL_compressed_texture_pvrtc"; var X_ATC = "WEBGL_compressed_texture_atc"; var EXT_texture_norm16 = "EXT_texture_norm16"; var EXT_render_snorm = "EXT_render_snorm"; var EXT_color_buffer_float = "EXT_color_buffer_float"; var TEXTURE_FEATURES = { "float32-renderable-webgl": ["EXT_color_buffer_float"], "float16-renderable-webgl": ["EXT_color_buffer_half_float"], "rgb9e5ufloat_renderable-webgl": ["WEBGL_render_shared_exponent"], "snorm8-renderable-webgl": [EXT_render_snorm], "norm16-renderable-webgl": [EXT_texture_norm16], "snorm16-renderable-webgl": [EXT_texture_norm16, EXT_render_snorm], "float32-filterable": ["OES_texture_float_linear"], "float16-filterable-webgl": ["OES_texture_half_float_linear"], "texture-filterable-anisotropic-webgl": ["EXT_texture_filter_anisotropic"], "texture-blend-float-webgl": ["EXT_float_blend"], "texture-compression-bc": [X_S3TC, X_S3TC_SRGB, X_RGTC, X_BPTC], // 'texture-compression-bc3-srgb-webgl': [X_S3TC_SRGB], // 'texture-compression-bc3-webgl': [X_S3TC], "texture-compression-bc5-webgl": [X_RGTC], "texture-compression-bc7-webgl": [X_BPTC], "texture-compression-etc2": [X_ETC2], "texture-compression-astc": [X_ASTC], "texture-compression-etc1-webgl": [X_ETC1], "texture-compression-pvrtc-webgl": [X_PVRTC], "texture-compression-atc-webgl": [X_ATC] }; function isTextureFeature(feature) { return feature in TEXTURE_FEATURES; } function checkTextureFeature(gl, feature, extensions) { const textureExtensions = TEXTURE_FEATURES[feature] || []; return textureExtensions.every((extension) => getWebGLExtension(gl, extension, extensions)); } var TEXTURE_FORMATS = { // Unsized formats that leave the precision up to the driver. 
TODO - Fix bpp constants "rgb8unorm-unsized": { gl: 6407, b: 4, c: 2, bpp: 4, dataFormat: 6407, types: [5121, 33635] }, "rgba8unorm-unsized": { gl: 6408, b: 4, c: 2, bpp: 4, dataFormat: 6408, types: [5121, 32819, 32820] }, // 'r8unorm-unsized': {gl: GL.LUMINANCE, b: 4, c: 2, bpp: 4}, // 'rgb8unorm-srgb-unsized': {gl: GL.SRGB_EXT, b: 4, c: 2, bpp: 4, gl1Ext: SRGB}, // 'rgba8unorm-srgb-unsized': {gl: GL.SRGB_ALPHA_EXT, b: 4, c: 2, bpp: 4, gl1Ext: SRGB}, // 8-bit formats "r8unorm": { gl: 33321, b: 1, c: 1, rb: true }, "r8snorm": { gl: 36756, b: 1, c: 1, render: snorm8_renderable }, "r8uint": { gl: 33330, b: 1, c: 1, rb: true }, "r8sint": { gl: 33329, b: 1, c: 1, rb: true }, // 16-bit formats "rg8unorm": { gl: 33323, b: 2, c: 2, rb: true }, "rg8snorm": { gl: 36757, b: 2, c: 2, render: snorm8_renderable }, "rg8uint": { gl: 33336, b: 2, c: 2, rb: true }, "rg8sint": { gl: 33335, b: 2, c: 2, rb: true }, "r16uint": { gl: 33332, b: 2, c: 1, rb: true }, "r16sint": { gl: 33331, b: 2, c: 1, rb: true }, "r16float": { gl: 33325, b: 2, c: 1, render: float16_renderable, filter: "float16-filterable-webgl", rb: true }, "r16unorm-webgl": { gl: 33322, b: 2, c: 1, f: norm16_renderable, rb: true }, "r16snorm-webgl": { gl: 36760, b: 2, c: 1, f: snorm16_renderable }, // Packed 16-bit formats "rgba4unorm-webgl": { gl: 32854, b: 2, c: 4, wgpu: false, rb: true }, "rgb565unorm-webgl": { gl: 36194, b: 2, c: 4, wgpu: false, rb: true }, "rgb5a1unorm-webgl": { gl: 32855, b: 2, c: 4, wgpu: false, rb: true }, // 24-bit formats "rgb8unorm-webgl": { gl: 32849, b: 3, c: 3, wgpu: false }, "rgb8snorm-webgl": { gl: 36758, b: 3, c: 3, wgpu: false }, // 32-bit formats "rgba8unorm": { gl: 32856, b: 4, c: 2, bpp: 4 }, "rgba8unorm-srgb": { gl: 35907, b: 4, c: 4, bpp: 4 }, "rgba8snorm": { gl: 36759, b: 4, c: 4, render: snorm8_renderable }, "rgba8uint": { gl: 36220, b: 4, c: 4, bpp: 4 }, "rgba8sint": { gl: 36238, b: 4, c: 4, bpp: 4 }, // reverse colors, webgpu only "bgra8unorm": { b: 4, c: 4 }, "bgra8unorm-srgb": { b: 4, c: 4 }, "rg16uint": { gl: 33338, b: 4, c: 1, bpp: 4 }, "rg16sint": { gl: 33337, b: 4, c: 2, bpp: 4 }, // When using a WebGL 2 context and the EXT_color_buffer_float WebGL2 extension "rg16float": { gl: 33327, bpp: 4, b: 4, c: 2, render: float16_renderable, filter: float16_filterable, rb: true }, "rg16unorm-webgl": { gl: 33324, b: 2, c: 2, render: norm16_renderable }, "rg16snorm-webgl": { gl: 36761, b: 2, c: 2, render: snorm16_renderable }, "r32uint": { gl: 33334, b: 4, c: 1, bpp: 4, rb: true }, "r32sint": { gl: 33333, b: 4, c: 1, bpp: 4, rb: true }, "r32float": { gl: 33326, bpp: 4, b: 4, c: 1, render: float32_renderable, filter: float32_filterable }, // Packed 32-bit formats "rgb9e5ufloat": { gl: 35901, b: 4, c: 3, p: 1, render: rgb9e5ufloat_renderable }, // , filter: true}, "rg11b10ufloat": { gl: 35898, b: 4, c: 3, p: 1, render: float32_renderable, rb: true }, "rgb10a2unorm": { gl: 32857, b: 4, c: 4, p: 1, rb: true }, "rgb10a2uint-webgl": { b: 4, c: 4, gl: 36975, p: 1, wgpu: false, bpp: 4, rb: true }, // 48-bit formats "rgb16unorm-webgl": { gl: 32852, b: 2, c: 3, f: norm16_renderable }, // rgb not renderable "rgb16snorm-webgl": { gl: 36762, b: 2, c: 3, f: norm16_renderable }, // rgb not renderable // 64-bit formats "rg32uint": { gl: 33340, b: 8, c: 2, rb: true }, "rg32sint": { gl: 33339, b: 8, c: 2, rb: true }, "rg32float": { gl: 33328, b: 8, c: 2, render: float32_renderable, filter: float32_filterable, rb: true }, "rgba16uint": { gl: 36214, b: 8, c: 4, rb: true }, "rgba16sint": { gl: 36232, b: 8, c: 4, rb: true }, 
"rgba16float": { gl: 34842, b: 8, c: 4, render: float16_renderable, filter: float16_filterable }, "rgba16unorm-webgl": { gl: 32859, b: 2, c: 4, render: norm16_renderable, rb: true }, "rgba16snorm-webgl": { gl: 36763, b: 2, c: 4, render: snorm16_renderable }, // 96-bit formats (deprecated!) "rgb32float-webgl": { gl: 34837, render: float32_renderable, filter: float32_filterable, gl2ext: EXT_color_buffer_float, dataFormat: 6407, types: [5126] }, // 128-bit formats "rgba32uint": { gl: 36208, b: 16, c: 4, rb: true }, "rgba32sint": { gl: 36226, b: 16, c: 4, rb: true }, "rgba32float": { gl: 34836, b: 16, c: 4, render: float32_renderable, filter: float32_filterable, rb: true }, // Depth and stencil formats "stencil8": { gl: 36168, b: 1, c: 1, attachment: 36128, rb: true }, // 8 stencil bits "depth16unorm": { gl: 33189, b: 2, c: 1, attachment: 36096, dataFormat: 6402, types: [5123], rb: true }, // 16 depth bits "depth24plus": { gl: 33190, b: 3, c: 1, attachment: 36096, dataFormat: 6402, types: [5125] }, "depth32float": { gl: 36012, b: 4, c: 1, attachment: 36096, dataFormat: 6402, types: [5126], rb: true }, // The depth component of the "depth24plus" and "depth24plus-stencil8" formats may be implemented as either a 24-bit depth value or a "depth32float" value. "depth24plus-stencil8": { gl: 35056, b: 4, c: 2, p: 1, attachment: 33306, rb: true, depthTexture: true, dataFormat: 34041, types: [34042] }, // "depth24unorm-stencil8" feature "depth24unorm-stencil8": { gl: 35056, b: 4, c: 2, p: 1, attachment: 33306, dataFormat: 34041, types: [34042], rb: true }, // "depth32float-stencil8" feature - TODO below is render buffer only? "depth32float-stencil8": { gl: 36013, b: 5, c: 2, p: 1, attachment: 33306, dataFormat: 34041, types: [36269], rb: true }, // BC compressed formats: check device.features.has("texture-compression-bc"); "bc1-rgb-unorm-webgl": { gl: 33776, x: X_S3TC, f: texture_compression_bc }, "bc1-rgb-unorm-srgb-webgl": { gl: 35916, x: X_S3TC_SRGB, f: texture_compression_bc }, "bc1-rgba-unorm": { gl: 33777, x: X_S3TC, f: texture_compression_bc }, "bc1-rgba-unorm-srgb": { gl: 35916, x: X_S3TC_SRGB, f: texture_compression_bc }, "bc2-rgba-unorm": { gl: 33778, x: X_S3TC, f: texture_compression_bc }, "bc2-rgba-unorm-srgb": { gl: 35918, x: X_S3TC_SRGB, f: texture_compression_bc }, "bc3-rgba-unorm": { gl: 33779, x: X_S3TC, f: texture_compression_bc }, "bc3-rgba-unorm-srgb": { gl: 35919, x: X_S3TC_SRGB, f: texture_compression_bc }, "bc4-r-unorm": { gl: 36283, x: X_RGTC, f: texture_compression_bc }, "bc4-r-snorm": { gl: 36284, x: X_RGTC, f: texture_compression_bc }, "bc5-rg-unorm": { gl: 36285, x: X_RGTC, f: texture_compression_bc }, "bc5-rg-snorm": { gl: 36286, x: X_RGTC, f: texture_compression_bc }, "bc6h-rgb-ufloat": { gl: 36495, x: X_BPTC, f: texture_compression_bc }, "bc6h-rgb-float": { gl: 36494, x: X_BPTC, f: texture_compression_bc }, "bc7-rgba-unorm": { gl: 36492, x: X_BPTC, f: texture_compression_bc }, "bc7-rgba-unorm-srgb": { gl: 36493, x: X_BPTC, f: texture_compression_bc }, // WEBGL_compressed_texture_etc: device.features.has("texture-compression-etc2") // Note: Supposedly guaranteed availability compressed formats in WebGL2, but through CPU decompression "etc2-rgb8unorm": { gl: 37492, f: texture_compression_etc2 }, "etc2-rgb8unorm-srgb": { gl: 37494, f: texture_compression_etc2 }, "etc2-rgb8a1unorm": { gl: 37496, f: texture_compression_etc2 }, "etc2-rgb8a1unorm-srgb": { gl: 37497, f: texture_compression_etc2 }, "etc2-rgba8unorm": { gl: 37493, f: texture_compression_etc2 }, 
"etc2-rgba8unorm-srgb": { gl: 37495, f: texture_compression_etc2 }, "eac-r11unorm": { gl: 37488, f: texture_compression_etc2 }, "eac-r11snorm": { gl: 37489, f: texture_compression_etc2 }, "eac-rg11unorm": { gl: 37490, f: texture_compression_etc2 }, "eac-rg11snorm": { gl: 37491, f: texture_compression_etc2 }, // X_ASTC compressed formats: device.features.has("texture-compression-astc") "astc-4x4-unorm": { gl: 37808, f: texture_compression_astc }, "astc-4x4-unorm-srgb": { gl: 37840, f: texture_compression_astc }, "astc-5x4-unorm": { gl: 37809, f: texture_compression_astc }, "astc-5x4-unorm-srgb": { gl: 37841, f: texture_compression_astc }, "astc-5x5-unorm": { gl: 37810, f: texture_compression_astc }, "astc-5x5-unorm-srgb": { gl: 37842, f: texture_compression_astc }, "astc-6x5-unorm": { gl: 37811, f: texture_compression_astc }, "astc-6x5-unorm-srgb": { gl: 37843, f: texture_compression_astc }, "astc-6x6-unorm": { gl: 37812, f: texture_compression_astc }, "astc-6x6-unorm-srgb": { gl: 37844, f: texture_compression_astc }, "astc-8x5-unorm": { gl: 37813, f: texture_compression_astc }, "astc-8x5-unorm-srgb": { gl: 37845, f: texture_compression_astc }, "astc-8x6-unorm": { gl: 37814, f: texture_compression_astc }, "astc-8x6-unorm-srgb": { gl: 37846, f: texture_compression_astc }, "astc-8x8-unorm": { gl: 37815, f: texture_compression_astc }, "astc-8x8-unorm-srgb": { gl: 37847, f: texture_compression_astc }, "astc-10x5-unorm": { gl: 37819, f: texture_compression_astc }, "astc-10x5-unorm-srgb": { gl: 37851, f: texture_compression_astc }, "astc-10x6-unorm": { gl: 37817, f: texture_compression_astc }, "astc-10x6-unorm-srgb": { gl: 37849, f: texture_compression_astc }, "astc-10x8-unorm": { gl: 37818, f: texture_compression_astc }, "astc-10x8-unorm-srgb": { gl: 37850, f: texture_compression_astc }, "astc-10x10-unorm": { gl: 37819, f: texture_compression_astc }, "astc-10x10-unorm-srgb": { gl: 37851, f: texture_compression_astc }, "astc-12x10-unorm": { gl: 37820, f: texture_compression_astc }, "astc-12x10-unorm-srgb": { gl: 37852, f: texture_compression_astc }, "astc-12x12-unorm": { gl: 37821, f: texture_compression_astc }, "astc-12x12-unorm-srgb": { gl: 37853, f: texture_compression_astc }, // WEBGL_compressed_texture_pvrtc "pvrtc-rgb4unorm-webgl": { gl: 35840, f: texture_compression_pvrtc_webgl }, "pvrtc-rgba4unorm-webgl": { gl: 35842, f: texture_compression_pvrtc_webgl }, "pvrtc-rbg2unorm-webgl": { gl: 35841, f: texture_compression_pvrtc_webgl }, "pvrtc-rgba2unorm-webgl": { gl: 35843, f: texture_compression_pvrtc_webgl }, // WEBGL_compressed_texture_etc1 "etc1-rbg-unorm-webgl": { gl: 36196, f: texture_compression_etc1_webgl }, // WEBGL_compressed_texture_atc "atc-rgb-unorm-webgl": { gl: 35986, f: texture_compression_atc_webgl }, "atc-rgba-unorm-webgl": { gl: 35986, f: texture_compression_atc_webgl }, "atc-rgbai-unorm-webgl": { gl: 34798, f: texture_compression_atc_webgl } }; var DATA_FORMAT_CHANNELS = { [6403]: 1, [36244]: 1, [33319]: 2, [33320]: 2, [6407]: 3, [36248]: 3, [6408]: 4, [36249]: 4, [6402]: 1, [34041]: 1, [6406]: 1, [6409]: 1, [6410]: 2 }; var TYPE_SIZES = { [5126]: 4, [5125]: 4, [5124]: 4, [5123]: 2, [5122]: 2, [5131]: 2, [5120]: 1, [5121]: 1 }; function isTextureFormatSupported(gl, format, extensions) { const info = TEXTURE_FORMATS[format]; if (!info) { return false; } if (info.gl === void 0) { return false; } const extension = info.x || info.gl2ext; if (extension) { return Boolean(getWebGLExtension(gl, extension, extensions)); } return true; } function isRenderbufferFormatSupported(gl, 
format, extensions) { var _a; return isTextureFormatSupported(gl, format, extensions) && ((_a = TEXTURE_FORMATS[format]) == null ? void 0 : _a.rb); } function convertGLToTextureFormat(format) { if (typeof format === "string") { return format; } const entry = Object.entries(TEXTURE_FORMATS).find(([, entry2]) => entry2.gl === format); if (!entry) { throw new Error(`Unknown texture format ${format}`); } return entry[0]; } function convertTextureFormatToGL(format) { const formatInfo = TEXTURE_FORMATS[format]; const webglFormat = formatInfo == null ? void 0 : formatInfo.gl; if (webglFormat === void 0) { throw new Error(`Unsupported texture format ${format}`); } return webglFormat; } function isTextureFormatFilterable(gl, format, extensions) { if (!isTextureFormatSupported(gl, format, extensions)) { return false; } if (format.startsWith("depth") || format.startsWith("stencil")) { return false; } try { const decoded = (0, import_core2.decodeTextureFormat)(format); if (decoded.signed) { return false; } } catch { return false; } if (format.endsWith("32float")) { return Boolean(getWebGLExtension(gl, "OES_texture_float_linear", extensions)); } if (format.endsWith("16float")) { return Boolean(getWebGLExtension(gl, "OES_texture_half_float_linear", extensions)); } return true; } function isTextureFormatRenderable(gl, format, extensions) { if (!isTextureFormatSupported(gl, format, extensions)) { return false; } if (typeof format === "number") { return false; } return true; } function getWebGLTextureParameters(format) { var _a; const formatData = TEXTURE_FORMATS[format]; const webglFormat = convertTextureFormatToGL(format); const decoded = (0, import_core2.decodeTextureFormat)(format); return { format: webglFormat, dataFormat: (formatData == null ? void 0 : formatData.dataFormat) || getWebGLPixelDataFormat(decoded.format, decoded.integer, decoded.normalized, webglFormat), // depth formats don't have a type type: decoded.dataType ? getGLFromVertexType(decoded.dataType) : ((_a = formatData == null ? void 0 : formatData.types) == null ? void 0 : _a[0]) || 5121, // @ts-expect-error compressed: decoded.compressed }; } function getDepthStencilAttachmentWebGL(format) { const info = TEXTURE_FORMATS[format]; if (!(info == null ? void 0 : info.attachment)) { throw new Error(`${format} is not a depth stencil format`); } return info.attachment; } function getTextureFormatBytesPerPixel(format) { const params = getWebGLTextureParameters(format); const channels = DATA_FORMAT_CHANNELS[params.dataFormat] || 4; const channelSize = TYPE_SIZES[params.type] || 1; return channels * channelSize; } function getWebGLPixelDataFormat(dataFormat, integer, normalized, format) { if (format === 6408 || format === 6407) { return format; } switch (dataFormat) { case "r": return integer && !normalized ? 36244 : 6403; case "rg": return integer && !normalized ? 33320 : 33319; case "rgb": return integer && !normalized ? 36248 : 6407; case "rgba": return integer && !normalized ?
36249 : 6408; default: return 6408; } } // dist/adapter/device-helpers/webgl-device-features.js var WEBGL_FEATURES = { // optional WebGPU features "depth-clip-control": "EXT_depth_clamp", // TODO these seem subtly different // 'timestamp-query' // GPUQueryType "timestamp-query" // "indirect-first-instance" // Textures are handled by getTextureFeatures() // 'depth24unorm-stencil8' // GPUTextureFormat 'depth24unorm-stencil8' // 'depth32float-stencil8' // GPUTextureFormat 'depth32float-stencil8' // optional WebGL features "timer-query-webgl": "EXT_disjoint_timer_query_webgl2", "compilation-status-async-webgl": "KHR_parallel_shader_compile", "polygon-mode-webgl": "WEBGL_polygon_mode", "provoking-vertex-webgl": "WEBGL_provoking_vertex", "shader-clip-cull-distance-webgl": "WEBGL_clip_cull_distance", "shader-noperspective-interpolation-webgl": "NV_shader_noperspective_interpolation", "shader-conservative-depth-webgl": "EXT_conservative_depth" // Textures are handled by getTextureFeatures() }; var WebGLDeviceFeatures = class extends import_core3.DeviceFeatures { gl; extensions; testedFeatures = /* @__PURE__ */ new Set(); constructor(gl, extensions, disabledFeatures) { super([], disabledFeatures); this.gl = gl; this.extensions = extensions; getWebGLExtension(gl, "EXT_color_buffer_float", extensions); } *[Symbol.iterator]() { const features = this.getFeatures(); for (const feature of features) { if (this.has(feature)) { yield feature; } } return []; } has(feature) { if (this.disabledFeatures[feature]) { return false; } if (!this.testedFeatures.has(feature)) { this.testedFeatures.add(feature); if (isTextureFeature(feature) && checkTextureFeature(this.gl, feature, this.extensions)) { this.features.add(feature); } if (this.getWebGLFeature(feature)) { this.features.add(feature); } } return this.features.has(feature); } // FOR DEVICE initializeFeatures() { const features = this.getFeatures().filter((feature) => feature !== "polygon-mode-webgl"); for (const feature of features) { this.has(feature); } } // IMPLEMENTATION getFeatures() { return [...Object.keys(WEBGL_FEATURES), ...Object.keys(TEXTURE_FEATURES)]; } /** Extract all WebGL features */ getWebGLFeature(feature) { const featureInfo = WEBGL_FEATURES[feature]; const isSupported = typeof featureInfo === "string" ? 
Boolean(getWebGLExtension(this.gl, featureInfo, this.extensions)) : Boolean(featureInfo); return isSupported; } }; // dist/adapter/device-helpers/webgl-device-limits.js var import_core4 = require("@luma.gl/core"); var import_constants5 = require("@luma.gl/constants"); var WebGLDeviceLimits = class extends import_core4.DeviceLimits { get maxTextureDimension1D() { return 0; } // WebGL does not support 1D textures get maxTextureDimension2D() { return this.getParameter(3379); } get maxTextureDimension3D() { return this.getParameter(32883); } get maxTextureArrayLayers() { return this.getParameter(35071); } get maxBindGroups() { return 0; } get maxDynamicUniformBuffersPerPipelineLayout() { return 0; } // TBD get maxDynamicStorageBuffersPerPipelineLayout() { return 0; } // TBD get maxSampledTexturesPerShaderStage() { return this.getParameter(35660); } // ) TBD get maxSamplersPerShaderStage() { return this.getParameter(35661); } get maxStorageBuffersPerShaderStage() { return 0; } // TBD get maxStorageTexturesPerShaderStage() { return 0; } // TBD get maxUniformBuffersPerShaderStage() { return this.getParameter(35375); } get maxUniformBufferBindingSize() { return this.getParameter(35376); } get maxStorageBufferBindingSize() { return 0; } get minUniformBufferOffsetAlignment() { return this.getParameter(35380); } get minStorageBufferOffsetAlignment() { return 0; } get maxVertexBuffers() { return 16; } // WebGL 2 supports 16 buffers, see https://github.com/gpuweb/gpuweb/issues/4284 get maxVertexAttributes() { return this.getParameter(34921); } get maxVertexBufferArrayStride() { return 2048; } // TBD, this is just the default value from WebGPU get maxInterStageShaderComponents() { return this.getParameter(35659); } get maxComputeWorkgroupStorageSize() { return 0; } // WebGL does not support compute shaders get maxComputeInvocationsPerWorkgroup() { return 0; } // WebGL does not support compute shaders get maxComputeWorkgroupSizeX() { return 0; } // WebGL does not support compute shaders get maxComputeWorkgroupSizeY() { return 0; } // WebGL does not support compute shaders get maxComputeWorkgroupSizeZ() { return 0; } // WebGL does not support compute shaders get maxComputeWorkgroupsPerDimension() { return 0; } // WebGL does not support compute shaders // PRIVATE gl; limits = {}; constructor(gl) { super(); this.gl = gl; } getParameter(parameter) { if (this.limits[parameter] === void 0) { this.limits[parameter] = this.gl.getParameter(parameter); } return this.limits[parameter]; } }; // dist/adapter/webgl-canvas-context.js var import_core11 = require("@luma.gl/core"); // dist/adapter/resources/webgl-framebuffer.js var import_core10 = require("@luma.gl/core"); var import_constants11 = require("@luma.gl/constants"); // dist/adapter/resources/webgl-texture.js var import_core9 = require("@luma.gl/core"); var import_constants10 = require("@luma.gl/constants"); // dist/context/state-tracker/with-parameters.js function withGLParameters(gl, parameters, func) { if (isObjectEmpty2(parameters)) { return func(gl); } const { nocatch = true } = parameters; pushContextState(gl); setGLParameters(gl, parameters); let value; if (nocatch) { value = func(gl); popContextState(gl); } else { try { value = func(gl); } finally { popContextState(gl); } } return value; } function isObjectEmpty2(object) { for (const key in object) { return false; } return true; } // dist/adapter/converters/sampler-parameters.js var import_constants7 = require("@luma.gl/constants"); // dist/adapter/converters/device-parameters.js var import_core5 = 
require("@luma.gl/core"); var import_constants6 = require("@luma.gl/constants"); function withDeviceAndGLParameters(device, parameters, glParameters, func) { if ((0, import_core5.isObjectEmpty)(parameters)) { return func(device); } const webglDevice = device; pushContextState(webglDevice.gl); try { setDeviceParameters(device, parameters); setGLParameters(webglDevice.gl, glParameters); return func(device); } finally { popContextState(webglDevice.gl); } } function withDeviceParameters(device, parameters, func) { if ((0, import_core5.isObjectEmpty)(parameters)) { return func(device); } pushContextState(device.gl); try { setDeviceParameters(device, parameters); return func(device); } finally { popContextState(device.gl); } } function setDeviceParameters(device, parameters) { const webglDevice = device; const { gl } = webglDevice; if (parameters.cullMode) { switch (parameters.cullMode) { case "none": gl.disable(2884); break; case "front": gl.enable(2884); gl.cullFace(1028); break; case "back": gl.enable(2884); gl.cullFace(1029); break; } } if (parameters.frontFace) { gl.frontFace(map("frontFace", parameters.frontFace, { ccw: 2305, cw: 2304 })); } if (parameters.unclippedDepth) { if (device.features.has("depth-clip-control")) { gl.enable(34383); } } if (parameters.depthBias !== void 0) { gl.enable(32823); gl.polygonOffset(parameters.depthBias, parameters.depthBiasSlopeScale || 0); } if (parameters.provokingVertex) { if (device.features.has("provoking-vertex-webgl")) { const extensions = webglDevice.getExtension("WEBGL_provoking_vertex"); const ext = extensions.WEBGL_provoking_vertex; const vertex = map("provokingVertex", parameters.provokingVertex, { first: 36429, last: 36430 }); ext == null ? void 0 : ext.provokingVertexWEBGL(vertex); } } if (parameters.polygonMode || parameters.polygonOffsetLine) { if (device.features.has("polygon-mode-webgl")) { if (parameters.polygonMode) { const extensions = webglDevice.getExtension("WEBGL_polygon_mode"); const ext = extensions.WEBGL_polygon_mode; const mode = map("polygonMode", parameters.polygonMode, { fill: 6914, line: 6913 }); ext == null ? void 0 : ext.polygonModeWEBGL(1028, mode); ext == null ? void 0 : ext.polygonModeWEBGL(1029, mode); } if (parameters.polygonOffsetLine) { gl.enable(10754); } } } if (device.features.has("shader-clip-cull-distance-webgl")) { if (parameters.clipDistance0) { gl.enable(12288); } if (parameters.clipDistance1) { gl.enable(12289); } if (parameters.clipDistance2) { gl.enable(12290); } if (parameters.clipDistance3) { gl.enable(12291); } if (parameters.clipDistance4) { gl.enable(12292); } if (parameters.clipDistance5) { gl.enable(12293); } if (parameters.clipDistance6) { gl.enable(12294); } if (parameters.clipDistance7) { gl.enable(12295); } } if (parameters.depthWriteEnabled !== void 0) { gl.depthMask(mapBoolean("depthWriteEnabled", parameters.depthWriteEnabled)); } if (parameters.depthCompare) { parameters.depthCompare !== "always" ? gl.enable(2929) : gl.disable(2929); gl.depthFunc(convertCompareFunction("depthCompare", parameters.depthCompare)); } if (parameters.stencilWriteMask) { const mask = parameters.stencilWriteMask; gl.stencilMaskSeparate(1028, mask); gl.stencilMaskSeparate(1029, mask); } if (parameters.stencilReadMask) { import_core5.log.warn("stencilReadMask not supported under WebGL"); } if (parameters.stencilCompare) { const mask = parameters.stencilReadMask || 4294967295; const glValue = convertCompareFunction("depthCompare", parameters.stencilCompare); parameters.stencilCompare !== "always" ? 
gl.enable(2960) : gl.disable(2960); gl.stencilFuncSeparate(1028, glValue, 0, mask); gl.stencilFuncSeparate(1029, glValue, 0, mask); } if (parameters.stencilPassOperation && parameters.stencilFailOperation && parameters.stencilDepthFailOperation) { const dppass = convertStencilOperation("stencilPassOperation", parameters.stencilPassOperation); const sfail = convertStencilOperation("stencilFailOperation", parameters.stencilFailOperation); const dpfail = convertStencilOperation("stencilDepthFailOperation", parameters.stencilDepthFailOperation); gl.stencilOpSeparate(1028, sfail, dpfail, dppass); gl.stencilOpSeparate(1029, sfail, dpfail, dppass); } if (parameters.blendColorOperation || parameters.blendAlphaOperation) { gl.enable(3042); const colorEquation = convertBlendOperationToEquation("blendColorOperation", parameters.blendColorOperation || "add"); const alphaEquation = convertBlendOperationToEquation("blendAlphaOperation", parameters.blendAlphaOperation || "add"); gl.blendEquationSeparate(colorEquation, alphaEquation); const colorSrcFactor = convertBlendFactorToFunction("blendColorSrcFactor", parameters.blendColorSrcFactor || "one"); const colorDstFactor = convertBlendFactorToFunction("blendColorDstFactor", parameters.blendColorDstFactor || "zero"); const alphaSrcFactor = convertBlendFactorToFunction("blendAlphaSrcFactor", parameters.blendAlphaSrcFactor || "one"); const alphaDstFactor = convertBlendFactorToFunction("blendAlphaDstFactor", parameters.blendAlphaDstFactor || "zero"); gl.blendFuncSeparate(colorSrcFactor, colorDstFactor, alphaSrcFactor, alphaDstFactor); } } function convertCompareFunction(parameter, value) { return map(parameter, value, { never: 512, less: 513, equal: 514, "less-equal": 515, greater: 516, "not-equal": 517, "greater-equal": 518, always: 519 }); } function convertStencilOperation(parameter, value) { return map(parameter, value, { keep: 7680, zero: 0, replace: 7681, invert: 5386, "increment-clamp": 7682, "decrement-clamp": 7683, "increment-wrap": 34055, "decrement-wrap": 34056 }); } function convertBlendOperationToEquation(parameter, value) { return map(parameter, value, { add: 32774, subtract: 32778, "reverse-subtract": 32779, min: 32775, max: 32776 }); } function convertBlendFactorToFunction(parameter, value) { return map(parameter, value, { one: 1, zero: 0, "src-color": 768, "one-minus-src-color": 769, "dst-color": 774, "one-minus-dst-color": 775, "src-alpha": 770, "one-minus-src-alpha": 771, "dst-alpha": 772, "one-minus-dst-alpha": 773, "src-alpha-saturated": 776, "constant-color": 32769, "one-minus-constant-color": 32770, "constant-alpha": 32771, "one-minus-constant-alpha": 32772 }); } function message(parameter, value) { return `Illegal parameter ${value} for ${parameter}`; } function map(parameter, value, valueMap) { if (!(value in valueMap)) { throw new Error(message(parameter, value)); } return valueMap[value]; } function mapBoolean(parameter, value) { return value; } // dist/adapter/converters/sampler-parameters.js function convertSamplerParametersToWebGL(props) { const params = {}; if (props.addressModeU) { params[10242] = convertAddressMode(props.addressModeU); } if (props.addressModeV) { params[10243] = convertAddressMode(props.addressModeV); } if (props.addressModeW) { params[32882] = convertAddressMode(props.addressModeW); } if (props.magFilter) { params[10240] = convertMaxFilterMode(props.magFilter); } if (props.minFilter || props.mipmapFilter) { params[10241] = convertMinFilterMode(props.minFilter || "linear", props.mipmapFilter); } if 
(props.lodMinClamp !== void 0) { params[33082] = props.lodMinClamp; } if (props.lodMaxClamp !== void 0) { params[33083] = props.lodMaxClamp; } if (props.type === "comparison-sampler") { params[34892] = 34894; } if (props.compare) { params[34893] = convertCompareFunction("compare", props.compare); } if (props.maxAnisotropy) { params[34046] = props.maxAnisotropy; } return params; } function convertAddressMode(addressMode) { switch (addressMode) { case "clamp-to-edge": return 33071; case "repeat": return 10497; case "mirror-repeat": return 33648; } } function convertMaxFilterMode(maxFilter) { switch (maxFilter) { case "nearest": return 9728; case "linear": return 9729; } } function convertMinFilterMode(minFilter, mipmapFilter) { if (!mipmapFilter) { return convertMaxFilterMode(minFilter); } switch (minFilter) { case "nearest": return mipmapFilter === "nearest" ? 9984 : 9986; case "linear": return mipmapFilter === "nearest" ? 9985 : 9987; } } // dist/adapter/resources/webgl-buffer.js var import_core6 = require("@luma.gl/core"); var import_constants8 = require("@luma.gl/constants"); var WEBGLBuffer = class extends import_core6.Buffer { device; gl; handle; /** Target in OpenGL defines the type of buffer */ glTarget; /** Usage is a hint on how frequently the buffer will be updates */ glUsage; /** Index type is needed when issuing draw calls, so we pre-compute it */ glIndexType = 5123; /** Number of bytes allocated on the GPU for this buffer */ byteLength; /** Number of bytes used */ bytesUsed; constructor(device, props = {}) { super(device, props); this.device = device; this.gl = this.device.gl; const handle = typeof props === "object" ? props.handle : void 0; this.handle = handle || this.gl.createBuffer(); device.setSpectorMetadata(this.handle, { ...this.props, data: typeof this.props.data }); this.glTarget = getWebGLTarget(this.props.usage); this.glUsage = getWebGLUsage(this.props.usage); this.glIndexType = this.props.indexType === "uint32" ? 5125 : 5123; if (props.data) { this._initWithData(props.data, props.byteOffset, props.byteLength); } else { this._initWithByteLength(props.byteLength || 0); } } // PRIVATE METHODS /** Allocate a new buffer and initialize to contents of typed array */ _initWithData(data, byteOffset = 0, byteLength = data.byteLength + byteOffset) { const glTarget = this.glTarget; this.gl.bindBuffer(glTarget, this.handle); this.gl.bufferData(glTarget, byteLength, this.glUsage); this.gl.bufferSubData(glTarget, byteOffset, data); this.gl.bindBuffer(glTarget, null); this.bytesUsed = byteLength; this.byteLength = byteLength; this._setDebugData(data, byteOffset, byteLength); this.trackAllocatedMemory(byteLength); } // Allocate a GPU buffer of specified size. 
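// Usage sketch (not from the library source; assumes the usual luma.gl
// `device.createBuffer()` entry point, which is not shown in this excerpt):
//
//   const buffer = device.createBuffer({usage: Buffer.VERTEX, data: new Float32Array([0, 1, 2])});
//   buffer.write(new Float32Array([3, 4, 5]), 0);             // uploads via GL.COPY_WRITE_BUFFER (36663)
//   const bytes = buffer.readSyncWebGL(0, buffer.byteLength); // reads back via GL.COPY_READ_BUFFER (36662)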
_initWithByteLength(byteLength) { (0, import_core6.assert)(byteLength >= 0); let data = byteLength; if (byteLength === 0) { data = new Float32Array(0); } const glTarget = this.glTarget; this.gl.bindBuffer(glTarget, this.handle); this.gl.bufferData(glTarget, data, this.glUsage); this.gl.bindBuffer(glTarget, null); this.bytesUsed = byteLength; this.byteLength = byteLength; this._setDebugData(null, 0, byteLength); this.trackAllocatedMemory(byteLength); return this; } destroy() { if (!this.destroyed && this.handle) { this.removeStats(); this.trackDeallocatedMemory(); this.gl.deleteBuffer(this.handle); this.destroyed = true; this.handle = null; } } write(data, byteOffset = 0) { const srcOffset = 0; const byteLength = void 0; const glTarget = 36663; this.gl.bindBuffer(glTarget, this.handle); if (srcOffset !== 0 || byteLength !== void 0) { this.gl.bufferSubData(glTarget, byteOffset, data, srcOffset, byteLength); } else { this.gl.bufferSubData(glTarget, byteOffset, data); } this.gl.bindBuffer(glTarget, null); this._setDebugData(data, byteOffset, data.byteLength); } /** Asynchronously read data from the buffer */ async readAsync(byteOffset = 0, byteLength) { return this.readSyncWebGL(byteOffset, byteLength); } /** Synchronously read data from the buffer. WebGL only. */ readSyncWebGL(byteOffset = 0, byteLength) { byteLength = byteLength ?? this.byteLength - byteOffset; const data = new Uint8Array(byteLength); const dstOffset = 0; this.gl.bindBuffer(36662, this.handle); this.gl.getBufferSubData(36662, byteOffset, data, dstOffset, byteLength); this.gl.bindBuffer(36662, null); this._setDebugData(data, byteOffset, byteLength); return data; } }; function getWebGLTarget(usage) { if (usage & import_core6.Buffer.INDEX) { return 34963; } if (usage & import_core6.Buffer.VERTEX) { return 34962; } if (usage & import_core6.Buffer.UNIFORM) { return 35345; } return 34962; } function getWebGLUsage(usage) { if (usage & import_core6.Buffer.INDEX) { return 35044; } if (usage & import_core6.Buffer.VERTEX) { return 35044; } if (usage & import_core6.Buffer.UNIFORM) { return 35048; } return 35044; } // dist/adapter/resources/webgl-sampler.js var import_core7 = require("@luma.gl/core"); var import_constants9 = require("@luma.gl/constants"); var WEBGLSampler = class extends import_core7.Sampler { device; handle; parameters; constructor(device, props) { super(device, props); this.device = device; this.parameters = convertSamplerParametersToWebGL(props); this.handle = this.handle || this.device.gl.createSampler(); this._setSamplerParameters(this.parameters); } destroy() { if (this.handle) { this.device.gl.deleteSampler(this.handle); this.handle = void 0; } } toString() { return `Sampler(${this.id},${JSON.stringify(this.props)})`; } /** Set sampler parameters on the sampler */ _setSamplerParameters(parameters) { for (const [pname, value] of Object.entries(parameters)) { const param = Number(pname); switch (param) { case 33082: case 33083: this.device.gl.samplerParameterf(this.handle, param, value); break; default: this.device.gl.samplerParameteri(this.handle, param, value); break; } } } }; // dist/adapter/resources/webgl-texture-view.js var import_core8 = require("@luma.gl/core"); var WEBGLTextureView = class extends import_core8.TextureView { device; gl; handle; texture; constructor(device, props) { super(device, { ...import_core8.Texture.defaultProps, ...props }); this.device = device; this.gl = this.device.gl; this.handle = null; this.texture = props.texture; } }; // dist/adapter/resources/webgl-texture.js var 
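// Hedged illustration for WEBGLSampler above (comment only): convertSamplerParametersToWebGL maps portable
// sampler props to GL parameter enums, e.g.
//   convertSamplerParametersToWebGL({addressModeU: 'repeat', minFilter: 'linear', mipmapFilter: 'nearest'})
//   yields {10242: 10497 (GL.REPEAT), 10241: 9985 (GL.LINEAR_MIPMAP_NEAREST)},
// which _setSamplerParameters then applies via gl.samplerParameteri (and samplerParameterf for the LOD clamps).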
DEFAULT_WEBGL_TEXTURE_PROPS = { // deprecated parameters: {}, pixelStore: {}, pixels: null, border: 0, dataFormat: void 0, textureUnit: void 0, target: void 0 }; var _WEBGLTexture = class extends import_core9.Texture { MAX_ATTRIBUTES; device; gl; handle; // (TODO - currently unused in WebGL, but WebGL 2 does support sampler objects) */ sampler = void 0; view = void 0; // data; glFormat = void 0; type = void 0; dataFormat = void 0; mipmaps = void 0; /** * @note `target` cannot be modified by bind: * textures are special because when you first bind them to a target, * they get special information. When you first bind a texture as a * GL_TEXTURE_2D, you are saying that this texture is a 2D texture. * And it will always be a 2D texture; this state cannot be changed ever. * A texture that was first bound as a GL_TEXTURE_2D, must always be bound as a GL_TEXTURE_2D; * attempting to bind it as GL_TEXTURE_3D will give rise to a run-time error * */ target; textureUnit = void 0; /** * Program.draw() checks the loaded flag of all textures to avoid * Textures that are still loading from promises * Set to true as soon as texture has been initialized with valid data */ loaded = false; _video; constructor(device, props) { var _a; super(device, { ...DEFAULT_WEBGL_TEXTURE_PROPS, format: "rgba8unorm", ...props }); this.device = device; this.gl = this.device.gl; this.handle = this.props.handle || this.gl.createTexture(); this.device.setSpectorMetadata(this.handle, { ...this.props, data: typeof this.props.data }); this.glFormat = 6408; this.target = getWebGLTextureTarget(this.props); this.loaded = false; if (typeof ((_a = this.props) == null ? void 0 : _a.data) === "string") { Object.assign(this.props, { data: (0, import_core9.loadImage)(this.props.data) }); } this.initialize(this.props); Object.seal(this); } destroy() { if (this.handle) { this.gl.deleteTexture(this.handle); this.removeStats(); this.trackDeallocatedMemory("Texture"); this.destroyed = true; } } toString() { return `Texture(${this.id},${this.width}x${this.height})`; } createView(props) { return new WEBGLTextureView(this.device, { ...props, texture: this }); } // eslint-disable-next-line max-statements initialize(props = {}) { if (this.props.dimension === "cube") { return this.initializeCube(props); } let data = props.data; if (data instanceof Promise) { data.then((resolvedImageData) => this.initialize(Object.assign({}, props, { pixels: resolvedImageData, data: resolvedImageData }))); return this; } const isVideo = typeof HTMLVideoElement !== "undefined" && data instanceof HTMLVideoElement; if (isVideo && data.readyState < HTMLVideoElement.HAVE_METADATA) { this._video = null; data.addEventListener("loadeddata", () => this.initialize(props)); return this; } const { parameters = {} } = props; const { pixels = null, pixelStore = {}, textureUnit = void 0, mipmaps = true } = props; if (!data) { data = pixels; } let { width, height, dataFormat, type, compressed = false } = props; const { depth = 0 } = props; const glFormat = convertTextureFormatToGL(props.format); ({ width, height, compressed, dataFormat, type } = this._deduceParameters({ format: props.format, type, dataFormat, compressed, data, width, height })); this.width = width; this.height = height; this.glFormat = glFormat; this.type = type; this.dataFormat = dataFormat; this.textureUnit = textureUnit; if (Number.isFinite(this.textureUnit)) { this.gl.activeTexture(33984 + this.textureUnit); this.gl.bindTexture(this.target, this.handle); } this.mipmaps = mipmaps; this.setImageData({ data, width, 
height, depth, format: props.format, type, dataFormat, // @ts-expect-error parameters: pixelStore, compressed }); this.setSampler(props.sampler); this._setSamplerParameters(parameters); this.view = this.createView({ ...this.props, mipLevelCount: 1, arrayLayerCount: 1 }); if (mipmaps && this.device.isTextureFormatFilterable(props.format)) { this.generateMipmap(); } if (isVideo) { this._video = { video: data, parameters, // @ts-expect-error lastTime: data.readyState >= HTMLVideoElement.HAVE_CURRENT_DATA ? data.currentTime : -1 }; } return this; } initializeCube(props) { const { mipmaps = true, parameters = {} } = props; this.setCubeMapImageData(props).then(() => { this.loaded = true; if (mipmaps) { this.generateMipmap(props); } this.setSampler(props.sampler); this._setSamplerParameters(parameters); }); return this; } setSampler(sampler = {}) { let samplerProps; if (sampler instanceof WEBGLSampler) { this.sampler = sampler; samplerProps = sampler.props; } else { this.sampler = new WEBGLSampler(this.device, sampler); samplerProps = sampler; } const parameters = convertSamplerParametersToWebGL(samplerProps); this._setSamplerParameters(parameters); return this; } /** * If size has changed, reinitializes with current format * @note note clears image and mipmaps */ resize(options) { const { height, width, mipmaps = false } = options; if (width !== this.width || height !== this.height) { return this.initialize({ width, height, format: this.format, type: this.type, dataFormat: this.dataFormat, mipmaps }); } return this; } /** Update external texture (video frame) */ update() { if (this._video) { const { video, parameters, lastTime } = this._video; if (lastTime === video.currentTime || video.readyState < HTMLVideoElement.HAVE_CURRENT_DATA) { return; } this.setSubImageData({ data: video, parameters }); if (this.mipmaps) { this.generateMipmap(); } this._video.lastTime = video.currentTime; } } // Call to regenerate mipmaps after modifying texture(s) generateMipmap(params = {}) { this.mipmaps = true; this.gl.bindTexture(this.target, this.handle); withGLParameters(this.gl, params, () => { this.gl.generateMipmap(this.target); }); this.gl.bindTexture(this.target, null); return this; } /* * Allocates storage * @param {*} pixels - * null - create empty texture of specified format * Typed array - init from image data in typed array * Buffer|WebGLBuffer - (WEBGL2) init from image data in WebGLBuffer * HTMLImageElement|Image - Inits with content of image. Auto width/height * HTMLCanvasElement - Inits with contents of canvas. Auto width/height * HTMLVideoElement - Creates video texture. Auto width/height * * @param width - * @param height - * @param mipMapLevel - * @param {GLenum} format - format of image data. * @param {GLenum} type * - format of array (autodetect from type) or * - (WEBGL2) format of buffer * @param {Number} offset - (WEBGL2) offset from start of buffer * @parameters - temporary settings to be applied, can be used to supply pixel store settings. 
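 * Hedged doc-only sketch of how the implementation below dispatches on the data type:
 *   - typed array: gl.texImage2D(target, level, glFormat, w, h, 0, dataFormat, type, data, offset)
 *   - WEBGLBuffer / WebGLBuffer: bound to GL.PIXEL_UNPACK_BUFFER (35052), texImage2D reads at `offset`
 *   - image / canvas / video / ImageBitmap: DOM overload of texImage2D
 *   - array of compressed mip levels: gl.compressedTexImage2D per level
 *   - null: allocates storage of the requested size without uploading data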
*/ // eslint-disable-next-line max-statements, complexity setImageData(options) { if (this.props.dimension === "3d" || this.props.dimension === "2d-array") { return this.setImageData3D(options); } this.trackDeallocatedMemory("Texture"); const { target = this.target, pixels = null, level = 0, glFormat = this.glFormat, offset = 0, parameters = {} } = options; let { data = null, type = this.type, width = this.width, height = this.height, dataFormat = this.dataFormat, compressed = false } = options; if (!data) { data = pixels; } ({ type, dataFormat, compressed, width, height } = this._deduceParameters({ format: this.props.format, type, dataFormat, compressed, data, width, height })); const { gl } = this; gl.bindTexture(this.target, this.handle); let dataType = null; ({ data, dataType } = this._getDataType({ data, compressed })); withGLParameters(this.gl, parameters, () => { switch (dataType) { case "null": gl.texImage2D(target, level, glFormat, width, height, 0, dataFormat, type, data); break; case "typed-array": gl.texImage2D( target, level, glFormat, width, height, 0, // border (must be 0) dataFormat, type, data, offset ); break; case "buffer": this.device.gl.bindBuffer(35052, data.handle || data); this.device.gl.texImage2D(target, level, glFormat, width, height, 0, dataFormat, type, offset); this.device.gl.bindBuffer(35052, null); break; case "browser-object": gl.texImage2D(target, level, glFormat, width, height, 0, dataFormat, type, data); break; case "compressed": for (const [levelIndex, levelData] of data.entries()) { gl.compressedTexImage2D(target, levelIndex, levelData.format, levelData.width, levelData.height, 0, levelData.data); } break; default: (0, import_core9.assert)(false, "Unknown image data type"); } }); if (data && data.byteLength) { this.trackAllocatedMemory(data.byteLength, "Texture"); } else { const bytesPerPixel = getTextureFormatBytesPerPixel(this.props.format); this.trackAllocatedMemory(this.width * this.height * bytesPerPixel, "Texture"); } this.loaded = true; return this; } /** * Redefines an area of an existing texture * Note: does not allocate storage * Redefines an area of an existing texture */ setSubImageData({ target = this.target, pixels = null, data = null, x = 0, y = 0, width = this.width, height = this.height, level = 0, glFormat = this.glFormat, type = this.type, dataFormat = this.dataFormat, compressed = false, offset = 0, parameters = {} }) { ({ type, dataFormat, compressed, width, height } = this._deduceParameters({ format: this.props.format, type, dataFormat, compressed, data, width, height })); (0, import_core9.assert)(this.depth === 1, "texSubImage not supported for 3D textures"); if (!data) { data = pixels; } if (data && data.data) { const ndarray = data; data = ndarray.data; width = ndarray.shape[0]; height = ndarray.shape[1]; } if (data instanceof WEBGLBuffer) { data = data.handle; } this.gl.bindTexture(this.target, this.handle); withGLParameters(this.gl, parameters, () => { if (compressed) { this.gl.compressedTexSubImage2D(target, level, x, y, width, height, glFormat, data); } else if (data === null) { this.gl.texSubImage2D(target, level, x, y, width, height, dataFormat, type, null); } else if (ArrayBuffer.isView(data)) { this.gl.texSubImage2D(target, level, x, y, width, height, dataFormat, type, data, offset); } else if (typeof WebGLBuffer !== "undefined" && data instanceof WebGLBuffer) { this.device.gl.bindBuffer(35052, data); this.device.gl.texSubImage2D(target, level, x, y, width, height, dataFormat, type, offset); 
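// Note: 35052 is GL.PIXEL_UNPACK_BUFFER - when `data` is a WebGLBuffer, the texSubImage2D call above reads
// pixel data from the bound unpack buffer at `offset` instead of from client memory; the buffer is unbound
// again on the next line.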
this.device.gl.bindBuffer(35052, null); } else { this.device.gl.texSubImage2D(target, level, x, y, width, height, dataFormat, type, data); } }); this.gl.bindTexture(this.target, null); } /** * Defines a two-dimensional texture image or cube-map texture image with * pixels from the current framebuffer (rather than from client memory). * (gl.copyTexImage2D wrapper) * * Note that binding a texture into a Framebuffer's color buffer and * rendering can be faster. */ copyFramebuffer(opts = {}) { import_core9.log.error("Texture.copyFramebuffer({...}) is no longer supported, use copyToTexture(source, target, opts)")(); return null; } getActiveUnit() { return this.gl.getParameter(34016) - 33984; } bind(textureUnit = this.textureUnit) { const { gl } = this; if (textureUnit !== void 0) { this.textureUnit = textureUnit; gl.activeTexture(33984 + textureUnit); } gl.bindTexture(this.target, this.handle); return textureUnit; } unbind(textureUnit = this.textureUnit) { const { gl } = this; if (textureUnit !== void 0) { this.textureUnit = textureUnit; gl.activeTexture(33984 + textureUnit); } gl.bindTexture(this.target, null); return textureUnit; }
// PRIVATE METHODS
_getDataType({ data, compressed = false }) { if (compressed) { return { data, dataType: "compressed" }; } if (data === null) { return { data, dataType: "null" }; } if (ArrayBuffer.isView(data)) { return { data, dataType: "typed-array" }; } if (data instanceof WEBGLBuffer) { return { data: data.handle, dataType: "buffer" }; } if (typeof WebGLBuffer !== "undefined" && data instanceof WebGLBuffer) { return { data, dataType: "buffer" }; } return { data, dataType: "browser-object" }; }
// HELPER METHODS
_deduceParameters(opts) { const { format, data } = opts; let { width, height, dataFormat, type, compressed } = opts; const parameters = getWebGLTextureParameters(format); dataFormat = dataFormat || parameters.dataFormat; type = type || parameters.type; compressed = compressed || parameters.compressed; ({ width, height } = this._deduceImageSize(data, width, height)); return { dataFormat, type, compressed, width, height, format, data }; }
// eslint-disable-next-line complexity
_deduceImageSize(data, width, height) { let size; if (typeof ImageData !== "undefined" && data instanceof ImageData) { size = { width: data.width, height: data.height }; } else if (typeof HTMLImageElement !== "undefined" && data instanceof HTMLImageElement) { size = { width: data.naturalWidth, height: data.naturalHeight }; } else if (typeof HTMLCanvasElement !== "undefined" && data instanceof HTMLCanvasElement) { size = { width: data.width, height: data.height }; } else if (typeof ImageBitmap !== "undefined" && data instanceof ImageBitmap) { size = { width: data.width, height: data.height }; } else if (typeof HTMLVideoElement !== "undefined" && data instanceof HTMLVideoElement) { size = { width: data.videoWidth, height: data.videoHeight }; } else if (!data) { size = { width: width >= 0 ? width : 1, height: height >= 0 ?
height : 1 }; } else { size = { width, height }; } (0, import_core9.assert)(size, "Could not deduce texture size"); (0, import_core9.assert)(width === void 0 || size.width === width, "Deduced texture width does not match supplied width"); (0, import_core9.assert)(height === void 0 || size.height === height, "Deduced texture height does not match supplied height"); return size; }
// CUBE MAP METHODS
/* eslint-disable max-statements, max-len */ async setCubeMapImageData(options) { const { gl } = this; const { width, height, pixels, data, format = 6408, type = 5121 } = options; const imageDataMap = pixels || data; const resolvedFaces = await Promise.all(_WEBGLTexture.FACES.map((face) => { const facePixels = imageDataMap[face]; return Promise.all(Array.isArray(facePixels) ? facePixels : [facePixels]); })); this.bind(); _WEBGLTexture.FACES.forEach((face, index) => { if (resolvedFaces[index].length > 1 && this.props.mipmaps !== false) { import_core9.log.warn(`${this.id} has mipmap and multiple LODs.`)(); } resolvedFaces[index].forEach((image, lodLevel) => { if (width && height) { gl.texImage2D(face, lodLevel, format, width, height, 0, format, type, image); } else { gl.texImage2D(face, lodLevel, format, format, type, image); } }); }); this.unbind(); } /** @todo update this method to accept LODs */ setImageDataForFace(options) { const { face, width, height, pixels, data, format = 6408, type = 5121 // generateMipmap = false // TODO
} = options; const { gl } = this; const imageData = pixels || data; this.bind(); if (imageData instanceof Promise) { imageData.then((resolvedImageData) => this.setImageDataForFace(Object.assign({}, options, { face, data: resolvedImageData, pixels: resolvedImageData }))); } else if (this.width || this.height) { gl.texImage2D(face, 0, format, width, height, 0, format, type, imageData); } else { gl.texImage2D(face, 0, format, format, type, imageData); } return this; } /** Image 3D copies from Typed Array or WebGLBuffer */ setImageData3D(options) { const { level = 0, dataFormat, format, type, // = GL.UNSIGNED_BYTE,
width, height, depth = 1, offset = 0, data, parameters = {} } = options; this.trackDeallocatedMemory("Texture"); this.gl.bindTexture(this.target, this.handle); const webglTextureFormat = getWebGLTextureParameters(format); withGLParameters(this.gl, parameters, () => { if (ArrayBuffer.isView(data)) { this.gl.texImage3D( this.target, level, webglTextureFormat.format, width, height, depth, 0, webglTextureFormat.dataFormat, webglTextureFormat.type, // dataType: getWebGL,
data ); } if (data instanceof WEBGLBuffer) { this.gl.bindBuffer(35052, data.handle); this.gl.texImage3D(this.target, level, dataFormat, width, height, depth, 0, format, type, offset); } }); if (data && data.byteLength) { this.trackAllocatedMemory(data.byteLength, "Texture"); } else { const bytesPerPixel = getTextureFormatBytesPerPixel(this.props.format); this.trackAllocatedMemory(this.width * this.height * this.depth * bytesPerPixel, "Texture"); } this.loaded = true; return this; }
// RESOURCE METHODS
/** * Sets sampler parameters on texture */ _setSamplerParameters(parameters) { if ((0, import_core9.isObjectEmpty)(parameters)) { return; } logParameters(parameters); this.gl.bindTexture(this.target, this.handle); for (const [pname, pvalue] of Object.entries(parameters)) { const param = Number(pname); const value = pvalue; switch (param) { case 33082: case 33083: this.gl.texParameterf(this.target, param, value); break; default: this.gl.texParameteri(this.target, param, value); break; } }
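// Note: 33082/33083 are GL.TEXTURE_MIN_LOD / GL.TEXTURE_MAX_LOD, the only float-valued parameters handled
// here (texParameterf); all other sampler parameters are integer enums set via texParameteri in the loop above.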
this.gl.bindTexture(this.target, null); return; } }; var WEBGLTexture = _WEBGLTexture; // TODO - remove? __publicField(WEBGLTexture, "FACES", [ 34069, 34070, 34071, 34072, 34073, 34074 ]); function getWebGLTextureTarget(props) { switch (props.dimension) { case "2d": return 3553; case "cube": return 34067; case "2d-array": return 35866; case "3d": return 32879; case "1d": case "cube-array": default: throw new Error(props.dimension); } } function logParameters(parameters) { import_core9.log.log(1, "texture sampler parameters", parameters)(); } // dist/adapter/resources/webgl-framebuffer.js var WEBGLFramebuffer = class extends import_core10.Framebuffer { device; gl; handle; get texture() { return this.colorAttachments[0]; } constructor(device, props) { super(device, props); const isDefaultFramebuffer = props.handle === null; this.device = device; this.gl = device.gl; this.handle = this.props.handle || isDefaultFramebuffer ? this.props.handle : this.gl.createFramebuffer(); if (!isDefaultFramebuffer) { device.setSpectorMetadata(this.handle, { id: this.props.id, props: this.props }); this.autoCreateAttachmentTextures(); const prevHandle = this.gl.bindFramebuffer(36160, this.handle); for (let i = 0; i < this.colorAttachments.length; ++i) { const attachment = this.colorAttachments[i]; const attachmentPoint = 36064 + i; if (attachment) { this._attachOne(attachmentPoint, attachment); } } if (this.depthStencilAttachment) { this._attachOne(getDepthStencilAttachmentWebGL(this.depthStencilAttachment.props.format), this.depthStencilAttachment); } if (props.check !== false) { const status = this.gl.checkFramebufferStatus(36160); if (status !== 36053) { throw new Error(`Framebuffer ${_getFrameBufferStatus(status)}`); } } this.gl.bindFramebuffer(36160, prevHandle); } } /** destroys any auto created resources etc. 
*/ destroy() { super.destroy(); if (!this.destroyed && this.handle !== null) { this.gl.deleteFramebuffer(this.handle); } } // PRIVATE /** In WebGL we must use renderbuffers for depth/stencil attachments (unless we have extensions) */ createDepthStencilTexture(format) { return new WEBGLTexture(this.device, { id: `${this.id}-depth-stencil`, format, width: this.width, height: this.height, mipmaps: false }); } /** * Attachment resize is expected to be a noop if size is same */ resizeAttachments(width, height) { if (this.handle === null) { this.width = this.gl.drawingBufferWidth; this.height = this.gl.drawingBufferHeight; return this; } if (width === void 0) { width = this.gl.drawingBufferWidth; } if (height === void 0) { height = this.gl.drawingBufferHeight; } for (const colorAttachment of this.colorAttachments) { colorAttachment.texture.resize({ width, height }); } if (this.depthStencilAttachment) { this.depthStencilAttachment.texture.resize({ width, height }); } return this; } /** Attach one attachment */ _attachOne(attachmentPoint, attachment) { if (Array.isArray(attachment)) { const [texture, layer = 0, level = 0] = attachment; this._attachTexture(attachmentPoint, texture, layer, level); return texture; } if (attachment instanceof WEBGLTexture) { this._attachTexture(attachmentPoint, attachment, 0, 0); return attachment; } if (attachment instanceof WEBGLTextureView) { const textureView = attachment; this._attachTexture(attachmentPoint, textureView.texture, textureView.props.baseMipLevel, textureView.props.baseArrayLayer); return attachment.texture; } throw new Error("attach"); } // TODO - we do not seem to need render buffers in WebGL 2 // protected _attachWEBGLRenderbuffer(attachment: GL, renderbuffer: WEBGLRenderbuffer): void { // this.gl.framebufferRenderbuffer( // GL.FRAMEBUFFER, // attachment, // GL.RENDERBUFFER, // renderbuffer.handle // ); // } /** * @param attachment * @param texture * @param layer = 0 - index into WEBGLTextureArray and Texture3D or face for `TextureCubeMap` * @param level = 0 - mipmapLevel */ _attachTexture(attachment, texture, layer, level) { const { gl } = this.device; gl.bindTexture(texture.target, texture.handle); switch (texture.target) { case 35866: case 32879: gl.framebufferTextureLayer(36160, attachment, texture.target, level, layer); break; case 34067: const face = mapIndexToCubeMapFace(layer); gl.framebufferTexture2D(36160, attachment, face, texture.handle, level); break; case 3553: gl.framebufferTexture2D(36160, attachment, 3553, texture.handle, level); break; default: (0, import_core10.assert)(false, "Illegal texture type"); } gl.bindTexture(texture.target, null); } }; function mapIndexToCubeMapFace(layer) { return layer < 34069 ? 
layer + 34069 : layer; } function _getFrameBufferStatus(status) { switch (status) { case 36053: return "success"; case 36054: return "Mismatched attachments"; case 36055: return "No attachments"; case 36057: return "Height/width mismatch"; case 36061: return "Unsupported or split attachments"; case 36182: return "Samples mismatch"; default: return `${status}`; } } // dist/adapter/webgl-canvas-context.js var WebGLCanvasContext = class extends import_core11.CanvasContext { device; presentationSize; _framebuffer = null; constructor(device, props) { super(props); this.device = device; this.presentationSize = [-1, -1]; this._setAutoCreatedCanvasId(`${this.device.id}-canvas`); this.update(); } getCurrentFramebuffer() { this.update(); this._framebuffer = this._framebuffer || new WEBGLFramebuffer(this.device, { handle: null }); return this._framebuffer; } /** Resizes and updates render targets if necessary */ update() { const size = this.getPixelSize(); const sizeChanged = size[0] !== this.presentationSize[0] || size[1] !== this.presentationSize[1]; if (sizeChanged) { this.presentationSize = size; this.resize(); } } /** * Resize the canvas' drawing buffer. * * Can match the canvas CSS size, and optionally also consider devicePixelRatio * Can be called every frame * * Regardless of size, the drawing buffer will always be scaled to the viewport, but * for best visual results, usually set to either: * canvas CSS width x canvas CSS height * canvas CSS width * devicePixelRatio x canvas CSS height * devicePixelRatio * See http://webgl2fundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html */ resize(options) { if (!this.device.gl) return; if (this.canvas) { const devicePixelRatio = this.getDevicePixelRatio(options == null ? void 0 : options.useDevicePixels); this.setDevicePixelRatio(devicePixelRatio, options); return; } } commit() { } }; // dist/context/debug/spector.js var import_core12 = require("@luma.gl/core"); var DEFAULT_SPECTOR_PROPS = { spector: import_core12.log.get("spector") || import_core12.log.get("spectorjs") }; var SPECTOR_CDN_URL = "https://cdn.jsdelivr.net/npm/spectorjs@0.9.30/dist/spector.bundle.js"; var LOG_LEVEL = 1; var spector = null; var initialized = false; async function loadSpectorJS(props) { if (!globalThis.SPECTOR) { try { await (0, import_core12.loadScript)(SPECTOR_CDN_URL); } catch (error) { import_core12.log.warn(String(error)); } } } function initializeSpectorJS(props) { props = { ...DEFAULT_SPECTOR_PROPS, ...props }; if (!(props == null ? void 0 : props.spector)) { return null; } if (!spector && globalThis.SPECTOR) { import_core12.log.probe(LOG_LEVEL, "SPECTOR found and initialized")(); spector = new globalThis.SPECTOR.Spector(); if (globalThis.luma) { globalThis.luma.spector = spector; } } if (!spector) { return null; } if (!initialized) { initialized = true; spector.spyCanvases(); spector == null ? void 0 : spector.onCaptureStarted.add((capture) => import_core12.log.info("Spector capture started:", capture)()); spector == null ? void 0 : spector.onCapture.add((capture) => { import_core12.log.info("Spector capture complete:", capture)(); spector == null ? void 0 : spector.getResultUI(); spector == null ? void 0 : spector.resultView.display(); spector == null ? void 0 : spector.resultView.addCapture(capture); }); } if (props == null ? void 0 : props.canvas) { if (typeof props.spector === "string" && props.spector !== props.canvas.id) { return spector; } spector == null ? void 0 : spector.startCapture(props == null ? 
void 0 : props.canvas, 500); new Promise((resolve) => setTimeout(resolve, 2e3)).then((_) => { import_core12.log.info("Spector capture stopped after 2 seconds")(); spector == null ? void 0 : spector.stopCapture(); }); } return spector; } // dist/context/debug/webgl-developer-tools.js var import_core13 = require("@luma.gl/core"); var import_constants12 = require("@luma.gl/constants"); var import_env = require("@probe.gl/env"); var WEBGL_DEBUG_CDN_URL = "https://unpkg.com/webgl-debug@2.0.1/index.js"; function getWebGLContextData(gl) { gl.luma = gl.luma || {}; return gl.luma; } async function loadWebGLDeveloperTools() { if ((0, import_env.isBrowser)() && !globalThis.WebGLDebugUtils) { globalThis.global = globalThis.global || globalThis; globalThis.global.module = {}; await (0, import_core13.loadScript)(WEBGL_DEBUG_CDN_URL); } } function makeDebugContext(gl, props = {}) { if (!gl) { return null; } return props.debug ? getDebugContext(gl, props) : getRealContext(gl); } function getRealContext(gl) { const data = getWebGLContextData(gl); return data.realContext ? data.realContext : gl; } function getDebugContext(gl, props) { if (!globalThis.WebGLDebugUtils) { import_core13.log.warn("webgl-debug not loaded")(); return gl; } const data = getWebGLContextData(gl); if (data.debugContext) { return data.debugContext; } globalThis.WebGLDebugUtils.init({ ...import_constants12.GL, ...gl }); const glDebug = globalThis.WebGLDebugUtils.makeDebugContext(gl, onGLError.bind(null, props), onValidateGLFunc.bind(null, props)); for (const key in import_constants12.GL) { if (!(key in glDebug) && typeof import_constants12.GL[key] === "number") { glDebug[key] = import_constants12.GL[key]; } } class WebGLDebugContext { } Object.setPrototypeOf(glDebug, Object.getPrototypeOf(gl)); Object.setPrototypeOf(WebGLDebugContext, glDebug); const debugContext = Object.create(WebGLDebugContext); data.realContext = gl; data.debugContext = debugContext; debugContext.debug = true; return debugContext; } function getFunctionString(functionName, functionArgs) { functionArgs = Array.from(functionArgs).map((arg) => arg === void 0 ? "undefined" : arg); let args = globalThis.WebGLDebugUtils.glFunctionArgsToString(functionName, functionArgs); args = `${args.slice(0, 100)}${args.length > 100 ? "..." : ""}`; return `gl.${functionName}(${args})`; } function onGLError(props, err, functionName, args) { args = Array.from(args).map((arg) => arg === void 0 ? 
"undefined" : arg); const errorMessage = globalThis.WebGLDebugUtils.glEnumToString(err); const functionArgs = globalThis.WebGLDebugUtils.glFunctionArgsToString(functionName, args); const message2 = `${errorMessage} in gl.${functionName}(${functionArgs})`; import_core13.log.error(message2)(); debugger; if (props.throwOnError) { throw new Error(message2); } } function onValidateGLFunc(props, functionName, functionArgs) { let functionString = ""; if (import_core13.log.level >= 1) { functionString = getFunctionString(functionName, functionArgs); import_core13.log.log(1, functionString)(); } if (props.break && props.break.length > 0) { functionString = functionString || getFunctionString(functionName, functionArgs); const isBreakpoint = props.break.every((breakOn) => functionString.indexOf(breakOn) !== -1); if (isBreakpoint) { debugger; } } for (const arg of functionArgs) { if (arg === void 0) { functionString = functionString || getFunctionString(functionName, functionArgs); if (props.throwOnError) { throw new Error(`Undefined argument: ${functionString}`); } else { import_core13.log.error(`Undefined argument: ${functionString}`)(); debugger; } } } } // dist/adapter/resources/webgl-shader.js var import_core14 = require("@luma.gl/core"); var import_constants13 = require("@luma.gl/constants"); // dist/adapter/helpers/parse-shader-compiler-log.js function parseShaderCompilerLog(errLog) { const lines = errLog.split(/\r?\n/); const messages = []; for (const line of lines) { if (line.length <= 1) { continue; } const segments = line.split(":"); if (segments.length === 2) { const [messageType2, message2] = segments; messages.push({ message: message2.trim(), type: getMessageType(messageType2), lineNum: 0, linePos: 0 }); continue; } const [messageType, linePosition, lineNumber, ...rest] = segments; let lineNum = parseInt(lineNumber, 10); if (isNaN(lineNum)) { lineNum = 0; } let linePos = parseInt(linePosition, 10); if (isNaN(linePos)) { linePos = 0; } messages.push({ message: rest.join(":").trim(), type: getMessageType(messageType), lineNum, linePos // TODO }); } return messages; } function getMessageType(messageType) { const MESSAGE_TYPES = ["warning", "error", "info"]; const lowerCaseType = messageType.toLowerCase(); return MESSAGE_TYPES.includes(lowerCaseType) ? lowerCaseType : "info"; } // dist/adapter/resources/webgl-shader.js var WEBGLShader = class extends import_core14.Shader { device; handle; constructor(device, props) { super(device, props); this.device = device; switch (this.props.stage) { case "vertex": this.handle = this.props.handle || this.device.gl.createShader(35633); break; case "fragment": this.handle = this.props.handle || this.device.gl.createShader(35632); break; default: throw new Error(this.props.stage); } this._compile(this.source); } destroy() { if (this.handle) { this.removeStats(); this.device.gl.deleteShader(this.handle); this.destroyed = true; } } async getCompilationInfo() { await this._waitForCompilationComplete(); return this.getCompilationInfoSync(); } getCompilationInfoSync() { const log9 = this.device.gl.getShaderInfoLog(this.handle); return parseShaderCompilerLog(log9); } getTranslatedSource() { const extensions = this.device.getExtension("WEBGL_debug_shaders"); const ext = extensions.WEBGL_debug_shaders; return ext == null ? void 0 : ext.getTranslatedShaderSource(this.handle); } // PRIVATE METHODS /** Compile a shader and get compilation status */ async _compile(source) { const addGLSLVersion = (source2) => source2.startsWith("#version ") ? 
source2 : `#version 100 ${source2}`; source = addGLSLVersion(source); const { gl } = this.device; gl.shaderSource(this.handle, source); gl.compileShader(this.handle); if (import_core14.log.level === 0) { this.compilationStatus = "pending"; return; } if (!this.device.features.has("compilation-status-async-webgl")) { this._getCompilationStatus(); this.debugShader(); if (this.compilationStatus === "error") { throw new Error(`GLSL compilation errors in ${this.props.stage} shader ${this.props.id}`); } return; } import_core14.log.once(1, "Shader compilation is asynchronous")(); await this._waitForCompilationComplete(); import_core14.log.info(2, `Shader ${this.id} - async compilation complete: ${this.compilationStatus}`)(); this._getCompilationStatus(); this.debugShader(); } /** Use KHR_parallel_shader_compile extension if available */ async _waitForCompilationComplete() { const waitMs = async (ms) => await new Promise((resolve) => setTimeout(resolve, ms)); const DELAY_MS = 10; if (!this.device.features.has("compilation-status-async-webgl")) { await waitMs(DELAY_MS); return; } const { gl } = this.device; for (; ; ) { const complete = gl.getShaderParameter(this.handle, 37297); if (complete) { return; } await waitMs(DELAY_MS); } } /** * Get the shader compilation status * TODO - Load log even when no error reported, to catch warnings? * https://gamedev.stackexchange.com/questions/30429/how-to-detect-glsl-warnings */ _getCompilationStatus() { this.compilationStatus = this.device.gl.getShaderParameter(this.handle, 35713) ? "success" : "error"; } }; // dist/adapter/resources/webgl-render-pass.js var import_core15 = require("@luma.gl/core"); var import_constants14 = require("@luma.gl/constants"); var GL_DEPTH_BUFFER_BIT = 256; var GL_STENCIL_BUFFER_BIT = 1024; var GL_COLOR_BUFFER_BIT = 16384; var GL_COLOR = 6144; var COLOR_CHANNELS = [1, 2, 4, 8]; var WEBGLRenderPass = class extends import_core15.RenderPass { device; /** Parameters that should be applied before each draw call */ glParameters; constructor(device, props) { super(device, props); this.device = device; pushContextState(this.device.gl); this.setParameters(this.props.parameters); this.clear(); } end() { popContextState(this.device.gl); } pushDebugGroup(groupLabel) { } popDebugGroup() { } insertDebugMarker(markerLabel) { } // beginOcclusionQuery(queryIndex: number): void; // endOcclusionQuery(): void; // executeBundles(bundles: Iterable): void; /** * Maps RenderPass parameters to GL parameters */ setParameters(parameters = {}) { const glParameters = { ...this.glParameters }; if (this.props.framebuffer) { glParameters.framebuffer = this.props.framebuffer; } if (this.props.depthReadOnly) { glParameters.depthMask = !this.props.depthReadOnly; } glParameters.stencilMask = this.props.stencilReadOnly ? 
0 : 1; glParameters[35977] = this.props.discard; if (parameters.viewport) { if (parameters.viewport.length >= 6) { glParameters.viewport = parameters.viewport.slice(0, 4); glParameters.depthRange = [parameters.viewport[4], parameters.viewport[5]]; } else { glParameters.viewport = parameters.viewport; } } if (parameters.scissorRect) { glParameters.scissorTest = true; glParameters.scissor = parameters.scissorRect; } if (parameters.blendConstant) { glParameters.blendColor = parameters.blendConstant; } if (parameters.stencilReference) { console.warn("RenderPassParameters.stencilReference not yet implemented in WebGL"); parameters[2967] = parameters.stencilReference; } if (parameters.colorMask) { glParameters.colorMask = COLOR_CHANNELS.map((channel) => Boolean(channel & parameters.colorMask)); } this.glParameters = glParameters; setGLParameters(this.device.gl, glParameters); } beginOcclusionQuery(queryIndex) { const webglQuerySet = this.props.occlusionQuerySet; webglQuerySet == null ? void 0 : webglQuerySet.beginOcclusionQuery(); } endOcclusionQuery() { const webglQuerySet = this.props.occlusionQuerySet; webglQuerySet == null ? void 0 : webglQuerySet.endOcclusionQuery(); } // PRIVATE /** * Optionally clears depth, color and stencil buffers based on parameters */ clear() { const glParameters = { ...this.glParameters }; let clearMask = 0; if (this.props.clearColor !== false) { clearMask |= GL_COLOR_BUFFER_BIT; glParameters.clearColor = this.props.clearColor; } if (this.props.clearDepth !== false) { clearMask |= GL_DEPTH_BUFFER_BIT; glParameters.clearDepth = this.props.clearDepth; } if (this.props.clearStencil !== false) { clearMask |= GL_STENCIL_BUFFER_BIT; glParameters.clearStencil = this.props.clearStencil; } if (clearMask !== 0) { withGLParameters(this.device.gl, glParameters, () => { this.device.gl.clear(clearMask); }); } } /** * WebGL2 - clear a specific color buffer */ clearColorBuffer(drawBuffer = 0, value = [0, 0, 0, 0]) { withGLParameters(this.device.gl, { framebuffer: this.props.framebuffer }, () => { switch (value.constructor) { case Int32Array: this.device.gl.clearBufferiv(GL_COLOR, drawBuffer, value); break; case Uint32Array: this.device.gl.clearBufferuiv(GL_COLOR, drawBuffer, value); break; case Float32Array: default: this.device.gl.clearBufferfv(GL_COLOR, drawBuffer, value); break; } }); } }; // dist/adapter/resources/webgl-render-pipeline.js var import_core17 = require("@luma.gl/core"); var import_core18 = require("@luma.gl/core"); var import_constants21 = require("@luma.gl/constants"); // dist/adapter/helpers/get-shader-layout.js var import_constants18 = require("@luma.gl/constants"); // dist/classic/accessor.js var import_core16 = require("@luma.gl/core"); var import_constants16 = require("@luma.gl/constants"); // dist/classic/typed-array-utils.js var import_constants15 = require("@luma.gl/constants"); var ERR_TYPE_DEDUCTION = "Failed to deduce GL constant from typed array"; function getGLTypeFromTypedArray(arrayOrType) { const type = ArrayBuffer.isView(arrayOrType) ? 
arrayOrType.constructor : arrayOrType; switch (type) { case Float32Array: return 5126; case Uint16Array: return 5123; case Uint32Array: return 5125; case Uint8Array: return 5121; case Uint8ClampedArray: return 5121; case Int8Array: return 5120; case Int16Array: return 5122; case Int32Array: return 5124; default: throw new Error(ERR_TYPE_DEDUCTION); } } function getTypedArrayFromGLType(glType, options) { const { clamped = true } = options || {}; switch (glType) { case 5126: return Float32Array; case 5123: case 33635: case 32819: case 32820: return Uint16Array; case 5125: return Uint32Array; case 5121: return clamped ? Uint8ClampedArray : Uint8Array; case 5120: return Int8Array; case 5122: return Int16Array; case 5124: return Int32Array; default: throw new Error("Failed to deduce typed array type from GL constant"); } } // dist/classic/accessor.js var DEFAULT_ACCESSOR_VALUES = { offset: 0, stride: 0, type: 5126, size: 1, divisor: 0, normalized: false, integer: false }; var PROP_CHECKS = { deprecatedProps: { instanced: "divisor", isInstanced: "divisor" } }; var Accessor = class { offset; stride; type; size; divisor; normalized; integer; buffer; index; static getBytesPerElement(accessor) { const ArrayType = getTypedArrayFromGLType(accessor.type || 5126); return ArrayType.BYTES_PER_ELEMENT; } static getBytesPerVertex(accessor) { (0, import_core16.assert)(accessor.size); const ArrayType = getTypedArrayFromGLType(accessor.type || 5126); return ArrayType.BYTES_PER_ELEMENT * accessor.size; } // Combines (merges) a list of accessors. On top of default values // Usually [programAccessor, bufferAccessor, appAccessor] // All props will be set in the returned object. // TODO check for conflicts between values in the supplied accessors static resolve(...accessors) { return new Accessor(...[DEFAULT_ACCESSOR_VALUES, ...accessors]); } constructor(...accessors) { accessors.forEach((accessor) => this._assign(accessor)); Object.freeze(this); } toString() { return JSON.stringify(this); } // ACCESSORS // TODO - remove> get BYTES_PER_ELEMENT() { return Accessor.getBytesPerElement(this); } get BYTES_PER_VERTEX() { return Accessor.getBytesPerVertex(this); } // PRIVATE // eslint-disable-next-line complexity, max-statements _assign(props = {}) { props = (0, import_core16.checkProps)("Accessor", props, PROP_CHECKS); if (props.type !== void 0) { this.type = props.type; if (props.type === 5124 || props.type === 5125) { this.integer = true; } } if (props.size !== void 0) { this.size = props.size; } if (props.offset !== void 0) { this.offset = props.offset; } if (props.stride !== void 0) { this.stride = props.stride; } if (props.normalize !== void 0) { this.normalized = props.normalize; } if (props.normalized !== void 0) { this.normalized = props.normalized; } if (props.integer !== void 0) { this.integer = props.integer; } if (props.divisor !== void 0) { this.divisor = props.divisor; } if (props.buffer !== void 0) { this.buffer = props.buffer; } if (props.index !== void 0) { if (typeof props.index === "boolean") { this.index = props.index ? 1 : 0; } else { this.index = props.index; } } if (props.instanced !== void 0) { this.divisor = props.instanced ? 1 : 0; } if (props.isInstanced !== void 0) { this.divisor = props.isInstanced ? 
1 : 0; } if (this.offset === void 0) delete this.offset; if (this.stride === void 0) delete this.stride; if (this.type === void 0) delete this.type; if (this.size === void 0) delete this.size; if (this.divisor === void 0) delete this.divisor; if (this.normalized === void 0) delete this.normalized; if (this.integer === void 0) delete this.integer; if (this.buffer === void 0) delete this.buffer; if (this.index === void 0) delete this.index; return this; } }; // dist/adapter/helpers/decode-webgl-types.js var import_constants17 = require("@luma.gl/constants"); function isSamplerUniform(type) { return SAMPLER_TYPES.includes(type); } var SAMPLER_TYPES = [ 35678, 35680, 35679, 35682, 36289, 36292, 36293, 36298, 36299, 36300, 36303, 36306, 36307, 36308, 36311 ]; var COMPOSITE_GL_TYPES = { [5126]: [5126, 1, "float", "f32", "float32"], [35664]: [5126, 2, "vec2", "vec2", "float32x2"], [35665]: [5126, 3, "vec3", "vec3", "float32x3"], [35666]: [5126, 4, "vec4", "vec4", "float32x4"], [5124]: [5124, 1, "int", "i32", "sint32"], [35667]: [5124, 2, "ivec2", "vec2", "sint32x2"], [35668]: [5124, 3, "ivec3", "vec3", "sint32x3"], [35669]: [5124, 4, "ivec4", "vec4", "sint32x4"], [5125]: [5125, 1, "uint", "u32", "uint32"], [36294]: [5125, 2, "uvec2", "vec2", "uint32x2"], [36295]: [5125, 3, "uvec3", "vec3", "uint32x3"], [36296]: [5125, 4, "uvec4", "vec4", "uint32x4"], [35670]: [5126, 1, "bool", "f32", "float32"], [35671]: [5126, 2, "bvec2", "vec2", "float32x2"], [35672]: [5126, 3, "bvec3", "vec3", "float32x3"], [35673]: [5126, 4, "bvec4", "vec4", "float32x4"], // TODO - are sizes/components below correct? [35674]: [5126, 8, "mat2", "mat2x2"], // 4 [35685]: [5126, 8, "mat2x3", "mat2x3"], // 6 [35686]: [5126, 8, "mat2x4", "mat2x4"], // 8 [35687]: [5126, 12, "mat3x2", "mat3x2"], // 6 [35675]: [5126, 12, "mat3", "mat3x3"], // 9 [35688]: [5126, 12, "mat3x4", "mat3x4"], // 12 [35689]: [5126, 16, "mat4x2", "mat4x2"], // 8 [35690]: [5126, 16, "mat4x3", "mat4x3"], // 12 [35676]: [5126, 16, "mat4", "mat4x4"] // 16 }; function decodeGLUniformType(glUniformType) { const typeAndSize = COMPOSITE_GL_TYPES[glUniformType]; if (!typeAndSize) { throw new Error("uniform"); } const [glType, components, , format] = typeAndSize; return { format, components, glType }; } function decodeGLAttributeType(glAttributeType) { const typeAndSize = COMPOSITE_GL_TYPES[glAttributeType]; if (!typeAndSize) { throw new Error("attribute"); } const [, components, , shaderType, vertexFormat] = typeAndSize; const attributeType = shaderType; return { attributeType, vertexFormat, components }; } // dist/adapter/helpers/get-shader-layout.js function getShaderLayout(gl, program) { const shaderLayout = { attributes: [], bindings: [] }; shaderLayout.attributes = readAttributeDeclarations(gl, program); const uniformBlocks = readUniformBlocks(gl, program); for (const uniformBlock of uniformBlocks) { const uniforms2 = uniformBlock.uniforms.map((uniform) => ({ name: uniform.name, format: uniform.format, byteOffset: uniform.byteOffset, byteStride: uniform.byteStride, arrayLength: uniform.arrayLength })); shaderLayout.bindings.push({ type: "uniform", name: uniformBlock.name, location: uniformBlock.location, visibility: (uniformBlock.vertex ? 1 : 0) & (uniformBlock.fragment ? 
2 : 0), minBindingSize: uniformBlock.byteLength, uniforms: uniforms2 }); } const uniforms = readUniformBindings(gl, program); let textureUnit = 0; for (const uniform of uniforms) { if (isSamplerUniform(uniform.type)) { const { viewDimension, sampleType } = getSamplerInfo(uniform.type); shaderLayout.bindings.push({ type: "texture", name: uniform.name, location: textureUnit, viewDimension, sampleType }); uniform.textureUnit = textureUnit; textureUnit += 1; } } if (uniforms.length) { shaderLayout.uniforms = uniforms; } const varyings = readVaryings(gl, program); if (varyings == null ? void 0 : varyings.length) { shaderLayout.varyings = varyings; } return shaderLayout; } function readAttributeDeclarations(gl, program) { const attributes = []; const count = gl.getProgramParameter(program, 35721); for (let index = 0; index < count; index++) { const activeInfo = gl.getActiveAttrib(program, index); if (!activeInfo) { throw new Error("activeInfo"); } const { name, type: compositeType /* , size*/ } = activeInfo; const location = gl.getAttribLocation(program, name); if (location >= 0) { const { attributeType } = decodeGLAttributeType(compositeType); const stepMode = /instance/i.test(name) ? "instance" : "vertex"; attributes.push({ name, location, stepMode, type: attributeType // size - for arrays, size is the number of elements in the array }); } } attributes.sort((a, b) => a.location - b.location); return attributes; } function readVaryings(gl, program) { const varyings = []; const count = gl.getProgramParameter(program, 35971); for (let location = 0; location < count; location++) { const activeInfo = gl.getTransformFeedbackVarying(program, location); if (!activeInfo) { throw new Error("activeInfo"); } const { name, type: compositeType, size } = activeInfo; const { glType, components } = decodeGLUniformType(compositeType); const accessor = new Accessor({ type: glType, size: size * components }); const varying = { location, name, accessor }; varyings.push(varying); } varyings.sort((a, b) => a.location - b.location); return varyings; } function readUniformBindings(gl, program) { const uniforms = []; const uniformCount = gl.getProgramParameter(program, 35718); for (let i = 0; i < uniformCount; i++) { const activeInfo = gl.getActiveUniform(program, i); if (!activeInfo) { throw new Error("activeInfo"); } const { name: rawName, size, type } = activeInfo; const { name, isArray: isArray2 } = parseUniformName(rawName); let webglLocation = gl.getUniformLocation(program, name); const uniformInfo = { // WebGL locations are uniquely typed but just numbers location: webglLocation, name, size, type, isArray: isArray2 }; uniforms.push(uniformInfo); if (uniformInfo.size > 1) { for (let j = 0; j < uniformInfo.size; j++) { const elementName = `${name}[${j}]`; webglLocation = gl.getUniformLocation(program, elementName); const arrayElementUniformInfo = { ...uniformInfo, name: elementName, location: webglLocation }; uniforms.push(arrayElementUniformInfo); } } } return uniforms; } function readUniformBlocks(gl, program) { const getBlockParameter = (blockIndex, pname) => gl.getActiveUniformBlockParameter(program, blockIndex, pname); const uniformBlocks = []; const blockCount = gl.getProgramParameter(program, 35382); for (let blockIndex = 0; blockIndex < blockCount; blockIndex++) { const blockInfo = { name: gl.getActiveUniformBlockName(program, blockIndex) || "", location: getBlockParameter(blockIndex, 35391), byteLength: getBlockParameter(blockIndex, 35392), vertex: getBlockParameter(blockIndex, 35396), fragment: 
getBlockParameter(blockIndex, 35398), uniformCount: getBlockParameter(blockIndex, 35394), uniforms: [] }; const uniformIndices = getBlockParameter(blockIndex, 35395) || []; const uniformType = gl.getActiveUniforms(program, uniformIndices, 35383); const uniformArrayLength = gl.getActiveUniforms(program, uniformIndices, 35384); const uniformOffset = gl.getActiveUniforms(program, uniformIndices, 35387); const uniformStride = gl.getActiveUniforms(program, uniformIndices, 35388); for (let i = 0; i < blockInfo.uniformCount; ++i) { const activeInfo = gl.getActiveUniform(program, uniformIndices[i]); if (!activeInfo) { throw new Error("activeInfo"); } blockInfo.uniforms.push({ name: activeInfo.name, format: decodeGLUniformType(uniformType[i]).format, type: uniformType[i], arrayLength: uniformArrayLength[i], byteOffset: uniformOffset[i], byteStride: uniformStride[i] // matrixStride: uniformStride[i], // rowMajor: uniformRowMajor[i] }); } uniformBlocks.push(blockInfo); } uniformBlocks.sort((a, b) => a.location - b.location); return uniformBlocks; } var SAMPLER_UNIFORMS_GL_TO_GPU = { [35678]: ["2d", "float"], [35680]: ["cube", "float"], [35679]: ["3d", "float"], [35682]: ["3d", "depth"], [36289]: ["2d-array", "float"], [36292]: ["2d-array", "depth"], [36293]: ["cube", "float"], [36298]: ["2d", "sint"], [36299]: ["3d", "sint"], [36300]: ["cube", "sint"], [36303]: ["2d-array", "uint"], [36306]: ["2d", "uint"], [36307]: ["3d", "uint"], [36308]: ["cube", "uint"], [36311]: ["2d-array", "uint"] }; function getSamplerInfo(type) { const sampler = SAMPLER_UNIFORMS_GL_TO_GPU[type]; if (!sampler) { throw new Error("sampler"); } const [viewDimension, sampleType] = sampler; return { viewDimension, sampleType }; } function parseUniformName(name) { if (name[name.length - 1] !== "]") { return { name, length: 1, isArray: false }; } const UNIFORM_NAME_REGEXP = /([^[]*)(\[[0-9]+\])?/; const matches = UNIFORM_NAME_REGEXP.exec(name); if (!matches || matches.length < 2) { throw new Error(`Failed to parse GLSL uniform name ${name}`); } return { name: matches[1], length: matches[2] ? 1 : 0, isArray: Boolean(matches[2]) }; } // dist/adapter/helpers/set-uniform.js var import_constants19 = require("@luma.gl/constants"); function setUniform(gl, location, type, value) { const gl2 = gl; let uniformValue = value; if (uniformValue === true) { uniformValue = 1; } if (uniformValue === false) { uniformValue = 0; } const arrayValue = typeof uniformValue === "number" ? 
[uniformValue] : uniformValue; switch (type) { case 35678: case 35680: case 35679: case 35682: case 36289: case 36292: case 36293: case 36298: case 36299: case 36300: case 36303: case 36306: case 36307: case 36308: case 36311: if (typeof value !== "number") { throw new Error("samplers must be set to integers"); } return gl.uniform1i(location, value); case 5126: return gl.uniform1fv(location, arrayValue); case 35664: return gl.uniform2fv(location, arrayValue); case 35665: return gl.uniform3fv(location, arrayValue); case 35666: return gl.uniform4fv(location, arrayValue); case 5124: return gl.uniform1iv(location, arrayValue); case 35667: return gl.uniform2iv(location, arrayValue); case 35668: return gl.uniform3iv(location, arrayValue); case 35669: return gl.uniform4iv(location, arrayValue); case 35670: return gl.uniform1iv(location, arrayValue); case 35671: return gl.uniform2iv(location, arrayValue); case 35672: return gl.uniform3iv(location, arrayValue); case 35673: return gl.uniform4iv(location, arrayValue); case 5125: return gl2.uniform1uiv(location, arrayValue, 1); case 36294: return gl2.uniform2uiv(location, arrayValue, 2); case 36295: return gl2.uniform3uiv(location, arrayValue, 3); case 36296: return gl2.uniform4uiv(location, arrayValue, 4); case 35674: return gl.uniformMatrix2fv(location, false, arrayValue); case 35675: return gl.uniformMatrix3fv(location, false, arrayValue); case 35676: return gl.uniformMatrix4fv(location, false, arrayValue); case 35685: return gl2.uniformMatrix2x3fv(location, false, arrayValue); case 35686: return gl2.uniformMatrix2x4fv(location, false, arrayValue); case 35687: return gl2.uniformMatrix3x2fv(location, false, arrayValue); case 35688: return gl2.uniformMatrix3x4fv(location, false, arrayValue); case 35689: return gl2.uniformMatrix4x2fv(location, false, arrayValue); case 35690: return gl2.uniformMatrix4x3fv(location, false, arrayValue); } throw new Error("Illegal uniform"); } // dist/adapter/helpers/webgl-topology-utils.js var import_constants20 = require("@luma.gl/constants"); function getGLDrawMode(topology) { switch (topology) { case "point-list": return 0; case "line-list": return 1; case "line-strip": return 3; case "line-loop-webgl": return 2; case "triangle-list": return 4; case "triangle-strip": return 5; case "triangle-fan-webgl": return 6; default: throw new Error(topology); } } function getGLPrimitive(topology) { switch (topology) { case "point-list": return 0; case "line-list": return 1; case "line-strip": return 1; case "line-loop-webgl": return 1; case "triangle-list": return 4; case "triangle-strip": return 4; case "triangle-fan-webgl": return 4; default: throw new Error(topology); } } // dist/adapter/resources/webgl-render-pipeline.js var LOG_PROGRAM_PERF_PRIORITY = 4; var WEBGLRenderPipeline = class extends import_core17.RenderPipeline { /** The WebGL device that created this render pipeline */ device; /** Handle to underlying WebGL program */ handle; /** vertex shader */ vs; /** fragment shader */ fs; /** The layout extracted from shader by WebGL introspection APIs */ introspectedLayout; /** Uniforms set on this model */ uniforms = {}; /** Bindings set on this model */ bindings = {}; /** WebGL varyings */ varyings = null; _uniformCount = 0; _uniformSetters = {}; // TODO are these used? 
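// Hedged note (comment only): draw() below maps this.props.topology to a GL primitive via getGLDrawMode
// above, e.g. 'triangle-list' -> 4 (GL.TRIANGLES), 'line-strip' -> 3 (GL.LINE_STRIP),
// 'point-list' -> 0 (GL.POINTS), before calling gl.drawArrays / gl.drawElements or their instanced variants.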
constructor(device, props) { super(device, props); this.device = device; this.handle = this.props.handle || this.device.gl.createProgram(); this.device.setSpectorMetadata(this.handle, { id: this.props.id }); this.vs = (0, import_core17.cast)(props.vs); this.fs = (0, import_core17.cast)(props.fs); const { varyings, bufferMode = 35981 } = props; if (varyings && varyings.length > 0) { this.varyings = varyings; this.device.gl.transformFeedbackVaryings(this.handle, varyings, bufferMode); } this._linkShaders(); import_core17.log.time(1, `RenderPipeline ${this.id} - shaderLayout introspection`)(); this.introspectedLayout = getShaderLayout(this.device.gl, this.handle); import_core17.log.timeEnd(1, `RenderPipeline ${this.id} - shaderLayout introspection`)(); this.shaderLayout = (0, import_core18.mergeShaderLayout)(this.introspectedLayout, props.shaderLayout); switch (this.props.topology) { case "triangle-fan-webgl": case "line-loop-webgl": import_core17.log.warn(`Primitive topology ${this.props.topology} is deprecated and will be removed in v9.1`); break; default: } } destroy() { if (this.handle) { this.device.gl.deleteProgram(this.handle); this.destroyed = true; } } /** * Bindings include: textures, samplers and uniform buffers * @todo needed for portable model */ setBindings(bindings, options) { for (const [name, value] of Object.entries(bindings)) { const binding = this.shaderLayout.bindings.find((binding2) => binding2.name === name) || this.shaderLayout.bindings.find((binding2) => binding2.name === `${name}Uniforms`); if (!binding) { const validBindings = this.shaderLayout.bindings.map((binding2) => `"${binding2.name}"`).join(", "); if (!(options == null ? void 0 : options.disableWarnings)) { import_core17.log.warn(`Unknown binding "${name}" in render pipeline "${this.id}", expected one of ${validBindings}`)(); } continue; } if (!value) { import_core17.log.warn(`Unsetting binding "${name}" in render pipeline "${this.id}"`)(); } switch (binding.type) { case "uniform": if (!(value instanceof WEBGLBuffer) && !(value.buffer instanceof WEBGLBuffer)) { throw new Error("buffer value"); } break; case "texture": if (!(value instanceof WEBGLTextureView || value instanceof WEBGLTexture || value instanceof WEBGLFramebuffer)) { throw new Error("texture value"); } break; case "sampler": import_core17.log.warn(`Ignoring sampler ${name}`)(); break; default: throw new Error(binding.type); } this.bindings[name] = value; } } /** @todo needed for portable model * @note The WebGL API is offers many ways to draw things * This function unifies those ways into a single call using common parameters with sane defaults */ draw(options) { var _a; const { renderPass, parameters = this.props.parameters, topology = this.props.topology, vertexArray, vertexCount, // indexCount, instanceCount, isInstanced = false, firstVertex = 0, // firstIndex, // firstInstance, // baseVertex, transformFeedback } = options; const glDrawMode = getGLDrawMode(topology); const isIndexed = Boolean(vertexArray.indexBuffer); const glIndexType = (_a = vertexArray.indexBuffer) == null ? 
void 0 : _a.glIndexType; if (this.linkStatus !== "success") { import_core17.log.info(2, `RenderPipeline:${this.id}.draw() aborted - waiting for shader linking`)(); return false; } if (!this._areTexturesRenderable() || vertexCount === 0) { import_core17.log.info(2, `RenderPipeline:${this.id}.draw() aborted - textures not yet loaded`)(); return false; } if (vertexCount === 0) { import_core17.log.info(2, `RenderPipeline:${this.id}.draw() aborted - no vertices to draw`)(); return true; } this.device.gl.useProgram(this.handle); vertexArray.bindBeforeRender(renderPass); if (transformFeedback) { transformFeedback.begin(this.props.topology); } this._applyBindings(); this._applyUniforms(); const webglRenderPass = renderPass; withDeviceAndGLParameters(this.device, parameters, webglRenderPass.glParameters, () => { if (isIndexed && isInstanced) { this.device.gl.drawElementsInstanced( glDrawMode, vertexCount || 0, // indexCount? glIndexType, firstVertex, instanceCount || 0 ); } else if (isIndexed) { this.device.gl.drawElements(glDrawMode, vertexCount || 0, glIndexType, firstVertex); } else if (isInstanced) { this.device.gl.drawArraysInstanced(glDrawMode, firstVertex, vertexCount || 0, instanceCount || 0); } else { this.device.gl.drawArrays(glDrawMode, firstVertex, vertexCount || 0); } if (transformFeedback) { transformFeedback.end(); } }); vertexArray.unbindAfterRender(renderPass); return true; } // DEPRECATED METHODS setUniformsWebGL(uniforms) { const { bindings } = (0, import_core17.splitUniformsAndBindings)(uniforms); Object.keys(bindings).forEach((name) => { import_core17.log.warn(`Unsupported value "${JSON.stringify(bindings[name])}" used in setUniforms() for key ${name}. Use setBindings() instead?`)(); }); Object.assign(this.uniforms, uniforms); } // PRIVATE METHODS // setAttributes(attributes: Record): void {} // setBindings(bindings: Record): void {} async _linkShaders() { const { gl } = this.device; gl.attachShader(this.handle, this.vs.handle); gl.attachShader(this.handle, this.fs.handle); import_core17.log.time(LOG_PROGRAM_PERF_PRIORITY, `linkProgram for ${this.id}`)(); gl.linkProgram(this.handle); import_core17.log.timeEnd(LOG_PROGRAM_PERF_PRIORITY, `linkProgram for ${this.id}`)(); if (import_core17.log.level === 0) { } if (!this.device.features.has("compilation-status-async-webgl")) { const status2 = this._getLinkStatus(); this._reportLinkStatus(status2); return; } import_core17.log.once(1, "RenderPipeline linking is asynchronous")(); await this._waitForLinkComplete(); import_core17.log.info(2, `RenderPipeline ${this.id} - async linking complete: ${this.linkStatus}`)(); const status = this._getLinkStatus(); this._reportLinkStatus(status); } /** Report link status. First, check for shader compilation failures if linking fails */ _reportLinkStatus(status) { var _a; switch (status) { case "success": return; default: if (this.vs.compilationStatus === "error") { this.vs.debugShader(); throw new Error(`Error during compilation of shader ${this.vs.id}`); } if (((_a = this.fs) == null ? void 0 : _a.compilationStatus) === "error") { this.fs.debugShader(); throw new Error(`Error during compilation of shader ${this.fs.id}`); } throw new Error(`Error during ${status}: ${this.device.gl.getProgramInfoLog(this.handle)}`); } } /** * Get the shader compilation status * TODO - Load log even when no error reported, to catch warnings? 
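* Returns 'success' only when the program both links (LINK_STATUS, 35714) and validates
* (VALIDATE_STATUS, 35715); otherwise this.linkStatus is set to 'error' and the name of the
* failing stage ('linking' or 'validation') is returned for _reportLinkStatus to include in its error message.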
* https://gamedev.stackexchange.com/questions/30429/how-to-detect-glsl-warnings */ _getLinkStatus() { const { gl } = this.device; const linked = gl.getProgramParameter(this.handle, 35714); if (!linked) { this.linkStatus = "error"; return "linking"; } gl.validateProgram(this.handle); const validated = gl.getProgramParameter(this.handle, 35715); if (!validated) { this.linkStatus = "error"; return "validation"; } this.linkStatus = "success"; return "success"; } /** Use KHR_parallel_shader_compile extension if available */ async _waitForLinkComplete() { const waitMs = async (ms) => await new Promise((resolve) => setTimeout(resolve, ms)); const DELAY_MS = 10; if (!this.device.features.has("compilation-status-async-webgl")) { await waitMs(DELAY_MS); return; } const { gl } = this.device; for (; ; ) { const complete = gl.getProgramParameter(this.handle, 37297); if (complete) { return; } await waitMs(DELAY_MS); } } /** * Checks if all texture-values uniforms are renderable (i.e. loaded) * Update a texture if needed (e.g. from video) * Note: This is currently done before every draw call */ _areTexturesRenderable() { let texturesRenderable = true; for (const [, texture] of Object.entries(this.bindings)) { if (texture instanceof WEBGLTexture) { texture.update(); texturesRenderable = texturesRenderable && texture.loaded; } } return texturesRenderable; } /** Apply any bindings (before each draw call) */ _applyBindings() { if (this.linkStatus !== "success") { return; } const { gl } = this.device; gl.useProgram(this.handle); let textureUnit = 0; let uniformBufferIndex = 0; for (const binding of this.shaderLayout.bindings) { const value = this.bindings[binding.name] || this.bindings[binding.name.replace(/Uniforms$/, "")]; if (!value) { throw new Error(`No value for binding ${binding.name} in ${this.id}`); } switch (binding.type) { case "uniform": const { name } = binding; const location = gl.getUniformBlockIndex(this.handle, name); if (location === 4294967295) { throw new Error(`Invalid uniform block name ${name}`); } gl.uniformBlockBinding(this.handle, uniformBufferIndex, location); if (value instanceof WEBGLBuffer) { gl.bindBufferBase(35345, uniformBufferIndex, value.handle); } else { gl.bindBufferRange( 35345, uniformBufferIndex, // @ts-expect-error value.buffer.handle, // @ts-expect-error value.offset || 0, // @ts-expect-error value.size || value.buffer.byteLength - value.offset ); } uniformBufferIndex += 1; break; case "texture": if (!(value instanceof WEBGLTextureView || value instanceof WEBGLTexture || value instanceof WEBGLFramebuffer)) { throw new Error("texture"); } let texture; if (value instanceof WEBGLTextureView) { texture = value.texture; } else if (value instanceof WEBGLTexture) { texture = value; } else if (value instanceof WEBGLFramebuffer && value.colorAttachments[0] instanceof WEBGLTextureView) { import_core17.log.warn("Passing framebuffer in texture binding may be deprecated. 
Use fbo.colorAttachments[0] instead")(); texture = value.colorAttachments[0].texture; } else { throw new Error("No texture"); } gl.activeTexture(33984 + textureUnit); gl.bindTexture(texture.target, texture.handle); textureUnit += 1; break; case "sampler": break; case "storage": case "read-only-storage": throw new Error(`binding type '${binding.type}' not supported in WebGL`); } } } /** * Due to program sharing, uniforms need to be reset before every draw call * (though caching will avoid redundant WebGL calls) */ _applyUniforms() { for (const uniformLayout of this.shaderLayout.uniforms || []) { const { name, location, type, textureUnit } = uniformLayout; const value = this.uniforms[name] ?? textureUnit; if (value !== void 0) { setUniform(this.device.gl, location, type, value); } } } }; // dist/adapter/resources/webgl-command-encoder.js var import_core20 = require("@luma.gl/core"); // dist/adapter/resources/webgl-command-buffer.js var import_core19 = require("@luma.gl/core"); var import_constants22 = require("@luma.gl/constants"); function cast2(value) { return value; } var WEBGLCommandBuffer = class extends import_core19.CommandBuffer { device; commands = []; constructor(device) { super(device, {}); this.device = device; } submitCommands(commands = this.commands) { for (const command of commands) { switch (command.name) { case "copy-buffer-to-buffer": _copyBufferToBuffer(this.device, command.options); break; case "copy-buffer-to-texture": _copyBufferToTexture(this.device, command.options); break; case "copy-texture-to-buffer": _copyTextureToBuffer(this.device, command.options); break; case "copy-texture-to-texture": _copyTextureToTexture(this.device, command.options); break; } } } }; function _copyBufferToBuffer(device, options) { const source = cast2(options.source); const destination = cast2(options.destination); device.gl.bindBuffer(36662, source.handle); device.gl.bindBuffer(36663, destination.handle); device.gl.copyBufferSubData(36662, 36663, options.sourceOffset ?? 0, options.destinationOffset ?? 0, options.size); device.gl.bindBuffer(36662, null); device.gl.bindBuffer(36663, null); } function _copyBufferToTexture(device, options) { throw new Error("Not implemented"); } function _copyTextureToBuffer(device, options) { const { /** Texture to copy to/from. */ source, /** Mip-map level of the texture to copy to/from. (Default 0) */ mipLevel = 0, /** Defines which aspects of the texture to copy to/from. */ aspect = "all", /** Width to copy */ width = options.source.width, /** Height to copy */ height = options.source.height, depthOrArrayLayers = 0, /** Defines the origin of the copy - the minimum corner of the texture sub-region to copy to/from. */ origin = [0, 0], /** Destination buffer */ destination, /** Offset, in bytes, from the beginning of the buffer to the start of the image data (default 0) */ byteOffset = 0, /** * The stride, in bytes, between the beginning of each block row and the subsequent block row. * Required if there are multiple block rows (i.e. the copy height or depth is more than one block). */ bytesPerRow, /** * Number of block rows per single image of the texture. * rowsPerImage × bytesPerRow is the stride, in bytes, between the beginning of each image of data and the subsequent image. * Required if there are multiple images (i.e. the copy depth is more than one). 
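* @note In this WebGL implementation the copy is performed by binding the destination buffer to
* PIXEL_PACK_BUFFER and calling gl.readPixels() against a (possibly temporary) framebuffer that
* wraps the source texture; non-zero mipLevel, depthOrArrayLayers, bytesPerRow and rowsPerImage are not yet supported.
* Usage sketch (hypothetical `texture` and `buffer` variables, recorded through a command encoder):
*   const encoder = device.createCommandEncoder();
*   encoder.copyTextureToBuffer({source: texture, destination: buffer, byteOffset: 0});
*   encoder.finish(); // submits the recorded copy commands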
*/ rowsPerImage } = options; if (aspect !== "all") { throw new Error("not supported"); } if (mipLevel !== 0 || depthOrArrayLayers !== 0 || bytesPerRow || rowsPerImage) { throw new Error("not implemented"); } const { framebuffer, destroyFramebuffer } = getFramebuffer(source); let prevHandle; try { const webglBuffer = destination; const sourceWidth = width || framebuffer.width; const sourceHeight = height || framebuffer.height; const sourceParams = getWebGLTextureParameters(framebuffer.texture.props.format); const sourceFormat = sourceParams.dataFormat; const sourceType = sourceParams.type; device.gl.bindBuffer(35051, webglBuffer.handle); prevHandle = device.gl.bindFramebuffer(36160, framebuffer.handle); device.gl.readPixels(origin[0], origin[1], sourceWidth, sourceHeight, sourceFormat, sourceType, byteOffset); } finally { device.gl.bindBuffer(35051, null); if (prevHandle !== void 0) { device.gl.bindFramebuffer(36160, prevHandle); } if (destroyFramebuffer) { framebuffer.destroy(); } } } function _copyTextureToTexture(device, options) { const { /** Texture to copy to/from. */ source, /** Mip-map level of the texture to copy to (Default 0) */ destinationMipLevel = 0, /** Defines which aspects of the texture to copy to/from. */ // aspect = 'all', /** Defines the origin of the copy - the minimum corner of the texture sub-region to copy from. */ origin = [0, 0], /** Defines the origin of the copy - the minimum corner of the texture sub-region to copy to. */ destinationOrigin = [0, 0], /** Texture to copy to/from. */ destination /** Mip-map level of the texture to copy to/from. (Default 0) */ // destinationMipLevel = options.mipLevel, /** Defines the origin of the copy - the minimum corner of the texture sub-region to copy to/from. */ // destinationOrigin = [0, 0], /** Defines which aspects of the texture to copy to/from. */ // destinationAspect = options.aspect, } = options; let { width = options.destination.width, height = options.destination.height // depthOrArrayLayers = 0 } = options; const { framebuffer, destroyFramebuffer } = getFramebuffer(source); const [sourceX, sourceY] = origin; const [destinationX, destinationY, destinationZ] = destinationOrigin; const prevHandle = device.gl.bindFramebuffer(36160, framebuffer.handle); let texture = null; let textureTarget; if (destination instanceof WEBGLTexture) { texture = destination; width = Number.isFinite(width) ? width : texture.width; height = Number.isFinite(height) ? 
height : texture.height; texture.bind(0); textureTarget = texture.target; } else { throw new Error("invalid destination"); } switch (textureTarget) { case 3553: case 34067: device.gl.copyTexSubImage2D(textureTarget, destinationMipLevel, destinationX, destinationY, sourceX, sourceY, width, height); break; case 35866: case 32879: device.gl.copyTexSubImage3D(textureTarget, destinationMipLevel, destinationX, destinationY, destinationZ, sourceX, sourceY, width, height); break; default: } if (texture) { texture.unbind(); } device.gl.bindFramebuffer(36160, prevHandle); if (destroyFramebuffer) { framebuffer.destroy(); } } function getFramebuffer(source) { if (source instanceof import_core19.Texture) { const { width, height, id } = source; const framebuffer = source.device.createFramebuffer({ id: `framebuffer-for-${id}`, width, height, colorAttachments: [source] }); return { framebuffer, destroyFramebuffer: true }; } return { framebuffer: source, destroyFramebuffer: false }; } // dist/adapter/resources/webgl-command-encoder.js var WEBGLCommandEncoder = class extends import_core20.CommandEncoder { device; commandBuffer; constructor(device, props) { super(device, props); this.device = device; this.commandBuffer = new WEBGLCommandBuffer(device); } destroy() { } finish() { this.commandBuffer.submitCommands(); } // beginRenderPass(GPURenderPassDescriptor descriptor): GPURenderPassEncoder; // beginComputePass(optional GPUComputePassDescriptor descriptor = {}): GPUComputePassEncoder; // finish(options?: {id?: string}): GPUCommandBuffer; copyBufferToBuffer(options) { this.commandBuffer.commands.push({ name: "copy-buffer-to-buffer", options }); } copyBufferToTexture(options) { this.commandBuffer.commands.push({ name: "copy-buffer-to-texture", options }); } copyTextureToBuffer(options) { this.commandBuffer.commands.push({ name: "copy-texture-to-buffer", options }); } copyTextureToTexture(options) { this.commandBuffer.commands.push({ name: "copy-texture-to-texture", options }); } pushDebugGroup(groupLabel) { } popDebugGroup() { } insertDebugMarker(markerLabel) { } resolveQuerySet(querySet, destination, options) { } }; // dist/adapter/resources/webgl-vertex-array.js var import_core21 = require("@luma.gl/core"); var import_constants23 = require("@luma.gl/constants"); var import_env2 = require("@probe.gl/env"); var WEBGLVertexArray = class extends import_core21.VertexArray { get [Symbol.toStringTag]() { return "VertexArray"; } device; handle; /** Attribute 0 buffer constant */ buffer = null; bufferValue = null; /** * Attribute 0 can not be disable on most desktop OpenGL based browsers */ static isConstantAttributeZeroSupported(device) { return (0, import_env2.getBrowser)() === "Chrome"; } // Create a VertexArray constructor(device, props) { super(device, props); this.device = device; this.handle = this.device.gl.createVertexArray(); } destroy() { var _a; super.destroy(); if (this.buffer) { (_a = this.buffer) == null ? void 0 : _a.destroy(); } if (this.handle) { this.device.gl.deleteVertexArray(this.handle); this.handle = void 0; } } /** // Set (bind/unbind) an elements buffer, for indexed rendering. // Must be a Buffer bound to GL.ELEMENT_ARRAY_BUFFER or null. Constants not supported * * @param elementBuffer */ setIndexBuffer(indexBuffer) { const buffer = indexBuffer; if (buffer && buffer.glTarget !== 34963) { throw new Error("Use .setBuffer()"); } this.device.gl.bindVertexArray(this.handle); this.device.gl.bindBuffer(34963, buffer ? 
buffer.handle : null); this.indexBuffer = buffer; this.device.gl.bindVertexArray(null); } /** Set a location in vertex attributes array to a buffer, enables the location, sets divisor */ setBuffer(location, attributeBuffer) { const buffer = attributeBuffer; if (buffer.glTarget === 34963) { throw new Error("Use .setIndexBuffer()"); } const { size, type, stride, offset, normalized, integer, divisor } = this._getAccessor(location); this.device.gl.bindVertexArray(this.handle); this.device.gl.bindBuffer(34962, buffer.handle); if (integer) { this.device.gl.vertexAttribIPointer(location, size, type, stride, offset); } else { this.device.gl.vertexAttribPointer(location, size, type, normalized, stride, offset); } this.device.gl.bindBuffer(34962, null); this.device.gl.enableVertexAttribArray(location); this.device.gl.vertexAttribDivisor(location, divisor || 0); this.attributes[location] = buffer; this.device.gl.bindVertexArray(null); } /** Set a location in vertex attributes array to a constant value, disables the location */ setConstantWebGL(location, value) { this._enable(location, false); this.attributes[location] = value; } bindBeforeRender() { this.device.gl.bindVertexArray(this.handle); this._applyConstantAttributes(); } unbindAfterRender() { this.device.gl.bindVertexArray(null); } // Internal methods /** * Constant attributes need to be reset before every draw call * Any attribute that is disabled in the current vertex array object * is read from the context's global constant value for that attribute location. * @note Constant attributes are only supported in WebGL, not in WebGPU */ _applyConstantAttributes() { for (let location = 0; location < this.maxVertexAttributes; ++location) { const constant = this.attributes[location]; if (ArrayBuffer.isView(constant)) { this.device.setConstantAttributeWebGL(location, constant); } } } /** * Set a location in vertex attributes array to a buffer, enables the location, sets divisor * @note requires vertex array to be bound */ // protected _setAttributeLayout(location: number): void { // const {size, type, stride, offset, normalized, integer, divisor} = this._getAccessor(location); // // WebGL2 supports *integer* data formats, i.e. GPU will see integer values // if (integer) { // this.device.gl.vertexAttribIPointer(location, size, type, stride, offset); // } else { // // Attaches ARRAY_BUFFER with specified buffer format to location // this.device.gl.vertexAttribPointer(location, size, type, normalized, stride, offset); // } // this.device.gl.vertexAttribDivisor(location, divisor || 0); // } /** Get an accessor from the */ _getAccessor(location) { const attributeInfo = this.attributeInfos[location]; if (!attributeInfo) { throw new Error(`Unknown attribute location ${location}`); } const glType = getGLFromVertexType(attributeInfo.bufferDataType); return { size: attributeInfo.bufferComponents, type: glType, stride: attributeInfo.byteStride, offset: attributeInfo.byteOffset, normalized: attributeInfo.normalized, // it is the shader attribute declaration, not the vertex memory format, // that determines if the data in the buffer will be treated as integers. // // Also note that WebGL supports assigning non-normalized integer data to floating point attributes, // but as far as we can tell, WebGPU does not. integer: attributeInfo.integer, divisor: attributeInfo.stepMode === "instance" ? 
1 : 0 }; } /** * Enabling an attribute location makes it reference the currently bound buffer * Disabling an attribute location makes it reference the global constant value * TODO - handle single values for size 1 attributes? * TODO - convert classic arrays based on known type? */ _enable(location, enable2 = true) { const canDisableAttributeZero = WEBGLVertexArray.isConstantAttributeZeroSupported(this.device); const canDisableAttribute = canDisableAttributeZero || location !== 0; if (enable2 || canDisableAttribute) { location = Number(location); this.device.gl.bindVertexArray(this.handle); if (enable2) { this.device.gl.enableVertexAttribArray(location); } else { this.device.gl.disableVertexAttribArray(location); } this.device.gl.bindVertexArray(null); } } /** * Provide a means to create a buffer that is equivalent to a constant. * NOTE: Desktop OpenGL cannot disable attribute 0. * https://stackoverflow.com/questions/20305231/webgl-warning-attribute-0-is-disabled- * this-has-significant-performance-penalty */ getConstantBuffer(elementCount, value) { const constantValue = normalizeConstantArrayValue(value); const byteLength = constantValue.byteLength * elementCount; const length = constantValue.length * elementCount; if (this.buffer && byteLength !== this.buffer.byteLength) { throw new Error(`Buffer size is immutable, byte length ${byteLength} !== ${this.buffer.byteLength}.`); } let updateNeeded = !this.buffer; this.buffer = this.buffer || this.device.createBuffer({ byteLength }); updateNeeded = updateNeeded || !compareConstantArrayValues(constantValue, this.bufferValue); if (updateNeeded) { const typedArray = (0, import_core21.getScratchArray)(value.constructor, length); (0, import_core21.fillArray)({ target: typedArray, source: constantValue, start: 0, count: length }); this.buffer.write(typedArray); this.bufferValue = value; } return this.buffer; } }; function normalizeConstantArrayValue(arrayValue) { if (Array.isArray(arrayValue)) { return new Float32Array(arrayValue); } return arrayValue; } function compareConstantArrayValues(v1, v2) { if (!v1 || !v2 || v1.length !== v2.length || v1.constructor !== v2.constructor) { return false; } for (let i = 0; i < v1.length; ++i) { if (v1[i] !== v2[i]) { return false; } } return true; } // dist/adapter/resources/webgl-transform-feedback.js var import_core22 = require("@luma.gl/core"); var import_constants24 = require("@luma.gl/constants"); var WEBGLTransformFeedback = class extends import_core22.TransformFeedback { device; gl; handle; /** * NOTE: The Model already has this information while drawing, but * TransformFeedback currently needs it internally, to look up * varying information outside of a draw() call. */ layout; buffers = {}; unusedBuffers = {}; /** * Allows us to avoid a Chrome bug where a buffer that is already bound to a * different target cannot be bound to 'TRANSFORM_FEEDBACK_BUFFER' target. 
* This a major workaround, see: https://github.com/KhronosGroup/WebGL/issues/2346 */ bindOnUse = true; _bound = false; constructor(device, props) { super(device, props); this.device = device; this.gl = device.gl; this.handle = this.props.handle || this.gl.createTransformFeedback(); this.layout = this.props.layout; if (props.buffers) { this.setBuffers(props.buffers); } Object.seal(this); } destroy() { this.gl.deleteTransformFeedback(this.handle); super.destroy(); } begin(topology = "point-list") { this.gl.bindTransformFeedback(36386, this.handle); if (this.bindOnUse) { this._bindBuffers(); } this.gl.beginTransformFeedback(getGLPrimitive(topology)); } end() { this.gl.endTransformFeedback(); if (this.bindOnUse) { this._unbindBuffers(); } this.gl.bindTransformFeedback(36386, null); } // SUBCLASS setBuffers(buffers) { this.buffers = {}; this.unusedBuffers = {}; this.bind(() => { for (const bufferName in buffers) { this.setBuffer(bufferName, buffers[bufferName]); } }); } setBuffer(locationOrName, bufferOrRange) { const location = this._getVaryingIndex(locationOrName); const { buffer, byteLength, byteOffset } = this._getBufferRange(bufferOrRange); if (location < 0) { this.unusedBuffers[locationOrName] = buffer; import_core22.log.warn(`${this.id} unusedBuffers varying buffer ${locationOrName}`)(); return; } this.buffers[location] = { buffer, byteLength, byteOffset }; if (!this.bindOnUse) { this._bindBuffer(location, buffer, byteOffset, byteLength); } } getBuffer(locationOrName) { if (isIndex(locationOrName)) { return this.buffers[locationOrName] || null; } const location = this._getVaryingIndex(locationOrName); return location >= 0 ? this.buffers[location] : null; } bind(funcOrHandle = this.handle) { if (typeof funcOrHandle !== "function") { this.gl.bindTransformFeedback(36386, funcOrHandle); return this; } let value; if (!this._bound) { this.gl.bindTransformFeedback(36386, this.handle); this._bound = true; value = funcOrHandle(); this._bound = false; this.gl.bindTransformFeedback(36386, null); } else { value = funcOrHandle(); } return value; } unbind() { this.bind(null); } // PRIVATE METHODS /** Extract offsets for bindBufferRange */ _getBufferRange(bufferOrRange) { if (bufferOrRange instanceof WEBGLBuffer) { return { buffer: bufferOrRange, byteOffset: 0, byteLength: bufferOrRange.byteLength }; } const { buffer, byteOffset = 0, byteLength = bufferOrRange.buffer.byteLength } = bufferOrRange; return { buffer, byteOffset, byteLength }; } _getVaryingIndex(locationOrName) { if (isIndex(locationOrName)) { return Number(locationOrName); } for (const varying of this.layout.varyings) { if (locationOrName === varying.name) { return varying.location; } } return -1; } /** * Need to avoid chrome bug where buffer that is already bound to a different target * cannot be bound to 'TRANSFORM_FEEDBACK_BUFFER' target. 
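* Typical flow (sketch): WEBGLRenderPipeline.draw() calls transformFeedback.begin(topology) before
* issuing its draw call and transformFeedback.end() afterwards; with bindOnUse=true the varying
* buffers are bound here from begin() and unbound again from end().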
*/ _bindBuffers() { for (const bufferIndex in this.buffers) { const { buffer, byteLength, byteOffset } = this._getBufferRange(this.buffers[bufferIndex]); this._bindBuffer(Number(bufferIndex), buffer, byteOffset, byteLength); } } _unbindBuffers() { for (const bufferIndex in this.buffers) { this.gl.bindBufferBase(35982, Number(bufferIndex), null); } } _bindBuffer(index, buffer, byteOffset = 0, byteLength) { const handle = buffer && buffer.handle; if (!handle || byteLength === void 0) { this.gl.bindBufferBase(35982, index, handle); } else { this.gl.bindBufferRange(35982, index, handle, byteOffset, byteLength); } } }; function isIndex(value) { if (typeof value === "number") { return Number.isInteger(value); } return /^\d+$/.test(value); } // dist/adapter/resources/webgl-query-set.js var import_core23 = require("@luma.gl/core"); var import_constants25 = require("@luma.gl/constants"); var WEBGLQuerySet = class extends import_core23.QuerySet { device; handle; target = null; _queryPending = false; _pollingPromise = null; get [Symbol.toStringTag]() { return "Query"; } // Create a query class constructor(device, props) { super(device, props); this.device = device; if (props.count > 1) { throw new Error("WebGL QuerySet can only have one value"); } this.handle = this.device.gl.createQuery(); Object.seal(this); } destroy() { this.device.gl.deleteQuery(this.handle); } // FOR RENDER PASS AND COMMAND ENCODER /** * Shortcut for timer query (dependent on extension in both WebGL1 and 2) * Measures GPU time delta between this call and a matching `end` call in the * GPU instruction stream. */ beginTimestampQuery() { return this._begin(35007); } endTimestampQuery() { this._end(); } // Shortcut for occlusion queries beginOcclusionQuery(options) { return this._begin((options == null ? void 0 : options.conservative) ? 36202 : 35887); } endOcclusionQuery() { this._end(); } // Shortcut for transformFeedbackQuery beginTransformFeedbackQuery() { return this._begin(35976); } endTransformFeedbackQuery() { this._end(); } async resolveQuery() { const value = await this.pollQuery(); return [value]; } // PRIVATE METHODS /** * Due to OpenGL API limitations, after calling `begin()` on one Query * instance, `end()` must be called on that same instance before * calling `begin()` on another query. While there can be multiple * outstanding queries representing disjoint `begin()`/`end()` intervals. * It is not possible to interleave or overlap `begin` and `end` calls. */ _begin(target) { if (this._queryPending) { return; } this.target = target; this.device.gl.beginQuery(this.target, this.handle); return; } // ends the current query _end() { if (this._queryPending) { return; } if (this.target) { this.device.gl.endQuery(this.target); this.target = null; this._queryPending = true; } return; } // Returns true if the query result is available isResultAvailable() { if (!this._queryPending) { return false; } const resultAvailable = this.device.gl.getQueryParameter(this.handle, 34919); if (resultAvailable) { this._queryPending = false; } return resultAvailable; } // Timing query is disjoint, i.e. results are invalid isTimerDisjoint() { return this.device.gl.getParameter(36795); } // Returns query result. getResult() { return this.device.gl.getQueryParameter(this.handle, 34918); } // Returns the query result, converted to milliseconds to match JavaScript conventions. 
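// Usage sketch (assumes the disjoint timer query extension is available on the context):
//   querySet.beginTimestampQuery(); /* ...issue GPU commands... */ querySet.endTimestampQuery();
//   const [elapsedNs] = await querySet.resolveQuery();  // polls once per animation frame until available
//   const elapsedMs = querySet.getTimerMilliseconds();  // same raw result divided by 1e6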
getTimerMilliseconds() { return this.getResult() / 1e6; } // Polls the query pollQuery(limit = Number.POSITIVE_INFINITY) { if (this._pollingPromise) { return this._pollingPromise; } let counter = 0; this._pollingPromise = new Promise((resolve, reject) => { const poll = () => { if (this.isResultAvailable()) { resolve(this.getResult()); this._pollingPromise = null; } else if (counter++ > limit) { reject("Timed out"); this._pollingPromise = null; } else { requestAnimationFrame(poll); } }; requestAnimationFrame(poll); }); return this._pollingPromise; } }; // dist/classic/copy-and-blit.js var import_core25 = require("@luma.gl/core"); var import_constants27 = require("@luma.gl/constants"); // dist/classic/format-utils.js var import_core24 = require("@luma.gl/core"); var import_constants26 = require("@luma.gl/constants"); function glFormatToComponents(format) { switch (format) { case 6406: case 33326: case 6403: return 1; case 33328: case 33319: return 2; case 6407: case 34837: return 3; case 6408: case 34836: return 4; default: (0, import_core24.assert)(false); return 0; } } function glTypeToBytes(type) { switch (type) { case 5121: return 1; case 33635: case 32819: case 32820: return 2; case 5126: return 4; default: (0, import_core24.assert)(false); return 0; } } // dist/classic/copy-and-blit.js function readPixelsToArray(source, options) { var _a, _b; const { sourceX = 0, sourceY = 0, sourceFormat = 6408, sourceAttachment = 36064 // TODO - support gl.readBuffer } = options || {}; let { target = null, // following parameters are auto deduced if not provided sourceWidth, sourceHeight, sourceType } = options || {}; const { framebuffer, deleteFramebuffer } = getFramebuffer2(source); (0, import_core25.assert)(framebuffer); const { gl, handle } = framebuffer; sourceWidth = sourceWidth || framebuffer.width; sourceHeight = sourceHeight || framebuffer.height; const attachment = sourceAttachment - 36064; sourceType = sourceType || ((_b = (_a = framebuffer.colorAttachments[attachment]) == null ? void 0 : _a.texture) == null ? 
void 0 : _b.type) || 5121; target = getPixelArray(target, sourceType, sourceFormat, sourceWidth, sourceHeight); sourceType = sourceType || getGLTypeFromTypedArray(target); const prevHandle = gl.bindFramebuffer(36160, handle); gl.readPixels(sourceX, sourceY, sourceWidth, sourceHeight, sourceFormat, sourceType, target); gl.bindFramebuffer(36160, prevHandle || null); if (deleteFramebuffer) { framebuffer.destroy(); } return target; } function readPixelsToBuffer(source, options) { const { target, sourceX = 0, sourceY = 0, sourceFormat = 6408, targetByteOffset = 0 } = options || {}; let { sourceWidth, sourceHeight, sourceType } = options || {}; const { framebuffer, deleteFramebuffer } = getFramebuffer2(source); (0, import_core25.assert)(framebuffer); sourceWidth = sourceWidth || framebuffer.width; sourceHeight = sourceHeight || framebuffer.height; const webglFramebuffer = framebuffer; sourceType = sourceType || 5121; let webglBufferTarget = target; if (!webglBufferTarget) { const components = glFormatToComponents(sourceFormat); const byteCount = glTypeToBytes(sourceType); const byteLength = targetByteOffset + sourceWidth * sourceHeight * components * byteCount; webglBufferTarget = webglFramebuffer.device.createBuffer({ byteLength }); } const commandEncoder = source.device.createCommandEncoder(); commandEncoder.copyTextureToBuffer({ source, width: sourceWidth, height: sourceHeight, origin: [sourceX, sourceY], destination: webglBufferTarget, byteOffset: targetByteOffset }); commandEncoder.destroy(); if (deleteFramebuffer) { framebuffer.destroy(); } return webglBufferTarget; } function getFramebuffer2(source) { if (!(source instanceof import_core25.Framebuffer)) { return { framebuffer: toFramebuffer(source), deleteFramebuffer: true }; } return { framebuffer: source, deleteFramebuffer: false }; } function toFramebuffer(texture, props) { const { device, width, height, id } = texture; const framebuffer = device.createFramebuffer({ ...props, id: `framebuffer-for-${id}`, width, height, colorAttachments: [texture] }); return framebuffer; } function getPixelArray(pixelArray, type, format, width, height) { if (pixelArray) { return pixelArray; } type = type || 5121; const ArrayType = getTypedArrayFromGLType(type, { clamped: false }); const components = glFormatToComponents(format); return new ArrayType(width * height * components); } // dist/classic/clear.js var import_core26 = require("@luma.gl/core"); var GL_DEPTH_BUFFER_BIT2 = 256; var GL_STENCIL_BUFFER_BIT2 = 1024; var GL_COLOR_BUFFER_BIT2 = 16384; var ERR_ARGUMENTS = "clear: bad arguments"; function clear(device, options) { const { framebuffer = null, color = null, depth = null, stencil = null } = options || {}; const parameters = {}; if (framebuffer) { parameters.framebuffer = framebuffer; } let clearFlags = 0; if (color) { clearFlags |= GL_COLOR_BUFFER_BIT2; if (color !== true) { parameters.clearColor = color; } } if (depth) { clearFlags |= GL_DEPTH_BUFFER_BIT2; if (depth !== true) { parameters.clearDepth = depth; } } if (stencil) { clearFlags |= GL_STENCIL_BUFFER_BIT2; if (depth !== true) { parameters.clearStencil = depth; } } (0, import_core26.assert)(clearFlags !== 0, ERR_ARGUMENTS); const gl = device.gl; withGLParameters(gl, parameters, () => { gl.clear(clearFlags); }); } // dist/adapter/webgl-device.js var LOG_LEVEL2 = 1; var _WebGLDevice = class extends import_core27.Device { /** type of this device */ type = "webgl"; /** The underlying WebGL context */ handle; features; limits; info; canvasContext; lost; _resolveContextLost; // // Static 
methods, expected to be present by `luma.createDevice()` // /** Check if WebGL 2 is available */ static isSupported() { return typeof WebGL2RenderingContext !== "undefined"; } /** * Get a device instance from a GL context * Creates and instruments the device if not already created * @param gl * @returns */ static attach(gl) { if (gl instanceof _WebGLDevice) { return gl; } if ((gl == null ? void 0 : gl.device) instanceof import_core27.Device) { return gl.device; } if (!isWebGL(gl)) { throw new Error("Invalid WebGL2RenderingContext"); } return new _WebGLDevice({ gl }); } static async create(props = {}) { var _a; import_core27.log.groupCollapsed(LOG_LEVEL2, "WebGLDevice created")(); const promises = []; if (props.debug) { promises.push(loadWebGLDeveloperTools()); } if (props.spector) { promises.push(loadSpectorJS()); } if (typeof props.canvas === "string") { promises.push(import_core27.CanvasContext.pageLoaded); } const results = await Promise.allSettled(promises); for (const result of results) { if (result.status === "rejected") { import_core27.log.error(`Failed to initialize debug libraries ${result.reason}`)(); } } import_core27.log.probe(LOG_LEVEL2 + 1, "DOM is loaded")(); if ((_a = props.gl) == null ? void 0 : _a.device) { import_core27.log.warn("reattaching existing device")(); return _WebGLDevice.attach(props.gl); } const device = new _WebGLDevice(props); const message2 = `Created ${device.type}${device.debug ? " debug" : ""} context: ${device.info.vendor}, ${device.info.renderer} for canvas: ${device.canvasContext.id}`; import_core27.log.probe(LOG_LEVEL2, message2)(); import_core27.log.table(LOG_LEVEL2, device.info)(); import_core27.log.groupEnd(LOG_LEVEL2)(); return device; } // // Public API // constructor(props) { var _a, _b; super({ ...props, id: props.id || (0, import_core27.uid)("webgl-device") }); const device = (_a = props.gl) == null ? void 0 : _a.device; if (device) { throw new Error(`WebGL context already attached to device ${device.id}`); } const canvas = ((_b = props.gl) == null ? void 0 : _b.canvas) || props.canvas; this.canvasContext = new WebGLCanvasContext(this, { ...props, canvas }); this.lost = new Promise((resolve) => { this._resolveContextLost = resolve; }); let gl = props.gl || null; gl ||= createBrowserContext(this.canvasContext.canvas, { ...props, onContextLost: (event) => { var _a2; return (_a2 = this._resolveContextLost) == null ? void 0 : _a2.call(this, { reason: "destroyed", message: "Entered sleep mode, or too many apps or browser tabs are using the GPU." }); } }); if (!gl) { throw new Error("WebGL context creation failed"); } this.handle = gl; this.gl = gl; this.gl.device = this; this.gl._version = 2; if (props.spector) { this.spectorJS = initializeSpectorJS({ ...this.props, canvas: this.handle.canvas }); } this.info = getDeviceInfo(this.gl, this._extensions); this.limits = new WebGLDeviceLimits(this.gl); this.features = new WebGLDeviceFeatures(this.gl, this._extensions, this.props.disabledFeatures); if (this.props.initalizeFeatures) { this.features.initializeFeatures(); } this.canvasContext.resize(); const { enable: enable2 = true, copyState = false } = props; trackContextState(this.gl, { enable: enable2, copyState, log: (...args) => import_core27.log.log(1, ...args)() }); if (props.debug) { this.gl = makeDebugContext(this.gl, { ...props, throwOnError: true }); this.debug = true; import_core27.log.level = Math.max(import_core27.log.level, 1); import_core27.log.warn("WebGL debug mode activated. 
Performance reduced.")(); } } /** * Destroys the context * @note Has no effect for WebGL browser contexts, there is no browser API for destroying contexts */ destroy() { } get isLost() { return this.gl.isContextLost(); } getSize() { return [this.gl.drawingBufferWidth, this.gl.drawingBufferHeight]; } isTextureFormatSupported(format) { return isTextureFormatSupported(this.gl, format, this._extensions); } isTextureFormatFilterable(format) { return isTextureFormatFilterable(this.gl, format, this._extensions); } isTextureFormatRenderable(format) { return isTextureFormatRenderable(this.gl, format, this._extensions); } // IMPLEMENTATION OF ABSTRACT DEVICE createCanvasContext(props) { throw new Error("WebGL only supports a single canvas"); } createBuffer(props) { const newProps = this._getBufferProps(props); return new WEBGLBuffer(this, newProps); } _createTexture(props) { return new WEBGLTexture(this, props); } createExternalTexture(props) { throw new Error("createExternalTexture() not implemented"); } createSampler(props) { return new WEBGLSampler(this, props); } createShader(props) { return new WEBGLShader(this, props); } createFramebuffer(props) { return new WEBGLFramebuffer(this, props); } createVertexArray(props) { return new WEBGLVertexArray(this, props); } createTransformFeedback(props) { return new WEBGLTransformFeedback(this, props); } createQuerySet(props) { return new WEBGLQuerySet(this, props); } createRenderPipeline(props) { return new WEBGLRenderPipeline(this, props); } beginRenderPass(props) { return new WEBGLRenderPass(this, props); } createComputePipeline(props) { throw new Error("ComputePipeline not supported in WebGL"); } beginComputePass(props) { throw new Error("ComputePass not supported in WebGL"); } renderPass = null; createCommandEncoder(props) { return new WEBGLCommandEncoder(this, props); } /** * Offscreen Canvas Support: Commit the frame * https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext/commit * Chrome's offscreen canvas does not require gl.commit */ submit() { var _a; (_a = this.renderPass) == null ? void 0 : _a.end(); this.renderPass = null; } // // TEMPORARY HACKS - will be removed in v9.1 // /** @deprecated - should use command encoder */ readPixelsToArrayWebGL(source, options) { return readPixelsToArray(source, options); } /** @deprecated - should use command encoder */ readPixelsToBufferWebGL(source, options) { return readPixelsToBuffer(source, options); } setParametersWebGL(parameters) { setGLParameters(this.gl, parameters); } getParametersWebGL(parameters) { return getGLParameters(this.gl, parameters); } withParametersWebGL(parameters, func) { return withGLParameters(this.gl, parameters, func); } clearWebGL(options) { clear(this, options); } resetWebGL() { import_core27.log.warn("WebGLDevice.resetWebGL is deprecated, use only for debugging")(); resetGLParameters(this.gl); } // // WebGL-only API (not part of `Device` API) // /** WebGL2 context. */ gl; debug = false; /** State used by luma.gl classes: TODO - move to canvasContext*/ _canvasSizeInfo = { clientWidth: 0, clientHeight: 0, devicePixelRatio: 1 }; /** State used by luma.gl classes - TODO - not used? */ _extensions = {}; _polyfilled = false; /** Instance of Spector.js (if initialized) */ spectorJS; /** * Triggers device (or WebGL context) loss. 
* @note primarily intended for testing how application reacts to device loss */ loseDevice() { var _a; let deviceLossTriggered = false; const extensions = this.getExtension("WEBGL_lose_context"); const ext = extensions.WEBGL_lose_context; if (ext) { deviceLossTriggered = true; ext.loseContext(); } (_a = this._resolveContextLost) == null ? void 0 : _a.call(this, { reason: "destroyed", message: "Application triggered context loss" }); return deviceLossTriggered; } /** Save current WebGL context state onto an internal stack */ pushState() { pushContextState(this.gl); } /** Restores previously saved context state */ popState() { popContextState(this.gl); } /** * Storing data on a special field on WebGLObjects makes that data visible in SPECTOR chrome debug extension * luma.gl ids and props can be inspected */ setSpectorMetadata(handle, props) { handle.__SPECTOR_Metadata = props; } /** * Returns the GL. constant that corresponds to a numeric value of a GL constant * Be aware that there are some duplicates especially for constants that are 0, * so this isn't guaranteed to return the right key in all cases. */ getGLKey(value, gl) { gl = gl || this.gl2 || this.gl; const number = Number(value); for (const key in gl) { if (gl[key] === number) { return `GL.${key}`; } } return String(value); } /** Store constants */ _constants; /** * Set a constant value for a location. Disabled attributes at that location will read from this value * @note WebGL constants are stored globally on the WebGL context, not the VertexArray * so they need to be updated before every render * @todo - remember/cache values to avoid setting them unnecessarily? */ setConstantAttributeWebGL(location, constant) { const maxVertexAttributes = this.limits.maxVertexAttributes; this._constants = this._constants || new Array(maxVertexAttributes).fill(null); const currentConstant = this._constants[location]; if (currentConstant && compareConstantArrayValues2(currentConstant, constant)) { import_core27.log.info(1, `setConstantAttributeWebGL(${location}) could have been skipped, value unchanged`)(); } this._constants[location] = constant; switch (constant.constructor) { case Float32Array: setConstantFloatArray(this, location, constant); break; case Int32Array: setConstantIntArray(this, location, constant); break; case Uint32Array: setConstantUintArray(this, location, constant); break; default: (0, import_core27.assert)(false); } } /** Ensure extensions are only requested once */ getExtension(name) { getWebGLExtension(this.gl, name, this._extensions); return this._extensions; } }; var WebGLDevice = _WebGLDevice; // // Public `Device` API // /** type of this device */ __publicField(WebGLDevice, "type", "webgl"); function isWebGL(gl) { if (typeof WebGL2RenderingContext !== "undefined" && gl instanceof WebGL2RenderingContext) { return true; } return Boolean(gl && Number.isFinite(gl._version)); } function setConstantFloatArray(device, location, array) { switch (array.length) { case 1: device.gl.vertexAttrib1fv(location, array); break; case 2: device.gl.vertexAttrib2fv(location, array); break; case 3: device.gl.vertexAttrib3fv(location, array); break; case 4: device.gl.vertexAttrib4fv(location, array); break; default: (0, import_core27.assert)(false); } } function setConstantIntArray(device, location, array) { device.gl.vertexAttribI4iv(location, array); } function setConstantUintArray(device, location, array) { device.gl.vertexAttribI4uiv(location, array); } function compareConstantArrayValues2(v1, v2) { if (!v1 || !v2 || v1.length !== v2.length 
|| v1.constructor !== v2.constructor) { return false; } for (let i = 0; i < v1.length; ++i) { if (v1[i] !== v2[i]) { return false; } } return true; } // dist/adapter/objects/webgl-resource.js var import_core28 = require("@luma.gl/core"); var ERR_RESOURCE_METHOD_UNDEFINED = "Resource subclass must define virtual methods"; var WebGLResource = class extends import_core28.Resource { device; gl; gl2; _handle; _bound = false; // Only meaningful for resources that allocate GPU memory byteLength = 0; constructor(device, props, defaultProps) { super(device, props, defaultProps); this.device = device; const gl = this.device.gl; const { id } = props || {}; this.gl = gl; this.gl2 = gl; this.id = id || (0, import_core28.uid)(this.constructor.name); this._handle = props == null ? void 0 : props.handle; if (this._handle === void 0) { this._handle = this._createHandle(); } this.byteLength = 0; } toString() { return `${this.constructor.name}(${this.id})`; } get handle() { return this._handle; } delete({ deleteChildren = false } = {}) { const children = this._handle && this._deleteHandle(this._handle); if (this._handle) { this.removeStats(); } this._handle = null; if (children && deleteChildren) { children.filter(Boolean).forEach((child) => child.destroy()); } return this; } bind(funcOrHandle = this.handle) { if (typeof funcOrHandle !== "function") { this._bindHandle(funcOrHandle); return this; } let value; if (!this._bound) { this._bindHandle(this.handle); this._bound = true; value = funcOrHandle(); this._bound = false; this._bindHandle(null); } else { value = funcOrHandle(); } return value; } unbind() { this.bind(null); } // Install stubs for removed methods stubRemovedMethods(className, version, methodNames) { return (0, import_core28.stubRemovedMethods)(this, className, version, methodNames); } // PUBLIC VIRTUAL METHODS initialize(props) { } // PROTECTED METHODS - These must be overridden by subclass _createHandle() { throw new Error(ERR_RESOURCE_METHOD_UNDEFINED); } _deleteHandle() { throw new Error(ERR_RESOURCE_METHOD_UNDEFINED); } _bindHandle(handle) { throw new Error(ERR_RESOURCE_METHOD_UNDEFINED); } _getOptsFromHandle() { throw new Error(ERR_RESOURCE_METHOD_UNDEFINED); } _getParameter(pname, props) { throw new Error(ERR_RESOURCE_METHOD_UNDEFINED); } _setParameter(pname, value) { throw new Error(ERR_RESOURCE_METHOD_UNDEFINED); } }; // dist/adapter/objects/webgl-renderbuffer.js var import_core29 = require("@luma.gl/core"); var import_constants28 = require("@luma.gl/constants"); var _WEBGLRenderbuffer = class extends WebGLResource { get [Symbol.toStringTag]() { return "Renderbuffer"; } get width() { return this.props.width; } get height() { return this.props.height; } get format() { return this.props.format; } get samples() { return this.props.samples; } get attachment() { return; } /** WebGL format constant */ glFormat; static isTextureFormatSupported(device, format) { return isRenderbufferFormatSupported(device.gl, format, device._extensions); } constructor(device, props) { if (typeof props.format === "number") { throw new Error("Renderbuffer"); } super(device, props, _WEBGLRenderbuffer.defaultProps); this.glFormat = convertTextureFormatToGL(this.props.format); this._initialize(this.props); } resize(size) { if (size.width !== this.width || size.height !== this.height) { Object.assign(this.props, { ...size, format: this.format, samples: this.samples }); this._initialize(this.props); } } // PRIVATE METHODS /** Creates and initializes a renderbuffer object's data store */ _initialize(props) { const { 
format, width, height, samples } = props; (0, import_core29.assert)(format, "Needs format"); this.trackDeallocatedMemory(); this.gl.bindRenderbuffer(36161, this.handle); if (samples !== 0) { this.gl.renderbufferStorageMultisample(36161, samples, this.glFormat, width, height); } else { this.gl.renderbufferStorage(36161, this.glFormat, width, height); } this.gl.bindRenderbuffer(36161, null); this.trackAllocatedMemory(width * height * (samples || 1) * getTextureFormatBytesPerPixel(this.format)); } // RESOURCE IMPLEMENTATION _createHandle() { return this.gl.createRenderbuffer(); } _deleteHandle() { this.gl.deleteRenderbuffer(this.handle); this.trackDeallocatedMemory(); } _bindHandle(handle) { this.gl.bindRenderbuffer(36161, handle); } }; var WEBGLRenderbuffer = _WEBGLRenderbuffer; __publicField(WEBGLRenderbuffer, "defaultProps", { id: void 0, handle: void 0, userData: void 0, format: void 0, // 'depth16unorm' width: 1, height: 1, samples: 0 }); //# sourceMappingURL=index.cjs.map
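// Usage sketch (illustrative only, not executed; the 'depth16unorm' format string is an assumption
// mirroring the commented default above):
//   const renderbuffer = new WEBGLRenderbuffer(device, {format: 'depth16unorm', width: 256, height: 256, samples: 4});
//   renderbuffer.resize({width: 512, height: 512}); // reallocates storage, keeping format and sample count
//   renderbuffer.delete();                          // releases the underlying WebGLRenderbuffer handle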