(function webpackUniversalModuleDefinition(root, factory) { if (typeof exports === 'object' && typeof module === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define([], factory); else if (typeof exports === 'object') exports['luma'] = factory(); else root['luma'] = factory();})(globalThis, function () { "use strict"; var __exports__ = (() => { var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __export = (target, all) => { for (var name2 in all) __defProp(target, name2, { get: all[name2], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); var __publicField = (obj, key, value) => { __defNormalProp(obj, typeof key !== "symbol" ? 
key + "" : key, value); return value; }; // bundle.ts var bundle_exports = {}; __export(bundle_exports, { Adapter: () => Adapter, Buffer: () => Buffer2, CanvasContext: () => CanvasContext, CommandBuffer: () => CommandBuffer, CommandEncoder: () => CommandEncoder, ComputePass: () => ComputePass, ComputePipeline: () => ComputePipeline, Device: () => Device, DeviceFeatures: () => DeviceFeatures, DeviceLimits: () => DeviceLimits, ExternalTexture: () => ExternalTexture, Framebuffer: () => Framebuffer, QuerySet: () => QuerySet, RenderPass: () => RenderPass, RenderPipeline: () => RenderPipeline, Resource: () => Resource, Sampler: () => Sampler, Shader: () => Shader, Texture: () => Texture, TextureView: () => TextureView, TransformFeedback: () => TransformFeedback, UniformBlock: () => UniformBlock, UniformBufferLayout: () => UniformBufferLayout, UniformStore: () => UniformStore, VertexArray: () => VertexArray, _BufferLayoutHelper: () => BufferLayoutHelper, _getTextureFormatDefinition: () => getTextureFormatDefinition, _getTextureFormatTable: () => getTextureFormatTable, decodeShaderAttributeType: () => decodeShaderAttributeType, decodeShaderUniformType: () => decodeShaderUniformType, decodeTextureFormat: () => decodeTextureFormat, decodeVertexFormat: () => decodeVertexFormat, getAttributeInfosFromLayouts: () => getAttributeInfosFromLayouts, getDataTypeFromTypedArray: () => getDataTypeFromTypedArray, getScratchArray: () => getScratchArray, getTextureFormatCapabilities: () => getTextureFormatCapabilities, getTypedArrayFromDataType: () => getTypedArrayFromDataType, getVertexFormatFromAttribute: () => getVertexFormatFromAttribute, log: () => log, luma: () => luma }); // ../../node_modules/@probe.gl/env/dist/lib/globals.js var window_ = globalThis; var document_ = globalThis.document || {}; var process_ = globalThis.process || {}; var console_ = globalThis.console; var navigator_ = globalThis.navigator || {}; // ../../node_modules/@probe.gl/env/dist/lib/is-electron.js function 
isElectron(mockUserAgent) { if (typeof window !== "undefined" && window.process?.type === "renderer") { return true; } if (typeof process !== "undefined" && Boolean(process.versions?.["electron"])) { return true; } const realUserAgent = typeof navigator !== "undefined" && navigator.userAgent; const userAgent = mockUserAgent || realUserAgent; return Boolean(userAgent && userAgent.indexOf("Electron") >= 0); } // ../../node_modules/@probe.gl/env/dist/lib/is-browser.js function isBrowser() { const isNode = ( // @ts-expect-error typeof process === "object" && String(process) === "[object process]" && !process?.browser ); return !isNode || isElectron(); } // ../../node_modules/@probe.gl/env/dist/index.js var VERSION = true ? "4.0.7" : "untranspiled source"; // ../../node_modules/@probe.gl/stats/dist/utils/hi-res-timestamp.js function getHiResTimestamp() { let timestamp; if (typeof window !== "undefined" && window.performance) { timestamp = window.performance.now(); } else if (typeof process !== "undefined" && process.hrtime) { const timeParts = process.hrtime(); timestamp = timeParts[0] * 1e3 + timeParts[1] / 1e6; } else { timestamp = Date.now(); } return timestamp; } // ../../node_modules/@probe.gl/stats/dist/lib/stat.js var Stat = class { constructor(name2, type) { this.sampleSize = 1; this.time = 0; this.count = 0; this.samples = 0; this.lastTiming = 0; this.lastSampleTime = 0; this.lastSampleCount = 0; this._count = 0; this._time = 0; this._samples = 0; this._startTime = 0; this._timerPending = false; this.name = name2; this.type = type; this.reset(); } reset() { this.time = 0; this.count = 0; this.samples = 0; this.lastTiming = 0; this.lastSampleTime = 0; this.lastSampleCount = 0; this._count = 0; this._time = 0; this._samples = 0; this._startTime = 0; this._timerPending = false; return this; } setSampleSize(samples) { this.sampleSize = samples; return this; } /** Call to increment count (+1) */ incrementCount() { this.addCount(1); return this; } /** Call to 
decrement count (-1) */ decrementCount() { this.subtractCount(1); return this; } /** Increase count */ addCount(value) { this._count += value; this._samples++; this._checkSampling(); return this; } /** Decrease count */ subtractCount(value) { this._count -= value; this._samples++; this._checkSampling(); return this; } /** Add an arbitrary timing and bump the count */ addTime(time) { this._time += time; this.lastTiming = time; this._samples++; this._checkSampling(); return this; } /** Start a timer */ timeStart() { this._startTime = getHiResTimestamp(); this._timerPending = true; return this; } /** End a timer. Adds to time and bumps the timing count. */ timeEnd() { if (!this._timerPending) { return this; } this.addTime(getHiResTimestamp() - this._startTime); this._timerPending = false; this._checkSampling(); return this; } getSampleAverageCount() { return this.sampleSize > 0 ? this.lastSampleCount / this.sampleSize : 0; } /** Calculate average time / count for the previous window */ getSampleAverageTime() { return this.sampleSize > 0 ? this.lastSampleTime / this.sampleSize : 0; } /** Calculate counts per second for the previous window */ getSampleHz() { return this.lastSampleTime > 0 ? this.sampleSize / (this.lastSampleTime / 1e3) : 0; } getAverageCount() { return this.samples > 0 ? this.count / this.samples : 0; } /** Calculate average time / count */ getAverageTime() { return this.samples > 0 ? this.time / this.samples : 0; } /** Calculate counts per second */ getHz() { return this.time > 0 ? 
this.samples / (this.time / 1e3) : 0; } _checkSampling() { if (this._samples === this.sampleSize) { this.lastSampleTime = this._time; this.lastSampleCount = this._count; this.count += this._count; this.time += this._time; this.samples += this._samples; this._time = 0; this._count = 0; this._samples = 0; } } }; // ../../node_modules/@probe.gl/stats/dist/lib/stats.js var Stats = class { constructor(options) { this.stats = {}; this.id = options.id; this.stats = {}; this._initializeStats(options.stats); Object.seal(this); } /** Acquire a stat. Create if it doesn't exist. */ get(name2, type = "count") { return this._getOrCreate({ name: name2, type }); } get size() { return Object.keys(this.stats).length; } /** Reset all stats */ reset() { for (const stat of Object.values(this.stats)) { stat.reset(); } return this; } forEach(fn) { for (const stat of Object.values(this.stats)) { fn(stat); } } getTable() { const table = {}; this.forEach((stat) => { table[stat.name] = { time: stat.time || 0, count: stat.count || 0, average: stat.getAverageTime() || 0, hz: stat.getHz() || 0 }; }); return table; } _initializeStats(stats = []) { stats.forEach((stat) => this._getOrCreate(stat)); } _getOrCreate(stat) { const { name: name2, type } = stat; let result = this.stats[name2]; if (!result) { if (stat instanceof Stat) { result = stat; } else { result = new Stat(name2, type); } this.stats[name2] = result; } return result; } }; // src/utils/stats-manager.ts var StatsManager = class { stats = /* @__PURE__ */ new Map(); getStats(name2) { return this.get(name2); } get(name2) { if (!this.stats.has(name2)) { this.stats.set(name2, new Stats({ id: name2 })); } return this.stats.get(name2); } }; var lumaStats = new StatsManager(); // ../../node_modules/@probe.gl/log/dist/utils/local-storage.js function getStorage(type) { try { const storage = window[type]; const x = "__storage_test__"; storage.setItem(x, x); storage.removeItem(x); return storage; } catch (e) { return null; } } var LocalStorage = 
class { constructor(id, defaultConfig, type = "sessionStorage") { this.storage = getStorage(type); this.id = id; this.config = defaultConfig; this._loadConfiguration(); } getConfiguration() { return this.config; } setConfiguration(configuration) { Object.assign(this.config, configuration); if (this.storage) { const serialized = JSON.stringify(this.config); this.storage.setItem(this.id, serialized); } } // Get config from persistent store, if available _loadConfiguration() { let configuration = {}; if (this.storage) { const serializedConfiguration = this.storage.getItem(this.id); configuration = serializedConfiguration ? JSON.parse(serializedConfiguration) : {}; } Object.assign(this.config, configuration); return this; } }; // ../../node_modules/@probe.gl/log/dist/utils/formatters.js function formatTime(ms) { let formatted; if (ms < 10) { formatted = `${ms.toFixed(2)}ms`; } else if (ms < 100) { formatted = `${ms.toFixed(1)}ms`; } else if (ms < 1e3) { formatted = `${ms.toFixed(0)}ms`; } else { formatted = `${(ms / 1e3).toFixed(2)}s`; } return formatted; } function leftPad(string, length = 8) { const padLength = Math.max(length - string.length, 0); return `${" ".repeat(padLength)}${string}`; } // ../../node_modules/@probe.gl/log/dist/utils/color.js var COLOR; (function(COLOR2) { COLOR2[COLOR2["BLACK"] = 30] = "BLACK"; COLOR2[COLOR2["RED"] = 31] = "RED"; COLOR2[COLOR2["GREEN"] = 32] = "GREEN"; COLOR2[COLOR2["YELLOW"] = 33] = "YELLOW"; COLOR2[COLOR2["BLUE"] = 34] = "BLUE"; COLOR2[COLOR2["MAGENTA"] = 35] = "MAGENTA"; COLOR2[COLOR2["CYAN"] = 36] = "CYAN"; COLOR2[COLOR2["WHITE"] = 37] = "WHITE"; COLOR2[COLOR2["BRIGHT_BLACK"] = 90] = "BRIGHT_BLACK"; COLOR2[COLOR2["BRIGHT_RED"] = 91] = "BRIGHT_RED"; COLOR2[COLOR2["BRIGHT_GREEN"] = 92] = "BRIGHT_GREEN"; COLOR2[COLOR2["BRIGHT_YELLOW"] = 93] = "BRIGHT_YELLOW"; COLOR2[COLOR2["BRIGHT_BLUE"] = 94] = "BRIGHT_BLUE"; COLOR2[COLOR2["BRIGHT_MAGENTA"] = 95] = "BRIGHT_MAGENTA"; COLOR2[COLOR2["BRIGHT_CYAN"] = 96] = "BRIGHT_CYAN"; 
COLOR2[COLOR2["BRIGHT_WHITE"] = 97] = "BRIGHT_WHITE"; })(COLOR || (COLOR = {})); var BACKGROUND_INCREMENT = 10; function getColor(color) { if (typeof color !== "string") { return color; } color = color.toUpperCase(); return COLOR[color] || COLOR.WHITE; } function addColor(string, color, background) { if (!isBrowser && typeof string === "string") { if (color) { const colorCode = getColor(color); string = `\x1B[${colorCode}m${string}\x1B[39m`; } if (background) { const colorCode = getColor(background); string = `\x1B[${colorCode + BACKGROUND_INCREMENT}m${string}\x1B[49m`; } } return string; } // ../../node_modules/@probe.gl/log/dist/utils/autobind.js function autobind(obj, predefined = ["constructor"]) { const proto = Object.getPrototypeOf(obj); const propNames = Object.getOwnPropertyNames(proto); const object = obj; for (const key of propNames) { const value = object[key]; if (typeof value === "function") { if (!predefined.find((name2) => key === name2)) { object[key] = value.bind(obj); } } } } // ../../node_modules/@probe.gl/log/dist/utils/assert.js function assert(condition, message) { if (!condition) { throw new Error(message || "Assertion failed"); } } // ../../node_modules/@probe.gl/log/dist/utils/hi-res-timestamp.js function getHiResTimestamp2() { let timestamp; if (isBrowser() && window_.performance) { timestamp = window_?.performance?.now?.(); } else if ("hrtime" in process_) { const timeParts = process_?.hrtime?.(); timestamp = timeParts[0] * 1e3 + timeParts[1] / 1e6; } else { timestamp = Date.now(); } return timestamp; } // ../../node_modules/@probe.gl/log/dist/log.js var originalConsole = { debug: isBrowser() ? 
console.debug || console.log : console.log, log: console.log, info: console.info, warn: console.warn, error: console.error }; var DEFAULT_LOG_CONFIGURATION = { enabled: true, level: 0 }; function noop() { } var cache = {}; var ONCE = { once: true }; var Log = class { constructor({ id } = { id: "" }) { this.VERSION = VERSION; this._startTs = getHiResTimestamp2(); this._deltaTs = getHiResTimestamp2(); this.userData = {}; this.LOG_THROTTLE_TIMEOUT = 0; this.id = id; this.userData = {}; this._storage = new LocalStorage(`__probe-${this.id}__`, DEFAULT_LOG_CONFIGURATION); this.timeStamp(`${this.id} started`); autobind(this); Object.seal(this); } set level(newLevel) { this.setLevel(newLevel); } get level() { return this.getLevel(); } isEnabled() { return this._storage.config.enabled; } getLevel() { return this._storage.config.level; } /** @return milliseconds, with fractions */ getTotal() { return Number((getHiResTimestamp2() - this._startTs).toPrecision(10)); } /** @return milliseconds, with fractions */ getDelta() { return Number((getHiResTimestamp2() - this._deltaTs).toPrecision(10)); } /** @deprecated use logLevel */ set priority(newPriority) { this.level = newPriority; } /** @deprecated use logLevel */ get priority() { return this.level; } /** @deprecated use logLevel */ getPriority() { return this.level; } // Configure enable(enabled = true) { this._storage.setConfiguration({ enabled }); return this; } setLevel(level) { this._storage.setConfiguration({ level }); return this; } /** return the current status of the setting */ get(setting) { return this._storage.config[setting]; } // update the status of the setting set(setting, value) { this._storage.setConfiguration({ [setting]: value }); } /** Logs the current settings as a table */ settings() { if (console.table) { console.table(this._storage.config); } else { console.log(this._storage.config); } } // Unconditional logging assert(condition, message) { if (!condition) { throw new Error(message || "Assertion 
failed"); } } warn(message) { return this._getLogFunction(0, message, originalConsole.warn, arguments, ONCE); } error(message) { return this._getLogFunction(0, message, originalConsole.error, arguments); } /** Print a deprecation warning */ deprecated(oldUsage, newUsage) { return this.warn(`\`${oldUsage}\` is deprecated and will be removed in a later version. Use \`${newUsage}\` instead`); } /** Print a removal warning */ removed(oldUsage, newUsage) { return this.error(`\`${oldUsage}\` has been removed. Use \`${newUsage}\` instead`); } probe(logLevel, message) { return this._getLogFunction(logLevel, message, originalConsole.log, arguments, { time: true, once: true }); } log(logLevel, message) { return this._getLogFunction(logLevel, message, originalConsole.debug, arguments); } info(logLevel, message) { return this._getLogFunction(logLevel, message, console.info, arguments); } once(logLevel, message) { return this._getLogFunction(logLevel, message, originalConsole.debug || originalConsole.info, arguments, ONCE); } /** Logs an object as a table */ table(logLevel, table, columns) { if (table) { return this._getLogFunction(logLevel, table, console.table || noop, columns && [columns], { tag: getTableHeader(table) }); } return noop; } time(logLevel, message) { return this._getLogFunction(logLevel, message, console.time ? console.time : console.info); } timeEnd(logLevel, message) { return this._getLogFunction(logLevel, message, console.timeEnd ? console.timeEnd : console.info); } timeStamp(logLevel, message) { return this._getLogFunction(logLevel, message, console.timeStamp || noop); } group(logLevel, message, opts = { collapsed: false }) { const options = normalizeArguments({ logLevel, message, opts }); const { collapsed } = opts; options.method = (collapsed ? 
console.groupCollapsed : console.group) || console.info; return this._getLogFunction(options); } groupCollapsed(logLevel, message, opts = {}) { return this.group(logLevel, message, Object.assign({}, opts, { collapsed: true })); } groupEnd(logLevel) { return this._getLogFunction(logLevel, "", console.groupEnd || noop); } // EXPERIMENTAL withGroup(logLevel, message, func) { this.group(logLevel, message)(); try { func(); } finally { this.groupEnd(logLevel)(); } } trace() { if (console.trace) { console.trace(); } } // PRIVATE METHODS /** Deduces log level from a variety of arguments */ _shouldLog(logLevel) { return this.isEnabled() && this.getLevel() >= normalizeLogLevel(logLevel); } _getLogFunction(logLevel, message, method, args, opts) { if (this._shouldLog(logLevel)) { opts = normalizeArguments({ logLevel, message, args, opts }); method = method || opts.method; assert(method); opts.total = this.getTotal(); opts.delta = this.getDelta(); this._deltaTs = getHiResTimestamp2(); const tag = opts.tag || opts.message; if (opts.once && tag) { if (!cache[tag]) { cache[tag] = getHiResTimestamp2(); } else { return noop; } } message = decorateMessage(this.id, opts.message, opts); return method.bind(console, message, ...opts.args); } return noop; } }; Log.VERSION = VERSION; function normalizeLogLevel(logLevel) { if (!logLevel) { return 0; } let resolvedLevel; switch (typeof logLevel) { case "number": resolvedLevel = logLevel; break; case "object": resolvedLevel = logLevel.logLevel || logLevel.priority || 0; break; default: return 0; } assert(Number.isFinite(resolvedLevel) && resolvedLevel >= 0); return resolvedLevel; } function normalizeArguments(opts) { const { logLevel, message } = opts; opts.logLevel = normalizeLogLevel(logLevel); const args = opts.args ? 
Array.from(opts.args) : []; while (args.length && args.shift() !== message) { } switch (typeof logLevel) { case "string": case "function": if (message !== void 0) { args.unshift(message); } opts.message = logLevel; break; case "object": Object.assign(opts, logLevel); break; default: } if (typeof opts.message === "function") { opts.message = opts.message(); } const messageType = typeof opts.message; assert(messageType === "string" || messageType === "object"); return Object.assign(opts, { args }, opts.opts); } function decorateMessage(id, message, opts) { if (typeof message === "string") { const time = opts.time ? leftPad(formatTime(opts.total)) : ""; message = opts.time ? `${id}: ${time} ${message}` : `${id}: ${message}`; message = addColor(message, opts.color, opts.background); } return message; } function getTableHeader(table) { for (const key in table) { for (const title in table[key]) { return title || "untitled"; } } return "empty"; } // ../../node_modules/@probe.gl/log/dist/init.js globalThis.probe = {}; // ../../node_modules/@probe.gl/log/dist/index.js var dist_default = new Log({ id: "@probe.gl/log" }); // src/utils/log.ts var log = new Log({ id: "luma.gl" }); // src/utils/uid.ts var uidCounters = {}; function uid(id = "id") { uidCounters[id] = uidCounters[id] || 1; const count = uidCounters[id]++; return `${id}-${count}`; } // src/adapter/resources/resource.ts var Resource = class { toString() { return `${this[Symbol.toStringTag] || this.constructor.name}:"${this.id}"`; } /** props.id, for debugging. */ id; props; userData = {}; _device; /** Whether this resource has been destroyed */ destroyed = false; /** For resources that allocate GPU memory */ allocatedBytes = 0; /** Attached resources will be destroyed when this resource is destroyed. Tracks auto-created "sub" resources. */ _attachedResources = /* @__PURE__ */ new Set(); /** * Create a new Resource. 
Called from Subclass */ constructor(device, props, defaultProps) { if (!device) { throw new Error("no device"); } this._device = device; this.props = selectivelyMerge(props, defaultProps); const id = this.props.id !== "undefined" ? this.props.id : uid(this[Symbol.toStringTag]); this.props.id = id; this.id = id; this.userData = this.props.userData || {}; this.addStats(); } /** * destroy can be called on any resource to release it before it is garbage collected. */ destroy() { this.destroyResource(); } /** @deprecated Use destroy() */ delete() { this.destroy(); return this; } /** * Combines a map of user props and default props, only including props from defaultProps * @returns returns a map of overridden default props */ getProps() { return this.props; } // ATTACHED RESOURCES /** * Attaches a resource. Attached resources are auto destroyed when this resource is destroyed * Called automatically when sub resources are auto created but can be called by application */ attachResource(resource) { this._attachedResources.add(resource); } /** * Detach an attached resource. The resource will no longer be auto-destroyed when this resource is destroyed. */ detachResource(resource) { this._attachedResources.delete(resource); } /** * Destroys a resource (only if owned), and removes from the owned (auto-destroy) list for this resource. */ destroyAttachedResource(resource) { if (this._attachedResources.delete(resource)) { resource.destroy(); } } /** Destroy all owned resources. Make sure the resources are no longer needed before calling. */ destroyAttachedResources() { for (const resource of Object.values(this._attachedResources)) { resource.destroy(); } this._attachedResources = /* @__PURE__ */ new Set(); } // PROTECTED METHODS /** Perform all destroy steps. Can be called by derived resources when overriding destroy() */ destroyResource() { this.destroyAttachedResources(); this.removeStats(); this.destroyed = true; } /** Called by .destroy() to track object destruction. 
Subclass must call if overriding destroy() */ removeStats() { const stats = this._device.statsManager.getStats("Resource Counts"); const name2 = this[Symbol.toStringTag]; stats.get(`${name2}s Active`).decrementCount(); } /** Called by subclass to track memory allocations */ trackAllocatedMemory(bytes, name2 = this[Symbol.toStringTag]) { const stats = this._device.statsManager.getStats("Resource Counts"); stats.get("GPU Memory").addCount(bytes); stats.get(`${name2} Memory`).addCount(bytes); this.allocatedBytes = bytes; } /** Called by subclass to track memory deallocations */ trackDeallocatedMemory(name2 = this[Symbol.toStringTag]) { const stats = this._device.statsManager.getStats("Resource Counts"); stats.get("GPU Memory").subtractCount(this.allocatedBytes); stats.get(`${name2} Memory`).subtractCount(this.allocatedBytes); this.allocatedBytes = 0; } /** Called by resource constructor to track object creation */ addStats() { const stats = this._device.statsManager.getStats("Resource Counts"); const name2 = this[Symbol.toStringTag]; stats.get("Resources Created").incrementCount(); stats.get(`${name2}s Created`).incrementCount(); stats.get(`${name2}s Active`).incrementCount(); } }; /** Default properties for resource */ __publicField(Resource, "defaultProps", { id: "undefined", handle: void 0, userData: void 0 }); function selectivelyMerge(props, defaultProps) { const mergedProps = { ...defaultProps }; for (const key in props) { if (props[key] !== void 0) { mergedProps[key] = props[key]; } } return mergedProps; } // src/adapter/resources/buffer.ts var _Buffer = class extends Resource { get [Symbol.toStringTag]() { return "Buffer"; } /** The usage with which this buffer was created */ usage; /** For index buffers, whether indices are 16 or 32 bit */ indexType; /** "Time" of last update, can be used to check if redraw is needed */ updateTimestamp; constructor(device, props) { const deducedProps = { ...props }; if ((props.usage || 0) & _Buffer.INDEX && !props.indexType) 
{ if (props.data instanceof Uint32Array) { deducedProps.indexType = "uint32"; } else if (props.data instanceof Uint16Array) { deducedProps.indexType = "uint16"; } } delete deducedProps.data; super(device, deducedProps, _Buffer.defaultProps); this.usage = deducedProps.usage || 0; this.indexType = deducedProps.indexType; this.updateTimestamp = device.incrementTimestamp(); } /** * Create a copy of this Buffer with new byteLength, with same props but of the specified size. * @note Does not copy contents of the cloned Buffer. */ clone(props) { return this.device.createBuffer({ ...this.props, ...props }); } /** Read data synchronously. @note WebGL2 only */ readSyncWebGL(byteOffset, byteLength) { throw new Error("not implemented"); } /** A partial CPU-side copy of the data in this buffer, for debugging purposes */ debugData = new ArrayBuffer(0); /** This doesn't handle partial non-zero offset updates correctly */ _setDebugData(data, byteOffset, byteLength) { const arrayBuffer2 = ArrayBuffer.isView(data) ? data.buffer : data; const debugDataLength = Math.min( data ? 
data.byteLength : byteLength, _Buffer.DEBUG_DATA_MAX_LENGTH ); if (arrayBuffer2 === null) { this.debugData = new ArrayBuffer(debugDataLength); } else if (byteOffset === 0 && byteLength === arrayBuffer2.byteLength) { this.debugData = arrayBuffer2.slice(0, debugDataLength); } else { this.debugData = arrayBuffer2.slice(byteOffset, byteOffset + debugDataLength); } } }; var Buffer2 = _Buffer; __publicField(Buffer2, "defaultProps", { ...Resource.defaultProps, usage: 0, // Buffer.COPY_DST | Buffer.COPY_SRC byteLength: 0, byteOffset: 0, data: null, indexType: "uint16", mappedAtCreation: false }); // Usage Flags __publicField(Buffer2, "MAP_READ", 1); __publicField(Buffer2, "MAP_WRITE", 2); __publicField(Buffer2, "COPY_SRC", 4); __publicField(Buffer2, "COPY_DST", 8); /** Index buffer */ __publicField(Buffer2, "INDEX", 16); /** Vertex buffer */ __publicField(Buffer2, "VERTEX", 32); /** Uniform buffer */ __publicField(Buffer2, "UNIFORM", 64); /** Storage buffer */ __publicField(Buffer2, "STORAGE", 128); __publicField(Buffer2, "INDIRECT", 256); __publicField(Buffer2, "QUERY_RESOLVE", 512); // PROTECTED METHODS (INTENDED FOR USE BY OTHER FRAMEWORK CODE ONLY) /** Max amount of debug data saved. 
Two vec4's */ __publicField(Buffer2, "DEBUG_DATA_MAX_LENGTH", 32); // src/gpu-type-utils/decode-data-type.ts function decodeVertexType(type) { const dataType = TYPE_MAP[type]; const bytes = getDataTypeBytes(dataType); const normalized = type.includes("norm"); const integer = !normalized && !type.startsWith("float"); const signed = type.startsWith("s"); return { dataType: TYPE_MAP[type], byteLength: bytes, integer, signed, normalized }; } function getDataTypeBytes(type) { const bytes = TYPE_SIZES[type]; return bytes; } var TYPE_MAP = { uint8: "uint8", sint8: "sint8", unorm8: "uint8", snorm8: "sint8", uint16: "uint16", sint16: "sint16", unorm16: "uint16", snorm16: "sint16", float16: "float16", float32: "float32", uint32: "uint32", sint32: "sint32" }; var TYPE_SIZES = { uint8: 1, sint8: 1, uint16: 2, sint16: 2, float16: 2, float32: 4, uint32: 4, sint32: 4 }; // src/gpu-type-utils/texture-format-table.ts var texture_compression_bc = "texture-compression-bc"; var texture_compression_astc = "texture-compression-astc"; var texture_compression_etc2 = "texture-compression-etc2"; var texture_compression_etc1_webgl = "texture-compression-etc1-webgl"; var texture_compression_pvrtc_webgl = "texture-compression-pvrtc-webgl"; var texture_compression_atc_webgl = "texture-compression-atc-webgl"; var float32_renderable = "float32-renderable-webgl"; var float16_renderable = "float16-renderable-webgl"; var rgb9e5ufloat_renderable = "rgb9e5ufloat-renderable-webgl"; var snorm8_renderable = "snorm8-renderable-webgl"; var norm16_renderable = "norm16-renderable-webgl"; var snorm16_renderable = "snorm16-renderable-webgl"; var float32_filterable = "float32-filterable"; var float16_filterable = "float16-filterable-webgl"; function getTextureFormatDefinition(format) { const info = TEXTURE_FORMAT_TABLE[format]; if (!info) { throw new Error(`Unsupported texture format ${format}`); } return info; } function getTextureFormatTable() { return TEXTURE_FORMAT_TABLE; } var TEXTURE_FORMAT_TABLE = { // 
8-bit formats "r8unorm": {}, "r8snorm": { render: snorm8_renderable }, "r8uint": {}, "r8sint": {}, // 16-bit formats "rg8unorm": {}, "rg8snorm": { render: snorm8_renderable }, "rg8uint": {}, "rg8sint": {}, "r16uint": {}, "r16sint": {}, "r16float": { render: float16_renderable, filter: "float16-filterable-webgl" }, "r16unorm-webgl": { f: norm16_renderable }, "r16snorm-webgl": { f: snorm16_renderable }, // Packed 16-bit formats "rgba4unorm-webgl": { channels: "rgba", bitsPerChannel: [4, 4, 4, 4], packed: true }, "rgb565unorm-webgl": { channels: "rgb", bitsPerChannel: [5, 6, 5, 0], packed: true }, "rgb5a1unorm-webgl": { channels: "rgba", bitsPerChannel: [5, 5, 5, 1], packed: true }, // 24-bit formats "rgb8unorm-webgl": {}, "rgb8snorm-webgl": {}, // 32-bit formats "rgba8unorm": {}, "rgba8unorm-srgb": {}, "rgba8snorm": { render: snorm8_renderable }, "rgba8uint": {}, "rgba8sint": {}, // 32-bit, reverse colors, webgpu only "bgra8unorm": {}, "bgra8unorm-srgb": {}, "rg16uint": {}, "rg16sint": {}, "rg16float": { render: float16_renderable, filter: float16_filterable }, "rg16unorm-webgl": { render: norm16_renderable }, "rg16snorm-webgl": { render: snorm16_renderable }, "r32uint": {}, "r32sint": {}, "r32float": { render: float32_renderable, filter: float32_filterable }, // Packed 32 bit formats "rgb9e5ufloat": { channels: "rgb", packed: true, render: rgb9e5ufloat_renderable }, // , filter: true}, "rg11b10ufloat": { channels: "rgb", bitsPerChannel: [11, 11, 10, 0], packed: true, p: 1, render: float32_renderable }, "rgb10a2unorm": { channels: "rgba", bitsPerChannel: [10, 10, 10, 2], packed: true, p: 1 }, "rgb10a2uint-webgl": { channels: "rgba", bitsPerChannel: [10, 10, 10, 2], packed: true, p: 1, wgpu: false }, // 48-bit formats "rgb16unorm-webgl": { f: norm16_renderable }, // rgb not renderable "rgb16snorm-webgl": { f: norm16_renderable }, // rgb not renderable // 64-bit formats "rg32uint": {}, "rg32sint": {}, "rg32float": { render: false, filter: float32_filterable }, 
"rgba16uint": {}, "rgba16sint": {}, "rgba16float": { render: float16_renderable, filter: float16_filterable }, "rgba16unorm-webgl": { render: norm16_renderable }, "rgba16snorm-webgl": { render: snorm16_renderable }, // 96-bit formats (deprecated!) "rgb32float-webgl": { render: float32_renderable, filter: float32_filterable }, // 128-bit formats "rgba32uint": {}, "rgba32sint": {}, "rgba32float": { render: float32_renderable, filter: float32_filterable }, // Depth/stencil // Depth and stencil formats stencil8: { attachment: "stencil", bitsPerChannel: [8, 0, 0, 0], dataType: "uint8" }, "depth16unorm": { attachment: "depth", bitsPerChannel: [16, 0, 0, 0], dataType: "uint16" }, "depth24plus": { attachment: "depth", bitsPerChannel: [24, 0, 0, 0], dataType: "uint32" }, "depth32float": { attachment: "depth", bitsPerChannel: [32, 0, 0, 0], dataType: "float32" }, // The depth component of the "depth24plus" and "depth24plus-stencil8" formats may be implemented as either a 24-bit depth value or a "depth32float" value. 
"depth24plus-stencil8": { attachment: "depth-stencil", bitsPerChannel: [24, 8, 0, 0], packed: true }, // "depth32float-stencil8" feature "depth32float-stencil8": { attachment: "depth-stencil", bitsPerChannel: [32, 8, 0, 0], packed: true }, // BC compressed formats: check device.features.has("texture-compression-bc"); "bc1-rgb-unorm-webgl": { f: texture_compression_bc }, "bc1-rgb-unorm-srgb-webgl": { f: texture_compression_bc }, "bc1-rgba-unorm": { f: texture_compression_bc }, "bc1-rgba-unorm-srgb": { f: texture_compression_bc }, "bc2-rgba-unorm": { f: texture_compression_bc }, "bc2-rgba-unorm-srgb": { f: texture_compression_bc }, "bc3-rgba-unorm": { f: texture_compression_bc }, "bc3-rgba-unorm-srgb": { f: texture_compression_bc }, "bc4-r-unorm": { f: texture_compression_bc }, "bc4-r-snorm": { f: texture_compression_bc }, "bc5-rg-unorm": { f: texture_compression_bc }, "bc5-rg-snorm": { f: texture_compression_bc }, "bc6h-rgb-ufloat": { f: texture_compression_bc }, "bc6h-rgb-float": { f: texture_compression_bc }, "bc7-rgba-unorm": { f: texture_compression_bc }, "bc7-rgba-unorm-srgb": { f: texture_compression_bc }, // WEBGL_compressed_texture_etc: device.features.has("texture-compression-etc2") // Note: Supposedly guaranteed availability compressed formats in WebGL2, but through CPU decompression "etc2-rgb8unorm": { f: texture_compression_etc2 }, "etc2-rgb8unorm-srgb": { f: texture_compression_etc2 }, "etc2-rgb8a1unorm": { f: texture_compression_etc2 }, "etc2-rgb8a1unorm-srgb": { f: texture_compression_etc2 }, "etc2-rgba8unorm": { f: texture_compression_etc2 }, "etc2-rgba8unorm-srgb": { f: texture_compression_etc2 }, "eac-r11unorm": { f: texture_compression_etc2 }, "eac-r11snorm": { f: texture_compression_etc2 }, "eac-rg11unorm": { f: texture_compression_etc2 }, "eac-rg11snorm": { f: texture_compression_etc2 }, // X_ASTC compressed formats: device.features.has("texture-compression-astc") "astc-4x4-unorm": { f: texture_compression_astc }, "astc-4x4-unorm-srgb": { f: 
texture_compression_astc }, "astc-5x4-unorm": { f: texture_compression_astc }, "astc-5x4-unorm-srgb": { f: texture_compression_astc }, "astc-5x5-unorm": { f: texture_compression_astc }, "astc-5x5-unorm-srgb": { f: texture_compression_astc }, "astc-6x5-unorm": { f: texture_compression_astc }, "astc-6x5-unorm-srgb": { f: texture_compression_astc }, "astc-6x6-unorm": { f: texture_compression_astc }, "astc-6x6-unorm-srgb": { f: texture_compression_astc }, "astc-8x5-unorm": { f: texture_compression_astc }, "astc-8x5-unorm-srgb": { f: texture_compression_astc }, "astc-8x6-unorm": { f: texture_compression_astc }, "astc-8x6-unorm-srgb": { f: texture_compression_astc }, "astc-8x8-unorm": { f: texture_compression_astc }, "astc-8x8-unorm-srgb": { f: texture_compression_astc }, "astc-10x5-unorm": { f: texture_compression_astc }, "astc-10x5-unorm-srgb": { f: texture_compression_astc }, "astc-10x6-unorm": { f: texture_compression_astc }, "astc-10x6-unorm-srgb": { f: texture_compression_astc }, "astc-10x8-unorm": { f: texture_compression_astc }, "astc-10x8-unorm-srgb": { f: texture_compression_astc }, "astc-10x10-unorm": { f: texture_compression_astc }, "astc-10x10-unorm-srgb": { f: texture_compression_astc }, "astc-12x10-unorm": { f: texture_compression_astc }, "astc-12x10-unorm-srgb": { f: texture_compression_astc }, "astc-12x12-unorm": { f: texture_compression_astc }, "astc-12x12-unorm-srgb": { f: texture_compression_astc }, // WEBGL_compressed_texture_pvrtc "pvrtc-rgb4unorm-webgl": { f: texture_compression_pvrtc_webgl }, "pvrtc-rgba4unorm-webgl": { f: texture_compression_pvrtc_webgl }, "pvrtc-rbg2unorm-webgl": { f: texture_compression_pvrtc_webgl }, "pvrtc-rgba2unorm-webgl": { f: texture_compression_pvrtc_webgl }, // WEBGL_compressed_texture_etc1 "etc1-rbg-unorm-webgl": { f: texture_compression_etc1_webgl }, // WEBGL_compressed_texture_atc "atc-rgb-unorm-webgl": { f: texture_compression_atc_webgl }, "atc-rgba-unorm-webgl": { f: texture_compression_atc_webgl }, 
"atc-rgbai-unorm-webgl": { f: texture_compression_atc_webgl } }; // src/gpu-type-utils/decode-texture-format.ts var COMPRESSED_TEXTURE_FORMAT_PREFIXES = [ "bc1", "bc2", "bc3", "bc4", "bc5", "bc6", "bc7", "etc1", "etc2", "eac", "atc", "astc", "pvrtc" ]; var RGB_FORMAT_REGEX = /^(r|rg|rgb|rgba|bgra)([0-9]*)([a-z]*)(-srgb)?(-webgl)?$/; function isTextureFormatCompressed(format) { return COMPRESSED_TEXTURE_FORMAT_PREFIXES.some((prefix) => format.startsWith(prefix)); } function decodeTextureFormat(format) { let formatInfo = decodeTextureFormatUsingTable(format); if (isTextureFormatCompressed(format)) { formatInfo.channels = "rgb"; formatInfo.components = 3; formatInfo.bytesPerPixel = 1; formatInfo.srgb = false; formatInfo.compressed = true; const blockSize = getCompressedTextureBlockSize(format); if (blockSize) { formatInfo.blockWidth = blockSize.blockWidth; formatInfo.blockHeight = blockSize.blockHeight; } } const matches = RGB_FORMAT_REGEX.exec(format); if (matches) { const [, channels, length, type, srgb, suffix] = matches; const dataType = `${type}${length}`; const decodedType = decodeVertexType(dataType); const bits = decodedType.byteLength * 8; const components = channels.length; const bitsPerChannel = [ bits, components >= 2 ? bits : 0, components >= 3 ? bits : 0, components >= 4 ? 
bits : 0 ]; formatInfo = { format, attachment: formatInfo.attachment, dataType: decodedType.dataType, components, channels, integer: decodedType.integer, signed: decodedType.signed, normalized: decodedType.normalized, bitsPerChannel, bytesPerPixel: decodedType.byteLength * channels.length, packed: formatInfo.packed, srgb: formatInfo.srgb }; if (suffix === "-webgl") { formatInfo.webgl = true; } if (srgb === "-srgb") { formatInfo.srgb = true; } } if (format.endsWith("-webgl")) { formatInfo.webgl = true; } if (format.endsWith("-srgb")) { formatInfo.srgb = true; } return formatInfo; } function decodeTextureFormatUsingTable(format) { const info = getTextureFormatDefinition(format); const bytesPerPixel = info.bytesPerPixel || 1; const bitsPerChannel = info.bitsPerChannel || [8, 8, 8, 8]; delete info.bitsPerChannel; delete info.bytesPerPixel; delete info.f; delete info.render; delete info.filter; delete info.blend; delete info.store; const formatInfo = { ...info, format, attachment: info.attachment || "color", channels: info.channels || "r", components: info.components || info.channels?.length || 1, bytesPerPixel, bitsPerChannel, dataType: info.dataType || "uint8", srgb: info.srgb ?? false, packed: info.packed ?? false, webgl: info.webgl ?? false, integer: info.integer ?? false, signed: info.signed ?? false, normalized: info.normalized ?? false, compressed: info.compressed ?? false }; return formatInfo; } function getCompressedTextureBlockSize(format) { const REGEX = /.*-(\d+)x(\d+)-.*/; const matches = REGEX.exec(format); if (matches) { const [, blockWidth, blockHeight] = matches; return { blockWidth: Number(blockWidth), blockHeight: Number(blockHeight) }; } return null; } // src/gpu-type-utils/texture-format-capabilities.ts function getTextureFormatCapabilities(format) { const info = getTextureFormatDefinition(format); const formatCapabilities = { format, create: info.f ?? true, render: info.render ?? true, filter: info.filter ?? true, blend: info.blend ?? 
true, store: info.store ?? true }; const formatInfo = decodeTextureFormat(format); const isDepthStencil = format.startsWith("depth") || format.startsWith("stencil"); const isSigned = formatInfo?.signed; const isInteger = formatInfo?.integer; const isWebGLSpecific = formatInfo?.webgl; formatCapabilities.render &&= !isSigned; formatCapabilities.filter &&= !isDepthStencil && !isSigned && !isInteger && !isWebGLSpecific; return formatCapabilities; } // src/adapter/device.ts var DeviceLimits = class { }; var DeviceFeatures = class { features; disabledFeatures; constructor(features = [], disabledFeatures) { this.features = new Set(features); this.disabledFeatures = disabledFeatures || {}; } *[Symbol.iterator]() { yield* this.features; } has(feature) { return !this.disabledFeatures?.[feature] && this.features.has(feature); } }; var _Device = class { get [Symbol.toStringTag]() { return "Device"; } constructor(props) { this.props = { ..._Device.defaultProps, ...props }; this.id = this.props.id || uid(this[Symbol.toStringTag].toLowerCase()); } /** id of this device, primarily for debugging */ id; /** A copy of the device props */ props; /** Available for the application to store data on the device */ userData = {}; /** stats */ statsManager = lumaStats; /** An abstract timestamp used for change tracking */ timestamp = 0; /** Used by other luma.gl modules to store data on the device */ _lumaData = {}; /** Determines what operations are supported on a texture format, checking against supported device features */ getTextureFormatCapabilities(format) { const genericCapabilities = getTextureFormatCapabilities(format); const checkFeature = (featureOrBoolean) => (typeof featureOrBoolean === "string" ? this.features.has(featureOrBoolean) : featureOrBoolean) ?? 
true; const supported = checkFeature(genericCapabilities.create); const deviceCapabilities = { format, create: supported, render: supported && checkFeature(genericCapabilities.render), filter: supported && checkFeature(genericCapabilities.filter), blend: supported && checkFeature(genericCapabilities.blend), store: supported && checkFeature(genericCapabilities.store) }; return this._getDeviceSpecificTextureFormatCapabilities(deviceCapabilities); } /** Check if device supports a specific texture format (creation and `nearest` sampling) */ isTextureFormatSupported(format, capabilities) { return this.getTextureFormatCapabilities(format).create; } /** Check if linear filtering (sampler interpolation) is supported for a specific texture format */ isTextureFormatFilterable(format) { return this.getTextureFormatCapabilities(format).filter; } /** Check if device supports rendering to a framebuffer color attachment of a specific texture format */ isTextureFormatRenderable(format) { return this.getTextureFormatCapabilities(format).render; } /** Check if a specific texture format is GPU compressed */ isTextureFormatCompressed(format) { return isTextureFormatCompressed(format); } /** * Trigger device loss. * @returns `true` if context loss could actually be triggered. * @note primarily intended for testing how application reacts to device loss */ loseDevice() { return false; } /** Report error (normally called for unhandled device errors) */ reportError(error) { this.props.onError(error); } /** Returns the default / primary canvas context. Throws an error if no canvas context is available (a WebGPU compute device) */ getDefaultCanvasContext() { if (!this.canvasContext) { throw new Error("Device has no default CanvasContext. 
See props.createCanvasContext"); } return this.canvasContext; } createCommandEncoder(props = {}) { throw new Error("not implemented"); } /** A monotonic counter for tracking buffer and texture updates */ incrementTimestamp() { return this.timestamp++; } // Error Handling /** Report unhandled device errors */ onError(error) { this.props.onError(error); } // DEPRECATED METHODS /** @deprecated Use getDefaultCanvasContext() */ getCanvasContext() { return this.getDefaultCanvasContext(); } // WebGL specific HACKS - enables app to remove webgl import // Use until we have a better way to handle these /** @deprecated - will be removed - should use command encoder */ readPixelsToArrayWebGL(source, options) { throw new Error("not implemented"); } /** @deprecated - will be removed - should use command encoder */ readPixelsToBufferWebGL(source, options) { throw new Error("not implemented"); } /** @deprecated - will be removed - should use WebGPU parameters (pipeline) */ setParametersWebGL(parameters) { throw new Error("not implemented"); } /** @deprecated - will be removed - should use WebGPU parameters (pipeline) */ getParametersWebGL(parameters) { throw new Error("not implemented"); } /** @deprecated - will be removed - should use WebGPU parameters (pipeline) */ withParametersWebGL(parameters, func) { throw new Error("not implemented"); } /** @deprecated - will be removed - should use clear arguments in RenderPass */ clearWebGL(options) { throw new Error("not implemented"); } /** @deprecated - will be removed - should use for debugging only */ resetWebGL() { throw new Error("not implemented"); } /** Subclasses use this to support .createBuffer() overloads */ _normalizeBufferProps(props) { if (props instanceof ArrayBuffer || ArrayBuffer.isView(props)) { props = { data: props }; } const newProps = { ...props }; if ((props.usage || 0) & Buffer2.INDEX && !props.indexType) { if (props.data instanceof Uint32Array) { newProps.indexType = "uint32"; } else if (props.data instanceof 
Uint16Array) { newProps.indexType = "uint16"; } else { log.warn("indices buffer content must be of integer type")(); } } return newProps; } }; var Device = _Device; __publicField(Device, "defaultProps", { id: null, powerPreference: "high-performance", failIfMajorPerformanceCaveat: false, createCanvasContext: void 0, // Callbacks onError: (error) => log.error(error.message)(), _requestMaxLimits: true, _factoryDestroyPolicy: "unused", // TODO - Change these after confirming things work as expected _initializeFeatures: true, _disabledFeatures: { "compilation-status-async-webgl": true }, _resourceDefaults: {}, // WebGL specific webgl: {}, debug: log.get("debug") || void 0, debugShaders: log.get("debug-shaders") || void 0, debugFramebuffers: Boolean(log.get("debug-framebuffers")), debugWebGL: Boolean(log.get("debug-webgl")), debugSpectorJS: void 0, // Note: log setting is queried by the spector.js code debugSpectorJSUrl: void 0, // INTERNAL _handle: void 0 }); // src/adapter/luma.ts var isPage = isBrowser() && typeof document !== "undefined"; var isPageLoaded = () => isPage && document.readyState === "complete"; var STARTUP_MESSAGE = "set luma.log.level=1 (or higher) to trace rendering"; var ERROR_MESSAGE = "No matching device found. Ensure `@luma.gl/webgl` and/or `@luma.gl/webgpu` modules are imported."; var _Luma = class { /** Global stats for all devices */ stats = lumaStats; /** * Global log * * Assign luma.log.level in console to control logging: \ * 0: none, 1: minimal, 2: verbose, 3: attribute/uniforms, 4: gl logs * luma.log.break[], set to gl funcs, luma.log.profile[] set to model names`; */ log = log; /** Version of luma.gl */ VERSION = ( // Version detection using build plugin // @ts-expect-error no-undef typeof __VERSION__ !== "undefined" ? 
__VERSION__ : "running from source" ); spector; preregisteredAdapters = /* @__PURE__ */ new Map(); constructor() { if (globalThis.luma) { if (globalThis.luma.VERSION !== this.VERSION) { log.error(`Found luma.gl ${globalThis.luma.VERSION} while initialzing ${this.VERSION}`)(); log.error(`'yarn why @luma.gl/core' can help identify the source of the conflict`)(); throw new Error(`luma.gl - multiple versions detected: see console log`); } log.error("This version of luma.gl has already been initialized")(); } log.log(1, `${this.VERSION} - ${STARTUP_MESSAGE}`)(); globalThis.luma = this; } registerAdapters(adapters) { for (const deviceClass of adapters) { this.preregisteredAdapters.set(deviceClass.type, deviceClass); } } /** Get type strings for supported Devices */ getSupportedAdapters(adapters = []) { const adapterMap = this.getAdapterMap(adapters); return Array.from(adapterMap).map(([, adapter]) => adapter).filter((adapter) => adapter.isSupported?.()).map((adapter) => adapter.type); } /** Get type strings for best available Device */ getBestAvailableAdapter(adapters = []) { const adapterMap = this.getAdapterMap(adapters); if (adapterMap.get("webgpu")?.isSupported?.()) { return "webgpu"; } if (adapterMap.get("webgl")?.isSupported?.()) { return "webgl"; } return null; } setDefaultDeviceProps(props) { Object.assign(_Luma.defaultProps, props); } /** Creates a device. Asynchronously. 
*/ async createDevice(props = {}) { props = { ..._Luma.defaultProps, ...props }; if (props.waitForPageLoad) { await _Luma.pageLoaded; } const adapterMap = this.getAdapterMap(props.adapters); let type = props.type || ""; if (type === "best-available") { type = this.getBestAvailableAdapter(props.adapters) || type; } const adapters = this.getAdapterMap(props.adapters) || adapterMap; const adapter = adapters.get(type); const device = await adapter?.create?.(props); if (device) { return device; } throw new Error(ERROR_MESSAGE); } /** Attach to an existing GPU API handle (WebGL2RenderingContext or GPUDevice). */ async attachDevice(props) { const adapters = this.getAdapterMap(props.adapters); let type = ""; if (props.handle instanceof WebGL2RenderingContext) { type = "webgl"; } if (props.createCanvasContext) { await _Luma.pageLoaded; } if (props.handle === null) { type = "unknown"; } const adapter = adapters.get(type); const device = await adapter?.attach?.(null); if (device) { return device; } throw new Error(ERROR_MESSAGE); } /** * Override `HTMLCanvasContext.getCanvas()` to always create WebGL2 contexts with additional WebGL1 compatibility. * Useful when attaching luma to a context from an external library does not support creating WebGL2 contexts. 
*/ enforceWebGL2(enforce = true, adapters = []) { const adapterMap = this.getAdapterMap(adapters); const webgl2Adapter = adapterMap.get("webgl"); if (!webgl2Adapter) { log.warn("enforceWebGL2: webgl adapter not found")(); } webgl2Adapter?.enforceWebGL2?.(enforce); } /** Convert a list of adapters to a map */ getAdapterMap(adapters = []) { const map = new Map(this.preregisteredAdapters); for (const adapter of adapters) { map.set(adapter.type, adapter); } return map; } // DEPRECATED /** @deprecated Use registerAdapters */ registerDevices(deviceClasses) { log.warn("luma.registerDevices() is deprecated, use luma.registerAdapters() instead"); for (const deviceClass of deviceClasses) { const adapter = deviceClass.adapter; if (adapter) { this.preregisteredAdapters.set(adapter.type, adapter); } } } }; var Luma = _Luma; __publicField(Luma, "defaultProps", { ...Device.defaultProps, type: "best-available", adapters: void 0, waitForPageLoad: true }); /** * Page load promise * Get a 'lazy' promise that resolves when the DOM is loaded. * @note Since there may be limitations on number of `load` event listeners, * it is recommended avoid calling this function until actually needed. * I.e. don't call it until you know that you will be looking up a string in the DOM. 
*/ __publicField(Luma, "pageLoaded", getPageLoadPromise().then(() => { log.probe(2, "DOM is loaded")(); })); var luma = new Luma(); function getPageLoadPromise() { if (isPageLoaded() || typeof window === "undefined") { return Promise.resolve(); } return new Promise((resolve) => { window.addEventListener("load", () => resolve()); }); } // src/adapter/adapter.ts var Adapter = class { }; // src/adapter/canvas-context.ts var _CanvasContext = class { id; props; canvas; htmlCanvas; offscreenCanvas; type; width = 1; height = 1; resizeObserver; /** State used by luma.gl classes: TODO - move to canvasContext*/ _canvasSizeInfo = { clientWidth: 0, clientHeight: 0, devicePixelRatio: 1 }; toString() { return `${this[Symbol.toStringTag]}(${this.id})`; } constructor(props) { this.props = { ..._CanvasContext.defaultProps, ...props }; props = this.props; if (!isBrowser()) { this.id = "node-canvas-context"; this.type = "node"; this.width = this.props.width; this.height = this.props.height; this.canvas = null; return; } if (!props.canvas) { const canvas = createCanvas(props); const container = getContainer(props?.container || null); container.insertBefore(canvas, container.firstChild); this.canvas = canvas; if (!props?.visible) { this.canvas.style.visibility = "hidden"; } } else if (typeof props.canvas === "string") { this.canvas = getCanvasFromDOM(props.canvas); } else { this.canvas = props.canvas; } if (this.canvas instanceof HTMLCanvasElement) { this.id = this.canvas.id; this.type = "html-canvas"; this.htmlCanvas = this.canvas; } else { this.id = "offscreen-canvas"; this.type = "offscreen-canvas"; this.offscreenCanvas = this.canvas; } if (this.canvas instanceof HTMLCanvasElement && props.autoResize) { this.resizeObserver = new ResizeObserver((entries) => { for (const entry of entries) { if (entry.target === this.canvas) { this.update(); } } }); this.resizeObserver.observe(this.canvas); } } /** * Returns the current DPR, if props.useDevicePixels is true * Device refers to physical 
*/ getDevicePixelRatio(useDevicePixels) { if (typeof OffscreenCanvas !== "undefined" && this.canvas instanceof OffscreenCanvas) { return 1; } useDevicePixels = useDevicePixels === void 0 ? this.props.useDevicePixels : useDevicePixels; if (!useDevicePixels || useDevicePixels <= 0) { return 1; } if (useDevicePixels === true) { const dpr = typeof window !== "undefined" && window.devicePixelRatio; return dpr || 1; } return useDevicePixels; } /** * Returns the size of drawing buffer in device pixels. * @note This can be different from the 'CSS' size of a canvas, and also from the * canvas' internal drawing buffer size (.width, .height). * This is the size required to cover the canvas, adjusted for DPR */ getPixelSize() { switch (this.type) { case "node": return [this.width, this.height]; case "offscreen-canvas": return [this.canvas.width, this.canvas.height]; case "html-canvas": const dpr = this.getDevicePixelRatio(); const canvas = this.canvas; return canvas.parentElement ? [canvas.clientWidth * dpr, canvas.clientHeight * dpr] : [this.canvas.width, this.canvas.height]; default: throw new Error(this.type); } } getAspect() { const [width, height] = this.getPixelSize(); return width / height; } /** * Returns multiplier need to convert CSS size to Device size */ cssToDeviceRatio() { try { const [drawingBufferWidth] = this.getDrawingBufferSize(); const clientWidth = this._canvasSizeInfo.clientWidth || this.htmlCanvas?.clientWidth; return clientWidth ? drawingBufferWidth / clientWidth : 1; } catch { return 1; } } /** * Maps CSS pixel position to device pixel position */ cssToDevicePixels(cssPixel, yInvert = true) { const ratio = this.cssToDeviceRatio(); const [width, height] = this.getDrawingBufferSize(); return scalePixels(cssPixel, ratio, width, height, yInvert); } /** * Use devicePixelRatio to set canvas width and height * @note this is a raw port of luma.gl v8 code. 
Might be worth a review */ setDevicePixelRatio(devicePixelRatio, options = {}) { if (!this.htmlCanvas) { return; } let clientWidth = "width" in options ? options.width : this.htmlCanvas.clientWidth; let clientHeight = "height" in options ? options.height : this.htmlCanvas.clientHeight; if (!clientWidth || !clientHeight) { log.log(1, "Canvas clientWidth/clientHeight is 0")(); devicePixelRatio = 1; clientWidth = this.htmlCanvas.width || 1; clientHeight = this.htmlCanvas.height || 1; } const cachedSize = this._canvasSizeInfo; if (cachedSize.clientWidth !== clientWidth || cachedSize.clientHeight !== clientHeight || cachedSize.devicePixelRatio !== devicePixelRatio) { let clampedPixelRatio = devicePixelRatio; const canvasWidth = Math.floor(clientWidth * clampedPixelRatio); const canvasHeight = Math.floor(clientHeight * clampedPixelRatio); this.htmlCanvas.width = canvasWidth; this.htmlCanvas.height = canvasHeight; const gl = this.device.gl; if (gl) { const [drawingBufferWidth, drawingBufferHeight] = this.getDrawingBufferSize(); if (drawingBufferWidth !== canvasWidth || drawingBufferHeight !== canvasHeight) { clampedPixelRatio = Math.min( drawingBufferWidth / clientWidth, drawingBufferHeight / clientHeight ); this.htmlCanvas.width = Math.floor(clientWidth * clampedPixelRatio); this.htmlCanvas.height = Math.floor(clientHeight * clampedPixelRatio); log.warn("Device pixel ratio clamped")(); } this._canvasSizeInfo.clientWidth = clientWidth; this._canvasSizeInfo.clientHeight = clientHeight; this._canvasSizeInfo.devicePixelRatio = devicePixelRatio; } } } // PRIVATE /** @todo Major hack done to port the CSS methods above, base canvas context should not depend on WebGL */ getDrawingBufferSize() { const gl = this.device.gl; if (!gl) { throw new Error("canvas size"); } return [gl.drawingBufferWidth, gl.drawingBufferHeight]; } /** * Allows subclass constructor to override the canvas id for auto created canvases. 
* This can really help when debugging DOM in apps that create multiple devices */ _setAutoCreatedCanvasId(id) { if (this.htmlCanvas?.id === "lumagl-auto-created-canvas") { this.htmlCanvas.id = id; } } }; var CanvasContext = _CanvasContext; __publicField(CanvasContext, "defaultProps", { canvas: null, width: 800, // width are height are only used by headless gl height: 600, useDevicePixels: true, autoResize: true, container: null, visible: true, alphaMode: "opaque", colorSpace: "srgb" }); function getContainer(container) { if (typeof container === "string") { const element = document.getElementById(container); if (!element) { throw new Error(`${container} is not an HTML element`); } return element; } else if (container) { return container; } return document.body; } function getCanvasFromDOM(canvasId) { const canvas = document.getElementById(canvasId); if (!(canvas instanceof HTMLCanvasElement)) { throw new Error("Object is not a canvas element"); } return canvas; } function createCanvas(props) { const { width, height } = props; const targetCanvas = document.createElement("canvas"); targetCanvas.id = uid("lumagl-auto-created-canvas"); targetCanvas.width = width || 1; targetCanvas.height = height || 1; targetCanvas.style.width = Number.isFinite(width) ? `${width}px` : "100%"; targetCanvas.style.height = Number.isFinite(height) ? `${height}px` : "100%"; return targetCanvas; } function scalePixels(pixel, ratio, width, height, yInvert) { const point = pixel; const x = scaleX(point[0], ratio, width); let y = scaleY(point[1], ratio, height, yInvert); let t = scaleX(point[0] + 1, ratio, width); const xHigh = t === width - 1 ? t : t - 1; t = scaleY(point[1] + 1, ratio, height, yInvert); let yHigh; if (yInvert) { t = t === 0 ? t : t + 1; yHigh = y; y = t; } else { yHigh = t === height - 1 ? t : t - 1; } return { x, y, // when ratio < 1, current css pixel and next css pixel may point to same device pixel, set width/height to 1 in those cases. 
width: Math.max(xHigh - x + 1, 1), height: Math.max(yHigh - y + 1, 1) }; } function scaleX(x, ratio, width) { const r = Math.min(Math.round(x * ratio), width - 1); return r; } function scaleY(y, ratio, height, yInvert) { return yInvert ? Math.max(0, height - 1 - Math.round(y * ratio)) : Math.min(Math.round(y * ratio), height - 1); } // src/adapter/resources/texture.ts var _Texture = class extends Resource { get [Symbol.toStringTag]() { return "Texture"; } toString() { return `Texture(${this.id},${this.format},${this.width}x${this.height})`; } /** dimension of this texture */ dimension; /** format of this texture */ format; /** width in pixels of this texture */ width; /** height in pixels of this texture */ height; /** depth of this texture */ depth; /** mip levels in this texture */ mipLevels; /** "Time" of last update. Monotonically increasing timestamp. TODO move to AsyncTexture? */ updateTimestamp; /** Do not use directly. Create with device.createTexture() */ constructor(device, props) { props = _Texture.normalizeProps(device, props); super(device, props, _Texture.defaultProps); this.dimension = this.props.dimension; this.format = this.props.format; this.width = this.props.width; this.height = this.props.height; this.depth = this.props.depth; if (this.props.width === void 0 || this.props.height === void 0) { const size = _Texture.getTextureDataSize(this.props.data); this.width = size?.width || 1; this.height = size?.height || 1; } if (this.props.mipmaps && this.props.mipLevels === void 0) { this.props.mipLevels = "pyramid"; } this.mipLevels = this.props.mipLevels === "pyramid" ? _Texture.getMipLevelCount(this.width, this.height) : this.props.mipLevels || 1; this.updateTimestamp = device.incrementTimestamp(); } /** * Create a new texture with the same parameters and optionally, a different size * @note Textures are immutable and cannot be resized after creation, but we can create a similar texture with the same parameters but a new size. 
* @note Does not copy contents of the texture */ clone(size) { return this.device.createTexture({ ...this.props, ...size }); } /** Check if data is an external image */ static isExternalImage(data) { return typeof ImageData !== "undefined" && data instanceof ImageData || typeof ImageBitmap !== "undefined" && data instanceof ImageBitmap || typeof HTMLImageElement !== "undefined" && data instanceof HTMLImageElement || typeof HTMLVideoElement !== "undefined" && data instanceof HTMLVideoElement || typeof VideoFrame !== "undefined" && data instanceof VideoFrame || typeof HTMLCanvasElement !== "undefined" && data instanceof HTMLCanvasElement || typeof OffscreenCanvas !== "undefined" && data instanceof OffscreenCanvas; } /** Determine size (width and height) of provided image data */ static getExternalImageSize(data) { if (typeof ImageData !== "undefined" && data instanceof ImageData || typeof ImageBitmap !== "undefined" && data instanceof ImageBitmap || typeof HTMLCanvasElement !== "undefined" && data instanceof HTMLCanvasElement || typeof OffscreenCanvas !== "undefined" && data instanceof OffscreenCanvas) { return { width: data.width, height: data.height }; } if (typeof HTMLImageElement !== "undefined" && data instanceof HTMLImageElement) { return { width: data.naturalWidth, height: data.naturalHeight }; } if (typeof HTMLVideoElement !== "undefined" && data instanceof HTMLVideoElement) { return { width: data.videoWidth, height: data.videoHeight }; } if (typeof VideoFrame !== "undefined" && data instanceof VideoFrame) { return { width: data.displayWidth, height: data.displayHeight }; } throw new Error("Unknown image type"); } /** Check if texture data is a typed array */ static isTextureLevelData(data) { const typedArray = data?.data; return ArrayBuffer.isView(typedArray); } /** Get the size of the texture described by the provided TextureData */ static getTextureDataSize(data) { if (!data) { return null; } if (ArrayBuffer.isView(data)) { return null; } if 
(Array.isArray(data)) { return _Texture.getTextureDataSize(data[0]); } if (_Texture.isExternalImage(data)) { return _Texture.getExternalImageSize(data); } if (data && typeof data === "object" && data.constructor === Object) { const textureDataArray = Object.values(data); const untypedData = textureDataArray[0]; return { width: untypedData.width, height: untypedData.height }; } throw new Error("texture size deduction failed"); } /** * Normalize TextureData to an array of TextureLevelData / ExternalImages * @param data * @param options * @returns array of TextureLevelData / ExternalImages */ static normalizeTextureData(data, options) { let lodArray; if (ArrayBuffer.isView(data)) { lodArray = [ { // ts-expect-error does data really need to be Uint8ClampedArray? data, width: options.width, height: options.height // depth: options.depth } ]; } else if (!Array.isArray(data)) { lodArray = [data]; } else { lodArray = data; } return lodArray; } /** Calculate the number of mip levels for a texture of width and height */ static getMipLevelCount(width, height) { return Math.floor(Math.log2(Math.max(width, height))) + 1; } /** Convert luma.gl cubemap face constants to depth index */ static getCubeFaceDepth(face) { switch (face) { case "+X": return 0; case "-X": return 1; case "+Y": return 2; case "-Y": return 3; case "+Z": return 4; case "-Z": return 5; default: throw new Error(face); } } /** Ensure we have integer coordinates */ static normalizeProps(device, props) { const newProps = { ...props }; const overriddenDefaultProps = device?.props?._resourceDefaults?.texture || {}; Object.assign(newProps, overriddenDefaultProps); const { width, height } = newProps; if (typeof width === "number") { newProps.width = Math.max(1, Math.ceil(width)); } if (typeof height === "number") { newProps.height = Math.max(1, Math.ceil(height)); } return newProps; } }; var Texture = _Texture; __publicField(Texture, "COPY_SRC", 1); __publicField(Texture, "COPY_DST", 2); __publicField(Texture, 
"TEXTURE", 4); __publicField(Texture, "STORAGE", 8); __publicField(Texture, "RENDER_ATTACHMENT", 16); __publicField(Texture, "CubeFaces", ["+X", "-X", "+Y", "-Y", "+Z", "-Z"]); __publicField(Texture, "defaultProps", { ...Resource.defaultProps, data: null, dimension: "2d", format: "rgba8unorm", width: void 0, height: void 0, depth: 1, mipmaps: false, compressed: false, usage: 0, mipLevels: void 0, samples: void 0, sampler: {}, view: void 0, flipY: void 0 }); /** Default options */ __publicField(Texture, "defaultCopyExternalImageOptions", { image: void 0, sourceX: 0, sourceY: 0, width: void 0, height: void 0, depth: 1, mipLevel: 0, x: 0, y: 0, z: 0, aspect: "all", colorSpace: "srgb", premultipliedAlpha: false, flipY: false }); // src/adapter/resources/texture-view.ts var _TextureView = class extends Resource { get [Symbol.toStringTag]() { return "TextureView"; } /** Should not be constructed directly. Use `texture.createView(props)` */ constructor(device, props) { super(device, props, _TextureView.defaultProps); } }; var TextureView = _TextureView; __publicField(TextureView, "defaultProps", { ...Resource.defaultProps, format: void 0, dimension: void 0, aspect: "all", baseMipLevel: 0, mipLevelCount: void 0, baseArrayLayer: 0, arrayLayerCount: void 0 }); // src/adapter/resources/external-texture.ts var _ExternalTexture = class extends Resource { get [Symbol.toStringTag]() { return "ExternalTexture"; } constructor(device, props) { super(device, props, _ExternalTexture.defaultProps); } }; var ExternalTexture = _ExternalTexture; __publicField(ExternalTexture, "defaultProps", { ...Resource.defaultProps, source: void 0, colorSpace: "srgb" }); // src/adapter-utils/format-compiler-log.ts function formatCompilerLog(shaderLog, source, options) { let formattedLog = ""; const lines = source.split(/\r?\n/); const log2 = shaderLog.slice().sort((a, b) => a.lineNum - b.lineNum); switch (options?.showSourceCode || "no") { case "all": let currentMessage = 0; for (let lineNum = 1; 
lineNum <= lines.length; lineNum++) { formattedLog += getNumberedLine(lines[lineNum - 1], lineNum, options); while (log2.length > currentMessage && log2[currentMessage].lineNum === lineNum) { const message = log2[currentMessage++]; formattedLog += formatCompilerMessage(message, lines, message.lineNum, { ...options, inlineSource: false }); } } return formattedLog; case "issues": case "no": for (const message of shaderLog) { formattedLog += formatCompilerMessage(message, lines, message.lineNum, { inlineSource: options?.showSourceCode !== "no" }); } return formattedLog; } } function formatCompilerMessage(message, lines, lineNum, options) { if (options?.inlineSource) { const numberedLines = getNumberedLines(lines, lineNum); const positionIndicator = message.linePos > 0 ? `${" ".repeat(message.linePos + 5)}^^^ ` : ""; return ` ${numberedLines}${positionIndicator}${message.type.toUpperCase()}: ${message.message} `; } const color = message.type === "error" ? "red" : "#8B4000"; return options?.html ? `
${translatedSource}
`;
}
const button = document.createElement("Button");
button.innerHTML = `
${htmlLog}
`;
button.style.top = "10px";
button.style.left = "10px";
button.style.position = "absolute";
button.style.zIndex = "9999";
button.style.width = "100%";
button.style.textAlign = "left";
document.body.appendChild(button);
const errors = document.getElementsByClassName("luma-compiler-log-error");
errors[0]?.scrollIntoView();
button.onclick = () => {
const dataURI = `data:text/plain,${encodeURIComponent(this.source)}`;
navigator.clipboard.writeText(dataURI);
};
}
};
var Shader = _Shader;
/**
 * Default props applied by the Shader constructor (merged with Resource.defaultProps).
 * Values of `void 0` are placeholders that subclasses/callers may override.
 */
__publicField(Shader, "defaultProps", {
...Resource.defaultProps,
language: "auto", // shader language; "auto" presumably means detect from source - TODO confirm
stage: void 0, // pipeline stage (e.g. vertex/fragment); must be supplied by caller
source: "", // shader source code string
sourceMap: null, // optional source map for debugging
entryPoint: "main", // entry point function name within the source
debugShaders: void 0 // per-shader debug override (see Device.defaultProps.debugShaders)
});
/**
 * Chooses an id for a shader: a `#define SHADER_NAME` in the source wins,
 * then an explicitly supplied `props.id`, then a generated unique id.
 * @param props - shader props ({source, id, stage, ...})
 * @returns the resolved shader id string
 */
function getShaderIdFromProps(props) {
  // Pass an empty default name so a source without a SHADER_NAME define falls
  // through to `props.id` / `uid()`. (With getShaderName's default of
  // "unnamed", the truthy "unnamed" made those fallbacks unreachable and
  // silently ignored a caller-supplied id.)
  return getShaderName(props.source, "") || props.id || uid(`unnamed ${props.stage}-shader`);
}
/**
 * Extracts a shader name declared via the `#define SHADER_NAME <name>` convention.
 * @param shader - shader source code
 * @param defaultName - value returned when no SHADER_NAME define is present
 * @returns the declared shader name, or `defaultName`
 */
function getShaderName(shader, defaultName = "unnamed") {
  // Use \s+ for the separators. The original pattern used the character class
  // [\s*], which (a) matches exactly ONE whitespace-or-literal-"*" character,
  // so multiple spaces/tabs between tokens failed, and (b) required a trailing
  // whitespace character AFTER the name, so a define at the very end of the
  // source was never matched.
  const SHADER_NAME_REGEXP = /#define\s+SHADER_NAME\s+([A-Za-z0-9_-]+)/;
  const match = SHADER_NAME_REGEXP.exec(shader);
  return match ? match[1] : defaultName;
}
// src/adapter/resources/sampler.ts
// src/adapter/resources/sampler.ts
var _Sampler = class extends Resource {
  get [Symbol.toStringTag]() {
    return "Sampler";
  }
  /** Do not construct directly. Create via the owning Device. */
  constructor(device, props) {
    super(device, _Sampler.normalizeProps(device, props), _Sampler.defaultProps);
  }
  /**
   * Merges device-level sampler defaults (device.props._resourceDefaults.sampler)
   * into the supplied props.
   * NOTE(review): the device defaults are spread LAST, so they override
   * caller-supplied props — the opposite precedence of _RenderPass.normalizeProps.
   * Confirm this is intentional before relying on it.
   */
  static normalizeProps(device, props) {
    const deviceDefaults = device?.props?._resourceDefaults?.sampler || {};
    return { ...props, ...deviceDefaults };
  }
};
var Sampler = _Sampler;
/**
 * Default sampler props (merged with Resource.defaultProps).
 * Address modes and filters default to the most conservative settings
 * (clamping, nearest/no interpolation).
 */
__publicField(Sampler, "defaultProps", {
...Resource.defaultProps,
type: "color-sampler", // sampler kind; presumably gates whether `compare` applies - TODO confirm
addressModeU: "clamp-to-edge",
addressModeV: "clamp-to-edge",
addressModeW: "clamp-to-edge",
magFilter: "nearest", // no interpolation by default
minFilter: "nearest",
mipmapFilter: "none",
lodMinClamp: 0,
lodMaxClamp: 32,
// Per WebGPU spec (32 is the GPUSamplerDescriptor default for lodMaxClamp)
compare: "less-equal", // comparison function; only meaningful for comparison samplers
maxAnisotropy: 1 // 1 = anisotropic filtering disabled
});
// src/adapter/resources/framebuffer.ts
// src/adapter/resources/framebuffer.ts
var _Framebuffer = class extends Resource {
  get [Symbol.toStringTag]() {
    return "Framebuffer";
  }
  /** Width of all attachments in this framebuffer */
  width;
  /** Height of all attachments in this framebuffer */
  height;
  /** Do not construct directly. Create via the owning Device. */
  constructor(device, props = {}) {
    super(device, props, _Framebuffer.defaultProps);
    this.width = this.props.width;
    this.height = this.props.height;
  }
  /**
   * Create a copy of this framebuffer with new attached textures, with same props but of the specified size.
   * @note Does not copy contents of the attached textures.
   * @param size - {width, height} overrides merged into each cloned texture's props
   */
  clone(size) {
    const colorAttachments = this.colorAttachments.map(
      (colorAttachment) => colorAttachment.texture.clone(size)
    );
    const depthStencilAttachment = this.depthStencilAttachment && this.depthStencilAttachment.texture.clone(size);
    return this.device.createFramebuffer({ ...this.props, colorAttachments, depthStencilAttachment });
  }
  /**
   * Resize this framebuffer's attachments.
   * @param size - new size as [width, height] or {width, height}. When omitted,
   *   attachments are unconditionally recreated at the current size.
   */
  resize(size) {
    // Calling resize() with no argument forces recreation at the current size
    let updateSize = !size;
    if (size) {
      const [width, height] = Array.isArray(size) ? size : [size.width, size.height];
      // Only recreate attachments if the size actually changed
      updateSize = updateSize || height !== this.height || width !== this.width;
      this.width = width;
      this.height = height;
    }
    if (updateSize) {
      log.log(2, `Resizing framebuffer ${this.id} to ${this.width}x${this.height}`)();
      this.resizeAttachments(this.width, this.height);
    }
  }
  /** Auto creates any textures. String attachments are treated as formats to create. */
  autoCreateAttachmentTextures() {
    if (this.props.colorAttachments.length === 0 && !this.props.depthStencilAttachment) {
      // Fixed message (was "Framebuffer has noattachments")
      throw new Error("Framebuffer has no attachments");
    }
    this.colorAttachments = this.props.colorAttachments.map((attachment2, index) => {
      if (typeof attachment2 === "string") {
        // A string is a texture format - create a matching color texture
        const texture = this.createColorTexture(attachment2, index);
        this.attachResource(texture);
        return texture.view;
      }
      if (attachment2 instanceof Texture) {
        return attachment2.view;
      }
      // Assumed to already be a TextureView
      return attachment2;
    });
    const attachment = this.props.depthStencilAttachment;
    if (attachment) {
      if (typeof attachment === "string") {
        const texture = this.createDepthStencilTexture(attachment);
        this.attachResource(texture);
        this.depthStencilAttachment = texture.view;
      } else if (attachment instanceof Texture) {
        this.depthStencilAttachment = attachment.view;
      } else {
        this.depthStencilAttachment = attachment;
      }
    }
  }
  /** Create a color texture sized to this framebuffer for the given format */
  createColorTexture(format, index) {
    return this.device.createTexture({
      id: `${this.id}-color-attachment-${index}`,
      usage: Texture.RENDER_ATTACHMENT,
      format,
      width: this.width,
      height: this.height,
      // TODO deprecated? - luma.gl v8 compatibility
      sampler: {
        magFilter: "linear",
        minFilter: "linear"
      }
    });
  }
  /** Create a depth stencil texture sized to this framebuffer for the given format */
  createDepthStencilTexture(format) {
    return this.device.createTexture({
      id: `${this.id}-depth-stencil-attachment`,
      usage: Texture.RENDER_ATTACHMENT,
      format,
      width: this.width,
      height: this.height,
      mipmaps: false
    });
  }
  /**
   * Default implementation of resize
   * Creates new textures with correct size for all attachments.
   * and destroys existing textures if owned
   */
  resizeAttachments(width, height) {
    for (let i = 0; i < this.colorAttachments.length; ++i) {
      if (this.colorAttachments[i]) {
        const resizedTexture = this.colorAttachments[i].texture.clone({
          width,
          height
        });
        this.destroyAttachedResource(this.colorAttachments[i]);
        this.colorAttachments[i] = resizedTexture.view;
        this.attachResource(resizedTexture.view);
      }
    }
    if (this.depthStencilAttachment) {
      const resizedTexture = this.depthStencilAttachment.texture.clone({
        width,
        height
      });
      this.destroyAttachedResource(this.depthStencilAttachment);
      this.depthStencilAttachment = resizedTexture.view;
      // NOTE(review): the color path above attaches resizedTexture.view while
      // this attaches the texture itself - confirm which resource
      // attachResource() is meant to track for ownership/destroy.
      this.attachResource(resizedTexture);
    }
    this.updateAttachments();
  }
};
var Framebuffer = _Framebuffer;
/** Default framebuffer props (merged with Resource.defaultProps). */
__publicField(Framebuffer, "defaultProps", {
...Resource.defaultProps,
width: 1,
height: 1,
colorAttachments: [],
// example value: ['rgba8unorm'] (strings are texture formats to auto-create)
depthStencilAttachment: null
// example value: 'depth24plus-stencil8'
});
// src/adapter/resources/render-pipeline.ts
// src/adapter/resources/render-pipeline.ts
var _RenderPipeline = class extends Resource {
  get [Symbol.toStringTag]() {
    return "RenderPipeline";
  }
  /** The merged layout */
  shaderLayout;
  /** Buffer map describing buffer interleaving etc */
  bufferLayout;
  /** The linking status of the pipeline. 'pending' if linking is asynchronous, and on production */
  linkStatus = "pending";
  /** The hash of the pipeline */
  hash = "";
  /** Do not construct directly. Create via the owning Device. */
  constructor(device, props) {
    super(device, props, _RenderPipeline.defaultProps);
    // Read from the merged `this.props` (defaults applied by Resource)
    const { shaderLayout, bufferLayout } = this.props;
    this.shaderLayout = shaderLayout;
    this.bufferLayout = bufferLayout || [];
  }
  // DEPRECATED METHODS
  /**
   * Uniforms
   * @deprecated Use uniforms buffers
   * @note textures, samplers and uniform buffers should be set via `setBindings()`, these are not considered uniforms.
   * @note In WebGL uniforms have a performance penalty, they are reset before each call to enable pipeline sharing.
   */
  setUniformsWebGL(uniforms) {
    throw new Error("Use uniform blocks");
  }
};
var RenderPipeline = _RenderPipeline;
/** Default render pipeline props (merged with Resource.defaultProps). */
__publicField(RenderPipeline, "defaultProps", {
...Resource.defaultProps,
vs: null, // vertex shader (must be supplied)
vertexEntryPoint: "vertexMain", // entry point name in the vertex shader
vsConstants: {},
fs: null, // fragment shader
fragmentEntryPoint: "fragmentMain", // entry point name in the fragment shader
fsConstants: {},
shaderLayout: null,
bufferLayout: [],
topology: "triangle-list",
parameters: {},
// isInstanced: false,
// instanceCount: 0,
// vertexCount: 0,
bindings: {},
uniforms: {}
});
// src/adapter/resources/render-pass.ts
// src/adapter/resources/render-pass.ts
var _RenderPass = class extends Resource {
  get [Symbol.toStringTag]() {
    return "RenderPass";
  }
  /** Do not construct directly. Create via the owning Device / command encoder. */
  constructor(device, props) {
    super(device, _RenderPass.normalizeProps(device, props), _RenderPass.defaultProps);
  }
  /**
   * Merges device-level render pass defaults (device.props._resourceDefaults.renderPass)
   * under the supplied props. Explicit props take precedence.
   */
  static normalizeProps(device, props) {
    const deviceDefaults = device.props._resourceDefaults?.renderPass;
    return { ...deviceDefaults, ...props };
  }
};
var RenderPass = _RenderPass;
/** TODO - should be [0, 0, 0, 0], update once deck.gl tests run clean */
__publicField(RenderPass, "defaultClearColor", [0, 0, 0, 1]);
/** Depth 1.0 represents the far plance */
__publicField(RenderPass, "defaultClearDepth", 1);
/** Clears all stencil bits */
__publicField(RenderPass, "defaultClearStencil", 0);
/** Default properties for RenderPass */
__publicField(RenderPass, "defaultProps", {
...Resource.defaultProps,
framebuffer: null,
parameters: void 0,
clearColor: _RenderPass.defaultClearColor,
clearColors: void 0,
clearDepth: _RenderPass.defaultClearDepth,
clearStencil: _RenderPass.defaultClearStencil,
depthReadOnly: false,
stencilReadOnly: false,
discard: false,
occlusionQuerySet: void 0,
timestampQuerySet: void 0,
beginTimestampIndex: void 0,
endTimestampIndex: void 0
});
// src/adapter/resources/compute-pipeline.ts
var _ComputePipeline = class extends Resource {
  get [Symbol.toStringTag]() {
    return "ComputePipeline";
  }
  /** Cache key identifying this pipeline */
  hash = "";
  /** The merged shader layout */
  shaderLayout;
  constructor(device, props) {
    super(device, props, _ComputePipeline.defaultProps);
    // Read from this.props (the defaults-merged props set up by Resource)
    // rather than the raw `props` argument, for consistency with
    // RenderPipeline's constructor above.
    this.shaderLayout = this.props.shaderLayout;
  }
};
// Public alias for the class expression above
var ComputePipeline = _ComputePipeline;
// Static default props (shared Resource defaults first so these keys win)
__publicField(ComputePipeline, "defaultProps", {
  ...Resource.defaultProps,
  shader: void 0,
  entryPoint: void 0,
  constants: {},
  shaderLayout: void 0
});
// src/adapter/resources/compute-pass.ts
// Thin Resource subclass; adds only a toStringTag and its own default props.
var _ComputePass = class extends Resource {
get [Symbol.toStringTag]() {
return "ComputePass";
}
constructor(device, props) {
super(device, props, _ComputePass.defaultProps);
}
};
// Public alias for the class expression above
var ComputePass = _ComputePass;
// Optional GPU timestamp instrumentation, disabled (void 0) by default
__publicField(ComputePass, "defaultProps", {
...Resource.defaultProps,
timestampQuerySet: void 0,
beginTimestampIndex: void 0,
endTimestampIndex: void 0
});
// src/adapter/resources/command-encoder.ts
// Thin Resource subclass; concrete encoding behavior lives in adapter
// implementations (see TODO below re aligning with the WebGPU API shape).
var _CommandEncoder = class extends Resource {
get [Symbol.toStringTag]() {
return "CommandEncoder";
}
constructor(device, props) {
super(device, props, _CommandEncoder.defaultProps);
}
// TODO - luma.gl has these on the device, should we align with WebGPU API?
// beginRenderPass(GPURenderPassDescriptor descriptor): GPURenderPassEncoder;
// beginComputePass(optional GPUComputePassDescriptor descriptor = {}): GPUComputePassEncoder;
};
// Public alias for the class expression above
var CommandEncoder = _CommandEncoder;
__publicField(CommandEncoder, "defaultProps", {
...Resource.defaultProps,
// presumably enables GPU timing of encoded work - TODO confirm semantics
measureExecutionTime: void 0
});
// src/adapter/resources/command-buffer.ts
var _CommandBuffer = class extends Resource {
  constructor(device, props) {
    super(device, props, _CommandBuffer.defaultProps);
  }
  get [Symbol.toStringTag]() {
    return "CommandBuffer";
  }
};
// Public alias for the class expression above
var CommandBuffer = _CommandBuffer;
// No props of its own beyond the shared Resource defaults
__publicField(CommandBuffer, "defaultProps", {
  ...Resource.defaultProps
});
// src/gpu-type-utils/decode-attribute-type.ts
/**
 * Decodes a shader attribute type string into its scalar data type,
 * component count, byte length, signedness/integer flags and the
 * compatible default vertex format.
 * @note looks up `attributeType` in the TYPE_INFO table — throws on
 *   destructuring if the key is unknown.
 */
function decodeShaderAttributeType(attributeType) {
  const [dataType, components] = TYPE_INFO[attributeType];
  return {
    dataType,
    components,
    defaultVertexFormat: getCompatibleVertexFormat(dataType, components),
    byteLength: TYPE_SIZES2[dataType] * components,
    // i32/u32 are integer types; everything except u32 is signed
    integer: dataType === "u32" || dataType === "i32",
    signed: dataType !== "u32"
  };
}
/**
 * Maps a shader scalar data type + component count to the matching
 * vertex format name (e.g. 'f32' x 3 -> 'float32x3').
 * f16 has no 1- or 3-component vertex formats, so it snaps up to x2/x4.
 */
function getCompatibleVertexFormat(dataType, components) {
  // f16 only exists as float16x2 / float16x4 — handle it up front
  if (dataType === "f16") {
    return components <= 2 ? "float16x2" : "float16x4";
  }
  let baseFormat;
  if (dataType === "f32") {
    baseFormat = "float32";
  } else if (dataType === "i32") {
    baseFormat = "sint32";
  } else if (dataType === "u32") {
    baseFormat = "uint32";
  }
  // Single-component formats carry no 'xN' suffix
  return components === 1 ? baseFormat : `${baseFormat}x${components}`;
}
var TYPE_INFO = {
f32: ["f32", 1],
"vec2