diff --git a/examples/src/examples/graphics/clustered-area-lights.tsx b/examples/src/examples/graphics/clustered-area-lights.tsx
index a6f89580e60..7514988a6e2 100644
--- a/examples/src/examples/graphics/clustered-area-lights.tsx
+++ b/examples/src/examples/graphics/clustered-area-lights.tsx
@@ -6,6 +6,7 @@ import { Observer } from '@playcanvas/observer';
 class AreaLightsExample {
     static CATEGORY = 'Graphics';
     static NAME = 'Clustered Area Lights';
+    static WEBGPU_ENABLED = true;
 
     controls(data: Observer) {
         return <>
diff --git a/examples/src/examples/graphics/ground-fog.tsx b/examples/src/examples/graphics/ground-fog.tsx
index a11942b8bdf..2843fcf5b87 100644
--- a/examples/src/examples/graphics/ground-fog.tsx
+++ b/examples/src/examples/graphics/ground-fog.tsx
@@ -7,6 +7,7 @@ import { Observer } from '@playcanvas/observer';
 class GroundFogExample {
     static CATEGORY = 'Graphics';
     static NAME = 'Ground Fog';
+    static WEBGPU_ENABLED = true;
 
     static FILES = {
         'shader.vert': /* glsl */ `
@@ -113,7 +114,10 @@ class GroundFogExample {
         const gfxOptions = {
             deviceTypes: [deviceType],
             glslangUrl: '/static/lib/glslang/glslang.js',
-            twgslUrl: '/static/lib/twgsl/twgsl.js'
+            twgslUrl: '/static/lib/twgsl/twgsl.js',
+
+            // WebGPU does not currently support antialiased depth resolve; disable antialiasing until a shader-based resolve is implemented
+            antialias: false
         };
 
         pc.createGraphicsDevice(canvas, gfxOptions).then((device: pc.GraphicsDevice) => {
diff --git a/examples/src/examples/graphics/post-effects.tsx b/examples/src/examples/graphics/post-effects.tsx
index c8336b9fa6d..1e9ccefa12e 100644
--- a/examples/src/examples/graphics/post-effects.tsx
+++ b/examples/src/examples/graphics/post-effects.tsx
@@ -7,6 +7,7 @@ import { Observer } from '@playcanvas/observer';
 class PostEffectsExample {
     static CATEGORY = 'Graphics';
     static NAME = 'Post Effects';
+    static WEBGPU_ENABLED = true;
 
     controls(data: Observer) {
         return <>
@@ -102,7 +103,10 @@ class PostEffectsExample {
         const gfxOptions = {
             deviceTypes: [deviceType],
             glslangUrl: '/static/lib/glslang/glslang.js',
-            twgslUrl: '/static/lib/twgsl/twgsl.js'
+            twgslUrl: '/static/lib/twgsl/twgsl.js',
+
+            // WebGPU does not currently support antialiased depth resolve; disable antialiasing until a shader-based resolve is implemented
+            antialias: false
         };
 
         pc.createGraphicsDevice(canvas, gfxOptions).then((device: pc.GraphicsDevice) => {
diff --git a/src/platform/graphics/graphics-device-create.js b/src/platform/graphics/graphics-device-create.js
index 8f9d80edb8f..4a6f48ede8d 100644
--- a/src/platform/graphics/graphics-device-create.js
+++ b/src/platform/graphics/graphics-device-create.js
@@ -14,6 +14,8 @@ import { WebglGraphicsDevice } from './webgl/webgl-graphics-device.js';
  * specified array does not contain [{@link DEVICETYPE_WEBGL2} or {@link DEVICETYPE_WEBGL1}], those
  * are internally added to its end in this order. Typically, you'd only specify
  * {@link DEVICETYPE_WEBGPU}, or leave it empty.
+ * @param {boolean} [options.antialias] - Whether to perform anti-aliasing if possible. Defaults
+ * to true.
  * @param {string} [options.glslangUrl] - An url to glslang script, required if
  * {@link DEVICETYPE_WEBGPU} type is added to deviceTypes array. Not used for
  * {@link DEVICETYPE_WEBGL} device type creation.
@@ -22,6 +24,9 @@ import { WebglGraphicsDevice } from './webgl/webgl-graphics-device.js';
  */
 function createGraphicsDevice(canvas, options = {}) {
 
+    // defaults
+    options.antialias ??= true;
+
     const deviceTypes = options.deviceTypes ?? [];
 
     // automatically added fallbacks
diff --git a/src/platform/graphics/graphics-device.js b/src/platform/graphics/graphics-device.js
index ca061412481..9161373b4fd 100644
--- a/src/platform/graphics/graphics-device.js
+++ b/src/platform/graphics/graphics-device.js
@@ -28,6 +28,7 @@ class GraphicsDevice extends EventHandler {
      * The canvas DOM element that provides the underlying WebGL context used by the graphics device.
      *
      * @type {HTMLCanvasElement}
+     * @readonly
      */
     canvas;
 
@@ -35,6 +36,7 @@ class GraphicsDevice extends EventHandler {
      * True if the deviceType is WebGPU
      *
      * @type {boolean}
+     * @readonly
      */
     isWebGPU = false;
 
@@ -42,6 +44,7 @@ class GraphicsDevice extends EventHandler {
      * The scope namespace for shader attributes and variables.
      *
      * @type {ScopeSpace}
+     * @readonly
      */
     scope;
 
@@ -49,6 +52,7 @@ class GraphicsDevice extends EventHandler {
      * The maximum number of supported bones using uniform buffers.
      *
      * @type {number}
+     * @readonly
      */
     boneLimit;
 
@@ -56,6 +60,7 @@ class GraphicsDevice extends EventHandler {
      * The maximum supported texture anisotropy setting.
      *
      * @type {number}
+     * @readonly
      */
     maxAnisotropy;
 
@@ -63,6 +68,7 @@ class GraphicsDevice extends EventHandler {
      * The maximum supported dimension of a cube map.
      *
      * @type {number}
+     * @readonly
      */
     maxCubeMapSize;
 
@@ -70,6 +76,7 @@ class GraphicsDevice extends EventHandler {
      * The maximum supported dimension of a texture.
      *
      * @type {number}
+     * @readonly
      */
     maxTextureSize;
 
@@ -77,6 +84,7 @@ class GraphicsDevice extends EventHandler {
      * The maximum supported dimension of a 3D texture (any axis).
      *
      * @type {number}
+     * @readonly
      */
     maxVolumeSize;
 
@@ -85,9 +93,18 @@ class GraphicsDevice extends EventHandler {
      * 'lowp'.
      *
      * @type {string}
+     * @readonly
      */
     precision;
 
+    /**
+     * The number of hardware anti-aliasing samples used by the frame buffer.
+     *
+     * @readonly
+     * @type {number}
+     */
+    samples;
+
     /**
      * Currently active render target.
      *
@@ -96,6 +113,14 @@ class GraphicsDevice extends EventHandler {
      */
     renderTarget = null;
 
+    /**
+     * Index of the currently active render pass.
+     *
+     * @type {number}
+     * @ignore
+     */
+    renderPassIndex;
+
     /** @type {boolean} */
     insideRenderPass = false;
 
@@ -103,6 +128,7 @@ class GraphicsDevice extends EventHandler {
      * True if hardware instancing is supported.
      *
      * @type {boolean}
+     * @readonly
      */
     supportsInstancing;
 
@@ -118,6 +144,7 @@ class GraphicsDevice extends EventHandler {
      * True if 32-bit floating-point textures can be used as a frame buffer.
      *
      * @type {boolean}
+     * @readonly
      */
     textureFloatRenderable;
 
@@ -125,6 +152,7 @@ class GraphicsDevice extends EventHandler {
      * True if 16-bit floating-point textures can be used as a frame buffer.
      *
      * @type {boolean}
+     * @readonly
      */
     textureHalfFloatRenderable;
 
@@ -539,6 +567,7 @@ class GraphicsDevice extends EventHandler {
      * @ignore
      */
    frameStart() {
+        this.renderPassIndex = 0;
     }
 }
 
diff --git a/src/platform/graphics/render-pass.js b/src/platform/graphics/render-pass.js
index 7d1771ca34f..a8ada5ba406 100644
--- a/src/platform/graphics/render-pass.js
+++ b/src/platform/graphics/render-pass.js
@@ -1,4 +1,7 @@
+import { Debug } from '../../core/debug.js';
+import { Tracing } from '../../core/tracing.js';
 import { Color } from '../../core/math/color.js';
+import { TRACEID_RENDER_PASS, TRACEID_RENDER_PASS_DETAIL } from '../../core/constants.js';
 import { DebugGraphics } from '../graphics/debug-graphics.js';
 
 class ColorAttachmentOps {
@@ -227,6 +230,10 @@ class RenderPass {
         const realPass = this.renderTarget !== undefined;
         DebugGraphics.pushGpuMarker(device, `Pass:${this.name}`);
 
+        Debug.call(() => {
+            this.log(device, device.renderPassIndex);
+        });
+
         this.before?.();
 
         if (realPass) {
@@ -241,9 +248,58 @@ class RenderPass {
         }
 
         this.after?.();
 
+        device.renderPassIndex++;
+
         DebugGraphics.popGpuMarker(device);
     }
+
+    // #if _DEBUG
+    log(device, index) {
+        if (Tracing.get(TRACEID_RENDER_PASS) || Tracing.get(TRACEID_RENDER_PASS_DETAIL)) {
+
+            let rt = this.renderTarget;
+            if (rt === null && device.isWebGPU) {
+                rt = device.frameBuffer;
+            }
+            const hasColor = rt?.colorBuffer ?? rt?.impl.assignedColorTexture;
+            const hasDepth = rt?.depth;
+            const hasStencil = rt?.stencil;
+            const rtInfo = rt === undefined ? '' : ` RT: ${(rt ? rt.name : 'NULL')} ` +
+                `${hasColor ? '[Color]' : ''}` +
+                `${hasDepth ? '[Depth]' : ''}` +
+                `${hasStencil ? '[Stencil]' : ''}` +
+                `${(this.samples > 0 ? ' samples: ' + this.samples : '')}`;
+
+            Debug.trace(TRACEID_RENDER_PASS,
+                        `${index.toString().padEnd(2, ' ')}: ${this.name.padEnd(20, ' ')}` +
+                        rtInfo.padEnd(30));
+
+            if (this.colorOps && hasColor) {
+                Debug.trace(TRACEID_RENDER_PASS_DETAIL, `    colorOps: ` +
+                            `${this.colorOps.clear ? 'clear' : 'load'}->` +
+                            `${this.colorOps.store ? 'store' : 'discard'} ` +
+                            `${this.colorOps.resolve ? 'resolve ' : ''}` +
+                            `${this.colorOps.mipmaps ? 'mipmaps ' : ''}`);
+            }
+
+            if (this.depthStencilOps) {
+
+                if (hasDepth) {
+                    Debug.trace(TRACEID_RENDER_PASS_DETAIL, `    depthOps: ` +
+                                `${this.depthStencilOps.clearDepth ? 'clear' : 'load'}->` +
+                                `${this.depthStencilOps.storeDepth ? 'store' : 'discard'}`);
+                }
+
+                if (hasStencil) {
+                    Debug.trace(TRACEID_RENDER_PASS_DETAIL, `    stencOps: ` +
+                                `${this.depthStencilOps.clearStencil ? 'clear' : 'load'}->` +
+                                `${this.depthStencilOps.storeStencil ? 'store' : 'discard'}`);
+                }
+            }
+        }
+    }
+    // #endif
 }
 
 export { RenderPass, ColorAttachmentOps, DepthStencilAttachmentOps };
diff --git a/src/platform/graphics/webgpu/webgpu-graphics-device.js b/src/platform/graphics/webgpu/webgpu-graphics-device.js
index e5ad050fd93..0fb65a7dcd7 100644
--- a/src/platform/graphics/webgpu/webgpu-graphics-device.js
+++ b/src/platform/graphics/webgpu/webgpu-graphics-device.js
@@ -68,6 +68,9 @@ class WebgpuGraphicsDevice extends GraphicsDevice {
         this.isWebGPU = true;
         this._deviceType = DEVICETYPE_WEBGPU;
 
+        // WebGPU currently only supports 1 and 4 samples
+        this.samples = options.antialias ? 4 : 1;
+
         this.initDeviceCaps();
     }
 
@@ -198,7 +201,7 @@ class WebgpuGraphicsDevice extends GraphicsDevice {
             name: 'WebgpuFramebuffer',
             graphicsDevice: this,
             depth: true,
-            samples: 4
+            samples: this.samples
         });
     }
 
@@ -216,6 +219,8 @@ class WebgpuGraphicsDevice extends GraphicsDevice {
 
     frameStart() {
 
+        super.frameStart();
+
         WebgpuDebug.memory(this);
         WebgpuDebug.validate(this);
 
@@ -587,7 +592,7 @@ class WebgpuGraphicsDevice extends GraphicsDevice {
             // cannot copy depth from multisampled buffer. On WebGPU, it cannot be resolve at the end of the pass either,
            // and so we need to implement a custom depth resolve shader based copy
             // This is currently needed for uSceneDepthMap when the camera renders to multisampled render target
-            Debug.assert(source.samples <= 1, `copyRenderTarget does not currently support copy of depth from multisampled texture`, sourceRT);
+            Debug.assert(source.samples <= 1, `copyRenderTarget does not currently support copy of depth from multisampled texture ${sourceRT.name}`, sourceRT);
 
             /** @type {GPUImageCopyTexture} */
             const copySrc = {
diff --git a/src/platform/graphics/webgpu/webgpu-render-target.js b/src/platform/graphics/webgpu/webgpu-render-target.js
index fd0a14e9076..0f19a0e2a3b 100644
--- a/src/platform/graphics/webgpu/webgpu-render-target.js
+++ b/src/platform/graphics/webgpu/webgpu-render-target.js
@@ -120,6 +120,7 @@ class WebgpuRenderTarget {
         this.assignedColorTexture = gpuTexture;
 
         const view = gpuTexture.createView();
+        DebugHelper.setLabel(view, 'Framebuffer.assignedColor');
 
         // use it as render buffer or resolve target
         const colorAttachment = this.renderPassDescriptor.colorAttachments[0];
@@ -145,6 +146,8 @@ class WebgpuRenderTarget {
     init(device, renderTarget) {
         Debug.assert(!this.initialized);
 
+        Debug.assert(this.renderPassDescriptor, 'The render target has been destroyed and cannot be used anymore.', { renderTarget });
+
         const wgpu = device.wgpu;
 
         WebgpuDebug.memory(device);
@@ -234,8 +237,11 @@ class WebgpuRenderTarget {
 
             // allocate multi-sampled color buffer
             this.multisampledColorBuffer = wgpu.createTexture(multisampledTextureDesc);
+            DebugHelper.setLabel(this.multisampledColorBuffer, `${renderTarget.name}.multisampledColor`);
 
             colorAttachment.view = this.multisampledColorBuffer.createView();
+            DebugHelper.setLabel(colorAttachment.view, `${renderTarget.name}.multisampledColorView`);
+
             colorAttachment.resolveTarget = colorView;
 
         } else {
diff --git a/src/scene/frame-graph.js b/src/scene/frame-graph.js
index df1c6a2f1ca..758d40bd1b5 100644
--- a/src/scene/frame-graph.js
+++ b/src/scene/frame-graph.js
@@ -1,7 +1,3 @@
-import { TRACEID_RENDER_PASS, TRACEID_RENDER_PASS_DETAIL } from '../core/constants.js';
-import { Debug } from '../core/debug.js';
-import { Tracing } from '../core/tracing.js';
-
 /**
  * A frame graph represents a single rendering frame as a sequence of render passes.
  *
@@ -121,58 +117,6 @@ class FrameGraph {
         for (let i = 0; i < renderPasses.length; i++) {
             renderPasses[i].render();
         }
-
-        this.log(device);
-    }
-
-    log(device) {
-        // #if _DEBUG
-        if (Tracing.get(TRACEID_RENDER_PASS) || Tracing.get(TRACEID_RENDER_PASS_DETAIL)) {
-
-            this.renderPasses.forEach((renderPass, index) => {
-
-                let rt = renderPass.renderTarget;
-                if (rt === null && device.isWebGPU) {
-                    rt = device.frameBuffer;
-                }
-                const hasColor = rt?.colorBuffer ?? rt?.impl.assignedColorTexture;
-                const hasDepth = rt?.depth;
-                const hasStencil = rt?.stencil;
-                const rtInfo = rt === undefined ? '' : ` RT: ${(rt ? rt.name : 'NULL')} ` +
-                    `${hasColor ? '[Color]' : ''}` +
-                    `${hasDepth ? '[Depth]' : ''}` +
-                    `${hasStencil ? '[Stencil]' : ''}` +
-                    `${(renderPass.samples > 0 ? ' samples: ' + renderPass.samples : '')}`;
-
-                Debug.trace(TRACEID_RENDER_PASS,
-                            `${index.toString().padEnd(2, ' ')}: ${renderPass.name.padEnd(20, ' ')}` +
-                            rtInfo.padEnd(30));
-
-                if (renderPass.colorOps && hasColor) {
-                    Debug.trace(TRACEID_RENDER_PASS_DETAIL, `    colorOps: ` +
-                                `${renderPass.colorOps.clear ? 'clear' : 'load'}->` +
-                                `${renderPass.colorOps.store ? 'store' : 'discard'} ` +
-                                `${renderPass.colorOps.resolve ? 'resolve ' : ''}` +
-                                `${renderPass.colorOps.mipmaps ? 'mipmaps ' : ''}`);
-                }
-
-                if (renderPass.depthStencilOps) {
-
-                    if (hasDepth) {
-                        Debug.trace(TRACEID_RENDER_PASS_DETAIL, `    depthOps: ` +
-                                    `${renderPass.depthStencilOps.clearDepth ? 'clear' : 'load'}->` +
-                                    `${renderPass.depthStencilOps.storeDepth ? 'store' : 'discard'}`);
-                    }
-
-                    if (hasStencil) {
-                        Debug.trace(TRACEID_RENDER_PASS_DETAIL, `    stencOps: ` +
-                                    `${renderPass.depthStencilOps.clearStencil ? 'clear' : 'load'}->` +
-                                    `${renderPass.depthStencilOps.storeStencil ? 'store' : 'discard'}`);
-                    }
-                }
-            });
-        }
-        // #endif
     }
 }
diff --git a/src/scene/graphics/quad-render-utils.js b/src/scene/graphics/quad-render-utils.js
index 8fe09cf7eeb..18ed820617f 100644
--- a/src/scene/graphics/quad-render-utils.js
+++ b/src/scene/graphics/quad-render-utils.js
@@ -58,14 +58,20 @@ function drawQuadWithShader(device, target, shader, rect, scissorRect) {
     const renderPass = new RenderPass(device, () => {
         quad.render(rect, scissorRect);
     });
-    DebugHelper.setName(renderPass, `RenderPass-drawQuadWithShader${target ? `-${target.name}` : ''}`);
+    DebugHelper.setName(renderPass, `RenderPass-drawQuadWithShader${target ? `-${target.name}` : 'Framebuffer'}`);
     renderPass.init(target);
     renderPass.colorOps.clear = false;
     renderPass.depthStencilOps.clearDepth = false;
 
-    // TODO: this is temporary, till the webgpu supports setDepthTest
-    if (device.isWebGPU) {
-        renderPass.depthStencilOps.clearDepth = true;
+    // TODO: This is a workaround for the case where post-effects are used together with a multi-sampled framebuffer. The last post-effect
+    // renders into the multi-sampled framebuffer (render pass A), which is typically followed by further rendering to this framebuffer
+    // in a separate render pass B (e.g. rendering the UI). Those two render passes need to be merged into one, as they both render into
+    // the same framebuffer. The workaround here is to store the multi-sampled color buffer instead of only resolving it, which wastes
+    // memory bandwidth. Without this we end up with a black result (or just the UI), as the multi-sampled color buffer is never written to.
+    if (device.isWebGPU && target === null) {
+        const samples = target?.samples ?? device.samples;
+        if (samples > 1)
+            renderPass.colorOps.store = true;
     }
 
     renderPass.render();
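
For reference, a minimal usage sketch (not part of the patch) of the new antialias option added by this change, mirroring the ground-fog and post-effects example edits above. The glslang/twgsl URLs are the example app's static paths and the canvas variable is assumed to exist in the calling code.

    const gfxOptions = {
        deviceTypes: [pc.DEVICETYPE_WEBGPU],
        glslangUrl: '/static/lib/glslang/glslang.js',
        twgslUrl: '/static/lib/twgsl/twgsl.js',

        // request a non-antialiased framebuffer; on WebGPU the device then uses 1 sample instead of 4
        antialias: false
    };

    pc.createGraphicsDevice(canvas, gfxOptions).then((device) => {
        // device.samples exposes the number of hardware AA samples used by the frame buffer
        console.log(device.isWebGPU, device.samples);
    });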
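
A second sketch (also not part of the patch) showing how the per-pass logging that moved from FrameGraph.log() into RenderPass.log() could be enabled. It assumes the trace ID constants are re-exported on the public pc namespace like other TRACEID_* values; the output is only produced in debug builds, since the method is wrapped in // #if _DEBUG.

    // one trace line per render pass: index, name, render target, attachments, sample count
    pc.Tracing.set(pc.TRACEID_RENDER_PASS, true);

    // additionally log the color/depth/stencil load, store and resolve operations of each pass
    pc.Tracing.set(pc.TRACEID_RENDER_PASS_DETAIL, true);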