diff --git a/app_headless/index.js b/app_headless/index.js
index fea114de..89509fac 100644
--- a/app_headless/index.js
+++ b/app_headless/index.js
@@ -42,8 +42,7 @@ async function main()
state.userCamera.fitViewToScene(state.gltf, state.sceneIndex);
state.userCamera.updatePosition();
- view.animate(state);
- view.renderFrame(state);
+ view.renderFrame(state, width, height);
let pixels = new Uint8Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
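The headless app illustrates the new single entry point: renderFrame(state, width, height) now advances animation and resizes the renderer itself, so there is no separate animate()/viewport step. A condensed sketch of the capture flow after this change (the surrounding setup of gl, view, state, width, and height is abbreviated):

    // condensed headless capture flow; gl, view, state, width and height
    // are created earlier in app_headless/index.js (omitted here)
    state.userCamera.fitViewToScene(state.gltf, state.sceneIndex);

    // renderFrame() now steps animations and resizes the renderer internally,
    // replacing the old view.animate() / viewport-update pair
    view.renderFrame(state, width, height);

    // read back the finished frame for writing to disk
    const pixels = new Uint8Array(width * height * 4);
    gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);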
diff --git a/app_web/index.html b/app_web/index.html
index c1ae5126..91a977aa 100644
--- a/app_web/index.html
+++ b/app_web/index.html
@@ -118,123 +118,136 @@
[HTML markup lost in extraction: this hunk rewraps the Models, Display, Animation, and XMP tab items in scrollable containers. The recoverable labels are the model dropdown ("Models", "{{ item.title }}"), the "Image Based" and "Punctual Lighting" toggles under Display, and the Animation and XMP panels.]
@@ -245,44 +258,53 @@
[HTML markup lost in extraction: this hunk rebuilds the Advanced Controls tab, including two "{{ item.title }}" dropdowns, the exposure-compensation ticks (-3 to +3), and the existing Skinning and Morphing switches, and adds new Clearcoat, Sheen, and Transmission switches.]
diff --git a/app_web/src/logic/uimodel.js b/app_web/src/logic/uimodel.js
index 3943082b..d5b13a23 100644
--- a/app_web/src/logic/uimodel.js
+++ b/app_web/src/logic/uimodel.js
@@ -1,7 +1,6 @@
-import { bindCallback, fromEvent, merge } from 'rxjs';
+import { fromEvent, merge } from 'rxjs';
import { map, filter, startWith, pluck } from 'rxjs/operators';
import { glTF, ToneMaps, DebugOutput } from 'gltf-sample-viewer';
-import { gltfInput } from '../input.js';
import { getIsGltf, getIsGlb, getIsHdr } from 'gltf-sample-viewer';
@@ -50,6 +49,9 @@ class UIModel
this.exposurecompensation = app.exposureChanged$.pipe(pluck("event", "msg"));
this.skinningEnabled = app.skinningChanged$.pipe(pluck("event", "msg"));
this.morphingEnabled = app.morphingChanged$.pipe(pluck("event", "msg"));
+ this.clearcoatEnabled = app.clearcoatChanged$.pipe(pluck("event", "msg"));
+ this.sheenEnabled = app.sheenChanged$.pipe(pluck("event", "msg"));
+ this.transmissionEnabled = app.transmissionChanged$.pipe(pluck("event", "msg"));
this.iblEnabled = app.iblChanged$.pipe(pluck("event", "msg"));
this.punctualLightsEnabled = app.punctualLightsChanged$.pipe(pluck("event", "msg"));
this.environmentEnabled = app.environmentVisibilityChanged$.pipe(pluck("event", "msg"));
@@ -79,6 +81,32 @@ class UIModel
this.hdr = inputObservables.hdrDropped;
this.variant = app.variantChanged$.pipe(pluck("event", "msg"));
+
+ this.model.subscribe(() => {
+ // drop the previously dropped file's entry from the model list once a different model is selected
+ if(this.app.models[this.app.models.length - 1] === this.lastDroppedFilename)
+ {
+ this.app.models.pop();
+ this.lastDroppedFilename = undefined;
+ }
+ });
+
+ const droppedFileName = inputObservables.gltfDropped.pipe(
+ map( (data) => {
+ return data.mainFile.name;
+ })
+ );
+ droppedFileName.subscribe( (filename) => {
+ if(filename !== undefined)
+ {
+ filename = filename.split('/').pop();
+ filename = filename.substr(0, filename.lastIndexOf('.'));
+
+ this.app.models.push(filename);
+ this.app.selectedModel = filename;
+ this.lastDroppedFilename = filename;
+ }
+ });
}
static getInputObservables(inputDomElement)
diff --git a/app_web/src/main.js b/app_web/src/main.js
index 0ec3cb77..534ebb54 100644
--- a/app_web/src/main.js
+++ b/app_web/src/main.js
@@ -13,7 +13,7 @@ async function main()
const canvas = document.getElementById("canvas");
const context = canvas.getContext("webgl2", { alpha: false, antialias: true });
const ui = document.getElementById("app");
- const view = new GltfView(context, ui);
+ const view = new GltfView(context);
const state = view.createState();
initDracoLib();
@@ -29,7 +29,7 @@ async function main()
// whenever a new model is selected, load it and when complete pass the loaded gltf
// into a stream back into the UI
- const subject = new Subject();
+ const gltfLoadedSubject = new Subject();
const gltfLoadedMulticast = uiModel.model.pipe(
mergeMap( (model) =>
{
@@ -41,8 +41,8 @@ async function main()
const scene = state.gltf.scenes[state.sceneIndex];
scene.applyTransformHierarchy(state.gltf);
computePrimitiveCentroids(state.gltf);
+ state.userCamera.aspectRatio = canvas.width / canvas.height;
state.userCamera.fitViewToScene(state.gltf, state.sceneIndex);
- state.userCamera.updatePosition();
state.animationIndices = [0];
state.animationTimer.start();
return state;
@@ -50,18 +50,20 @@ async function main()
);
}),
// transform gltf loaded observable to multicast observable to avoid multiple execution with multiple subscriptions
- multicast(subject)
+ multicast(gltfLoadedSubject)
);
-
+ const sceneChangedSubject = new Subject();
const sceneChangedObservable = uiModel.scene.pipe(map( newSceneIndex => {
state.sceneIndex = newSceneIndex;
+ state.cameraIndex = undefined;
const scene = state.gltf.scenes[state.sceneIndex];
scene.applyTransformHierarchy(state.gltf);
computePrimitiveCentroids(state.gltf);
state.userCamera.fitViewToScene(state.gltf, state.sceneIndex);
- state.userCamera.updatePosition();
- }));
+ }),
+ multicast(sceneChangedSubject)
+ );
const statisticsUpdateObservableTemp = merge(
gltfLoadedMulticast,
@@ -103,6 +105,16 @@ async function main()
state.renderingParameters.morphing = morphingEnabled;
});
+ uiModel.clearcoatEnabled.subscribe( clearcoatEnabled => {
+ state.renderingParameters.clearcoat = clearcoatEnabled;
+ });
+ uiModel.sheenEnabled.subscribe( sheenEnabled => {
+ state.renderingParameters.sheen = sheenEnabled;
+ });
+ uiModel.transmissionEnabled.subscribe( transmissionEnabled => {
+ state.renderingParameters.transmission = transmissionEnabled;
+ });
+
uiModel.iblEnabled.subscribe( iblEnabled => {
state.renderingParameters.useIBL = iblEnabled;
});
@@ -166,21 +178,38 @@ async function main()
input.setupCanvasInputBindings(canvas);
input.onRotate = (deltaX, deltaY) =>
{
- state.userCamera.rotate(deltaX, deltaY);
- state.userCamera.updatePosition();
+ if (state.cameraIndex === undefined)
+ {
+ state.userCamera.orbit(deltaX, deltaY);
+ }
};
input.onPan = (deltaX, deltaY) =>
{
- state.userCamera.pan(deltaX, deltaY);
- state.userCamera.updatePosition();
+ if (state.cameraIndex === undefined)
+ {
+ state.userCamera.pan(deltaX, deltaY);
+ }
};
input.onZoom = (delta) =>
{
- state.userCamera.zoomIn(delta);
- state.userCamera.updatePosition();
+ if (state.cameraIndex === undefined)
+ {
+ state.userCamera.zoomBy(delta);
+ }
+ };
+
+ // configure the animation loop
+ const update = () =>
+ {
+ canvas.width = window.innerWidth - ui.getBoundingClientRect().width;
+ canvas.height = canvas.clientHeight;
+
+ view.renderFrame(state, canvas.width, canvas.height);
+ window.requestAnimationFrame(update);
};
- await view.startRendering(state, canvas);
+ // Start the animation loop.
+ window.requestAnimationFrame(update);
}
export { main };
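The pointer handlers above only move the user camera while state.cameraIndex is undefined, and the scene-change handler resets that index so orbit/pan/zoom come back after switching scenes. The code that sets the index is not part of this patch; a hypothetical camera-selection subscription (the uiModel.camera stream name is assumed, not taken from this diff) would look roughly like:

    // hypothetical wiring: selecting a glTF camera suppresses user-camera input,
    // because every handler above checks state.cameraIndex first
    uiModel.camera.subscribe(cameraIndex => {
        // a number is expected to pick the corresponding glTF camera;
        // undefined falls back to state.userCamera and re-enables orbit/pan/zoom
        state.cameraIndex = cameraIndex;
    });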
diff --git a/app_web/src/ui/ui.js b/app_web/src/ui/ui.js
index 293cecf5..626bc6ca 100644
--- a/app_web/src/ui/ui.js
+++ b/app_web/src/ui/ui.js
@@ -50,7 +50,7 @@ const app = new Vue({
'environmentChanged$', 'debugchannelChanged$', 'tonemapChanged$', 'skinningChanged$',
'environmentVisibilityChanged$', 'punctualLightsChanged$', 'iblChanged$', 'morphingChanged$',
'addEnvironment$', 'colorChanged$', 'environmentRotationChanged$', 'animationPlayChanged$',
- 'variantChanged$', 'exposureChanged$'],
+ 'variantChanged$', 'exposureChanged$', 'clearcoatChanged$', 'sheenChanged$', 'transmissionChanged$'],
data() {
return {
fullheight: true,
@@ -84,6 +84,9 @@ const app = new Vue({
toneMap: "None",
skinning: true,
morphing: true,
+ clearcoatEnabled: true,
+ sheenEnabled: true,
+ transmissionEnabled: true,
};
},
mounted: function()
diff --git a/app_web/ui.css b/app_web/ui.css
index 0c36415f..260347d2 100644
--- a/app_web/ui.css
+++ b/app_web/ui.css
@@ -103,6 +103,13 @@ label.switch
white-space: nowrap;
}
+.tabItemScrollable
+{
+ overflow-y: auto;
+ overflow-x: hidden;
+ height: 100%;
+}
+
/**********************************/
/**********************************/
/* Styles for individual elements */
@@ -128,6 +135,15 @@ canvas
flex-wrap: unset;
}
+.b-tabs .tab-content
+{
+ padding-right: 0px !important;
+}
+
+.tabContent
+{
+ margin-right: 10px;
+}
.tab-item h2
{
@@ -282,8 +298,4 @@ div .field.has-addons
.b-slider-tick-label
{
margin-top: 8px;
- margin-left: -5px;
- line-height: 0.5;
- position: relative !important;
- text-align: center;
}
diff --git a/source/GltfState/gltf_state.js b/source/GltfState/gltf_state.js
index 597a154f..6d03ccff 100644
--- a/source/GltfState/gltf_state.js
+++ b/source/GltfState/gltf_state.js
@@ -11,6 +11,9 @@ class GltfState
this.renderingParameters = {
morphing: true,
skinning: true,
+ clearcoat: true,
+ sheen: true,
+ transmission: true,
clearColor: [58, 64, 74],
exposure: 1.0,
usePunctual: true,
diff --git a/source/GltfView/gltf_view.js b/source/GltfView/gltf_view.js
index 5c1c4989..ea5629a8 100644
--- a/source/GltfView/gltf_view.js
+++ b/source/GltfView/gltf_view.js
@@ -4,9 +4,8 @@ import { GL } from '../Renderer/webgl.js';
class GltfView
{
- constructor(context, ui)
+ constructor(context)
{
- this.ui = ui;
this.context = context;
this.renderer = new gltfRenderer(this.context);
}
@@ -16,26 +15,11 @@ class GltfView
return new GltfState();
}
- updateCanvas(canvas)
- {
- if(this.ui !== undefined)
- {
- canvas.width = window.innerWidth - this.ui.getBoundingClientRect().width;
- }
- else
- {
- canvas.width = canvas.clientWidth;
- }
- canvas.height = canvas.clientHeight;
- }
-
- updateViewport(width, height)
+ renderFrame(state, width, height)
{
+ this.animate(state);
+
this.renderer.resize(width, height);
- }
-
- renderFrame(state)
- {
this.renderer.clearFrame(state.renderingParameters.clearColor);
@@ -50,6 +34,7 @@ class GltfView
this.renderer.drawScene(state, scene);
}
+
animate(state)
{
if(state.gltf === undefined)
@@ -92,7 +77,11 @@ class GltfView
const transparentMaterials = activeMaterials.filter(material => material.alphaMode === "BLEND");
const faceCount = activePrimitives
.map(primitive => {
- const verticesCount = state.gltf.accessors[primitive.indices].count;
+ let verticesCount = 0;
+ if(primitive.indices !== undefined)
+ {
+ verticesCount = state.gltf.accessors[primitive.indices].count;
+ }
if (verticesCount === 0)
{
return 0;
@@ -125,21 +114,6 @@ class GltfView
transparentMaterialsCount: transparentMaterials.length
};
}
-
- async startRendering(state, canvas)
- {
- const update = () =>
- {
- this.animate(state);
- this.updateCanvas(canvas);
- this.updateViewport(canvas.width, canvas.height);
- this.renderFrame(state);
- window.requestAnimationFrame(update);
- };
-
- // After this start executing render loop.
- window.requestAnimationFrame(update);
- }
}
export { GltfView };
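With updateCanvas, updateViewport, and startRendering removed, the embedding application owns the frame loop and hands the drawing-buffer size to the view every frame. A minimal embedding sketch under that contract (the package import path and the canvas-sizing policy are assumptions, not part of this patch):

    import { GltfView } from 'gltf-sample-viewer';   // assumed export path

    const canvas = document.getElementById('canvas');
    const view = new GltfView(canvas.getContext('webgl2'));
    const state = view.createState();

    const loop = () =>
    {
        // the host decides how large the drawing buffer is each frame
        canvas.width = canvas.clientWidth;
        canvas.height = canvas.clientHeight;

        // animation stepping now happens inside renderFrame()
        view.renderFrame(state, canvas.width, canvas.height);
        window.requestAnimationFrame(loop);
    };
    window.requestAnimationFrame(loop);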
diff --git a/source/Renderer/renderer.js b/source/Renderer/renderer.js
index ea12f583..82ff8dba 100644
--- a/source/Renderer/renderer.js
+++ b/source/Renderer/renderer.js
@@ -270,7 +270,7 @@ class gltfRenderer
this.pushVertParameterDefines(vertDefines, state.renderingParameters, state.gltf, node, primitive);
vertDefines = primitive.getDefines().concat(vertDefines);
- let fragDefines = material.getDefines().concat(vertDefines);
+ let fragDefines = material.getDefines(state.renderingParameters).concat(vertDefines);
this.pushFragParameterDefines(fragDefines, state);
const fragmentHash = this.shaderCache.selectShader(material.getShaderIdentifier(), fragDefines);
@@ -359,7 +359,7 @@ class gltfRenderer
for (let [uniform, val] of material.getProperties().entries())
{
- this.shader.updateUniform(uniform, val);
+ this.shader.updateUniform(uniform, val, false);
}
for (let i = 0; i < material.textures.length; ++i)
@@ -387,7 +387,7 @@ class gltfRenderer
this.webGl.setTexture(this.shader.getUniformLocation("u_SheenELUT"), state.gltf, state.environment.sheenELUT, textureCount++);
}
- if(transmissionSampleTexture !== undefined && state.renderingParameters.useIBL && state.environment)
+ if(transmissionSampleTexture !== undefined && state.renderingParameters.useIBL && state.environment && state.renderingParameters.transmission)
{
this.webGl.context.activeTexture(GL.TEXTURE0 + textureCount);
this.webGl.context.bindTexture(this.webGl.context.TEXTURE_2D, this.opaqueRenderTexture);
diff --git a/source/Renderer/shaders/pbr.frag b/source/Renderer/shaders/pbr.frag
index c6b35884..8ca020fd 100644
--- a/source/Renderer/shaders/pbr.frag
+++ b/source/Renderer/shaders/pbr.frag
@@ -141,6 +141,19 @@ NormalInfo getNormalInfo(vec3 v)
return info;
}
+vec3 getClearcoatNormal(NormalInfo normalInfo)
+{
+ #ifdef HAS_CLEARCOAT_NORMAL_MAP
+ vec3 n = texture(u_ClearcoatNormalSampler, getClearcoatNormalUV()).rgb * 2.0 - vec3(1.0);
+ n *= vec3(u_ClearcoatNormalScale, u_ClearcoatNormalScale, 1.0);
+ n = mat3(normalInfo.t, normalInfo.b, normalInfo.ng) * normalize(n);
+ return n;
+ #else
+ return normalInfo.ng;
+ #endif
+}
+
+
vec4 getBaseColor()
{
vec4 baseColor = vec4(1.0, 1.0, 1.0, 1.0);
@@ -248,12 +261,9 @@ MaterialInfo getClearCoatInfo(MaterialInfo info, NormalInfo normalInfo, float f0
info.clearcoatRoughness *= clearcoatSampleRoughness.g;
#endif
- #ifdef HAS_CLEARCOAT_NORMAL_MAP
- vec4 clearcoatSampleNormal = texture(u_ClearcoatNormalSampler, getClearcoatNormalUV());
- info.clearcoatNormal = normalize(clearcoatSampleNormal.xyz);
- #else
- info.clearcoatNormal = normalInfo.ng;
- #endif
+
+ info.clearcoatNormal = getClearcoatNormal(normalInfo);
+
info.clearcoatRoughness = clamp(info.clearcoatRoughness, 0.0, 1.0);
diff --git a/source/Renderer/shaders/textures.glsl b/source/Renderer/shaders/textures.glsl
index 43be573e..117529b4 100644
--- a/source/Renderer/shaders/textures.glsl
+++ b/source/Renderer/shaders/textures.glsl
@@ -57,6 +57,7 @@ uniform mat3 u_ClearcoatRoughnessUVTransform;
uniform sampler2D u_ClearcoatNormalSampler;
uniform int u_ClearcoatNormalUVSet;
uniform mat3 u_ClearcoatNormalUVTransform;
+uniform float u_ClearcoatNormalScale;
// Sheen Material
uniform sampler2D u_SheenColorSampler;
diff --git a/source/gltf/camera.js b/source/gltf/camera.js
index cdada441..fd274ba8 100644
--- a/source/gltf/camera.js
+++ b/source/gltf/camera.js
@@ -7,9 +7,9 @@ class gltfCamera extends GltfObject
constructor(
type = "perspective",
znear = 0.01,
- zfar = 10000.0,
+ zfar = Infinity,
yfov = 45.0 * Math.PI / 180.0,
- aspectRatio = 16.0 / 9.0,
+ aspectRatio = undefined,
xmag = 1.0,
ymag = 1.0,
name = undefined,
@@ -115,12 +115,12 @@ class gltfCamera extends GltfObject
{
const view = mat4.create();
const position = this.getPosition(gltf);
- const target = this.getLookAtTarget(gltf);
+ const target = this.getTarget(gltf);
mat4.lookAt(view, position, target, vec3.fromValues(0, 1, 0));
return view;
}
- getLookAtTarget(gltf)
+ getTarget(gltf)
{
const target = vec3.create();
const position = this.getPosition(gltf);
@@ -171,6 +171,60 @@ class gltfCamera extends GltfObject
{
return gltf.nodes[this.node];
}
+
+ // Returns a JSON object describing the camera's current values (used to export the user camera).
+ getDescription(gltf)
+ {
+ const camera = {
+ "type": this.type
+ };
+
+ if (this.name != undefined)
+ {
+ camera["name"] = this.name;
+ }
+
+ if (this.type === "perspective")
+ {
+ camera["perspective"] = {};
+ if (this.aspectRatio != undefined)
+ {
+ camera["perspective"]["aspectRatio"] = this.aspectRatio;
+ }
+ camera["perspective"]["yfov"] = this.yfov;
+ if (this.zfar != Infinity)
+ {
+ camera["perspective"]["zfar"] = this.zfar;
+ }
+ camera["perspective"]["znear"] = this.znear;
+ }
+ else if (this.type === "orthographic")
+ {
+ camera["orthographic"] = {};
+ camera["orthographic"]["xmag"] = this.xmag;
+ camera["orthographic"]["ymag"] = this.ymag;
+ camera["orthographic"]["zfar"] = this.zfar;
+ camera["orthographic"]["znear"] = this.znear;
+ }
+
+ const mat = this.getViewMatrix(gltf);
+
+ const node = {
+ "camera": 0,
+ "matrix": [mat[0], mat[1], mat[2], mat[3],
+ mat[4], mat[5], mat[6], mat[7],
+ mat[8], mat[9], mat[10], mat[11],
+ mat[12], mat[13], mat[14], mat[15]]
+ };
+
+ if (this.node != undefined && gltf.nodes[this.node].name != undefined)
+ {
+ node["name"] = gltf.nodes[this.node].name;
+ }
+ }
+
+ return {
+ "node": node,
+ "camera": camera
+ }
+ }
}
export { gltfCamera };
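getDescription(gltf) packages the camera as glTF-ready JSON: a node carrying the current view matrix plus a camera object. An illustrative example of the returned shape for a perspective camera (values invented for illustration, not captured from a run):

    // illustrative only; matrix entries and numbers are made up
    const description = {
        node: {
            camera: 0,
            matrix: [/* 16 entries of the current view matrix */],
            name: "CameraNode"          // only set when the camera's node has a name
        },
        camera: {
            type: "perspective",
            perspective: {
                yfov: 0.785,
                znear: 0.01
                // aspectRatio and zfar appear only when defined / finite
            }
        }
    };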
diff --git a/source/gltf/material.js b/source/gltf/material.js
index 0f876dbf..4f7d8988 100644
--- a/source/gltf/material.js
+++ b/source/gltf/material.js
@@ -18,6 +18,11 @@ class gltfMaterial extends GltfObject
this.alphaCutoff = 0.5;
this.doubleSided = false;
+ // pbr next extension toggles
+ this.hasClearcoat = false;
+ this.hasSheen = false;
+ this.hasTransmission = false;
+
// non gltf properties
this.type = "unlit";
this.textures = [];
@@ -52,9 +57,24 @@ class gltfMaterial extends GltfObject
}
}
- getDefines()
+ getDefines(renderingParameters)
{
- return this.defines;
+ const defines = Array.from(this.defines);
+
+ if (this.hasClearcoat && renderingParameters.clearcoat)
+ {
+ defines.push("MATERIAL_CLEARCOAT 1");
+ }
+ if (this.hasSheen && renderingParameters.sheen)
+ {
+ defines.push("MATERIAL_SHEEN 1");
+ }
+ if (this.hasTransmission && renderingParameters.transmission)
+ {
+ defines.push("MATERIAL_TRANSMISSION 1");
+ }
+
+ return defines;
}
getProperties()
@@ -265,7 +285,7 @@ class gltfMaterial extends GltfObject
let clearcoatFactor = 0.0;
let clearcoatRoughnessFactor = 0.0;
- this.defines.push("MATERIAL_CLEARCOAT 1");
+ this.hasClearcoat = true;
if(this.extensions.KHR_materials_clearcoat.clearcoatFactor !== undefined)
{
@@ -299,6 +319,8 @@ class gltfMaterial extends GltfObject
this.textures.push(this.clearcoatNormalTexture);
this.defines.push("HAS_CLEARCOAT_NORMAL_MAP 1");
this.properties.set("u_ClearcoatNormalUVSet", this.clearcoatNormalTexture.texCoord);
+ this.properties.set("u_ClearcoatNormalScale", this.clearcoatNormalTexture.scale);
+
}
this.properties.set("u_ClearcoatFactor", clearcoatFactor);
this.properties.set("u_ClearcoatRoughnessFactor", clearcoatRoughnessFactor);
@@ -311,7 +333,7 @@ class gltfMaterial extends GltfObject
let sheenRoughnessFactor = 0.0;
let sheenColorFactor = vec3.fromValues(1.0, 1.0, 1.0);
- this.defines.push("MATERIAL_SHEEN 1");
+ this.hasSheen = true;
if(this.extensions.KHR_materials_sheen.sheenRoughnessFactor !== undefined)
{
@@ -347,7 +369,7 @@ class gltfMaterial extends GltfObject
{
let transmissionFactor = 0.0;
- this.defines.push("MATERIAL_TRANSMISSION 1");
+ this.hasTransmission = true;
if (transmissionFactor !== undefined)
{
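Deferring the MATERIAL_* defines to getDefines(renderingParameters) is what lets the new Clearcoat, Sheen, and Transmission switches take effect without reloading the asset: the renderer rebuilds the define list on every draw, so toggling a flag simply selects a different shader permutation from the cache. A small sketch of the effect, assuming a material whose glTF declares KHR_materials_sheen (so hasSheen is true):

    // same material, different UI state, different define sets
    const sheenOn  = material.getDefines({ clearcoat: true, sheen: true,  transmission: true });
    // -> [...material.defines, "MATERIAL_SHEEN 1"]

    const sheenOff = material.getDefines({ clearcoat: true, sheen: false, transmission: true });
    // -> [...material.defines]  (no MATERIAL_SHEEN, so the sheen shader code is compiled out)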
diff --git a/source/gltf/user_camera.js b/source/gltf/user_camera.js
index 32578119..3bcf317e 100644
--- a/source/gltf/user_camera.js
+++ b/source/gltf/user_camera.js
@@ -10,23 +10,19 @@ const MaxNearFarRatio = 10000;
class UserCamera extends gltfCamera
{
constructor(
- position = [0, 0, 0],
- target = [0, 0,0],
- up = [0, 1, 0],
- xRot = 0,
- yRot = 0,
- zoom = 1)
+ target = [0, 0, 0],
+ yaw = 0,
+ pitch = 0,
+ distance = 1)
{
super();
- this.position = jsToGl(position);
this.target = jsToGl(target);
- this.up = jsToGl(up);
- this.xRot = xRot;
- this.yRot = yRot;
- this.zoom = zoom;
+ this.yaw = yaw;
+ this.pitch = pitch;
+ this.distance = distance;
this.zoomFactor = 1.04;
- this.rotateSpeed = 1 / 180;
+ this.orbitSpeed = 1 / 180;
this.panSpeed = 1;
this.sceneExtents = {
min: vec3.create(),
@@ -34,57 +30,92 @@ class UserCamera extends gltfCamera
};
}
- updatePosition()
+ setVerticalFoV(yfov)
+ {
+ this.yfov = yfov;
+ }
+
+ getPosition()
{
// calculate direction from focus to camera (assuming camera is at positive z)
- // yRot rotates *around* x-axis, xRot rotates *around* y-axis
- const direction = vec3.fromValues(0, 0, 1);
- this.toLocalRotation(direction);
+ // pitch rotates *around* x-axis, yaw rotates *around* y-axis
+ const direction = vec3.fromValues(0, 0, this.distance);
+ this.toGlobalOrientation(direction);
const position = vec3.create();
- vec3.scale(position, direction, this.zoom);
- vec3.add(position, position, this.target);
+ vec3.add(position, this.target, direction);
+ return position;
+ }
+
+ getTarget()
+ {
+ return this.target;
+ }
- this.position = position;
+ lookAt(from, to)
+ {
+ // up is implicitly (0, 1, 0)
+ this.target = to;
- this.fitCameraPlanesToExtents(this.sceneExtents.min, this.sceneExtents.max);
+ const difference = vec3.create();
+ vec3.subtract(difference, from, to);
+ const projectedDifference = vec3.fromValues(from[0] - to[0], 0, from[2] - to[2]);
+
+ this.pitch = vec3.angle(difference, projectedDifference);
+ this.yaw = vec3.angle(projectedDifference, vec3.fromValues(1.0, 0.0, 0.0));
+ this.distance = vec3.length(difference);
}
- reset(gltf, sceneIndex)
+ setPosition(position)
{
- this.xRot = 0;
- this.yRot = 0;
- this.fitViewToScene(gltf, sceneIndex, true);
+ this.lookAt(position, this.target);
}
- zoomIn(value)
+ setTarget(target)
+ {
+ this.target = target;
+ }
+
+ setRotation(yaw, pitch)
+ {
+ this.yaw = yaw;
+ this.pitch = pitch;
+ }
+
+ setZoom(distance)
+ {
+ this.distance = distance;
+ }
+
+ zoomBy(value)
{
if (value > 0)
{
- this.zoom *= this.zoomFactor;
+ this.distance *= this.zoomFactor;
}
else
{
- this.zoom /= this.zoomFactor;
+ this.distance /= this.zoomFactor;
}
+ this.fitCameraPlanesToExtents(this.sceneExtents.min, this.sceneExtents.max);
}
- rotate(x, y)
+ orbit(x, y)
{
const yMax = Math.PI / 2 - 0.01;
- this.xRot += (x * this.rotateSpeed);
- this.yRot += (y * this.rotateSpeed);
- this.yRot = clamp(this.yRot, -yMax, yMax);
+ this.yaw += (x * this.orbitSpeed);
+ this.pitch += (y * this.orbitSpeed);
+ this.pitch = clamp(this.pitch, -yMax, yMax);
}
pan(x, y)
{
const left = vec3.fromValues(-1, 0, 0);
- this.toLocalRotation(left);
+ this.toGlobalOrientation(left);
vec3.scale(left, left, x * this.panSpeed);
const up = vec3.fromValues(0, 1, 0);
- this.toLocalRotation(up);
+ this.toGlobalOrientation(up);
vec3.scale(up, up, y * this.panSpeed);
vec3.add(this.target, this.target, up);
@@ -97,35 +128,47 @@ class UserCamera extends gltfCamera
this.panSpeed = longestDistance / PanSpeedDenominator;
}
+ reset()
+ {
+ this.yaw = 0;
+ this.pitch = 0;
+ this.fitDistanceToExtents(this.sceneExtents.min, this.sceneExtents.max);
+ this.fitCameraTargetToExtents(this.sceneExtents.min, this.sceneExtents.max);
+ }
+
fitViewToScene(gltf, sceneIndex)
{
getSceneExtents(gltf, sceneIndex, this.sceneExtents.min, this.sceneExtents.max);
this.fitCameraTargetToExtents(this.sceneExtents.min, this.sceneExtents.max);
- this.fitZoomToExtents(this.sceneExtents.min, this.sceneExtents.max);
+ this.fitDistanceToExtents(this.sceneExtents.min, this.sceneExtents.max);
+
+ const direction = vec3.fromValues(0, 0, this.distance);
+ vec3.add(this.getPosition(), this.target, direction);
+
this.fitPanSpeedToScene(this.sceneExtents.min, this.sceneExtents.max);
this.fitCameraPlanesToExtents(this.sceneExtents.min, this.sceneExtents.max);
- }
-
- toLocalRotation(vector)
- {
- vec3.rotateX(vector, vector, VecZero, -this.yRot);
- vec3.rotateY(vector, vector, VecZero, -this.xRot);
- }
- getLookAtTarget()
- {
- return this.target;
+ this.yaw = 0;
+ this.pitch = 0;
}
- getPosition()
+ // Converts orientation from camera space to global space
+ toGlobalOrientation(vector)
{
- return this.position;
+ vec3.rotateX(vector, vector, VecZero, -this.pitch);
+ vec3.rotateY(vector, vector, VecZero, -this.yaw);
}
- fitZoomToExtents(min, max)
+ fitDistanceToExtents(min, max)
{
const maxAxisLength = Math.max(max[0] - min[0], max[1] - min[1]);
- this.zoom = this.getFittingZoom(maxAxisLength);
+ const yfov = this.yfov;
+ const xfov = this.yfov * this.aspectRatio;
+
+ const yZoom = maxAxisLength / 2 / Math.tan(yfov / 2);
+ const xZoom = maxAxisLength / 2 / Math.tan(xfov / 2);
+
+ this.distance = Math.max(xZoom, yZoom);
}
fitCameraTargetToExtents(min, max)
@@ -138,10 +181,12 @@ class UserCamera extends gltfCamera
fitCameraPlanesToExtents(min, max)
{
+ // depends only on scene min/max and the camera distance
+
// Manually increase scene extent just for the camera planes to avoid camera clipping in most situations.
const longestDistance = 10 * vec3.distance(min, max);
- let zNear = this.zoom - (longestDistance * 0.6);
- let zFar = this.zoom + (longestDistance * 0.6);
+ let zNear = this.distance - (longestDistance * 0.6);
+ let zFar = this.distance + (longestDistance * 0.6);
// minimum near plane value needs to depend on far plane value to avoid z fighting or too large near planes
zNear = Math.max(zNear, zFar / MaxNearFarRatio);
@@ -149,17 +194,6 @@ class UserCamera extends gltfCamera
this.znear = zNear;
this.zfar = zFar;
}
-
- getFittingZoom(axisLength)
- {
- const yfov = this.yfov;
- const xfov = this.yfov * this.aspectRatio;
-
- const yZoom = axisLength / 2 / Math.tan(yfov / 2);
- const xZoom = axisLength / 2 / Math.tan(xfov / 2);
-
- return Math.max(xZoom, yZoom);
- }
}
export { UserCamera };
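The user camera is now described entirely by target, yaw, pitch, and distance; the eye position is derived on demand by rotating the offset (0, 0, distance) from camera space into world space and adding the target. A self-contained sketch of that derivation with gl-matrix, mirroring getPosition()/toGlobalOrientation() above:

    import { vec3 } from 'gl-matrix';

    // eye = target + Ry(-yaw) * Rx(-pitch) * (0, 0, distance)
    function orbitEye(target, yaw, pitch, distance)
    {
        const offset = vec3.fromValues(0, 0, distance);
        const origin = vec3.fromValues(0, 0, 0);
        vec3.rotateX(offset, offset, origin, -pitch); // pitch tilts around the x-axis
        vec3.rotateY(offset, offset, origin, -yaw);   // yaw swings around the y-axis

        const eye = vec3.create();
        vec3.add(eye, vec3.fromValues(target[0], target[1], target[2]), offset);
        return eye;
    }

    // yaw = 0, pitch = 0 places the eye on the +z axis, 'distance' units in front of the target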