From f0341dbcbe996f28a55f1f6194ed0f6de0e91a77 Mon Sep 17 00:00:00 2001
From: Francis De Brabandere
Date: Tue, 18 Jun 2024 16:11:24 +0200
Subject: [PATCH] feat: gltf exporting

---
 Cargo.toml          |   1 +
 src/vpx/expanded.rs | 130 ++++++++++++++------
 src/vpx/gltf.rs     | 291 ++++++++++++++++++++++++++++++++++++++++++++
 src/vpx/mod.rs      |   1 +
 src/vpx/obj.rs      |  89 +++++++-------
 5 files changed, 432 insertions(+), 80 deletions(-)
 create mode 100644 src/vpx/gltf.rs

diff --git a/Cargo.toml b/Cargo.toml
index 4eb4ef5..3dd8f9d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -34,6 +34,7 @@ flate2 = "1.0.28"
 image = "0.25.1"
 weezl = "0.1.8"
 regex = "1.10.5"
+gltf = "1.4.1"

 [dev-dependencies]
 dirs = "5.0.1"
diff --git a/src/vpx/expanded.rs b/src/vpx/expanded.rs
index 10ce407..f9b1658 100644
--- a/src/vpx/expanded.rs
+++ b/src/vpx/expanded.rs
@@ -1,5 +1,5 @@
 use bytes::{Buf, BufMut, BytesMut};
-use std::collections::HashSet;
+use std::collections::{HashMap, HashSet};
 use std::error::Error;
 use std::ffi::OsStr;
 use std::fmt::{Display, Formatter};
@@ -28,10 +28,10 @@ use crate::vpx::custominfotags::CustomInfoTags;
 use crate::vpx::font::{FontData, FontDataJson};
 use crate::vpx::gameitem::primitive::Primitive;
 use crate::vpx::gameitem::GameItemEnum;
+use crate::vpx::gltf::{write_gltf, Output};
 use crate::vpx::image::{ImageData, ImageDataBits, ImageDataJpeg, ImageDataJson};
 use crate::vpx::jsonmodel::{collections_json, info_to_json, json_to_collections, json_to_info};
 use crate::vpx::lzw::{from_lzw_blocks, to_lzw_blocks};
-
 use crate::vpx::material::{
     Material, MaterialJson, SaveMaterial, SaveMaterialJson, SavePhysicsMaterial,
     SavePhysicsMaterialJson,
@@ -101,8 +101,8 @@ pub fn write<P: AsRef<Path>>(vpx: &VPX, expanded_dir: &P) -> Result<(), WriteErr
     let mut collections_json_file = File::create(collections_json_path)?;
     let json_collections = collections_json(&vpx.collections);
     serde_json::to_writer_pretty(&mut collections_json_file, &json_collections)?;
-    write_gameitems(vpx, expanded_dir)?;
-    write_images(vpx, expanded_dir)?;
+    let image_index = write_images(vpx, expanded_dir)?;
+    write_gameitems(vpx, expanded_dir, &image_index)?;
     write_sounds(vpx, expanded_dir)?;
     write_fonts(vpx, expanded_dir)?;
     write_game_data(vpx, expanded_dir)?;
@@ -241,7 +241,11 @@ where
     })
 }

-fn write_images<P: AsRef<Path>>(vpx: &VPX, expanded_dir: &P) -> Result<(), WriteError> {
+fn write_images<P: AsRef<Path>>(
+    vpx: &VPX,
+    expanded_dir: &P,
+) -> Result<HashMap<String, PathBuf>, WriteError> {
+    let mut index = HashMap::new();
     // create an image index
     let images_index_path = expanded_dir.as_ref().join("images.json");
     let mut images_index_file = File::create(images_index_path)?;
@@ -314,6 +318,7 @@ fn write_images<P: AsRef<Path>>(vpx: &VPX, expanded_dir: &P) -> Result<(), Write
     std::fs::create_dir_all(&images_dir)?;
     images.iter().try_for_each(|(image_file_name, image)| {
         let file_path = images_dir.join(image_file_name);
+        index.insert(image.name.clone(), file_path.clone());
         if !file_path.exists() {
             let mut file = File::create(&file_path)?;
             if image.is_link() {
@@ -350,7 +355,7 @@ fn write_images<P: AsRef<Path>>(vpx: &VPX, expanded_dir: &P) -> Result<(), Write
             ))
         }
     })?;
-    Ok(())
+    Ok(index)
 }

 fn write_image_bmp(
@@ -837,7 +842,11 @@ struct GameItemInfoJson {
     editor_layer_visibility: Option<bool>,
 }

-fn write_gameitems<P: AsRef<Path>>(vpx: &VPX, expanded_dir: &P) -> Result<(), WriteError> {
+fn write_gameitems<P: AsRef<Path>>(
+    vpx: &VPX,
+    expanded_dir: &P,
+    image_index: &HashMap<String, PathBuf>,
+) -> Result<(), WriteError> {
     let gameitems_dir = expanded_dir.as_ref().join("gameitems");
     std::fs::create_dir_all(&gameitems_dir)?;
     let mut used_names_lowercase: HashSet<String> = HashSet::new();
@@ -878,7 +887,7 @@ fn write_gameitems<P: AsRef<Path>>(vpx: &VPX, expanded_dir: &P) -> Result<(), Wr
         }
         let gameitem_file = File::create(&gameitem_path)?;
         serde_json::to_writer_pretty(&gameitem_file, &gameitem)?;
-        write_gameitem_binaries(&gameitems_dir, gameitem, file_name)?;
+        write_gameitem_binaries(&gameitems_dir, gameitem, file_name, image_index)?;
     }
     // write the gameitems index as array with names being the type and the name
     let gameitems_index_path = expanded_dir.as_ref().join("gameitems.json");
@@ -920,6 +929,7 @@ fn write_gameitem_binaries(
     gameitems_dir: &Path,
     gameitem: &GameItemEnum,
     json_file_name: String,
+    image_index: &HashMap<String, PathBuf>,
 ) -> Result<(), WriteError> {
     if let GameItemEnum::Primitive(primitive) = gameitem {
         // use wavefront-rs to write the vertices and indices
@@ -927,12 +937,45 @@ fn write_gameitem_binaries(
         if let Some(vertices_data) = &primitive.compressed_vertices_data {
             if let Some(indices_data) = &primitive.compressed_indices_data {
-                let (vertices, indices) = read_mesh(primitive, vertices_data, indices_data)?;
+                let mesh = read_mesh(primitive, vertices_data, indices_data)?;
                 let obj_path = gameitems_dir.join(format!("{}.obj", json_file_name));
-                write_obj(gameitem.name().to_string(), &vertices, &indices, &obj_path).map_err(
-                    |e| WriteError::Io(io::Error::new(io::ErrorKind::Other, format!("{}", e))),
-                )?;
-
+                write_obj(gameitem.name().to_string(), &mesh, &obj_path).map_err(|e| {
+                    WriteError::Io(io::Error::new(io::ErrorKind::Other, format!("{}", e)))
+                })?;
+                let gltf_path = gameitems_dir.join(format!("{}.gltf", json_file_name));
+                // TODO only if the image is not empty?
+                let image_path = image_index.get(&primitive.image);
+                let image_rel_path = if let Some(p) = image_path {
+                    PathBuf::from("..")
+                        .join("images")
+                        .join(p.file_name().unwrap())
+                } else {
+                    eprintln!(
+                        "Image not found for primitive {}: {}",
+                        primitive.name, primitive.image
+                    );
+                    PathBuf::new()
+                };
+                write_gltf(
+                    gameitem.name().to_string(),
+                    &mesh,
+                    &gltf_path,
+                    Output::Standard,
+                    image_rel_path.to_str().unwrap(),
+                )
+                .map_err(|e| {
+                    WriteError::Io(io::Error::new(io::ErrorKind::Other, format!("{}", e)))
+                })?;
+                write_gltf(
+                    gameitem.name().to_string(),
+                    &mesh,
+                    &gltf_path,
+                    Output::Binary,
+                    image_rel_path.to_str().unwrap(),
+                )
+                .map_err(|e| {
+                    WriteError::Io(io::Error::new(io::ErrorKind::Other, format!("{}", e)))
+                })?;
                 if let Some(animation_frames) = &primitive.compressed_animation_vertices_data {
                     if let Some(compressed_lengths) = &primitive.compressed_animation_vertices_len {
                         // zip frames with the counts
@@ -941,8 +984,7 @@ fn write_gameitem_binaries(
                             gameitems_dir,
                             gameitem,
                             &json_file_name,
-                            &vertices,
-                            &indices,
+                            &mesh,
                             zipped,
                         )?;
                     } else {
@@ -973,39 +1015,37 @@ fn write_animation_frames_to_objs(
     gameitems_dir: &Path,
     gameitem: &GameItemEnum,
     json_file_name: &str,
-    vertices: &[([u8; 32], Vertex3dNoTex2)],
-    indices: &[i64],
+    base_mesh: &ReadMesh,
     zipped: Zip<Iter<Vec<u8>>, Iter<u32>>,
 ) -> Result<(), WriteError> {
     for (i, (compressed_frame, compressed_length)) in zipped.enumerate() {
         let animation_frame_vertices =
             read_vpx_animation_frame(compressed_frame, compressed_length);
-        let full_vertices = replace_vertices(vertices, animation_frame_vertices)?;
+        let full_vertices = replace_vertices(&base_mesh.vertices, animation_frame_vertices)?;
         // The file name of the sequence must be <name>_x.obj where x is the frame number.
         let file_name = animation_frame_file_name(file_name_without_ext, i);
         let obj_path = gameitems_dir.join(file_name);
-        write_obj(
-            gameitem.name().to_string(),
-            &full_vertices,
-            indices,
-            &obj_path,
-        )
-        .map_err(|e| WriteError::Io(io::Error::new(io::ErrorKind::Other, format!("{}", e))))?;
+        let new_mesh = ReadMesh {
+            vertices: full_vertices,
+            indices: base_mesh.indices.clone(),
+        };
+        write_obj(gameitem.name().to_string(), &new_mesh, &obj_path)
+            .map_err(|e| WriteError::Io(io::Error::new(io::ErrorKind::Other, format!("{}", e))))?;
     }
     Ok(())
 }

 fn replace_vertices(
-    vertices: &[([u8; 32], Vertex3dNoTex2)],
+    vertices: &[ReadVertex],
     animation_frame_vertices: Result<Vec<VertData>, WriteError>,
-) -> Result<Vec<([u8; 32], Vertex3dNoTex2)>, WriteError> {
+) -> Result<Vec<ReadVertex>, WriteError> {
     // combine animation_vertices with the vertices and indices from the mesh
     let full_vertices = vertices
         .iter()
         .zip(animation_frame_vertices?.iter())
-        .map(|((_, vertex), animation_vertex)| {
-            let mut full_vertex: Vertex3dNoTex2 = (*vertex).clone();
+        .map(|(v, animation_vertex)| {
+            let mut full_vertex: Vertex3dNoTex2 = v.vertex.clone();
             full_vertex.x = animation_vertex.x;
             full_vertex.y = animation_vertex.y;
             full_vertex.z = -animation_vertex.z;
@@ -1013,7 +1053,10 @@ fn replace_vertices(
             full_vertex.ny = animation_vertex.ny;
             full_vertex.nz = -animation_vertex.nz;
             // TODO we don't have a full representation of the vertex, so we use a zeroed hash
-            ([0u8; 32], full_vertex)
+            ReadVertex {
+                raw: [0u8; 32],
+                vertex: full_vertex,
+            }
         })
         .collect::<Vec<_>>();
     Ok(full_vertices)
@@ -1044,7 +1087,17 @@ fn read_vpx_animation_frame(
     Ok(vertices)
 }

-type ReadMesh = (Vec<([u8; 32], Vertex3dNoTex2)>, Vec<i64>);
+pub(crate) struct ReadVertex {
+    /// In case we find a NaN in the data we provide the raw bytes
+    /// This is mainly because we want 100% compatibility with the original data
+    pub(crate) raw: [u8; BYTES_PER_VERTEX],
+    pub(crate) vertex: Vertex3dNoTex2,
+}
+
+pub(crate) struct ReadMesh {
+    pub(crate) vertices: Vec<ReadVertex>,
+    pub(crate) indices: Vec<i64>,
+}

 fn read_mesh(
     primitive: &Primitive,
@@ -1076,14 +1129,14 @@ fn read_mesh(
     } else {
         2
     };
-    let mut vertices: Vec<([u8; 32], Vertex3dNoTex2)> = Vec::with_capacity(num_vertices);
+    let mut vertices: Vec<ReadVertex> = Vec::with_capacity(num_vertices);
     let mut buff = BytesMut::from(raw_vertices.as_slice());
     for _ in 0..num_vertices {
         let mut vertex = read_vertex(&mut buff);
         // invert the z axis for both position and normal
-        vertex.1.z = -vertex.1.z;
-        vertex.1.nz = -vertex.1.nz;
+        vertex.vertex.z = -vertex.vertex.z;
+        vertex.vertex.nz = -vertex.vertex.nz;
         vertices.push(vertex);
     }

@@ -1099,7 +1152,7 @@
         indices.push(v2);
         indices.push(v1);
     }
-    Ok((vertices, indices))
+    Ok(ReadMesh { vertices, indices })
 }

 /// Animation frame vertex data
@@ -1145,7 +1198,7 @@ fn write_animation_vertex_data(buff: &mut BytesMut, vertex: &VertData) {
     buff.put_f32_le(vertex.nz);
 }

-fn read_vertex(buffer: &mut BytesMut) -> ([u8; 32], Vertex3dNoTex2) {
+fn read_vertex(buffer: &mut BytesMut) -> ReadVertex {
     let mut bytes = [0; 32];
     buffer.copy_to_slice(&mut bytes);
     let mut vertex_buff = BytesMut::from(bytes.as_ref());
@@ -1170,7 +1223,10 @@ fn read_vertex(buffer: &mut BytesMut) -> ([u8; 32], Vertex3dNoTex2) {
         tu,
         tv,
     };
-    (bytes, v3d)
+    ReadVertex {
+        raw: bytes,
+        vertex: v3d,
+    }
 }

 pub trait BytesMutExt {
diff --git a/src/vpx/gltf.rs b/src/vpx/gltf.rs
new file mode 100644
index 0000000..5b32df0
--- /dev/null
+++ b/src/vpx/gltf.rs
@@ -0,0 +1,291 @@
+use crate::vpx::expanded::ReadMesh;
+use gltf::json;
+use gltf::json::validation::Checked::Valid;
+use gltf::json::validation::USize64;
+use std::borrow::Cow;
+use std::error::Error;
+use std::fs::File;
+use std::io::Write;
+use std::mem;
+use std::path::PathBuf;
+
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+pub(crate) enum Output {
+    /// Output standard glTF.
+    Standard,
+
+    /// Output binary glTF.
+    Binary,
+}
+
+#[derive(Copy, Clone, Debug)]
+#[repr(C)]
+struct Vertex {
+    position: [f32; 3],
+    normal: [f32; 3],
+    uv: [f32; 2],
+}
+
+/// Calculate bounding coordinates of a list of vertices, used for the clipping distance of the model
+fn bounding_coords(points: &[Vertex]) -> ([f32; 3], [f32; 3]) {
+    let mut min = [f32::MAX, f32::MAX, f32::MAX];
+    let mut max = [f32::MIN, f32::MIN, f32::MIN];
+
+    for point in points {
+        let p = point.position;
+        for i in 0..3 {
+            min[i] = f32::min(min[i], p[i]);
+            max[i] = f32::max(max[i], p[i]);
+        }
+    }
+    (min, max)
+}
+
+fn align_to_multiple_of_four(n: &mut usize) {
+    *n = (*n + 3) & !3;
+}
+
+fn to_padded_byte_vector<T>(vec: Vec<T>) -> Vec<u8> {
+    let byte_length = vec.len() * mem::size_of::<T>();
+    let byte_capacity = vec.capacity() * mem::size_of::<T>();
+    let alloc = vec.into_boxed_slice();
+    let ptr = Box::<[T]>::into_raw(alloc) as *mut u8;
+    let mut new_vec = unsafe { Vec::from_raw_parts(ptr, byte_length, byte_capacity) };
+    while new_vec.len() % 4 != 0 {
+        new_vec.push(0); // pad to multiple of four bytes
+    }
+    new_vec
+}
+
+pub(crate) fn write_gltf(
+    name: String,
+    mesh: &ReadMesh,
+    gltf_file_path: &PathBuf,
+    output: Output,
+    image_rel_path: &str,
+) -> Result<(), Box<dyn Error>> {
+    let bin_path = gltf_file_path.with_extension("bin");
+
+    // use the indices to look up the vertices
+    let vertices = mesh
+        .indices
+        .iter()
+        .map(|i| {
+            let v = &mesh.vertices[*i as usize];
+            Vertex {
+                position: [v.vertex.x, v.vertex.y, v.vertex.z],
+                normal: [v.vertex.nx, v.vertex.ny, v.vertex.nz],
+                uv: [v.vertex.tu, v.vertex.tv],
+            }
+        })
+        .collect::<Vec<_>>();
+
+    let (min, max) = bounding_coords(&vertices);
+
+    let mut root = json::Root::default();
+
+    let buffer_length = vertices.len() * mem::size_of::<Vertex>();
+    let buffer = root.push(json::Buffer {
+        byte_length: USize64::from(buffer_length),
+        extensions: Default::default(),
+        extras: Default::default(),
+        name: None,
+        uri: if output == Output::Standard {
+            let path: String = bin_path
+                .file_name()
+                .expect("Invalid file name")
+                .to_str()
+                .expect("Invalid file name")
+                .to_string();
+            Some(path.into())
+        } else {
+            None
+        },
+    });
+    let buffer_view = root.push(json::buffer::View {
+        buffer,
+        byte_length: USize64::from(buffer_length),
+        byte_offset: None,
+        byte_stride: Some(json::buffer::Stride(mem::size_of::<Vertex>())),
+        extensions: Default::default(),
+        extras: Default::default(),
+        name: None,
+        target: Some(Valid(json::buffer::Target::ArrayBuffer)),
+    });
+    let positions = root.push(json::Accessor {
+        buffer_view: Some(buffer_view),
+        byte_offset: Some(USize64(0)),
+        count: USize64::from(vertices.len()),
+        component_type: Valid(json::accessor::GenericComponentType(
+            json::accessor::ComponentType::F32,
+        )),
+        extensions: Default::default(),
+        extras: Default::default(),
+        type_: Valid(json::accessor::Type::Vec3),
+        min: Some(json::Value::from(Vec::from(min))),
+        max: Some(json::Value::from(Vec::from(max))),
+        name: None,
+        normalized: false,
+        sparse: None,
+    });
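+    // All three accessors share the interleaved buffer view above: each Vertex is
+    // 8 x f32 (32 bytes), with the position at byte offset 0, the normal at byte
+    // offset 12 (3 floats in) and the uv at byte offset 24 (6 floats in).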
+    let normals = root.push(json::Accessor {
+        buffer_view: Some(buffer_view),
+        // we have to skip the first 3 floats to get to the normals
+        byte_offset: Some(USize64::from(3 * mem::size_of::<f32>())),
+        count: USize64::from(vertices.len()),
+        component_type: Valid(json::accessor::GenericComponentType(
+            json::accessor::ComponentType::F32,
+        )),
+        extensions: Default::default(),
+        extras: Default::default(),
+        type_: Valid(json::accessor::Type::Vec3),
+        min: None,
+        max: None,
+        name: None,
+        normalized: false,
+        sparse: None,
+    });
+
+    let tex_coords = root.push(json::Accessor {
+        buffer_view: Some(buffer_view),
+        // we have to skip the first 6 floats to get to the texture coordinates
+        byte_offset: Some(USize64::from(6 * mem::size_of::<f32>())),
+        count: USize64::from(vertices.len()),
+        component_type: Valid(json::accessor::GenericComponentType(
+            json::accessor::ComponentType::F32,
+        )),
+        extensions: Default::default(),
+        extras: Default::default(),
+        type_: Valid(json::accessor::Type::Vec2),
+        min: None,
+        max: None,
+        name: None,
+        normalized: false,
+        sparse: None,
+    });
+
+    let image = root.push(json::Image {
+        buffer_view: None,
+        uri: Some(image_rel_path.to_string()),
+        mime_type: None,
+        name: Some("gottlieb_flipper_red".to_string()),
+        extensions: None,
+        extras: Default::default(),
+    });
+
+    let sampler = root.push(json::texture::Sampler {
+        mag_filter: None,
+        min_filter: None,
+        wrap_s: Valid(json::texture::WrappingMode::Repeat),
+        wrap_t: Valid(json::texture::WrappingMode::Repeat),
+        extensions: Default::default(),
+        extras: Default::default(),
+        name: None,
+    });
+
+    let texture = root.push(json::Texture {
+        sampler: Some(sampler),
+        source: image,
+        extensions: Default::default(),
+        extras: Default::default(),
+        name: None,
+    });
+
+    let material = root.push(json::Material {
+        pbr_metallic_roughness: json::material::PbrMetallicRoughness {
+            base_color_texture: Some(json::texture::Info {
+                index: texture,
+                tex_coord: 0,
+                extensions: Default::default(),
+                extras: Default::default(),
+            }),
+            // base_color_factor: PbrBaseColorFactor([1.0, 1.0, 1.0, 1.0]),
+            // metallic_factor: StrengthFactor(1.0),
+            // roughness_factor: StrengthFactor(1.0),
+            // metallic_roughness_texture: None,
+            // extensions: Default::default(),
+            // extras: Default::default(),
+            ..Default::default()
+        },
+        // normal_texture: None,
+        // occlusion_texture: None,
+        // emissive_texture: None,
+        // emissive_factor: EmissiveFactor([0.0, 0.0, 0.0]),
+        // alpha_mode: Valid(json::material::AlphaMode::Opaque),
+        // alpha_cutoff: Some(AlphaCutoff(0.5)),
+        // double_sided: false,
+        // extensions: Default::default(),
+        // extras: Default::default(),
+        name: Some("material1".to_string()),
+        ..Default::default()
+    });
+
+    let primitive = json::mesh::Primitive {
+        material: Some(material),
+        attributes: {
+            let mut map = std::collections::BTreeMap::new();
+            map.insert(Valid(json::mesh::Semantic::Positions), positions);
+            //map.insert(Valid(json::mesh::Semantic::Colors(0)), colors);
+            map.insert(Valid(json::mesh::Semantic::Normals), normals);
+            map.insert(Valid(json::mesh::Semantic::TexCoords(0)), tex_coords);
+            map
+        },
+        extensions: Default::default(),
+        extras: Default::default(),
+        indices: None,
+        mode: Valid(json::mesh::Mode::Triangles),
+        targets: None,
+    };
+
+    let mesh = root.push(json::Mesh {
+        extensions: Default::default(),
+        extras: Default::default(),
+        name: None,
+        primitives: vec![primitive],
+        weights: None,
+    });
+
+    let node = root.push(json::Node {
+        mesh: Some(mesh),
+        name: Some(name),
+        ..Default::default()
+    });
+
+    root.push(json::Scene {
+        extensions: Default::default(),
+        extras: Default::default(),
+        name: Some("table1".to_string()),
+        nodes: vec![node],
+    });
+
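+    // Output::Standard writes the scene as readable .gltf JSON with a sidecar .bin file
+    // holding the vertex buffer; Output::Binary packs the JSON and the buffer into a
+    // single .glb container written next to it.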
+    match output {
+        Output::Standard => {
+            let writer = File::create(gltf_file_path)?;
+            json::serialize::to_writer_pretty(writer, &root)?;
+
+            let bin = to_padded_byte_vector(vertices);
+            let mut writer = File::create(bin_path)?;
+            writer.write_all(&bin)?;
+        }
+        Output::Binary => {
+            let json_string = json::serialize::to_string(&root)?;
+            let mut json_offset = json_string.len();
+            align_to_multiple_of_four(&mut json_offset);
+            let glb = gltf::binary::Glb {
+                header: gltf::binary::Header {
+                    magic: *b"glTF",
+                    version: 2,
+                    // N.B., the size of binary glTF file is limited to range of `u32`.
+                    length: (json_offset + buffer_length)
+                        .try_into()
+                        .expect("file size exceeds binary glTF limit"),
+                },
+                bin: Some(Cow::Owned(to_padded_byte_vector(vertices))),
+                json: Cow::Owned(json_string.into_bytes()),
+            };
+            let glb_path = gltf_file_path.with_extension("glb");
+            let writer = std::fs::File::create(glb_path)?;
+            glb.to_writer(writer)?;
+        }
+    }
+    Ok(())
+}
diff --git a/src/vpx/mod.rs b/src/vpx/mod.rs
index 47b4671..97a6952 100644
--- a/src/vpx/mod.rs
+++ b/src/vpx/mod.rs
@@ -68,6 +68,7 @@ pub mod renderprobe;
 pub(crate) mod json;

 // we have to make this public for the integration tests
+mod gltf;
 pub mod lzw;
 mod obj;
 pub(crate) mod wav;
diff --git a/src/vpx/obj.rs b/src/vpx/obj.rs
index 4eaf801..1d82210 100644
--- a/src/vpx/obj.rs
+++ b/src/vpx/obj.rs
@@ -1,6 +1,6 @@
 //! Wavefront OBJ file reader and writer

-use crate::vpx::model::Vertex3dNoTex2;
+use crate::vpx::expanded::ReadMesh;
 use std::error::Error;
 use std::fs::File;
 use std::io::BufRead;
@@ -50,8 +50,7 @@ fn obj_parse_vpx_comment(comment: &str) -> Option<Vec<u8>> {
 /// so we have to negate the z values.
 pub(crate) fn write_obj(
     name: String,
-    vertices: &Vec<([u8; 32], Vertex3dNoTex2)>,
-    indices: &[i64],
+    mesh: &ReadMesh,
     obj_file_path: &PathBuf,
 ) -> Result<(), Box<dyn Error>> {
     let mut obj_file = File::create(obj_file_path)?;
@@ -81,7 +80,11 @@ pub(crate) fn write_obj(
     };
     obj_writer.write(&mut writer, &comment)?;
     let comment = Entity::Comment {
-        content: format!("numVerts: {} numFaces: {}", vertices.len(), indices.len()),
+        content: format!(
+            "numVerts: {} numFaces: {}",
+            mesh.vertices.len(),
+            mesh.indices.len()
+        ),
     };
     obj_writer.write(&mut writer, &comment)?;

@@ -90,49 +93,49 @@ pub(crate) fn write_obj(
     obj_writer.write(&mut writer, &object)?;

     // write all vertices to the wavefront obj file
-    for (_, vertex) in vertices {
+    for v in &mesh.vertices {
         let vertex = Entity::Vertex {
-            x: vertex.x as f64,
-            y: vertex.y as f64,
-            z: vertex.z as f64,
+            x: v.vertex.x as f64,
+            y: v.vertex.y as f64,
+            z: v.vertex.z as f64,
             w: None,
         };
         obj_writer.write(&mut writer, &vertex)?;
     }

     // write all vertex texture coordinates to the wavefront obj file
-    for (_, vertex) in vertices {
+    for v in &mesh.vertices {
         let vertex = Entity::VertexTexture {
-            u: vertex.tu as f64,
-            v: Some(vertex.tv as f64),
+            u: v.vertex.tu as f64,
+            v: Some(v.vertex.tv as f64),
             w: None,
         };
         obj_writer.write(&mut writer, &vertex)?;
     }

     // write all vertex normals to the wavefront obj file
-    for (bytes, vertex) in vertices {
+    for v in &mesh.vertices {
         // if one of the values is NaN we write a special comment with the bytes
-        if vertex.nx.is_nan() || vertex.ny.is_nan() || vertex.nz.is_nan() {
-            println!("NaN found in vertex normal: {:?}", vertex);
-            let data = bytes[12..24].try_into().unwrap();
+        if v.vertex.nx.is_nan() || v.vertex.ny.is_nan() || v.vertex.nz.is_nan() {
+            println!("NaN found in vertex normal: {:?}", v.vertex);
+            let data = v.raw[12..24].try_into().unwrap();
             let content = obj_vpx_comment(&data);
             let comment = Entity::Comment { content };
             obj_writer.write(&mut writer, &comment)?;
         }
         let vertex = Entity::VertexNormal {
-            x: if vertex.nx.is_nan() {
+            x: if v.vertex.nx.is_nan() {
                 0.0
             } else {
-                vertex.nx as f64
+                v.vertex.nx as f64
             },
-            y: if vertex.ny.is_nan() {
+            y: if v.vertex.ny.is_nan() {
                 0.0
             } else {
-                vertex.ny as f64
+                v.vertex.ny as f64
             },
-            z: if vertex.nz.is_nan() {
+            z: if v.vertex.nz.is_nan() {
                 0.0
             } else {
-                vertex.nz as f64
+                v.vertex.nz as f64
             },
         };
         obj_writer.write(&mut writer, &vertex)?;
@@ -141,7 +144,7 @@ pub(crate) fn write_obj(

     // write all faces to the wavefront obj file
     // write in groups of 3
-    for chunk in indices.chunks(3) {
+    for chunk in mesh.indices.chunks(3) {
         // obj indices are 1 based
         // since the z axis is inverted we have to reverse the order of the vertices
         let v1 = chunk[0] + 1;
@@ -250,6 +253,8 @@ pub(crate) struct ObjData {
 #[cfg(test)]
 mod test {
     use super::*;
+    use crate::vpx::expanded::ReadVertex;
+    use crate::vpx::model::Vertex3dNoTex2;
     use pretty_assertions::assert_eq;
     use std::io::BufReader;
     use testdir::testdir;
@@ -318,34 +323,32 @@ f 1/1/1 1/1/1 1/1/1
         let written_obj_path = testdir.join("screw.obj");

         // zip vertices, texture coordinates and normals into a single vec
-        let vertices: Vec<([u8; 32], Vertex3dNoTex2)> = obj_data
+        let vertices: Vec<ReadVertex> = obj_data
             .vertices
             .iter()
             .zip(&obj_data.texture_coordinates)
             .zip(&obj_data.normals)
-            .map(|((v, vt), (vn, _))| {
-                (
-                    [0u8; 32],
-                    Vertex3dNoTex2 {
-                        x: v.0 as f32,
-                        y: v.1 as f32,
-                        z: v.2 as f32,
-                        nx: vn.0 as f32,
-                        ny: vn.1 as f32,
-                        nz: vn.2 as f32,
-                        tu: vt.0 as f32,
-                        tv: vt.1.unwrap_or(0.0) as f32,
-                    },
-                )
+            .map(|((v, vt), (vn, _))| ReadVertex {
+                raw: [0u8; 32],
+                vertex: Vertex3dNoTex2 {
+                    x: v.0 as f32,
+                    y: v.1 as f32,
+                    z: v.2 as f32,
+                    nx: vn.0 as f32,
+                    ny: vn.1 as f32,
+                    nz: vn.2 as f32,
+                    tu: vt.0 as f32,
+                    tv: vt.1.unwrap_or(0.0) as f32,
+                },
             })
             .collect();

-        write_obj(
-            obj_data.name,
-            &vertices,
-            &obj_data.indices,
-            &written_obj_path,
-        )?;
+        let mesh = ReadMesh {
+            vertices,
+            indices: obj_data.indices.clone(),
+        };
+
+        write_obj(obj_data.name, &mesh, &written_obj_path)?;

         // compare both files as strings
         let mut original = std::fs::read_to_string(&screw_path)?;