//! A compressed voxel data library.

use fnv::FnvHashMap;
use crate::MAX_DAG_DEPTH;
use crate::VChildDescriptor;
use crate::Vec3;
use crate::VoxelChunk;
use crate::log_2;
impl VoxelChunk {
    /// Octant offsets of the 8 children of a cube, in child-index order
    /// (x varies fastest, then y, then z).
    const BOX_OFFSETS : [[usize; 3]; 8] = [
        [0, 0, 0],
        [1, 0, 0],
        [0, 1, 0],
        [1, 1, 0],
        [0, 0, 1],
        [1, 0, 1],
        [0, 1, 1],
        [1, 1, 1]
    ];

    /// Insert `voxel` into `s` unless an identical node already exists, returning
    /// its (1-based, pre-reversal) node id. Sharing identical subtrees is what
    /// turns the octree into a DAG.
    fn intern_node(
        s : &mut VoxelChunk,
        voxel : VChildDescriptor,
        dedup : &mut FnvHashMap<VChildDescriptor, i32>
    ) -> i32 {
        if let Some(&id) = dedup.get(&voxel) {
            // this node is a duplicate of an already-stored subtree
            id
        } else {
            // this node is new, so add it
            s.voxels.push(voxel);
            let id = s.voxels.len() as i32;
            dedup.insert(voxel, id);
            id
        }
    }

    /// The recursive builders emit nodes bottom-up (root last). Reverse the node
    /// list so the root sits at index 0, then remap every positive (internal)
    /// child pointer to its post-reversal 1-based id. Non-positive entries
    /// (0 = air, negative = material leaf) are left untouched.
    fn finalize_node_order(chunk : &mut VoxelChunk) {
        chunk.voxels.reverse();
        let n = chunk.voxels.len() as i32;
        for voxel in chunk.voxels.iter_mut() {
            for sv in voxel.sub_voxels.iter_mut() {
                if *sv > 0 {
                    // old ids count from the front; after reversing, the same
                    // node is found counting from the back
                    *sv = n - *sv + 1;
                }
            }
        }
    }

    /// If the entire volume collapsed to a single uniform leaf, the recursion
    /// pushed no nodes at all; materialize a root whose 8 children are that
    /// leaf so the chunk always contains at least one node.
    /// (`root` is the recursion's return value: 0, a negative material, or a
    /// positive node id — in the last case `voxels` is non-empty and this is a
    /// no-op.)
    fn ensure_root(chunk : &mut VoxelChunk, root : i32) {
        if chunk.voxels.is_empty() {
            chunk.voxels.push(VChildDescriptor { sub_voxels : [root; 8] });
        }
    }

    /// Process a 3D array (`data` with dimensions `dim`) into a SVDAG.
    ///
    /// `data` is indexed as `data[x + dim[0] * (y + dim[1] * z)]`; a cell value
    /// `v` becomes the material leaf `-|v|` (so 0 stays air). Regions outside
    /// `dim` (the cube is padded up to a power of two) are air.
    pub fn from_dense_voxels(data : &[i32], dim : [usize; 3]) -> Self {
        assert!(dim[0] > 0 && dim[1] > 0 && dim[2] > 0);

        // smallest power-of-two cube that covers `dim`; the inner `.max(1)`
        // guards the 1x1x1 case, which would otherwise evaluate log_2(0)
        let max_dim = dim.iter().cloned().max().unwrap_or(0);
        let depth = log_2(max_dim.saturating_sub(1).max(1)) as usize + 1;
        assert!(depth < MAX_DAG_DEPTH, "Depth is too large: {} >= {}", depth, MAX_DAG_DEPTH);
        let size = 1 << depth;

        /// Build the subtree covering the `size`-cube at `min`, returning a child
        /// descriptor entry: 0 for air, `-m` for a uniform material `m`, or a
        /// positive node id for an internal node.
        fn recursive_create_dense(
            s : &mut VoxelChunk, min : [usize; 3], size : usize,
            data : &[i32], dim : [usize; 3],
            dedup : &mut FnvHashMap<VChildDescriptor, i32>
        ) -> i32 {
            if min[0] >= dim[0] || min[1] >= dim[1] || min[2] >= dim[2] {
                // air if the voxel does not intersect the voxel data
                return 0;
            }
            if size <= 1 {
                // once we reach size 1, take the material from the data
                let v = data[min[0] + dim[0] * (min[1] + dim[1] * min[2])];
                // negate so a leaf can never be mistaken for a (positive) node
                // id, which would produce fractal self-referencing graphs
                return -v.abs();
            }

            let mut voxel = VChildDescriptor { sub_voxels : [0; 8] };
            let half_size = size >> 1;
            let mut is_uniform = true;
            for i in 0..8 {
                let bmin = [
                    min[0] + VoxelChunk::BOX_OFFSETS[i][0] * half_size,
                    min[1] + VoxelChunk::BOX_OFFSETS[i][1] * half_size,
                    min[2] + VoxelChunk::BOX_OFFSETS[i][2] * half_size
                ];
                voxel.sub_voxels[i] = recursive_create_dense(s, bmin, half_size, data, dim, dedup);
                if voxel.sub_voxels[i] != voxel.sub_voxels[0] || voxel.sub_voxels[i] > 0 {
                    // the subvoxels are not all the same leaf node, so this
                    // voxel is not uniform (internal nodes never collapse)
                    is_uniform = false;
                }
            }
            if is_uniform {
                // collapse 8 identical leaves into that single leaf
                return voxel.sub_voxels[0];
            }
            VoxelChunk::intern_node(s, voxel, dedup)
        }

        let mut chunk = VoxelChunk::empty();
        chunk.voxels.clear();
        // we build a list of unique voxel subtrees and store them in here
        let mut dedup = FnvHashMap::default();
        let root = recursive_create_dense(&mut chunk, [0, 0, 0], size, data, dim, &mut dedup);
        Self::ensure_root(&mut chunk, root);
        Self::finalize_node_order(&mut chunk);
        chunk
    }

    /// Process an implicit 3D array into a DAG.
    ///
    /// `implicit(x, y, z)` is evaluated once per lattice cell of the
    /// `2^depth`-cube; its value `v` becomes the material leaf `-|v|`
    /// (0 = air).
    pub fn from_implicit_array<F : FnMut(usize, usize, usize) -> i32>(depth : usize, mut implicit : F) -> Self {
        assert!(depth < MAX_DAG_DEPTH, "Depth is too large: {} >= {}", depth, MAX_DAG_DEPTH);
        let size = 1 << depth;

        /// Build the subtree covering the `size`-cube at `min`; return value as
        /// in `recursive_create_dense`.
        fn recursive_create_dense_implicit<F : FnMut(usize, usize, usize) -> i32>(
            s : &mut VoxelChunk, min : [usize; 3], size : usize, implicit : &mut F,
            dedup : &mut FnvHashMap<VChildDescriptor, i32>
        ) -> i32 {
            if size <= 1 {
                // once we reach size 1, evaluate the material at the implicit surface
                let v = implicit(min[0], min[1], min[2]);
                // negate so a leaf can never be mistaken for a node id
                return -v.abs();
            }

            let mut voxel = VChildDescriptor { sub_voxels : [0; 8] };
            let half_size = size >> 1;
            let mut is_uniform = true;
            for i in 0..8 {
                let bmin = [
                    min[0] + VoxelChunk::BOX_OFFSETS[i][0] * half_size,
                    min[1] + VoxelChunk::BOX_OFFSETS[i][1] * half_size,
                    min[2] + VoxelChunk::BOX_OFFSETS[i][2] * half_size
                ];
                voxel.sub_voxels[i] = recursive_create_dense_implicit(s, bmin, half_size, implicit, dedup);
                if voxel.sub_voxels[i] != voxel.sub_voxels[0] || voxel.sub_voxels[i] > 0 {
                    // the subvoxels are not all the same leaf node, so this voxel is not uniform
                    is_uniform = false;
                }
            }
            if is_uniform {
                return voxel.sub_voxels[0];
            }
            VoxelChunk::intern_node(s, voxel, dedup)
        }

        let mut chunk = VoxelChunk::empty();
        chunk.voxels.clear();
        // we build a list of unique voxel subtrees and store them in here
        let mut dedup = FnvHashMap::default();
        let root = recursive_create_dense_implicit(&mut chunk, [0, 0, 0], size, &mut implicit, &mut dedup);
        Self::ensure_root(&mut chunk, root);
        Self::finalize_node_order(&mut chunk);
        chunk
    }

    /// Process a distance equation into a DAG.
    ///
    /// `implicit(x, y, z)` is sampled at cell centers in normalized `[0, 1]`
    /// coordinates (world coordinates times `1 / 2^depth`). A cell whose
    /// sampled distance exceeds its bounding radius is culled as air without
    /// recursing; unit cells within the bound become material `-1`.
    pub fn from_distance_equation<F : FnMut(f32, f32, f32) -> f32>(depth : usize, mut implicit : F) -> Self {
        assert!(depth < MAX_DAG_DEPTH, "Depth is too large: {} >= {}", depth, MAX_DAG_DEPTH);
        let size = 1 << depth;

        /// Build the subtree covering the `size`-cube at `min` (world units),
        /// with `rscale` converting world units to the normalized [0,1] domain.
        fn recurse_distance_equation<F : FnMut(f32, f32, f32) -> f32>(
            s : &mut VoxelChunk, min : [usize; 3], size : usize, implicit : &mut F, rscale : f32,
            dedup : &mut FnvHashMap<VChildDescriptor, i32>
        ) -> i32 {
            const SQRT_THREE : f32 = 1.732050807568877293527446341505872366942805253810380628055;

            // distance sampled at the center of this cell
            let v = implicit(
                rscale * (min[0] as f32 + 0.5 * size as f32),
                rscale * (min[1] as f32 + 0.5 * size as f32),
                rscale * (min[2] as f32 + 0.5 * size as f32)
            );
            // NOTE(review): this is the FULL diagonal (side * sqrt(3)), twice
            // the center-to-corner distance, i.e. a deliberately conservative
            // bound — confirm before tightening to 0.5 * side * sqrt(3)
            let bounding_radius = rscale * size as f32 * SQRT_THREE;

            if size <= 1 {
                // once we reach size 1, check if the object intersects this cell
                return if v < bounding_radius { -1 } else { 0 };
            }
            if v > bounding_radius {
                // the cell cannot intersect the surface: cull the whole subtree as air
                return 0;
            }

            let mut voxel = VChildDescriptor { sub_voxels : [0; 8] };
            let half_size = size >> 1;
            let mut is_uniform = true;
            for i in 0..8 {
                let bmin = [
                    min[0] + VoxelChunk::BOX_OFFSETS[i][0] * half_size,
                    min[1] + VoxelChunk::BOX_OFFSETS[i][1] * half_size,
                    min[2] + VoxelChunk::BOX_OFFSETS[i][2] * half_size
                ];
                voxel.sub_voxels[i] = recurse_distance_equation(s, bmin, half_size, implicit, rscale, dedup);
                if voxel.sub_voxels[i] != voxel.sub_voxels[0] || voxel.sub_voxels[i] > 0 {
                    // the subvoxels are not all the same leaf node, so this voxel is not uniform
                    is_uniform = false;
                }
            }
            if is_uniform {
                return voxel.sub_voxels[0];
            }
            VoxelChunk::intern_node(s, voxel, dedup)
        }

        let mut chunk = VoxelChunk::empty();
        chunk.voxels.clear();
        // we build a list of unique voxel subtrees and store them in here
        let mut dedup : FnvHashMap<VChildDescriptor, i32> = FnvHashMap::default();
        let root = recurse_distance_equation(&mut chunk, [0, 0, 0], size, &mut implicit, 1.0 / (size as f32), &mut dedup);
        Self::ensure_root(&mut chunk, root);
        Self::finalize_node_order(&mut chunk);
        chunk
    }

    /// Process an intersection test into a DAG.
    ///
    /// `intersect_test(center, half_extent)` is called with the cell center in
    /// normalized `[0, 1]` coordinates and the cell's half side length; it
    /// returns whether the object intersects that cell. Non-intersecting cells
    /// are culled as air without recursing; intersecting unit cells become
    /// material `-1`.
    pub fn from_intersection_test<F : FnMut(Vec3, f32) -> bool>(depth : usize, mut intersect_test : F) -> Self {
        assert!(depth < MAX_DAG_DEPTH, "Depth is too large: {} >= {}", depth, MAX_DAG_DEPTH);
        let size = 1 << depth;

        /// Build the subtree covering the `size`-cube at `min` (world units),
        /// with `rscale` converting world units to the normalized [0,1] domain.
        fn recurse_intersection_test<F : FnMut(Vec3, f32) -> bool>(
            s : &mut VoxelChunk, min : [usize; 3], size : usize, intersect_test : &mut F, rscale : f32,
            dedup : &mut FnvHashMap<VChildDescriptor, i32>
        ) -> i32 {
            let intersects = intersect_test(
                Vec3::new(
                    rscale * (min[0] as f32 + 0.5 * size as f32),
                    rscale * (min[1] as f32 + 0.5 * size as f32),
                    rscale * (min[2] as f32 + 0.5 * size as f32)
                ),
                0.5 * rscale * size as f32
            );

            if size <= 1 {
                // once we reach size 1, a hit becomes a solid material leaf
                return if intersects { -1 } else { 0 };
            }
            if !intersects {
                // the cell does not intersect the object: cull the whole subtree as air
                return 0;
            }

            let mut voxel = VChildDescriptor { sub_voxels : [0; 8] };
            let half_size = size >> 1;
            let mut is_uniform = true;
            for i in 0..8 {
                let bmin = [
                    min[0] + VoxelChunk::BOX_OFFSETS[i][0] * half_size,
                    min[1] + VoxelChunk::BOX_OFFSETS[i][1] * half_size,
                    min[2] + VoxelChunk::BOX_OFFSETS[i][2] * half_size
                ];
                voxel.sub_voxels[i] = recurse_intersection_test(s, bmin, half_size, intersect_test, rscale, dedup);
                if voxel.sub_voxels[i] != voxel.sub_voxels[0] || voxel.sub_voxels[i] > 0 {
                    // the subvoxels are not all the same leaf node, so this voxel is not uniform
                    is_uniform = false;
                }
            }
            if is_uniform {
                return voxel.sub_voxels[0];
            }
            VoxelChunk::intern_node(s, voxel, dedup)
        }

        let mut chunk = VoxelChunk::empty();
        chunk.voxels.clear();
        // we build a list of unique voxel subtrees and store them in here
        let mut dedup : FnvHashMap<VChildDescriptor, i32> = FnvHashMap::default();
        let root = recurse_intersection_test(&mut chunk, [0, 0, 0], size, &mut intersect_test, 1.0 / (size as f32), &mut dedup);
        Self::ensure_root(&mut chunk, root);
        Self::finalize_node_order(&mut chunk);
        chunk
    }
}