Ported split_poly_list and find_best_split
cmbasnett committed Apr 10, 2024
2 parents 0ffefa2 + 887fb03 commit b7f21be
Showing 3 changed files with 274 additions and 6 deletions.
217 changes: 216 additions & 1 deletion src/bsp.rs
@@ -45,7 +45,7 @@ pub enum EPolyNodeFilter {
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum EBspOptimization {
pub enum EBspOptimization {
Lame,
Good,
Optimal,
@@ -1011,4 +1011,219 @@ fn bsp_build(model: &UModel, optimization: EBspOptimization, balance: u8, portal
/// Filter all relevant world polys through the brush.
fn filter_world_through_brush(model: &mut UModel, brush: &mut UModel, csg_operation: ECsgOper, node_index: usize, bounding_sphere: &FSphere) {
    todo!()
}


/// Pick a splitter poly, then split a pool of polygons into front and back polygons and
/// recurse.
///
/// `parent_node_index` is the parent BSP node, or `None` if this is the root node.
/// `node_place` indicates whether this is the front or back child of the parent
/// (undefined if `parent_node_index` is `None`).
pub fn split_poly_list(
model: &mut UModel,
parent_node_index: Option<usize>,
node_place: ENodePlace,
polys: &mut Vec<FPoly>,
poly_indices: &[usize],
optimization: EBspOptimization,
balance: u8,
portal_bias: u8,
rebuild_simple_polys: bool
) {
    // Extra capacity to account for big EdPolys that get split up.
let num_polys_to_alloc = poly_indices.len() + 8 + poly_indices.len() / 4;
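    // For example (hypothetical count): 16 input poly indices reserve 16 + 8 + 4 = 28 slots.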

let mut front_poly_indices = Vec::with_capacity(num_polys_to_alloc);
let mut back_poly_indices = Vec::with_capacity(num_polys_to_alloc);
// TODO: make a slice that iterates over only the indices passed in.
let split_poly_index = find_best_split_recursive(polys, poly_indices, optimization, balance, portal_bias);

// Add the splitter poly to the Bsp with either a new BspSurf or an existing one.
if rebuild_simple_polys {
if let Some(split_poly_index) = split_poly_index {
let split_poly = &mut polys[split_poly_index];
            split_poly.link = None;
}
}

    let split_poly = &polys[split_poly_index.unwrap()]; // TODO: if the split-poly function is always assumed to return a valid index, just don't wrap it in an Option.
let our_node_index = bsp_add_node(model, parent_node_index.unwrap(), node_place, EBspNodeFlags::empty(), split_poly);
let mut plane_node_index = our_node_index;

// Now divide all polygons in the pool into (A) polygons that are
// in front of Poly, and (B) polygons that are in back of Poly.
// Coplanar polys are inserted immediately, before recursing.

    // If any polygons are split by Poly, we ignore the original poly,
// split it into two polys, and add two new polys to the pool.

for poly_index in poly_indices {
if *poly_index == split_poly_index.unwrap() {
continue;
}

let [ed_poly, split_poly] = polys.get_many_mut([*poly_index, split_poly_index.unwrap()]).unwrap();
let split_result = ed_poly.split_with_plane(split_poly.vertices[0], split_poly.normal, false);
match split_result {
            ESplitType::Coplanar => {
                if rebuild_simple_polys {
                    ed_poly.link = Some(model.surfaces.len() - 1);
                }
                plane_node_index = bsp_add_node(model, plane_node_index, ENodePlace::Plane, EBspNodeFlags::empty(), ed_poly);
            }
ESplitType::Front => {
front_poly_indices.push(*poly_index);
}
ESplitType::Back => {
back_poly_indices.push(*poly_index);
}
ESplitType::Split(mut front_poly, mut back_poly) => {
// Create front & back nodes.
// If newly-split polygons have too many vertices, break them up in half.
                if front_poly.vertices.len() >= FPOLY_VERTEX_THRESHOLD {
                    let split_poly = front_poly.split_in_half().unwrap();
                    // New polys are appended to the pool, so record their indices before pushing.
                    front_poly_indices.extend([polys.len(), polys.len() + 1]);
                    polys.extend([front_poly, split_poly]);
                } else {
                    front_poly_indices.push(polys.len());
                    polys.push(front_poly);
                }

                if back_poly.vertices.len() >= FPOLY_VERTEX_THRESHOLD {
                    let split_poly = back_poly.split_in_half().unwrap();
                    back_poly_indices.extend([polys.len(), polys.len() + 1]);
                    polys.extend([back_poly, split_poly]);
                } else {
                    back_poly_indices.push(polys.len());
                    polys.push(back_poly);
                }
}
}
}

if !front_poly_indices.is_empty() {
split_poly_list(model, Some(our_node_index), ENodePlace::Front, polys, &front_poly_indices, optimization, balance, portal_bias, rebuild_simple_polys);
}

if !back_poly_indices.is_empty() {
split_poly_list(model, Some(our_node_index), ENodePlace::Back, polys, &back_poly_indices, optimization, balance, portal_bias, rebuild_simple_polys);
}

}

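/// Find the index in `polys` of the best splitter polygon, or `None` if no candidate is found.
///
/// Each candidate is scored by how many other polys its plane would split and how evenly it
/// divides the remainder into front and back (weighted by `balance`); `portal_bias` discounts
/// the split penalty for portal candidates.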
pub fn find_best_split(polys: &[FPoly], optimization: EBspOptimization, balance: u8, portal_bias: u8) -> Option<usize> {
let poly_indices = (0..polys.len()).collect::<Vec<usize>>();
find_best_split_recursive(polys, &poly_indices, optimization, balance, portal_bias)
}

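/// Like [`find_best_split`], but restricted to the polys referenced by `poly_indices`, so that
/// `split_poly_list` can reuse it on the front and back sub-pools while recursing.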
fn find_best_split_recursive(polys: &[FPoly], poly_indices: &[usize], optimization: EBspOptimization, balance: u8, portal_bias: u8) -> Option<usize> {
let portal_bias = portal_bias as f32 / 100.0;

// No need to test if only one poly.
if poly_indices.len() == 1 {
        return Some(poly_indices[0]);
}

    let step = match optimization {
        EBspOptimization::Optimal => 1,
        EBspOptimization::Good => 1.max(poly_indices.len() / 20),
        EBspOptimization::Lame => 1.max(poly_indices.len() / 4),
    };
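    // For example (hypothetical pool size): with 100 candidate polys, Optimal tests every
    // candidate, Good tests every 5th (~20 candidates), and Lame tests every 25th (~4 candidates).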

// See if there are any non-semisolid polygons here.
    let all_semi_solids = poly_indices.iter().all(|&poly_index| polys[poly_index].poly_flags.contains(EPolyFlags::NoAddToBSP));

// Search through all polygons in the pool and find:
// A. The number of splits each poly would make.
// B. The number of front and back nodes the polygon would create.
// C. Number of coplanars.
let mut best_poly_index: Option<usize> = None;
let mut best_score = 0.0f32;

    // Iterate over the candidate polys with the given step.
    for i in (0..poly_indices.len()).step_by(step) {
        // Within this step, skip over polys flagged NoAddToBSP (unless they are portals);
        // if the whole pool is flagged, consider them all.
        let mut index = i;

        if !all_semi_solids {
            while index < (i + step)
                && index < poly_indices.len()
                && polys[poly_indices[index]].poly_flags.contains(EPolyFlags::NoAddToBSP)
                && !polys[poly_indices[index]].poly_flags.contains(EPolyFlags::Portal)
            {
                index += 1;
            }
        }

        if index >= (i + step) || index >= poly_indices.len() {
            continue;
        }

        let poly_index = poly_indices[index];
        let poly = &polys[poly_index];
let mut splits = 0;
let mut front = 0;
let mut back = 0;
let mut coplanars = 0;

for j in 0..poly_indices.len() {
let other_poly = &polys[poly_indices[j]];
            if poly_indices[j] == poly_index {
continue;
}
match other_poly.split_with_plane_fast(&poly.vertices[0], &poly.normal) {
ESplitType::Coplanar => {
coplanars += 1;
}
ESplitType::Front => {
front += 1;
},
ESplitType::Back => {
back += 1;
},
ESplitType::Split(_, _) => {
// Disfavor splitting polys that are zone portals.
if !other_poly.poly_flags.contains(EPolyFlags::Portal) {
splits += 1;
} else {
splits += 16;
}
},
            }
        }

// Score optimization: minimize cuts vs. balance tree (as specified in BSP Rebuilder dialog)
let score = {
let mut score = (100.0 - balance as f32) * splits as f32 + (balance as f32) * ((front - back) as f32).abs();
if poly.poly_flags.contains(EPolyFlags::Portal) {
// PortalBias enables level designers to control the effect of Portals on the BSP.
// This effect can range from 0.0 (ignore portals), to 1.0 (portals cut everything).
//
// In builds prior to this (since the 221 build dating back to 1/31/1999) the bias
// has been 1.0 causing the portals to cut the BSP in ways that will potentially
// degrade level performance, and increase the BSP complexity.
//
// By setting the bias to a value between 0.3 and 0.7 the positive effects of
// the portals are preserved without giving them unreasonable priority in the BSP.
//
// Portals should be weighted high enough in the BSP to separate major parts of the
// level from each other (pushing entire rooms down the branches of the BSP), but
// should not be so high that portals cut through adjacent geometry in a way that
// increases complexity of the room being (typically, accidentally) cut.
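                //
                // Illustrative numbers (not from the original source): with balance = 50,
                // splits = 4 and portal_bias = 0.5, the split term above contributes
                // (100 - 50) * 4 = 200 to the score, and the line below subtracts
                // 200 * 0.5 = 100, forgiving half of that penalty for the portal poly.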
score -= (100.0 - balance as f32) * splits as f32 * portal_bias;
}
score
};

println!("score: {}", score);

if score < best_score || best_poly_index.is_none() {
best_poly_index = Some(poly_index);
best_score = score;
}
    }

best_poly_index
}
4 changes: 2 additions & 2 deletions src/fpoly.rs
@@ -87,8 +87,8 @@ bitflags! {
/// Occludes even if PF_NoOcclude.
const Occlude = 0x80000000;

// PF_NoAddToBSP = PF_EdCut | PF_EdProcessed | PF_Selected | PF_Memorized,
const NoAddToBSP = 0x80000000 | 0x40000000 | 0x02000000 | 0x01000000;
        const NoAddToBSP = 0x80000000 | 0x40000000 | 0x02000000 | 0x01000000; /* PF_EdCut | PF_EdProcessed | PF_Selected | PF_Memorized */
        const AddLast = 0x00000028; /* PF_Semisolid | PF_NotSolid */
}
}

59 changes: 56 additions & 3 deletions tests/bsp_tests.rs
@@ -1,6 +1,6 @@
use bdk_rs::coords::FModelCoords;
use bdk_rs::math::FVector;
use bdk_rs::bsp::{merge_coplanars, try_to_merge, bsp_add_node};
use bdk_rs::bsp::{EBspOptimization, merge_coplanars, try_to_merge, bsp_add_node, find_best_split};
use bdk_rs::fpoly::{self, FPoly};
use bdk_rs::model::{self, EBspNodeFlags, UModel};

@@ -173,6 +173,60 @@ fn merge_coplanars_quad_grid_with_skipped_index_test() {
assert_eq!(merged_polys[2].vertices.to_vec(), polys[3].vertices.to_vec());
}

#[test]
fn find_best_split_single_poly_test() {
    // Arrange
let polys = vec![
FPoly::from_vertices(&vec![
FVector::new(0.0, 0.0, 0.0),
FVector::new(0.0, 1.0, 0.0),
FVector::new(1.0, 1.0, 0.0),
FVector::new(1.0, 0.0, 0.0),
])
];

// Act
let split_index = find_best_split(&polys, EBspOptimization::Optimal, 50, 50);

// Assert
assert_eq!(split_index, Some(0));
}

#[test]
fn find_best_split_all_semisolids_test() {
}

#[test]
fn find_best_split_test() {
// Arrange
// Stack 3 identical polygons with 1 unit of difference between them along Z.
let polys = vec![
FPoly::from_vertices(&[
FVector::new(0.0, 0.0, 0.0),
FVector::new(0.0, 1.0, 0.0),
FVector::new(1.0, 1.0, 0.0),
FVector::new(1.0, 0.0, 0.0)]),
FPoly::from_vertices(&[
FVector::new(0.0, 0.0, 1.0),
FVector::new(0.0, 1.0, 1.0),
FVector::new(1.0, 1.0, 1.0),
FVector::new(1.0, 0.0, 1.0)]),
FPoly::from_vertices(&[
FVector::new(0.0, 0.0, 2.0),
FVector::new(0.0, 1.0, 2.0),
FVector::new(1.0, 1.0, 2.0),
FVector::new(1.0, 0.0, 2.0)]),
];

// Act
let split_index = find_best_split(&polys, EBspOptimization::Optimal, 50, 50);

// Assert
// The middle polygon should be the best split polygon because it
// has one polygon behind it and one polygon in front of it.
    assert_eq!(split_index, Some(1));
}


#[test]
fn bsp_add_node_root_node() {
@@ -190,5 +244,4 @@ fn bsp_add_node_root_node() {
);

println!("{:?}", model.nodes);

}
}
