init_code
0     pygarment/meshgen/__init__.py        Normal file
1630  pygarment/meshgen/boxmeshgen.py      Normal file
      (File diff suppressed because it is too large)
413   pygarment/meshgen/datasim_utils.py   Normal file
@@ -0,0 +1,413 @@
"""Routines to run cloth simulation"""

# Basic
import time
import multiprocessing
import platform
import signal
from pathlib import Path

# BoxMeshGen
import pygarment.meshgen.boxmeshgen as bmg
from pygarment.meshgen.boxmeshgen import BoxMesh
from pygarment.meshgen.sim_config import PathCofig

# Warp simulation
from pygarment.meshgen.simulation import run_sim


def batch_sim(data_path, output_path, dataset_props,
              run_default_body=False, num_samples=None, caching=False, force_restart=False):
    """
    Performs pattern simulation for each example in the dataset
    given by dataset_props.
    Batch processing is automatically resumed
    from the last unprocessed datapoint unless a restart is forced. The last
    example on the processed list is assumed to have caused the failure, so it can later be found among the failure cases.

    Parameters:
        * data_path -- path to the folder with patterns (for the given body type)
        * output_path -- path to the folder with the simulated dataset
        * dataset_props -- dataset properties. Properties have to be of the custom data_config.Properties() class and contain
            * dataset folder (inside data_path)
            * type of dataset structure (with/without subfolders for patterns)
            * list of processed samples if processing of the dataset was already attempted
            * simulation parameters
            * rendering parameters
            Other needed properties will be filled with default values if the corresponding sections
            are not found in the props object
        * run_default_body -- run the dataset on the default body (disabled by default)
        * num_samples -- number of (unprocessed) samples from the dataset to process in this run. If None, runs over all unprocessed samples
        * caching -- enables caching of every frame of the simulation (disabled by default)
        * force_restart -- force restarting the batch processing even if resume conditions are met.
    """
    # ----- Init -----
    if 'frozen' in dataset_props and dataset_props['frozen']:
        # avoid accidental re-runs of data
        print('Warning: dataset is frozen, processing is skipped')
        return True

    resume = init_sim_props(dataset_props, batch_run=True, force_restart=force_restart)
    body_type = 'default_body' if run_default_body else 'random_body'
    data_props_file = output_path / f'dataset_properties_{body_type}.yaml'
    pattern_names = _get_pattern_names(data_path)

    # Simulate every template
    count = 0
    for pattern_name in pattern_names:
        # Skip processed cases -- in case of resume. The first condition avoids evaluating the second one on False =)
        if resume and pattern_name in dataset_props['sim']['stats']['processed']:
            print(f'Skipped as already processed {pattern_name}')
            continue

        dataset_props['sim']['stats']['processed'].append(pattern_name)
        _serialize_props_with_sim_stats(dataset_props,
                                        data_props_file)  # save info on processed files before a potential crash

        try:
            paths = PathCofig(
                in_element_path=data_path / pattern_name,
                out_path=output_path,
                in_name=pattern_name,
                body_name=dataset_props['body_default'],
                samples_name=dataset_props['body_samples'],
                default_body=run_default_body
            )
        except BaseException as e:
            # Not all files are available
            print("***Pattern loading failed (paths)***")
            dataset_props.add_fail('sim', 'crashes', pattern_name)
        else:
            template_simulation(paths, dataset_props, caching=caching)

        count += 1  # count actively processed cases
        if num_samples is not None and count >= num_samples:  # only process the requested number of samples
            break

    # Fin
    print(f'\nFinished batch of {data_path}')
    try:
        if len(dataset_props['sim']['stats']['processed']) >= len(pattern_names):
            # processing successfully finished -- no need to resume later
            del dataset_props['sim']['stats']['processed']
            dataset_props['frozen'] = True
            process_finished = True
        else:
            process_finished = False
    except KeyError:
        print('KeyError -processed-')
        process_finished = True

    # Logs
    _serialize_props_with_sim_stats(dataset_props, data_props_file)

    return process_finished
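
# A minimal driver sketch for batch_sim -- illustrative only. It assumes the
# Properties class from pygarment.data_config and a 'dataset_properties.yaml'
# inside the dataset folder; the exact file layout here is hypothetical:
#
#   from pathlib import Path
#   from pygarment.data_config import Properties
#   from pygarment.meshgen.datasim_utils import batch_sim
#
#   data_path = Path('./datasets/tee_dataset')        # one subfolder per pattern
#   output_path = Path('./output/tee_dataset_sim')
#   props = Properties(data_path / 'dataset_properties.yaml')
#
#   finished = batch_sim(data_path, output_path, props, num_samples=10)
#   if not finished:
#       batch_sim(data_path, output_path, props)      # resume the remaining samples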


def resim_fails(data_path, output_path, dataset_props,
                run_default_body=False, caching=False):
    """Resimulate failure cases -- maybe some of them will get fixed"""

    print('************** RESIMULATING FAILS ****************')

    sim_stats = dataset_props['sim']['stats']

    # Collect fails and remove them from the fails lists, if any
    fails = sim_stats['fails']
    to_resim = set()
    for key in fails:
        if key not in ['cloth_body_intersection', 'cloth_self_intersection']:
            for el in fails[key]:
                to_resim.add(el)
            fails[key] = []  # NOTE: If nothing was added for this key, it was already an empty array (nothing changed)

    if not len(to_resim):
        # Return the previous finished state
        return dataset_props['frozen'] if 'frozen' in dataset_props else False

    if 'processed' not in sim_stats:
        sim_stats['processed'] = _get_pattern_names(data_path)
        dataset_props['frozen'] = False

    # Remove fails from processed to trigger re-simulation
    for sample in to_resim:
        sim_stats['processed'].remove(sample)

    # Start the simulation again
    finished = batch_sim(
        data_path, output_path, dataset_props,
        run_default_body=run_default_body,
        num_samples=len(to_resim) + 1,
        caching=caching,
        force_restart=False
    )

    return finished
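
# Toy example of the collection step in resim_fails above: intersection fails
# are kept, every other category is queued for re-simulation and cleared:
#
#   fails = {'crashes': ['tee_1'], 'static_equilibrium': ['tee_2'],
#            'cloth_body_intersection': ['tee_3']}
#   -> to_resim == {'tee_1', 'tee_2'}
#   -> fails == {'crashes': [], 'static_equilibrium': [],
#                'cloth_body_intersection': ['tee_3']}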


# ------- Utils -------
def init_sim_props(props, batch_run=False, force_restart=False):
    """
    Add default config values if not given in props & clean up stats if not resuming previous processing.
    Returns a flag indicating whether the current simulation resumes the last one
    """
    if 'sim' not in props:
        props.set_section_config(
            'sim',
            max_sim_steps=1000,  # affects speed
            max_meshgen_time=20,  # in seconds, affects speed
            max_frame_time=15,  # in seconds, affects speed
            max_sim_time=1500,  # in seconds, affects speed
            zero_gravity_steps=10,  # 0.01 # depends on the units used, affects speed
            static_threshold=0.03,  # affects speed
            non_static_percent=1.5,  # affects speed
            max_body_collisions=0,
            max_self_collisions=0,
            resolution_scale=1.0,  # affects speed
            ground=False,  # Do not add a floor, s.t. the garment falls infinitely if it falls
        )

    if 'material' not in props['sim']['config']:
        props['sim']['config']['material'] = {
            'garment_tri_ka': 10000.0,

            'garment_edge_ke': 1.0,
            'garment_tri_ke': 10000.0,
            'spring_ke': 50000.0,

            'garment_edge_kd': 10.0,
            'garment_tri_kd': 1.0,
            'spring_kd': 10.0,

            'fabric_density': 1.0,
            'fabric_thickness': 0.1,
            'fabric_friction': 0.5
        }

    if 'options' not in props['sim']['config']:
        props['sim']['config']['options'] = {
            'enable_particle_particle_collisions': False,
            'enable_triangle_particle_collisions': True,
            'enable_edge_edge_collisions': True,
            'enable_body_collision_filters': True,

            'enable_attachment_constraint': True,
            'attachment_frames': 400,
            'attachment_label_names': ['lower_interface'],
            'attachment_stiffness': [1000.],
            'attachment_damping': [10.],

            'global_damping_factor': 0.25,
            'global_damping_effective_velocity': 0.0,
            'global_max_velocity': 25.0,

            'enable_global_collision_filter': True,
            'enable_cloth_reference_drag': False,
            'cloth_reference_margin': 0.1,

            # FIXME Re-writes mesh references, causing occasional CUDA errors when referencing meshes other than the body
            'enable_body_smoothing': False,
            'smoothing_total_smoothing_factor': 1.0,
            'smoothing_recover_start_frame': 150,
            'smoothing_num_steps': 100,
            'smoothing_frame_gap_between_steps': 1,

            'body_collision_thickness': 0.25,
            'body_friction': 0.5
        }

    if 'render' not in props:
        # init with defaults
        props.set_section_config(
            'render',
            resolution=[800, 800],
            sides=['front', 'back'],
            front_camera_location=None,
            uv_texture={
                'seam_width': 0.5,
                'dpi': 1500,
                'fabric_grain_texture_path': None,
                'fabric_grain_resolution': 5,
            }
        )

    if batch_run and 'processed' in props['sim']['stats'] and not force_restart:
        # Resuming existing batch processing -- do not clean the stats.
        # Assume the last processed example caused the failure
        last_processed = props['sim']['stats']['processed'][-1]

        if not any([(name in last_processed) or (last_processed in name) for name in
                    props['render']['stats']['render_time']]):
            # crash detected -- the last example does not appear in the stats
            if last_processed not in props['sim']['stats']['fails']['crashes']:
                # add to simulation failures
                # Remove the last example from processed if it did not crash
                if last_processed not in props['sim']['stats']['stop_over']:
                    props['sim']['stats']['processed'].pop()
                else:
                    # Already passed here once -> add as a crash
                    props['sim']['stats']['fails']['crashes'].append(last_processed)

            props['sim']['stats']['stop_over'].append(last_processed)  # indicate resuming dataset simulation

        return True

    # else new life
    # Prepare cumulative stats
    props.set_section_stats('sim',
                            fails={},
                            meshgen_time={},
                            sim_time={},
                            spf={},
                            fin_frame={},
                            face_count={},
                            body_collisions={},
                            self_collisions={})
    props['sim']['stats']['fails'] = {
        'crashes': [],
        'cloth_body_intersection': [],
        'cloth_self_intersection': [],
        'static_equilibrium': [],
        'fast_finish': [],
        'pattern_loading': [],
        'multi_stitching': [],
        'gt_edges_creation': []
    }

    props.set_section_stats('render', render_time={})

    if batch_run:  # track batch processing
        props.set_section_stats('sim', processed=[], stop_over=[])

    return False


def template_simulation(paths: PathCofig, props, caching=False):
    """
    Simulate the given template within the given scene & save log files
    """
    sim_props = props['sim']
    res = sim_props['config']['resolution_scale']

    garment = BoxMesh(paths.in_g_spec, res)

    print('\n-----------------------------'
          '\nLoading garment: ', garment.name)

    meshgen_start_time = time.time()
    timeout_after = int(get_dict_default_value(sim_props['config'], 'max_meshgen_time', 20))

    try:
        _load_boxmesh_timeout(garment, timeout_after)
    except TimeoutError as e:
        print(e)
        failure_case = 'meshgen-timeout'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.PatternLoadingError as e:
        # record the error and skip subsequent processing
        print(e)
        failure_case = 'pattern_loading'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.DegenerateTrianglesError as e:
        print(e)
        failure_case = 'degenerate_triangles'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.MultiStitchingError as e:
        print(e)
        failure_case = 'multi_stitching'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.NormError as e:
        print(e)
        failure_case = 'norm_error'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.StitchingError as e:
        print(e)
        failure_case = 'stitching_error'
        props.add_fail('sim', failure_case, garment.name)
    except BaseException as e:  # Catch the rest of the exceptions
        print("***Pattern loading failed due to unknown error***")
        print(e)
        failure_case = 'crashes'
        props.add_fail('sim', failure_case, garment.name)
    else:
        # garment.save_mesh(tag='stitched')  # Saving the geometry before any forces were applied
        sim_props['stats']['meshgen_time'][garment.name] = time.time() - meshgen_start_time
        sim_props['stats']['face_count'][garment.name] = len(garment.faces)
        sim_props_option = sim_props['config']['options']

        vertex_normals = get_dict_default_value(sim_props_option, 'store_vertex_normals', False)
        store_panels = get_dict_default_value(sim_props_option, 'store_panels', False)
        garment.serialize(
            paths,
            with_v_norms=vertex_normals,
            store_panels=store_panels,
            uv_config=props['render']['config']['uv_texture']
        )

        run_sim(
            garment.name,
            props,
            paths,
            save_v_norms=vertex_normals,
            store_usd=caching,  # NOTE: False for fast simulation!
            optimize_storage=sim_props['config']['optimize_storage'],
            verbose=False
        )
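
# The except-ladder in template_simulation could equivalently be driven by a
# mapping from exception type to failure tag -- an optional refactoring sketch,
# not part of the original code (exact-type lookup; subclasses would need a loop):
#
#   _FAIL_TAGS = {
#       TimeoutError: 'meshgen-timeout',
#       bmg.PatternLoadingError: 'pattern_loading',
#       bmg.DegenerateTrianglesError: 'degenerate_triangles',
#       bmg.MultiStitchingError: 'multi_stitching',
#       bmg.NormError: 'norm_error',
#       bmg.StitchingError: 'stitching_error',
#   }
#
#   try:
#       _load_boxmesh_timeout(garment, timeout_after)
#   except tuple(_FAIL_TAGS) as e:
#       print(e)
#       props.add_fail('sim', _FAIL_TAGS[type(e)], garment.name)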


def _load_boxmesh_timeout(garment, timeout_after):
    if platform.system() == "Windows":
        # https://stackoverflow.com/a/14920854
        # Run the load in a separate process as a timeout guard
        p = multiprocessing.Process(target=garment.load, name="GarmentGeneration")
        p.start()

        # Wait timeout_after seconds for garment.load()
        time.sleep(timeout_after)

        # If the process is still active
        if p.is_alive():
            # Terminate the process
            p.terminate()
            p.join()
            raise TimeoutError

    elif platform.system() in ["Linux", "OSX"]:
        # https://code-maven.com/python-timeout
        def alarm_handler(signum, frame):
            raise TimeoutError

        signal.signal(signal.SIGALRM, alarm_handler)
        signal.alarm(timeout_after)
        s_time = time.time()
        try:
            garment.load()
        except TimeoutError:
            raise
        else:
            e_time = time.time() - s_time
            # print("No timeout error with time: ", e_time)
            signal.alarm(0)
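
# Note on the Windows branch above: time.sleep(timeout_after) always blocks for
# the full timeout even if the load finishes early. A sketch that returns as
# soon as the child exits (with the usual caveat that state loaded in a child
# process does not propagate back to the parent's `garment` object):
#
#   p = multiprocessing.Process(target=garment.load, name="GarmentGeneration")
#   p.start()
#   p.join(timeout_after)  # waits at most timeout_after seconds
#   if p.is_alive():
#       p.terminate()
#       p.join()
#       raise TimeoutError
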
def get_dict_default_value(props, name, default_value):
    if name in props:
        return props[name]
    return default_value
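
# For plain dicts this mirrors dict.get:
#   >>> get_dict_default_value({'a': 1}, 'a', 0)
#   1
#   >>> get_dict_default_value({'a': 1}, 'b', 0)
#   0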


def _serialize_props_with_sim_stats(dataset_props, filename):
    """Compute data processing statistics and serialize props to a file"""
    dataset_props.stats_summary()
    dataset_props.serialize(filename)


def _get_pattern_names(data_path: Path):
    names = []
    to_ignore = ['renders']  # special dirs not to include in the pattern list
    for el in data_path.iterdir():
        if el.is_dir() and el.stem not in to_ignore:
            names.append(el.stem)

    return names
618   pygarment/meshgen/garment.py   Normal file
@@ -0,0 +1,618 @@
import igl
import json
import pickle
import numpy as np
import yaml

import warp as wp

import warp.sim.render
from warp.sim.utils import implicit_laplacian_smoothing
import warp.collision.panel_assignment as assign
from warp.sim.collide import count_self_intersections, count_body_cloth_intersections
from warp.sim.integrator_xpbd import replace_mesh_points

# Custom
from pygarment.meshgen.sim_config import PathCofig, SimConfig
from pygarment.pattern.core import BasicPattern


class Cloth:
    def __init__(self,
                 name, config: SimConfig, paths: PathCofig,
                 caching=False):

        self.caching = caching  # Saves intermediate frames, extra logs, etc.
        self.paths = paths
        self.name = name
        self.config = config

        self.sim_fps = config.sim_fps
        self.sim_substeps = config.sim_substeps
        self.zero_gravity_steps = config.zero_gravity_steps
        self.sim_dt = (1.0 / self.sim_fps) / self.sim_substeps
        self.usd_frame_time = 0.0
        self.sim_use_graph = wp.get_device().is_cuda
        self.device = wp.get_device() if wp.get_device().is_cuda else 'cpu'
        self.frame = -1

        self.c_scale = 1.0
        self.b_scale = 100.0
        self.body_path = paths.in_body_obj

        # collision resolution options
        self.enable_body_smoothing = config.enable_body_smoothing
        self.enable_cloth_reference_drag = config.enable_cloth_reference_drag

        # Build the stage -- model object, colliders, etc.
        self.build_stage(config)

        # -------- Final model settings ----------
        # NOTE: global_viscous_damping: (damping_factor, min_vel_damp, max_vel)
        # apply damping when vel > min_vel_damp, and clamp vel below max_vel after damping
        # TODO Remove after refactoring the Euler integrator
        self.model.global_viscous_damping = wp.vec3(
            (config.global_damping_factor, config.global_damping_effective_velocity, config.global_max_velocity))
        self.model.particle_max_velocity = config.global_max_velocity

        self.model.ground = config.ground

        self.model.global_collision_filter = config.enable_global_collision_filter
        self.model.cloth_reference_drag = self.enable_cloth_reference_drag
        self.model.cloth_reference_margin = config.cloth_reference_margin
        self.model.cloth_reference_k = config.cloth_reference_k
        self.model.cloth_reference_watertight_whole_shape_index = 0
        self.model.enable_particle_particle_collisions = config.enable_particle_particle_collisions
        self.model.enable_triangle_particle_collisions = config.enable_triangle_particle_collisions
        self.model.enable_edge_edge_collisions = config.enable_edge_edge_collisions
        self.model.attachment_constraint = config.enable_attachment_constraint

        self.model.soft_contact_margin = config.soft_contact_margin
        self.model.soft_contact_ke = config.soft_contact_ke
        self.model.soft_contact_kd = config.soft_contact_kd
        self.model.soft_contact_kf = config.soft_contact_kf
        self.model.soft_contact_mu = config.soft_contact_mu

        self.model.particle_ke = config.particle_ke
        self.model.particle_kd = config.particle_kd
        self.model.particle_kf = config.particle_kf
        self.model.particle_mu = config.particle_mu
        self.model.particle_cohesion = config.particle_cohesion
        self.model.particle_adhesion = config.particle_adhesion

        # self.integrator = wp.sim.SemiImplicitIntegrator()  # initialize a semi-implicit time integrator
        self.integrator = wp.sim.XPBDIntegrator()  # initialize an XPBD time integrator
        self.state_0 = self.model.state()  # returns a state object for the model (holds all *time-varying* data for a model)
        self.state_1 = self.model.state()  # i.e. body/particle positions and velocities
        if self.caching:
            self.renderer = wp.sim.render.SimRenderer(self.model, str(paths.usd), scaling=1.0)

        if self.sim_use_graph:
            self.create_graph()

        self.last_verts = None
        self.current_verts = wp.array.numpy(self.state_0.particle_q)

    def build_stage(self, config):

        builder = wp.sim.ModelBuilder(gravity=0.0)
        # --------------- Load body info -----------------
        body_vertices, body_indices, body_faces = self.load_obj(self.paths.in_body_obj)
        body_seg = self.read_json(self.paths.body_seg)

        body_vertices = body_vertices * self.b_scale
        self.shift_y = self.get_shift_param(body_vertices)

        if self.shift_y:
            body_vertices[:, 1] = body_vertices[:, 1] + self.shift_y

        self.v_body = body_vertices
        self.f_body = body_faces
        self.body_indices = body_indices

        # -------------- Load cloth ------------
        cloth_vertices, cloth_indices, cloth_faces = self.load_obj(self.paths.g_box_mesh)
        cloth_seg_dict = assign.read_segmentation(self.paths.g_mesh_segmentation)
        self.cloth_seg_dict = cloth_seg_dict
        stitching_vertices = cloth_seg_dict["stitch"] if 'stitch' in cloth_seg_dict.keys() else []

        cloth_vertices = cloth_vertices * self.c_scale
        if self.shift_y:
            cloth_vertices[:, 1] = cloth_vertices[:, 1] + self.shift_y
        self.v_cloth_init = cloth_vertices
        self.f_cloth = cloth_faces

        # Load ground-truth stitching lengths
        if not self.paths.g_orig_edge_len.exists():
            orig_lens_dict = None
            print("no original length dict found")
        else:
            with open(self.paths.g_orig_edge_len, 'rb') as file:
                orig_lens_dict = pickle.load(file)

        cloth_pos = (0.0, 0.0, 0.0)
        cloth_rot = wp.quat_from_axis_angle(wp.vec3(0.0, 1.0, 0.0), wp.degrees(0.0))  # no rotation; sets the orientation of the cloth in world space

        builder.add_cloth_mesh_sewing_spring(
            pos=cloth_pos,
            rot=cloth_rot,
            scale=1.0,
            vel=(0.0, 0.0, 0.0),
            vertices=cloth_vertices,
            indices=cloth_indices,
            resolution_scale=config.resolution_scale,
            orig_lens=orig_lens_dict,
            stitching_vertices=stitching_vertices,
            density=config.garment_density,
            edge_ke=config.garment_edge_ke,
            edge_kd=config.garment_edge_kd,
            tri_ke=config.garment_tri_ke,
            tri_ka=config.garment_tri_ka,
            tri_kd=config.garment_tri_kd,
            tri_drag=config.garment_tri_drag,
            tri_lift=config.garment_tri_lift,
            radius=config.garment_radius,
            add_springs=True,
            spring_ke=config.spring_ke,
            spring_kd=config.spring_kd,
        )

        # ------------ Add a body -----------
        if self.enable_body_smoothing:
            # Starts the sim from a smoothed-out body and slowly restores the original details
            smoothing_total_smoothing_factor = config.smoothing_total_smoothing_factor
            smoothing_num_steps = config.smoothing_num_steps
            smoothing_recover_start_frame = config.smoothing_recover_start_frame
            smoothing_frame_gap_between_steps = config.smoothing_frame_gap_between_steps
            smoothing_step_size = smoothing_total_smoothing_factor / smoothing_num_steps
            self.body_smoothing_frames = [smoothing_recover_start_frame + smoothing_frame_gap_between_steps * i for i in range(smoothing_num_steps + 1)]
            self.body_smoothing_vertices_list = implicit_laplacian_smoothing(body_vertices, body_indices.reshape(-1, 3),
                                                                             step_size=smoothing_step_size,
                                                                             iters=smoothing_num_steps)
            body_vertices = self.body_smoothing_vertices_list.pop()
            self.body_smoothing_frames.pop()
            self.body_indices = body_indices
            self.body_vertices_device_buffer = wp.array(body_vertices, dtype=wp.vec3, device=self.device)
            self.v_body = body_vertices

        self.body_mesh = wp.sim.Mesh(body_vertices, body_indices)

        body_pos = wp.vec3(0.0, 0.0, 0.0)
        body_rot = wp.quat_from_axis_angle(wp.vec3(0.0, 1.0, 0.0), wp.degrees(0.0))

        # Cloth-body segmentation
        cloth_reference_labels, body_parts = assign.panel_assignment(
            cloth_seg_dict, cloth_vertices, cloth_indices, wp.transform(cloth_pos, cloth_rot),
            body_seg, body_vertices, body_indices, wp.transform(body_pos, body_rot),
            device=self.device,
            panel_init_labels=self._load_panel_labels(),
            strategy='closest',
            merge_two_legs=True,
            smpl_body=self.paths.use_smpl_seg
        )

        face_filters, particle_filter = [], []
        if config.enable_body_collision_filters:
            v_connectivity = self._build_vert_connectivity(cloth_vertices, cloth_indices)
            # Arm filter for the skirts
            face_filters.append(assign.create_face_filter(
                body_vertices, body_indices, body_seg, ['left_arm', 'right_arm', 'arms'], smpl_body=self.paths.use_smpl_seg))
            particle_filter = assign.assign_face_filter_points(
                cloth_reference_labels,
                ['left_leg', 'right_leg', 'legs'],
                filter_id=0,
                vert_connectivity=v_connectivity
            )

            # Overall filter that ignores internal geometry
            face_filters.append(assign.create_face_filter(
                body_vertices, body_indices, body_seg, ['face_internal'], smpl_body=self.paths.use_smpl_seg))
            particle_filter = assign.assign_face_filter_points(
                cloth_reference_labels,
                ['body'],
                filter_id=1,
                vert_connectivity=v_connectivity,
                current_vertex_filter=particle_filter
            )

        self.body_shape_index = 0  # The body is the first collider object to be added
        builder.add_shape_mesh(
            body=-1,
            mesh=self.body_mesh,
            pos=body_pos,
            rot=body_rot,
            scale=wp.vec3(1.0, 1.0, 1.0),  # body scaling was performed above
            thickness=config.body_thickness,
            mu=config.body_friction,
            face_filters=face_filters if face_filters else [[]],
            model_particle_filter_ids=particle_filter,
        )

        # ----- Attachment constraint -------

        if config.enable_attachment_constraint:
            self._add_attachment_labels(builder, config)

        # ----- Global collision resolution -----
        for part in body_parts:
            part_v, part_inds = assign.extract_submesh(body_vertices, body_indices, body_parts[part])
            builder.add_cloth_reference_shape_mesh(
                mesh=wp.sim.Mesh(part_v, part_inds),
                name=part,
                pos=body_pos,
                rot=body_rot,
                scale=(1.0, 1.0, 1.0)  # body scaling was performed above
            )
        # NOTE: has a side effect of filling up the model.particle_reference_label array
        self.body_parts_names2index = builder.add_cloth_reference_labels(
            cloth_reference_labels,
            [  # NOTE: Not adding drag between the legs and the body, as it's useless and contradicts the attachment
                ['left_arm', 'body'],
                ['right_arm', 'body'],
                ['left_leg', 'right_leg'],
                ['left_arm', 'left_leg'],
                ['right_arm', 'left_leg'],
                ['left_arm', 'right_leg'],
                ['right_arm', 'right_leg'],
                ['left_arm', 'legs'],
                ['right_arm', 'legs'],
            ]
        )

        # ------- Finalize --------------
        self.model: wp.sim.Model = builder.finalize(device=self.device)  # data is transferred to warp tensors; the object is used in simulation

    def _add_attachment_labels(self, builder, config):
        with open(self.paths.in_body_mes, 'r') as file:
            body_dict = yaml.load(file, Loader=yaml.SafeLoader)['body']
        with open(self.paths.g_vert_labels, 'r') as f:
            vertex_labels = yaml.load(f, Loader=yaml.SafeLoader)

        labels_present = False
        for i, attach_label in enumerate(config.attachment_labels):
            if attach_label in vertex_labels.keys() and len(vertex_labels[attach_label]) > 0:
                constraint_verts = vertex_labels[attach_label]
                if attach_label == 'lower_interface':
                    labels_present = True
                    if '_waist_level' in body_dict:
                        waist_level = body_dict['_waist_level']
                    else:
                        waist_level = body_dict['height'] - body_dict['head_l'] - body_dict['waist_line']
                    builder.add_attachment(
                        constraint_verts,
                        wp.vec3(0, waist_level, 0),
                        wp.vec3(0., 1., 0.),  # Vertical attachment
                        stiffness=config.attachment_stiffness[i],
                        damping=config.attachment_damping[i]
                    )
                elif attach_label == 'right_collar':
                    labels_present = True

                    neck_w = body_dict['neck_w'] - 2
                    builder.add_attachment(
                        constraint_verts,
                        wp.vec3(-neck_w / 2, 0, 0),
                        wp.vec3(1., 0., 0.),  # Horizontal attachment
                        stiffness=config.attachment_stiffness[i],
                        damping=config.attachment_damping[i]
                    )
                elif attach_label == 'left_collar':
                    labels_present = True

                    neck_w = body_dict['neck_w'] - 2
                    builder.add_attachment(
                        constraint_verts,
                        wp.vec3(neck_w / 2, 0, 0),
                        wp.vec3(-1., 0., 0.),  # Horizontal attachment
                        stiffness=config.attachment_stiffness[i],
                        damping=config.attachment_damping[i]
                    )
                elif attach_label == 'strapless_top':
                    labels_present = True

                    # Attach under the arm
                    level = body_dict['height'] - body_dict['head_l'] - body_dict['armscye_depth']
                    builder.add_attachment(
                        constraint_verts,
                        wp.vec3(0, level, 0),
                        wp.vec3(0., 1., 0.),  # Vertical attachment
                        stiffness=config.attachment_stiffness[i],
                        damping=config.attachment_damping[i]
                    )
                else:
                    print(f'{self.name}::WARNING::Requested attachment label {attach_label} '
                          'is not supported. Skipped')
                    continue

                print(f'Using attachment for {attach_label} with {len(constraint_verts)} vertices')

        if not labels_present:
            # The loaded garment is not labeled -- update the config
            config.enable_attachment_constraint = False
            config.update_min_steps()
            print(f'{self.name}::WARNING::Requested attachment labels {config.attachment_labels} '
                  'are not present. Attachment is turned off'
                  )

    def _load_panel_labels(self):
        pattern = BasicPattern(self.paths.g_specs)

        labels = {}
        for name, panel in pattern.pattern['panels'].items():
            labels[name] = panel['label'] if 'label' in panel else ''

        return labels

    def _sim_frame_with_substeps(self):
        """Basic scheme for simulating a frame update"""

        wp.sim.collide(self.model, self.state_0, self.sim_dt * self.sim_substeps)  # Generates contact points for the particles and rigid bodies
        # in the model, to be used in the contact dynamics kernel of the integrator;
        # launches kernels

        for s in range(self.sim_substeps):
            self.state_0.clear_forces()  # set particle and body forces to 0s
            self.integrator.simulate(self.model, self.state_0, self.state_1,
                                     self.sim_dt)  # calculate one integration step;
            # launches kernels and calculates new particle (and body) positions and velocities
            # swap the previous and the new state
            (self.state_0, self.state_1) = (self.state_1, self.state_0)

    def create_graph(self):
        # create the update graph
        wp.capture_begin()  # Captures all subsequent kernel launches and memory operations on CUDA devices

        self._sim_frame_with_substeps()

        self.graph = wp.capture_end()  # returns a handle to a CUDA graph object that can be launched with wp.capture_launch();
        # kernel launches are no longer captured after this point

    def update(self, frame):
        with wp.ScopedTimer("simulate", print=False, active=True):
            if self.model.enable_particle_particle_collisions:
                # FIXME: Produces CUDA errors when activated together with "enable_cloth_reference_drag".
                # The reason is unknown
                self.model.particle_grid.build(self.state_0.particle_q, self.model.particle_max_radius * 2.0)
            if frame == self.zero_gravity_steps:
                self.model.gravity = np.array((0.0, -9.81, 0.0))
                if self.sim_use_graph:
                    self.create_graph()
            if self.enable_body_smoothing and frame in self.body_smoothing_frames:
                self.update_smooth_body_shape()
                if self.sim_use_graph:
                    self.create_graph()
            if (self.model.attachment_constraint
                    and frame >= self.config.attachment_frames):
                self.model.attachment_constraint = False
                if self.sim_use_graph:
                    self.create_graph()

            if self.sim_use_graph:  # GPU
                wp.capture_launch(self.graph)
            else:  # CPU: launch kernels without a graph
                self._sim_frame_with_substeps()

            # Update the vertices of the last frame
            self.last_verts = self.current_verts
            # NOTE Makes a copy if the particle_q device is not CPU
            self.current_verts = wp.array.numpy(self.state_0.particle_q)

    def update_smooth_body_shape(self):
        body_vertices = self.body_smoothing_vertices_list.pop()
        self.v_body = body_vertices
        wp.copy(self.body_vertices_device_buffer,
                wp.array(body_vertices, dtype=wp.vec3, device='cpu', copy=False))

        # Apply the new vertices and refit the acceleration structures
        wp.launch(
            kernel=replace_mesh_points,
            dim=len(body_vertices),
            inputs=[self.body_mesh.mesh.id,
                    self.body_vertices_device_buffer],
            device=self.device
        )
        self.body_mesh.mesh.refit()

        # update the render
        if self.caching:
            self.renderer.render_mesh(
                f'shape_{self.body_shape_index}',
                body_vertices,
                None,
                is_template=True,
            )

    def render_usd_frame(self, is_live=False):
        with wp.ScopedTimer("render", print=False, active=True):
            start_time = 0.0 if is_live else self.usd_frame_time

            self.renderer.begin_frame(start_time)
            self.renderer.render(self.state_0)
            self.renderer.end_frame()

            self.usd_frame_time += 1.0 / self.sim_fps
            if not is_live:
                self.renderer.save()

    def run_frame(self):
        self.update(self.frame)

        # NOTE: USD render
        if self.caching:
            self.render_usd_frame()

    def read_json(self, path):
        with open(path, 'r') as f:
            data = json.load(f)
        return data

    def load_obj(self, path):
        v, f = igl.read_triangle_mesh(str(path))
        return v, f.flatten(), f

    def get_shift_param(self, body_vertices):
        v_body_arr = np.array(body_vertices)
        min_y = min(v_body_arr[:, 1])
        if min_y < 0:
            return abs(min_y)
        return 0.0
    def calc_norm(self, a, b, c):
        """
        Calculates the unit normal of the triangle defined by the three points a, b, and c.
        Input:
            * self (Cloth object): instance of the Cloth class from which the function is called
            * a (ndarray): first point taking part in the normal calculation
            * b (ndarray): second point taking part in the normal calculation
            * c (ndarray): third point taking part in the normal calculation
        Output:
            * n_normalized (ndarray): normal of (a, b, c) with length 1
        """
        # Calculate the vectors AB and AC
        AB = np.array(b - a)
        AC = np.array(c - a)

        # Calculate the cross product of AB and AC
        n = np.cross(AB, AC)
        n_normalized = n / np.linalg.norm(n)

        return n_normalized
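    # Sanity check: for the unit right triangle a=(0,0,0), b=(1,0,0), c=(0,1,0),
    # AB x AC = (0, 0, 1), which is already unit length, so the method
    # returns the normal (0, 0, 1).
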
    def calc_vertex_norms(self):
        vertex_normals = np.zeros((len(self.v_cloth_init), 4))
        for face in self.f_cloth:
            v0, v1, v2 = np.array(self.current_verts)[face]
            face_norm = list(self.calc_norm(v0, v1, v2))
            temp_update = face_norm + [1]  # the extra column counts the faces incident to each vertex
            vertex_normals[face] += temp_update

        vertex_normals = vertex_normals[:, :3] / (vertex_normals[:, 3][:, np.newaxis])
        return vertex_normals

    def save_frame(self, save_v_norms=False):
        """Save the current garment state as an obj file,
        re-using all the information from the boxmesh
        except for vertices and vertex normals (e.g. textures and faces)
        """

        # NOTE: the igl routine is not used here because it cannot write any extra info (e.g. texture coords) into the obj

        # stores v, f, vt and vn
        # Save the cloth with texture and normals
        if save_v_norms:
            vertex_normals = self.calc_vertex_norms()

        v_cloth_sim = self.current_verts
        # Store the simulated cloth mesh
        # Read the boxmesh file
        with open(self.paths.g_box_mesh, 'r') as obj_file:
            lines = obj_file.readlines()

        # Modify the vertex positions and normals, if required
        with open(self.paths.g_sim, 'w') as obj_file:
            v_idx = 0
            vn_idx = 0
            for line in lines:
                if line.startswith('v '):
                    new_vertex = v_cloth_sim[v_idx]
                    obj_file.write(f'v {new_vertex[0]} {new_vertex[1]} {new_vertex[2]}\n')
                    v_idx += 1
                elif line.startswith('vn '):
                    if save_v_norms:
                        new_vertex = vertex_normals[vn_idx]
                        obj_file.write(f'vn {new_vertex[0]} {new_vertex[1]} {new_vertex[2]}\n')
                        vn_idx += 1
                else:
                    obj_file.write(line)
    def is_static(self):
        """
        Checks whether the garment is in static equilibrium.
        Compares the current state with the last recorded state
        """
        threshold = self.config.static_threshold
        non_static_percent = self.config.non_static_percent

        curr_verts_arr = self.current_verts
        last_verts_arr = self.last_verts

        if self.last_verts is None:  # first iteration
            return False, len(curr_verts_arr)

        # Compare the L1 norm per vertex.
        # Checking the change in vertices is the same as checking whether the velocity is zero
        diff = np.abs(curr_verts_arr - last_verts_arr)
        diff_L1 = np.sum(diff, axis=1)

        non_static_len = len(
            diff_L1[diff_L1 > threshold])  # compare vertex-wise to allow accurate control over outliers

        if non_static_len == 0 or (non_static_len < len(curr_verts_arr) * 0.01 * non_static_percent):
            print('\nStatic with {} non-static vertices out of {}'.format(non_static_len, len(curr_verts_arr)))
            # Store the last frame
            return True, non_static_len
        else:
            return False, non_static_len
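    # Toy example with static_threshold=0.03 and non_static_percent=1.5:
    #   last = [[0, 0, 0],    [1, 1, 1]]
    #   curr = [[0, 0, 0.01], [1, 1, 1.05]]
    # The per-vertex L1 diffs are 0.01 and 0.05 -> 1 non-static vertex out of 2,
    # which exceeds the allowance 2 * 0.01 * 1.5 = 0.03 -> not static yet.
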
    def count_self_intersections(self):
        model = self.model

        if model.particle_count and model.spring_count:
            model.particle_self_intersection_count.zero_()
            wp.launch(
                kernel=count_self_intersections,
                dim=model.spring_count,
                inputs=[
                    model.spring_indices,
                    model.particle_shape.id,
                ],
                outputs=[
                    model.particle_self_intersection_count
                ],
                device=model.device,
            )
            return int(wp.array.numpy(self.model.particle_self_intersection_count)[0])
        else:
            return 0

    def count_body_intersections(self):
        model = self.model

        if model.particle_count:
            model.body_cloth_intersection_count.zero_()
            wp.launch(
                kernel=count_body_cloth_intersections,
                dim=model.spring_count,
                inputs=[
                    model.spring_indices,
                    model.particle_shape.id,
                    model.shape_geo,
                    self.body_shape_index
                ],
                outputs=[
                    model.body_cloth_intersection_count
                ],
                device=model.device,
            )
            return int(wp.array.numpy(self.model.body_cloth_intersection_count)[0])
        else:
            return 0

    def _build_vert_connectivity(self, vertices, indices):
        vert_connectivity = [[] for _ in range(len(vertices))]

        for face_id in range(int(len(indices) / 3)):
            v1, v2, v3 = indices[face_id * 3 + 0], indices[face_id * 3 + 1], indices[face_id * 3 + 2]

            vert_connectivity[v1].append(v2)
            vert_connectivity[v1].append(v3)

            vert_connectivity[v2].append(v1)
            vert_connectivity[v2].append(v3)

            vert_connectivity[v3].append(v1)
            vert_connectivity[v3].append(v2)

        return vert_connectivity
199   pygarment/meshgen/render/pythonrender.py   Normal file
@@ -0,0 +1,199 @@
import os
import platform
if platform.system() == 'Linux':
    os.environ["PYOPENGL_PLATFORM"] = "egl"
import numpy as np
import trimesh
import pyrender
from PIL import Image

from pygarment.meshgen.sim_config import PathCofig


def rotate_matrix_y(matrix, angle_deg):
    rotation_angle = angle_deg * (np.pi / 180)

    # Define the rotation matrix for a rotation by angle_deg around the y-axis
    rotation_matrix = np.array([
        [np.cos(rotation_angle), 0, np.sin(rotation_angle), 0],
        [0, 1, 0, 0],
        [-np.sin(rotation_angle), 0, np.cos(rotation_angle), 0],
        [0, 0, 0, 1]
    ])

    # Apply the rotation to the matrix
    rot_matrix = np.dot(rotation_matrix, matrix)
    return rot_matrix


def rotate_matrix_x(matrix, angle_deg):
    rotation_angle = angle_deg * (np.pi / 180)

    # Define the rotation matrix for a rotation by angle_deg around the x-axis
    rotation_matrix = np.array([
        [1, 0, 0, 0],
        [0, np.cos(rotation_angle), -np.sin(rotation_angle), 0],
        [0, np.sin(rotation_angle), np.cos(rotation_angle), 0],
        [0, 0, 0, 1]
    ])

    # Apply the rotation to the matrix
    rot_matrix = np.dot(rotation_matrix, matrix)
    return rot_matrix


def get_bounding_box_edges(mesh):
    # Calculate the bounding box of the mesh
    min_coords = mesh.bounds[0]
    max_coords = mesh.bounds[1]

    # Compute the corner points of the bounding box
    corners = [
        min_coords,
        [max_coords[0], min_coords[1], min_coords[2]],
        [min_coords[0], max_coords[1], min_coords[2]],
        [max_coords[0], max_coords[1], min_coords[2]],
        [min_coords[0], min_coords[1], max_coords[2]],
        [max_coords[0], min_coords[1], max_coords[2]],
        [min_coords[0], max_coords[1], max_coords[2]],
        max_coords
    ]

    return corners


def create_camera(pyrender, pyrender_body_mesh, scene, side, camera_location=None):

    # Create a camera
    y_fov = np.pi / 6.
    camera = pyrender.PerspectiveCamera(yfov=y_fov)

    if camera_location is None:
        # Evaluate w.r.t. the body

        fov = 50  # desired field of view (in degrees) used for the distance heuristic

        # Calculate the bounding box center of the mesh
        bounding_box_center = pyrender_body_mesh.bounds.mean(axis=0)

        # Calculate the diagonal length of the bounding box
        diagonal_length = np.linalg.norm(pyrender_body_mesh.bounds[1] - pyrender_body_mesh.bounds[0])

        # Calculate the distance of the camera from the object based on the diagonal length
        distance = 1.5 * diagonal_length / (2 * np.tan(np.radians(fov / 2)))

        camera_location = bounding_box_center
        camera_location[-1] += distance

    # Calculate the camera pose
    camera_pose = np.array([
        [1.0, 0.0, 0.0, camera_location[0]],
        [0.0, 1.0, 0.0, camera_location[1]],
        [0.0, 0.0, 1.0, camera_location[2]],
        [0.0, 0.0, 0.0, 1.0]
    ])

    camera_pose = rotate_matrix_x(camera_pose, -15)
    camera_pose = rotate_matrix_y(camera_pose, 20)
    if side == 'back':
        camera_pose = rotate_matrix_y(camera_pose, 180)

    # Set the camera's pose in the scene
    scene.add(camera, pose=camera_pose)
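
# Worked example of the distance heuristic in create_camera: with fov = 50
# degrees, tan(25 deg) ~ 0.466, so distance = 1.5 * d / (2 * 0.466) ~ 1.61 * d;
# a body with a ~2 m bounding-box diagonal places the camera ~3.2 m away.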


def create_lights(scene, intensity=30.0):
    light_positions = [
        np.array([1.60614, 1.5341, 1.23701]),
        np.array([1.31844, 1.92831, -2.52238]),
        np.array([-2.80522, 1.2594, 2.34624]),
        np.array([0.160261, 1.81789, 3.52215]),
        np.array([-2.65752, 1.41194, -1.26328])
    ]
    light_colors = [
        [1.0, 1.0, 1.0],
        [1.0, 1.0, 1.0],
        [1.0, 1.0, 1.0],
        [1.0, 1.0, 1.0],
        [1.0, 1.0, 1.0]
    ]

    # Add the lights to the scene
    for i in range(5):
        light = pyrender.PointLight(color=light_colors[i], intensity=intensity)
        light_pose = np.eye(4)
        light_pose[:3, 3] = light_positions[i]
        scene.add(light, pose=light_pose)


def render(
        pyrender_garm_mesh, pyrender_body_mesh,
        side,
        paths: PathCofig,
        render_props=None
):
    if render_props and 'resolution' in render_props:
        view_width, view_height = render_props['resolution']
    else:
        view_width, view_height = 1080, 1080
    # Create a pyrender scene
    scene = pyrender.Scene(bg_color=(1., 1., 1., 0.))  # Transparent!

    # Create pyrender mesh objects from the trimesh objects
    # and add them to the scene
    scene.add(pyrender_garm_mesh)
    scene.add(pyrender_body_mesh)

    camera_location = render_props['front_camera_location'] if render_props and 'front_camera_location' in render_props else None
    create_camera(
        pyrender, pyrender_body_mesh, scene, side,
        camera_location=camera_location
    )

    create_lights(scene, intensity=80.)

    # Create a renderer
    renderer = pyrender.OffscreenRenderer(viewport_width=view_width, viewport_height=view_height)

    # Render the scene
    color, _ = renderer.render(scene, flags=pyrender.RenderFlags.RGBA)

    image = Image.fromarray(color)
    image.save(paths.render_path(side), "PNG")


def load_meshes(paths: PathCofig, body_v, body_f):
    # Load the body mesh
    body_mesh = trimesh.Trimesh(body_v, body_f)
    body_mesh.vertices = body_mesh.vertices / 100
    # Color the body mesh
    body_material = pyrender.MetallicRoughnessMaterial(
        baseColorFactor=(0.0, 0.0, 0.0, 1.0),  # RGB color, alpha
        metallicFactor=0.658,  # Range: [0.0, 1.0]
        roughnessFactor=0.5  # Range: [0.0, 1.0]
    )
    pyrender_body_mesh = pyrender.Mesh.from_trimesh(body_mesh, material=body_material)

    # Load the garment mesh
    garm_mesh = trimesh.load_mesh(str(paths.g_sim))  # NOTE: Includes the texture
    garm_mesh.vertices = garm_mesh.vertices / 100  # scale to m

    # Material adjustments
    material = garm_mesh.visual.material.to_pbr()
    material.baseColorFactor = [1., 1., 1., 1.]
    material.doubleSided = True  # color both face sides
    # NOTE remove transparency -- add a white background just in case
    white_back = Image.new('RGBA', material.baseColorTexture.size, color=(255, 255, 255, 255))
    white_back.paste(material.baseColorTexture)
    material.baseColorTexture = white_back.convert('RGB')

    garm_mesh.visual.material = material

    pyrender_garm_mesh = pyrender.Mesh.from_trimesh(garm_mesh, smooth=True)

    return pyrender_garm_mesh, pyrender_body_mesh


def render_images(paths: PathCofig, body_v, body_f, render_props):

    pyrender_garm_mesh, pyrender_body_mesh = load_meshes(paths, body_v, body_f)

    for side in render_props['sides']:
        render(pyrender_garm_mesh, pyrender_body_mesh, side, paths, render_props)
307   pygarment/meshgen/render/texture_utils.py   Normal file
@@ -0,0 +1,307 @@
"""Routines for processing UV coordinates for garments and generating texture maps"""
import numpy as np
import igl
import matplotlib.pyplot as plt
import matplotlib
from pathlib import Path


# SECTION UV islands texture creation
def texture_mesh_islands(
        texture_coords, face_texture_coords,
        out_texture_image_path: Path,
        out_fabric_tex_image_path: Path = None,
        out_mtl_file_path: Path = None,
        boundary_width=0.3,
        dpi=1200,
        background_img_path=None,
        background_resolution=1.,
        uv_padding=3,
        mat_name='islands_texture'
):
    """
    Returns updated uv coordinates (properly normalized and aligned with the created texture)
    """
    all_uvs, boundary_uv_to_draw = unwarp_UV(texture_coords, face_texture_coords, padding=uv_padding)

    uv_list, width, height = normalize_UVs(all_uvs, axis_padding=uv_padding)  # NOTE !! The axis padding should match the uv padding

    # Create the image
    create_UV_island_texture(
        boundary_uv_to_draw, width, height,
        texture_image_path=out_texture_image_path,
        boundary_width=boundary_width,
        dpi=dpi,
        preserve_alpha=True
    )

    # Create an image with a fabric background
    if out_fabric_tex_image_path is not None:
        create_UV_island_texture(
            boundary_uv_to_draw, width, height,
            texture_image_path=out_fabric_tex_image_path,
            boundary_width=boundary_width,
            dpi=dpi,
            background_img_path=background_img_path,
            background_resolution=background_resolution,
            preserve_alpha=False
        )

    # Save the mtl if requested
    if out_mtl_file_path:
        save_texture_mtl(
            out_mtl_file_path,
            out_fabric_tex_image_path.name if out_fabric_tex_image_path is not None else out_texture_image_path.name,
            mat_name=mat_name)

    return uv_list
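
# A hypothetical call, for illustration only -- texture_coords and
# face_texture_coords come from the garment mesh serialization; the output
# paths are chosen freely:
#
#   uv_list = texture_mesh_islands(
#       texture_coords, face_texture_coords,
#       out_texture_image_path=Path('out/tee_texture.png'),
#       out_mtl_file_path=Path('out/tee.mtl'),
#       boundary_width=0.3, dpi=1200,
#   )
#   # uv_list then replaces the 'vt' entries in the obj, e.g. via add_texture_to_obj()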


def _uv_connected_components(face_texture_coords):

    # Find the connected components of the face and vertex texture coords
    face_components = igl.facet_components(face_texture_coords)
    vert_components = igl.vertex_components(face_texture_coords)
    num_ccs = max(face_components) + 1

    return vert_components, face_components, num_ccs


def unwarp_UV(texture_coords, face_texture_coords, padding=3):
    # Unwrap the uvs for each connected component ------------------------

    vert_components, face_components, num_ccs = _uv_connected_components(face_texture_coords)

    all_uvs = []  # transform all UVs to update the obj file
    boundary_uv_to_draw = []  # only draw the boundary UVs

    translate_Y = 0
    translate_X = 0

    shells_per_row = int(num_ccs ** 0.5)
    column_x_shift = 0

    # Loop through each connected component
    for i in range(num_ccs):

        # Get the faces and vertices of the connected component
        faces_in_cc = np.where(face_components == i)[0]
        face_vts_in_cc = face_texture_coords[faces_in_cc]

        # get all vertices of the connected component
        verts_in_cc = np.where(vert_components == i)[0]

        all_vert_pos = texture_coords[verts_in_cc]

        # Find the boundary loop
        bound_verts = igl.boundary_loop(face_vts_in_cc)
        bound_vert_pos = texture_coords[bound_verts]

        # Shift the component by its bounding box
        bbox = bound_vert_pos.min(axis=0), bound_vert_pos.max(axis=0)
        bbox_len_Y = (bbox[1][1] - bbox[0][1])
        bbox_len_X = (bbox[1][0] - bbox[0][0])

        if (i % shells_per_row == 0):
            # Start a new column
            translate_Y = padding
            translate_X += (column_x_shift + padding)
            column_x_shift = 0  # restart the BBOX collection

        # Update the shift
        column_x_shift = max(bbox_len_X, column_x_shift)

        # translate the boundary positions
        verts_translated_bound = [(x + translate_X, y + translate_Y) for x, y in bound_vert_pos]
        boundary_uv_to_draw.append(verts_translated_bound)

        # translate all positions
        verts_translated = [(x + translate_X, y + translate_Y) for x, y in all_vert_pos]
        all_uvs.extend(verts_translated)

        translate_Y = translate_Y + bbox_len_Y + padding

    return all_uvs, boundary_uv_to_draw


def normalize_UVs(all_uvs, axis_padding=3):
    # normalize all_uvs
    uv_list_raw = np.array(all_uvs)
    uv_list = uv_list_raw

    norm_x = max(uv_list_raw[:, 0]) + axis_padding
    uv_list[:, 0] = uv_list_raw[:, 0] / norm_x
    norm_y = max(uv_list_raw[:, 1]) + axis_padding
    uv_list[:, 1] = uv_list_raw[:, 1] / norm_y

    return uv_list, norm_x, norm_y
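
# Example: with axis_padding=3 and raw UVs spanning x in [0, 47], y in [0, 17],
# norm_x = 50 and norm_y = 20, so (47, 17) maps to (0.94, 0.85) -- every
# coordinate lands strictly inside [0, 1).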
|
||||
def create_UV_island_texture(
|
||||
boundary_uv_to_draw,
|
||||
width, height,
|
||||
texture_image_path,
|
||||
boundary_width=0.3,
|
||||
boundary_color='black',
|
||||
dpi=1200,
|
||||
color_alpha=0.65,
|
||||
background_alpha=0.8,
|
||||
background_img_path=None,
|
||||
background_resolution=5,
|
||||
preserve_alpha=True
|
||||
):
|
||||
"""Create texture image from the set of UV boundary loops (e.g. sewing pattern panels).
|
||||
It renders the border of the loops and fills them in with color
|
||||
Params:
|
||||
* boundary_uv_to_draw -- 2D list -- sequence of 2D vertices on each of the boundaries. The order is IMPORTANT. The vertices will be connected
|
||||
by boundary edges sequentially
|
||||
* width, height -- the dimentions of the UV map
|
||||
* texture_image_path -- filepath to same a texture image to
|
||||
* boundary_width -- width of the boundary outline
|
||||
* dpi -- resolution of the output image
|
||||
"""
|
||||
n_components = len(boundary_uv_to_draw)
|
||||
|
||||
# Figure size
|
||||
fig, ax = plt.subplots()
|
||||
fig.set_size_inches(width / 100, height / 100) # width & height are usually given in cm
|
||||
|
||||
# Colors
|
||||
shift = 0.17
|
||||
divisor = max(5, n_components)
|
||||
cmap = matplotlib.colormaps['twilight'] # copper cool spring winter twilight # Using smooth Matplotlib colormaps
|
||||
color_sample = [cmap((1 - shift) * id / divisor) for id in range(divisor)]
|
||||
|
||||
# Background -- garment style
|
||||
if background_img_path is not None:
|
||||
back_crop_scale = background_resolution
|
||||
back_img = plt.imread(background_img_path)
|
||||
ax.imshow(
|
||||
back_img[:int(width * back_crop_scale), :int(height * back_crop_scale), :],
|
||||
extent=[0, width, 0, height],
|
||||
alpha=background_alpha,
|
||||
aspect='equal'
|
||||
)
|
||||
|
||||
# Draw the UV island boundaries and fill them up
|
||||
for i in range(n_components):
|
||||
polygon_x = [vert[0] for vert in boundary_uv_to_draw[i]]
|
||||
polygon_x.append(polygon_x[0]) # Loop
|
||||
polygon_y = [vert[1] for vert in boundary_uv_to_draw[i]]
|
||||
polygon_y.append(polygon_y[0]) # Loop
|
||||
|
||||
color = list(color_sample[i])
|
||||
color[-1] = color_alpha # Alpha - transparency for blending with backround
|
||||
|
||||
plt.fill(polygon_x, polygon_y,
|
||||
color=color,
|
||||
edgecolor=boundary_color, linestyle='-', linewidth=boundary_width / 2 # Boundary stylings
|
||||
)
|
||||
|
||||
ax.set_aspect('equal')
|
||||
|
||||
# Set the axis to be tight
|
||||
ax.set_xlim([0, width])
|
||||
ax.set_ylim([0, height])
|
||||
|
||||
# Hide the axis
|
||||
plt.axis('off')
|
||||
|
||||
# Save image
|
||||
plt.savefig(texture_image_path, dpi=dpi, bbox_inches='tight', pad_inches=0, transparent=preserve_alpha)
|
||||
|
||||
# Cleanup
|
||||
plt.close()
|
||||
|
||||
# !SECTION
|
||||
|
||||
# SECTION Saving textures information to files
|
||||
def save_texture_mtl(mtl_file_path, texture_image_name, mat_name='uv_texture'):
|
||||
new_material_lines = [
|
||||
f'newmtl {mat_name}\n',
|
||||
'Ns 0.000000\n',
|
||||
'Ka 1.000000 1.000000 1.000000\n',
|
||||
'Ks 0.000000 0.000000 0.000000\n',
|
||||
'Ke 0.000000 0.000000 0.000000\n',
|
||||
'Ni 1.000000\n',
|
||||
'd 1.000000\n',
|
||||
'illum 1\n',
|
||||
f'map_Kd {texture_image_name}\n'
|
||||
]
|
||||
|
||||
with open(mtl_file_path, 'w') as file:
|
||||
file.writelines(new_material_lines)
|
||||
|
||||
return mat_name
|
||||
|
||||
def save_obj(
        output_file_path,
        vertices, faces_with_texture, uv_list,
        vert_normals=None, mtl_file_name=None, mat_name=None):
    """Save an obj file with texture information (if provided)"""

    with open(output_file_path, 'w') as f:
        if mtl_file_name is not None:
            f.write(f'mtllib {mtl_file_name}\n')

        for v in vertices:
            f.write(f"v {v[0]} {v[1]} {v[2]}\n")

        for vt in uv_list:
            f.write(f"vt {vt[0]} {vt[1]}\n")

        if vert_normals is not None:
            for vn in vert_normals:
                f.write(f"vn {vn[0]} {vn[1]} {vn[2]}\n")

        f.write('s 1\n')
        if mtl_file_name is not None:
            f.write(f'usemtl {mat_name}\n')

        # NOTE: OBJ indices are 1-based; normals are per-vertex,
        # so the normal index matches the vertex index
        if vert_normals is not None:
            for v_id0, tex_id0, v_id1, tex_id1, v_id2, tex_id2 in faces_with_texture:
                f.write(f"f {v_id0 + 1}/{tex_id0 + 1}/{v_id0 + 1} "
                        f"{v_id1 + 1}/{tex_id1 + 1}/{v_id1 + 1} "
                        f"{v_id2 + 1}/{tex_id2 + 1}/{v_id2 + 1}\n")
        else:
            for v_id0, tex_id0, v_id1, tex_id1, v_id2, tex_id2 in faces_with_texture:
                f.write(f"f {v_id0 + 1}/{tex_id0 + 1} "
                        f"{v_id1 + 1}/{tex_id1 + 1} "
                        f"{v_id2 + 1}/{tex_id2 + 1}\n")

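# Usage sketch (hypothetical data): write a textured unit quad using the two
# helpers above. Each face entry interleaves vertex and texture ids:
# (v0, vt0, v1, vt1, v2, vt2).
#
#   vertices = [(0, 0, 0), (1, 0, 0), (1, 1, 0), (0, 1, 0)]
#   uv_list = [(0, 0), (1, 0), (1, 1), (0, 1)]
#   faces_with_texture = [(0, 0, 1, 1, 2, 2), (0, 0, 2, 2, 3, 3)]
#
#   mat = save_texture_mtl('quad_material.mtl', 'quad_texture.png')
#   save_obj('quad.obj', vertices, faces_with_texture, uv_list,
#            mtl_file_name='quad_material.mtl', mat_name=mat)
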
def add_texture_to_obj(obj_file_path, output_file_path, uv_list, mtl_file_name, mat_name):
    """Update an existing OBJ file with new UV coordinates and material references"""

    with open(obj_file_path, 'r') as file:
        lines = file.readlines()

    uv_index = 0
    updated_lines = []
    mtllib_exists = False
    inserted = False

    s_and_usemtl_lines = ['s 1\n', f'usemtl {mat_name}\n']

    for line in lines:
        if line.startswith('vt '):
            # Format the new UV coordinates
            uv = uv_list[uv_index]
            new_uv_line = f'vt {uv[0]:.6f} {uv[1]:.6f}\n'
            updated_lines.append(new_uv_line)
            uv_index += 1
        elif line.startswith('mtllib '):
            # Ensure the mtllib line points to the correct MTL file
            new_mtl_line = f'mtllib {mtl_file_name}\n'
            updated_lines.append(new_mtl_line)
            mtllib_exists = True
        elif line.startswith('f ') and not inserted:
            # Insert the s and usemtl lines before the first face line
            updated_lines.extend(s_and_usemtl_lines)
            inserted = True
            updated_lines.append(line)
        else:
            updated_lines.append(line)

    # If the mtllib line does not exist, add it at the beginning
    if not mtllib_exists:
        updated_lines.insert(0, f'mtllib {mtl_file_name}\n')

    with open(output_file_path, 'w') as file:
        file.writelines(updated_lines)

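# Usage sketch (hypothetical paths/data): re-texture an existing OBJ,
# reusing the material written by save_texture_mtl() above.
#
#   new_uvs = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
#   add_texture_to_obj(
#       'quad.obj', 'quad_textured.obj', new_uvs,
#       mtl_file_name='quad_material.mtl', mat_name='uv_texture')
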
# !SECTION
285
pygarment/meshgen/sim_config.py
Normal file
@@ -0,0 +1,285 @@
from pathlib import Path
import yaml
from datetime import datetime

from pygarment.data_config import Properties


class PathCofig:
    """Routines for getting paths to various relevant objects with standard names"""
    def __init__(self,
                 in_element_path, out_path, in_name, out_name=None,
                 body_name='', samples_name='', default_body=True,
                 smpl_body=False,
                 add_timestamp=False):
        """Specify
            * in_element_path -- path to the folder with the input garment element
            * out_path -- dataset-level output path
            * in_name -- name tag of the input garment
            * out_name -- name tag for the outputs (defaults to in_name)
            * body_name -- specify to indicate use of default bodies
            * samples_name -- specify to indicate use of body sampling (reading the body name from the measurements file)
        """

        self._system = Properties('./system.json')  # TODO (low priority): more stable path?
        self._body_name = body_name
        self._samples_folder_name = samples_name
        self._use_default_body = default_body
        self.use_smpl_seg = smpl_body

        # Tags
        if out_name is None:
            out_name = in_name
        self.in_tag = in_name
        self.out_folder_tag = f'{out_name}_{datetime.now().strftime("%y%m%d-%H-%M-%S")}' if add_timestamp else out_name
        self.sim_tag = out_name
        self.boxmesh_tag = out_name

        # Base paths
        self.input = Path(in_element_path)
        self.out = out_path
        self.out_el = Path(out_path) / self.out_folder_tag
        self.out_el.mkdir(parents=True, exist_ok=True)

        # Individual file paths
        self._update_in_paths()
        self._update_boxmesh_paths()
        self.update_in_copies_paths()
        self.update_sim_paths()

    def _update_in_paths(self):
        # Base path
        if not self._samples_folder_name or self._use_default_body:
            self.bodies_path = Path(self._system['bodies_default_path'])
        else:
            self.bodies_path = Path(self._system['body_samples_path']) / self._samples_folder_name / 'meshes'

        # Body measurements
        if not self._samples_folder_name:
            self.in_body_mes = self.bodies_path / f'{self._body_name}.yaml'
        else:
            self.in_body_mes = self.input / 'body_measurements.yaml'

        with open(self.in_body_mes, 'r') as file:
            body_dict = yaml.load(file, Loader=yaml.SafeLoader)
            if 'body_sample' in body_dict['body']:  # Not present in default measurements
                self._body_name = body_dict['body']['body_sample']

        self.in_body_obj = self.bodies_path / f'{self._body_name}.obj'
        self.in_g_spec = self.input / f'{self.in_tag}_specification.json'
        self.body_seg = Path(self._system['bodies_default_path']) / ('ggg_body_segmentation.json' if not self.use_smpl_seg else 'smpl_vert_segmentation.json')
        self.in_design_params = self.input / 'design_params.yaml'

    def _update_boxmesh_paths(self):
        self.g_box_mesh = self.out_el / f'{self.boxmesh_tag}_boxmesh.obj'
        self.g_box_mesh_compressed = self.out_el / f'{self.boxmesh_tag}_boxmesh.ply'
        self.g_mesh_segmentation = self.out_el / f'{self.boxmesh_tag}_sim_segmentation.txt'
        self.g_orig_edge_len = self.out_el / f'{self.boxmesh_tag}_orig_lens.pickle'
        self.g_vert_labels = self.out_el / f'{self.boxmesh_tag}_vertex_labels.yaml'
        self.g_texture_fabric = self.out_el / f'{self.boxmesh_tag}_texture_fabric.png'
        self.g_texture = self.out_el / f'{self.boxmesh_tag}_texture.png'
        self.g_mtl = self.out_el / f'{self.boxmesh_tag}_material.mtl'

    def update_in_copies_paths(self):
        self.g_specs = self.out_el / f'{self.in_tag}_specification.json'
        self.element_sim_props = self.out_el / 'sim_props.yaml'
        self.body_mes = self.out_el / f'{self.in_tag}_body_measurements.yaml'
        self.design_params = self.out_el / f'{self.in_tag}_design_params.yaml'

    def update_sim_paths(self):
        self.g_sim = self.out_el / f'{self.sim_tag}_sim.obj'
        self.g_sim_glb = self.out_el / f'{self.sim_tag}_sim.glb'
        self.g_sim_compressed = self.out_el / f'{self.sim_tag}_sim.ply'
        self.usd = self.out_el / f'{self.sim_tag}_simulation.usd'

    def render_path(self, camera_name=''):
        fname = f'{self.sim_tag}_render_{camera_name}.png' if camera_name else f'{self.sim_tag}_render.png'
        return self.out_el / fname

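# Usage sketch (hypothetical folder layout; assumes ./system.json provides
# 'bodies_default_path' and 'body_samples_path', and that the referenced body
# files exist): resolve all standard paths for one garment element.
#
#   paths = PathCofig(
#       in_element_path='data/patterns/shirt_001',
#       out_path='output/shirts',
#       in_name='shirt_001',
#       body_name='mean_all',
#       default_body=True)
#   print(paths.in_g_spec)  # data/patterns/shirt_001/shirt_001_specification.json
#   print(paths.g_sim)      # output/shirts/shirt_001/shirt_001_sim.obj
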
class SimConfig:
    def __init__(self, sim_props):
        # Sim props sections
        self.props = sim_props
        sim_props_option = sim_props['options']
        sim_props_material = sim_props['material']

        # Basic setup
        self.sim_fps = 60.0
        self.sim_substeps = 10  # increase?
        self.sim_wo_gravity_percentage = 0
        self.zero_gravity_steps = self.get_sim_props_value(sim_props, 'zero_gravity_steps', 5)
        self.resolution_scale = self.get_sim_props_value(sim_props, 'resolution_scale', 1.0)
        self.ground = self.get_sim_props_value(sim_props, 'ground', True)

        # Stopping criteria
        self.static_threshold = self.get_sim_props_value(sim_props, 'static_threshold', 0.01)
        self.max_sim_steps = self.get_sim_props_value(sim_props, 'max_sim_steps', 1000)
        self.max_frame_time = self.get_sim_props_value(sim_props, 'max_frame_time', None)
        if self.max_frame_time is not None:
            self.max_frame_time = int(self.max_frame_time)
        self.max_sim_time = int(self.get_sim_props_value(sim_props, 'max_sim_time', 25 * 60))
        self.non_static_percent = self.get_sim_props_value(sim_props, 'non_static_percent', 5)

        # Quality filter
        self.max_body_collisions = self.get_sim_props_value(sim_props, 'max_body_collisions', 0)
        self.max_self_collisions = self.get_sim_props_value(sim_props, 'max_self_collisions', 0)

        # Self-collision prevention properties
        self.enable_particle_particle_collisions = self.get_sim_props_value(
            sim_props_option, 'enable_particle_particle_collisions', False)
        self.enable_triangle_particle_collisions = self.get_sim_props_value(
            sim_props_option, 'enable_triangle_particle_collisions', False)
        self.enable_edge_edge_collisions = self.get_sim_props_value(
            sim_props_option, 'enable_edge_edge_collisions', False)
        self.enable_body_collision_filters = self.get_sim_props_value(
            sim_props_option, 'enable_body_collision_filters', False)

        # Attachment constraints
        self.enable_attachment_constraint = self.get_sim_props_value(
            sim_props_option, 'enable_attachment_constraint', False)
        self.attachment_labels = self.get_sim_props_value(
            sim_props_option, 'attachment_label_names', [])
        self.attachment_frames = self.get_sim_props_value(
            sim_props_option, 'attachment_frames', 100)
        self.attachment_stiffness = self.get_sim_props_value(
            sim_props_option, 'attachment_stiffness', [])
        self.attachment_damping = self.get_sim_props_value(
            sim_props_option, 'attachment_damping', [])
        if not self.attachment_frames or not self.attachment_labels:
            self.enable_attachment_constraint = False

        # Global damping properties
        self.global_damping_factor = self.get_sim_props_value(
            sim_props_option, 'global_damping_factor', 1.)
        self.global_damping_effective_velocity = self.get_sim_props_value(
            sim_props_option, 'global_damping_effective_velocity', 0.0)
        self.global_max_velocity = self.get_sim_props_value(
            sim_props_option, 'global_max_velocity', 50.0)

        # Cloth global collision resolution (reference drag) options
        self.enable_global_collision_filter = self.get_sim_props_value(
            sim_props_option, 'enable_global_collision_filter', False)
        self.enable_cloth_reference_drag = self.get_sim_props_value(
            sim_props_option, 'enable_cloth_reference_drag', False)
        self.cloth_reference_margin = self.get_sim_props_value(
            sim_props_option, 'cloth_reference_margin', 0.1)
        self.cloth_reference_k = self.get_sim_props_value(
            sim_props_option, 'cloth_reference_k', 1.0e7)

        # Body smoothing options
        self.enable_body_smoothing = self.get_sim_props_value(
            sim_props_option, 'enable_body_smoothing', True)
        self.smoothing_total_smoothing_factor = self.get_sim_props_value(
            sim_props_option, 'smoothing_total_smoothing_factor', 1)
        self.smoothing_recover_start_frame = self.get_sim_props_value(
            sim_props_option, 'smoothing_recover_start_frame', 0)
        self.smoothing_frame_gap_between_steps = self.get_sim_props_value(
            sim_props_option, 'smoothing_frame_gap_between_steps', 5)
        self.smoothing_num_steps = self.get_sim_props_value(
            sim_props_option, 'smoothing_num_steps', 100)
        self.smoothing_num_steps = max(min(
            self.smoothing_num_steps, self.max_sim_steps - self.smoothing_recover_start_frame),
            0)
        if self.smoothing_num_steps == 0:
            self.enable_body_smoothing = False

        # ----- Fabric material properties -----
        # Bending
        self.garment_edge_ke = self.get_sim_props_value(
            sim_props_material, 'garment_edge_ke', 50000.0)  # default = 100.0
        self.garment_edge_kd = self.get_sim_props_value(
            sim_props_material, 'garment_edge_kd', 10.0)  # default = 0.0

        # Area preservation
        self.garment_tri_ke = self.get_sim_props_value(
            sim_props_material, 'garment_tri_ke', 10000.0)  # default = 100.0; smaller values = more elasticity
        self.garment_tri_kd = self.get_sim_props_value(
            sim_props_material, 'garment_tri_kd', 1.0)  # default = 10.0
        self.garment_tri_ka = self.get_sim_props_value(
            sim_props_material, 'garment_tri_ka', 10000.0)  # default = 100.0
        self.garment_tri_drag = 0.0  # default = 0.0
        self.garment_tri_lift = 0.0  # default = 0.0

        # Thickness
        self.garment_density = self.get_sim_props_value(
            sim_props_material, 'fabric_density', 1.0)
        self.garment_radius = self.get_sim_props_value(
            sim_props_material, 'fabric_thickness', 0.1)

        # Spring properties (distance constraints)
        self.spring_ke = self.get_sim_props_value(
            sim_props_material, 'spring_ke', 50000)
        self.spring_kd = self.get_sim_props_value(
            sim_props_material, 'spring_kd', 10.0)

        # Soft contact properties (contact between cloth and body)
        self.soft_contact_margin = 0.2
        self.soft_contact_ke = 1000.0
        self.soft_contact_kd = 10.0
        self.soft_contact_kf = 1000.0
        self.soft_contact_mu = self.get_sim_props_value(
            sim_props_material, 'fabric_friction', 0.5)

        # Body material
        self.body_thickness = self.get_sim_props_value(sim_props_option, 'body_collision_thickness', 0.0)
        self.body_friction = self.get_sim_props_value(sim_props_option, 'body_friction', 0.5)

        # Particle properties
        # Some default values -- not used in cloth sim
        self.particle_ke = 1.0e3
        self.particle_kd = 1.0e2
        self.particle_kf = 100.0
        self.particle_mu = 0.5
        self.particle_cohesion = 0.0
        self.particle_adhesion = 0.0

        # After the initialization
        self.update_min_steps()

    def update_min_steps(self):
        self.min_sim_steps = 0
        if self.enable_body_smoothing:
            self.min_sim_steps = self.smoothing_recover_start_frame + self.smoothing_num_steps
        if self.enable_attachment_constraint:
            # NOTE: Adding a small number of frames
            # to allow clothing movement to restart after attachment is released
            self.min_sim_steps = max(self.min_sim_steps, self.attachment_frames + 5)

    def get_sim_props_value(self, sim_props, name, default_value):
        if name in sim_props:
            return sim_props[name]
        return default_value

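# Configuration sketch (hypothetical values; only keys that deviate from the
# defaults above need to be present -- get_sim_props_value() falls back otherwise):
#
#   sim_config = SimConfig({
#       'max_sim_steps': 500,
#       'static_threshold': 0.02,
#       'options': {
#           'enable_body_smoothing': True,
#           'enable_attachment_constraint': True,
#           'attachment_label_names': ['shoulder'],
#       },
#       'material': {
#           'fabric_friction': 0.4,
#           'garment_tri_ke': 5000.0,
#       },
#   })
#   print(sim_config.min_sim_steps)  # 105: max(smoothing 0 + 100, attachment 100 + 5)
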
258
pygarment/meshgen/simulation.py
Normal file
@@ -0,0 +1,258 @@
# Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved.
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.

###########################################################################
# Example Sim Cloth
#
# Shows a simulation of an FEM cloth model colliding against a static
# rigid body mesh using the wp.sim.ModelBuilder().
#
###########################################################################

import sys
import time
import traceback
import platform
import multiprocessing
import signal
import trimesh

# Warp
import warp as wp

# Custom code
from pygarment.meshgen.render.pythonrender import render_images
from pygarment.meshgen.garment import Cloth
from pygarment.meshgen.sim_config import SimConfig, PathCofig

wp.init()

class SimulationError(BaseException):
    """To be raised when panel stitching cannot be executed correctly"""
    pass


class FrameTimeOutError(BaseException):
    """To be raised when a frame takes too long to simulate"""
    pass


class SimTimeOutError(BaseException):
    """To be raised when the simulation takes too long"""
    pass

def optimize_garment_storage(paths: PathCofig):
    """Prepare the data element for compact storage: store the meshes as ply instead of obj,
        remove texture files
    """
    # Objs to ply
    try:
        boxmesh = trimesh.load(paths.g_box_mesh)
        boxmesh.export(paths.g_box_mesh_compressed)
        paths.g_box_mesh.unlink()
    except BaseException:
        pass

    try:
        simmesh = trimesh.load(paths.g_sim)
        simmesh.export(paths.g_sim_compressed)
        paths.g_sim.unlink()
    except BaseException:
        pass

    # Remove the large texture file and mtl -- not strictly necessary
    paths.g_texture_fabric.unlink(missing_ok=True)
    paths.g_mtl.unlink(missing_ok=True)


def update_progress(progress, total):
    """Progress bar in console"""
    # https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
    amtDone = progress / total
    num_dash = int(amtDone * 50)
    sys.stdout.write('\rProgress: [{0:50s}] {1:.1f}%'.format('#' * num_dash + '-' * (50 - num_dash), amtDone * 100))
    sys.stdout.flush()

def _run_frame_with_timeout(garment, frame_timeout, frame_num):
    """Run a frame while keeping a cap on the time it may take"""
    try:
        if platform.system() == "Windows":
            # https://stackoverflow.com/a/14920854

            if frame_num == 0:  # only use a subprocess on the first frame due to slowdown
                # NOTE: pass the method itself as the target; calling it
                # (garment.run_frame()) would execute the frame in the current process
                p_frame = multiprocessing.Process(target=garment.run_frame, name="FrameSimulation")
                p_frame.start()

                # Wait frame_timeout seconds for garment.run_frame()
                p_frame.join(frame_timeout)

                # If the process is still running
                if p_frame.is_alive():
                    # Terminate the process
                    p_frame.terminate()
                    p_frame.join()
                    raise TimeoutError
            else:
                garment.run_frame()

        elif platform.system() in ["Linux", "Darwin"]:  # NOTE: platform.system() reports macOS as "Darwin"
            # https://code-maven.com/python-timeout

            def alarm_handler(signum, frame):
                raise TimeoutError

            signal.signal(signal.SIGALRM, alarm_handler)
            signal.alarm(frame_timeout)
            try:
                garment.run_frame()
            except TimeoutError:
                raise
            else:
                signal.alarm(0)

    except TimeoutError:
        raise FrameTimeOutError

def sim_frame_sequence(garment, config, store_usd=False, verbose=False):

    # Save the initial state
    if store_usd:
        garment.render_usd_frame()

    start_time = time.time()
    for frame in range(0, config.max_sim_steps):

        if verbose:
            print(f'\n------ Frame {frame + 1} ------')
        else:
            update_progress(frame, config.max_sim_steps)

        garment.frame = frame

        # Run the frame; raises FrameTimeOutError if the frame takes too long to simulate
        static = False
        if config.max_frame_time is None:
            # No frame time limits
            garment.run_frame()
        else:
            # NOTE: frame timeouts only work in the main thread of the program.
            # Disable frame timeouts by passing 'null' as the max_frame_time parameter in the config
            _run_frame_with_timeout(
                garment,
                frame_timeout=config.max_frame_time if frame > 0 else config.max_frame_time * 2,
                frame_num=frame
            )

        if verbose:
            num_cloth_cloth_contacts = garment.count_self_intersections()
            print(f'\nSelf-Intersection: {num_cloth_cloth_contacts}')

        if frame >= config.zero_gravity_steps and frame >= config.min_sim_steps:
            static, _ = garment.is_static()
            if static:
                break

        runtime = time.time() - start_time
        if runtime > config.max_sim_time:
            raise SimTimeOutError


def run_sim(
        cloth_name, props, paths: PathCofig,
        save_v_norms=False, store_usd=False,
        optimize_storage=False,
        verbose=False):
    """Initialize and run the simulation

        !! Important !!
        The 'store_usd' parameter slows the simulation down to CPU rates because of the
        required CPU-GPU copies and file writes. Use it only for debugging
    """
    sim_props = props['sim']
    render_props = props['render']

    start_time = time.time()

    config = SimConfig(sim_props['config'])  # Why a separate class at all?
    garment = Cloth(cloth_name, config, paths, caching=store_usd)

    try:
        print("Simulation..")
        sim_frame_sequence(garment, config, store_usd, verbose=verbose)

    except FrameTimeOutError:
        print(f"FrameTimeOutError at frame {garment.frame}")
        props.add_fail('sim', 'frame_timeout', cloth_name)
    except SimTimeOutError:
        print("SimTimeOutError")
        props.add_fail('sim', 'simulation_timeout', cloth_name)
    except SimulationError:
        print("Simulation failed")
        props.add_fail('sim', 'gt_edges_creation', cloth_name)
    except BaseException as e:
        print(f'Sim::{cloth_name}::crashed with {e}')

        if isinstance(e, KeyboardInterrupt):
            # Allow stopping simulation loops with a keyboard interrupt.
            # It's not a real crash, so don't write down the failure
            sec = round(time.time() - start_time, 3)
            mins = int(sec / 60)
            print(f"Simulation pipeline took: {mins} m {sec - mins * 60} s")
            raise e

        traceback.print_exc()
        props.add_fail('sim', 'crashes', cloth_name)
    else:  # Other quality checks
        if garment.frame == config.max_sim_steps - 1:
            _, non_st_count = garment.is_static()
            print('\nFailed to achieve static equilibrium for {} with {} non-static vertices out of {}'.format(
                cloth_name, non_st_count, len(garment.current_verts)))
            props.add_fail('sim', 'static_equilibrium', cloth_name)

        if time.time() - start_time < 0.5:  # 0.5 sec -- finished suspiciously fast
            props.add_fail('sim', 'fast_finish', cloth_name)

        # 3D penetrations
        num_body_collisions = garment.count_body_intersections()
        print("BODY CLOTH INTERSECTIONS: ", num_body_collisions)
        num_self_collisions = garment.count_self_intersections()

        sim_props['stats']['body_collisions'][cloth_name] = num_body_collisions
        sim_props['stats']['self_collisions'][cloth_name] = num_self_collisions

        if num_body_collisions > config.max_body_collisions:
            props.add_fail('sim', 'cloth_body_intersection', cloth_name)
        if num_self_collisions:
            print(f'Self-Intersecting with {num_self_collisions}, '
                  f'is fail: {num_self_collisions > config.max_self_collisions}')
            if num_self_collisions > config.max_self_collisions:
                props.add_fail('sim', 'cloth_self_intersection', cloth_name)
        else:
            print('Not self-intersecting!!!')

    # ---- Postprocessing ----
    # NOTE: Attempted even on failures for an accurate picture and post-analysis
    frame = garment.frame
    print(f"\nSimulation took #frames={frame + 1}")

    sim_props['stats']['sim_time'][cloth_name] = sim_time = time.time() - start_time
    sim_props['stats']['spf'][cloth_name] = sim_time / frame if frame else sim_time
    sim_props['stats']['fin_frame'][cloth_name] = frame

    garment.save_frame(save_v_norms=save_v_norms)  # saving after stats

    # Render images
    s_time = time.time()
    render_images(paths, garment.v_body, garment.f_body, render_props['config'])
    render_image_time = time.time() - s_time
    render_props['stats']['render_time'][cloth_name] = render_image_time
    print(f"Rendering {cloth_name} took {render_image_time}s")

    if optimize_storage:
        optimize_garment_storage(paths)

    # Final info output
    sec = round(time.time() - start_time, 3)
    mins = int(sec / 60)
    print(f"\nSimulation pipeline took: {mins} m {sec - mins * 60} s")

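# Usage sketch (hypothetical paths; assumes a properties file with 'sim' and
# 'render' sections -- each with a 'config' and 'stats' subsection -- as used
# by the batch pipeline):
#
#   from pygarment.data_config import Properties
#   from pygarment.meshgen.sim_config import PathCofig
#
#   props = Properties('assets/sim_props.yaml')
#   paths = PathCofig(
#       in_element_path='data/patterns/shirt_001', out_path='output/shirts',
#       in_name='shirt_001', body_name='mean_all')
#   run_sim('shirt_001', props, paths, verbose=True)
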
313
pygarment/meshgen/triangulation_utils.py
Normal file
@@ -0,0 +1,313 @@
"""Helper functions for the triangulation of the panels"""
|
||||
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
# CGAL 2D
|
||||
import CGAL.CGAL_Kernel
|
||||
from CGAL.CGAL_Kernel import Point_2
|
||||
from CGAL.CGAL_Mesh_2 import Mesh_2_Constrained_Delaunay_triangulation_2
|
||||
from CGAL.CGAL_Mesh_2 import Delaunay_mesh_size_criteria_2
|
||||
from CGAL import CGAL_Mesh_2
|
||||
from CGAL.CGAL_Triangulation_2 import Constrained_Delaunay_triangulation_2
|
||||
|
||||
|
||||
class FaceInfo2(object):
    """
    https://github.com/CGAL/cgal-swig-bindings/blob/main/examples/python/polygonal_triangulation.py#L9
    """
    def __init__(self):
        self.nesting_level = -1

    def in_domain(self):
        # NOTE: the domain marking below starts the infinite face at nesting level 1,
        # so faces inside the constrained boundary end up with an even nesting level
        return (self.nesting_level % 2) != 1


def mark_domains(ct, start_face, index, edge_border, face_info):
    """
    https://github.com/CGAL/cgal-swig-bindings/blob/main/examples/python/polygonal_triangulation.py#L17
    """
    if face_info[start_face].nesting_level != -1:
        return
    queue = [start_face]
    while queue != []:
        fh = queue[0]  # queue.front
        queue = queue[1:]  # queue.pop_front
        if face_info[fh].nesting_level == -1:
            face_info[fh].nesting_level = index
            for i in range(3):
                e = (fh, i)
                n = fh.neighbor(i)
                if face_info[n].nesting_level == -1:
                    if ct.is_constrained(e):
                        edge_border.append(e)
                    else:
                        queue.append(n)


def mark_domain(cdt):
    """Find a mapping that can be tested to see if a face is in a domain

    Explore the set of facets connected with non constrained edges,
    and attribute to each such set a nesting level.

    We start from the facets incident to the infinite vertex, with a
    nesting level of 1. Then we recursively consider the non-explored
    facets incident to constrained edges bounding the former set and
    increase the nesting level by 1.

    Facets in the domain are those with an even nesting level
    (see FaceInfo2.in_domain).

    https://github.com/CGAL/cgal-swig-bindings/blob/main/examples/python/polygonal_triangulation.py#L36
    """
    face_info = {}
    for face in cdt.all_faces():
        face_info[face] = FaceInfo2()
    index = 0
    border = []
    mark_domains(cdt, cdt.infinite_face(), index + 1, border, face_info)
    while border != []:
        e = border[0]  # border.front
        border = border[1:]  # border.pop_front
        n = e[0].neighbor(e[1])
        if face_info[n].nesting_level == -1:
            lvl = face_info[e[0]].nesting_level + 1
            mark_domains(cdt, n, lvl, border, face_info)
    return face_info

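# Demo sketch (assumes the CGAL swig bindings imported above): triangulate a
# unit square, constrain its boundary, and count the faces marked as inside it.
#
#   cdt_demo = Mesh_2_Constrained_Delaunay_triangulation_2()
#   corners = [cdt_demo.insert(Point_2(x, y))
#              for x, y in [(0., 0.), (1., 0.), (1., 1.), (0., 1.)]]
#   for a, b in zip(corners, corners[1:] + corners[:1]):
#       cdt_demo.insert_constraint(a, b)  # boundary PSLG
#
#   info = mark_domain(cdt_demo)
#   inside = [f for f in cdt_demo.finite_faces() if info[f].in_domain()]
#   print(len(inside))  # 2 -- the square splits into two triangles, both in-domain
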
def plot_triangulation(cdt, face_info):
    """
    https://github.com/CGAL/cgal-swig-bindings/blob/main/examples/python/polygonal_triangulation.py#L77
    """
    def rescale_plot(ax, scale=1.1):
        xmin, xmax = ax.get_xlim()
        ymin, ymax = ax.get_ylim()
        xmid = (xmin + xmax) / 2.0
        ymid = (ymin + ymax) / 2.0
        xran = xmax - xmid
        yran = ymax - ymid
        ax.set_xlim(xmid - xran * scale, xmid + xran * scale)
        ax.set_ylim(ymid - yran * scale, ymid + yran * scale)

    def plot_edge(edge, *args):
        edge_seg = cdt.segment(edge)
        pts = [edge_seg.source(), edge_seg.target()]
        xs = [pts[0].x(), pts[1].x()]
        ys = [pts[0].y(), pts[1].y()]
        plt.plot(xs, ys, *args)

    for edge in cdt.finite_edges():
        if cdt.is_constrained(edge):
            plot_edge(edge, 'r-')  # constrained (boundary) edges in red
        else:
            if face_info[edge[0]].in_domain():
                plot_edge(edge, 'b-')  # interior edges in blue
    rescale_plot(plt.gca())
    plt.show()


def get_edge_vert_ids(edges):
    """
    This function returns a list of index pairs of edge vertices into their corresponding
    panel.panel_vertices defining the border of the panel.
    Input:
        * edges (list): All edges of a panel
    Output:
        * zipped_array (ndarray): ndarray of start and end indices of edge vertices into panel.vertices defining
          the line segments of the panel edges (e.g. [[0,1],[1,2],[2,3],...,[19,20],[20,0]])
    """
    zipped_array = np.empty((0, 2))
    for edge in edges:
        edge_verts_ids = edge.vertex_range
        rolled_list = np.roll(edge_verts_ids, 1, axis=0)
        zipped_array_edge = np.stack((rolled_list, edge_verts_ids), axis=1)[1:]
        zipped_array = np.concatenate((zipped_array, zipped_array_edge), axis=0)

    return zipped_array.astype(int)

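# Demo of the roll/stack trick above on a plain index list (a hypothetical
# vertex_range of one edge): consecutive ids are paired into line segments.
#
#   ids = np.array([4, 5, 6, 7])
#   segments = np.stack((np.roll(ids, 1), ids), axis=1)[1:]
#   print(segments)  # [[4 5] [5 6] [6 7]] -- the closing segment comes from the next edge
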
def create_cdt_points(cdt, points):
    """
    This function converts the edge vertices to Point_2 objects (if necessary) and inserts them into cdt
    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
        * points (list): The edge vertices
    Output:
        * cdt_points (list): Mesh_2_Constrained_Delaunay_triangulation_2_Vertex_handle of the edge vertices
    """
    cdt_points = []
    for p in points:
        if isinstance(p, CGAL.CGAL_Kernel.Point_2):
            v = cdt.insert(p)
        else:
            x, y = p
            v = cdt.insert(Point_2(float(x), float(y)))

        cdt_points.append(v)

    return cdt_points


def cdt_insert_constraints(cdt, cdt_points, edge_verts_ids):
    """
    This function defines a planar straight line graph (PSLG) for cdt which represents the boundary
    of the mesh and acts as a constraint of cdt. The function returns a dict of the newly inserted
    points containing the indices they get replaced by.
    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
        * cdt_points (list): Mesh_2_Constrained_Delaunay_triangulation_2_Vertex_handle of points
        * edge_verts_ids (ndarray): indices into cdt_points of edge vertices
    Output:
        * new_points (dict): Dict with indices into cdt.finite_vertices() of newly inserted points (between
          cdt_points[s_id] and cdt_points[e_id]) as keys. The values of the dict are the respective s_ids
          which replace the indices of the newly inserted points later.
    """
    init_len = cdt.number_of_vertices()
    new_points = {}  # [id into cdt.finite_vertices()] -> [replace by this id into cdt.finite_vertices()]

    for s_id, e_id in edge_verts_ids:
        start = cdt_points[s_id]
        end = cdt_points[e_id]
        cdt.insert_constraint(start, end)

        # Inserting a constraint may create extra vertices; remember them
        # so they can be merged back later
        num_verts = cdt.number_of_vertices()
        if init_len != num_verts:
            new_points[num_verts - 1] = s_id
            init_len = num_verts
            print('triangulation_utils::INFO::Generated extra boundary points for cdt constraints. Postprocessing will be performed')

    return new_points


def get_face_v_ids(cdt, points, new_points, check=False, plot=False):
    """
    This function returns the faces of cdt as a list of ints instead of vertex handles.
    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
        * points (list): Mesh vertices (with the newly inserted boundary vertices filtered out)
        * new_points (dict): Dict with indices into cdt.finite_vertices() of newly inserted points (if existent)
          as keys. The values of the dict are the indices replacing the indices of the newly inserted points.
        * check (bool): if True, checks that the coordinates of each vertex handle equal the point coordinates
        * plot (bool): if True, plots the resulting triangulation
    Output:
        * f (list): (N x 3) list of vertex indices describing the faces

    Note: We first replace each vertex handle's coordinates by the vertex index into points / cdt_points,
    because face handles store vertex coordinates rather than indices into points -> speeds up the creation of f
    """
    face_v_ids = []

    if new_points:
        sorted_faces = []
        new_points_ids = new_points.keys()

    pts = list(cdt.finite_vertices())

    if check:
        len_points = len(points)
        for i, v_h in enumerate(pts):
            first_temp = v_h.point()
            first = [first_temp.x(), first_temp.y()]

            if not new_points or i < len_points:
                second = points[i]

            if (not new_points or i < len_points) and (first[0] != second[0] or first[1] != second[1]):
                raise ValueError("coords of vertex handle from face vertex does not equal point coords")
            # Stash the vertex index in the point's x coordinate
            v_h.set_point(Point_2(i, 0.0))

    else:
        for i, v_h in enumerate(pts):
            v_h.set_point(Point_2(i, 0.0))

    # Keep faces that are in the domain
    face_info_new = mark_domain(cdt)

    for face in cdt.finite_faces():
        if face_info_new[face].in_domain():
            v0_id = int(face.vertex(0).point().x())
            v1_id = int(face.vertex(1).point().x())
            v2_id = int(face.vertex(2).point().x())

            if new_points:
                # Map newly inserted boundary vertices back to their originals
                v_ids = [v0_id, v1_id, v2_id]
                for j, v_id in enumerate(v_ids):
                    if v_id in new_points_ids:
                        v_ids[j] = new_points[v_id]

                # Check that the face did not collapse to an edge/point and is not already inserted
                if not (v_ids[0] == v_ids[1] or v_ids[1] == v_ids[2] or v_ids[0] == v_ids[2]) \
                        and not (sorted_faces and np.any(np.all(np.array(sorted_faces) == sorted(v_ids), axis=1))):
                    face_v_ids.append(v_ids)
                    sorted_faces.append(sorted(v_ids))
            else:
                face_v_ids.append([v0_id, v1_id, v2_id])

    if plot:
        plot_triangulation(cdt, face_info_new)

    f = np.array(face_v_ids)
    return f


def get_faces_sorted(cdt):
    """
    This function returns the faces of cdt as a list of *sorted* ints instead of vertex handles.
    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
    Output:
        * f (ndarray): (N x 3) *sorted* list of vertex indices describing the faces
        * points (list): The vertices of cdt whose coordinates have been converted to floats
    """

    face_v_ids = []

    pts = list(cdt.finite_vertices())
    points = []

    for i, v_h in enumerate(pts):
        points.append([v_h.point().x(), v_h.point().y()])
        # Stash the vertex index in the point's x coordinate (see get_face_v_ids)
        v_h.set_point(Point_2(i, 0.0))

    # Keep faces that are in the domain
    face_info_new = mark_domain(cdt)

    for face in cdt.finite_faces():
        if face_info_new[face].in_domain():
            v0_id = int(face.vertex(0).point().x())
            v1_id = int(face.vertex(1).point().x())
            v2_id = int(face.vertex(2).point().x())

            sorted_ids = sorted([v0_id, v1_id, v2_id])

            face_v_ids.append(sorted_ids)

    f = np.array(face_v_ids)
    return f, points


def get_keep_vertices(cdt, len_b):
    """
    This function filters out the newly inserted boundary vertices from cdt after executing the CGAL mesh generation.
    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
        * len_b (int): Number of edge vertices, i.e., vertices forming the panel boundary
    Output:
        * keep_vertices: vertices of cdt without the newly inserted boundary points
    """
    faces, points = get_faces_sorted(cdt)
    # Boundary edges belong to exactly one face, so they appear exactly once
    edges = np.concatenate([faces[:, :2], faces[:, 1:], faces[:, ::2]])
    unique_edges, counts = np.unique(np.array(edges), axis=0, return_counts=True)
    unique_occurring_edges = unique_edges[counts == 1]
    all_bdry_v_ids = np.unique(unique_occurring_edges.flatten())
    new_bdry_v_ids = all_bdry_v_ids[all_bdry_v_ids >= len_b]

    # Remove the new boundary vertices
    keep_vertices = np.delete(points, new_bdry_v_ids, axis=0)

    return list(keep_vertices)

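# Demo of the boundary detection above (hypothetical 2-triangle mesh sharing
# edge (1, 2)): the shared edge appears twice, boundary edges only once.
#
#   faces_demo = np.array([[0, 1, 2], [1, 2, 3]])  # per-face sorted ids
#   edges_demo = np.concatenate([faces_demo[:, :2], faces_demo[:, 1:], faces_demo[:, ::2]])
#   uniq, counts = np.unique(edges_demo, axis=0, return_counts=True)
#   print(uniq[counts == 1])  # [[0 1] [0 2] [1 3] [2 3]] -- boundary edges only
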
def is_manifold(face_v_ids: np.ndarray, points: np.ndarray, tol=1e-2):
    """Check that the 2D mesh is manifold -- all face triangles are proper triangles"""

    faces = points[face_v_ids]
    face_side_1 = np.linalg.norm(faces[:, 0] - faces[:, 1], axis=1)
    face_side_2 = np.linalg.norm(faces[:, 1] - faces[:, 2], axis=1)
    face_side_3 = np.linalg.norm(faces[:, 0] - faces[:, 2], axis=1)
    side_lengths = np.stack([face_side_1, face_side_2, face_side_3], axis=-1)

    # Strict triangle inequality with tolerance: degenerate (near-collinear)
    # triangles have a perimeter approximately equal to twice their longest side
    return np.all(side_lengths.sum(axis=1) > 2 * side_lengths.max(axis=1) + tol)

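# Demo (hypothetical points): a collinear triangle fails the check,
# a proper one passes.
#
#   pts_demo = np.array([[0., 0.], [1., 0.], [2., 0.], [1., 1.]])
#   print(is_manifold(np.array([[0, 1, 2]]), pts_demo))  # False -- collinear
#   print(is_manifold(np.array([[0, 1, 3]]), pts_demo))  # True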