init_code
This commit is contained in:
26
pygarment/__init__.py
Normal file
26
pygarment/__init__.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""
|
||||
A Python library for building parametric sewing pattern programs
|
||||
"""
|
||||
|
||||
# Building blocks
|
||||
from pygarment.garmentcode.component import Component
|
||||
from pygarment.garmentcode.panel import Panel
|
||||
from pygarment.garmentcode.edge import Edge, CircleEdge, CurveEdge, EdgeSequence
|
||||
from pygarment.garmentcode.connector import Stitches
|
||||
from pygarment.garmentcode.interface import Interface
|
||||
from pygarment.garmentcode.edge_factory import EdgeSeqFactory
|
||||
from pygarment.garmentcode.edge_factory import CircleEdgeFactory
|
||||
from pygarment.garmentcode.edge_factory import EdgeFactory
|
||||
from pygarment.garmentcode.edge_factory import CurveEdgeFactory
|
||||
|
||||
|
||||
# Operations
|
||||
import pygarment.garmentcode.operators as ops
|
||||
import pygarment.garmentcode.utils as utils
|
||||
|
||||
# Parameter support
|
||||
from pygarment.garmentcode.params import BodyParametrizationBase, DesignSampler
|
||||
|
||||
# Errors
|
||||
from pygarment.pattern.core import EmptyPatternError
|
||||
|
||||
400
pygarment/data_config.py
Normal file
400
pygarment/data_config.py
Normal file
@@ -0,0 +1,400 @@
|
||||
"""
|
||||
The module contains the Properties class to manage parameters & stats in various parts of the system
|
||||
"""
|
||||
|
||||
from datetime import timedelta
|
||||
import json
|
||||
import yaml
|
||||
from numbers import Number
|
||||
import traceback
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import numpy as np
|
||||
|
||||
# for system info
|
||||
import platform
|
||||
import psutil
|
||||
|
||||
# --- Nice dumping of floats ---
|
||||
def float_representer(dumper, data):
    """Custom YAML representer: dump floats with ~3 significant digits.

    Registered via ``yaml.add_representer(float, float_representer)`` so all
    ``yaml.dump`` calls in this module produce compact float scalars.

    Parameters:
        * dumper -- the yaml Dumper/Representer invoking this callback
        * data -- the float value to serialize

    Returns the scalar node produced by ``dumper.represent_scalar()``.
    """
    if data != data or (data == 0.0 and data == 1.0):
        # NaN check, same as in PyYAML's own represent_float
        # (NaN != NaN; the second clause guards exotic platforms)
        value = '.nan'
    elif data == dumper.inf_value:
        value = '.inf'
    elif data == -dumper.inf_value:
        value = '-.inf'
    else:
        # Custom representation:
        # https://stackoverflow.com/a/33944926
        value = f'{data:.3g}'
        # 'e' notation appears for very large or very small magnitudes
        if 'e' in value and abs(data) < 1:
            # FIX: small floats (e.g. 1e-05) used to be collapsed to '0.0'
            # by the int() expansion below -- keep scientific notation,
            # which YAML parses as a float anyway
            pass
        elif '.' not in value or 'e' in value:
            # An integer hidden as a float (e.g. '2' or '1e+20') --
            # expand to an explicit float literal
            value = f'{int(data):d}.0'

    return dumper.represent_scalar('tag:yaml.org,2002:float', value)
|
||||
# Register the compact float style above for all subsequent yaml.dump() calls
yaml.add_representer(float, float_representer)
|
||||
|
||||
|
||||
# --- Main class ----
|
||||
# --- Main class ----
class Properties():
    """Keeps, loads, and saves configuration & statistics information.

    Supports gets & sets as a dictionary.
    Provides shortcuts for batch-init of configurations.

    One of the usages -- store system-dependent basic configuration.
    """
    def __init__(self, filename="", clean_stats=False):
        """Create empty properties, or load them from a .json/.yaml file.

        Parameters:
            * filename -- optional properties file to initialize from
            * clean_stats -- when True, empty all 'stats' subsections right
              after loading (only makes sense with a filename)
        """
        self.properties = {}
        # Pristine copy of the file content as loaded -- used as a fallback
        # backup if saving the current state fails (see serialize())
        self.properties_on_load = {}

        if filename:
            # Load twice to get two independent copies
            self.properties = self._from_file(filename)
            self.properties_on_load = self._from_file(filename)
            if clean_stats:  # only makes sense when initialized from file =)
                self.clean_stats(self.properties)

    # ---- Base utils ----
    def has(self, key):
        """Used to query if a top-level property/section is already defined"""
        return key in self.properties

    def serialize(self, filename, backup=None):
        """Log current props to file. If logging failed, at least restore
        provided backup or originally loaded props.

        Parameters:
            * filename -- target path; suffix selects format (.json / .yaml)
            * backup -- expected to be a Properties object; serialized in
              place of self on failure

        Raises:
            RuntimeError -- if saving the current properties failed (after
            writing the backup version instead)
        """
        try:
            extension = Path(filename).suffix.lower()
            if extension == '.json':
                with open(filename, 'w') as f_json:
                    json.dump(self.properties, f_json, indent=2, sort_keys=True)
            elif extension == '.yaml':
                with open(filename, 'w') as f:
                    yaml.dump(
                        self.properties,
                        f,
                        default_flow_style=False,
                        sort_keys=False
                    )
            else:
                raise ValueError(f'{self.__class__.__name__}::ERROR::Unsupported file type on serialization: {extension}')

        except Exception:
            print('Exception occurred while saving properties:')
            traceback.print_exception(*sys.exc_info())
            # Save a backup, s.t. the data is not lost due to interruption of
            # the file override
            if backup is not None:
                backup.serialize(filename)
            else:
                # NOTE(review): this fallback always writes JSON, even when
                # the filename has a .yaml suffix -- confirm intended
                with open(filename, 'w') as f_json:
                    json.dump(self.properties_on_load, f_json,
                              indent=2, sort_keys=True)
            raise RuntimeError('Error occurred while saving properties. Backup version is saved instead')

    def merge(self, filename="", clean_stats=False, re_write=True,
              adding_tag='added'):
        """Merge current set of properties with the one from file.

        Parameters:
            * re_write=True sets the default merging of Python dicts: values
              from new props overwrite the ones from the old props if keys
              are the same
            * re_write=False will keep both properties if their values are
              different (imported one marked with adding_tag)
        """
        new_props = self._from_file(filename)
        if clean_stats:
            self.clean_stats(new_props)
        # merge
        self._recursive_dict_update(self.properties, new_props, re_write, adding_tag)

    # --- Specialised utils (require domain knowledge) --

    def is_fail(self, dataname):
        """Check if a particular object is listed as fail in any of the
        sections.

        Fails may be listed in the stats subsection of any of the sections.
        """
        _, fails_list = self.count_fails()

        return dataname in fails_list

    def is_fail_section(self, dataname):
        """Check if a particular object is listed as fail in any of the
        sections, and return the name of the fail subsection it appears in.

        Returns:
            (True, <fail type key>) on a hit, (False, None) otherwise.

        Raises:
            NotImplementedError -- on a 'fails' entry that is neither a
            dict of lists nor a list.
        """
        for section_key in self.properties:
            section = self.properties[section_key]
            if isinstance(section, dict) and 'stats' in section and ('fails' in section['stats']):
                fails = section['stats']['fails']
                if isinstance(fails, dict):
                    for key in fails:
                        if not isinstance(fails[key], list):
                            raise NotImplementedError(
                                'Properties::ERROR:: Fails subsections of the type {} is not supported'.format(
                                    type(fails[key])))

                        if dataname in fails[key]:  # expects a list as value
                            return True, key

                elif isinstance(fails, list):
                    # FIX: the original indexed the list with an undefined
                    # loop variable (`fails[key]`) -- check membership directly
                    if dataname in fails:
                        return True, 'fails'
                else:
                    raise NotImplementedError('Properties::ERROR:: Fails subsections of the type {} is not supported'.format(type(fails)))

        return False, None

    def count_fails(self, log=False):
        """Number of (unique) datapoints marked as fail.

        Parameters:
            * log -- when True, also store the per-section unique fail count
              under 'fails_count' in each section's stats

        Returns:
            (count, list of unique fail names across all sections)
        """
        fails = []
        for section_key in self.properties:
            section = self.properties[section_key]
            section_fails = []
            if isinstance(section, dict) and 'stats' in section and ('fails' in section['stats']):
                fail_info = section['stats']['fails']
                if isinstance(fail_info, dict):
                    for key in fail_info:
                        if not isinstance(fail_info[key], list):
                            raise NotImplementedError(
                                'Properties::ERROR:: Fails subsections of the type {} is not supported'.format(
                                    type(fail_info[key])))

                        section_fails += fail_info[key]  # expects a list as value

                elif isinstance(fail_info, list):
                    section_fails += fail_info
                else:
                    raise NotImplementedError('Properties::Error:: Fails subsections of the type {} is not supported'.format(type(fail_info)))

                if log:
                    section['stats']['fails_count'] = len(list(set(section_fails)))

                fails += section_fails

        fails = list(set(fails))

        return len(fails), fails

    def add_fail(self, section_name, fail_type, info):
        """Write a failure case to a requested section's stats.

        Parameters:
            * section_name -- existing top-level section to log into
            * fail_type -- key under 'fails' to group the failure by
            * info -- identifier of the failed datapoint
        """
        section = self.properties[section_name]
        if 'fails' not in section['stats']:
            section['stats']['fails'] = {}
        try:
            # EAFP: append to the existing group or create it on KeyError
            section['stats']['fails'][fail_type].append(info)
        except KeyError:
            section['stats']['fails'][fail_type] = [info]

    # ---------- Properties updates ---------------
    def set_basic(self, **kwconfig):
        """Adds/updates info on the top level of properties.

        Only to be used for basic information!
        """
        for key, value in kwconfig.items():
            self.properties[key] = value

    def set_section_config(self, section, **kwconfig):
        """Adds or modifies a (top level) section and updates its
        configuration info.
        """
        # create new section
        if section not in self.properties:
            self.properties[section] = {
                'config': kwconfig,
                'stats': {}
            }
            return
        # section exists
        for key, value in kwconfig.items():
            self.properties[section]['config'][key] = value

    def set_section_stats(self, section, **kwstats):
        """Adds or modifies a (top level) section and updates its
        statistical info.
        """
        # create new section
        if section not in self.properties:
            self.properties[section] = {
                'config': {},
                'stats': kwstats
            }
            return
        # section exists
        for key, value in kwstats.items():
            self.properties[section]['stats'][key] = value

    def clean_stats(self, properties):
        """Remove info from all 'stats' subsections (in place)."""
        for _, value in properties.items():
            # detect section
            if isinstance(value, dict) and 'stats' in value:
                value['stats'] = {}

    def summarize_stats(self,
                        key,
                        log_sum=False, log_avg=False,
                        log_median=False, log_80=False, log_95=False,
                        log_min=False, log_max=False,
                        as_time=False):
        """Make a summary of requested key with requested statistics in
        current props.

        Parameters:
            * key -- stats entry to summarize; expected to be a non-empty
              list of numbers (or a dict with number values)
            * log_* -- which aggregates to store, saved next to the entry
              as '<key>_sum', '<key>_avg', '<key>_med', '<key>_p80',
              '<key>_p95', '<key>_min', '<key>_max'
            * as_time -- format aggregates as H:MM:SS strings (values
              treated as seconds)

        Returns:
            True if at least one section was updated.
        """
        updated = False
        for section in self.properties.values():
            # check all stats sections
            if isinstance(section, dict) and 'stats' in section:
                if key in section['stats']:
                    stats_values = section['stats'][key]
                    if isinstance(stats_values, dict):
                        stats_values = list(stats_values.values())

                    # summarize all foundable statistics
                    if isinstance(stats_values, list) and len(stats_values) > 0 and isinstance(stats_values[0], Number):
                        if log_sum:
                            section['stats'][key + "_sum"] = str(timedelta(seconds=sum(stats_values))) if as_time else sum(stats_values)
                            updated = True
                        if log_avg:
                            section['stats'][key + "_avg"] = sum(stats_values) / len(stats_values)
                            if as_time:
                                section['stats'][key + "_avg"] = str(timedelta(seconds=section['stats'][key + "_avg"]))
                            updated = True
                        if log_median:
                            section['stats'][key + "_med"] = str(timedelta(seconds=np.percentile(stats_values, 50))) if as_time else float(np.percentile(stats_values, 50))
                            updated = True
                        if log_80:
                            section['stats'][key + "_p80"] = str(timedelta(seconds=np.percentile(stats_values, 80))) if as_time else float(np.percentile(stats_values, 80))
                            updated = True
                        if log_95:
                            section['stats'][key + "_p95"] = str(timedelta(seconds=np.percentile(stats_values, 95))) if as_time else float(np.percentile(stats_values, 95))
                            updated = True
                        if log_min:
                            section['stats'][key + "_min"] = str(timedelta(seconds=min(stats_values))) if as_time else min(stats_values)
                            updated = True
                        if log_max:
                            section['stats'][key + "_max"] = str(timedelta(seconds=max(stats_values))) if as_time else max(stats_values)
                            updated = True
        return updated

    # -- Specialised updates (require domain knowledge) --
    def add_sys_info(self):
        """Add or update system information on the top level of config."""
        if sys.version_info.major < 3:
            raise NotImplementedError('{}::Requesting system info is not supported for Python 2'.format(self.__class__.__name__))

        # https://stackoverflow.com/questions/3103178/how-to-get-the-system-info-with-python
        self.properties['system_info'] = {}

        self.properties['system_info']['platform'] = platform.system()
        self.properties['system_info']['platform-release'] = platform.release()
        self.properties['system_info']['platform-version'] = platform.version()
        self.properties['system_info']['architecture'] = platform.machine()
        self.properties['system_info']['processor'] = platform.processor()
        self.properties['system_info']['ram'] = str(round(psutil.virtual_memory().total / (1024.0 ** 3))) + " GB"

        try:
            import warp  # Optional section
            if warp.context.runtime is None:
                warp.init()
            else:
                print(f'{self.__class__.__name__}::INFO::Saving GPU info -- warp already initialized')
            curr_device = warp.get_device()
            self.properties['system_info']['GPU'] = curr_device.name if curr_device.is_cuda else 'Not used'
        except ImportError:
            pass  # Don't do anything if warp not available

    def stats_summary(self):
        """Compute data simulation processing statistics."""
        updated_render = self.summarize_stats('render_time', log_sum=True, log_avg=True, as_time=True)
        updated_frames = self.summarize_stats('fin_frame', log_avg=True)
        updated_sim_time = self.summarize_stats('sim_time', log_sum=True, log_avg=True, as_time=True)
        updated_spf = self.summarize_stats('spf', log_avg=True, as_time=True)
        # Scan-imitation / geometry summaries: results are not part of the
        # warning check below, so return values are intentionally dropped
        self.summarize_stats('processing_time', log_sum=True, log_avg=True, as_time=True)
        self.summarize_stats('faces_removed', log_avg=True)

        updated_self_collisions = self.summarize_stats(
            'self_collisions', log_avg=True, log_median=True, log_80=True, log_95=True)
        updated_body_collisions = self.summarize_stats(
            'body_collisions', log_avg=True, log_median=True, log_80=True, log_95=True)

        self.summarize_stats(
            'face_count', log_avg=True, log_median=True, log_min=True, log_max=True)
        self.summarize_stats(
            'panel_count', log_avg=True, log_median=True, log_min=True, log_max=True)

        # fails
        self.count_fails(log=True)

        if not (updated_frames and updated_render and updated_sim_time and updated_spf and updated_self_collisions and updated_body_collisions):
            print(f'{self.__class__.__name__}::WARNING::Sim stats summary '
                  'requested, but not all sections were updated')

    # ---- Private utils ----
    def _from_file(self, filename):
        """Load properties from a previously created .json/.yaml file."""
        extension = Path(filename).suffix.lower()
        if extension == '.json':
            with open(filename, 'r') as f_json:
                return json.load(f_json)
        elif extension == '.yaml':
            with open(filename, 'r') as f:
                return yaml.safe_load(f)
        else:
            raise ValueError(f'{self.__class__.__name__}::ERROR::Unsupported file type on load: {extension}')

    def _recursive_dict_update(self, in_dict, new_dict, re_write=True, adding_tag='added', in_stats=False):
        """Update in_dict with new_dict (in place), properly updating all
        the inner dictionaries.

        Parameters:
            * re_write=True replaces the values with the ones from the new
              dictionary if they happen to be different
            * re_write=False extends the dictionary to include both values
              if different (imported one marked with adding_tag)
            * in_stats shows if we are currently in any of the 'stats'
              subsections. In this case, lists are merged instead of being
              re-written
        """
        if not isinstance(new_dict, dict):
            # NOTE(review): rebinding the local name has no effect on the
            # caller's object -- a non-dict replacement is effectively
            # ignored here; confirm whether that is intended
            in_dict = new_dict  # just update with all values
            return

        for new_key in new_dict:
            if new_key in in_dict and isinstance(in_dict[new_key], dict):
                # update inner dict properly
                self._recursive_dict_update(
                    in_dict[new_key], new_dict[new_key],
                    re_write, adding_tag,
                    (in_stats or new_key == 'stats'))
            elif not re_write and new_key in in_dict and in_dict[new_key] != new_dict[new_key]:
                if in_stats and isinstance(in_dict[new_key], list):
                    # merge lists inside stats sections
                    in_dict[new_key] = in_dict[new_key] + new_dict[new_key]
                else:
                    # Keep both versions (e.g. in configs)
                    adding_name = new_key + '_' + adding_tag
                    while adding_name in in_dict:  # in case even the added version is already there
                        adding_name = adding_name + '_added'

                    in_dict[adding_name] = new_dict[new_key]
                    # NOTE(review): requires a top-level 'name' property --
                    # raises KeyError otherwise; confirm this precondition
                    in_dict[new_key + '_' + self['name']] = in_dict[new_key]
            else:  # at certain depth there will be no more dicts -- recursion stops
                in_dict[new_key] = new_dict[new_key]
        # if new_dict is empty -- no update happens

    # Dictionary-style access to top-level properties
    def __getitem__(self, key):
        return self.properties[key]

    def __setitem__(self, key, value):
        self.properties[key] = value

    def __contains__(self, key):
        return key in self.properties

    def __str__(self):
        return str(self.properties)
|
||||
0
pygarment/garmentcode/__init__.py
Normal file
0
pygarment/garmentcode/__init__.py
Normal file
141
pygarment/garmentcode/base.py
Normal file
141
pygarment/garmentcode/base.py
Normal file
@@ -0,0 +1,141 @@
|
||||
from abc import ABC, abstractmethod
|
||||
import numpy as np
|
||||
|
||||
from pygarment.garmentcode.connector import Stitches
|
||||
|
||||
|
||||
class BaseComponent(ABC):
    """Basic interface for garment-related components

    NOTE: modifier methods return self object to allow chaining of the
    operations
    """

    def __init__(self, name, verbose=False) -> None:
        # Component name -- used in error messages and serialization
        self.name = name
        # Enables extra diagnostic printouts in subclasses
        self.verbose = verbose

        # List or dictionary of the interfaces of this components
        # available for connectivity with other components
        self.interfaces = {}

        # Rules for connecting subcomponents
        self.stitching_rules = Stitches()

    # Info
    def pivot_3D(self):
        """Pivot location of a component in 3D"""
        # Default: the origin; subclasses override with a meaningful pivot
        return [0, 0, 0]

    def bbox(self):
        """Bounding box -- in 2D

        Returns (min corner, max corner) as numpy arrays.
        Default is a degenerate box at the origin.
        """
        return np.array([0, 0]), np.array([0, 0])

    def bbox3D(self):
        """Bounding box in 3D space

        Returns (min corner, max corner) as numpy arrays.
        Default is a degenerate box at the origin.
        """
        return np.array([0, 0, 0]), np.array([0, 0, 0])

    def is_self_intersecting(self):
        """Check whether the component have self-intersections"""
        # Base components report no self-intersections by default
        return False

    # Operations
    @abstractmethod
    def translate_by(self, delta_translation):
        """Translate the component by the given 3D offset vector"""
        return self

    @abstractmethod
    def translate_to(self, new_translation):
        """Set panel translation to be exactly that vector"""
        return self

    @abstractmethod
    def rotate_by(self, delta_rotation):
        """Rotate the component by the given rotation (applied on top of
        the current orientation)"""
        return self

    @abstractmethod
    def rotate_to(self, new_rot):
        """Set the component rotation to be exactly the given rotation"""
        return self

    @abstractmethod
    def assembly(self, *args, **kwargs):
        """Produce a serializable sewing-pattern representation of the
        component"""
        pass

    # ----- Placement routines: these are the same for panels and components
    def place_below(self, comp, gap=2):
        """Place below the provided component"""
        other_bbox = comp.bbox3D()
        curr_bbox = self.bbox3D()

        # Move down so this component's top (max Y) sits `gap` below
        # the other component's bottom (min Y)
        self.translate_by([0, other_bbox[0][1] - curr_bbox[1][1] - gap, 0])
        return self

    def place_by_interface(
            self,
            self_interface,
            out_interface,
            gap=2,
            alignment='center',
            gap_dir=None
    ):
        """Adjust the placement of component according to the connectivity
        instruction

        Parameters:
            * self_interface -- interface on this component
            * out_interface -- interface on the other component to align to
            * gap -- distance to keep between the aligned interfaces
            * alignment -- which point of the interfaces to match up
            * gap_dir -- optional explicit direction of the gap offset;
              derived from the component geometry when None

        Alignment options:
            'center' center of the interface to center of the interface
            'top' - top on Y axis
            'bottom' - bottom on Y axis
            'left' - left on X axis
            'right' - right on X axis

        Raises:
            ValueError -- on an unknown alignment type
        """

        # Align translation
        self_bbox = self_interface.bbox_3d()
        out_bbox = out_interface.bbox_3d()

        # Determine alignment point depending on requested alignment type
        # Start from the bbox midpoints; overwrite one coordinate below
        point_out = (out_bbox[1] + out_bbox[0]) / 2
        point_self = (self_bbox[1] + self_bbox[0]) / 2
        if alignment == 'center':
            pass  # No modification needed
        elif alignment == 'top':
            point_out[1] = out_bbox[1][1]  # Use max in Y
            point_self[1] = self_bbox[1][1]
        elif alignment == 'bottom':
            point_out[1] = out_bbox[0][1]  # Use min in Y
            point_self[1] = self_bbox[0][1]
        elif alignment == 'right':
            point_out[0] = out_bbox[0][0]  # Use min in X
            point_self[0] = self_bbox[0][0]
        elif alignment == 'left':
            point_out[0] = out_bbox[1][0]  # Use max in X
            point_self[0] = self_bbox[1][0]
        else:
            raise ValueError(
                f'{self.__class__.__name__}::{self.name}::ERROR::'
                f'Uknown alignment type ({alignment}) requested in place_by_interface().'
                f' Available types: center, top, bottom, left, right')

        # Add a gap outside the current
        if gap_dir is None:
            # Default gap direction: from the component's center towards
            # the interface midpoint (i.e. "outwards" through the interface)
            full_bbox = self.bbox3D()
            center = (full_bbox[0] + full_bbox[1]) / 2
            mid_self = (self_bbox[1] + self_bbox[0]) / 2
            gap_dir = mid_self - center

        # NOTE(review): if gap_dir has zero norm (interface midpoint at the
        # component center) this divides by zero -- confirm callers avoid it
        gap_dir = gap * gap_dir / np.linalg.norm(gap_dir)
        diff = point_out - (point_self + gap_dir)

        self.translate_by(diff)

        # NOTE: Norm evaluation of vertex set will fail
        # for the alignment of 2D panels, where they are likely
        # to be in one line or in a panel plane instead of
        # the interface place -- so I'm not using norms for gap estimation

        # TODO Estimate rotation
        # TODO not just placement by the midpoint of the interfaces?
        # It created a little overlap when both interfaces are angled a little differently
        return self
|
||||
|
||||
|
||||
147
pygarment/garmentcode/component.py
Normal file
147
pygarment/garmentcode/component.py
Normal file
@@ -0,0 +1,147 @@
|
||||
import numpy as np
|
||||
from scipy.spatial.transform import Rotation as R
|
||||
|
||||
from pygarment.garmentcode.base import BaseComponent
|
||||
from pygarment.pattern.wrappers import VisPattern
|
||||
|
||||
|
||||
class Component(BaseComponent):
    """Garment element (or whole piece) composed of simpler connected garment
    elements"""

    # TODOLOW Overload copy -- respecting edge sequences -- never had any problems though

    def __init__(self, name) -> None:
        super().__init__(name)

        self.subs = []  # list of generative subcomponents

    def set_panel_label(self, label: str, overwrite=True):
        """Propagate given label to all sub-panels (in subcomponents)"""
        subs = self._get_subcomponents()
        for sub in subs:
            sub.set_panel_label(label, overwrite)

    def pivot_3D(self):
        """Pivot of a component as a block: mid-X, top-Y, mid-Z of the
        3D bounding box

        NOTE: The relation of pivots of sub-blocks needs to be
        preserved in any placement operations on components
        """
        mins, maxes = self.bbox3D()
        return np.array(((mins[0] + maxes[0]) / 2, maxes[1],
                         (mins[-1] + maxes[-1]) / 2))

    def length(self):
        """Length of a component in cm -- the sum of subcomponent lengths
        (0 if there are no subcomponents)
        """
        subs = self._get_subcomponents()
        return sum([s.length() for s in subs]) if subs else 0

    def translate_by(self, delta_vector):
        """Translate component by a vector"""
        for subs in self._get_subcomponents():
            subs.translate_by(delta_vector)
        return self

    def translate_to(self, new_translation):
        """Set panel translation to be exactly that vector"""
        pivot = self.pivot_3D()
        for subs in self._get_subcomponents():
            # Preserve every subcomponent's offset relative to the pivot
            sub_pivot = subs.pivot_3D()
            subs.translate_to(np.asarray(new_translation) + (sub_pivot - pivot))
        return self

    def rotate_by(self, delta_rotation: R):
        """Rotate component by a given rotation"""
        pivot = self.pivot_3D()
        for subs in self._get_subcomponents():
            # With preserving relationships between components
            rel = subs.pivot_3D() - pivot
            rel_rotated = delta_rotation.apply(rel)
            subs.rotate_by(delta_rotation)
            subs.translate_by(rel_rotated - rel)
        return self

    def rotate_to(self, new_rot):
        """Not supported for components -- absolute rotation cannot preserve
        relative placement of subcomponents; use rotate_by() instead

        Raises:
            NotImplementedError -- always
        """
        # TODOLOW Implement with correct preservation of relative placement
        # of subcomponents
        # FIX: added the missing space between the concatenated message parts
        raise NotImplementedError(
            f'Component::ERROR::rotate_to is not supported on component level. '
            'Use relative <rotate_by()> method instead')

    def mirror(self, axis=None):
        """Swap this component with its mirror image by recursively mirroring
        subcomponents

        Axis specifies 2D axis to swap around: Y axis ([0, 1]) by default
        """
        # FIX: use a None sentinel instead of a shared mutable default list
        if axis is None:
            axis = [0, 1]
        for subs in self._get_subcomponents():
            subs.mirror(axis)
        return self

    def assembly(self):
        """Construction process of the garment component

        Returns: simulator friendly (serializable) description of the
        component's sewing pattern
        """
        spattern = VisPattern()
        spattern.name = self.name

        subs = self._get_subcomponents()
        if not subs:
            return spattern

        # Simple merge of subcomponent representations
        for sub in subs:
            sub_raw = sub.assembly().pattern

            # simple merge of panels
            spattern.pattern['panels'] = {**spattern.pattern['panels'],
                                          **sub_raw['panels']}

            # of stitches
            spattern.pattern['stitches'] += sub_raw['stitches']

        # Stitches defined on this level connect the subcomponents
        spattern.pattern['stitches'] += self.stitching_rules.assembly()
        return spattern

    def bbox3D(self):
        """Evaluate 3D bounding box of the current component

        Returns (min corner, max corner); an inverted infinite box for
        components without panel geometry.
        """
        subs = self._get_subcomponents()
        bboxes = [s.bbox3D() for s in subs]

        if not len(subs):
            # Special components without panel geometry -- no bbox defined
            return np.array([[np.inf, np.inf, np.inf], [-np.inf, -np.inf, -np.inf]])

        mins = np.vstack([b[0] for b in bboxes])
        maxes = np.vstack([b[1] for b in bboxes])

        return mins.min(axis=0), maxes.max(axis=0)

    def is_self_intersecting(self):
        """Check whether the component have self-intersections on panel level"""
        for s in self._get_subcomponents():
            if s.is_self_intersecting():
                return True
        return False

    # Subcomponents
    def _get_subcomponents(self):
        """Unique set of subcomponents defined in the `self.subs` list or as
        attributes of the object"""
        all_attrs = [getattr(self, name)
                     for name in dir(self)
                     if name[:2] != '__' and name[-2:] != '__']
        return list(set([att
                         for att in all_attrs
                         if isinstance(att, BaseComponent)] + self.subs))
|
||||
|
||||
197
pygarment/garmentcode/connector.py
Normal file
197
pygarment/garmentcode/connector.py
Normal file
@@ -0,0 +1,197 @@
|
||||
import numpy as np
|
||||
|
||||
from pygarment.garmentcode.interface import Interface
|
||||
from pygarment.garmentcode.utils import close_enough
|
||||
|
||||
|
||||
class StitchingRule:
|
||||
"""High-level stitching instructions connecting two component interfaces
|
||||
"""
|
||||
def __init__(self, int1: Interface, int2: Interface,
|
||||
verbose: bool = False) -> None:
|
||||
"""
|
||||
Inputs:
|
||||
* int1, int2 -- two interfaces to connect in the stitch
|
||||
NOTE: When connecting interfaces with multiple edge count on both
|
||||
sides,
|
||||
1) Note that the edge sequences may change their structure.
|
||||
Involved interfaces and corresponding patterns will be updated
|
||||
automatically
|
||||
Use of the same interfaces in other stitches (creating 3+way
|
||||
stitch edge) may fail.
|
||||
2) The interfaces' edges are matched based on the provided order
|
||||
in the interface.
|
||||
The order can be controlled at the moment of interface creation
|
||||
"""
|
||||
# TODO Explicitely support 3+way stitches
|
||||
self.int1 = int1
|
||||
self.int2 = int2
|
||||
self.verbose = verbose
|
||||
if not self.isMatching():
|
||||
self.match_interfaces()
|
||||
|
||||
if verbose and not close_enough(
|
||||
len1 := int1.projecting_lengths().sum(),
|
||||
len2 := int2.projecting_lengths().sum(),
|
||||
tol=0.3): # NOTE = 3 mm
|
||||
print(
|
||||
f'{self.__class__.__name__}::WARNING::Projected edges do not match in the stitch: \n'
|
||||
f'{len1}: {int1}\n{len2}: {int2}')
|
||||
|
||||
def isMatching(self, tol=0.05):
|
||||
# if both the breakdown and relative partitioning is similar
|
||||
|
||||
frac1 = self.int1.projecting_fractions()
|
||||
frac2 = self.int2.projecting_fractions()
|
||||
|
||||
return len(self.int1) == len(self.int2) and np.allclose(frac1, frac2, atol=tol)
|
||||
|
||||
def match_interfaces(self):
|
||||
""" Subdivide the interface edges on both sides s.t. they are matching
|
||||
and can be safely connected
|
||||
(same number of edges on each side and same relative fractions)
|
||||
|
||||
Serializable format does not natively support t-stitches,
|
||||
so the longer edges needs to be broken down into matching segments
|
||||
"""
|
||||
|
||||
# Eval the fractions corresponding to every segment in the interfaces
|
||||
# Using projecting edges to match desired gather patterns
|
||||
frac1 = self.int1.projecting_fractions()
|
||||
frac2 = self.int2.projecting_fractions()
|
||||
min_frac = min(min(frac1), min(frac2)) # projection tolerance should not be larger than the smallest fraction
|
||||
|
||||
self._match_to_fractions(self.int1, frac2, tol=min(1e-2, min_frac / 2))
|
||||
|
||||
self._match_to_fractions(self.int2, frac1, tol=min(1e-2, min_frac / 2))
|
||||
|
||||
|
||||
def _match_to_fractions(self, inter:Interface, to_add, tol=1e-2):
    """Add vertices at the given locations to the edge sequence in a given
    interface.

    Parameters:
        * inter -- interface to modify
        * to_add -- the fractions of segments to be projected onto the
            edge sequence in `inter`
        * tol -- the proximity at which two vertices are regarded as
            the same vertex.
            NOTE: tol should be shorter than the smallest expected edge

    Raises RuntimeError if the projection fails to consume all of `to_add`.
    """
    # NOTE Edge sequences to subdivide might be disconnected
    # (even belong to different panels), so we need to subdivide per edge

    # Go over the edges keeping track of their fractions.
    # Two cursors walk both breakdowns in lockstep:
    #   in_id / covered_init  -- position in the existing interface edges
    #   add_id / covered_added -- position in the target fractions
    add_id, in_id = 0, 0
    covered_init, covered_added = 0, 0
    curr_fractions = inter.projecting_fractions()

    while in_id < len(inter.edges) and add_id < len(to_add):
        # Projected edges are used since they represent the stitch sizes
        # NOTE: sometimes overshoots slightly due to error accumulation
        # -> bounding by 1.
        next_init = min(covered_init + curr_fractions[in_id], 1.)
        next_added = min(covered_added + to_add[add_id], 1.)
        if close_enough(next_init, next_added, tol):
            # The vertex already exists at this fraction -- skip
            in_id += 1
            add_id += 1
            covered_init, covered_added = next_init, next_added
        elif next_init < next_added:
            # The target vertex lies beyond this edge -- handle on the
            # next step
            in_id += 1
            covered_init = next_init
        else:
            # Add a vertex to the current edge at the new location.
            # Split position is evaluated on the projected edge.
            in_frac = curr_fractions[in_id]
            new_v_loc = in_frac - (next_init - next_added)
            split_frac = new_v_loc / in_frac
            base_edge, base_panel = inter.edges[in_id], inter.panel[in_id]

            # Check edge orientation: the interface may traverse the edge
            # in the direction opposite to its panel definition
            flip = inter.needsFlipping(in_id)
            if flip:
                split_frac = 1 - split_frac
                if self.verbose:
                    print(f'{self.__class__.__name__}::INFO::{base_edge} from {base_panel.name} reoriented in interface')

            # Split the base edge accordingly
            subdiv = base_edge.subdivide_len([split_frac, 1 - split_frac])

            inter.panel[in_id].edges.substitute(base_edge, subdiv)  # Update the panel
            # The panel always follows its own edge order;
            # swap subdiv order s.t. the interface sequence remains oriented
            if flip:
                subdiv.edges.reverse()

            # Update the interface accordingly (same panel for each sub-edge)
            inter.substitute(
                base_edge, subdiv, [inter.panel[in_id]
                                    for _ in range(len(subdiv))])

            # TODO what if these edges are used in other interfaces? Do they need to be updated as well?
            # Next step: re-evaluate fractions since the breakdown changed
            curr_fractions = inter.projecting_fractions()
            covered_init += curr_fractions[in_id]
            covered_added = next_added
            in_id += 1
            add_id += 1

    if add_id != len(to_add):
        raise RuntimeError(f'{self.__class__.__name__}::ERROR::Projection on {inter.panel_names()} failed')
|
||||
|
||||
def assembly(self):
    """Produce the serializable stitch entries connecting the two interfaces.

    Each entry pairs one edge of int1 with one edge of int2, referenced
    by panel name and geometric edge id; a 'right_wrong' marker is
    appended when either interface requests the swapped fabric side.
    """
    if self.verbose and not self.isMatching():
        print(f'{self.__class__.__name__}::WARNING::Stitch sides do not match on assembly!!')

    pair_count = min(len(self.int1.edges), len(self.int2.edges))
    stitches = []
    for idx in range(pair_count):
        entry = [
            {
                # 'panel' corresponds to a name.
                # Only one element of the first level is expected
                'panel': self.int1.panel[idx].name,
                'edge': self.int1.edges[idx].geometric_id,
            },
            {
                'panel': self.int2.panel[idx].name,
                'edge': self.int2.edges[idx].geometric_id,
            },
        ]

        # Swap indication
        # NOTE: Swap is indicated on the interfaces in order to support
        # component incapsulation: the same stitching rule for different
        # participating components may have different fabric side
        # preferences.
        # NOTE: "right_wrong" stitch is used when either of the
        # interfaces requests it (backward-compatible formulation)
        if self.int1.right_wrong[idx] or self.int2.right_wrong[idx]:
            entry.append('right_wrong')

        stitches.append(entry)

    return stitches
|
||||
|
||||
|
||||
class Stitches:
    """A collection of StitchingRule objects.

    Allows a more compact specification and batch evaluation of the rules.
    """

    def __init__(self, *rules) -> None:
        """Rules -- any number of tuples of two interfaces (Interface, Interface)"""
        self.rules = []
        for pair in rules:
            self.append(pair)

    def append(self, pair):  # TODOLOW two parameters explicitely rather then "pair" object?
        """Register one more stitch given as an (Interface, Interface) pair."""
        self.rules.append(StitchingRule(*pair))

    def __getitem__(self, id):
        return self.rules[id]

    def assembly(self):
        """Concatenate the serializable stitch entries of all rules."""
        collected = []
        for rule in self.rules:
            collected.extend(rule.assembly())
        return collected
|
||||
998
pygarment/garmentcode/edge.py
Normal file
998
pygarment/garmentcode/edge.py
Normal file
@@ -0,0 +1,998 @@
|
||||
from copy import deepcopy, copy
|
||||
|
||||
import numpy as np
|
||||
from numpy.linalg import norm
|
||||
import svgpathtools as svgpath # https://github.com/mathandy/svgpathtools
|
||||
|
||||
from pygarment.garmentcode.utils import R2D
|
||||
from pygarment.garmentcode.utils import close_enough
|
||||
from pygarment.garmentcode.utils import c_to_list
|
||||
from pygarment.garmentcode.utils import list_to_c
|
||||
from pygarment.pattern.utils import rel_to_abs_2d, abs_to_rel_2d
|
||||
|
||||
ILENGTH_S_TOL = 1e-10 # NOTE: tolerance value for evaluating curve parameter (t) from acr length
|
||||
|
||||
class Edge:
    """An individual segment of a panel border connecting two panel
    vertices; the basic building block of panels.

    Edges are defined in a 2D coordinate system with the Start vertex as
    the origin and (End - Start) as the Ox axis.
    """

    def __init__(self, start=None, end=None, label='') -> None:
        """Simple edge initialization.
        Parameters:
            * start, end: from/to vertices that the edge connects,
                describing the _interface_ of an edge
            * label: semantic label of the edge to be written down as a
                property on assembly

        # TODOLOW Add support for fold schemes to allow guided folds at
        the edge (e.g. pleats)
        """
        if start is None:
            start = [0, 0]
        if end is None:
            end = [0, 0]
        # Zero-length (degenerate) edges are not allowed
        assert not all(close_enough(s, e) for s, e in zip(start, end)), 'Start and end of an edge should differ'

        self.start = start  # NOTE: careful with references to vertex objects
        self.end = end

        # Semantic label
        self.label = label

        # ID w.r.t. other edges in a super-panel
        # Filled out at the panel assembly time
        self.geometric_id = 0

    def length(self):
        """Return current length of an edge.

        Since vertices may change their locations externally, the length
        is dynamically evaluated
        """
        return self._straight_len()

    def _straight_len(self):
        """Length of the edge ignoring the curvature"""
        return norm(np.asarray(self.end) - np.asarray(self.start))

    def __eq__(self, __o: object, tol=1e-2) -> bool:
        """Special implementation of comparison: same edges == edges can be
        connected by a flat stitch.
        Edges are the same if their length is the same (if their flattened
        representation is the same) => vertices do not have to be in the
        same locations.

        NOTE: The edges may not have the same curvature and still be
        considered equal ("connectible")
        """
        if not isinstance(__o, Edge):
            return False

        # Base length should be the same
        # BUGFIX: the condition was inverted -- edges of matching length
        # were reported as NOT equal (and mismatched ones as equal),
        # contradicting the contract stated in the docstring
        if not close_enough(self.length(), __o.length(), tol=tol):
            return False

        return True

    def __str__(self) -> str:
        return f'Straight:[{self.start[0]:.2f}, {self.start[1]:.2f}]->[{self.end[0]:.2f}, {self.end[1]:.2f}]'

    def __repr__(self) -> str:
        """'Official string representation' -- for nice printing of lists of edges

        https://stackoverflow.com/questions/3558474/how-to-apply-str-function-when-printing-a-list-of-objects-in-python
        """
        return self.__str__()

    def midpoint(self):
        """Center of the edge"""
        return (np.array(self.start) + np.array(self.end)) / 2

    def shortcut(self):
        """Return the straight shortcut for an edge, as `np.array`.

        For straight edges it's the same as the edge itself
        """
        return np.array([self.start, self.end])

    def flip_x_axis(self):
        """Flip the edge along the x-axis by inverting the y-coordinates"""
        self.start[1] = -self.start[1]
        self.end[1] = -self.end[1]
        return self

    # Representation
    def as_curve(self):
        """As svgpath curve object"""
        # Get the nodes correctly
        nodes = np.vstack((self.start, self.end))

        # Pack 2D points as complex numbers, as svgpathtools expects
        params = nodes[:, 0] + 1j*nodes[:, 1]

        return svgpath.Line(*params)

    def linearize(self, n_verts_inside=0):
        """Return a linear approximation of an edge using the same vertex objects.

        # NOTE: for the linear edge the result is the edge itself when
        n_verts_inside = 0, else n_verts_inside = number of vertices
        (excluding the start and end vertices) used to create the
        linearization of the edge
        """
        if not n_verts_inside:
            return EdgeSequence(self)

        n = n_verts_inside + 1
        tvals = np.linspace(0, 1, n, endpoint=False)[1:]

        curve = self.as_curve()
        edge_verts = [c_to_list(curve.point(t)) for t in tvals]
        return self.to_edge_sequence(edge_verts)

    def to_edge_sequence(self, edge_verts):
        """Return the edge as a sequence of STRAIGHT edges based on points
        sampled on the edge between `self.start` and `self.end` (edge_verts).
        """
        seq = EdgeSequence(Edge(self.start, edge_verts[0]))
        for i in range(1, len(edge_verts)):
            seq.append(Edge(seq[-1].end, edge_verts[i]))
        seq.append(Edge(seq[-1].end, self.end))

        return seq

    # Actions
    def reverse(self):
        """Flip the direction of the edge"""
        self.start, self.end = self.end, self.start

        return self

    def reflect_features(self):
        """Reflect edge features from one side of the edge to the other"""
        # Nothing to do for a straight edge
        return self

    def snap_to(self, new_start=None):
        """Translate the edge vertices s.t. the start is at new_start"""
        if new_start is None:
            new_start = [0, 0]

        self.end[0] = self.end[0] - self.start[0] + new_start[0]
        self.end[1] = self.end[1] - self.start[1] + new_start[1]
        self.start[:] = new_start
        return self

    def rotate(self, angle):
        """Rotate edge by angle in place, using the first point as a reference.

        Parameters:
            angle -- desired rotation angle in radians (!)
        """
        curr_start = copy(self.start)

        # Rotate around the origin, then recover the original location
        self.snap_to([0, 0])
        self.end[:] = np.matmul(R2D(angle), self.end)
        self.snap_to(curr_start)

        return self

    def subdivide_len(self, fractions: list, connect_internal_verts=True):
        """Add intermediate vertices to an edge, splitting its length
        according to fractions while preserving the overall shape.

        * connect_internal_verts -- if False, the newly inserted vertices
            are independent objects for each sub-edge; if True, vertex
            objects are shared between consecutive sub-edges
        """
        # Parametrized by length
        new_edges = self._subdivide(fractions, by_length=True)

        if connect_internal_verts:
            self._merge_subdiv_vertices(new_edges)

        return new_edges

    def subdivide_param(self, fractions: list, connect_internal_verts=True):
        """Add intermediate vertices to an edge, splitting its curve
        parametrization according to fractions while preserving the
        overall shape.

        NOTE: for a line, it's the same as subdivision by length
        """
        new_edges = self._subdivide(fractions, by_length=False)

        if connect_internal_verts:
            self._merge_subdiv_vertices(new_edges)

        return new_edges

    def _subdivide(self, fractions: list, by_length=True):
        """Subdivide the edge by length or curve parametrization.

        NOTE: the two are equivalent for straight lines, hence
        `by_length` is unused here (subclasses distinguish them)
        """
        frac = [abs(f) for f in fractions]
        if not close_enough(fsum := sum(frac), 1, 1e-4):
            raise RuntimeError(f'Edge Subdivision::ERROR::fraction is incorrect. The sum {fsum} is not 1')

        vec = np.asarray(self.end) - np.asarray(self.start)
        verts = [self.start]
        seq = EdgeSequence()
        for i in range(len(frac) - 1):
            verts.append(
                [verts[-1][0] + frac[i]*vec[0],
                 verts[-1][1] + frac[i]*vec[1]]
            )
            seq.append(Edge(verts[-2], verts[-1]))
        verts.append(self.end)
        seq.append(Edge(verts[-2], verts[-1]))

        return seq

    def _merge_subdiv_vertices(self, subdivision):
        """Merge the vertices of consecutive edges in the given edge subdivision"""
        for i in range(1, len(subdivision)):
            subdivision[i].start = subdivision[i-1].end
        return subdivision

    # Assembly into serializable object
    def assembly(self):
        """Returns the dict-based representation of edges,
        compatible with core -> BasePattern JSON (dict)
        """
        properties = {"endpoints": [0, 1]}
        if self.label:
            properties['label'] = self.label

        return [self.start, self.end], properties
|
||||
|
||||
|
||||
class CircleEdge(Edge):
    """Curvy edge as a circular arc."""

    def __init__(self, start=None, end=None, cy=None, label='') -> None:
        """
        Define a circular arc edge
        * start, end: from/to vertices that the edge connects
        * cy: third point on the circle arc (= control point).
            Expressed relatively w.r.t. the distance between start and end.
            The X value of the control point is fixed at x=0.5 (edge
            center) to avoid ambiguity
        * label: semantic label of the edge to be written down as a property on assembly

        NOTE: representing the control point in relative coordinates
        allows preservation of curvature (arc angle, relative radius
        w.r.t. straight edge length)
        when the distance between vertices shrinks / extends

        NOTE: full circle not supported: start & end should differ
        """
        if start is None:
            start = [0, 0]
        if end is None:
            end = [1, 0]
        super().__init__(start, end, label=label)
        # Relative y of the third arc point; its x is fixed at 0.5
        self.control_y = cy

    def length(self):
        """Return current length of an edge.

        Since vertices may change their locations externally, the length
        is dynamically evaluated
        """
        # arc length = absolute radius * arc angle,
        # where absolute radius = relative radius * straight distance
        return self._rel_radius() * self._straight_len() * self._arc_angle()

    def __str__(self) -> str:
        points = [self.start, [0.5, self.control_y]]

        str = [f'[{p[0]:.2f}, {p[1]:.2f}]->' for p in points]
        str += [f'[{self.end[0]:.2f}, {self.end[1]:.2f}]']

        return 'Arc:' + ''.join(str)

    def midpoint(self):
        """Center of the edge (the third arc point, in absolute coordinates)"""
        return rel_to_abs_2d(self.start, self.end, [0.5, self.control_y])

    # Actions
    def reverse(self):
        """Flip the direction of the edge, accounting for curvatures"""
        self.start, self.end = self.end, self.start
        # Mirror the arc side: the control point flips across the chord
        self.control_y *= -1

        return self

    def reflect_features(self):
        """Reflect edge features from one side of the edge to the other"""
        self.control_y *= -1

        return self

    def _subdivide(self, fractions: list, by_length=False):
        """Add intermediate vertices to an edge,
        splitting its parametrization according to fractions
        while preserving the overall shape.

        NOTE: param subdiv == length subdiv for circle arcs
        """
        # NOTE: subdivide_param() is the same as subdivide_len()
        # So parent implementation is ok
        # TODOLOW Implementation is very similar to CurveEdge param-based subdivision

        from pygarment.garmentcode.edge_factory import EdgeFactory  # TODOLOW: ami - better solution?
        frac = [abs(f) for f in fractions]
        if not close_enough(fsum := sum(frac), 1, 1e-4):
            raise RuntimeError(f'Edge Subdivision::ERROR::fraction is incorrect. The sum {fsum} is not 1')

        curve = self.as_curve()
        # Sub-curves
        # NOTE(review): the loop iterates over `fractions`, not the
        # abs-normalized `frac` checked above -- negative fractions would
        # pass the sum check but break cropping; confirm intent
        covered_fr = 0
        subcurves = []
        for fr in fractions:
            subcurves.append(curve.cropped(covered_fr, covered_fr + fr))
            covered_fr += fr

        # Convert to CircleEdge objects
        subedges = EdgeSequence()
        for curve in subcurves:
            subedges.append(EdgeFactory.from_svg_curve(curve))
        # Reference the first/last vertices correctly
        subedges[0].start = self.start
        subedges[-1].end = self.end

        return subedges

    # Special tools for circle representation
    def as_curve(self):
        """Represent as svgpath Arc"""
        radius, la, sweep = self.as_radius_flag()

        return svgpath.Arc(
            list_to_c(self.start),
            list_to_c([radius, radius]), 0, la, sweep,
            list_to_c(self.end)
        )

    def as_radius_flag(self):
        """Return circle representation as radius and SVG-style arc flags
        (large-arc flag, sweep flag)"""
        return (self._rel_radius() * self._straight_len(),
                self._is_large_arc(),
                self.control_y < 0)  # left/right orientation

    def as_radius_angle(self):
        """Return circle representation as radius, arc angle, and orientation"""
        return (
            self._rel_radius() * self._straight_len(),
            self._arc_angle(),
            self.control_y < 0
        )

    def linearize(self, n_verts_inside = 9):
        """Return a linear approximation of an edge using the same vertex objects.
        NOTE: n_verts_inside = number of vertices (excluding the start
        and end vertices) used to create the linearization of the edge
        """
        n = n_verts_inside + 1
        # Evenly spaced curve parameters, excluding t=0
        tvals = np.linspace(0, 1, n, endpoint=False)[1:]

        curve = self.as_curve()
        edge_verts = [c_to_list(curve.point(t)) for t in tvals]
        seq = self.to_edge_sequence(edge_verts)

        return seq

    # NOTE: The following values are calculated at runtime to allow
    # changes to the control point after the edge definition
    def _rel_radius(self, abs_radius=None):
        """Eval relative radius (w.r.t. straight distance) from the 3-point
        representation"""
        if abs_radius:
            return abs_radius / self._straight_len()

        # Using the formula for radius of circumscribed circle
        # https://en.wikipedia.org/wiki/Circumscribed_circle#Other_properties

        # Triangle sides, assuming the beginning and end of the edge are
        # at (0, 0) and (1, 0) accordingly (relative coordinates)
        a = 1
        b = norm([0.5, self.control_y])
        c = norm([0.5 - 1, self.control_y])
        p = (a + b + c) / 2  # semiperimeter

        # R = abc / (4 * triangle area), via Heron's formula
        rad = a * b * c / np.sqrt(p * (p - a) * (p - b) * (p - c)) / 4

        return rad

    def _arc_angle(self):
        """Eval arc angle from the control point"""
        rel_rad = self._rel_radius()

        # NOTE: Bound the sin to avoid out of bounds errors
        # due to floating point error accumulation
        arc = 2 * np.arcsin(min(max(1 / rel_rad / 2, -1.), 1.))

        # The chord subtends the complementary angle for large arcs
        if self._is_large_arc():
            arc = 2 * np.pi - arc

        return arc

    def _is_large_arc(self):
        """Indicate if the arc sweeps the large or small angle"""
        # Control point farther from the chord than the radius
        # => more than a half-circle
        return abs(self.control_y) > self._rel_radius()

    def assembly(self):
        """Returns the dict-based representation of edges,
        compatible with core -> BasePattern JSON (dict)
        """
        ends, props = super().assembly()

        # NOTE: arc representation is the same as in SVG
        rad, large_arc, right = self.as_radius_flag()
        props['curvature'] = {
            "type": 'circle',
            "params": [rad, int(large_arc), int(right)]
        }
        return ends, props
|
||||
|
||||
|
||||
class CurveEdge(Edge):
    """Curvy edge as a Bezier curve / B-spline."""

    def __init__(self, start=None, end=None, control_points=None,
                 relative=True,
                 label='') -> None:
        """Define a Bezier curve edge
        * start, end: from/to vertices that the edge connects
        * control_points: coordinates of Bezier control points.
            Specification of one control point creates a Quadratic Bezier,
            specification of 2 control points creates a Cubic Bezier.
            Other degrees are not supported.
        * label: semantic label of the edge to be written down as a property on assembly

        * relative: specify whether the control point coordinates are given
        relative to the edge length (True) or in the 2D coordinate system
        of a panel (False)
        """
        if control_points is None:
            control_points = []
        if start is None:
            start = [0, 0]
        if end is None:
            end = [0, 0]
        # NOTE(review): default start == end would trip the degenerate-edge
        # assert in Edge.__init__; callers appear to always pass explicit
        # vertices -- confirm
        super().__init__(start, end, label=label)

        self.control_points = control_points

        if len(self.control_points) > 2:
            raise NotImplementedError(f'{self.__class__.__name__}::ERROR::Up to 2 control points (cubic Bezier) are supported')

        # Storing control points as relative since it preserves overall curve
        # shape during edge extension/contraction
        if not relative:
            self.control_points = [abs_to_rel_2d(self.start, self.end, c).tolist()
                                   for c in self.control_points]

    def flip_x_axis(self):
        """Flips the Bezier curve along the x-axis by inverting the y-coordinates"""
        # Convert control points to absolute coordinates first: the
        # relative frame itself changes when start/end are mirrored
        abs_control_points = [rel_to_abs_2d(self.start, self.end, c) for c in self.control_points]
        self.start[1] = -self.start[1]
        self.end[1] = -self.end[1]

        abs_control_points= [[x, -y] for x, y in abs_control_points]
        self.control_points= [abs_to_rel_2d(self.start, self.end, c).tolist() for c in abs_control_points]
        return self

    def length(self):
        """Length of the Bezier curve edge (evaluated numerically by svgpath)"""
        curve = self.as_curve()

        return curve.length()

    def __str__(self) -> str:
        points = [self.start] + self.control_points

        str = [f'[{p[0]:.2f}, {p[1]:.2f}]->' for p in points]
        str += [f'[{self.end[0]:.2f}, {self.end[1]:.2f}]']

        return 'Curve:' + ''.join(str)

    def midpoint(self):
        """Center of the edge (by arc length, not by curve parameter)"""
        curve = self.as_curve()

        # Curve parameter t corresponding to half of the arc length
        t_mid = curve.ilength(curve.length()/2, s_tol=ILENGTH_S_TOL)
        return c_to_list(curve.point(t_mid))

    def _subdivide(self, fractions: list, by_length=False):
        """Add intermediate vertices to an edge,
        splitting its curve parametrization or overall length according to
        fractions while preserving the overall shape
        """
        from pygarment.garmentcode.edge_factory import EdgeFactory  # TODOLOW: ami - better solution?
        curve = self.as_curve()

        # Sub-curves: crop by curve parameter t; when by_length is
        # requested, map length fractions to t via ilength() first
        covered_fr, prev_t = 0, 0
        clen = curve.length()
        subcurves = []
        for fr in fractions:
            covered_fr += fr
            if by_length:
                next_t = curve.ilength(clen * covered_fr, s_tol=ILENGTH_S_TOL)
                subcurves.append(curve.cropped(prev_t, next_t))
                prev_t = next_t
            else:
                subcurves.append(curve.cropped(covered_fr - fr, covered_fr))

        # Convert to CurveEdge objects
        subedges = EdgeSequence()
        for curve in subcurves:
            subedges.append(EdgeFactory.from_svg_curve(curve))
        # Reference the first/last vertices correctly
        subedges[0].start = self.start
        subedges[-1].end = self.end

        return subedges

    # Actions
    def reverse(self):
        """Flip the direction of the edge, accounting for curvatures"""
        self.start, self.end = self.end, self.start

        # change order of control points
        if len(self.control_points) == 2:
            self.control_points[0], self.control_points[1] = self.control_points[1], self.control_points[0]

        # Update coordinates: mirror relative coords into the reversed frame
        for p in self.control_points:
            p[0], p[1] = 1 - p[0], -p[1]

        return self

    def reflect_features(self):
        """Reflect edge features from one side of the edge to the other"""
        for p in self.control_points:
            p[1] = -p[1]

        return self

    def as_curve(self, absolute=True):
        """As svgpath curve object.

        Converting on the fly as the exact vertex locations might have been
        updated since the creation of the edge
        """
        # Get the nodes correctly
        if absolute:
            cp = [rel_to_abs_2d(self.start, self.end, c) for c in self.control_points]
            nodes = np.vstack((self.start, cp, self.end))
        else:
            # Unit frame: start at (0, 0), end at (1, 0)
            cp = self.control_points
            nodes = np.vstack(([0, 0], cp, [1, 0]))

        # Pack 2D points as complex numbers, as svgpathtools expects
        params = nodes[:, 0] + 1j*nodes[:, 1]

        return svgpath.QuadraticBezier(*params) if len(cp) < 2 else svgpath.CubicBezier(*params)

    def linearize(self, n_verts_inside=9):
        """Return a linear approximation of an edge using the same vertex objects.
        NOTE: n_verts_inside = number of vertices (excluding the start
        and end vertices) used to create the linearization of the edge
        """
        n = n_verts_inside + 1
        tvals_init = np.linspace(0, 1, n, endpoint=False)[1:]

        # Sample evenly by arc length, not by curve parameter
        curve = self.as_curve(absolute=False)
        curve_lengths = tvals_init * curve.length()
        tvals = [curve.ilength(c_len, s_tol=ILENGTH_S_TOL) for c_len in curve_lengths]

        edge_verts = [rel_to_abs_2d(self.start, self.end, c_to_list(curve.point(t))) for t in tvals]
        seq = self.to_edge_sequence(edge_verts)

        return seq

    def _extreme_points(self):
        """Return extreme points (on Y) of the current edge.
        NOTE: this does NOT include the border vertices of an edge
        """
        # Variation of https://github.com/mathandy/svgpathtools/blob/5c73056420386753890712170da602493aad1860/svgpathtools/bezier.py#L197
        curve = self.as_curve(absolute=False)  # relative coords to find real extremizers
        # Zeros of dy/dt of the Bezier polynomial give the Y-extremes
        poly = svgpath.bezier2polynomial(curve, return_poly1d=True)
        y = svgpath.imag(poly)
        dy = y.deriv()
        y_extremizers = svgpath.polyroots(
            dy, realroots=True, condition=lambda r: 0 < r < 1)

        extreme_points = np.array(
            [rel_to_abs_2d(self.start, self.end, c_to_list(curve.point(t)))
             for t in y_extremizers]
        )

        return extreme_points

    # Assembly into serializable object
    def assembly(self):
        """Returns the dict-based representation of edges,
        compatible with core -> BasePattern JSON (dict)
        """
        ends, props = super().assembly()

        props['curvature'] = {
            "type": 'quadratic' if len(self.control_points) == 1 else 'cubic',
            "params": self.control_points
        }
        return ends, props
|
||||
|
||||
|
||||
class EdgeSequence:
|
||||
"""Represents a sequence of (possibly chained) edges (e.g. every next edge
|
||||
starts from the same vertex that the previous edge ends with and
|
||||
allows building some typical edge sequences
|
||||
"""
|
||||
def __init__(self, *args, verbose: bool = False) -> None:
    """Collect the given edges / edge sequences into a new sequence."""
    self.verbose = verbose
    self.edges = []
    for item in args:
        self.append(item)
|
||||
|
||||
# ANCHOR Properties
|
||||
def __getitem__(self, i):
    """Index or slice the sequence; slices come back wrapped as EdgeSequence."""
    if isinstance(i, slice):
        # Wrap slices so chained operations keep working on sequences
        return EdgeSequence(self.edges[i])
    return self.edges[i]
|
||||
|
||||
def index(self, elem):
    """Position of *elem* in the sequence, compared by object identity.

    list.index() compares by equality, which is wrong for coincident
    but distinct edges -- https://stackoverflow.com/a/47057419
    """
    for pos, candidate in enumerate(self.edges):
        if candidate is elem:
            return pos
    # Mirror the exhausted-next() behavior of the original lookup
    raise StopIteration
|
||||
|
||||
def __len__(self):
    """Number of edges in the sequence."""
    return len(self.edges)
|
||||
|
||||
def __contains__(self, item):
    """Membership test by object identity (not equality)."""
    return any(item is e for e in self.edges)
|
||||
|
||||
def __str__(self) -> str:
    """Printable representation listing all contained edges."""
    return 'EdgeSeq: ' + str(self.edges)
|
||||
|
||||
def __repr__(self) -> str:
    """Delegate to __str__ for nice printing of lists of sequences."""
    return self.__str__()
|
||||
|
||||
def length(self):
    """Total length of all edges in the sequence."""
    return sum(e.length() for e in self.edges)
|
||||
|
||||
def isLoop(self):
    """Whether the sequence closes back onto its first vertex.

    NOTE: vertices are compared by value (==), not identity.
    """
    closes = self.edges[0].start == self.edges[-1].end
    return closes and len(self) > 1
|
||||
|
||||
def isChained(self):
    """Does the sequence of edges represent a correct chain?

    A chain means every edge starts from the very vertex object the
    previous edge ends with (identity, not value equality).
    """
    if len(self) < 2:
        return False

    for prev, curr in zip(self.edges, self.edges[1:]):
        if curr.start is not prev.end:
            if self.verbose:
                # This should be helpful to catch bugs
                print(f'{self.__class__.__name__}::WARNING!::Edge sequence is not properly chained')
            return False
    return True
|
||||
|
||||
def fractions(self) -> list:
    """Length of each edge as a fraction of the whole sequence length."""
    edge_lengths = [e.length() for e in self.edges]
    total = sum(edge_lengths)
    return [l / total for l in edge_lengths]
|
||||
|
||||
def lengths(self) -> list:
    """Lengths of the individual edges in the sequence."""
    return [edge.length() for edge in self.edges]
|
||||
|
||||
def verts(self):
    """Return all vertex objects in order, without duplicating vertices
    shared by chained edges or double-counting a loop's origin."""
    collected = [self.edges[0].start]
    for edge in self.edges:
        # An unchained edge contributes its start as a new vertex
        if edge.start is not collected[-1]:
            collected.append(edge.start)
        collected.append(edge.end)
    # Don't double-count the loop origin
    if collected[0] is collected[-1]:
        collected.pop()
    return collected
|
||||
|
||||
def shortcut(self):
    """Opening of the edge sequence (first edge's start to last edge's
    end) as an `np.array`.

    # NOTE May not reflect the true shortcut if the edges were flipped
    but the order remained
    """
    return np.array([self.edges[0].start, self.edges[-1].end])
|
||||
|
||||
def bbox(self):
    """
    Evaluate the 2D bounding box of the current panel and
    return the panel vertices which are located on the bounding box
    (b_points).
    Output:
        * bbox (list): [min_x, max_x, min_y, max_y] of verts_2d
        * b_points (list): list of 2D vertices located on the bounding box
    """
    # Take the linear version of the edges
    # to correctly process edges with extreme curvatures
    lin_edges = EdgeSequence([e.linearize() for e in self.edges])
    verts_2d = np.asarray(lin_edges.verts())
    mi = verts_2d.min(axis=0)
    ma = verts_2d.max(axis=0)
    xs = [mi[0], ma[0]]
    ys = [mi[1], ma[1]]
    # Collect the vertices lying on the bounding box
    # NOTE(review): exact float `in` comparison -- relies on min/max
    # returning the very same values present in verts_2d
    b_points = []
    for v in verts_2d:
        if v[0] in xs or v[1] in ys:
            b_points.append(v)
    # With only two boundary points, inject a third corner point so the
    # boundary is not degenerate
    if len(b_points) == 2:
        if not any(np.array_equal(arr, mi) for arr in b_points):
            b_points = [b_points[0], mi, b_points[1]]
        else:
            # The min corner is already present -- use the opposite
            # (min_x, max_y) corner instead
            p = [mi[0],ma[1]]
            b_points = [b_points[0],p,b_points[1]]

    # FIXME Use one common order for the bbox output
    bbox = [mi[0], ma[0], mi[1], ma[1]]

    return bbox, b_points
|
||||
|
||||
# ANCHOR Modifiers
|
||||
# All modifiers return self object to allow chaining
|
||||
# Wrappers around python's list
|
||||
def append(self, item):
    """Append an Edge, a list of edges/sequences, or another
    EdgeSequence to this sequence.

    Raises ValueError for unsupported types. Returns self for chaining.
    """
    if isinstance(item, Edge):
        self.edges.append(item)
    elif isinstance(item, list):  # flatten: recurse on every element
        for sub in item:
            self.append(sub)
    elif isinstance(item, EdgeSequence):
        self.edges.extend(item.edges)
    else:
        raise ValueError(f'{self.__class__.__name__}::ERROR::Trying to add object of incompatible type {type(item)}')
    return self
|
||||
|
||||
def insert(self, i, item):
    """Insert an Edge (or every edge of a list/EdgeSequence) at
    position i, preserving the item's internal order.

    Raises NotImplementedError for unsupported types.
    Returns self for chaining.
    """
    if isinstance(item, Edge):
        self.edges.insert(i, item)
    elif isinstance(item, (list, EdgeSequence)):
        for offset, sub in enumerate(item):
            self.edges.insert(i + offset, sub)
    else:
        raise NotImplementedError(f'{self.__class__.__name__}::ERROR::incerting object of {type(item)} not suported (yet)')
    return self
|
||||
|
||||
def pop(self, i):
    """Remove the edge at index i from the sequence.

    Accepts either an integer index or the Edge object itself.
    Returns self for chaining.
    """
    if isinstance(i, Edge):  # resolve the edge object to its index
        i = self.index(i)
    self.edges.pop(i)
    return self
|
||||
|
||||
def substitute(self, orig, new):
    """Remove the orig item from the sequence and place new in its place.

    * orig -- either an index of the item to remove, or an instance of
      Edge that exists in the current sequence
    * new -- Edge / list / EdgeSequence to insert at that position
    Returns self for chaining.
    """
    if isinstance(orig, Edge):
        orig = self.index(orig)
    if orig < 0:  # normalize negative indices so insert() lands correctly
        orig = len(self) + orig
    self.pop(orig)
    self.insert(orig, new)
    return self
|
||||
|
||||
def reverse(self):
    """Reverse the edge sequence in-place: every edge flips its own
    direction and the traversal order is inverted.
    Returns self for chaining.
    """
    for edge in self.edges:
        edge.reverse()
    self.edges.reverse()
    return self
|
||||
|
||||
# EdgeSequence-specific
|
||||
def translate_by(self, shift):
    """Translate all vertices of the edge sequence by the given 2D
    shift vector, in place.

    NOTE(review): the original docstring described snap_to()'s behavior;
    this method applies a relative shift, it does not snap to an origin.
    Returns self for chaining.
    """
    for v in self.verts():
        v[0] += shift[0]
        v[1] += shift[1]
    return self
|
||||
|
||||
def snap_to(self, new_origin=None):
    """Translate the edge seq vertices s.t. the first vertex is at
    new_origin (defaults to [0, 0]). Returns self for chaining.
    """
    if new_origin is None:
        new_origin = [0, 0]
    start = copy(self[0].start)  # copy: the vertex itself moves during translate
    shift = [new_origin[0] - start[0], new_origin[1] - start[1]]
    self.translate_by(shift)

    return self
|
||||
|
||||
def close_loop(self):
    """If the edge loop is not closed, add an edge to close it.
    Returns self for chaining.
    """
    self.isChained()  # print warning if smth is wrong
    if not self.isLoop():
        self.append(Edge(self[-1].end, self[0].start))
    return self
|
||||
|
||||
def rotate(self, angle):
    """Rotate edge sequence by angle in place, using the first vertex
    as the rotation reference.

    Parameters:
        angle -- desired rotation angle in radians (!)
    Returns self for chaining.
    """
    curr_start = copy(self[0].start)

    # set the start point to zero so rotation is about the origin
    self.snap_to([0, 0])
    rot = R2D(angle)  # 2D rotation matrix

    for v in self.verts():
        v[:] = np.matmul(rot, v)  # in-place update keeps vertex objects shared

    # recover the original location
    self.snap_to(curr_start)

    return self
|
||||
|
||||
def extend(self, factor):
    """Extend or shrink the edges along the line from start of the first
    edge to the end of the last edge in sequence. The start of the first
    edge remains fixed.

    * factor -- scaling factor applied to the vertices' projections onto
      that line (1 == no change)
    Returns self for chaining.
    """
    # TODOLOW Version With preservation of total length?
    # TODOLOW Base extention factor on change in total length of edges rather
    # than on the shortcut length

    # FIXME extending by negative factor should be predictable (e.g. opposite direction of extention)

    # Need to take the target line from the chained order
    if not self.isChained():
        chained_edges = self.chained_order()
        chained_edges.isChained()
        if chained_edges.isLoop():
            # A loop has no meaningful start->end direction to extend along
            print(f'{self.__class__.__name__}::WARNING::Extending looped edge sequences is not available')
            return self
    else:
        chained_edges = self

    # Unit vector of the extension direction
    target_line = np.array(chained_edges[-1].end) - np.array(chained_edges[0].start)
    target_line = target_line / norm(target_line)

    # gather vertices
    verts_coords = self.verts()
    nverts_coords = np.array(verts_coords)

    # adjust their position based on projection to the target line
    verts_projection = np.empty(nverts_coords.shape)
    fixed = nverts_coords[0]  # first vertex of the sequence stays put
    for i in range(nverts_coords.shape[0]):
        verts_projection[i] = (nverts_coords[i] - fixed).dot(target_line) * target_line

    new_verts = verts_coords - (1 - factor) * verts_projection

    # Update vertex objects in place so shared references stay valid
    for i in range(len(verts_coords)):
        verts_coords[i][:] = new_verts[i]

    return self
|
||||
|
||||
def reflect(self, v0, v1):
    """Reflect all vertices (and edge curvature features) w.r.t. the
    2D line defined by the points v0, v1. Returns self for chaining."""
    v0, v1 = np.asarray(v0), np.asarray(v1)
    vec = np.asarray(v1) - np.asarray(v0)
    vec = vec / norm(vec)  # normalize

    # https://demonstrations.wolfram.com/ReflectionMatrixIn2D/#more
    Ref = np.array([
        [1 - 2 * vec[1]**2, 2*vec[0]*vec[1]],
        [2*vec[0]*vec[1], - 1 + 2 * vec[1]**2]
    ])

    # translate -> reflect -> translate back
    for v in self.verts():
        v[:] = np.matmul(Ref, np.asarray(v) - v0) + v0

    # Reflect edge features (curvatures, etc.)
    for e in self.edges:
        e.reflect_features()

    return self
|
||||
|
||||
def propagate_label(self, label):
    """Assign the given label to every edge of the sequence.

    NOTE: Recommended to perform after all edge modification
    operations (stitching, cutting, inserting) were completed --
    label propagation through those operations is not (yet) implemented.
    """
    for edge in self.edges:
        edge.label = label
|
||||
|
||||
# ANCHOR New sequences & versions
|
||||
def copy(self):
    """Create a copy of the current edge sequence preserving the
    chaining property (vertex sharing between neighbor edges)."""
    new_seq = deepcopy(self)

    # deepcopy recreates the vertex objects on both sides of the edges;
    # in chained edges those vertex objects are supposed to be shared
    # by neighbor edges -- restore that sharing explicitly, mirroring
    # the identity relations of the original sequence
    for i in range(1, len(new_seq)):
        if self[i].start is self[i-1].end:
            new_seq[i].start = new_seq[i-1].end

    if self.isLoop():
        new_seq[-1].end = new_seq[0].start  # close the loop with a shared vertex

    return new_seq
|
||||
|
||||
def chained_order(self):
    """Attempt to restore a chain in the EdgeSequence.

    The chained property may be lost if the edges were reversed
    externally. This routine creates a copy of the sequence with the
    edge directions aligned -- useful for various calculations.
    """
    chained = self.copy()

    for i in range(len(chained)):
        # Assuming the previous one is already sorted
        if i > 0 and chained[i].end is chained[i-1].end:
            chained[i].reverse()
        # Not connected to the previous one: orient w.r.t. the next edge
        elif (i + 1 < len(chained)
                and (chained[i].start is chained[i+1].start or chained[i].start is chained[i+1].end)):
            chained[i].reverse()
        # not connected to anything or connected properly -- leave as is

    return chained
|
||||
499
pygarment/garmentcode/edge_factory.py
Normal file
499
pygarment/garmentcode/edge_factory.py
Normal file
@@ -0,0 +1,499 @@
|
||||
import numpy as np
|
||||
from numpy.linalg import norm
|
||||
import svgpathtools as svgpath
|
||||
from scipy.optimize import minimize
|
||||
|
||||
from pygarment.garmentcode.edge import EdgeSequence, Edge, CurveEdge
|
||||
from pygarment.garmentcode.edge import CircleEdge
|
||||
from pygarment.garmentcode.utils import vector_angle
|
||||
from pygarment.garmentcode.utils import bbox_paths
|
||||
from pygarment.garmentcode.utils import close_enough
|
||||
from pygarment.garmentcode.utils import c_to_list
|
||||
from pygarment.garmentcode.utils import list_to_c
|
||||
from pygarment.pattern.utils import rel_to_abs_2d, abs_to_rel_2d
|
||||
|
||||
|
||||
class EdgeFactory:
    """Constructs Edge objects of the appropriate subclass from
    external curve representations (svgpathtools segments)."""

    @staticmethod
    def from_svg_curve(seg):
        """Create Edge/CurveEdge/CircleEdge object from svgpath object.
        Type is determined by svgpath type.

        Raises NotImplementedError for unsupported segment types.
        """
        start, end = c_to_list(seg.start), c_to_list(seg.end)
        if isinstance(seg, svgpath.Line):
            return Edge(start, end)
        if isinstance(seg, svgpath.Arc):
            # NOTE: assuming circular arc (same radius in both directions)
            radius = seg.radius.real
            return CircleEdgeFactory.from_points_radius(
                start, end, radius, seg.large_arc, seg.sweep
            )

        # Only Bezier left
        if isinstance(seg, svgpath.QuadraticBezier):
            cp = [c_to_list(seg.control)]
        elif isinstance(seg, svgpath.CubicBezier):
            cp = [c_to_list(seg.control1), c_to_list(seg.control2)]
        else:
            raise NotImplementedError(f'CurveEdge::ERROR::Incorrect curve type supplied {seg.type}')

        return CurveEdge(start, end, cp, relative=False)
|
||||
|
||||
class CircleEdgeFactory:
    """Constructs CircleEdge objects (circular arcs) from various
    geometric specifications."""

    @staticmethod
    def from_points_angle(start, end, arc_angle, right=True):
        """Construct circle arc from two fixed points and an angle.

        * arc_angle -- angle of the arc in radians
        * right -- arc bulges to the right of the start->end direction

        NOTE: Might fail on angles close to 2pi
        """
        # Big or small arc: work with the complementary angle for arcs > pi
        if arc_angle > np.pi:
            arc_angle = 2 * np.pi - arc_angle
            to_sum = True
        else:
            to_sum = False

        # Radius and apothem of a unit-chord arc (relative representation)
        radius = 1 / np.sin(arc_angle / 2) / 2
        h = 1 / np.tan(arc_angle / 2) / 2

        control_y = radius + h if to_sum else radius - h  # relative control point
        control_y *= -1 if right else 1

        return CircleEdge(start, end, cy=control_y)

    @staticmethod
    def from_points_radius(start, end, radius, large_arc=False, right=True):
        """Construct circle arc relative representation
        from two fixed points and an (absolute) radius
        """
        # Distance between the endpoints (chord length)
        str_dist = norm(np.asarray(end) - np.asarray(start))

        # NOTE: close enough values may give negative
        # value under sqrt due to numerical errors
        if close_enough(radius ** 2, str_dist ** 2 / 4, 1e-3):
            center_r = 0.
        else:
            center_r = np.sqrt(radius ** 2 - str_dist ** 2 / 4)

        # Find the absolute value of Y
        control_y = radius + center_r if large_arc else radius - center_r

        # Convert to relative
        control_y = control_y / str_dist

        # Flip sign according to "right" parameter
        control_y *= -1 if right else 1

        return CircleEdge(start, end, cy=control_y)

    @staticmethod
    def from_rad_length(rad, length, right=True, start=None):
        """Construct a circle arc of the given radius and arc length.

        NOTE: if start vertex is not provided, both vertices will be created
        to match desired radius and length
        """
        max_len = 2 * np.pi * rad  # full circumference bounds the arc length

        if length > max_len:
            raise ValueError(
                f'CircleEdge::ERROR::Incorrect length for specified radius')

        large_arc = length > max_len / 2
        if large_arc:
            length = max_len - length  # work with the complementary arc

        w_half = rad * np.sin(length / rad / 2)  # half of the chord length

        edge = CircleEdgeFactory.from_points_radius(
            [-w_half, 0], [w_half, 0],
            radius=rad,
            large_arc=large_arc,
            right=right
        )

        if start:
            edge.snap_to(start)
            edge.start = start

        return edge

    @staticmethod
    def from_three_points(start, end, point_on_arc, relative=False):
        """Create a circle arc from 3 points (start, end and any point on an arc)

        NOTE: Control point specified in the same coord system as start and end
        NOTE: points should not be on the same line
        """
        if relative:
            point_on_arc = rel_to_abs_2d(start, end, point_on_arc)

        nstart, nend, npoint_on_arc = np.asarray(start), np.asarray(
            end), np.asarray(point_on_arc)

        # https://stackoverflow.com/a/28910804
        # Using complex numbers to calculate the center & radius
        x, y, z = list_to_c([start, point_on_arc, end])
        w = z - x
        w /= y - x
        c = (x - y) * (w - abs(w) ** 2) / 2j / w.imag - x
        # NOTE center = [c.real, c.imag]
        rad = abs(c + x)

        # Large/small arc: the arc point is farther than the radius from
        # the chord midpoint iff the arc is the large one
        mid_dist = norm(npoint_on_arc - ((nstart + nend) / 2))

        # Orientation
        angle = vector_angle(npoint_on_arc - nstart, nend - nstart)  # +/-

        return CircleEdgeFactory.from_points_radius(
            start, end, radius=rad,
            large_arc=mid_dist > rad, right=angle > 0)
|
||||
|
||||
class CurveEdgeFactory:
    """Constructs CurveEdge (quadratic Bezier) objects by fitting a
    curve to geometric constraints via optimization."""

    @staticmethod
    def curve_3_points(start, end, target, verbose=False):
        """Create (Quadratic) curve edge between start and end that
        passes through the target point.

        * target -- 2D point (absolute coordinates) the curve must pass through
        * verbose -- print optimizer diagnostics on failure

        Raises NotImplementedError if the target projects outside of the
        start-end segment.
        """
        rel_target = abs_to_rel_2d(start, end, target)

        if rel_target[0] > 1 or rel_target[0] < 0:
            raise NotImplementedError(
                "CurveEdgeFactory::Curve_by_3_points::ERROR::requested target point's projection "
                "is outside of the base edge, which is not yet supported"
            )

        # Initialization with a target point as control point
        # Ensures very smooth, minimal solution
        # FIX: `args` must be an explicit tuple -- `(rel_target)` is not a
        # tuple; the original only worked because scipy.optimize.minimize
        # normalizes non-tuple args internally
        out = minimize(
            _fit_pass_point,
            rel_target,
            args=(rel_target,)
        )

        if not out.success:
            if verbose:
                print('Curve From Extreme::WARNING::Optimization not successful')
                print(out)

        cp = out.x.tolist()

        return CurveEdge(start, end, control_points=[cp], relative=True)

    @staticmethod
    def curve_from_tangents(start, end, target_tan0=None, target_tan1=None,
                            initial_guess=None, verbose=False):
        """Create Quadratic Bezier curve connecting given points with the target tangents
        (both or any of the two can be specified)

        NOTE: Target tangent vectors are automatically normalized
        """
        # Convert the tangents to the edge-relative frame and normalize
        if target_tan0 is not None:
            target_tan0 = abs_to_rel_2d(start, end, target_tan0, as_vector=True)
            target_tan0 /= norm(target_tan0)

        if target_tan1 is not None:
            target_tan1 = abs_to_rel_2d(start, end, target_tan1, as_vector=True)
            target_tan1 /= norm(target_tan1)

        # Initialization with the edge midpoint as control point
        # Ensures very smooth, minimal solution
        out = minimize(
            _fit_tangents,
            [0.5, 0] if initial_guess is None else initial_guess,
            args=(target_tan0, target_tan1)
        )

        if not out.success:
            print('CurveEdgeFactory::Curve From Tangents::WARNING::Optimization not successful')
            if verbose:
                print(out)

        cp = out.x.tolist()

        return CurveEdge(start, end, control_points=[cp], relative=True)
|
||||
|
||||
class EdgeSeqFactory:
    """Create EdgeSequence objects for some common edge sequence patterns
    """
    @staticmethod
    def from_svg_path(path: svgpath.Path, dist_tol=0.05, verbose=False):
        """Convert SVG path given as svgpathtool Path object to an EdgeSequence

        * dist_tol: tolerance for vertex closeness to be considered the same
          vertex
        NOTE: Assumes that the path can be chained
        """
        # Convert as is
        edges = []
        for seg in path._segments:
            # skip segments of length zero
            if close_enough(seg.length(), tol=dist_tol):
                if verbose:
                    print('Skipped: ', seg)
                continue
            edges.append(EdgeFactory.from_svg_curve(seg))

        # Chain the edges: consecutive edges share one vertex object
        if len(edges) > 1:
            for i in range(1, len(edges)):
                # Both coordinates must match within tolerance
                if not all(close_enough(s, e, tol=dist_tol)
                           for s, e in zip(edges[i].start, edges[i - 1].end)):
                    raise ValueError(
                        'EdgeSequence::from_svg_path::input path is not chained')

                edges[i].start = edges[i - 1].end
        return EdgeSequence(*edges, verbose=verbose)

    @staticmethod
    def from_verts(*verts, loop=False):
        """Generate sequence of straight edges from given vertices. If loop==True,
        the method also closes the edge sequence as a loop
        """
        seq = EdgeSequence(Edge(verts[0], verts[1]))
        for i in range(2, len(verts)):
            seq.append(Edge(seq[-1].end, verts[i]))

        if loop:
            seq.append(Edge(seq[-1].end, seq[0].start))

        seq.isChained()  # print warning if smth is wrong
        return seq

    @staticmethod
    def from_fractions(start, end, frac=None):
        """A sequence of straight edges between start and end whose lengths
        are distributed as specified in the frac list

        Parameters:
            * frac -- list of length fractions. Every entry is in (0, 1],
              all entries sum up to 1
        """
        frac = [abs(f) for f in frac]
        if not close_enough(fsum := sum(frac), 1, 1e-4):
            raise RuntimeError(f'EdgeSequence::ERROR::fraction is incorrect. The sum {fsum} is not 1')

        vec = np.asarray(end) - np.asarray(start)
        verts = [start]
        # Place intermediate vertices at cumulative fractions along the segment
        for i in range(len(frac) - 1):
            verts.append(
                [verts[-1][0] + frac[i]*vec[0],
                 verts[-1][1] + frac[i]*vec[1]]
            )
        verts.append(end)

        return EdgeSeqFactory.from_verts(*verts)

    @staticmethod
    def side_with_cut(start=(0, 0), end=(1, 0), start_cut=0, end_cut=0):
        """Edge with internal vertices that allows to stitch only part of the border represented
        by the long side edge

        * start_cut / end_cut -- fraction of the edge length at which to add
          the extra vertex near the corresponding end (0 == no extra vertex)
        """
        nstart, nend = np.array(start), np.array(end)
        verts = [start]

        if start_cut > 0:
            verts.append((start + start_cut * (nend-nstart)).tolist())
        if end_cut > 0:
            verts.append((end - end_cut * (nend-nstart)).tolist())
        verts.append(end)

        edges = EdgeSeqFactory.from_verts(*verts)

        return edges

    # ------ Darts ------
    @staticmethod
    def dart_shape(width, side_len=None, depth=None):
        """Shape of simple triangular dart:
        specified by desired width and either the dart side length or depth
        """
        if side_len is None and depth is None:
            raise ValueError(
                'EdgeFactory::ERROR::dart shape is not fully specified.'
                ' Add dart side length or dart perpendicular'
            )

        if depth is None:
            # Derive the depth from the side length (Pythagoras);
            # requires the side to be at least half the width
            if width / 2 > side_len:
                raise ValueError(
                    f'EdgeFactory::ERROR::Requested dart shape (w={width}, side={side_len}) '
                    'does not form a valid triangle')
            depth = np.sqrt((side_len**2 - (width / 2)**2))

        return EdgeSeqFactory.from_verts([0, 0], [width / 2, -depth], [width, 0])

    # --- SVG ----
    @staticmethod
    def halfs_from_svg(svg_filepath, target_height=None):
        """Load a shape from an SVG and split it in half (vertically)

        * target_height -- scales the shape s.t. its height matches the given
          number

        Shapes restrictions:
        1) every path in the provided SVG is assumed to form a closed loop
           that has exactly 2 intersection points with a vertical line
           passing though the middle of the shape
        2) The paths should not be nested (inside each other) or intersect
           as to not create disconnected pieces of the edge when used in
           shape projection
        """
        paths, _ = svgpath.svg2paths(svg_filepath)

        # Scaling
        if target_height is not None:
            bbox = bbox_paths(paths)
            scale = target_height / (bbox[-1] - bbox[-2])
            paths = [p.scaled(scale) for p in paths]

        # Get the half-shapes
        left, right = split_half_svg_paths(paths)

        # Turn into Edge Sequences
        left_seqs = [EdgeSeqFactory.from_svg_path(p) for p in left]
        right_seqs = [EdgeSeqFactory.from_svg_path(p) for p in right]

        # In SVG OY is looking downward, we are using OY looking upward
        # Flip the shape to align
        bbox = bbox_paths(paths)
        center_y = (bbox[2] + bbox[3]) / 2
        left_seqs = [p.reflect([bbox[0], center_y],
                               [bbox[1], center_y]) for p in left_seqs]
        right_seqs = [p.reflect([bbox[0], center_y],
                                [bbox[1], center_y]) for p in right_seqs]

        # Edge orientation s.t. the shortcut directions align with OY
        # It preserves the correct relative placement of the shapes later
        for p in left_seqs:
            if (p.shortcut()[1][1] - p.shortcut()[0][1]) < 0:
                p.reverse()
        for p in right_seqs:
            if (p.shortcut()[1][1] - p.shortcut()[0][1]) < 0:
                p.reverse()

        return left_seqs, right_seqs
|
||||
|
||||
# --- For Curves ---
|
||||
def _fit_pass_point(cp, target_location):
    """Objective for fitting the control point of a basic [[0, 0] -> [1, 0]]
    Quadratic Bezier s.t. it passes through the target location.

    * cp -- current guess for the Quadratic Bezier control point coordinates
      (relative to the edge)
    * target_location -- point the curve should pass through --
      expressed in RELATIVE coordinates to your desired edge

    Returns the squared distance between the target and the curve point
    on the vertical line through the target.
    """
    control_bezier = np.array([
        [0, 0],
        cp,
        [1, 0]
    ])
    params = list_to_c(control_bezier)
    curve = svgpath.QuadraticBezier(*params)

    # Vertical segment through the target, long enough to cross the curve
    inter_segment = svgpath.Line(
        target_location[0] + 1j * target_location[1] * 2,
        target_location[0] + 1j * (- target_location[1] * 2)
    )

    intersect_t = curve.intersect(inter_segment)
    point = curve.point(intersect_t[0][0])  # curve point at the intersection

    diff = abs(point - list_to_c(target_location))

    return diff**2
|
||||
|
||||
|
||||
def _fit_tangents(cp, target_tangent_start, target_tangent_end, reg_strength=0.01):
    """Objective for fitting the control point of a basic [[0, 0] -> [1, 0]]
    Quadratic Bezier s.t. its end tangents match the targets.

    * cp -- current guess for the Quadratic Bezier control point coordinates
      (relative to the edge)
    * target_tangent_start / target_tangent_end -- desired unit tangents at
      t=0 / t=1 (either may be None to leave that end unconstrained)
    * reg_strength -- unused; kept for interface stability
      (NOTE(review): the original docstring was copied from
      _fit_pass_point and described extremum fitting)
    """
    control_bezier = np.array([
        [0, 0],
        cp,
        [1, 0]
    ])
    params = list_to_c(control_bezier)
    curve = svgpath.QuadraticBezier(*params)

    fin = 0
    if target_tangent_start is not None:
        # NOTE: tangents seems to use opposite left/right convention
        target0 = target_tangent_start[0] + 1j*target_tangent_start[1]
        fin += (abs(curve.unit_tangent(0) - target0))**2

    if target_tangent_end is not None:
        target1 = target_tangent_end[0] + 1j*target_tangent_end[1]
        fin += (abs(curve.unit_tangent(1) - target1))**2

    # NOTE: Tried _max_curvature() and Y value regularizaton,
    # but it seems like they are not needed
    return fin
|
||||
|
||||
|
||||
# ---- For SVG Loading ----
|
||||
|
||||
def split_half_svg_paths(paths):
    """Separate SVG paths in half over the vertical line -- for insertion
    into an edge side.

    Returns (left, right) -- lists of svgpath.Path halves, index-aligned
    with the input paths.

    Paths shapes restrictions:
    1) every path in the provided list is assumed to form a closed loop
       that has exactly 2 intersection points with a vertical line passing
       though the middle of the shape
    2) The paths geometry should not be nested
       as to not create disconnected pieces of the edge when used in
       shape projection
    """
    # Shape Bbox
    bbox = bbox_paths(paths)
    center_x = (bbox[0] + bbox[1]) / 2

    # Vertical splitting line through the middle of the overall bbox
    inter_segment = svgpath.Line(
        center_x + 1j * bbox[2],
        center_x + 1j * bbox[3]
    )

    right, left = [], []
    for p in paths:
        # Intersect points
        intersect_t = p.intersect(inter_segment)

        if len(intersect_t) != 2:
            raise ValueError(f'SplitSVGHole::ERROR::Each Provided Svg path should cross vertical like exactly 2 times')

        # Split at the two intersection parameters (in increasing order)
        from_T, to_T = intersect_t[0][0][0], intersect_t[1][0][0]
        if to_T < from_T:
            from_T, to_T = to_T, from_T

        side_1 = p.cropped(from_T, to_T)
        # This order should preserve continuity
        side_2 = svgpath.Path(
            *p.cropped(to_T, 1)._segments,
            *p.cropped(0, from_T)._segments)

        # Collect correctly: side_1 should be the left half
        if side_1.bbox()[2] > center_x:
            side_1, side_2 = side_2, side_1

        right.append(side_2)
        left.append(side_1)

    return left, right
|
||||
365
pygarment/garmentcode/interface.py
Normal file
365
pygarment/garmentcode/interface.py
Normal file
@@ -0,0 +1,365 @@
|
||||
from copy import copy
|
||||
|
||||
from numpy.linalg import norm
|
||||
import numpy as np
|
||||
|
||||
from pygarment.garmentcode.edge import EdgeSequence, Edge
|
||||
from pygarment.garmentcode.utils import close_enough
|
||||
|
||||
|
||||
class Interface:
|
||||
"""Description of an interface of a panel or component
|
||||
that can be used in stitches as a single unit
|
||||
"""
|
||||
def __init__(self, panel, edges, ruffle=1., right_wrong=False):
    """
    Parameters:
        * panel - Panel object
        * edges - Edge or EdgeSequence -- edges in the panel that are
          allowed to connect to
          # TODO rename to something more generic/projection related?
        * ruffle - ruffle coefficient for a particular edge (a single
          number, or a list with one entry per edge). Interface
          object will supply projecting_edges() shape
          s.t. the ruffles with the given rate are created. Default = 1.
          (no ruffles, smooth connection)
        * right_wrong -- control of stitch orientation -- indication if this interface's
          right side of the fabric should be connected to the wrong side of another interface.
          Default -- False -- connect right side of the fabric to the right side of the faric,
          sufficient in most cases.
    """
    self.edges = edges if isinstance(edges, EdgeSequence) else EdgeSequence(edges)
    # One panel / right_wrong flag per edge (same object repeated)
    self.panel = [panel for _ in range(len(self.edges))]  # matches every edge
    self.right_wrong = [right_wrong for _ in range(len(self.edges))]

    # Allow to enforce change of the direction of edges
    # (used in many-to-many stitches correspondance determination)
    self.edges_flipping = [False for _ in range(len(self.edges))]

    # Ruffles are applied to sections
    # Since extending a chain of edges != extending each edge individually
    if isinstance(ruffle, list):
        assert len(ruffle) == len(edges), "Ruffles and Edges don't match"
        # Compress the per-edge list into runs of equal coefficients:
        # each entry is dict(coeff=..., sec=[first_edge, one_past_last])
        self.ruffle = []
        last_coef = None
        last_start = 0
        for i, coef in enumerate(ruffle):
            if coef == last_coef or last_coef is None:
                last_coef = coef  # Making sure to overwrite None
                continue
            self.ruffle.append(dict(coeff=last_coef, sec=[last_start, i]))
            last_start, last_coef = i, coef

        # Close the final run
        self.ruffle.append(dict(coeff=last_coef, sec=[last_start, len(ruffle)]))

    else:
        # Single coefficient: one section spanning all edges
        self.ruffle = [dict(coeff=ruffle, sec=[0, len(self.edges)])]
|
||||
|
||||
def projecting_edges(self, on_oriented=False) -> EdgeSequence:
    """Return edges shape that should be used when projecting interface
    onto another panel.

    * on_oriented -- use the orientation-corrected copy of the edges
      instead of the raw copy

    NOTE: reflects current state of the edge object. Call this function
    again if edges change (e.g. their direction)
    # FIXME projection only works w.r.t. the line connecting the first and
    # the last vertex of the edge sequence -> use with caution
    """
    # Per edge set ruffle application
    projected = self.edges.copy() if not on_oriented else self.oriented_edges()
    for r in self.ruffle:
        if not close_enough(r['coeff'], 1, 1e-3):
            # Section ends at the sequence end, or is not chained to the
            # following edge -> safe to extend directly
            if r['sec'][1] >= len(projected) or not projected[r['sec'][1] - 1:r['sec'][1] + 1].isChained():
                projected[r['sec'][0]:r['sec'][1]].extend(1 / r['coeff'])
            else:
                # Don't let extention to affect the rest of the sequence
                # Find the vert that separates the ruffle seqences
                prev_edge, next_edge = projected[r['sec'][1] - 1], projected[r['sec'][1]]

                # Common vertex
                common_v = prev_edge.end if prev_edge.end is next_edge.end or prev_edge.end is next_edge.start else prev_edge.start

                # Create copy and assign to next edge, temporarily
                # disconnecting the section from the rest of the sequence
                common_v_copy = common_v.copy()
                copy_to_end = False
                if common_v is next_edge.end:
                    next_edge.end = common_v_copy
                    copy_to_end = True
                else:
                    next_edge.start = common_v_copy

                # Extend the sequence
                projected[r['sec'][0]:r['sec'][1]].extend(1 / r['coeff'])

                # move the next edges s.t. created vertex alignes with the original common vertex
                projected[r['sec'][1]:].translate_by([common_v[0] - common_v_copy[0], common_v[1] - common_v_copy[1]])

                # re-chain the edges
                if copy_to_end:
                    next_edge.end = common_v
                else:
                    next_edge.start = common_v

    return projected
|
||||
|
||||
# ANCHOR --- Projections for stitching -- to connect edges correctly and create correct ruffles
|
||||
def projecting_lengths(self):
    """Desired projected length of each interface edge, as specified by
    the ruffle coefficients (numpy array, one entry per edge)."""
    result = []
    for section in self.ruffle:
        section_edges = self.edges[section['sec'][0]:section['sec'][1]]
        if close_enough(section['coeff'], 1, 1e-3):
            # No ruffles in this section -- raw edge lengths
            result.extend(e.length() for e in section_edges)
        else:
            # Ruffled section -- scale lengths down by the coefficient
            result.extend(e.length() / section['coeff'] for e in section_edges)
    return np.array(result)
|
||||
|
||||
def projecting_fractions(self):
    """Desired projected fractions of the interface edges, as specified
    by the ruffle coefficients. Fractions are calculated w.r.t. the
    total projected length."""
    lengths = self.projecting_lengths()
    total = lengths.sum()
    return lengths / total
|
||||
|
||||
def needsFlipping(self, i):
    """Report whether edge i was marked for re-orientation to follow
    the general direction of the interface."""
    flags = self.edges_flipping
    return flags[i]
|
||||
|
||||
# ANCHOR --- Info ----
|
||||
def oriented_edges(self):
    """Orient the edges within the interface sequence along the general
    direction of the interface.

    Creates a copy of the edge sequence s.t. the original edge objects
    are not disturbed; each copied edge gets a `flipped` attribute
    recording whether it was reversed.
    """
    # NOTE we cannot we do the same for the edge sub-sequences:
    # - midpoint of a sequence is less representative
    # - more likely to have weird relative 3D orientations
    # => heuristic won't work as well

    oriented = self.edges.copy()

    for i in range(len(self.edges)):
        if self.needsFlipping(i):
            oriented[i].reverse()
            oriented[i].flipped = True
        else:
            oriented[i].flipped = False
    return oriented
|
||||
|
||||
def verts_3d(self):
    """Return 3D locations of all vertices that participate in the interface."""
    # Uniqueness is by object identity (same as the original `is` checks);
    # collected vertices stay referenced, so their id()s remain valid
    seen_ids = set()
    verts_2d, matching_panels = [], []

    def _register(vert, panel):
        if id(vert) not in seen_ids:
            seen_ids.add(id(vert))
            verts_2d.append(vert)
            matching_panels.append(panel)

    for edge, panel in zip(self.edges, self.panel):
        _register(edge.start, panel)
        _register(edge.end, panel)

    # Lift unique 2D vertices into world (3D) coordinates
    return np.asarray(
        [panel.point_to_3D(v) for v, panel in zip(verts_2d, matching_panels)])
def bbox_3d(self):
    """Return the interface's 3D bounding box as a (min_corner, max_corner) pair."""
    # NOTE: vertex repetitions do not affect the bbox evaluation
    points = []
    for edge, panel in zip(self.edges, self.panel):
        # Linearized curves give a more accurate bbox approximation
        for vert in edge.linearize().verts():
            points.append(panel.point_to_3D(vert))
    points = np.asarray(points)
    return points.min(axis=0), points.max(axis=0)
def __len__(self):
    """Number of edges participating in the interface."""
    edge_count = len(self.edges)
    return edge_count
def __str__(self) -> str:
    """Readable summary: participating panel names + oriented edge sequence."""
    names = [p.name for p in self.panel]
    edges_repr = str(self.oriented_edges())
    return f'Interface: {names}: {edges_repr}'
def __repr__(self) -> str:
    """Mirror __str__ for debugging convenience."""
    return self.__str__()
def panel_names(self):
    """Names of the panels participating in this interface (one per edge, with repetitions)."""
    names = []
    for panel in self.panel:
        names.append(panel.name)
    return names
# ANCHOR --- Interface Updates -----
|
||||
|
||||
def reverse(self, with_edge_dir_reverse=False):
    """Reverse the order of edges in the interface
    (without updating the edge objects).

    Useful for reordering interface edges for correct matching
    in the multi-stitches.

    * with_edge_dir_reverse -- additionally invert the per-edge
        flipping flags
    """
    self.edges.edges.reverse()
    self.panel.reverse()
    self.edges_flipping.reverse()
    if with_edge_dir_reverse:
        self.edges_flipping = [not flag for flag in self.edges_flipping]

    # Mirror the ruffle section boundaries w.r.t. the new edge order
    # (in place, s.t. any external references to the lists stay valid)
    n_edges = len(self.edges)
    for r in self.ruffle:
        r['sec'][0], r['sec'][1] = n_edges - r['sec'][1], n_edges - r['sec'][0]

    return self
def flip_edges(self):
    """Reverse the direction of every edge in the interface
    (without updating the edge objects).

    Useful for updating interface edges for correct matching
    in the multi-stitches.
    """
    self.edges_flipping = [not flag for flag in self.edges_flipping]
    return self
# TODO Edge Sequence Function?
|
||||
def reorder(self, curr_edge_ids, projected_edge_ids):
    """Change the order of edges from curr_edge_ids to projected_edge_ids
    in the interface.

    Note that the input should prescribe new ordering for all affected
    edges e.g. if moving 0 -> 1, specify the new location for 1 as well.
    """
    # Moving an edge outside its ruffle section would require splitting
    # the section -- not supported
    for i, j in zip(curr_edge_ids, projected_edge_ids):
        for r in self.ruffle:
            if (i >= r['sec'][0] and i < r['sec'][1]
                    and (j < r['sec'][0] or j >= r['sec'][1])):
                raise NotImplementedError(
                    f'{self.__class__.__name__}::ERROR::reordering between panel-related sub-segments is not supported')

    # Rebuild all the parallel per-edge containers in the new order
    new_edges = EdgeSequence()
    new_panel_list = []
    new_flipping_info = []
    new_right_wrong = []
    for i in range(len(self.panel)):
        # NOTE(review): `id` shadows the builtin -- holds the source index
        # for position i (identity for unaffected edges)
        id = i if i not in curr_edge_ids else projected_edge_ids[curr_edge_ids.index(i)]
        # edges
        new_edges.append(self.edges[id])
        new_flipping_info.append(self.edges_flipping[id])
        # panels
        new_panel_list.append(self.panel[id])
        # connectivity indication
        new_right_wrong.append(self.right_wrong[id])

    self.edges = new_edges
    self.panel = new_panel_list
    self.edges_flipping = new_flipping_info
    self.right_wrong = new_right_wrong
def substitute(self, orig, new_edges, new_panels):
    """Update the interface edges with correct correction of panels.

    * orig -- could be an edge object or the id of the edge that needs
        substitution
    * new_edges -- new edge (or edge sequence) to insert in place of orig
    * new_panels -- per-edge panel objects indicating where each of
        new_edges belong to (a single panel, or a list/tuple of panels)

    NOTE: the ruffle indicator for the new_edges is expected to be the
        same as for the orig edge.
        Specifying new indicators is not yet supported.

    Returns self for chaining.
    """
    # Resolve an edge object / negative index into a plain list index
    if isinstance(orig, Edge):
        orig = self.edges.index(orig)
    if orig < 0:
        orig = len(self.edges) + orig
    self.edges.substitute(orig, new_edges)

    # Update panels & flip info & right_wrong info:
    # the new edges inherit the indicators of the edge they replace
    self.panel.pop(orig)
    curr_edges_flip = self.edges_flipping.pop(orig)
    curr_right_wrong = self.right_wrong.pop(orig)
    if isinstance(new_panels, (list, tuple)):
        for j in range(len(new_panels)):
            self.panel.insert(orig + j, new_panels[j])
            # TODOLOW Note propagation of default values. Allow to specify them as func input!
            self.edges_flipping.insert(orig + j, curr_edges_flip)
            self.right_wrong.insert(orig + j, curr_right_wrong)
    else:
        self.panel.insert(orig, new_panels)
        self.edges_flipping.insert(orig, curr_edges_flip)
        # FIX: was `insert(orig + j, ...)` -- `j` is undefined in this
        # branch and raised NameError for single-panel substitution
        self.right_wrong.insert(orig, curr_right_wrong)

    # Propagate ruffle indicators: shift section boundaries that lie
    # past the insertion point by the number of extra edges inserted
    ins_len = 1 if isinstance(new_edges, Edge) else len(new_edges)
    if ins_len > 1:
        for it in self.ruffle:  # UPD ruffle indicators
            if it['sec'][0] > orig:
                it['sec'][0] += ins_len - 1
            if it['sec'][1] > orig:
                it['sec'][1] += ins_len - 1

    return self
def set_right_wrong(self, right_wrong):
    """Assign the given right_wrong connectivity value to every edge of the interface."""
    self.right_wrong = [right_wrong] * len(self.edges)
    return self
# ANCHOR ----- Statics ----
|
||||
@staticmethod
def from_multiple(*ints):
    """Create interface from other interfaces:
        * Allows to use different panels in one interface
        * different ruffle values in one interface

    # NOTE the relative order of edges is preserved from the
        original interfaces and the incoming interface sequence.
        This order will then be used in the StitchingRule when
        determining connectivity between interfaces
    """
    new_int = copy(ints[0])  # shallow copy -- don't create unnecessary objects
    new_int.edges = EdgeSequence()
    new_int.edges_flipping = []
    new_int.panel = []
    new_int.ruffle = []
    new_int.right_wrong = []

    for elem in ints:
        # Offset the incoming ruffle sections by the edges already merged
        shift = len(new_int.edges)
        if elem.ruffle:
            # FIX: guard against empty ruffle lists -- with len == 0 the
            # slice `[-0:]` selects (and re-shifts) ALL previously merged
            # ruffle sections instead of none
            new_int.ruffle += [copy(r) for r in elem.ruffle]
            for r in new_int.ruffle[-len(elem.ruffle):]:
                r.update(sec=[r['sec'][0] + shift, r['sec'][1] + shift])

        new_int.edges.append(elem.edges)
        new_int.panel += elem.panel
        new_int.right_wrong += elem.right_wrong
        new_int.edges_flipping += elem.edges_flipping

    return new_int
@staticmethod
def _is_order_matching(panel_s, vert_s, panel_1, vert1, panel_2, vert2) -> bool:
    """Tell whether vert1 (on panel_1) is closer in 3D to vert_s (on panel_s)
    than vert2 (on panel_2) is."""
    anchor = panel_s.point_to_3D(vert_s)
    dist_1 = norm(panel_1.point_to_3D(vert1) - anchor)
    dist_2 = norm(panel_2.point_to_3D(vert2) - anchor)
    return dist_1 < dist_2
606
pygarment/garmentcode/operators.py
Normal file
606
pygarment/garmentcode/operators.py
Normal file
@@ -0,0 +1,606 @@
|
||||
"""Shortcuts for common operations on panels and components"""
|
||||
from copy import deepcopy, copy
|
||||
|
||||
import numpy as np
|
||||
from numpy.linalg import norm
|
||||
from scipy.spatial.transform import Rotation as R
|
||||
from scipy.optimize import minimize
|
||||
import svgpathtools as svgpath
|
||||
|
||||
from pygarment.garmentcode.edge import Edge, CurveEdge, EdgeSequence, ILENGTH_S_TOL
|
||||
from pygarment.garmentcode.interface import Interface
|
||||
from pygarment.garmentcode.utils import vector_angle, close_enough, c_to_list, c_to_np
|
||||
from pygarment.garmentcode.utils import list_to_c
|
||||
from pygarment.garmentcode.base import BaseComponent
|
||||
|
||||
|
||||
# ANCHOR ----- Edge Sequences Modifiers ----
|
||||
def cut_corner(target_shape: EdgeSequence, target_interface: Interface,
               verbose: bool = False):
    """ Cut the corner made of edges 1 and 2 following the shape of target_shape.
    This routine updates the panel geometry and interfaces appropriately.

    Parameters:
    * 'target_shape' is an EdgeSequence that is expected to contain one
        Edge or sequence of chained Edges
        (next one starts from the end vertex of the one before)
        # NOTE: 'target_shape' might be scaled (along the main direction)
        to fit the corner size
    * target_interface -- the chained pair of edges that form the corner
        to cut, s.t. the end vertex of eid1 is at the corner
        # NOTE: Onto edges are expected to be straight lines for simplicity

    # NOTE There might be slight computational errors in the resulting
        shape, that are more pronounced on svg visualizations due to
        scaling and rasterization

    Side-Effects:
        * Modifies the panel shape to insert new edges
        * Adds a new interface object corresponding to new edges to the
          panel interface list

    Returns:
        * Newly inserted edges
        * New interface object corresponding to new edges
    """
    # TODO Support any number of edges in the target corner edges

    # ---- Evaluate optimal projection of the target shape onto the corner
    corner_shape = target_shape.copy()
    panel = target_interface.panel[0]  # TODO Support multiple panels???
    target_edges = target_interface.edges

    # Get rid of directions by working on vertices
    if target_edges[0].start is target_edges[-1].end:
        # Original edges have been reversed in normalization or smth
        target_edges.edges.reverse()  # UPD the order
    if corner_shape[0].start is corner_shape[-1].end:
        # Original edges have been reversed in normalization or smth
        corner_shape.edges.reverse()  # UPD the order

    if corner_shape[0].start[1] > corner_shape[-1].end[1]:
        # now corner shape is oriented the same way as vertices
        corner_shape.reverse()
    corner_shape.snap_to([0, 0])

    shortcut = corner_shape.shortcut()

    # Curves (can be defined outside)
    curve1 = target_edges[0].as_curve()
    curve2 = target_edges[1].as_curve()

    # align order with the projecting shape, s.t.
    # curve2 is always the lower one
    swaped = False
    if target_edges[0].start[1] > target_edges[-1].end[1]:
        curve1, curve2 = curve2, curve1
        swaped = True
    # NOW curve1 is lower then curve2

    # ----- FIND OPTIMAL PLACE -----
    # Optimize two curve parameters s.t. the vector between the two found
    # points matches the shortcut vector of the inserted shape
    start = [0.5, 0.5]
    out = minimize(
        _fit_location_corner, start,
        args=(shortcut[1] - shortcut[0], curve1, curve2),
        bounds=[(0, 1), (0, 1)])

    if verbose and not out.success:
        print(f'Cut_corner::ERROR::finding the projection (translation) is unsuccessful. Likely an error in edges choice')
        print(out)

    if verbose and not close_enough(out.fun):
        print(f'Cut_corner::WARNING::projection on {target_interface} finished with fun={out.fun}')
        print(out)

    loc = out.x
    point1 = c_to_list(curve1.point(loc[0]))
    # re-align corner_shape with found shifts
    corner_shape.snap_to(point1)

    # ----- UPD panel ----
    # Complete to the full corner -- connect with the initial vertices
    if swaped:
        # The edges are aligned as v2 -> vc -> v1
        corner_shape.reverse()
        loc[0], loc[1] = loc[1], loc[0]

    # Insert a new shape: keep the outer parts of the two corner edges,
    # drop the part between the found projection points
    cut_edge1, _ = target_edges[0].subdivide_param([loc[0], 1-loc[0]])
    _, cut_edge2 = target_edges[1].subdivide_param([loc[1], 1-loc[1]])

    cut_edge1.end = corner_shape[0].start  # Connect with new insert
    cut_edge2.start = corner_shape[-1].end

    corner_shape.insert(0, cut_edge1)
    corner_shape.append(cut_edge2)

    # Substitute edges in the panel definition
    panel.edges.pop(target_edges[0])
    panel.edges.substitute(target_edges[1], corner_shape)

    # Update interface definitions
    target_edges = EdgeSequence(target_edges.edges)  # keep the same edge references,
    # but not the same edge sequence reference
    # In case it matches one of the interfaces (we don't want target edges to be overriden)
    # NOTE(review): `iter` shadows the builtin -- consider renaming
    iter = panel.interfaces if isinstance(panel.interfaces, list) else panel.interfaces.values()
    for intr in iter:
        # Substitute old edges with what's left from them after cutting
        if target_edges[0] in intr.edges:
            intr.edges.substitute(target_edges[0], corner_shape[0])
        if target_edges[1] in intr.edges:
            intr.edges.substitute(target_edges[1], corner_shape[-1])

    # Add new interface corresponding to the introduced cut
    new_int = Interface(panel, corner_shape[1:-1])
    if isinstance(panel.interfaces, list):
        panel.interfaces.append(new_int)
    else:
        panel.interfaces[f'int_{len(panel.interfaces)}'] = new_int

    return corner_shape[1:-1], new_int
def cut_into_edge(target_shape, base_edge: Edge, offset=0, right=True,
                  flip_target=False, tol=1e-2):
    """ Insert edges of the target_shape into the given base_edge, starting
    from offset. Edges in target shape are rotated s.t. start -> end
    vertex vector is aligned with the edge.

    NOTE: Supports making multiple cuts in one go maintaining the relative
        distances between cuts
        provided that
        * they are all specified in the same coordinate system
        * (for now) the openings (shortcuts) of each cut are aligned with
          OY direction

    Parameters:
    * target_shape -- list of single edge, chained edges, or multiple
        chained EdgeSequences to be inserted in the edge.
    * base_edge -- edge object, defining the border
    * offset -- position of the center of the target shape along the edge.
    * right -- which direction the cut should be oriented w.r.t. the
        direction of base edge
    * flip_target -- reflect the shape w.r.t its central perpendicular
        (default=False, no action taken)
        # NOTE(review): flip_target is ignored when target_shape is a
        single EdgeSequence (the dispatch below drops it) -- confirm
        whether this is intended

    Returns:
        * Newly created edges that accomodate the cut
        * Edges corresponding to the target shape
        * Edges that lie on the original base edge
    """

    # TODO Not only for Y-aligned shapes
    # TODOLOW Add a parameter: Align target_shape by center or from the start of the offset
    # NOTE: the optimization routine might be different for the two options

    # Single chained sequence -> delegate to the single-cut routine
    if isinstance(target_shape, EdgeSequence):
        return cut_into_edge_single(
            target_shape, base_edge, offset, right, tol)

    # center of the shape (midpoint of the overall Y-extent)
    shortcuts = np.asarray([e.shortcut() for e in target_shape])
    median_y = (shortcuts[:, :, 1].max() + shortcuts[:, :, 1].min()) / 2

    # Flip the shapes if requested
    if flip_target:
        target_shape = [s.copy() for s in target_shape]
        # Flip
        target_shape = [s.reflect([0, median_y], [1, median_y])
                        for s in target_shape]
        # Flip the order as well to reflect orientation change
        target_shape = [s.reverse() for s in target_shape]

    # Calculate relative offsets to place the whole shape at the target offset
    shortcuts = np.asarray([e.shortcut() for e in target_shape])
    rel_offsets = [(s[0][1] + s[1][1]) / 2 - median_y for s in shortcuts]
    per_seq_offsets = [offset + r for r in rel_offsets]

    # Project from farthest to closest, recursing once per sub-sequence;
    # each cut subdivides the (remaining) first segment of the base edge
    sorted_tup = sorted(zip(per_seq_offsets, target_shape), reverse=True)
    proj_edge, int_edges = base_edge, EdgeSequence(base_edge)
    new_in_edges = EdgeSequence()
    all_new_edges = EdgeSequence(base_edge)
    for off, shape in sorted_tup:
        new_edge, in_edges, new_interface = cut_into_edge(
            shape, proj_edge, offset=off, right=right, tol=tol)

        all_new_edges.substitute(proj_edge, new_edge)
        int_edges.substitute(proj_edge, new_interface)
        new_in_edges.append(in_edges)
        proj_edge = new_edge[0]

    return all_new_edges, new_in_edges, int_edges
def cut_into_edge_single(target_shape, base_edge: Edge, offset=0, right=True,
                         tol=1e-2, verbose: bool = False):
    """ Insert edges of the target_shape into the given base_edge, starting
    from offset.
    Edges in target shape are rotated s.t. start -> end vertex vector is
    aligned with the edge.

    Parameters:
    * target_shape -- list of single edge or chained edges to be inserted
        in the edge.
    * base_edge -- edge object, defining the border
    * right -- which direction the cut should be oriented w.r.t. the
        direction of base edge
    * offset -- position of the center of the target shape along the edge.
    * tol -- tolerance for the placement checks
    * verbose -- print diagnostic info on optimization issues

    Returns:
        * Newly created edges that accommodate the cut
        * Edges corresponding to the target shape
        * Edges that lie on the original base edge

    Raises:
        * ValueError if the offset places the cut outside the edge length
        * RuntimeError if the projection optimization fails to fit
    """

    target_shape = EdgeSequence(target_shape)
    new_edges = target_shape.copy().snap_to([0, 0])  # copy and normalize translation of vertices

    # Simplify to vectors
    shortcut = new_edges.shortcut()  # "Interface" of the shape to insert
    # After snap_to([0, 0]) the first shortcut point is the origin, so the
    # Frobenius norm of the point pair equals the opening width
    target_shape_w = norm(shortcut)
    edge_len = base_edge.length()

    if offset < target_shape_w / 2 - tol or offset > (edge_len - target_shape_w / 2) + tol:
        # NOTE: This is not a definitive check, and the cut might still not fit, depending on the base_edge curvature
        raise ValueError(f'Operators-CutingIntoEdge::ERROR::offset value is not within the base_edge length')

    # find starting vertex for insertion & place edges there
    curve = base_edge.as_curve()
    rel_offset = curve.ilength(offset, s_tol=ILENGTH_S_TOL)

    # ----- OPTIMIZATION ---
    # Find parameter shifts around rel_offset s.t. the chord between the
    # two curve points matches the opening width (symmetrically)
    # NOTE(review): `bounds` has one pair for two variables -- confirm
    # this is accepted/intended by the scipy version in use
    start = [0.1, 0.1]
    out = minimize(
        _fit_location_edge, start,
        args=(rel_offset, target_shape_w, curve),
        bounds=[(0, 1)])
    shift = out.x

    # Error checks
    if verbose and not out.success:
        print(f'Cut_edge::ERROR::finding the projection (translation) is unsuccessful. Likely an error in edges choice')

    if not close_enough(out.fun, tol=0.01):
        if verbose:
            print(out)
        raise RuntimeError(f'Cut_edge::ERROR::projection on {base_edge} finished with fun={out.fun}')

    if rel_offset + shift[0] > 1 + tol or (rel_offset - shift[1]) < 0 - tol:
        raise RuntimeError(
            f'Cut_edge::ERROR::projection on {base_edge} is out of edge bounds: '
            f'[{rel_offset - shift[1], rel_offset + shift[0]}].'
            ' Check the offset value')

    # All good -- integrate the target shape
    # Snap to the edge endpoints when the cut reaches (almost) the ends
    ins_point = c_to_np(curve.point(rel_offset - shift[1])) if (rel_offset - shift[1]) > tol else base_edge.start
    fin_point = c_to_np(curve.point(rel_offset + shift[0])) if (rel_offset + shift[0]) < 1 - tol else base_edge.end

    # Align the shape with an edge
    # find rotation to apply on target shape
    insert_vector = np.asarray(fin_point) - np.asarray(ins_point)
    angle = vector_angle(shortcut[1] - shortcut[0], insert_vector)
    new_edges.rotate(angle)

    # place
    new_edges.snap_to(ins_point)

    # Check orientation: is the shape's average vertex on the requested
    # side of the insertion vector?
    avg_vertex = np.asarray(new_edges.verts()).mean(0)
    right_position = np.sign(np.cross(insert_vector, avg_vertex - np.asarray(new_edges[0].start))) == -1
    if not right and right_position or right and not right_position:
        # flip shape to match the requested direction
        new_edges.reflect(new_edges[0].start, new_edges[-1].end)

    # Integrate edges
    # NOTE: no need to create extra edges if the shape is inserted right at the beginning or end of the edge
    base_edge_leftovers = EdgeSequence()
    start_id, end_id = 0, len(new_edges)

    if ins_point is base_edge.start:
        new_edges[0].start = base_edge.start  # Connect into the original edge
    else:
        # TODOLOW more elegant subroutine
        start_part = base_edge.subdivide_param([rel_offset - shift[1], 1 - (rel_offset - shift[1])])[0]
        start_part.end = new_edges[0].start
        new_edges.insert(0, start_part)
        base_edge_leftovers.append(new_edges[0])
        start_id = 1

    if fin_point is base_edge.end:
        new_edges[-1].end = base_edge.end  # Connect into the original edge
    else:
        end_part = base_edge.subdivide_param([rel_offset + shift[0], 1 - (rel_offset + shift[0])])[-1]
        end_part.start = new_edges[-1].end
        new_edges.append(end_part)
        base_edge_leftovers.append(new_edges[-1])
        end_id = -1

    return new_edges, new_edges[start_id:end_id], base_edge_leftovers
def _fit_location_corner(l, diff_target, curve1, curve2,
                         verbose: bool = False):
    """Squared mismatch between the current point-to-point vector
    (curve1(l[0]) -> curve2(l[1])) and the target vector `diff_target`."""
    # Current points on the two curves
    p_one = c_to_np(curve1.point(l[0]))
    p_two = c_to_np(curve2.point(l[1]))
    residual = (p_two - p_one) - diff_target

    if verbose:
        print('Location Progression: ', residual[0]**2, residual[1]**2)

    return residual[0]**2 + residual[1]**2
def _fit_location_edge(shift, location, width_target, curve,
                       verbose: bool = False):
    """Objective: place two points on `curve` around `location` s.t. the
    distance between them matches `width_target`, keeping both points
    equidistant from the center point."""
    # Current points on the curve
    center = c_to_np(curve.point(location))  # TODO this is constant
    upper = c_to_np(curve.point(location + shift[0]))
    lower = c_to_np(curve.point(location - shift[1]))

    width_term = (_dist(upper, lower) - width_target)**2
    if verbose:
        print('Location Progression: ', width_term)

    # Regularization: keep both points at the same distance from center
    symmetry_term = (_dist(upper, center) - _dist(lower, center))**2

    return width_term + symmetry_term
# ANCHOR ----- Panel operations ------
|
||||
def distribute_Y(component, n_copies, odd_copy_shift=0, name_tag='panel'):
    """Distribute copies of component over the circle around Oy.

    * component -- component to duplicate; it is renamed to f'{name_tag}_0'
    * n_copies -- total number of copies (including the original)
    * odd_copy_shift -- extra shift along the component norm applied to
        every other copy, to resolve collisions
        # NOTE(review): the `not i % 2` test shifts EVEN-indexed copies
        (0, 2, ...) including the original -- confirm against the name
    * name_tag -- base for the unique copy names

    Returns the list of all copies (original first).
    """
    copies = [ component ]
    component.name = f'{name_tag}_0'  # Unique
    # Rotation step between consecutive copies
    delta_rotation = R.from_euler('XYZ', [0, 360 / n_copies, 0], degrees=True)

    for i in range(n_copies - 1):
        new_component = deepcopy(copies[-1])
        new_component.name = f'{name_tag}_{i + 1}'  # Unique
        new_component.rotate_by(delta_rotation)
        # Rotate the placement as well, s.t. copies sit on the circle
        new_component.translate_to(delta_rotation.apply(new_component.translation))

        copies.append(new_component)

    # shift around to resolve collisions (hopefully)
    if odd_copy_shift:
        for i in range(n_copies):
            if not i % 2:
                copies[i].translate_by(copies[i].norm() * odd_copy_shift)

    return copies
def distribute_horisontally(component, n_copies, stride=20, name_tag='panel'):
    """Distribute copies of component over the straight horisontal line
    perpendicular to the norm.

    * component -- component (or panel) to duplicate; renamed to f'{name_tag}_0'
    * n_copies -- total number of copies (including the original)
    * stride -- distance between consecutive copies
    * name_tag -- base for the unique copy names

    Returns the list of all copies (original first).
    """
    copies = [ component ]
    component.name = f'{name_tag}_0'  # Unique

    if isinstance(component, BaseComponent):
        translation_dir = component.rotation.apply([0, 0, 1])  # Horisontally along the panel
        # FIXME What if it's looking up?
        translation_dir = np.cross(translation_dir, [0, 1, 0])  # perpendicular to Y
        translation_dir = translation_dir / norm(translation_dir)
    else:
        translation_dir = np.asarray([1, 0, 0])
    # FIX: `delta_translation` was only assigned inside the BaseComponent
    # branch, raising NameError in the loop for plain components
    delta_translation = translation_dir * stride

    for i in range(n_copies - 1):
        new_component = deepcopy(copies[-1])  # TODO proper copy
        new_component.name = f'{name_tag}_{i + 1}'  # Unique
        new_component.translate_by(delta_translation)

        copies.append(new_component)

    return copies
# ANCHOR ----- Sleeve support -----
|
||||
def even_armhole_openings(front_opening, back_opening, tol=1e-2, verbose: bool = False):
    """
    Rearrange sleeve openings for front and back s.t. their projection
    on a vertical line is the same, while preserving the overall shape.
    Allows for creation of two symmetric sleeve panels from them.

    !! Important: assumes that the front opening is longer than the back opening.

    Modifies front_opening / back_opening in place and returns them.
    """
    # Construct sleeve panel shapes from opening inverses
    cfront, cback = front_opening.copy(), back_opening.copy()
    cback.reflect([0, 0], [1, 0]).reverse().snap_to(cfront[-1].end)

    # Cutout: the line connecting the two opening extremes, and its midpoint
    slope = np.array([cfront[0].start, cback[-1].end])
    slope_vec = slope[1] - slope[0]
    slope_perp = np.asarray([-slope_vec[1], slope_vec[0]])
    slope_midpoint = (slope[0] + slope[1]) / 2

    # Intersection with the sleeve itself line
    # svgpath tools allow solution regardless of edge types
    inter_segment = svgpath.Line(
        list_to_c(slope_midpoint - 20 * slope_perp),
        list_to_c(slope_midpoint + 20 * slope_perp)
    )
    target_segment = cfront[-1].as_curve()

    intersect_t = target_segment.intersect(inter_segment)
    if len(intersect_t) != 1 and verbose:
        print(
            f'Redistribute Sleeve Openings::WARNING::{len(intersect_t)} intersection points instead of one. '
            f'Front and back opening curves might be the same with lengths: {cfront.length()}, {cback.length()}'
        )

    if (len(intersect_t) >= 1
            and not (close_enough(intersect_t[0][0], 0, tol=tol)  # checking if they are already ok separated
                     or close_enough(intersect_t[0][0], 1, tol=tol))):
        # The current separation is not satisfactory
        # Update the opening shapes: split the last front edge at the
        # intersection and hand the cut-off piece to the back opening
        intersect_t = intersect_t[0][0]
        subdiv = front_opening.edges[-1].subdivide_param([intersect_t, 1 - intersect_t])
        front_opening.substitute(-1, subdiv[0])

        # Move this part to the back opening
        subdiv[1].start, subdiv[1].end = copy(subdiv[1].start), copy(subdiv[1].end)  # Disconnect vertices in subdivided version
        subdiv.pop(0)  # TODOLOW No reflect in the edge class??
        subdiv.reflect([0, 0], [1, 0]).reverse().snap_to(back_opening[-1].end)
        subdiv[0].start = back_opening[-1].end

        back_opening.append(subdiv[0])

    # Align the slope with OY direction
    # for correct size of sleeve panels
    slope_angle = np.arctan(-slope_vec[0] / slope_vec[1])
    front_opening.rotate(-slope_angle)
    back_opening.rotate(slope_angle)

    return front_opening, back_opening
# ANCHOR ----- Curve tools -----
|
||||
def _avg_curvature(curve, points_estimates=100):
    """Average curvature of a curve, estimated by uniform sampling.

    NOTE: this works slowly, but direct evaluation seems infeasible.
    Some hints here:
    https://math.stackexchange.com/questions/220900/bezier-curvature
    """
    samples = np.linspace(0, 1, points_estimates)
    total = sum(curve.curvature(t) for t in samples)
    return total / points_estimates
def _max_curvature(curve, points_estimates=100):
    """Maximum curvature of a curve, estimated by uniform sampling."""
    # NOTE: this works slowly, but direct evaluation seems infeasible
    # Some hints here: https://math.stackexchange.com/questions/1954845/bezier-curvature-extrema
    t_space = np.linspace(0, 1, points_estimates)
    return max([curve.curvature(t) for t in t_space])
def _bend_extend_2_tangent(
        shift, cp, target_len, direction,
        target_tangent_start, target_tangent_end,
        point_estimates=50):
    """Evaluate how well a shifted curve preserves the length and matches
    the target endpoint tangents.

    * shift -- 5 optimization variables: xy shifts of the two inner control
        points + extension of the last control point along `direction`
    * cp -- original control points of a cubic Bezier (as 2D points)
    * target_len -- curve length to preserve
    * direction -- unit vector for the allowed end-point extension
    * target_tangent_start / target_tangent_end -- desired unit tangents
        (as complex numbers)

    NOTE: point_estimates controls curvature evaluation.
        The higher the number, the more stable the optimization,
        but higher computational cost.
    """
    # Apply the candidate shifts to the control polygon
    control = np.array([
        cp[0],
        [cp[1][0] + shift[0], cp[1][1] + shift[1]],
        [cp[2][0] + shift[2], cp[2][1] + shift[3]],
        cp[-1] + direction * shift[4]
    ])

    # Pack 2D points into complex numbers for svgpathtools
    params = control[:, 0] + 1j*control[:, 1]
    curve_inverse = svgpath.CubicBezier(*params)

    length_diff = (curve_inverse.length() - target_len)**2  # preservation

    tan_0_diff = (abs(curve_inverse.unit_tangent(0) - target_tangent_start))**2
    tan_1_diff = (abs(curve_inverse.unit_tangent(1) - target_tangent_end))**2

    # NOTE: tried regularizing based on Y value in relative coordinates (for speed),
    # But it doesn't produce good results
    curvature_reg = _max_curvature(curve_inverse, points_estimates=point_estimates)**2

    # Penalize large end-point extensions (lightly)
    end_expantion_reg = 0.001*shift[-1]**2

    return length_diff + tan_0_diff + tan_1_diff + curvature_reg + end_expantion_reg
def curve_match_tangents(curve, target_tan0, target_tan1, target_len=None,
                         return_as_edge=False, verbose: bool = False):
    """Update the curve to have the desired tangent directions at endpoints
    while preserving curve length or desired target length ('target_len')
    and overall direction.

    Returns
        * control points for the final CubicBezier curve
        * Or a CurveEdge instance, if return_as_edge=True

    Raises NotImplementedError for non-CubicBezier input.

    NOTE: Only Cubic Bezier curves are supported
    NOTE: Expects good enough initialization ('curve') that approximates
        the desired solution
    """
    if not isinstance(curve, svgpath.CubicBezier):
        raise NotImplementedError(
            f'Curve_match_tangents::ERROR::Only Cubic Bezier curves are supported ',
            f'(got {type(curve)})')

    curve_cps = c_to_np(curve.bpoints())

    # Overall (unit) direction of the curve -- end-point extension axis
    direction = curve_cps[-1] - curve_cps[0]
    direction /= np.linalg.norm(direction)

    # Normalize requested tangents
    target_tan0 = target_tan0 / np.linalg.norm(target_tan0)
    target_tan1 = target_tan1 / np.linalg.norm(target_tan1)

    # match tangents with the requested ones while preserving length
    out = minimize(
        _bend_extend_2_tangent,  # with tangent matching
        [0, 0, 0, 0, 0],
        args=(
            curve_cps,
            curve.length() if target_len is None else target_len,
            direction,
            list_to_c(target_tan0),
            list_to_c(target_tan1),
            70  # NOTE: Low values cause instable resutls
        ),
        method='L-BFGS-B',
    )
    if not out.success:
        if verbose:
            print(f'Curve_match_tangents::WARNING::optimization not successfull')
            print(out)

    shift = out.x

    # Apply the found shifts to the control polygon
    fin_curve_cps = [
        curve_cps[0].tolist(),
        [curve_cps[1][0] + shift[0], curve_cps[1][1] + shift[1]],
        [curve_cps[2][0] + shift[2], curve_cps[2][1] + shift[3]],
        (curve_cps[-1] + direction*shift[-1]).tolist(),
    ]

    if return_as_edge:
        fin_inv_edge = CurveEdge(
            start=fin_curve_cps[0],
            end=fin_curve_cps[-1],
            control_points=fin_curve_cps[1:3],
            relative=False
        )
        return fin_inv_edge

    return fin_curve_cps
# ---- Utils ----
|
||||
|
||||
def _dist(v1, v2):
|
||||
return norm(v2-v1)
|
||||
|
||||
|
||||
def _fit_scale(s, shortcut, v1, v2, vc, d_v1, d_v2):
    """Evaluate how well a shortcut fits the corner when its endpoints are
    shifted a little along the shortcut line.

    * s -- pair of shift factors, one per shortcut endpoint.
    Returns the squared deviation of the shifted endpoints' distance sums
    from the targets d_v1 / d_v2.
    """
    # Shortcut can be used as a 2D vector, not a set of 2D points, e.g.
    moved = deepcopy(shortcut)
    # Slide each endpoint along the shortcut direction by its own factor
    moved[0] += (shortcut[0] - shortcut[1]) * s[0]
    moved[1] += (shortcut[1] - shortcut[0]) * s[1]

    err_first = (d_v1 - _dist(moved[0], v1) - _dist(moved[0], vc)) ** 2
    err_second = (d_v2 - _dist(moved[1], v2) - _dist(moved[1], vc)) ** 2
    return err_first + err_second
|
||||
408
pygarment/garmentcode/panel.py
Normal file
408
pygarment/garmentcode/panel.py
Normal file
@@ -0,0 +1,408 @@
|
||||
import numpy as np
|
||||
from copy import copy
|
||||
from argparse import Namespace
|
||||
from scipy.spatial.transform import Rotation as R
|
||||
|
||||
from pygarment.pattern.core import BasicPattern
|
||||
from pygarment.garmentcode.base import BaseComponent
|
||||
from pygarment.garmentcode.edge import Edge, EdgeSequence, CircleEdge
|
||||
from pygarment.garmentcode.utils import close_enough, vector_align_3D
|
||||
from pygarment.garmentcode.operators import cut_into_edge
|
||||
from pygarment.garmentcode.interface import Interface
|
||||
|
||||
|
||||
class Panel(BaseComponent):
    """ A Base class for defining a Garment component corresponding to a single
    flat piece of fabric

    Defined as a collection of edges on a 2D grid with specified 3D placement
    (world coordinates)

    NOTE: All operations methods return 'self' object to allow sequential
    applications

    """
    def __init__(self, name, label='') -> None:
        """Base class for panel creations
        * Name: panel name. Expected to be a unique identifier of a panel object
        * label: additional panel label (non-unique)
        """
        super().__init__(name)

        self.label = label
        self.translation = np.zeros(3)
        self.rotation = R.from_euler('XYZ', [0, 0, 0])  # zero rotation
        # NOTE: initiating with empty sequence allows .append() to it safely
        self.edges = EdgeSequence()

    # Info
    def pivot_3D(self):
        """Pivot point of a panel in 3D"""
        return self.point_to_3D([0, 0])

    def length(self, longest_dim=False):
        """Length of a panel element in cm

        Defaults to the vertical length of a 2D bounding box
        * longest_dim -- if set, returns the longest dimension out of the bounding box dimensions
        """
        bbox = self.bbox()

        x = abs(bbox[1][0] - bbox[0][0])
        y = abs(bbox[1][1] - bbox[0][1])

        return max(x, y) if longest_dim else y

    def is_self_intersecting(self):
        """Check whether the panel has self-intersection"""
        edge_curves = []
        for e in self.edges:
            if isinstance(e, CircleEdge):
                # NOTE: Intersections for Arcs (Circle edge) fails in svgpathtools:
                # They are not well implemented in svgpathtools, see
                # https://github.com/mathandy/svgpathtools/issues/121
                # https://github.com/mathandy/svgpathtools/blob/fcb648b9bb9591d925876d3b51649fa175b40524/svgpathtools/path.py#L1960
                # Hence using linear approximation for robustness:
                edge_curves += [eseg.as_curve() for eseg in e.linearize(n_verts_inside=10)]
            else:
                edge_curves.append(e.as_curve())

        # NOTE: simple pairwise checks of edges
        for i1 in range(0, len(edge_curves)):
            for i2 in range(i1 + 1, len(edge_curves)):
                intersect_t = edge_curves[i1].intersect(edge_curves[i2])

                # Check exceptions -- intersection at the vertex
                # (edges of the loop legitimately touch at shared endpoints)
                for i in range(len(intersect_t)):
                    t1, t2 = intersect_t[i]
                    if t2 < t1:
                        t1, t2 = t2, t1
                    if close_enough(t1, 0) and close_enough(t2, 1):
                        intersect_t[i] = None
                intersect_t = [el for el in intersect_t if el is not None]

                if intersect_t:  # Any other case of intersections
                    return True
        return False

    # ANCHOR - Operations -- update object in-place
    def set_panel_label(self, label: str, overwrite=True):
        """If overwrite is not enabled, only updates the label if it's empty."""
        if not self.label or overwrite:
            self.label = label

    def set_pivot(self, point_2d, replicate_placement=False):
        """Specify 2D point w.r.t. panel local space
        to be used as pivot for translation and rotation

        Parameters:
        * point_2d -- desired point 2D point w.r.t current pivot (origin)
            of panel local space
        * replicate_placement -- will replicate the location of the panel
            as it was before pivot change
            default - False (no adjustment, the panel may "jump" in 3D)
        """
        point_2d = copy(point_2d)  # Remove unwanted object reference
        # In case an actual vertex was used as a target point

        if replicate_placement:
            self.translation = self.point_to_3D(point_2d)
            # FIXME Replicate rotation

        # UPD vertex locations relative to new pivot
        # FIX: subtract the exact (float) pivot coordinates. The previous
        # int() truncation shifted vertices by the fractional part of the
        # pivot, making the 2D geometry inconsistent with the 3D translation
        # computed above from the full-precision point
        for v in self.edges.verts():
            v[0] -= point_2d[0]
            v[1] -= point_2d[1]

    def top_center_pivot(self):
        """One of the most useful pivots
        is the one in the middle of the top edge of the panel
        """
        vertices = np.asarray(self.edges.verts())

        # out of 2D bounding box sides' midpoints choose the one that is
        # highest in 3D
        top_right = vertices.max(axis=0)
        low_left = vertices.min(axis=0)
        mid_x = (top_right[0] + low_left[0]) / 2
        mid_y = (top_right[1] + low_left[1]) / 2
        mid_points_2D = [
            [mid_x, top_right[1]],
            [mid_x, low_left[1]],
            [top_right[0], mid_y],
            [low_left[0], mid_y]
        ]
        mid_points_3D = np.vstack(tuple(
            [self.point_to_3D(coords) for coords in mid_points_2D]
        ))
        top_mid_point = mid_points_3D[:, 1].argmax()  # highest Y in world space

        self.set_pivot(mid_points_2D[top_mid_point])
        return self

    def translate_by(self, delta_vector):
        """Translate panel by a vector"""
        self.translation = self.translation + np.array(delta_vector)
        # NOTE: One may also want to have autonorm only on the assembly?
        self.autonorm()
        return self

    def translate_to(self, new_translation):
        """Set panel translation to be exactly that vector"""
        self.translation = np.asarray(new_translation)
        self.autonorm()
        return self

    def rotate_by(self, delta_rotation: R):
        """Rotate panel by a given rotation
        * delta_rotation: scipy rotation object
        """
        self.rotation = delta_rotation * self.rotation
        self.autonorm()
        return self

    def rotate_to(self, new_rot: R):
        """Set panel rotation to be exactly the given rotation
        * new_rot: scipy rotation object
        """
        if not isinstance(new_rot, R):
            raise ValueError(f'{self.__class__.__name__}::ERROR::Only accepting rotations in scipy format')
        self.rotation = new_rot
        self.autonorm()
        return self

    def rotate_align(self, vector):
        """Set panel rotation s.t. it's norm is aligned with a given 3D
        vector"""

        vector = np.asarray(vector)
        vector = vector / np.linalg.norm(vector)
        n = self.norm()
        self.rotate_by(vector_align_3D(n, vector))
        return self

    def center_x(self):
        """Adjust translation over x s.t. the center of the panel is aligned
        with the Y axis (center of the body)"""

        center_3d = self.point_to_3D(self._center_2D())
        self.translation[0] += -center_3d[0]
        return self

    def autonorm(self):
        """Update right/wrong side orientation, s.t. the normal of the surface
        looks outside the world origin,
        taking into account the shape and the global position.

        This should provide correct panel orientation in most cases.

        NOTE: for best results, call autonorm after translation
        specification
        """
        norm_dr = self.norm()

        # NOTE: Nothing happens if self.translation is zero
        if np.dot(norm_dr, self.translation) < 0:
            # Swap if wrong
            self.edges.reverse()

    def mirror(self, axis=None):
        """Swap this panel with its mirror image

        Axis specifies 2D axis to swap around: Y axis by default
        """
        if axis is None:
            axis = [0, 1]
        # Case Around Y
        if close_enough(axis[0], tol=1e-4):  # reflection around Y

            # Vertices
            self.edges.reflect([0, 0], [0, 1])

            # Position
            self.translation[0] *= -1

            # Rotations: mirroring flips the sign of Y and Z Euler components
            curr_euler = self.rotation.as_euler('XYZ')
            curr_euler[1] *= -1
            curr_euler[2] *= -1
            self.rotate_to(R.from_euler('XYZ', curr_euler))

            # Fix right/wrong side
            self.autonorm()
        else:
            # TODO Any other axis
            raise NotImplementedError(f'{self.name}::ERROR::Mirrowing over arbitrary axis is not implemented')
        return self

    def add_dart(self, dart_shape, edge, offset, right=True, edge_seq=None, int_edge_seq=None):
        """ Shortcut for adding a dart to a panel:
        * Performs insertion of the dart_shape in the given edge (parameters are the same
            as in pyp.ops.cut_into_edge)
        * Creates stitch to connect the dart sides
        * Modifies edge_sequnces with full set (edge_seq) or only the interface part (int_edge_seq)
            of the created edges, if those are provided

        Returns new edges after insertion, and the interface part (excludes dart edges)
        """
        edges_new, dart_edges, int_new = cut_into_edge(
            dart_shape,
            edge,
            offset=offset,
            right=right)

        # Connect the two sides of the dart with a stitch
        self.stitching_rules.append(
            (Interface(self, dart_edges[0]), Interface(self, dart_edges[1])))

        # Update the edges if given
        if edge_seq is not None:
            edge_seq.substitute(edge, edges_new)
            edges_new = edge_seq
        if int_edge_seq is not None:
            int_edge_seq.substitute(edge, int_new)
            int_new = int_edge_seq

        return edges_new, int_new

    # ANCHOR - Build the panel -- get serializable representation
    def assembly(self):
        """Convert panel into serialazable representation

        NOTE: panel EdgeSequence is assumed to be a single loop of edges
        """
        # FIXME Some panels have weird resulting alignemnt when th
        # is pivot setup is removed -- there is a bug somewhere

        # always start from zero for consistency between panels
        self.set_pivot(self.edges[0].start, replicate_placement=True)

        # Basics
        panel = Namespace(
            translation=self.translation.tolist(),
            rotation=self.rotation.as_euler('XYZ', degrees=True).tolist(),
            vertices=[self.edges[0].start],
            edges=[])

        for i in range(len(self.edges)):
            vertices, edge = self.edges[i].assembly()

            # add new vertices
            if panel.vertices[-1] == vertices[0]:  # We care if both point to the same vertex location, not necessarily the same vertex object
                vert_shift = len(panel.vertices) - 1  # first edge vertex = last vertex already in the loop
                panel.vertices += vertices[1:]
            else:
                vert_shift = len(panel.vertices)
                panel.vertices += vertices

            # upd vertex references in edges according to new vertex ids in
            # the panel vertex loop
            edge['endpoints'] = [id + vert_shift for id in edge['endpoints']]

            edge_shift = len(panel.edges)  # before adding new ones
            self.edges[i].geometric_id = edge_shift   # remember the mapping of logical edge to geometric id in panel loop
            panel.edges.append(edge)

        # Check closing of the loop and upd vertex reference for the last edge
        if panel.vertices[-1] == panel.vertices[0]:
            panel.vertices.pop()
            panel.edges[-1]['endpoints'][-1] = 0

        # Add panel label, if known
        if self.label:
            panel.label = self.label

        spattern = BasicPattern()
        spattern.name = self.name
        spattern.pattern['panels'] = {self.name: vars(panel)}

        # Assembly stitching info (panel might have inner stitches)
        spattern.pattern['stitches'] = self.stitching_rules.assembly()

        return spattern

    # ANCHOR utils
    def _center_2D(self, n_verts_inside=3):
        """Approximate Location of the panel center.

        NOTE: uses crude linear approximation for curved edges,
        n_verts_inside = number of vertices (excluding the start
        and end vertices) used to create a linearization of an edge
        """
        # NOTE: assuming that edges are organized in a loop and share vertices
        lin_edges = EdgeSequence([e.linearize(n_verts_inside)
                                  for e in self.edges])
        verts = lin_edges.verts()

        return np.mean(verts, axis=0)

    def point_to_3D(self, point_2d):
        """Calculate 3D location of a point given in the local 2D plane """
        point_2d = np.asarray(point_2d)
        if len(point_2d) == 2:
            point_2d = np.append(point_2d, 0)  # lift to 3D with zero Z

        point_3d = self.rotation.apply(point_2d)
        point_3d += self.translation
        return point_3d

    def norm(self):
        """Normal direction for the current panel using bounding box"""

        # To make norm evaluation work for non-convex panels
        # Determine points located on bounding box (b_verts_2d), compute
        # norm of consecutive b_verts_3d and the b_verts_3d mean (b_center_3d),
        # then weight the norms.
        # The dominant norm direction should be the correct one

        _, b_verts_2d = self.edges.bbox()
        b_verts_3d = [self.point_to_3D(bv_2d) for bv_2d in b_verts_2d]
        b_center_3d = np.mean((b_verts_3d), axis=0)

        norms = []
        num_b_verts_3d = len(b_verts_3d)
        for i in range(num_b_verts_3d):
            vert_0 = b_verts_3d[i]
            vert_1 = b_verts_3d[(i + 1) % num_b_verts_3d]
            # Pylance + NP error for unreachanble code -- see https://github.com/numpy/numpy/issues/22146
            # Works ok for numpy 1.23.4+
            norm = np.cross(vert_0 - b_center_3d, vert_1 - b_center_3d)
            norm /= np.linalg.norm(norm)
            norms.append(norm)

        # Current norm direction
        avg_norm = sum(norms) / len(norms)

        if close_enough(np.linalg.norm(avg_norm), 0):
            # Indecisive averaging, so using just one of the norms
            # NOTE: sometimes happens on thin arcs
            avg_norm = norms[0]
            if self.verbose:
                print(f'{self.__class__.__name__}::{self.name}::WARNING::Norm evaluation failed, assigning norm based on the first edge')

        final_norm = avg_norm / np.linalg.norm(avg_norm)

        # solve float errors
        for i, ni in enumerate(final_norm):
            if np.isclose([ni], [0.0]):
                final_norm[i] = 0.0

        return final_norm

    def bbox(self):
        """Evaluate 2D bounding box as (min_corner, max_corner)"""
        # Using curve linearization for more accurate approximation of bbox
        lin_edges = EdgeSequence([e.linearize() for e in self.edges])
        verts_2d = np.asarray(lin_edges.verts())

        return verts_2d.min(axis=0), verts_2d.max(axis=0)

    def bbox3D(self):
        """Evaluate 3D bounding box of the current panel as (min_corner, max_corner)"""

        # Using curve linearization for more accurate approximation of bbox
        lin_edges = EdgeSequence([e.linearize() for e in self.edges])
        verts_2d = lin_edges.verts()
        verts_3d = np.asarray([self.point_to_3D(v) for v in verts_2d])

        return verts_3d.min(axis=0), verts_3d.max(axis=0)
|
||||
153
pygarment/garmentcode/params.py
Normal file
153
pygarment/garmentcode/params.py
Normal file
@@ -0,0 +1,153 @@
|
||||
"""Parameter class wrappers around parameter files allowing definition of computed parameters
|
||||
"""
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
from copy import deepcopy
|
||||
import random
|
||||
|
||||
from pygarment.garmentcode.utils import nested_get, nested_set, close_enough
|
||||
|
||||
|
||||
class BodyParametrizationBase:
    """Base class for body parametrization wrappers that allows definition of
    dependent parameters

    Subclasses override `eval_dependencies()` to recompute derived
    measurements whenever base values change.
    """

    def __init__(self, param_file='') -> None:
        """
        * param_file -- optional path to a YAML file with a 'body' section.
            When empty, the object starts with no parameters (call load()
            later).
        """
        self.params = {}
        # Only load when a file is actually given -- consistent with
        # DesignSampler and avoids failing on the default empty path
        if param_file:
            self.load(param_file)

    def __getitem__(self, key):
        return self.params[key]

    def __iter__(self):
        """
        Return an iterator of dict keys
        """
        return iter(self.params)

    # Updates
    def __setitem__(self, key, value):
        """Set a parameter value and re-evaluate dependent parameters"""
        self.params[key] = value
        self.eval_dependencies(key)

    def load(self, param_file):
        """Load new values from a YAML file with a 'body' section"""
        with open(param_file, 'r') as f:
            # renamed from `dict` to avoid shadowing the builtin
            loaded = yaml.safe_load(f)['body']
        self.params.update(loaded)
        self.eval_dependencies()  # Parameters have been updated

    def load_from_dict(self, in_dict):
        """Update parameters from an in-memory dictionary"""
        self.params.update(in_dict)
        self.eval_dependencies()  # Parameters have been updated

    # Processing
    def eval_dependencies(self, key=None):
        """Evaluate dependent attributes, e.g. after a new value has been set

        Define your dependent parameters in the overload of this function

        * key -- the information on what field is being updated
        """
        pass

    # Save
    def save(self, path, name='body_measurements'):
        """Serialize current parameters to <path>/<name>.yaml under 'body' key"""
        with open(Path(path) / f'{name}.yaml', 'w') as f:
            yaml.dump(
                {'body': self.params},
                f,
                default_flow_style=False
            )
|
||||
|
||||
|
||||
class DesignSampler:
    """Base class for design parameters sampling """

    def __init__(self, param_file='') -> None:
        """
        * param_file -- optional path to a YAML file with a 'design' section
        """
        self.params = {}
        if param_file:
            self.load(param_file)

    def load(self, param_file):
        """Load new values from a YAML file with a 'design' section"""
        with open(param_file, 'r') as f:
            # renamed from `dict` to avoid shadowing the builtin
            loaded = yaml.safe_load(f)['design']
        self.params.update(loaded)

    def default(self):
        """Return the (unrandomized) default parameter dictionary"""
        return self.params

    # ---- Randomization of values ----
    def randomize(self):
        """Generate random values for the current design parameters"""

        random_params = deepcopy(self.params)

        # NOTE dealing with the nested dict
        self._randomize_subset(random_params, [])

        return random_params

    def _randomize_subset(self, random_params, path):
        """Recursively randomize every leaf parameter found under `path`"""
        subset = nested_get(random_params, path) if path else random_params
        for key in subset:
            # A leaf parameter is a dict carrying a 'v' (value) entry
            if 'v' in subset[key].keys():
                self._randomize_value(random_params, path + [key])
            else:
                self._randomize_subset(random_params, path + [key])

    def _randomize_value(self, random_params, path):
        """ Randomize the value of one parameter
        Path is leading to the leaf of param dict. value.

        Raises ValueError if the parameter 'type' is not one of the
        supported kinds (select*, bool, file*, int, float).
        """
        # renamed from `range` to avoid shadowing the builtin
        value_range = nested_get(random_params, path + ['range'])
        p_type = nested_get(random_params, path + ['type'])

        # Check Defaults
        try:
            def_prob = nested_get(random_params, path + ['default_prob'])
        except KeyError:  # Default probability not given -> Sample uniformly
            def_prob = None

        def_value = nested_get(self.params, path + ['v'])
        if self.__use_default(def_prob):
            new_val = def_value
        else:
            if 'select' in p_type or p_type == 'bool' or 'file' in p_type:  # All discrete types
                if p_type == 'select_null' and None not in value_range:
                    value_range.append(None)
                # Exclude default -- it already had its chance via def_prob
                if def_prob is not None:
                    value_range.remove(def_value)
                new_val = random.choice(value_range)
            elif p_type == 'int':
                new_val = self.__randint_exclude(value_range, None if def_prob is None else def_value)
            elif p_type == 'float':
                new_val = self.__uniform_exclude(value_range, None if def_prob is None else def_value)
            else:
                # Explicit error instead of a confusing NameError on `new_val`
                raise ValueError(
                    f'{self.__class__.__name__}::ERROR::Unknown parameter type {p_type}')

        nested_set(random_params, path + ['v'], new_val)

    def __use_default(self, probability):
        """Bernoulli draw: keep the default value with the given probability"""
        if probability is None:
            return False
        return random.random() < probability

    def __randint_exclude(self, value_range, exclude):
        """Uniform random int in value_range, redrawing if it hits `exclude`"""
        # Loop instead of recursion: no recursion-depth limit on unlucky draws
        while True:
            rand_v = random.randint(*value_range)
            if exclude is None or rand_v != exclude:
                return rand_v

    def __uniform_exclude(self, value_range, exclude):
        """Uniform random float in value_range, redrawing if ~= `exclude`"""
        while True:
            rand_v = random.uniform(*value_range)
            if exclude is None or not close_enough(rand_v, exclude):
                return rand_v
|
||||
164
pygarment/garmentcode/utils.py
Normal file
164
pygarment/garmentcode/utils.py
Normal file
@@ -0,0 +1,164 @@
|
||||
from typing import TypeVar, Generic, Sequence, Callable
|
||||
|
||||
import numpy as np
|
||||
from numpy.linalg import norm
|
||||
from scipy.spatial.transform import Rotation
|
||||
import svgpathtools as svgpath
|
||||
|
||||
|
||||
|
||||
# proper inserstions by key with bicest module in python <3.10
|
||||
# https://stackoverflow.com/questions/27672494/how-to-use-bisect-insort-left-with-a-key
|
||||
|
||||
T = TypeVar('T')
|
||||
V = TypeVar('V')
|
||||
|
||||
|
||||
class KeyWrapper(Generic[T, V]):
    """Expose a sequence through a key function.

    Lets `bisect` routines on Python < 3.10 (which lack the `key` argument)
    search a sequence by a derived value.
    """

    def __init__(self, iterable: Sequence[T], key: Callable[[T], V]):
        self.it = iterable
        self.key = key

    def __getitem__(self, i: int) -> V:
        item = self.it[i]
        return self.key(item)

    def __len__(self) -> int:
        return len(self.it)
|
||||
|
||||
|
||||
def vector_angle(v1, v2):
    """Signed angle between two 2D vectors.

    Positive when v2 is counter-clockwise from v1, negative otherwise.
    """
    v1, v2 = np.asarray(v1), np.asarray(v2)
    cos_theta = np.dot(v1, v2) / (norm(v1) * norm(v2))
    angle = np.arccos(cos_theta)

    # The sign of the 2D cross product encodes the relative orientation
    # of v2 w.r.t. v1
    orientation = np.cross(v1, v2)
    if abs(orientation) > 1e-5:
        angle *= np.sign(orientation)

    return angle
|
||||
|
||||
|
||||
def R2D(angle):
    """2D (counter-clockwise) rotation matrix for the given angle"""
    c, s = np.cos(angle), np.sin(angle)
    return np.array([[c, -s], [s, c]])
|
||||
|
||||
|
||||
def vector_align_3D(v1, v2):
    """Find a rotation to align 3D vector v1 with v2.

    Returns a scipy Rotation object.

    Handles the degenerate (anti-)parallel cases where the cross product
    vanishes and hence does not define a rotation axis -- the original
    formulation divided by zero there.
    """

    v1, v2 = np.asarray(v1, dtype=float), np.asarray(v2, dtype=float)
    cos = np.dot(v1, v2) / (norm(v1) * norm(v2))
    cos = max(min(cos, 1), -1)  # NOTE: getting rid of numbers like 1.000002 that appear due to numerical instability

    angle = np.arccos(cos)

    # Cross to get the axis of rotation
    cross = np.cross(v1, v2)
    cross_norm = norm(cross)

    if cross_norm < 1e-10:
        # (Anti-)parallel vectors: cross product gives no usable axis
        if cos > 0:
            # Already aligned -- identity rotation
            return Rotation.identity()
        # Opposite directions: rotate by pi around any axis perpendicular to v1
        perp = np.cross(v1, [1.0, 0.0, 0.0])
        if norm(perp) < 1e-10:  # v1 is collinear with the X axis
            perp = np.cross(v1, [0.0, 1.0, 0.0])
        perp = perp / norm(perp)
        return Rotation.from_rotvec(perp * np.pi)

    cross = cross / cross_norm
    return Rotation.from_rotvec(cross * angle)
|
||||
|
||||
|
||||
def close_enough(f1, f2=0, tol=1e-4):
    """Whether two floats are equal within tolerance `tol`"""
    diff = f1 - f2
    return -tol < diff < tol
|
||||
|
||||
|
||||
def bbox_paths(paths):
    """Combined bounding box of a list of paths/Edge Sequences.

    Each element must provide a .bbox() returning (xmin, xmax, ymin, ymax);
    the same 4-tuple layout is returned for the union.
    """
    boxes = np.array([p.bbox() for p in paths])
    x_min, x_max = min(boxes[:, 0]), max(boxes[:, 1])
    y_min, y_max = min(boxes[:, 2]), max(boxes[:, 3])
    return (x_min, x_max, y_min, y_max)
|
||||
|
||||
|
||||
def lin_interpolation(val1, val2, factor):
    """Linear interpolation between val1 and val2 with factor [0, 1]

    with factor == 0, output is val1
    with factor == 1, output is val2

    Raises ValueError for a factor outside [0, 1].
    """
    if not (0 <= factor <= 1):
        raise ValueError(f'lin_interpolation::ERROR::Expected a factor \in [0, 1], got {factor}')

    return (1 - factor) * val1 + factor * val2
|
||||
|
||||
|
||||
# ---- Complex numbers converters -----
|
||||
def c_to_list(num):
    """Convert complex number to a list of 2 elements
    Allows processing of lists of complex numbers
    """
    if not isinstance(num, (list, tuple, set, np.ndarray)):
        return [num.real, num.imag]
    return [c_to_list(n) for n in num]
|
||||
|
||||
|
||||
def c_to_np(num):
    """Convert complex number to a numpy array of 2 elements
    Allows processing of lists of complex numbers
    """
    if not isinstance(num, (list, tuple, set, np.ndarray)):
        return np.asarray([num.real, num.imag])
    converted = [c_to_list(n) for n in num]
    return np.asarray(converted)
|
||||
|
||||
|
||||
def list_to_c(num):
    """Convert 2D list or list of 2D lists into complex number/list of complex
    numbers"""
    first = num[0]
    if isinstance(first, (list, tuple, set, np.ndarray)):
        return [complex(point[0], point[1]) for point in num]
    return complex(num[0], num[1])
|
||||
|
||||
|
||||
# ---- Nested Dictionaries shortcuts ----
|
||||
# https://stackoverflow.com/a/37704379
|
||||
def nested_get(dic, keys):
    """Fetch a value from a nested dictionary following the key path `keys`"""
    # https://stackoverflow.com/a/37704379
    node = dic
    for key in keys:
        node = node[key]
    return node
|
||||
|
||||
|
||||
def nested_set(dic, keys, value):
    """Assign `value` at key path `keys` in a nested dictionary,
    creating intermediate dictionaries as needed"""
    node = dic
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value
|
||||
|
||||
|
||||
def nested_del(dic, keys):
    """Delete the entry at key path `keys` in a nested dictionary"""
    node = dic
    for key in keys[:-1]:
        node = node[key]
    del node[keys[-1]]
|
||||
|
||||
|
||||
# ----- Curves -----
|
||||
def curve_extreme_points(curve, on_x=False, on_y=True):
    """Return extreme points of the current edge
    NOTE: this does NOT include the border vertices of an edge
    """
    # Variation of https://github.com/mathandy/svgpathtools/blob/5c73056420386753890712170da602493aad1860/svgpathtools/bezier.py#L197
    poly = svgpath.bezier2polynomial(curve, return_poly1d=True)

    def _interior_roots(component):
        # t-values strictly inside (0, 1) where the derivative vanishes
        return svgpath.polyroots(component.deriv(), realroots=True,
                                 condition=lambda r: 0 < r < 1)

    x_extremizers = _interior_roots(svgpath.real(poly)) if on_x else []
    y_extremizers = _interior_roots(svgpath.imag(poly)) if on_y else []

    # NOTE: x-roots first, then y-roots -- preserves output point order
    all_extremizers = x_extremizers + y_extremizers
    extreme_points = np.array([c_to_list(curve.point(t))
                               for t in all_extremizers])
    return extreme_points
|
||||
31
pygarment/mayaqltools/__init__.py
Normal file
31
pygarment/mayaqltools/__init__.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""
|
||||
Package for to simulate garments from patterns in Maya with Qualoth.
|
||||
|
||||
Main dependencies:
|
||||
* Maya 2022+ (uses Python 3.6+)
|
||||
* Arnold Renderer
|
||||
* Qualoth (compatible with your Maya version)
|
||||
|
||||
To run the package in Maya don't foget to add it to PYTHONPATH!
|
||||
"""
|
||||
from importlib import reload
|
||||
|
||||
import pygarment.mayaqltools.mayascene as mayascene
|
||||
reload(mayascene)
|
||||
|
||||
from .mayascene import PatternLoadingError
|
||||
from .mayascene import MayaGarment
|
||||
from .mayascene import Scene
|
||||
from .mayascene import MayaGarmentWithUI
|
||||
|
||||
import pygarment.mayaqltools.simulation as simulation
|
||||
import pygarment.mayaqltools.qualothwrapper as qualothwrapper
|
||||
import pygarment.mayaqltools.garmentUI as garmentUI
|
||||
import pygarment.mayaqltools.scan_imitation as scan_imitation
|
||||
import pygarment.mayaqltools.utils as utils
|
||||
|
||||
reload(simulation)
|
||||
reload(qualothwrapper)
|
||||
reload(garmentUI)
|
||||
reload(scan_imitation)
|
||||
reload(utils)
|
||||
569
pygarment/mayaqltools/garmentUI.py
Normal file
569
pygarment/mayaqltools/garmentUI.py
Normal file
@@ -0,0 +1,569 @@
|
||||
"""
|
||||
Maya interface for editing & testing patterns files
|
||||
* Maya 2022+
|
||||
* Qualoth
|
||||
"""
|
||||
|
||||
# Basic
|
||||
from functools import partial
|
||||
from datetime import datetime
|
||||
import os
|
||||
import numpy as np
|
||||
|
||||
# Maya
|
||||
from maya import cmds
|
||||
import maya.mel as mel
|
||||
|
||||
# My modules
|
||||
from pygarment import mayaqltools as mymaya
|
||||
from pygarment import data_config
|
||||
|
||||
|
||||
# -------- Main call - Draw the UI -------------
|
||||
def start_GUI():
    """Initialize and show the Garment Viewer window in Maya.

    Builds the full UI top-to-bottom: file-loading rows (pattern / body /
    properties / scene), a pattern-description area, operation buttons
    (reload, simulate, collisions, segmentation, 3D scan), and saving
    controls. A shared State object is threaded through every callback.
    """

    # Init state shared by all UI callbacks
    state = State()

    # init window
    window_width = 450
    main_offset = 10
    win = cmds.window(
        title="Garment Viewer", width=window_width,
        closeCommand=win_closed_callback,
        topEdge=15
    )
    cmds.columnLayout(columnAttach=('both', main_offset), rowSpacing=10, adj=1)

    # ------ Draw GUI -------
    # Pattern load
    text_button_group(template_field_callback, state, label='Pattern spec: ', button_label='Load')
    # body load
    text_button_group(load_body_callback, state, label='Body file: ', button_label='Load')
    # props load
    text_button_group(load_props_callback, state, label='Properties: ', button_label='Load')
    # scene setup load
    text_button_group(load_scene_callback, state, label='Scene: ', button_label='Load')
    # separate
    cmds.separator()

    # Pattern description area; placeholder text is replaced on pattern load
    state.pattern_layout = cmds.columnLayout(
        columnAttach=('both', 0), rowSpacing=main_offset, adj=1)
    cmds.text(label='<pattern_here>', al='left')
    cmds.setParent('..')
    # separate
    cmds.separator()

    # Operations: five equally sized buttons in one row.
    # NOTE: buttons whose callbacks need the button handle itself are
    # created first and wired via an edit call afterwards
    equal_rowlayout(5, win_width=window_width, offset=(main_offset / 2))
    cmds.button(label='Reload Spec', backgroundColor=[255 / 256, 169 / 256, 119 / 256],
                command=partial(reload_garment_callback, state))
    sim_button = cmds.button(label='Start Sim', backgroundColor=[227 / 256, 255 / 256, 119 / 256])
    cmds.button(sim_button, edit=True,
                command=partial(start_sim_callback, sim_button, state))
    collisions_button = cmds.button(label='Collisions', backgroundColor=[250 / 256, 200 / 256, 119 / 256])
    cmds.button(collisions_button, edit=True,
                command=partial(check_collisions_callback, collisions_button, state))
    segm_button = cmds.button(label='Segmentation', backgroundColor=[150 / 256, 225 / 256, 80 / 256])
    cmds.button(segm_button, edit=True,
                command=partial(display_segmentation_callback, segm_button, state))
    scan_button = cmds.button(label='3D Scan', backgroundColor=[200 / 256, 225 / 256, 80 / 256])
    cmds.button(scan_button, edit=True,
                command=partial(imitate_3D_scan_callback, scan_button, state))

    cmds.setParent('..')
    # separate
    cmds.separator()

    # Saving folder
    saving_to_field = text_button_group(saving_folder_callback, state,
                                        label='Saving to: ', button_label='Choose')
    # saving requests
    equal_rowlayout(2, win_width=window_width, offset=main_offset)
    cmds.button(label='Save snapshot', backgroundColor=[227 / 256, 255 / 256, 119 / 256],
                command=partial(quick_save_callback, saving_to_field, state),
                ann='Quick save with pattern spec and sim config')
    cmds.button(label='Save with render', backgroundColor=[255 / 256, 140 / 256, 73 / 256],
                command=partial(full_save_callback, saving_to_field, state),
                ann='Full save with pattern spec, sim config, garment mesh & rendering')
    cmds.setParent('..')

    # Last
    cmds.text(label='')  # offset

    # fin
    cmds.showWindow(win)
|
||||
|
||||
|
||||
# ----- State -------
|
||||
class State(object):
    """Holds the Garment Viewer session state shared by all UI callbacks:
    the loaded garment and scene, simulation config, and saving preferences.
    """
    def __init__(self):
        self.pattern_layout = None  # to be set on UI init
        self.garment = None         # MayaGarment, set by the pattern-load callback
        self.scene = None           # set by the scene-load callback
        self.save_to = None         # target directory for saving
        self.saving_prefix = None
        self.body_file = None
        self.config = data_config.Properties()
        self.scenes_path = ''
        self.segmented = False      # whether segmentation colors are currently shown
        mymaya.simulation.init_sim_props(self.config)  # use default setup for simulation -- for now

    def reload_garment(self):
        """Reloads garment Geometry & UI with current scene.
        JSON is NOT loaded from disk as it's on-demand operation"""
        if self.garment is None:
            return

        if self.scene is not None:
            self.garment.load(
                shader_group=self.scene.cloth_SG(),
                obstacles=[self.scene.body],  # self.scene.floor()],
                config=self.config['sim']['config']
            )
            self.scene.reset_garment_color()  # in case there was a segmentation display
        else:
            # No scene yet -- load garment geometry without shading/obstacles
            self.garment.load(config=self.config['sim']['config'])

        # calling UI after loading for correct connection of attributes
        self.garment.drawUI(self.pattern_layout)

    def fetch(self):
        """Update info in dependent objects from Maya"""
        if self.scene is not None:
            self.scene.fetch_props_from_Maya()
        # Pull garment sim properties back into the shared config
        garment_conf = self.garment.fetchSimProps()
        self.config.set_section_config(
            'sim',
            material=garment_conf['material'],
            body_friction=garment_conf['body_friction'],
            collision_thickness=garment_conf['collision_thickness']
        )

    def serialize(self, directory):
        """Serialize text-like objects (sim config + pattern spec)"""
        self.config.serialize(os.path.join(directory, 'sim_props.json'))
        self.garment.serialize(
            directory,
            to_subfolder=False,
            with_3d=False, with_text=False, view_ids=False,
            empty_ok=True)

    def save_scene(self, directory):
        """Save scene objects: garment mesh and a rendering of the scene"""
        self.garment.save_mesh(directory)
        self.scene.render(directory, self.garment.name)
|
||||
|
||||
|
||||
# ------- Errors --------
|
||||
class CustomError(Exception):
    """Base exception that renders as its class name plus an optional message."""

    def __init__(self, *args):
        # first positional argument (if any) becomes the message
        self.message = args[0] if args else None

    def __str__(self):
        if self.message:
            return self.__class__.__name__ + ', {0} '.format(self.message)
        return self.__class__.__name__
|
||||
|
||||
|
||||
class SceneSavingError(CustomError):
    """Raised when preparing the saving directory fails (no garment / no folder chosen)."""

    def __init__(self, *args):
        super().__init__(*args)
|
||||
|
||||
|
||||
# --------- UI Drawing ----------
|
||||
def equal_rowlayout(num_columns, win_width, offset):
    """Create a Maya rowLayout with `num_columns` equally sized columns,
    each attached on both sides with the given offset."""
    per_column = win_width / num_columns - offset
    column_ids = range(1, num_columns + 1)

    return cmds.rowLayout(
        numberOfColumns=num_columns,
        columnWidth=[(col, per_column) for col in column_ids],
        columnAttach=[(col, 'both', offset) for col in column_ids],
    )
|
||||
|
||||
|
||||
def text_button_group(callback, state, label='', button_label='Click'):
    """Custom version of textFieldButtonGrp: label + read-only text field + button.

    The button invokes `callback(field, state, ...)` so the callback can write
    the chosen value back into the field. Returns the text field's name.
    """
    cmds.rowLayout(nc=3, adj=2)
    cmds.text(label=label)
    field = cmds.textField(editable=False)
    # orange accent button
    cmds.button(label=button_label,
                bgc=[0.99, 0.66, 0.46],  # backgroundColor=[255 / 256, 169 / 256, 119 / 256],
                command=partial(callback, field, state))
    cmds.setParent('..')
    return field
|
||||
|
||||
|
||||
# ----- Callbacks -----
|
||||
# -- Loading --
|
||||
def sample_callback(text, *args):
    """Placeholder/debug callback: just logs that it was called with `text`."""
    print('Called ' + text)
|
||||
|
||||
|
||||
def template_field_callback(view_field, state, *args):
    """Ask the user for a pattern specification (.json) file and load it
    as the current garment, replacing any previously loaded one."""
    # start browsing from the folder of the currently displayed file
    current_dir = os.path.dirname(cmds.textField(view_field, query=True, text=True))

    multipleFilters = "JSON (*.json);;All Files (*.*)"
    template_file = cmds.fileDialog2(
        fileFilter=multipleFilters,
        dialogStyle=2,
        fileMode=1,
        caption='Choose pattern specification file',
        startingDirectory=current_dir
    )
    if not template_file:  # do nothing -- dialog cancelled
        return
    template_file = template_file[0]

    cmds.textField(view_field, edit=True, text=template_file)

    # create new garment
    if state.garment is not None:
        # Cleanup: remove the previous garment's Maya objects before replacing it
        state.garment.clean(delete=True)

    state.garment = mymaya.MayaGarmentWithUI(template_file, True)
    state.reload_garment()
|
||||
|
||||
|
||||
def load_body_callback(view_field, state, *args):
    """Ask for a body .obj file and (re)initialize the render scene with it."""
    start_dir = os.path.dirname(cmds.textField(view_field, query=True, text=True))
    picked = cmds.fileDialog2(
        fileFilter="OBJ (*.obj);;All Files (*.*)",
        dialogStyle=2,
        fileMode=1,
        caption='Choose body obj file',
        startingDirectory=start_dir
    )
    if not picked:
        return  # dialog cancelled -- keep the current body

    body_file = picked[0]
    cmds.textField(view_field, edit=True, text=body_file)

    # remember the choice
    state.config['body'] = os.path.basename(body_file)
    state.body_file = body_file
    # previous scene will autodelete (clean_on_die)
    state.scene = mymaya.Scene(
        body_file, state.config['render'],
        scenes_path=state.scenes_path, clean_on_die=True)
    state.reload_garment()
|
||||
|
||||
|
||||
def load_props_callback(view_field, state, *args):
    """Load sim & rendering properties from a file rather than use the defaults.

    Choices already made in other UI elements (body file, scene file) take
    precedence over whatever the incoming config contains.
    """
    current_dir = os.path.dirname(cmds.textField(view_field, query=True, text=True))
    multipleFilters = "JSON (*.json);;All Files (*.*)"
    file = cmds.fileDialog2(
        fileFilter=multipleFilters,
        dialogStyle=2,
        fileMode=1,
        caption='Choose sim & rendering properties file',
        startingDirectory=current_dir
    )
    if not file:  # do nothing -- dialog cancelled
        return

    file = file[0]
    cmds.textField(view_field, edit=True, text=file)

    # Edit the incoming config to reflect explicit choices made in other UI elements
    in_config = data_config.Properties(file)

    # Use current body info instead of one from config
    if state.body_file is not None:
        in_config['body'] = os.path.basename(state.body_file)

    # Use current scene info instead of one from config
    if 'scene' not in state.config['render']['config']:  # remove entirely
        in_config['render']['config'].pop('scene', None)
    else:
        in_config['render']['config']['scene'] = state.config['render']['config']['scene']

    # After the adjustments made, apply the new config to all elements
    state.config = in_config
    mymaya.simulation.init_sim_props(state.config)  # fill the empty parts

    # rebuild the scene so it picks up the new render config (old one autodeletes)
    if state.scene is not None:
        state.scene = mymaya.Scene(
            state.body_file, state.config['render'],
            scenes_path=state.scenes_path, clean_on_die=True)

    if state.garment is not None:
        state.reload_garment()
|
||||
|
||||
|
||||
def load_scene_callback(view_field, state, *args):
    """Ask for a Maya scene setup file (.mb) and rebuild the render scene with it."""
    start_dir = os.path.dirname(cmds.textField(view_field, query=True, text=True))
    picked = cmds.fileDialog2(
        fileFilter="MayaBinary (*.mb);;All Files (*.*)",
        dialogStyle=2,
        fileMode=1,
        caption='Choose scene setup Maya file',
        startingDirectory=start_dir
    )
    if not picked:
        return  # dialog cancelled

    scene_file = picked[0]
    cmds.textField(view_field, edit=True, text=scene_file)

    # remember the chosen scene & where to look for scene files
    state.config['render']['config']['scene'] = os.path.basename(scene_file)
    state.scenes_path = os.path.dirname(scene_file)

    # rebuild the scene object if one already exists (old one autodeletes)
    if state.scene is not None:
        # del state.scene
        state.scene = mymaya.Scene(
            state.body_file, state.config['render'],
            scenes_path=state.scenes_path,
            clean_on_die=True)
        state.reload_garment()
|
||||
|
||||
|
||||
def reload_garment_callback(state, *args):
    """Re-read the garment's JSON spec from disk and reload geometry & UI, if one is loaded."""
    if state.garment is None:
        return
    state.garment.reloadJSON()
    state.reload_garment()
|
||||
|
||||
|
||||
# -- Operations --
|
||||
def start_sim_callback(button, state, *args):
    """Start the cloth simulation; requires both a garment and a body/scene loaded.

    While the sim plays, the button is turned into a 'Stop Sim' button.
    """
    if state.garment is None or state.scene is None:
        cmds.confirmDialog(title='Error', message='Load pattern specification & body info first')
        return
    print('Simulating..')

    # Reload geometry in case something changed
    state.reload_garment()

    mymaya.qualothwrapper.start_maya_sim(state.garment, state.config['sim'])

    # Update button: same button now stops the simulation
    cmds.button(button, edit=True,
                label='Stop Sim', backgroundColor=[245 / 256, 96 / 256, 66 / 256],
                command=partial(stop_sim_callback, button, state))
|
||||
|
||||
|
||||
def stop_sim_callback(button, state, *args):
    """Stop the running simulation and turn the button back into 'Start Sim'."""
    cmds.play(state=False)  # toggle playback off
    print('Simulation::Stopped')

    # restore the button's 'start' appearance & behavior
    cmds.button(
        button, edit=True,
        label='Start Sim', backgroundColor=[227 / 256, 255 / 256, 119 / 256],
        command=partial(start_sim_callback, button, state))

    # select the cloth props object so the user can tweak material properties
    cmds.select(state.garment.get_qlcloth_props_obj())
|
||||
|
||||
|
||||
def check_collisions_callback(button, state, *args):
    """Evaluate simulation quality: check the garment for intersections with
    colliders and with itself, and report the results in a dialog.
    """
    # Indicate that the (potentially slow) checks are running.
    # FIX: the original also rebound the button to stop_sim_callback here --
    # a copy-paste from the sim button; only the label/color should change
    # (matching imitate_3D_scan_callback's busy state).
    cmds.button(button, edit=True,
                label='Checking...', backgroundColor=[245 / 256, 96 / 256, 66 / 256])
    cmds.refresh(currentView=True)

    cmds.confirmDialog(
        title='Simulation quality info:',
        message=(
            'Simulation quality checks: \n\n'
            'Garment intersect colliders: {} \n'
            'Garment has self-intersections: {}').format(
            'Yes' if state.garment.intersect_colliders_3D() else 'No',
            'Yes' if state.garment.self_intersect_3D(verbose=True) else 'No'),
        button=['Ok'], defaultButton='Ok', cancelButton='Ok', dismissString='Ok')

    # restore normal button appearance & behavior
    cmds.button(button, edit=True,
                label='Collisions', backgroundColor=[250 / 256, 200 / 256, 119 / 256],
                command=partial(check_collisions_callback, button, state))
|
||||
|
||||
|
||||
def imitate_3D_scan_callback(button, state, *args):
    """Imitate 3D-scan artifacts: remove garment faces invisible to outside cameras."""
    # show busy state while the removal runs
    cmds.button(button, edit=True,
                label='Scanning...', backgroundColor=[245 / 256, 96 / 256, 66 / 256])
    cmds.refresh(currentView=True)

    obstacles = [state.scene.body] if state.scene is not None else []
    if 'scan_imitation' in state.config:
        scan_config = state.config['scan_imitation']['config']
        mymaya.scan_imitation.remove_invisible(
            state.garment.get_qlcloth_geomentry(),
            obstacles,
            scan_config['test_rays_num'], scan_config['visible_rays_num']
        )
    else:  # fall back to the function's own defaults
        mymaya.scan_imitation.remove_invisible(
            state.garment.get_qlcloth_geomentry(),
            obstacles
        )

    # back to the idle look, re-armed for the next click
    cmds.button(button, edit=True,
                label='3D Scan', backgroundColor=[200 / 256, 225 / 256, 80 / 256],
                command=partial(imitate_3D_scan_callback, button, state))
|
||||
|
||||
|
||||
def display_segmentation_callback(button, state, *args):
    """
    Toggle visualization of the garment's per-vertex segmentation labels.
    """
    if not state.segmented:
        # indicate waiting for the coloring to finish
        cmds.button(button, edit=True,
                    label='Segmenting...', backgroundColor=[245 / 256, 96 / 256, 66 / 256])
        cmds.refresh(currentView=True)

        state.garment.display_vertex_segmentation(state.scene.scene['cloth_shader'])

        print('Segmentation displayed!')
        state.segmented = True

        # NOTE(review): 'Color' (vs 'Segmentation' in the else-branch) appears to be
        # the intentional "on" label of the toggle -- confirm
        cmds.button(button, edit=True,
                    label='Color', backgroundColor=[150 / 256, 225 / 256, 80 / 256],
                    command=partial(display_segmentation_callback, button, state))
    else:
        # restore plain garment color and re-arm the button
        state.scene.reset_garment_color()
        state.segmented = False
        cmds.button(button, edit=True,
                    label='Segmentation', backgroundColor=[150 / 256, 225 / 256, 80 / 256],
                    command=partial(display_segmentation_callback, button, state))
|
||||
|
||||
|
||||
# -- Saving ---
|
||||
def win_closed_callback(*args):
    """Window-close clean-up: remove Qualoth solver objects from the scene."""
    cmds.delete(cmds.ls('qlSolver*'))
    # other created objects are removed automatically through destructors
|
||||
# Other created objects will be automatically deleted through destructors
|
||||
|
||||
|
||||
def saving_folder_callback(view_field, state, *args):
    """Ask the user for a folder to save files to, then for an optional name prefix.

    Returns True when a folder was chosen; returns None (falsy) when the dialog
    is cancelled -- callers test the result for truthiness.
    """
    current_dir = cmds.textField(view_field, query=True, text=True)

    directory = cmds.fileDialog2(
        dialogStyle=2,
        fileMode=3,  # directories
        caption='Choose folder to save snapshots and renderings to',
        startingDirectory=current_dir
    )
    if not directory:  # do nothing -- dialog cancelled
        return

    directory = directory[0]
    cmds.textField(view_field, edit=True, text=directory)

    state.save_to = directory

    # request saving prefix (cancelling here keeps the folder but clears the prefix)
    tag_result = cmds.promptDialog(
        t='Enter a saving prefix',
        m='Enter a saving prefix:',
        button=['OK', 'Cancel'],
        defaultButton='OK',
        cancelButton='Cancel',
        dismissString='Cancel'
    )
    if tag_result == 'OK':
        tag = cmds.promptDialog(query=True, text=True)
        state.saving_prefix = tag
    else:
        state.saving_prefix = None

    return True
|
||||
|
||||
|
||||
def _new_dir(root_dir, tag='snap'):
    """Create a fresh, timestamped subfolder of root_dir and return its path."""
    stamp = datetime.now().strftime('%y%m%d-%H-%M-%S')
    path = os.path.join(root_dir, tag + '_' + stamp)
    os.makedirs(path)
    return path
|
||||
|
||||
|
||||
def _create_saving_dir(view_field, state):
    """Create and return a fresh directory to save the current garment into.

    Raises SceneSavingError when no garment is loaded or no folder is supplied.
    """
    if state.garment is None:
        cmds.confirmDialog(title='Error', message='Load pattern specification first')
        raise SceneSavingError('Garment is not loaded before saving')

    # make sure a target folder is known -- ask the user if needed
    if state.save_to is None and not saving_folder_callback(view_field, state):
        raise SceneSavingError('Saving folder not supplied')

    tag = state.saving_prefix if state.saving_prefix is not None else state.garment.name
    return _new_dir(state.save_to, tag)
|
||||
|
||||
|
||||
def quick_save_callback(view_field, state, *args):
    """Quick save: pattern spec, sim config and garment mesh."""
    try:
        save_dir = _create_saving_dir(view_field, state)
    except SceneSavingError:
        return  # user was already notified

    state.fetch()           # pull the latest props from Maya first
    state.serialize(save_dir)

    state.garment.save_mesh(save_dir)

    print('Garment info saved to ' + save_dir)
|
||||
|
||||
|
||||
def full_save_callback(view_field, state, *args):
    """Full save: pattern spec, sim config, garment mesh & rendering."""
    if state.garment is None or state.scene is None:
        cmds.confirmDialog(title='Error', message='Load pattern specification & body info first')
        return

    # same directory setup as for quick save
    try:
        save_dir = _create_saving_dir(view_field, state)
    except SceneSavingError:
        return

    # scene objects: mesh + render
    state.save_scene(save_dir)

    # text properties
    state.fetch()
    state.serialize(save_dir)

    print('Pattern spec, props, 3D mesh & render saved to ' + save_dir)

    # keep the cloth props object selected for convenient tweaking
    cmds.select(state.garment.get_qlcloth_props_obj())
|
||||
1515
pygarment/mayaqltools/mayascene.py
Normal file
1515
pygarment/mayaqltools/mayascene.py
Normal file
File diff suppressed because it is too large
Load Diff
396
pygarment/mayaqltools/qualothwrapper.py
Normal file
396
pygarment/mayaqltools/qualothwrapper.py
Normal file
@@ -0,0 +1,396 @@
|
||||
"""
|
||||
Qualoth scripts are written in MEL.
|
||||
This module makes a python interface to them
|
||||
Notes:
|
||||
* Error checks are sparse to save coding time & lines.
|
||||
This should not be a problem during the normal workflow
|
||||
|
||||
"""
|
||||
import time
|
||||
import sys
|
||||
|
||||
from maya import mel
|
||||
from maya import cmds
|
||||
|
||||
|
||||
def load_plugin():
    """
    Force-load the Qualoth plugin into Maya.
    The plugin must be installed and licensed to use it!
    Inquire here: http://www.fxgear.net/vfxpricing
    """
    maya_year = int(mel.eval('getApplicationVersionAsFloat'))
    plugin_name = 'qualoth_{}_x64'.format(maya_year)
    print('Loading ', plugin_name)

    cmds.loadPlugin(plugin_name)
|
||||
|
||||
|
||||
# -------- Wrappers -----------
|
||||
# Make sure that Qualoth plugin is loaded before running any wrappers!
|
||||
|
||||
def qlCreatePattern(curves_group):
    """
    Convert the given group of closed 2D curves into a flat Qualoth pattern piece.
    Returns the list of top-level objects created by the call.
    """
    before = set(cmds.ls(assemblies=True))

    cmds.select(curves_group)        # the MEL command operates on selection
    mel.eval('qlCreatePattern()')

    # whatever top-level nodes appeared were created by the call
    # (no symmetric difference needed -- deletions don't matter here)
    return list(set(cmds.ls(assemblies=True)) - before)
|
||||
|
||||
|
||||
def qlCreateSeam(curve1, curve2):
    """Create a Qualoth seam between the two given curves; return the seam shape."""
    cmds.select([curve1, curve2])    # the MEL command operates on selection
    return mel.eval('qlCreateSeam()')
|
||||
|
||||
|
||||
def qlCreateCollider(cloth, target):
    """
    Register `target` as a collider for `cloth` so that the simulated
    cloth cannot penetrate it. Returns the top-level objects created.
    """
    before = set(cmds.ls(assemblies=True))

    cmds.select([cloth, target])     # the MEL command operates on selection
    mel.eval('qlCreateCollider()')

    return list(set(cmds.ls(assemblies=True)) - before)
|
||||
|
||||
|
||||
def qlCleanCache(cloth):
    """Clear the playback cache of the given qlCloth object."""
    cmds.select(cloth)
    # NOTE: the MEL command is named 'Clear', the wrapper 'Clean'
    mel.eval('qlClearCache()')
|
||||
|
||||
|
||||
def qlReinitSolver(cloth, solver):
    """Reset both cloth and solver to their state before simulation was applied.

    NOTE: useful for correct reload of garments on delete.
    """
    cmds.select([cloth, solver])
    mel.eval('qlReinitializeSolver()')
|
||||
|
||||
# ------- Higher-level functions --------
|
||||
|
||||
def start_maya_sim(garment, props):
    """Start simulation through Maya default playback without checks.

    Gives the Maya user default control over stopping & resuming the sim.
    Current qlCloth material properties from Maya are used (instead of garment config).
    """
    config = props['config']
    solver = _init_sim(config)

    # Allow to assemble without gravity
    print('Simulation::Assemble without gravity')
    _set_gravity(solver, 0)
    # FIX: initialize `frame` so it is defined even when zero_gravity_steps <= 1
    # (the loop body never runs then, and `frame` was a NameError below)
    frame = 1
    for frame in range(1, config['zero_gravity_steps']):
        cmds.currentTime(frame)  # step

    # resume normally
    print('Simulation::normal playback.. Use ESC key to stop simulation')
    _set_gravity(solver, -980)
    cmds.currentTime(frame - 1)  # one step back to start from simulated state
    cmds.play()
|
||||
|
||||
|
||||
def run_sim(garment, props):
    """
    Setup and run cloth simulator until static equilibrium is achieved.
    Records timing/frame stats and failure types into props['stats'].
    Note:
        * Assumes garment is already properly aligned!
        * All of the garments existing in Maya scene will be simulated
          because solver is shared!!
    """
    config = props['config']
    solver = _init_sim(config)

    start_time = time.time()
    # Allow to assemble without gravity + skip checks for first few frames
    print('Simulating {}'.format(garment.name))
    _set_gravity(solver, 0)
    for frame in range(1, config['zero_gravity_steps']):
        cmds.currentTime(frame)  # step
        garment.cache_if_enabled(frame)
        garment.update_verts_info()
        _update_progress(frame, config['max_sim_steps'])  # progress bar

    # resume normally
    _set_gravity(solver, -980)
    for frame in range(config['zero_gravity_steps'], config['max_sim_steps']):
        cmds.currentTime(frame)  # step
        garment.cache_if_enabled(frame)
        garment.update_verts_info()

        _update_progress(frame, config['max_sim_steps'])  # progress bar
        static, non_st_count = garment.is_static(config['static_threshold'], config['non_static_percent'])
        if static:  # Success!
            print('\nAchieved static equilibrium for {}'.format(garment.name))
            break

    # stats -- NOTE: relies on `frame` (and `non_st_count`) leaking out of the loops above
    props['stats']['sim_time'][garment.name] = time.time() - start_time
    props['stats']['spf'][garment.name] = props['stats']['sim_time'][garment.name] / frame
    props['stats']['fin_frame'][garment.name] = frame

    # Fail checks
    # static equilibrium never detected -- might have false negs!
    if frame == config['max_sim_steps'] - 1:
        print('\nFailed to achieve static equilibrium for {} with {} non-static vertices out of {}'.format(
            garment.name, non_st_count, len(garment.current_verts)))
        _record_fail(props, 'static_equilibrium', garment.name)
    # 3D penetrations
    if garment.intersect_colliders_3D():
        _record_fail(props, 'intersect_colliders', garment.name)
    if garment.self_intersect_3D():
        _record_fail(props, 'intersect_self', garment.name)
    # Finished too fast (suspiciously little actual simulation work)
    if props['stats']['sim_time'][garment.name] < 2:  # 2 sec
        _record_fail(props, 'fast_finish', garment.name)
|
||||
|
||||
|
||||
def findSolver():
    """
    Return the name of the qlSolver existing in the scene, or None if absent.
    (usually the solver is created once per scene)
    """
    matches = cmds.ls('*qlSolver*Shape*')
    return matches[0] if matches else None
|
||||
|
||||
|
||||
def deleteSolver():
    """Delete every solver-related object from the scene."""
    cmds.delete(cmds.ls('*qlSolver*'))
|
||||
|
||||
|
||||
def flipPanelNormal(panel_geom):
    """Enable normal flipping on the qlPattern object among panel_geom.

    At least one of the provided objects should be a qlPattern object.

    Raises:
        ValueError: if panel_geom contains no qlPattern object
            (was an opaque IndexError before).
    """
    patterns = [obj for obj in panel_geom if 'Pattern' in obj]
    if not patterns:
        raise ValueError('flipPanelNormal::no qlPattern object among {}'.format(panel_geom))

    shape = cmds.listRelatives(patterns[0], shapes=True, path=True)
    cmds.setAttr(shape[0] + '.flipNormal', 1)
|
||||
|
||||
|
||||
def getVertsOnCurve(panel_node, curve, curve_group=None):
    """
    Return the list of mesh vertices located on the curve, or None if no match.
    * panel_node is qlPattern object to which the curve belongs
    * curve is a main name of a curve object to get vertex info for
      OR any substring of its full Maya name that would uniquely identify it
    * (optional) curve_group is a name of parent group of given curve to uniquely distinguish the curve
    """
    # find qlDiscretizer node -- it is among the connections of the panel's shape
    if 'Shape' not in panel_node:
        shapes = cmds.listRelatives(panel_node, shapes=True)
        panel_node = panel_node + '|' + shapes[0]

    connections = cmds.listConnections(panel_node)

    discretizer = [node for node in connections if 'qlDiscretizer' in node]
    discretizer = discretizer[0]
    # NOTE: 'curveVeritcesInfoArray' spelling matches the plugin's attribute name
    info_array = discretizer + '.curveVeritcesInfoArray'

    # iterate over curveVeritcesInfoArray
    num_curves = cmds.getAttr(info_array, size=True)
    # avoid matching 'curve1' with 'Acurve10' by adding a starting and ending character
    if curve[0] != '|':
        curve = '|' + curve
    if curve[-1] != '|':
        curve = curve + '|'
    for idx in range(num_curves):
        curve_name = cmds.getAttr(info_array + '[%d].curveName' % idx)

        if curve in curve_name:
            if curve_group is not None and curve_group not in curve_name:
                # erroneous match -- same curve name under a different group
                continue
            # found!
            vertices = cmds.getAttr(info_array + '[%d].curveVertices' % idx)
            return vertices

    return None
|
||||
|
||||
|
||||
# ------ Working with props ------
|
||||
def setColliderFriction(collider_objects, friction_value):
    """Set the friction attribute of the given collider to friction_value."""
    # the 'Offset' object is auxiliary -- the main collider holds the attribute
    main = [obj for obj in collider_objects if 'Offset' not in obj]
    shape = cmds.listRelatives(main[0], shapes=True)

    cmds.setAttr(shape[0] + '.friction', friction_value)
|
||||
|
||||
|
||||
def setFabricProps(cloth, props):
    """Apply the given material properties to a qlCloth object; no-op on empty props."""
    if not props:
        return

    # plain (attribute, property-key) pairs, applied in the original order
    for attr, key in [
            ('density', 'density'),
            ('stretch', 'stretch_resistance'),
            ('shear', 'shear_resistance'),
            ('stretchDamp', 'stretch_damp'),
            ('bend', 'bend_resistance'),
            ('bendAngleDropOff', 'bend_angle_dropoff'),
            ('bendDamp', 'bend_damp'),
            ('bendDampDropOff', 'bend_damp_dropoff'),
            ('bendYield', 'bend_yield'),
            ('bendPlasticity', 'bend_plasticity'),
            ('viscousDamp', 'viscous_damp'),
            ('friction', 'friction'),
            ('pressure', 'pressure'),
            ('lengthScale', 'length_scale'),
            ('airDrag', 'air_drag'),
            ('rubber', 'rubber')]:
        cmds.setAttr(cloth + '.' + attr, props[key], clamp=True)

    # attributes that only take effect after enabling their flag
    cmds.setAttr(cloth + '.overrideCompression', 1)
    cmds.setAttr(cloth + '.compression', props['compression_resistance'], clamp=True)

    cmds.setAttr(cloth + '.anisotropicControl', 1)
    for attr, key in [
            ('uStretchScale', 'weft_resistance_scale'),
            ('vStretchScale', 'warp_resistance_scale'),
            ('rubberU', 'weft_rubber_scale'),
            ('rubberV', 'warp_rubber_scale')]:
        cmds.setAttr(cloth + '.' + attr, props[key], clamp=True)
|
||||
|
||||
|
||||
def setPanelsResolution(scaling):
    """Set the resolution-scale controller on every qlPattern shape in the scene."""
    for panel in cmds.ls('*qlPattern*', shapes=True):
        cmds.setAttr(panel + '.resolutionScale', scaling)
|
||||
|
||||
|
||||
def fetchFabricProps(cloth):
    """Return the current material properties of the cloth object as a dict.
    Requires a qlCloth object.
    """
    props = {}
    # Mass density per unit area. (Kg/cm2)
    props['density'] = cmds.getAttr(cloth + '.density')
    # Resisting force to planar stretching and compression
    props['stretch_resistance'] = cmds.getAttr(cloth + '.stretch')
    # Resisting force to shearing. This parameter is
    # interpreted as a scale factor to the stretch resistance.
    props['shear_resistance'] = cmds.getAttr(cloth + '.shear')
    # Damping factor for stretching motion.
    props['stretch_damp'] = cmds.getAttr(cloth + '.stretchDamp')
    # Resisting force to bending.
    props['bend_resistance'] = cmds.getAttr(cloth + '.bend')
    props['bend_angle_dropoff'] = cmds.getAttr(cloth + '.bendAngleDropOff')
    # Damping factor for bending motion
    props['bend_damp'] = cmds.getAttr(cloth + '.bendDamp')
    props['bend_damp_dropoff'] = cmds.getAttr(cloth + '.bendDampDropOff')

    # creases: elasticity vs plasticity
    props['bend_yield'] = cmds.getAttr(cloth + '.bendYield')
    props['bend_plasticity'] = cmds.getAttr(cloth + '.bendPlasticity')

    # external
    # This damping force drags the motion of each cloth vertex in all directions uniformly
    # regardless of the directions of normals of each vertex.
    props['viscous_damp'] = cmds.getAttr(cloth + '.viscousDamp')
    # Controls the friction among cloth objects or colliders. Also self-friction
    props['friction'] = cmds.getAttr(cloth + '.friction')
    # The amount of pressure force which are applied to the vertex normal directions of each cloth vertex.
    props['pressure'] = cmds.getAttr(cloth + '.pressure')

    # need setting flags when writing back
    # (setting requires .overrideCompression to be on)
    props['compression_resistance'] = cmds.getAttr(cloth + '.compression')

    # ------ unlikely to be used ---------
    # Scale factor for length unit.
    props['length_scale'] = cmds.getAttr(cloth + '.lengthScale')
    # value controls the amount of influence from those air fields. In case
    # there is no attached field to this cloth, 'Air Drag' simply drags the
    # cloth motion in the direction of face normals of each triangle.
    props['air_drag'] = cmds.getAttr(cloth + '.airDrag')
    # This value scales the area of the cloth in rest state.
    props['rubber'] = cmds.getAttr(cloth + '.rubber')
    # fine-grained
    # The scale factor to the planar stretching/compression resistance in weft (U) direction.
    props['weft_resistance_scale'] = cmds.getAttr(cloth + '.uStretchScale')
    # The scale factor to the planar stretching/compression resistance in warp (V) direction.
    props['warp_resistance_scale'] = cmds.getAttr(cloth + '.vStretchScale')
    # The scale factor to the rubber value (rest length scale) in weft (U) direction.
    props['weft_rubber_scale'] = cmds.getAttr(cloth + '.rubberU')
    # The scale factor to the rubber value (rest length scale) in warp (V) direction.
    props['warp_rubber_scale'] = cmds.getAttr(cloth + '.rubberV')

    return props
|
||||
|
||||
|
||||
def fetchColliderFriction(collider_objects):
|
||||
"""Retrieve collider friction info from given collider"""
|
||||
|
||||
try:
|
||||
main_collider = [obj for obj in collider_objects if 'Offset' not in obj]
|
||||
collider_shape = cmds.listRelatives(main_collider[0], shapes=True)
|
||||
|
||||
return cmds.getAttr(collider_shape[0] + '.friction')
|
||||
except ValueError as e:
|
||||
# collider doesn't exist any more
|
||||
return None
|
||||
|
||||
|
||||
def fetchPanelResolution():
    """Return the resolutionScale of a (any) qlPattern found in the scene."""
    panel = cmds.ls('*qlPattern*')[0]
    shape = cmds.listRelatives(panel, shapes=True, path=True)
    return cmds.getAttr(shape[0] + '.resolutionScale')
|
||||
|
||||
|
||||
# ------- Self-Utils ---------
|
||||
def _init_sim(config):
    """
    Basic simulation settings before starting simulation.
    Returns the scene's (shared) qlSolver node.
    """
    solver = findSolver()

    cmds.setAttr(solver + '.selfCollision', 1)
    cmds.setAttr(solver + '.startTime', 1)
    cmds.setAttr(solver + '.solverStatistics', 0)  # for easy reading of console output
    cmds.playbackOptions(ps=0, max=config['max_sim_steps'])  # 0 playback speed = play every frame

    return solver
|
||||
|
||||
|
||||
def _set_gravity(solver, gravity):
    """Apply the requested gravity value to the given sim solver node."""
    gravity_attribute = solver + '.gravity1'
    cmds.setAttr(gravity_attribute, gravity)
|
||||
|
||||
|
||||
def _update_progress(progress, total):
|
||||
"""Progress bar in console"""
|
||||
# https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
|
||||
amtDone = progress / total
|
||||
num_dash = int(amtDone * 50)
|
||||
sys.stdout.write('\rProgress: [{0:50s}] {1:.1f}%'.format('#' * num_dash + '-' * (50 - num_dash), amtDone * 100))
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
def _record_fail(props, fail_type, garment_name):
|
||||
"""add a failure recording to props. Creates nodes if don't exist"""
|
||||
if 'fails' not in props['stats']:
|
||||
props['stats']['fails'] = {}
|
||||
try:
|
||||
props['stats']['fails'][fail_type].append(garment_name)
|
||||
except KeyError:
|
||||
props['stats']['fails'][fail_type] = [garment_name]
|
||||
116
pygarment/mayaqltools/scan_imitation.py
Normal file
116
pygarment/mayaqltools/scan_imitation.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
Maya script for removing faces from 3D garement model that are not visible from the outside cameras
|
||||
The goal is to imitate scanning artifacts that result in missing geometry
|
||||
* Maya 2022+
|
||||
"""
|
||||
|
||||
from maya import OpenMaya
|
||||
from maya import cmds
|
||||
import numpy as np
|
||||
from datetime import datetime
|
||||
|
||||
# My modules
|
||||
from pygarment.mayaqltools import utils
|
||||
|
||||
|
||||
def _sample_on_sphere(rad):
    """Uniformly sample a point on a sphere of radius rad.

    Returns the point as a Maya-compatible floating-point vector.
    """
    # Normalizing a standard-normal sample yields a uniform direction
    # (Muller 1959, Marsaglia 1972) -- see the last method here:
    # https://mathworld.wolfram.com/SpherePointPicking.html
    direction = np.random.normal(size=3)
    point = direction / np.linalg.norm(direction) * rad

    return OpenMaya.MFloatVector(point[0], point[1], point[2])
|
||||
|
||||
|
||||
def _camera_surface(target, obstacles=None, vertical_scaling_factor=1.5, ground_scaling_factor=1.2):
    """Generate a (3D scanning) camera surface around the provided scene.

    Builds a cube around the bounding box of target + obstacles (scaled up
    by the given factors) and removes its floor face -- no cameras look
    from below.

    Parameters:
        * target -- name of the main scanned object
        * obstacles -- optional list of extra objects to enclose
        * vertical_scaling_factor -- box height scale w.r.t. the bounding box
        * ground_scaling_factor -- box footprint scale w.r.t. the bounding box

    Returns the camera-surface object name and the largest box dimension
    (usable as a ray length guaranteed to reach the surface).
    """
    # FIX: mutable default argument replaced with None sentinel
    if obstacles is None:
        obstacles = []

    # basically, draw a bounding box around the target
    bbox = np.array(cmds.exactWorldBoundingBox(obstacles + [target]))  # [xmin, ymin, zmin, xmax, ymax, zmax]

    top = bbox[3:]
    bottom = bbox[:3]
    center = (top + bottom) / 2
    dims = top - bottom
    dims = [max(dims[0], dims[2]) * ground_scaling_factor, dims[1] * vertical_scaling_factor]

    cube = cmds.polyCube(height=dims[1], depth=dims[0], width=dims[0], name='camera_surface')

    # align with center
    cmds.move(center[0], center[1], center[2], cube, absolute=True)

    # remove bottom face -- as if no cameras there
    # adding '.f[1]' would also remove the ceiling
    cmds.polyDelFacet(cube[0] + '.f[3]')  # we know exact structure of default polyCube in Maya2018 & Maya2020

    return cube[0], np.max(dims)
|
||||
|
||||
|
||||
def remove_invisible(target, obstacles=None, num_rays=30, visibile_rays=4):
    """Update target 3D mesh: remove faces that are not visible from camera_surface
        * due to self-occlusion or occlusion by an obstacle
        * Camera surface is generated around the target as a small "room" with empty floor and ceiling

        In my context, target is usually a garment mesh, and obstacle is a body surface
        Noise control:
        * num_rays -- number of random rays to emit from each face -- the less rays, the more noisy the output is
        * visibile_rays -- number of rays to hit camera surface without obstacles to consider the face to be visible
            BUT at least one ray is always required to consider face as visible!

        Returns the number of removed faces and the elapsed time in seconds.
    """
    # FIX: mutable default argument replaced with None sentinel
    if obstacles is None:
        obstacles = []

    # Follows the idea of self_intersect_3D() checks used in simulation pipeline
    print('Performing scanning imitation on {} with obstacles {}'.format(target, obstacles))

    # generate apropriate camera surface
    camera_surface_obj, ray_dist = _camera_surface(target, obstacles)

    start_time = datetime.now()

    # get mesh objects as OpenMaya object
    target_mesh, target_dag = utils.get_mesh_dag(target)
    camera_surface_mesh, _ = utils.get_mesh_dag(camera_surface_obj)
    obstacles_meshes = [utils.get_mesh_dag(name)[0] for name in obstacles]

    # search for intersections
    target_accelerator = target_mesh.autoUniformGridParams()
    cam_surface_accelerator = camera_surface_mesh.autoUniformGridParams()
    obstacles_accs = [mesh.autoUniformGridParams() for mesh in obstacles_meshes]
    to_delete = []

    target_face_iterator = OpenMaya.MItMeshPolygon(target_dag)
    while not target_face_iterator.isDone():  # https://stackoverflow.com/questions/40422082/how-to-find-face-neighbours-in-maya
        # midpoint of the current face -- start of all the rays
        face_mean = OpenMaya.MFloatPoint(target_face_iterator.center(OpenMaya.MSpace.kWorld))
        face_id = target_face_iterator.index()

        visible_count = 0
        visible = False
        # Send rays in all directions from the currect vertex
        for _ in range(num_rays):
            rayDir = _sample_on_sphere(ray_dist)
            # Face counts as visible from this ray when the ray reaches the
            # camera surface without hitting an obstacle or the target itself
            if (utils.test_ray_intersect(camera_surface_mesh, face_mean, rayDir, cam_surface_accelerator)  # reaches camera surface
                    and not any([utils.test_ray_intersect(mesh, face_mean, rayDir, acc,) for mesh, acc in zip(obstacles_meshes, obstacles_accs)])  # not blocked by an obstacle
                    and not utils.test_ray_intersect(target_mesh, face_mean, rayDir, target_accelerator, hit_tol=1e-5)):  # not self-occluded
                visible_count += 1
                if visible_count >= visibile_rays:  # enough rays are visible -- no need to test more
                    visible = True
                    break  # FIX: stop casting rays once visibility is established

        if not visible:
            to_delete.append(face_id)
        target_face_iterator.next()  # iterate!

    cmds.delete(camera_surface_obj)  # clean-up the scene

    # Remove invisible faces
    delete_strs = [target + '.f[{}]'.format(face_id) for face_id in to_delete]
    if len(delete_strs) > 0:
        cmds.polyDelFacet(tuple(delete_strs))  # as this is the last command to execute, it could be undone with Ctrl-Z once

    passed = datetime.now() - start_time
    print('{}::Removed {} faces after {}. Press Ctrl-Z to undo the changes'.format(target, len(to_delete), passed))

    return len(to_delete), passed.total_seconds()
|
||||
|
||||
264
pygarment/mayaqltools/simulation.py
Normal file
264
pygarment/mayaqltools/simulation.py
Normal file
@@ -0,0 +1,264 @@
|
||||
"""Routines to run cloth simulation in Maya + Qualoth"""
|
||||
|
||||
# Basic
|
||||
import time
|
||||
import os
|
||||
|
||||
# Maya
|
||||
from maya import cmds
|
||||
|
||||
# My modules
|
||||
from pygarment.pattern.core import BasicPattern
|
||||
import pygarment.mayaqltools as mymaya
|
||||
from pygarment.mayaqltools import qualothwrapper as qw
|
||||
|
||||
|
||||
# ----------- High-level requests --------------
|
||||
# TODO Deprecated
|
||||
def single_file_sim(resources, props, caching=False):
    """
    Simulates the given template and puts the results in original template folder,
    including config and statistics.

    Parameters:
        * resources -- dict of paths with 'bodies_path', 'scenes_path' and 'templates_path' entries
        * props -- Properties object carrying 'body', 'render', 'templates' and 'sim' sections
        * caching -- enables caching of every frame of simulation (disabled by default)
    """
    try:
        # ----- Init -----
        init_sim_props(props, True)  # second positional arg is batch_run -> also creates 'processed'/'stop_over' stats
        qw.load_plugin()
        scene = mymaya.Scene(
            os.path.join(resources['bodies_path'], props['body']),
            props['render'],
            scenes_path=resources['scenes_path'])

        # Main part
        template_simulation(os.path.join(resources['templates_path'], props['templates']),
                            scene, props['sim'], caching=caching)

        # Fin
        print('\nFinished experiment')
        try:
            # remove unnecessary field -- 'processed' only matters for batch runs
            del props['sim']['stats']['processed']
        except KeyError:
            pass
        props.serialize(os.path.join(resources['templates_path'], 'props.json'))
    except Exception as e:
        # NOTE(review): broad catch that only prints the error -- the traceback is
        # lost and the caller cannot distinguish success from failure; consider re-raising
        print(e)
|
||||
|
||||
|
||||
def batch_sim(resources, data_path, dataset_props,
              num_samples=None, caching=False, force_restart=False):
    """
    Performs pattern simulation for each example in the dataset
    given by dataset_props.
    Batch processing is automatically resumed
    from the last unprocessed datapoint if restart is not forced. The last
    example on the processed list is assumed to cause the failure, so it can be later found in failure cases.

    Parameters:
        * resources -- dict of paths to needed resources:
            * bodies_default_path -- path to folder with body meshes
            * data_path -- path to folder with the dataset
            * scenes_path -- path to folder with rendering scenes
        * dataset_props -- dataset properties. Properties has to be of custom data_config.Properties() class and contain
            * dataset folder (inside data_path)
            * name of pattern template
            * name of body .obj file
            * type of dataset structure (with/without subfolders for patterns)
            * list of processed samples if processing of dataset was already attempted
            Other needed properties will be filled with default values if the corresponding sections
            are not found in props object
        * num_samples -- number of (unprocessed) samples from dataset to process with this run. If None, runs over all unprocessed samples
        * caching -- enables caching of every frame of simulation (disabled by default)
        * force_restart -- force restarting the batch processing even if resume conditions are met.

    Returns True when the whole dataset has been processed, False otherwise.
    """
    # ----- Init -----
    if 'frozen' in dataset_props and dataset_props['frozen']:
        # avoid accidential re-runs of data
        print('WARNING: dataset is frozen, processing is skipped')
        return True

    resume = init_sim_props(dataset_props, batch_run=True, force_restart=force_restart)

    qw.load_plugin()
    scene = mymaya.Scene(
        os.path.join(resources['bodies_default_path'], dataset_props['body']),
        dataset_props['render'],
        scenes_path=resources['scenes_path'])

    pattern_specs = _get_pattern_files(data_path, dataset_props)
    data_props_file = os.path.join(data_path, 'dataset_properties.json')

    # Simulate every template
    count = 0
    for pattern_spec in pattern_specs:
        # skip processed cases -- in case of resume. First condition needed to skip checking second one on False =)
        pattern_spec_norm = os.path.normpath(pattern_spec)
        pattern_name = BasicPattern.name_from_path(pattern_spec_norm)
        if resume and pattern_name in dataset_props['sim']['stats']['processed']:
            print('Skipped as already processed {}'.format(pattern_spec_norm))
            continue

        dataset_props['sim']['stats']['processed'].append(pattern_name)
        _serialize_props_with_sim_stats(dataset_props, data_props_file)  # save info of processed files before potential crash

        template_simulation(pattern_spec_norm,
                            scene,
                            dataset_props['sim'],
                            delete_on_clean=True,  # delete geometry after sim as we don't need it any more
                            caching=caching,
                            save_maya_scene=False)

        if pattern_name in dataset_props['sim']['stats']['fails']['crashes']:
            # if we successfully finished simulating crashed example -- it's not a crash any more!
            print('Crash successfully resimulated!')
            dataset_props['sim']['stats']['fails']['crashes'].remove(pattern_name)

        count += 1  # count actively processed cases
        if num_samples is not None and count >= num_samples:  # only process requested number of samples
            break

    # Fin
    print('\nFinished batch of ' + os.path.basename(data_path))
    try:
        if len(dataset_props['sim']['stats']['processed']) >= len(pattern_specs):
            # processing successfully finished -- no need to resume later
            del dataset_props['sim']['stats']['processed']
            dataset_props['frozen'] = True
            process_finished = True
        else:
            process_finished = False
    except KeyError:
        # 'processed' key already removed -- treat as finished
        print('KeyError -processed-')
        process_finished = True
        pass

    # Logs
    _serialize_props_with_sim_stats(dataset_props, data_props_file)

    return process_finished
|
||||
|
||||
|
||||
# ------- Utils -------
|
||||
def init_sim_props(props, batch_run=False, force_restart=False):
    """
    Add default config values if not given in props & clean-up stats if not resuming previous processing.

    Parameters:
        * props -- custom data_config.Properties() object to initialize in place
        * batch_run -- also set up batch-tracking stats ('processed', 'stop_over')
        * force_restart -- wipe previous stats even if resume conditions are met

    Returns a flag whether current simulation is a resumed last one.
    """
    if 'sim' not in props:
        # no simulation section at all -- fill with defaults
        props.set_section_config(
            'sim',
            max_sim_steps=500,
            zero_gravity_steps=5,  # time to assembly
            static_threshold=0.05,  # 0.01 # depends on the units used,
            non_static_percent=1,
            material={},
            body_friction=0.5,
            resolution_scale=5
        )

    if 'material' not in props['sim']['config']:
        props['sim']['config']['material'] = {}

    if 'render' not in props:
        # init with defaults
        props.set_section_config(
            'render',
            resolution=[800, 800]
        )

    if batch_run and 'processed' in props['sim']['stats'] and not force_restart:
        # resuming existing batch processing -- do not clean stats
        # Assuming the last processed example caused the failure
        last_processed = props['sim']['stats']['processed'][-1]
        props['sim']['stats']['stop_over'].append(last_processed)  # indicate resuming dataset simulation

        if not any([(name in last_processed) or (last_processed in name) for name in props['render']['stats']['render_time']]):
            # crash detected -- the last example does not appear in the render stats
            if last_processed not in props['sim']['stats']['fails']['crashes']:
                # first time to crash here -- try to re-do this example => remove from visited
                props['sim']['stats']['processed'].pop()
                props['sim']['stats']['fails']['crashes'].append(last_processed)
            # else we crashed here before -- do not re-try + leave in crashed list

        return True

    # else new life
    # Prepare commulative stats
    props.set_section_stats('sim', fails={}, sim_time={}, spf={}, fin_frame={})
    # pre-create all known failure categories so downstream code can append freely
    props['sim']['stats']['fails'] = {
        'crashes': [],
        'intersect_colliders': [],
        'intersect_self': [],
        'static_equilibrium': [],
        'fast_finish': [],
        'pattern_loading': []
    }

    props.set_section_stats('render', render_time={})

    if batch_run:  # track batch processing
        props.set_section_stats('sim', processed=[], stop_over=[])

    return False
|
||||
|
||||
|
||||
def template_simulation(spec, scene, sim_props, delete_on_clean=False, caching=False, save_maya_scene=False):
    """
    Simulate given template within given scene & save log files.

    Parameters:
        * spec -- path to the pattern specification file
        * scene -- mymaya.Scene object providing the body, shaders and rendering
        * sim_props -- 'sim' section of dataset properties ('config' + 'stats')
        * delete_on_clean -- remove garment geometry from the scene after simulation
        * caching -- enables caching of every simulation frame
        * save_maya_scene -- additionally save the full Maya scene next to the garment
    """
    print('\nGarment load')
    garment = mymaya.MayaGarment(spec)
    try:
        garment.load(
            shader_group=scene.cloth_SG(),
            obstacles=[scene.body],  # I don't add floor s.t. garment falls infinitely if falls
            config=sim_props['config']
        )
    except mymaya.PatternLoadingError as e:
        # record error and skip subequent processing
        sim_props['stats']['fails']['pattern_loading'].append(garment.name)
    else:
        # garment.save_mesh(tag='stitched')  # Saving the geometry before eny forces were applied
        garment.sim_caching(caching)

        qw.run_sim(garment, sim_props)

        # save even if sim failed -- to see what happened!
        garment.save_mesh(tag='sim')
        scene.render(garment.path, garment.name)
        if save_maya_scene:
            # save current Maya scene
            cmds.file(rename=os.path.join(garment.path, garment.name + '_scene'))
            cmds.file(save=True, type='mayaBinary', force=True, defaultExtensions=True)

        garment.clean(delete_on_clean)
|
||||
|
||||
|
||||
def _serialize_props_with_sim_stats(dataset_props, filename):
|
||||
"""Compute data processing statistics and serialize props to file"""
|
||||
dataset_props.stats_summary()
|
||||
dataset_props.serialize(filename)
|
||||
|
||||
|
||||
def _get_pattern_files(data_path, dataset_props):
|
||||
""" Collects paths to all the pattern files in given folder"""
|
||||
|
||||
to_ignore = ['renders'] # special dirs not to include in the pattern list
|
||||
|
||||
pattern_specs = []
|
||||
root, dirs, files = next(os.walk(data_path))
|
||||
if dataset_props['to_subfolders']:
|
||||
# https://stackoverflow.com/questions/800197/how-to-get-all-of-the-immediate-subdirectories-in-python
|
||||
for directory in dirs:
|
||||
if directory not in to_ignore:
|
||||
pattern_specs.append(os.path.join(root, directory, 'specification.json')) # cereful for file name changes ^^
|
||||
else:
|
||||
for file in files:
|
||||
# NOTE filtering might not be very robust
|
||||
if ('.json' in file
|
||||
and 'specification' in file
|
||||
and 'template' not in file):
|
||||
pattern_specs.append(os.path.normpath(os.path.join(root, file)))
|
||||
return pattern_specs
|
||||
|
||||
167
pygarment/mayaqltools/utils.py
Normal file
167
pygarment/mayaqltools/utils.py
Normal file
@@ -0,0 +1,167 @@
|
||||
"""Shares utils to work with Maya"""
|
||||
|
||||
import ctypes
|
||||
import os
|
||||
import numpy as np
|
||||
from maya import OpenMaya
|
||||
from maya import cmds
|
||||
|
||||
|
||||
# ----- Working with files -----
|
||||
def load_file(filepath, name='object'):
    """Import a mesh file into the scene and return its (renamed) root node.

    Raises RuntimeError when the file does not exist.
    """
    if not os.path.isfile(filepath):
        raise RuntimeError('Loading Object from file to Maya::Missing file {}'.format(filepath))

    imported_root = cmds.file(filepath, i=True, rnn=True)[0]
    # trailing '#' lets Maya append a unique numeric suffix to the name
    return cmds.rename(imported_root, name + '#')
|
||||
|
||||
|
||||
def save_mesh(target, to_file):
    """Export the given object to a file as a simple OBJ mesh."""
    # Ensure only the requested mesh is selected for export
    cmds.select(clear=True)
    cmds.select(target)

    simple_obj_options = 'groups=0;ptgroups=0;materials=0;smoothing=0;normals=1'  # very simple obj
    cmds.file(
        to_file,
        type='OBJExport',
        exportSelectedStrict=True,  # export only the explicitly selected nodes
        options=simple_obj_options,
        force=True,  # override if the file already exists
        defaultExtensions=False
    )

    cmds.select(clear=True)
|
||||
|
||||
|
||||
# ----- Mesh info -----
|
||||
def get_dag(object_name):
    """Return the DAG path handle for the requested scene object."""
    selection = OpenMaya.MSelectionList()
    selection.add(object_name)

    dag_path = OpenMaya.MDagPath()
    selection.getDagPath(0, dag_path)  # fills dag_path in place (C++-style API)

    return dag_path
|
||||
|
||||
|
||||
def get_mesh_dag(object_name):
    """Return an (MFnMesh, MDagPath) pair for the named scene object."""
    # get object as OpenMaya object -- through its DAG path
    dag = get_dag(object_name)

    # reference https://help.autodesk.com/view/MAYAUL/2017/ENU/?guid=__py_ref_class_open_maya_1_1_m_fn_mesh_html
    return OpenMaya.MFnMesh(dag), dag
|
||||
|
||||
|
||||
def get_vertices_np(mesh):
    """Return world-space vertex coordinates of the mesh as an (N, 3) numpy array."""
    maya_points = OpenMaya.MPointArray()
    mesh.getPoints(maya_points, OpenMaya.MSpace.kWorld)

    num_verts = maya_points.length()
    vertices = np.empty((num_verts, 3))
    for row in range(num_verts):
        point = maya_points[row]
        vertices[row] = (point[0], point[1], point[2])

    return vertices
|
||||
|
||||
|
||||
def match_vert_lists(short_list, long_list):
    """
    Find the vertices from long_list that correspond to verts in short_list.

    Both lists are numpy arrays of 3D coordinates.
    NOTE: Assuming order is matching => O(len(long_list)) complexity:
        order of vertices in short_list is the same as in long_list (for those that are left)

    Returns a list of indices into long_list, one per vertex of short_list
    (an empty short_list yields an empty result -- previously this crashed
    with IndexError).
    Raises ValueError when some short_list vertex has no match.
    """
    match_list = []

    idx_short = 0
    for idx_long, long_vertex in enumerate(long_list):
        if idx_short >= len(short_list):  # short list finished before the long one
            break
        if np.isclose(short_list[idx_short], long_vertex, atol=1e-5).all():
            match_list.append(idx_long)
            idx_short += 1  # advance the short list indexing

    if len(match_list) != len(short_list):
        # FIX: corrected 'unsuccessfull' typo in the error message
        raise ValueError('Vertex matching unsuccessful: matched {} of {} vertices in short list'.format(
            len(match_list), len(short_list)
        ))

    return match_list
|
||||
|
||||
|
||||
# ---- Mesh operations ----
|
||||
def test_ray_intersect(mesh, raySource, rayVector, accelerator=None, hit_tol=None, return_info=False):
    """Check if the given ray intersects the given mesh.

        * mesh is expected to be of MFnMesh type
        * raySource / rayVector -- MFloatPoint / MFloatVector; the search is limited
          to the length of rayVector (maxParam below)
        * hit_tol ignores intersections that are within hit_tol from the ray source
          (as % of ray length) -- useful when checking self-intersection
        * accelerator is a structure for speeding-up calculations.
          It can be initialized from an MFnMesh object and should be supplied with
          every call to this function
        * return_info -- additionally return hit faces, points and ray parameters

    Returns a boolean hit flag (plus hit details when return_info is set).
    """
    # It turns out that OpenMaya python reference has nothing to do with reality of passing arguments:
    # most of the functions used below are to be treated as wrappers of the c++ API
    # https://help.autodesk.com/view/MAYAUL/2018//ENU/?guid=__cpp_ref_class_m_fn_mesh_html

    # follow structure https://stackoverflow.com/questions/58390664/how-to-fix-typeerror-in-method-mfnmesh-anyintersection-argument-4-of-type
    maxParam = 1  # only search for intersections within given vector
    testBothDirections = False  # only in the given direction
    sortHits = False  # no need to waste time on sorting

    # output containers filled in place by the C++-style API call below
    hitPoints = OpenMaya.MFloatPointArray()
    hitRayParams = OpenMaya.MFloatArray()
    hitFaces = OpenMaya.MIntArray()
    hit = mesh.allIntersections(
        raySource, rayVector, None, None, False, OpenMaya.MSpace.kWorld, maxParam, testBothDirections, accelerator, sortHits,
        hitPoints, hitRayParams, hitFaces, None, None, None, 1e-6)

    if hit and hit_tol is not None:
        # keep the hit only if some intersection lies beyond the tolerance band around the source
        hit = any([dist > hit_tol for dist in hitRayParams])

    if return_info:
        return hit, hitFaces, hitPoints, hitRayParams

    return hit
|
||||
|
||||
|
||||
def edge_vert_ids(mesh, edge_id):
    """Return the pair of vertex ids for a given edge in the given mesh."""
    # Have to go through the C++ wrappers:
    # getEdgeVertices() writes into a raw int2 pointer rather than returning values
    script_util = OpenMaya.MScriptUtil(0.0)
    v_ids_cptr = script_util.asInt2Ptr()  # https://forums.cgsociety.org/t/mfnmesh-getedgevertices-error-on-2011/1652362
    mesh.getEdgeVertices(edge_id, v_ids_cptr)

    # get values from SWIG pointer https://stackoverflow.com/questions/39344039/python-cast-swigpythonobject-to-python-object
    # reinterpret the pointed-to memory as two unsigned ints via ctypes
    ty = ctypes.c_uint * 2
    v_ids_list = ty.from_address(int(v_ids_cptr))
    return v_ids_list[0], v_ids_list[1]
|
||||
|
||||
|
||||
def scale_to_cm(target, max_height_cm=220):
    """Heuristically detect the units of the target mesh and rescale it to centimeters.

    * default value of max_height_cm is suited for meshes of humans
    * detection uses the mesh height (Y axis) -- prone to mistakes if
      unusual units are used for the body
    """
    # ((xmin,xmax), (ymin,ymax), (zmin,zmax))
    bounding_box = cmds.polyEvaluate(target, boundingBox=True)
    height = bounding_box[1][1] - bounding_box[1][0]

    if height < max_height_cm * 0.01:
        factor = 100
        message = 'WARNING: {} is found to use meters as units. Scaled up by 100 for cm'
    elif height < max_height_cm * 0.1:
        factor = 10
        message = 'WARNING: {} is found to use decimeters as units. Scaled up by 10 for cm'
    elif height > max_height_cm:
        factor = 0.1
        message = 'WARNING: {} is found to use millimiters as units. Scaled down by 0.1 for cm'
    else:
        return  # height already within the expected centimeter range -- nothing to do

    cmds.scale(factor, factor, factor, target, centerPivot=True, absolute=True)
    print(message.format(target))
|
||||
0
pygarment/meshgen/__init__.py
Normal file
0
pygarment/meshgen/__init__.py
Normal file
1630
pygarment/meshgen/boxmeshgen.py
Normal file
1630
pygarment/meshgen/boxmeshgen.py
Normal file
File diff suppressed because it is too large
Load Diff
413
pygarment/meshgen/datasim_utils.py
Normal file
413
pygarment/meshgen/datasim_utils.py
Normal file
@@ -0,0 +1,413 @@
|
||||
"""Routines to run cloth simulation"""
|
||||
|
||||
# Basic
|
||||
import time
|
||||
import multiprocessing
|
||||
import platform
|
||||
import signal
|
||||
from pathlib import Path
|
||||
|
||||
# BoxMeshGen
|
||||
import pygarment.meshgen.boxmeshgen as bmg
|
||||
from pygarment.meshgen.boxmeshgen import BoxMesh
|
||||
from pygarment.meshgen.sim_config import PathCofig
|
||||
|
||||
# Warp simulation
|
||||
from pygarment.meshgen.simulation import run_sim
|
||||
|
||||
|
||||
def batch_sim(data_path, output_path, dataset_props,
              run_default_body=False, num_samples=None, caching=False, force_restart=False):
    """
    Performs pattern simulation for each example in the dataset
    given by dataset_props.
    Batch processing is automatically resumed
    from the last unprocessed datapoint if restart is not forced. The last
    example on the processed list is assumed to cause the failure, so it can be later found in failure cases.

    Parameters:
        * data_path -- path to folder with patterns (for given body type)
        * output_path -- path to folder with the simulated dataset
        * dataset_props -- dataset properties. Properties has to be of custom data_config.Properties() class and contain
            * dataset folder (inside data_path)
            * type of dataset structure (with/without subfolders for patterns)
            * list of processed samples if processing of dataset was already attempted
            * Simulation parameters
            * Rendering parameters
            Other needed properties will be filled with default values if the corresponding sections
            are not found in props object
        * run_default_body -- runs the dataset on the default body (disabled by default)
        * num_samples -- number of (unprocessed) samples from dataset to process with this run. If None, runs over all unprocessed samples
        * caching -- enables caching of every frame of simulation (disabled by default)
        * force_restart -- force restarting the batch processing even if resume conditions are met.

    Returns True when the whole dataset has been processed, False otherwise.
    """
    # ----- Init -----
    if 'frozen' in dataset_props and dataset_props['frozen']:
        # avoid accidential re-runs of data
        print('Warning: dataset is frozen, processing is skipped')
        return True

    resume = init_sim_props(dataset_props, batch_run=True, force_restart=force_restart)
    body_type = 'default_body' if run_default_body else 'random_body'
    data_props_file = output_path / f'dataset_properties_{body_type}.yaml'
    pattern_names = _get_pattern_names(data_path)

    # Simulate every template
    count = 0
    for pattern_name in pattern_names:
        # skip processed cases -- in case of resume. First condition needed to skip checking second one on False =)
        if resume and pattern_name in dataset_props['sim']['stats']['processed']:
            print(f'Skipped as already processed {pattern_name}')
            continue

        dataset_props['sim']['stats']['processed'].append(pattern_name)
        _serialize_props_with_sim_stats(dataset_props,
                                        data_props_file)  # save info of processed files before potential crash

        try:
            paths = PathCofig(
                in_element_path=data_path / pattern_name,
                out_path=output_path,
                in_name=pattern_name,
                body_name=dataset_props['body_default'],
                samples_name=dataset_props['body_samples'],
                default_body=run_default_body
            )
        except BaseException as e:
            # Not all files available -- record as a crash and move on
            print("***Pattern loading failed (paths)***")
            dataset_props.add_fail('sim', 'crashes', pattern_name)
        else:
            template_simulation(paths, dataset_props, caching=caching)

        count += 1  # count actively processed cases
        if num_samples is not None and count >= num_samples:  # only process requested number of samples
            break

    # Fin
    print(f'\nFinished batch of {data_path}')
    try:
        if len(dataset_props['sim']['stats']['processed']) >= len(pattern_names):
            # processing successfully finished -- no need to resume later
            del dataset_props['sim']['stats']['processed']
            dataset_props['frozen'] = True
            process_finished = True
        else:
            process_finished = False
    except KeyError:
        # 'processed' key already removed -- treat as finished
        print('KeyError -processed-')
        process_finished = True
        pass

    # Logs
    _serialize_props_with_sim_stats(dataset_props, data_props_file)

    return process_finished
|
||||
|
||||
|
||||
def resim_fails(data_path, output_path, dataset_props,
                run_default_body=False, caching=False):
    """Resimulate failure cases -- maybe some of them would get fixed.

    Intersection failures are left alone; every other failure category is
    cleared and its samples are removed from the 'processed' list so that
    batch_sim() picks them up again.
    """
    print('************** RESIMULATING FAILS ****************')

    sim_stats = dataset_props['sim']['stats']
    fail_lists = sim_stats['fails']

    # Gather samples worth retrying and clear their failure records
    skip_keys = ('cloth_body_intersection', 'cloth_self_intersection')
    retry_samples = set()
    for fail_key in fail_lists:
        if fail_key in skip_keys:
            continue
        retry_samples.update(fail_lists[fail_key])
        fail_lists[fail_key] = []  # NOTE: If nothing to be added in this key, it was already an empty array (and nothing changed)

    if not retry_samples:
        # Nothing to retry -- report the previous finished state
        return dataset_props['frozen'] if 'frozen' in dataset_props else False

    if 'processed' not in sim_stats:
        sim_stats['processed'] = _get_pattern_names(data_path)
        dataset_props['frozen'] = False

    # Remove fails from processed to trigger re-simulation
    for sample in retry_samples:
        sim_stats['processed'].remove(sample)

    # Start simulation again
    return batch_sim(
        data_path, output_path, dataset_props,
        run_default_body=run_default_body,
        num_samples=len(retry_samples) + 1,
        caching=caching,
        force_restart=False
    )
|
||||
|
||||
# ------- Utils -------
|
||||
def init_sim_props(props, batch_run=False, force_restart=False):
    """
    Add default config values if not given in props & clean-up stats if not resuming previous processing
    Returns a flag whether current simulation is a resumed last one

    Args:
        props: Properties object to populate ('sim' and 'render' sections).
        batch_run: True when processing a dataset batch (enables
            'processed'/'stop_over' bookkeeping and crash recovery).
        force_restart: when True, ignore any previous progress and reset stats.

    Returns:
        bool: True if this call resumes a previous (interrupted) batch run,
            False when stats were (re-)initialized from scratch.
    """
    if 'sim' not in props:
        props.set_section_config(
            'sim',
            max_sim_steps=1000, #affects speed
            max_meshgen_time=20, #in seconds, affects speed
            max_frame_time= 15, #in seconds, affects speed
            max_sim_time= 1500, #in seconds, affects speed
            zero_gravity_steps=10, # 0.01 # depends on the units used, #affects speed
            static_threshold=0.03, #affects speed
            non_static_percent=1.5, #affects speed
            max_body_collisions=0,
            max_self_collisions=0,
            resolution_scale=1.0, #affects speed
            ground=False, # Do not add floor s.t. garment falls infinitely if falls
        )

    # Cloth material parameters (ke/kd pairs: stiffness / damping)
    if 'material' not in props['sim']['config']:
        props['sim']['config']['material'] = {
            'garment_tri_ka': 10000.0,

            'garment_edge_ke': 1.0,
            'garment_tri_ke': 10000.0,
            'spring_ke': 50000.0,

            'garment_edge_kd': 10.0,
            'garment_tri_kd': 1.0,
            'spring_kd': 10.0,

            'fabric_density': 1.0,
            'fabric_thickness': 0.1,
            'fabric_friction': 0.5

        }

    # Simulation behavior switches (collisions, attachment, damping, smoothing)
    if 'options' not in props['sim']['config']:
        props['sim']['config']['options'] = {
            'enable_particle_particle_collisions': False,
            'enable_triangle_particle_collisions': True,
            'enable_edge_edge_collisions': True,
            'enable_body_collision_filters': True,

            'enable_attachment_constraint': True,
            'attachment_frames': 400,
            'attachment_label_names': ['lower_interface'],
            'attachment_stiffness': [1000.],
            'attachment_damping': [10.],

            'global_damping_factor': 0.25,
            'global_damping_effective_velocity': 0.0,
            'global_max_velocity': 25.0,

            'enable_global_collision_filter': True,
            'enable_cloth_reference_drag': False,
            'cloth_reference_margin': 0.1,

            # FIXME Re-writes mesh references causing occasional CUDA errors when referencing meshes other than the body
            'enable_body_smoothing': False,
            'smoothing_total_smoothing_factor': 1.0,
            'smoothing_recover_start_frame': 150,
            'smoothing_num_steps': 100,
            'smoothing_frame_gap_between_steps': 1,

            'body_collision_thickness': 0.25,
            'body_friction': 0.5
        }

    if 'render' not in props:
        # init with defaults
        props.set_section_config(
            'render',
            resolution=[800, 800],
            sides=['front','back'],
            front_camera_location=None,
            uv_texture={
                'seam_width': 0.5,
                'dpi': 1500,
                'fabric_grain_texture_path': None,
                'fabric_grain_resolution': 5,
            }
        )

    if batch_run and 'processed' in props['sim']['stats'] and not force_restart:
        # resuming existing batch processing -- do not clean stats
        # Assuming the last example processed example caused the failure
        last_processed = props['sim']['stats']['processed'][-1]

        # An example that fully finished shows up in render_time stats;
        # substring match in both directions covers name variants
        if not any([(name in last_processed) or (last_processed in name) for name in
                    props['render']['stats']['render_time']]):
            # crash detected -- the last example does not appear in the stats
            if last_processed not in props['sim']['stats']['fails']['crashes']:
                # add to simulation failures
                # Remove last from processed if it did not crash
                if last_processed not in props['sim']['stats']['stop_over']:
                    props['sim']['stats']['processed'].pop()
                else:
                    # Already passed here once -> add as crash
                    props['sim']['stats']['fails']['crashes'].append(last_processed)

            props['sim']['stats']['stop_over'].append(last_processed) # indicate resuming dataset simulation

        return True

    # else new life
    # Prepare commulative stats
    props.set_section_stats('sim',
                            fails={},
                            meshgen_time={},
                            sim_time={},
                            spf={},
                            fin_frame={},
                            face_count={},
                            body_collisions={},
                            self_collisions={})
    # Known failure categories, each holding a list of garment names
    props['sim']['stats']['fails'] = {
        'crashes': [],
        'cloth_body_intersection': [],
        'cloth_self_intersection': [],
        'static_equilibrium': [],
        'fast_finish': [],
        'pattern_loading': [],
        'multi_stitching': [],
        'gt_edges_creation': []

    }

    props.set_section_stats('render', render_time={})

    if batch_run: # track batch processing
        props.set_section_stats('sim', processed=[], stop_over=[])

    return False
|
||||
|
||||
def template_simulation(paths: PathCofig, props, caching=False):
    """
    Simulate given template within given scene & save log files

    Loads the garment box mesh (with a hard timeout), records mesh-generation
    stats, serializes the stitched geometry, and runs the cloth simulation.
    All known loading failures are recorded in props (via add_fail) and the
    garment is skipped rather than crashing the batch.

    Args:
        paths (PathCofig): resolved input/output locations for this garment.
        props: Properties object with 'sim' and 'render' sections.
        caching: when True, stores the USD sim recording (slower).
    """
    sim_props = props['sim']
    res = sim_props['config']['resolution_scale']

    garment = BoxMesh(paths.in_g_spec, res)

    print('\n-----------------------------'
          '\nLoading garment: ', garment.name)

    meshgen_start_time = time.time()
    timeout_after = int(get_dict_default_value(sim_props['config'], 'max_meshgen_time', 20))

    # NOTE(review): some failure_case keys below ('meshgen-timeout',
    # 'degenerate_triangles', ...) are not pre-created in init_sim_props --
    # presumably add_fail creates missing keys on demand; confirm.
    try:
        _load_boxmesh_timeout(garment, timeout_after)
    except TimeoutError as e:
        print(e)
        failure_case = 'meshgen-timeout'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.PatternLoadingError as e:
        # record error and skip subequent processing
        print(e)
        failure_case = 'pattern_loading'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.DegenerateTrianglesError as e:
        print(e)
        failure_case = 'degenerate_triangles'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.MultiStitchingError as e:
        print(e)
        failure_case = 'multi_stitching'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.NormError as e:
        print(e)
        failure_case = 'norm_error'
        props.add_fail('sim', failure_case, garment.name)
    except bmg.StitchingError as e:
        print(e)
        failure_case = 'stitching_error'
        props.add_fail('sim', failure_case, garment.name)
    except BaseException as e: # Catch the rest of exceptions
        # NOTE(review): BaseException also swallows KeyboardInterrupt /
        # SystemExit -- confirm this is intended for batch robustness.
        print("***Pattern loading failed due to unknown error***")
        print(e)
        failure_case = 'crashes'
        props.add_fail('sim', failure_case, garment.name)
    else:
        # garment.save_mesh(tag='stitched') # Saving the geometry before eny forces were applied
        sim_props['stats']['meshgen_time'][garment.name] = time.time() - meshgen_start_time
        sim_props['stats']['face_count'][garment.name] = len(garment.faces)
        sim_props_option = sim_props['config']['options']

        vertex_normals = get_dict_default_value(sim_props_option,'store_vertex_normals',False)
        store_panels = get_dict_default_value(sim_props_option,'store_panels',False)
        garment.serialize(
            paths,
            with_v_norms=vertex_normals,
            store_panels=store_panels,
            uv_config=props['render']['config']['uv_texture']
        )

        run_sim(
            garment.name,
            props,
            paths,
            save_v_norms=vertex_normals,
            store_usd=caching, # NOTE: False for fast simulation!,
            optimize_storage=sim_props['config']['optimize_storage'],
            verbose=False
        )
||||
|
||||
def _load_boxmesh_timeout(garment, timeout_after):
|
||||
if platform.system() == "Windows":
|
||||
"""https://stackoverflow.com/a/14920854"""
|
||||
p = multiprocessing.Process(target=garment.load(), name="GarmentGeneration")
|
||||
p.start()
|
||||
|
||||
# Wait timeout_after seconds for garment.load()
|
||||
time.sleep(timeout_after)
|
||||
|
||||
# If thread is active
|
||||
if p.is_alive():
|
||||
# Terminate the process
|
||||
p.terminate()
|
||||
p.join()
|
||||
raise TimeoutError
|
||||
|
||||
elif platform.system() in ["Linux", "OSX"]:
|
||||
"""https://code-maven.com/python-timeout"""
|
||||
def alarm_handler(signum, frame):
|
||||
raise TimeoutError
|
||||
|
||||
signal.signal(signal.SIGALRM, alarm_handler)
|
||||
signal.alarm(timeout_after)
|
||||
s_time = time.time()
|
||||
try:
|
||||
garment.load()
|
||||
except TimeoutError as ex:
|
||||
raise TimeoutError
|
||||
else:
|
||||
e_time = time.time() - s_time
|
||||
# print("No timeout error with time: ",e_time)
|
||||
signal.alarm(0)
|
||||
|
||||
|
||||
def get_dict_default_value(props, name, default_value):
    """Return props[name] when the key is present, otherwise default_value."""
    return props[name] if name in props else default_value
||||
|
||||
def _serialize_props_with_sim_stats(dataset_props, filename):
|
||||
"""Compute data processing statistics and serialize props to file"""
|
||||
dataset_props.stats_summary()
|
||||
dataset_props.serialize(filename)
|
||||
|
||||
|
||||
def _get_pattern_names(data_path: Path):
|
||||
names = []
|
||||
to_ignore = ['renders'] # special dirs not to include in the pattern list
|
||||
for el in data_path.iterdir():
|
||||
if el.is_dir() and el.stem not in to_ignore:
|
||||
names.append(el.stem)
|
||||
|
||||
return names
|
||||
618
pygarment/meshgen/garment.py
Normal file
618
pygarment/meshgen/garment.py
Normal file
@@ -0,0 +1,618 @@
|
||||
import igl
|
||||
import json
|
||||
import pickle
|
||||
import numpy as np
|
||||
import yaml
|
||||
|
||||
import warp as wp
|
||||
|
||||
import warp.sim.render
|
||||
from warp.sim.utils import implicit_laplacian_smoothing
|
||||
import warp.collision.panel_assignment as assign
|
||||
from warp.sim.collide import count_self_intersections, count_body_cloth_intersections
|
||||
from warp.sim.integrator_xpbd import replace_mesh_points
|
||||
|
||||
# Custom
|
||||
from pygarment.meshgen.sim_config import PathCofig, SimConfig
|
||||
from pygarment.pattern.core import BasicPattern
|
||||
|
||||
class Cloth:
|
||||
    def __init__(self,
                 name, config: SimConfig, paths: PathCofig,
                 caching=False):
        """Set up the cloth simulation for one garment.

        Builds the warp model (cloth + body collider), applies the final
        model-level settings from `config`, and initializes integrator,
        states and (on CUDA) the capture graph.

        Args:
            name: garment name used in logs/stats keys.
            config (SimConfig): simulation configuration.
            paths (PathCofig): resolved input/output file locations.
            caching: when True, keeps a USD SimRenderer and extra logs.
        """
        self.caching = caching # Saves intermediate frames, extra logs, etc.
        self.paths = paths
        self.name = name
        self.config = config

        self.sim_fps = config.sim_fps
        self.sim_substeps = config.sim_substeps
        self.zero_gravity_steps = config.zero_gravity_steps
        # integrator substep length: one frame split into sim_substeps
        self.sim_dt = (1.0 / self.sim_fps) / self.sim_substeps
        self.usd_frame_time = 0.0
        # CUDA graph capture is only available on GPU devices
        self.sim_use_graph = wp.get_device().is_cuda
        self.device = wp.get_device() if wp.get_device().is_cuda else 'cpu'
        self.frame = -1

        # scaling: cloth kept as-is, body scaled x100
        # (presumably meters -> cm; TODO confirm against asset units)
        self.c_scale = 1.0
        self.b_scale = 100.0
        self.body_path = paths.in_body_obj

        # collision resolution options
        self.enable_body_smoothing = config.enable_body_smoothing
        self.enable_cloth_reference_drag = config.enable_cloth_reference_drag

        # Build the stage -- model object, colliders, etc.
        self.build_stage(config)

        # -------- Final model settings ----------
        # NOTE: global_viscous_damping: (damping_factor, min_vel_damp, max_vel)
        # apply damping when vel > min_vel_damp, and clamp vel below max_vel after damping
        # TODO Remove after refactoring Euler integrator
        self.model.global_viscous_damping = wp.vec3(
            (config.global_damping_factor, config.global_damping_effective_velocity, config.global_max_velocity))
        self.model.particle_max_velocity = config.global_max_velocity

        self.model.ground = config.ground

        # collision handling flags
        self.model.global_collision_filter = config.enable_global_collision_filter
        self.model.cloth_reference_drag = self.enable_cloth_reference_drag
        self.model.cloth_reference_margin = config.cloth_reference_margin
        self.model.cloth_reference_k = config.cloth_reference_k
        self.model.cloth_reference_watertight_whole_shape_index = 0
        self.model.enable_particle_particle_collisions = config.enable_particle_particle_collisions
        self.model.enable_triangle_particle_collisions = config.enable_triangle_particle_collisions
        self.model.enable_edge_edge_collisions = config.enable_edge_edge_collisions
        self.model.attachment_constraint = config.enable_attachment_constraint

        # soft-contact (cloth vs rigid body) response parameters
        self.model.soft_contact_margin = config.soft_contact_margin
        self.model.soft_contact_ke = config.soft_contact_ke
        self.model.soft_contact_kd = config.soft_contact_kd
        self.model.soft_contact_kf = config.soft_contact_kf
        self.model.soft_contact_mu = config.soft_contact_mu

        # particle (cloth-cloth) contact parameters
        self.model.particle_ke = config.particle_ke
        self.model.particle_kd = config.particle_kd
        self.model.particle_kf = config.particle_kf
        self.model.particle_mu = config.particle_mu
        self.model.particle_cohesion = config.particle_cohesion
        self.model.particle_adhesion = config.particle_adhesion

        #self.integrator = wp.sim.SemiImplicitIntegrator() # semi-implicit alternative, unused
        self.integrator = wp.sim.XPBDIntegrator() # position-based (XPBD) time integrator
        self.state_0 = self.model.state() #returns state object for model (holds all *time-varying* data for a model)
        self.state_1 = self.model.state() #i.e. body/particle positions and velocities
        if self.caching:
            self.renderer = wp.sim.render.SimRenderer(self.model, str(paths.usd), scaling=1.0)

        if self.sim_use_graph:
            self.create_graph()

        # previous/current particle positions, used by is_static()
        self.last_verts = None
        self.current_verts = wp.array.numpy(self.state_0.particle_q)
||||
    def build_stage(self, config):
        """Assemble the warp sim Model for this garment.

        Loads body and cloth meshes, aligns them vertically, adds the cloth
        with sewing springs, the body collider (optionally pre-smoothed),
        body collision filters, attachment constraints and cloth-reference
        drag shapes, then finalizes the builder into `self.model`.

        Args:
            config (SimConfig): simulation configuration to read settings from.
        """
        # gravity stays 0 here; switched on later after zero-gravity settle steps (see update())
        builder = wp.sim.ModelBuilder(gravity=0.0)
        # --------------- Load body info -----------------
        body_vertices, body_indices, body_faces = self.load_obj(self.paths.in_body_obj)
        body_seg = self.read_json(self.paths.body_seg)

        body_vertices = body_vertices * self.b_scale
        self.shift_y = self.get_shift_param(body_vertices)

        # lift everything so the lowest body point sits at y = 0
        if self.shift_y:
            body_vertices[:, 1] = body_vertices[:, 1] + self.shift_y

        self.v_body = body_vertices
        self.f_body = body_faces
        self.body_indices = body_indices

        # -------------- Load cloth ------------
        cloth_vertices, cloth_indices, cloth_faces = self.load_obj(self.paths.g_box_mesh)
        cloth_seg_dict = assign.read_segmentation(self.paths.g_mesh_segmentation)
        self.cloth_seg_dict = cloth_seg_dict
        stitching_vertices = cloth_seg_dict["stitch"] if 'stitch' in cloth_seg_dict.keys() else []

        cloth_vertices = cloth_vertices * self.c_scale
        # apply the same vertical shift as the body so they stay aligned
        if self.shift_y:
            cloth_vertices[:, 1] = cloth_vertices[:, 1] + self.shift_y
        self.v_cloth_init = cloth_vertices
        self.f_cloth = cloth_faces

        #Load ground truth stitching lengths
        if not self.paths.g_orig_edge_len.exists():
            orig_lens_dict = None
            print("no original length dict found")
        else:
            with open(self.paths.g_orig_edge_len, 'rb') as file:
                orig_lens_dict = pickle.load(file)

        cloth_pos = (0.0, 0.0, 0.0)
        cloth_rot = wp.quat_from_axis_angle(wp.vec3(0.0, 1.0, 0.0), wp.degrees(0.0)) #no rotation, but orientation of cloth in world space

        builder.add_cloth_mesh_sewing_spring(
            pos=cloth_pos,
            rot=cloth_rot,
            scale=1.0,
            vel=(0.0, 0.0, 0.0),
            vertices=cloth_vertices,
            indices=cloth_indices,
            resolution_scale=config.resolution_scale,
            orig_lens=orig_lens_dict,
            stitching_vertices=stitching_vertices,
            density=config.garment_density,
            edge_ke=config.garment_edge_ke,
            edge_kd=config.garment_edge_kd,
            tri_ke=config.garment_tri_ke,
            tri_ka=config.garment_tri_ka,
            tri_kd=config.garment_tri_kd,
            tri_drag=config.garment_tri_drag,
            tri_lift=config.garment_tri_lift,
            radius=config.garment_radius,
            add_springs=True,
            spring_ke=config.spring_ke,
            spring_kd=config.spring_kd,
        )

        # ------------ Add a body -----------
        if self.enable_body_smoothing:
            # Starts sim from smoothed-out body and slowly restores original details
            smoothing_total_smoothing_factor = config.smoothing_total_smoothing_factor
            smoothing_num_steps = config.smoothing_num_steps
            smoothing_recover_start_frame = config.smoothing_recover_start_frame
            smoothing_frame_gap_between_steps = config.smoothing_frame_gap_between_steps
            smoothing_step_size = smoothing_total_smoothing_factor / smoothing_num_steps
            # frames at which one smoothing step is undone (see update())
            self.body_smoothing_frames = [smoothing_recover_start_frame + smoothing_frame_gap_between_steps*i for i in range(smoothing_num_steps + 1)]
            self.body_smoothing_vertices_list = []
            self.body_smoothing_vertices_list = implicit_laplacian_smoothing(body_vertices, body_indices.reshape(-1, 3),
                                                                             step_size=smoothing_step_size,
                                                                             iters=smoothing_num_steps)
            # start from the last entry -- presumably the most-smoothed shape; TODO confirm ordering
            body_vertices = self.body_smoothing_vertices_list.pop()
            self.body_smoothing_frames.pop()
            self.body_indices = body_indices
            # device-side buffer reused by update_smooth_body_shape()
            self.body_vertices_device_buffer = wp.array(body_vertices, dtype=wp.vec3, device=self.device)
            self.v_body = body_vertices

        self.body_mesh = wp.sim.Mesh(body_vertices, body_indices)

        body_pos = wp.vec3(0.0, 0, 0.0)
        body_rot = wp.quat_from_axis_angle(wp.vec3(0.0, 1.0, 0.0), wp.degrees(0.0))

        # Cloth-body segemntation: assign each cloth vertex to a body part
        cloth_reference_labels, body_parts = assign.panel_assignment(
            cloth_seg_dict, cloth_vertices, cloth_indices, wp.transform(cloth_pos, cloth_rot),
            body_seg, body_vertices, body_indices, wp.transform(body_pos, body_rot),
            device=self.device,
            panel_init_labels=self._load_panel_labels(),
            strategy='closest',
            merge_two_legs=True,
            smpl_body=self.paths.use_smpl_seg
        )

        face_filters, particle_filter = [], []
        if config.enable_body_collision_filters:
            v_connectivity = self._build_vert_connectivity(cloth_vertices, cloth_indices)
            # Arm filter for the skirts (filter_id=0): leg-labeled cloth ignores arm faces
            face_filters.append(assign.create_face_filter(
                body_vertices, body_indices, body_seg, ['left_arm', 'right_arm', 'arms'], smpl_body=self.paths.use_smpl_seg))
            particle_filter = assign.assign_face_filter_points(
                cloth_reference_labels,
                ['left_leg', 'right_leg', 'legs'],
                filter_id=0,
                vert_connectivity=v_connectivity
            )

            # Overall filter that ignored internal geometry (filter_id=1)
            face_filters.append(assign.create_face_filter(
                body_vertices, body_indices, body_seg, ['face_internal'], smpl_body=self.paths.use_smpl_seg))
            particle_filter = assign.assign_face_filter_points(
                cloth_reference_labels,
                ['body'],
                filter_id=1,
                vert_connectivity=v_connectivity,
                current_vertex_filter=particle_filter
            )

        self.body_shape_index = 0 # Body is the first collider object to be added
        builder.add_shape_mesh(
            body=-1,
            mesh=self.body_mesh,
            pos=body_pos,
            rot=body_rot,
            scale=wp.vec3(1.0,1.0,1.0), #performed body scaling above
            thickness=config.body_thickness,
            mu=config.body_friction,
            face_filters=face_filters if face_filters else [[]],
            model_particle_filter_ids = particle_filter,
        )

        # ----- Attachment constraint -------

        if config.enable_attachment_constraint:
            self._add_attachment_labels(builder, config)

        # ----- Global collision resolution error ----
        # one reference sub-mesh per segmented body part
        for part in body_parts:
            part_v, part_inds = assign.extract_submesh(body_vertices, body_indices, body_parts[part])
            builder.add_cloth_reference_shape_mesh(
                mesh = wp.sim.Mesh(part_v, part_inds),
                name = part,
                pos = body_pos,
                rot = body_rot,
                scale = (1.0,1.0,1.0) #performed body scaling above
            )
        # NOTE: has a side-effect of filling up model.particle_reference_label array
        self.body_parts_names2index = builder.add_cloth_reference_labels(
            cloth_reference_labels,
            [ # NOTE: Not adding drag between legs and the body as it's useless and contradicts attachment
                ['left_arm', 'body'],
                ['right_arm', 'body'],
                ['left_leg', 'right_leg'],
                ['left_arm', 'left_leg'],
                ['right_arm', 'left_leg'],
                ['left_arm', 'right_leg'],
                ['right_arm', 'right_leg'],
                ['left_arm', 'legs'],
                ['right_arm', 'legs'],
            ]
        )

        # ------- Finalize --------------
        self.model: wp.sim.Model = builder.finalize(device = self.device) #data is transferred to warp tensors, object used in simulation
||||
    def _add_attachment_labels(self, builder, config):
        """Register garment-to-body attachment constraints for every
        configured label found in the garment's vertex label file.

        Reads body measurements (to place the attachment plane) and the
        per-vertex labels. When none of the requested labels exist on this
        garment, disables the attachment constraint in `config`.

        Args:
            builder: wp.sim.ModelBuilder being assembled in build_stage().
            config (SimConfig): provides attachment_labels / stiffness / damping.
        """
        with open(self.paths.in_body_mes, 'r') as file:
            body_dict = yaml.load(file, Loader=yaml.SafeLoader)['body']
        with open(self.paths.g_vert_labels, 'r') as f:
            vertex_labels = yaml.load(f, Loader=yaml.SafeLoader)

        lables_present = False
        for i, attach_label in enumerate(config.attachment_labels):
            # only attach labels that exist on this garment and are non-empty
            if attach_label in vertex_labels.keys() and len(vertex_labels[attach_label]) > 0:
                constaint_verts = vertex_labels[attach_label]
                if attach_label == 'lower_interface':
                    lables_present = True
                    # presumably the waist height above the floor; TODO confirm measurement convention
                    if '_waist_level' in body_dict:
                        waist_level = body_dict['_waist_level']
                    else:
                        waist_level = body_dict['height'] - body_dict['head_l'] - body_dict['waist_line']
                    builder.add_attachment(
                        constaint_verts,
                        wp.vec3(0, waist_level, 0),
                        wp.vec3(0., 1., 0.), # Vertical attachment
                        stiffness = config.attachment_stiffness[i],
                        damping = config.attachment_damping[i]
                    )
                elif attach_label == 'right_collar':
                    lables_present = True

                    # -2: small margin off the neck width -- presumably to pull
                    # the collar slightly inward; TODO confirm units/intent
                    neck_w = body_dict['neck_w'] - 2
                    builder.add_attachment(
                        constaint_verts,
                        wp.vec3(-neck_w / 2, 0, 0),
                        wp.vec3(1., 0., 0.), # Horizontal attachment
                        stiffness = config.attachment_stiffness[i],
                        damping = config.attachment_damping[i]
                    )
                elif attach_label == 'left_collar':
                    lables_present = True

                    neck_w = body_dict['neck_w'] - 2
                    builder.add_attachment(
                        constaint_verts,
                        wp.vec3(neck_w / 2, 0, 0),
                        wp.vec3(-1., 0., 0.), # Horizontal attachment
                        stiffness = config.attachment_stiffness[i],
                        damping = config.attachment_damping[i]
                    )
                elif attach_label == 'strapless_top':
                    lables_present = True

                    # Attach under arm
                    level = body_dict['height'] - body_dict['head_l'] - body_dict['armscye_depth']
                    builder.add_attachment(
                        constaint_verts,
                        wp.vec3(0, level, 0),
                        wp.vec3(0., 1., 0.), # Vertical attachment
                        stiffness = config.attachment_stiffness[i],
                        damping = config.attachment_damping[i]
                    )
                else:
                    print(f'{self.name}::WARNING::Requested attachment label {attach_label} '
                          'is not supported. Skipped')
                    continue

                print(f'Using attachment for {attach_label} with {len(constaint_verts)} vertices')

        if not lables_present:
            # Loaded garment is not labeled -- update config
            config.enable_attachment_constraint = False
            config.update_min_steps()
            print(f'{self.name}::WARNING::Requested attachment labels {config.attachment_labels} '
                  'are not present. Attachment is turned off'
                  )
||||
def _load_panel_labels(self):
|
||||
pattern = BasicPattern(self.paths.g_specs)
|
||||
|
||||
labels = {}
|
||||
for name, panel in pattern.pattern['panels'].items():
|
||||
labels[name] = panel['label'] if 'label' in panel else ''
|
||||
|
||||
return labels
|
||||
|
||||
    def _sim_frame_with_substeps(self):
        """Basic scheme for simulating a frame update"""

        # Generates contact points for the particles and rigid bodies
        # in the model, to be used in the contact dynamics kernel of the
        # integrator (launches kernels). Contacts are computed once per
        # frame, not per substep.
        wp.sim.collide(self.model, self.state_0, self.sim_dt * self.sim_substeps)

        for s in range(self.sim_substeps):
            self.state_0.clear_forces() # set particle and body forces to 0s
            # launches kernels and calculates new particle (and body)
            # positions and velocities for one substep
            self.integrator.simulate(self.model, self.state_0, self.state_1,
                                     self.sim_dt)
            # swap states
            (self.state_0, self.state_1) = (self.state_1, self.state_0) # swap prev, new state
||||
|
||||
    def create_graph(self):
        """(Re-)capture one frame update as a CUDA graph for fast replay.

        Must be re-invoked whenever model state that affects kernel launches
        changes (gravity switch-on, body smoothing step, attachment release
        -- see update()).
        """
        # create update graph
        wp.capture_begin() # Captures all subsequent kernel launches and memory operations on CUDA devices.

        self._sim_frame_with_substeps()

        self.graph = wp.capture_end() # returns a handle to a CUDA graph object that can be launched with :func:`~warp.capture_launch()`
        # do not capture kernel launches anymore
||||
    def update(self, frame):
        """Advance the simulation by one frame.

        Handles the per-frame schedule (gravity switch-on after the
        zero-gravity settling phase, body-smoothing recovery steps,
        attachment release) and re-captures the CUDA graph after each such
        model change, then runs the frame and refreshes vertex snapshots.

        Args:
            frame: current frame index.
        """
        with wp.ScopedTimer("simulate", print=False, active=True):
            if self.model.enable_particle_particle_collisions:
                # FIXME: Produces cuda errors when activated together with "enable_cloth_reference_drag"
                # Reason is unknown. Or not?
                self.model.particle_grid.build(self.state_0.particle_q, self.model.particle_max_velocity * 2.0) if False else None  # NOTE(review): placeholder removed -- see below
        # (reverted -- original line kept verbatim below)
|
||||
    def update_smooth_body_shape(self):
        """Advance one body-smoothing recovery step.

        Swaps in the next (less smoothed) body shape from the precomputed
        list, updates the collider mesh in place on the device and refits
        its acceleration structure; re-renders the body template when caching.
        """
        body_vertices = self.body_smoothing_vertices_list.pop()
        self.v_body = body_vertices
        # host -> device transfer into the persistent buffer
        wp.copy(self.body_vertices_device_buffer,
                wp.array(body_vertices, dtype=wp.vec3, device='cpu', copy=False))

        # Apply new vertices and refit the sructures
        wp.launch(
            kernel=replace_mesh_points,
            dim = len(body_vertices),
            inputs=[self.body_mesh.mesh.id,
                    self.body_vertices_device_buffer],
            device=self.device
        )
        self.body_mesh.mesh.refit()

        #update render
        if self.caching:
            self.renderer.render_mesh(
                f'shape_{self.body_shape_index}',
                body_vertices,
                None,
                is_template=True,
            )
||||
    def render_usd_frame(self, is_live=False):
        """Record the current state as one USD frame (requires caching=True).

        Args:
            is_live: when True, renders at time 0 (live preview) and skips
                saving the stage to disk.
        """
        with wp.ScopedTimer("render", print=False, active=True):
            start_time = 0.0 if is_live else self.usd_frame_time

            self.renderer.begin_frame(start_time)
            self.renderer.render(self.state_0)
            self.renderer.end_frame()

            # advance the USD timeline by one frame
            self.usd_frame_time += 1.0 / self.sim_fps
            if not is_live:
                self.renderer.save()
||||
    def run_frame(self):
        """Simulate the current frame (self.frame) and, when caching,
        record it to the USD stage."""
        self.update(self.frame)

        # NOTE: USD Render
        if self.caching:
            self.render_usd_frame()
||||
def read_json(self, path):
|
||||
with open(path, 'r') as f:
|
||||
data = json.load(f)
|
||||
return data
|
||||
|
||||
    def load_obj(self, path):
        """Read a triangle mesh from an .obj file via libigl.

        Args:
            path: file path (anything convertible to str).

        Returns:
            tuple: (vertices (N, 3) float array,
                    flattened face indices (3*F,) int array,
                    faces (F, 3) int array)
        """
        v, f = igl.read_triangle_mesh(str(path))
        return v, f.flatten(), f
|
||||
def get_shift_param(self,body_vertices):
|
||||
v_body_arr = np.array(body_vertices)
|
||||
min_y = (min(v_body_arr[:, 1]))
|
||||
if min_y < 0:
|
||||
return abs(min_y)
|
||||
return 0.0
|
||||
|
||||
def calc_norm(self, a, b, c):
|
||||
"""
|
||||
This function calculates the norm based on the three points a, b, and c.
|
||||
Input:
|
||||
* self (BoxMesh object): Instance of BoxMesh class from which the function is called
|
||||
* a (ndarray): first point taking part in norm calculation
|
||||
* b (ndarray): second point taking part in norm calculation
|
||||
* c (ndarray): third point taking part in norm calculation
|
||||
Output:
|
||||
* n_normalized (bool): norm(a,b,c) with length 1
|
||||
"""
|
||||
# Calculate the vectors AB and AC
|
||||
AB = np.array(b - a)
|
||||
AC = np.array(c - a)
|
||||
|
||||
# Calculate the cross product of AB and AC
|
||||
n = np.cross(AB, AC)
|
||||
n_normalized = n / np.linalg.norm(n)
|
||||
|
||||
return n_normalized
|
||||
|
||||
    def calc_vertex_norms(self):
        """Per-vertex normals of the simulated cloth, averaged over faces.

        Accumulates each face's unit normal onto its three vertices, using a
        4th homogeneous component as a per-vertex face counter, then divides
        by that counter to average.

        Returns:
            ndarray: (num_vertices, 3) averaged normals.
                NOTE(review): the mean of unit normals is not re-normalized
                here -- confirm consumers tolerate non-unit length.
        """
        vertex_normals = np.zeros((len(self.v_cloth_init), 4))
        for face in self.f_cloth:
            v0, v1, v2 = np.array(self.current_verts)[face]
            face_norm = list(self.calc_norm(v0, v1, v2))
            # [nx, ny, nz, 1]: the trailing 1 counts contributing faces
            temp_update = face_norm + [1]
            vertex_normals[face] += temp_update

        # divide accumulated normals by the per-vertex face count (4th column)
        vertex_normals = vertex_normals[:, :3] / (vertex_normals[:, 3][:, np.newaxis])
        return vertex_normals
|
||||
    def save_frame(self, save_v_norms=False):
        """Save current garment state as an obj file,
        re-using all the information from boxmesh
        except for vertices and vertex normals (e.g. textures and faces)

        Args:
            save_v_norms: when True, recompute vertex normals from the
                simulated geometry and write them as 'vn' records.
        """

        # NOTE: igl routine is not used here because it cannot write any extra info (e.g. texture coords) into obj

        # stores v, f, vf and vn
        # Save cloth with texture and normals
        if save_v_norms:
            vertex_normals = self.calc_vertex_norms()

        v_cloth_sim = self.current_verts
        # Store simulated cloth mesh
        # Read the boxmesh file
        with open(self.paths.g_box_mesh, 'r') as obj_file:
            lines = obj_file.readlines()

        # Modify the vertex positions and normals, if required
        with open(self.paths.g_sim, 'w') as obj_file:
            v_idx = 0
            vn_idx = 0
            for line in lines:
                if line.startswith('v '):
                    # replace rest-pose position with the simulated one
                    new_vertex = v_cloth_sim[v_idx]
                    obj_file.write(f'v {new_vertex[0]} {new_vertex[1]} {new_vertex[2]}\n')
                    v_idx += 1
                elif line.startswith('vn '):
                    # NOTE(review): when save_v_norms is False, 'vn ' lines are
                    # silently dropped while faces may still reference normal
                    # indices -- confirm downstream .obj consumers tolerate this.
                    if save_v_norms:
                        new_vertex = vertex_normals[vn_idx]
                        obj_file.write(f'vn {new_vertex[0]} {new_vertex[1]} {new_vertex[2]}\n')
                        vn_idx += 1
                else:
                    # everything else (vt, f, comments, ...) is copied verbatim
                    obj_file.write(line)
||||
def is_static(self):
|
||||
"""
|
||||
Checks whether garment is in the static equilibrium
|
||||
Compares current state with the last recorded state
|
||||
"""
|
||||
threshold = self.config.static_threshold
|
||||
non_static_percent = self.config.non_static_percent
|
||||
|
||||
curr_verts_arr = self.current_verts
|
||||
last_verts_arr = self.last_verts
|
||||
|
||||
if self.last_verts is None: # first iteration
|
||||
return False, len(curr_verts_arr)
|
||||
|
||||
# Compare L1 norm per vertex
|
||||
# Checking vertices change is the same as checking if velocity is zero
|
||||
diff = np.abs(curr_verts_arr - last_verts_arr)
|
||||
diff_L1 = np.sum(diff, axis=1)
|
||||
|
||||
non_static_len = len(
|
||||
diff_L1[diff_L1 > threshold]) # compare vertex-wise to allow accurate control over outliers
|
||||
|
||||
if non_static_len == 0 or (non_static_len < len(curr_verts_arr) * 0.01 * non_static_percent):
|
||||
print('\nStatic with {} non-static vertices out of {}'.format(non_static_len, len(curr_verts_arr)))
|
||||
# Store last frame
|
||||
return True, non_static_len
|
||||
else:
|
||||
return False, non_static_len
|
||||
|
||||
def count_self_intersections(self):
    """Count cloth self-intersections on the GPU.

    Launches the module-level `count_self_intersections` warp kernel with
    one thread per spring/edge; hits are accumulated into the
    single-element array `model.particle_self_intersection_count`.

    Returns:
        int: total number of detected self-intersections
        (0 when the model has no particles or no springs).
    """
    model = self.model

    if model.particle_count and model.spring_count:
        # Reset the device-side accumulator before re-counting
        model.particle_self_intersection_count.zero_()
        wp.launch(
            # NOTE: resolves to the module-level warp kernel, not this method
            kernel=count_self_intersections,
            dim=model.spring_count,
            inputs=[
                model.spring_indices,
                model.particle_shape.id,
            ],
            outputs=[
                model.particle_self_intersection_count
            ],
            device=model.device,
        )
        # Copy the one-element counter back to the host
        return int(wp.array.numpy(self.model.particle_self_intersection_count)[0])
    else:
        return 0
|
||||
|
||||
def count_body_intersections(self):
    """Count cloth-body intersections on the GPU.

    Launches the `count_body_cloth_intersections` warp kernel with one
    thread per spring/edge against the body collision shape; hits are
    accumulated into `model.body_cloth_intersection_count`.

    Returns:
        int: total number of detected body-cloth intersections
        (0 when the model has no particles).
    """
    model = self.model

    if model.particle_count:
        # Reset the device-side accumulator before re-counting
        model.body_cloth_intersection_count.zero_()
        wp.launch(
            kernel=count_body_cloth_intersections,
            # NOTE(review): guard checks particle_count only; a zero
            # spring_count would launch an empty kernel -- confirm intended
            dim=model.spring_count,
            inputs=[
                model.spring_indices,
                model.particle_shape.id,
                model.shape_geo,
                self.body_shape_index
            ],
            outputs=[
                model.body_cloth_intersection_count
            ],
            device=model.device,
        )
        # Copy the one-element counter back to the host
        return int(wp.array.numpy(self.model.body_cloth_intersection_count)[0])
    else:
        return 0
|
||||
|
||||
def _build_vert_connectivity(self, vertices, indices):
|
||||
vert_connectivity = [[] for _ in range(len(vertices))]
|
||||
|
||||
for face_id in range(int(len(indices) / 3)):
|
||||
v1, v2, v3 = indices[face_id*3 + 0], indices[face_id*3 + 1], indices[face_id*3 + 2]
|
||||
|
||||
vert_connectivity[v1].append(v2)
|
||||
vert_connectivity[v1].append(v3)
|
||||
|
||||
vert_connectivity[v2].append(v1)
|
||||
vert_connectivity[v2].append(v3)
|
||||
|
||||
vert_connectivity[v3].append(v1)
|
||||
vert_connectivity[v3].append(v2)
|
||||
|
||||
return vert_connectivity
|
||||
199
pygarment/meshgen/render/pythonrender.py
Normal file
199
pygarment/meshgen/render/pythonrender.py
Normal file
@@ -0,0 +1,199 @@
|
||||
import os
|
||||
import platform
|
||||
if platform.system() == 'Linux':
|
||||
os.environ["PYOPENGL_PLATFORM"] = "egl"
|
||||
import numpy as np
|
||||
import trimesh
|
||||
import pyrender
|
||||
from PIL import Image
|
||||
|
||||
from pygarment.meshgen.sim_config import PathCofig
|
||||
|
||||
|
||||
def rotate_matrix_y(matrix, angle_deg):
    """Rotate a 4x4 homogeneous transform about the world y-axis.

    Args:
        matrix: 4x4 homogeneous transform (e.g. a camera pose).
        angle_deg: rotation angle in degrees.

    Returns:
        np.ndarray: the rotated 4x4 transform. The rotation is applied
        on the left (world-space rotation).
    """
    angle = np.radians(angle_deg)
    cos_a, sin_a = np.cos(angle), np.sin(angle)

    # Homogeneous rotation matrix around the y-axis for the given angle
    # (the original comment claimed a fixed 180-degree rotation -- it is not)
    rotation = np.array([
        [cos_a, 0, sin_a, 0],
        [0, 1, 0, 0],
        [-sin_a, 0, cos_a, 0],
        [0, 0, 0, 1]
    ])

    return rotation @ matrix
|
||||
|
||||
def rotate_matrix_x(matrix, angle_deg):
    """Rotate a 4x4 homogeneous transform about the world x-axis.

    Args:
        matrix: 4x4 homogeneous transform (e.g. a camera pose).
        angle_deg: rotation angle in degrees.

    Returns:
        np.ndarray: the rotated 4x4 transform. The rotation is applied
        on the left (world-space rotation).
    """
    angle = np.radians(angle_deg)
    cos_a, sin_a = np.cos(angle), np.sin(angle)

    # Homogeneous rotation matrix around the x-axis for the given angle
    # (the original comment claimed a fixed 180-degree y-rotation -- both wrong)
    rotation = np.array([
        [1, 0, 0, 0],
        [0, cos_a, -sin_a, 0],
        [0, sin_a, cos_a, 0],
        [0, 0, 0, 1]
    ])

    return rotation @ matrix
|
||||
|
||||
def get_bounding_box_edges(mesh):
    """Return the 8 corner points of the mesh's axis-aligned bounding box.

    Corners are ordered with x varying fastest, then y, then z
    (min corner first, max corner last).
    """
    lo = mesh.bounds[0]
    hi = mesh.bounds[1]

    return [
        lo,
        [hi[0], lo[1], lo[2]],
        [lo[0], hi[1], lo[2]],
        [hi[0], hi[1], lo[2]],
        [lo[0], lo[1], hi[2]],
        [hi[0], lo[1], hi[2]],
        [lo[0], hi[1], hi[2]],
        hi
    ]
|
||||
|
||||
def create_camera(pyrender, pyrender_body_mesh, scene, side, camera_location=None):
    """Add a perspective camera to the scene, framed on the body mesh.

    Args:
        pyrender: the pyrender module (passed in by the caller).
        pyrender_body_mesh: body mesh used to auto-place the camera
            when no explicit location is given.
        scene: pyrender.Scene the camera is added to (side effect).
        side: 'back' flips the camera 180 degrees around the y-axis.
        camera_location: optional explicit (x, y, z) camera position;
            when None, a position is derived from the body bounding box.
    """

    # Create a camera
    y_fov = np.pi / 6.
    camera = pyrender.PerspectiveCamera(yfov=y_fov)


    if camera_location is None:
        # Evaluate w.r.t. body

        fov = 50 # Set your desired field of view in degrees
        # NOTE(review): the framing distance uses fov=50 while the camera
        # itself uses yfov=pi/6 (30 deg) -- looks inconsistent, confirm
        # the intended framing margin

        # # Calculate the bounding box center of the mesh
        bounding_box_center = pyrender_body_mesh.bounds.mean(axis=0)

        # Calculate the diagonal length of the bounding box
        diagonal_length = np.linalg.norm(pyrender_body_mesh.bounds[1] - pyrender_body_mesh.bounds[0])

        # Calculate the distance of the camera from the object based on the diagonal length
        distance = 1.5 * diagonal_length / (2 * np.tan(np.radians(fov / 2)))

        # Move back along z (also mutates bounding_box_center -- it aliases it,
        # which is harmless here since the center is recomputed per call)
        camera_location = bounding_box_center
        camera_location[-1] += distance

    # Calculate the camera pose (translation only, then tilted/orbited below)
    camera_pose = np.array([
        [1.0, 0.0, 0.0, camera_location[0]],
        [0.0, 1.0, 0.0, camera_location[1]],
        [0.0, 0.0, 1.0, camera_location[2]],
        [0.0, 0.0, 0.0, 1.0]
    ])

    # Slight downward tilt and side orbit for a more natural view
    camera_pose = rotate_matrix_x(camera_pose, -15)
    camera_pose = rotate_matrix_y(camera_pose, 20)
    if side == 'back':
        camera_pose = rotate_matrix_y(camera_pose, 180)

    # Set camera's pose in the scene
    scene.add(camera, pose=camera_pose)
|
||||
|
||||
def create_lights(scene, intensity=30.0):
    """Place five white point lights around the scene.

    Args:
        scene: pyrender.Scene to add the lights to (side effect).
        intensity: intensity shared by all point lights.
    """
    # Hand-picked positions surrounding the subject
    placements = (
        (1.60614, 1.5341, 1.23701),
        (1.31844, 1.92831, -2.52238),
        (-2.80522, 1.2594, 2.34624),
        (0.160261, 1.81789, 3.52215),
        (-2.65752, 1.41194, -1.26328),
    )
    white = [1.0, 1.0, 1.0]

    # Add lights to the scene
    for position in placements:
        point_light = pyrender.PointLight(color=white, intensity=intensity)
        pose = np.eye(4)
        pose[:3, 3] = np.array(position)
        scene.add(point_light, pose=pose)
|
||||
|
||||
def render(
        pyrender_garm_mesh, pyrender_body_mesh,
        side,
        paths: PathCofig,
        render_props=None
):
    """Render one view of garment + body to a PNG file.

    Args:
        pyrender_garm_mesh: garment mesh to draw.
        pyrender_body_mesh: body mesh to draw (also used for camera framing).
        side: view label ('front'/'back'), forwarded to create_camera and
            used in the output filename.
        paths: PathCofig providing the render output path.
        render_props: optional dict; recognized keys are
            'resolution' (w, h) and 'front_camera_location' (x, y, z).
    """
    if render_props and 'resolution' in render_props:
        view_width, view_height = render_props['resolution']
    else:
        view_width, view_height = 1080, 1080
    # Create a pyrender scene
    scene = pyrender.Scene(bg_color=(1., 1., 1., 0.))  # Transparent!

    # Add the garment and body meshes to the scene
    scene.add(pyrender_garm_mesh)
    scene.add(pyrender_body_mesh)

    # FIX: guard against render_props=None (same pattern as the resolution
    # check above) -- `'key' in None` raised TypeError with the default args
    camera_location = render_props.get('front_camera_location') if render_props else None
    create_camera(
        pyrender, pyrender_body_mesh, scene, side,
        camera_location=camera_location
    )

    create_lights(scene, intensity=80.)

    # Create an offscreen renderer at the requested resolution
    renderer = pyrender.OffscreenRenderer(viewport_width=view_width, viewport_height=view_height)

    # Render the scene with an alpha channel
    color, _ = renderer.render(scene, flags=pyrender.RenderFlags.RGBA)

    image = Image.fromarray(color)
    image.save(paths.render_path(side), "PNG")
|
||||
|
||||
def load_meshes(paths:PathCofig, body_v, body_f):
    """Load the body and simulated garment meshes as pyrender meshes.

    Args:
        paths: PathCofig providing the simulated garment obj path (g_sim).
        body_v, body_f: body mesh vertices and faces.

    Returns:
        (pyrender_garm_mesh, pyrender_body_mesh) ready to add to a scene.
    """
    # Load body mesh
    body_mesh = trimesh.Trimesh(body_v, body_f)
    # Scale cm -> m (same scaling as the garment below)
    body_mesh.vertices = body_mesh.vertices / 100
    # Color body mesh
    body_material = pyrender.MetallicRoughnessMaterial(
        baseColorFactor=(0.0, 0.0, 0.0, 1.0),   # RGB color, Alpha
        metallicFactor=0.658,   # Range: [0.0, 1.0]
        roughnessFactor=0.5   # Range: [0.0, 1.0]
    )
    pyrender_body_mesh = pyrender.Mesh.from_trimesh(body_mesh, material=body_material)


    #Load garment mesh
    garm_mesh = trimesh.load_mesh(str(paths.g_sim))  # NOTE: Includes the texture
    garm_mesh.vertices = garm_mesh.vertices / 100  # scale to m

    # Material adjustments
    material = garm_mesh.visual.material.to_pbr()
    material.baseColorFactor = [1., 1., 1., 1.]
    material.doubleSided = True  # color both face sides
    # NOTE remove transparency -- add white background just in case
    white_back = Image.new('RGBA', material.baseColorTexture.size, color=(255, 255, 255, 255))
    white_back.paste(material.baseColorTexture)
    material.baseColorTexture = white_back.convert('RGB')

    garm_mesh.visual.material = material

    # smooth=True: use smooth (vertex-normal) shading for the garment
    pyrender_garm_mesh = pyrender.Mesh.from_trimesh(garm_mesh, smooth=True)

    return pyrender_garm_mesh, pyrender_body_mesh
|
||||
|
||||
def render_images(paths: PathCofig, body_v, body_f, render_props):
    """Render the simulated garment on the body from every requested side.

    Args:
        paths: PathCofig with input/output locations.
        body_v, body_f: body mesh vertices and faces.
        render_props: render settings; 'sides' lists the views to render.
    """
    garm_mesh, body_mesh = load_meshes(paths, body_v, body_f)

    for view_side in render_props['sides']:
        render(garm_mesh, body_mesh, view_side, paths, render_props)
|
||||
|
||||
|
||||
307
pygarment/meshgen/render/texture_utils.py
Normal file
307
pygarment/meshgen/render/texture_utils.py
Normal file
@@ -0,0 +1,307 @@
|
||||
"""Routines for processing UV coordinated for garments and generating texture maps"""
|
||||
import numpy as np
|
||||
import igl
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib
|
||||
from pathlib import Path
|
||||
|
||||
# SECTION UV islands texture creation
|
||||
def texture_mesh_islands(
        texture_coords, face_texture_coords,
        out_texture_image_path: Path,
        out_fabric_tex_image_path: Path = None,
        out_mtl_file_path: Path = None,
        boundary_width=0.3,
        dpi=1200,
        background_img_path=None,
        background_resolution=1.,
        uv_padding=3,
        mat_name='islands_texture'
):
    """Lay out UV islands, render them into texture image(s), and
    optionally write a matching .mtl file.

    Params:
        * texture_coords, face_texture_coords -- UV vertices and the
          per-face UV index triples
        * out_texture_image_path -- where to save the plain island texture
        * out_fabric_tex_image_path -- optional second texture with a
          fabric background image composited in
        * out_mtl_file_path -- optional .mtl to write (references the
          fabric texture when present, otherwise the plain one)
        * uv_padding -- spacing between islands; must match the padding
          used for normalization

    Returns updated uv coordinates (properly normalized and aligned
    with the created texture).
    """
    all_uvs, boundary_uv_to_draw = unwarp_UV(texture_coords, face_texture_coords, padding=uv_padding)

    uv_list, width, height = normalize_UVs(all_uvs, axis_padding=uv_padding)  # NOTE !! Axis padding should match the uv padding

    # Create image
    create_UV_island_texture(
        boundary_uv_to_draw, width, height,
        texture_image_path=out_texture_image_path,
        boundary_width=boundary_width,
        dpi=dpi,
        preserve_alpha=True
    )

    # Create image with fabric background
    if out_fabric_tex_image_path is not None:
        create_UV_island_texture(
            boundary_uv_to_draw, width, height,
            texture_image_path=out_fabric_tex_image_path,
            boundary_width=boundary_width,
            dpi=dpi,
            background_img_path=background_img_path,
            background_resolution=background_resolution,
            preserve_alpha=False
        )

    # Save mtl if requested
    if out_mtl_file_path:
        save_texture_mtl(
            out_mtl_file_path,
            out_fabric_tex_image_path.name if out_fabric_tex_image_path is not None else out_texture_image_path.name,
            mat_name=mat_name)

    return uv_list
|
||||
|
||||
def _uv_connected_components(face_texture_coords):
    """Split the UV face list into connected components (UV islands).

    Returns:
        (vert_components, face_components, num_ccs): per-vertex and
        per-face component ids from libigl, plus the component count.
    """
    # Connected components over the UV face topology
    per_vert = igl.vertex_components(face_texture_coords)
    per_face = igl.facet_components(face_texture_coords)

    # Component ids are consecutive integers starting at 0
    component_count = max(per_face) + 1

    return per_vert, per_face, component_count
|
||||
|
||||
def unwarp_UV(texture_coords, face_texture_coords, padding=3):
    """Lay out UV islands on a grid of columns with padding between them.

    Islands are packed column by column (roughly sqrt(n) islands per
    column), each translated so they do not overlap.

    Args:
        texture_coords: (N, 2) UV vertex positions.
        face_texture_coords: per-face UV index triples.
        padding: gap inserted between islands and columns.

    Returns:
        (all_uvs, boundary_uv_to_draw): translated positions for all UV
        vertices, and per-island boundary loops (for drawing).
    """
    # Unwrap uvs for each connected component------------------------

    vert_components, face_components, num_ccs = _uv_connected_components(face_texture_coords)

    all_uvs = []  # transform all UVs to update obj file
    boundary_uv_to_draw = []  # only draw the boundary UVs

    # Running placement cursor (grows down the column, then jumps right)
    translate_Y = 0
    translate_X = 0

    shells_per_row = int(num_ccs ** 0.5)
    column_x_shift = 0  # widest island seen in the current column

    # Loop through each connected component
    for i in range(num_ccs):

        # Get faces and vertices of connected component
        faces_in_cc = np.where(face_components == i)[0]
        face_vts_in_cc = face_texture_coords[faces_in_cc]

        # get all vertices of connected component
        verts_in_cc = np.where(vert_components == i)[0]

        all_vert_pos = texture_coords[verts_in_cc]

        # Find boundary loop
        # NOTE(review): boundary_loop indices appear to index into the
        # global texture_coords -- confirm against libigl's convention
        bound_verts = igl.boundary_loop(face_vts_in_cc)
        bound_vert_pos = texture_coords[bound_verts]

        # Shift component by bounding box
        bbox = bound_vert_pos.min(axis=0), bound_vert_pos.max(axis=0)
        bbox_len_Y = (bbox[1][1] - bbox[0][1])
        bbox_len_X = (bbox[1][0] - bbox[0][0])

        if (i % shells_per_row == 0):
            # Start new column
            translate_Y = padding
            translate_X += (column_x_shift + padding)
            column_x_shift = 0  # restart BBOX collection

        # Update shift
        column_x_shift = max(bbox_len_X, column_x_shift)

        # translate boundary positions
        verts_translated_bound = [(x + translate_X, y + translate_Y) for x, y in bound_vert_pos]
        boundary_uv_to_draw.append(verts_translated_bound)

        # translate all positions
        verts_translated = [(x + translate_X, y + translate_Y) for x, y in all_vert_pos]
        all_uvs.extend(verts_translated)

        # Advance the cursor past this island within the column
        translate_Y = translate_Y + bbox_len_Y + padding

    return all_uvs, boundary_uv_to_draw
|
||||
|
||||
def normalize_UVs(all_uvs, axis_padding=3):
    """Normalize UV coordinates with padding-aware per-axis scaling.

    Args:
        all_uvs: sequence of (u, v) pairs in unwrapped-layout units.
        axis_padding: padding added to each axis maximum before
            normalizing (should match the unwrap padding).

    Returns:
        (uv_list, norm_x, norm_y): normalized (N, 2) float array and the
        per-axis divisors (i.e. the padded layout width and height).
    """
    # FIX: the original aliased the raw array ('uv_list = uv_list_raw')
    # and wrote into it in place -- with integer input the division
    # results were silently truncated. Build an explicit float copy.
    uv_list = np.array(all_uvs, dtype=float)

    # Divide each axis by its padded maximum
    norm_x = uv_list[:, 0].max() + axis_padding
    uv_list[:, 0] /= norm_x
    norm_y = uv_list[:, 1].max() + axis_padding
    uv_list[:, 1] /= norm_y

    return uv_list, norm_x, norm_y
|
||||
|
||||
def create_UV_island_texture(
        boundary_uv_to_draw,
        width, height,
        texture_image_path,
        boundary_width=0.3,
        boundary_color='black',
        dpi=1200,
        color_alpha=0.65,
        background_alpha=0.8,
        background_img_path=None,
        background_resolution=5,
        preserve_alpha=True
):
    """Create texture image from the set of UV boundary loops (e.g. sewing pattern panels).
    It renders the border of the loops and fills them in with color

    Params:
        * boundary_uv_to_draw -- 2D list -- sequence of 2D vertices on each of the boundaries. The order is IMPORTANT. The vertices will be connected
            by boundary edges sequentially
        * width, height -- the dimentions of the UV map
        * texture_image_path -- filepath to same a texture image to
        * boundary_width -- width of the boundary outline
        * dpi -- resolution of the output image
        * color_alpha -- fill transparency for blending with the background
        * background_img_path -- optional fabric image drawn behind the islands
        * background_resolution -- crop scale applied to the background image
        * preserve_alpha -- save with a transparent background when True
    """
    n_components = len(boundary_uv_to_draw)

    # Figure size
    fig, ax = plt.subplots()
    fig.set_size_inches(width / 100, height / 100)  # width & height are usually given in cm

    # Colors -- one sample per island from a smooth colormap
    shift = 0.17
    divisor = max(5, n_components)
    cmap = matplotlib.colormaps['twilight'] # copper cool spring winter twilight # Using smooth Matplotlib colormaps
    color_sample = [cmap((1 - shift) * id / divisor) for id in range(divisor)]

    # Background -- garment style
    if background_img_path is not None:
        back_crop_scale = background_resolution
        back_img = plt.imread(background_img_path)
        # NOTE(review): the crop uses width for rows and height for columns;
        # image arrays are (rows=height, cols=width) -- confirm intended
        ax.imshow(
            back_img[:int(width * back_crop_scale), :int(height * back_crop_scale), :],
            extent=[0, width, 0, height],
            alpha=background_alpha,
            aspect='equal'
        )

    # Draw the UV island boundaries and fill them up
    for i in range(n_components):
        # Close each boundary loop by repeating its first vertex
        polygon_x = [vert[0] for vert in boundary_uv_to_draw[i]]
        polygon_x.append(polygon_x[0])  # Loop
        polygon_y = [vert[1] for vert in boundary_uv_to_draw[i]]
        polygon_y.append(polygon_y[0])  # Loop

        color = list(color_sample[i])
        color[-1] = color_alpha  # Alpha - transparency for blending with backround

        plt.fill(polygon_x, polygon_y,
                 color=color,
                 edgecolor=boundary_color, linestyle='-', linewidth=boundary_width / 2  # Boundary stylings
                 )

    ax.set_aspect('equal')

    # Set the axis to be tight
    ax.set_xlim([0, width])
    ax.set_ylim([0, height])

    # Hide the axis
    plt.axis('off')

    # Save image
    plt.savefig(texture_image_path, dpi=dpi, bbox_inches='tight', pad_inches=0, transparent=preserve_alpha)

    # Cleanup -- release the figure to avoid accumulating open figures
    plt.close()
|
||||
|
||||
# !SECTION
|
||||
|
||||
# SECTION Saving textures information to files
|
||||
def save_texture_mtl(mtl_file_path, texture_image_name, mat_name='uv_texture'):
    """Write a minimal .mtl file mapping `texture_image_name` as the
    diffuse texture of a single material.

    Returns:
        str: the material name, so callers can reference it in the obj.
    """
    contents = (
        f'newmtl {mat_name}\n'
        'Ns 0.000000\n'
        'Ka 1.000000 1.000000 1.000000\n'
        'Ks 0.000000 0.000000 0.000000\n'
        'Ke 0.000000 0.000000 0.000000\n'
        'Ni 1.000000\n'
        'd 1.000000\n'
        'illum 1\n'
        f'map_Kd {texture_image_name}\n'
    )

    with open(mtl_file_path, 'w') as mtl_file:
        mtl_file.write(contents)

    return mat_name
|
||||
|
||||
def save_obj(
        output_file_path,
        vertices, faces_with_texture, uv_list,
        vert_normals=None, mtl_file_name=None, mat_name=None):
    """Save an obj file with a texture information (if provided).

    Writes vertices, UVs, optional vertex normals, a smoothing group,
    the optional material reference, and the faces (with normal indices
    only when normals are given).
    """
    has_normals = vert_normals is not None

    with open(output_file_path, 'w') as out:
        if mtl_file_name is not None:
            out.write(f'mtllib {mtl_file_name}\n')

        out.writelines(f"v {v[0]} {v[1]} {v[2]}\n" for v in vertices)
        out.writelines(f"vt {vt[0]} {vt[1]}\n" for vt in uv_list)

        if has_normals:
            out.writelines(f"vn {vn[0]} {vn[1]} {vn[2]}\n" for vn in vert_normals)

        out.write('s 1\n')
        if mtl_file_name is not None:
            out.write(f'usemtl {mat_name}\n')

        # Faces reference vertex/texture (and normal) ids, 1-based per obj spec
        for v_id0, tex_id0, v_id1, tex_id1, v_id2, tex_id2 in faces_with_texture:
            if has_normals:
                out.write(f"f {v_id0 + 1}/{tex_id0 + 1}/{v_id0 + 1} "
                          f"{v_id1 + 1}/{tex_id1 + 1}/{v_id1 + 1} "
                          f"{v_id2 + 1}/{tex_id2 + 1}/{v_id2 + 1}\n")
            else:
                out.write(f"f {v_id0 + 1}/{tex_id0 + 1} "
                          f"{v_id1 + 1}/{tex_id1 + 1} "
                          f"{v_id2 + 1}/{tex_id2 + 1}\n")
|
||||
|
||||
def add_texture_to_obj(obj_file_path, output_file_path, uv_list, mtl_file_name, mat_name):
    """Rewrite an obj file with updated UVs and material references.

    Replaces every 'vt' line with coordinates from uv_list (in order),
    points 'mtllib' at mtl_file_name (inserting one at the top if it is
    absent), and inserts 's'/'usemtl' lines before the first face.
    """
    # Update OBJ-----------------------------------------------------

    with open(obj_file_path, 'r') as src:
        source_lines = src.readlines()

    result = []
    next_uv = 0
    saw_mtllib = False
    material_inserted = False

    material_header = ['s 1\n', f'usemtl {mat_name}\n']

    for src_line in source_lines:
        if src_line.startswith('vt '):
            # Replace the UV coordinates with the recomputed ones
            uv = uv_list[next_uv]
            result.append(f'vt {uv[0]:.6f} {uv[1]:.6f}\n')
            next_uv += 1
        elif src_line.startswith('mtllib '):
            # Re-point the material library reference at the new MTL file
            result.append(f'mtllib {mtl_file_name}\n')
            saw_mtllib = True
        elif src_line.startswith('f') and not material_inserted:
            # Smoothing group + material statement go right before the first face
            result.extend(material_header)
            material_inserted = True
            result.append(src_line)
        else:
            result.append(src_line)

    # No mtllib statement in the source: add one at the very top
    if not saw_mtllib:
        result.insert(0, f'mtllib {mtl_file_name}\n')

    with open(output_file_path, 'w') as dst:
        dst.writelines(result)
|
||||
|
||||
# !SECTION
|
||||
285
pygarment/meshgen/sim_config.py
Normal file
285
pygarment/meshgen/sim_config.py
Normal file
@@ -0,0 +1,285 @@
|
||||
from pathlib import Path
|
||||
import yaml
|
||||
from datetime import datetime
|
||||
|
||||
from pygarment.data_config import Properties
|
||||
|
||||
class PathCofig:
    """Routines for getting paths to various relevant objects with standard names"""
    def __init__(self,
                 in_element_path, out_path, in_name, out_name=None,
                 body_name='', samples_name='', default_body=True,
                 smpl_body=False,
                 add_timestamp=False):
        """Set up all standard input/output paths for one garment element.

        * in_element_path -- folder with the input garment element
        * out_path -- dataset level output path
        * in_name / out_name -- garment name tags (out_name defaults to in_name)
        * body_name -- specify to indicate use of default bodies
        * samples_name -- specify to indicate use of body sampling (reading body name from measurments file)
        * default_body -- prefer the default bodies folder even when samples_name is set
        * smpl_body -- use the SMPL vertex segmentation file instead of the default one
        * add_timestamp -- append a timestamp to the output folder name

        NOTE: has side effects -- reads './system.json' and the body
        measurement file, and creates the output folder.
        """

        self._system = Properties('./system.json')  # TODOlOW More stable path?
        self._body_name = body_name
        self._samples_folder_name = samples_name
        self._use_default_body = default_body
        self.use_smpl_seg = smpl_body

        # Tags
        if out_name is None:
            out_name = in_name
        self.in_tag = in_name
        self.out_folder_tag = f'{out_name}_{datetime.now().strftime("%y%m%d-%H-%M-%S")}' if add_timestamp else out_name
        self.sim_tag = out_name
        self.boxmesh_tag = out_name

        # Base paths
        self.input = Path(in_element_path)
        self.out = out_path
        self.out_el = Path(out_path) / self.out_folder_tag
        self.out_el.mkdir(parents=True, exist_ok=True)

        # Individual file paths
        self._update_in_paths()
        self._update_boxmesh_paths()
        self.update_in_copies_paths()
        self.update_sim_paths()

    def _update_in_paths(self):
        """Resolve input paths: body mesh/measurements, garment spec, segmentation."""

        # Base path
        if not self._samples_folder_name or self._use_default_body:
            self.bodies_path = Path(self._system['bodies_default_path'])
        else:
            self.bodies_path = Path(self._system['body_samples_path']) / self._samples_folder_name / 'meshes'

        # Body measurements
        if not self._samples_folder_name:
            self.in_body_mes = self.bodies_path / f'{self._body_name}.yaml'
        else:
            self.in_body_mes = self.input / 'body_measurements.yaml'

        with open(self.in_body_mes, 'r') as file:
            body_dict = yaml.load(file, Loader=yaml.SafeLoader)
            # Sampled bodies carry their mesh name inside the measurement
            # file; it overrides the constructor-provided body name
            if 'body_sample' in body_dict['body']:  # Not present in default measurements
                self._body_name = body_dict['body']['body_sample']

        self.in_body_obj = self.bodies_path / f'{self._body_name}.obj'
        self.in_g_spec = self.input / f'{self.in_tag}_specification.json'
        # Segmentation file always comes from the default bodies folder
        self.body_seg = Path(self._system['bodies_default_path']) / ('ggg_body_segmentation.json' if not self.use_smpl_seg else 'smpl_vert_segmentation.json')
        self.in_design_params = self.input / 'design_params.yaml'

    def _update_boxmesh_paths(self):
        """Resolve output paths for the box mesh and its auxiliary files."""

        self.g_box_mesh = self.out_el / f'{self.boxmesh_tag}_boxmesh.obj'
        self.g_box_mesh_compressed = self.out_el / f'{self.boxmesh_tag}_boxmesh.ply'
        self.g_mesh_segmentation = self.out_el / f'{self.boxmesh_tag}_sim_segmentation.txt'
        self.g_orig_edge_len = self.out_el / f'{self.boxmesh_tag}_orig_lens.pickle'
        self.g_vert_labels = self.out_el / f'{self.boxmesh_tag}_vertex_labels.yaml'
        self.g_texture_fabric = self.out_el / f'{self.boxmesh_tag}_texture_fabric.png'
        self.g_texture = self.out_el / f'{self.boxmesh_tag}_texture.png'
        self.g_mtl = self.out_el / f'{self.boxmesh_tag}_material.mtl'

    def update_in_copies_paths(self):
        """Resolve output paths for copies of the element's input files."""
        self.g_specs = self.out_el / f'{self.in_tag}_specification.json'
        self.element_sim_props = self.out_el / 'sim_props.yaml'
        self.body_mes = self.out_el / f'{self.in_tag}_body_measurements.yaml'
        self.design_params = self.out_el / f'{self.in_tag}_design_params.yaml'

    def update_sim_paths(self):
        """Resolve output paths for simulation result files."""
        self.g_sim = self.out_el / f'{self.sim_tag}_sim.obj'
        self.g_sim_glb = self.out_el / f'{self.sim_tag}_sim.glb'
        self.g_sim_compressed = self.out_el / f'{self.sim_tag}_sim.ply'
        self.usd = self.out_el / f'{self.sim_tag}_simulation.usd'


    def render_path(self, camera_name=''):
        """Return the output path for a rendered image, optionally tagged
        with a camera/side name."""

        fname = f'{self.sim_tag}_render_{camera_name}.png' if camera_name else f'{self.sim_tag}_render.png'
        return self.out_el / fname
|
||||
|
||||
|
||||
class SimConfig:
|
||||
def __init__(self, sim_props):
|
||||
# ---- Paths ----
|
||||
# Sim props sections
|
||||
self.props = sim_props
|
||||
sim_props_option = sim_props['options']
|
||||
sim_props_material = sim_props['material']
|
||||
|
||||
# Basic setup
|
||||
self.sim_fps = 60.0
|
||||
self.sim_substeps = 10 #increase?
|
||||
self.sim_wo_gravity_percentage = 0
|
||||
self.zero_gravity_steps = self.get_sim_props_value(sim_props, 'zero_gravity_steps', 5)
|
||||
self.resolution_scale = self.get_sim_props_value(sim_props, 'resolution_scale', 1.0)
|
||||
self.ground = self.get_sim_props_value(sim_props, 'ground', True)
|
||||
|
||||
# Stopping criteria
|
||||
self.static_threshold = self.get_sim_props_value(sim_props, 'static_threshold', 0.01)
|
||||
self.max_sim_steps = self.get_sim_props_value(sim_props, 'max_sim_steps', 1000)
|
||||
self.max_frame_time = self.get_sim_props_value(sim_props, 'max_frame_time', None)
|
||||
if self.max_frame_time is not None:
|
||||
self.max_frame_time = int(self.max_frame_time)
|
||||
self.max_sim_time = int(self.get_sim_props_value(sim_props, 'max_sim_time', 25 * 60))
|
||||
self.non_static_percent = self.get_sim_props_value(sim_props, 'non_static_percent', 5)
|
||||
# Quality filter
|
||||
self.max_body_collisions = self.get_sim_props_value(sim_props, 'max_body_collisions', 0)
|
||||
self.max_self_collisions = self.get_sim_props_value(sim_props, 'max_self_collisions', 0)
|
||||
|
||||
|
||||
# Self-collision prevention properties
|
||||
self.enable_particle_particle_collisions = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'enable_particle_particle_collisions', False)
|
||||
self.enable_triangle_particle_collisions = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'enable_triangle_particle_collisions', False)
|
||||
self.enable_edge_edge_collisions = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'enable_edge_edge_collisions', False)
|
||||
self.enable_body_collision_filters = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'enable_body_collision_filters',
|
||||
False
|
||||
)
|
||||
|
||||
# Attachment constraints
|
||||
self.enable_attachment_constraint = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'enable_attachment_constraint',
|
||||
False
|
||||
)
|
||||
self.attachment_labels = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'attachment_label_names',
|
||||
[]
|
||||
)
|
||||
self.attachment_frames = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'attachment_frames',
|
||||
100
|
||||
)
|
||||
self.attachment_stiffness = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'attachment_stiffness',
|
||||
[]
|
||||
)
|
||||
self.attachment_damping = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'attachment_damping',
|
||||
[]
|
||||
)
|
||||
if not self.attachment_frames or not self.attachment_labels:
|
||||
self.enable_attachment_constraint = False
|
||||
|
||||
# Global damping properties
|
||||
self.global_damping_factor = self.get_sim_props_value(
|
||||
sim_props_option,'global_damping_factor', 1.)
|
||||
self.global_damping_effective_velocity = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'global_damping_effective_velocity', 0.0)
|
||||
self.global_max_velocity = self.get_sim_props_value(
|
||||
sim_props_option,'global_max_velocity', 50.0)
|
||||
|
||||
# Cloth global collision resolution (reference drag) options
|
||||
self.enable_global_collision_filter = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'enable_global_collision_filter',
|
||||
False
|
||||
)
|
||||
self.enable_cloth_reference_drag = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'enable_cloth_reference_drag', False)
|
||||
self.cloth_reference_margin = self.get_sim_props_value(
|
||||
sim_props_option,'cloth_reference_margin', 0.1)
|
||||
self.cloth_reference_k = self.get_sim_props_value(
|
||||
sim_props_option,'cloth_reference_k', 1.0e7)
|
||||
|
||||
# Body smoothing options
|
||||
self.enable_body_smoothing = self.get_sim_props_value(
|
||||
sim_props_option,'enable_body_smoothing', True)
|
||||
self.smoothing_total_smoothing_factor = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'smoothing_total_smoothing_factor', 1)
|
||||
self.smoothing_recover_start_frame = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'smoothing_recover_start_frame', 0)
|
||||
self.smoothing_frame_gap_between_steps = self.get_sim_props_value(
|
||||
sim_props_option,
|
||||
'smoothing_frame_gap_between_steps', 5)
|
||||
self.smoothing_num_steps = self.get_sim_props_value(
|
||||
sim_props_option, 'smoothing_num_steps', 100)
|
||||
self.smoothing_num_steps = max(min(
|
||||
self.smoothing_num_steps, self.max_sim_steps - self.smoothing_recover_start_frame),
|
||||
0)
|
||||
if self.smoothing_num_steps == 0:
|
||||
self.enable_body_smoothing = False
|
||||
|
||||
# ----- Fabric material properties -----
|
||||
# Bending
|
||||
self.garment_edge_ke = self.get_sim_props_value(
|
||||
sim_props_material,'garment_edge_ke', 50000.0) #default = 100.0
|
||||
self.garment_edge_kd = self.get_sim_props_value(
|
||||
sim_props_material,'garment_edge_kd',10.0) #default = 0.0
|
||||
|
||||
# Area preservation
|
||||
self.garment_tri_ke = self.get_sim_props_value(
|
||||
sim_props_material,'garment_tri_ke', 10000.0) #default = 100.0, small number = more elasticity
|
||||
self.garment_tri_kd = self.get_sim_props_value(
|
||||
sim_props_material,'garment_tri_kd', 1.0) #default = 10.0
|
||||
self.garment_tri_ka = self.get_sim_props_value(
|
||||
sim_props_material, 'garment_tri_ka', 10000.0) # default = 100.0
|
||||
self.garment_tri_drag = 0.0 # default = 0.0
|
||||
self.garment_tri_lift = 0.0 #default = 0.0
|
||||
|
||||
# Thickness
|
||||
self.garment_density = self.get_sim_props_value(
|
||||
sim_props_material,'fabric_density', 1.0)
|
||||
self.garment_radius = self.get_sim_props_value(
|
||||
sim_props_material,'fabric_thickness', 0.1)
|
||||
|
||||
# Spring properties (Distance constraints)
|
||||
self.spring_ke = self.get_sim_props_value(
|
||||
sim_props_material,'spring_ke', 50000)
|
||||
self.spring_kd = self.get_sim_props_value(
|
||||
sim_props_material,'spring_kd', 10.0)
|
||||
|
||||
# Soft contact properties (contact between cloth and body)
|
||||
self.soft_contact_margin = 0.2
|
||||
self.soft_contact_ke = 1000.0
|
||||
self.soft_contact_kd = 10.0
|
||||
self.soft_contact_kf = 1000.0
|
||||
self.soft_contact_mu = self.get_sim_props_value(
|
||||
sim_props_material, 'fabric_friction', 0.5
|
||||
)
|
||||
|
||||
# Body material
|
||||
self.body_thickness = self.get_sim_props_value(sim_props_option,'body_collision_thickness', 0.0)
|
||||
self.body_friction = self.get_sim_props_value(sim_props_option,'body_friction', 0.5)
|
||||
|
||||
# particle properties
|
||||
# Some default values -- not used in cloth sim
|
||||
self.particle_ke = 1.0e3
|
||||
self.particle_kd = 1.0e2
|
||||
self.particle_kf = 100.0
|
||||
self.particle_mu = 0.5
|
||||
self.particle_cohesion = 0.0
|
||||
self.particle_adhesion = 0.0
|
||||
|
||||
# After the initialization
|
||||
self.update_min_steps()
|
||||
|
||||
def update_min_steps(self):
    """Recompute the lower bound on the number of simulation frames.

    The bound accounts for optional body smoothing (whose recovery phase
    must be allowed to finish) and for the attachment constraint (which
    needs a few extra frames after release so the cloth can settle).
    """
    lower_bound = 0
    if self.enable_body_smoothing:
        lower_bound = self.smoothing_recover_start_frame + self.smoothing_num_steps
    if self.enable_attachment_constraint:
        # A small margin allows clothing movement to restart after
        # the attachment is released
        lower_bound = max(lower_bound, self.attachment_frames + 5)
    self.min_sim_steps = lower_bound
|
||||
|
||||
def get_sim_props_value(self, sim_props, name, default_value):
    """Look up `name` in a simulation-properties mapping.

    Parameters
    ----------
    sim_props : dict
        A material/option section of the simulation properties.
    name : str
        Key to look up.
    default_value
        Value returned when `name` is absent.
    """
    # dict.get does the membership test and the lookup in a single step
    # (the original did `in` followed by `[]` -- two lookups)
    return sim_props.get(name, default_value)
|
||||
|
||||
258
pygarment/meshgen/simulation.py
Normal file
258
pygarment/meshgen/simulation.py
Normal file
@@ -0,0 +1,258 @@
|
||||
# Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved.
|
||||
# NVIDIA CORPORATION and its licensors retain all intellectual property
|
||||
# and proprietary rights in and to this software, related documentation
|
||||
# and any modifications thereto. Any use, reproduction, disclosure or
|
||||
# distribution of this software and related documentation without an express
|
||||
# license agreement from NVIDIA CORPORATION is strictly prohibited.
|
||||
|
||||
###########################################################################
|
||||
# Example Sim Cloth
|
||||
#
|
||||
# Shows a simulation of an FEM cloth model colliding against a static
|
||||
# rigid body mesh using the wp.sim.ModelBuilder().
|
||||
#
|
||||
###########################################################################
|
||||
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import platform
|
||||
import multiprocessing
|
||||
import signal
|
||||
import trimesh
|
||||
|
||||
# Warp
|
||||
import warp as wp
|
||||
|
||||
# Custom code
|
||||
from pygarment.meshgen.render.pythonrender import render_images
|
||||
from pygarment.meshgen.garment import Cloth
|
||||
from pygarment.meshgen.sim_config import SimConfig, PathCofig
|
||||
|
||||
wp.init()  # Initialise the Warp runtime once, at module import time
|
||||
|
||||
class SimulationError(BaseException):
    """To be raised when panel stitching cannot be executed correctly.

    NOTE(review): derives from BaseException rather than Exception,
    presumably so broad `except Exception` handlers cannot swallow it --
    confirm this is intentional.
    """
    pass
|
||||
|
||||
class FrameTimeOutError(BaseException):
    """To be raised when a single frame takes too long to simulate.

    NOTE(review): derives from BaseException rather than Exception,
    presumably so broad `except Exception` handlers cannot swallow it --
    confirm this is intentional.
    """
    pass
|
||||
|
||||
class SimTimeOutError(BaseException):
    """To be raised when the whole simulation takes too long.

    NOTE(review): derives from BaseException rather than Exception,
    presumably so broad `except Exception` handlers cannot swallow it --
    confirm this is intentional.
    """
    pass
|
||||
|
||||
def optimize_garment_storage(paths: "PathCofig"):
    """Prepare the data element for compact storage.

    Re-exports the meshes as .ply instead of .obj (deleting the .obj on
    success) and removes large texture/material files.  All steps are
    best-effort: a failed conversion must never abort the pipeline.
    """
    # Objs to ply -- the two conversions are independent, so a failure in
    # one must not block the other
    try:
        boxmesh = trimesh.load(paths.g_box_mesh)
        boxmesh.export(paths.g_box_mesh_compressed)
        paths.g_box_mesh.unlink()
    except Exception:
        # Narrowed from `except BaseException` so KeyboardInterrupt and
        # SystemExit are no longer silently swallowed
        pass

    try:
        simmesh = trimesh.load(paths.g_sim)
        simmesh.export(paths.g_sim_compressed)
        paths.g_sim.unlink()
    except Exception:
        pass

    # Remove large texture file and mtl -- not strictly necessary
    paths.g_texture_fabric.unlink(missing_ok=True)
    paths.g_mtl.unlink(missing_ok=True)
|
||||
|
||||
|
||||
def update_progress(progress, total):
    """Render a single-line console progress bar.

    Rewrites the current line (via the leading carriage return) with a
    50-character bar and the completion percentage.
    https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
    """
    fraction = progress / total
    filled = int(fraction * 50)
    bar = '#' * filled + '-' * (50 - filled)
    sys.stdout.write('\rProgress: [{0:50s}] {1:.1f}%'.format(bar, fraction * 100))
    sys.stdout.flush()
|
||||
|
||||
def _run_frame_with_timeout(garment, frame_timeout, frame_num):
    """Run frame while keeping a cap on time to run it.

    Raises FrameTimeOutError when the frame exceeds `frame_timeout`
    (seconds).  The timeout mechanism is platform-specific: a watchdog
    process on Windows, SIGALRM on Linux.
    """
    try:
        if platform.system() == "Windows":
            """https://stackoverflow.com/a/14920854"""

            if frame_num == 0: #only do it on first frame due to slowdown
                # NOTE(review): `target=garment.run_frame()` CALLS run_frame
                # immediately in this process and passes its return value
                # (presumably None) as the Process target -- the spawned
                # process therefore does nothing and frame 0 effectively
                # runs without a timeout.  Looks like a bug, but note that
                # `target=garment.run_frame` would run the frame in a child
                # process whose state changes are lost -- confirm intent
                # before changing.
                p_frame = multiprocessing.Process(target=garment.run_frame(), name="FrameSimulation")
                p_frame.start()

                # Wait timeout_after seconds for garment.run_frame()
                p_frame.join(frame_timeout)

                # If thread is active
                if p_frame.is_alive():
                    # Terminate the process
                    p_frame.terminate()
                    p_frame.join()
                    raise TimeoutError
            else:
                # Frames after the first run inline, without any timeout
                garment.run_frame()

        elif platform.system() in ["Linux", "OSX"]:
            """https://code-maven.com/python-timeout"""
            # NOTE(review): platform.system() returns "Darwin" on macOS,
            # never "OSX" -- on macOS neither branch matches and the frame
            # is silently skipped by this function.  Confirm and consider
            # adding "Darwin".
            # NOTE(review): signal.alarm() requires an int; a float
            # frame_timeout (e.g. max_frame_time * 2) would raise TypeError
            # here -- verify the config always supplies integers.

            def alarm_handler(signum, frame):
                raise TimeoutError

            signal.signal(signal.SIGALRM, alarm_handler)
            signal.alarm(frame_timeout)
            try:
                garment.run_frame()
            except TimeoutError as ex:
                raise TimeoutError
            else:
                # Frame finished in time: cancel the pending alarm
                signal.alarm(0)

    except TimeoutError as e:
        # Normalize both platform paths into the project-level error type
        raise FrameTimeOutError
|
||||
|
||||
def sim_frame_sequence(garment, config, store_usd=False, verbose=False):
    """Advance the cloth simulation frame by frame until it becomes static
    or a configured limit is hit.

    Raises SimTimeOutError when accumulated wall-clock time exceeds
    config.max_sim_time; per-frame timeouts are delegated to
    _run_frame_with_timeout (which raises FrameTimeOutError).
    """
    if store_usd:
        # Record the initial state before any dynamics
        garment.render_usd_frame()

    sim_start = time.time()
    for frame in range(config.max_sim_steps):
        if verbose:
            print(f'\n------ Frame {frame + 1} ------')
        else:
            update_progress(frame, config.max_sim_steps)

        garment.frame = frame

        # NOTE: frame timeouts only work in the main thread of the program;
        # disable them by passing 'null' as max_frame_time in the config
        static = False
        if config.max_frame_time is None:
            garment.run_frame()
        else:
            # The very first frame carries warm-up costs, so it gets
            # twice the budget
            budget = config.max_frame_time * 2 if frame == 0 else config.max_frame_time
            _run_frame_with_timeout(garment, frame_timeout=budget, frame_num=frame)

        if verbose:
            print(f'\nSelf-Intersection: {garment.count_self_intersections()}')

        # Only test for equilibrium once gravity is back on and the
        # mandatory minimum number of steps has elapsed
        if frame >= config.zero_gravity_steps and frame >= config.min_sim_steps:
            static, _ = garment.is_static()
            if static:
                break

        if time.time() - sim_start > config.max_sim_time:
            raise SimTimeOutError
|
||||
|
||||
|
||||
def run_sim(
        cloth_name, props, paths: "PathCofig",
        save_v_norms=False, store_usd=False,
        optimize_storage=False,
        verbose=False):
    """Initialize and run the simulation, then post-process and render.

    Parameters
    ----------
    cloth_name : str
        Name of the garment; also the key used in stats/failure records.
    props :
        Properties object with 'sim' and 'render' sections and an
        `add_fail(section, tag, name)` method.
    paths : PathCofig
        Project path configuration.
    save_v_norms, store_usd, optimize_storage, verbose : bool
        Pipeline switches.

    !! Important !!
    'store_usd' parameter slows down the simulation to CPU rates because of required CPU-GPU copies and file writes. Use only for debugging
    """
    sim_props = props['sim']
    render_props = props['render']

    start_time = time.time()

    config = SimConfig(sim_props['config'])  # NOTE: thin wrapper around the config dict
    garment = Cloth(cloth_name, config, paths, caching=store_usd)

    try:
        print("Simulation..")
        sim_frame_sequence(garment, config, store_usd, verbose=verbose)

    except FrameTimeOutError:
        print(f"FrameTimeOutError at frame {garment.frame}")
        props.add_fail('sim', 'frame_timeout', cloth_name)
    except SimTimeOutError:
        print("SimTimeOutError")
        props.add_fail('sim', 'simulation_timeout', cloth_name)
    except SimulationError:
        print("Simulation failed")
        props.add_fail('sim', 'gt_edges_creation', cloth_name)
    except BaseException as e:
        print(f'Sim::{cloth_name}::crashed with {e}')

        if isinstance(e, KeyboardInterrupt):
            # Allow to stop simulation loops by keyboard interrupt.
            # It's not a real crash, so don't write down the failure.
            # (renamed local from `min` -- it shadowed the builtin)
            sec = round(time.time() - start_time, 3)
            minutes = int(sec / 60)
            print(f"Simulation pipeline took: {minutes} m {sec - minutes * 60} s")
            raise

        traceback.print_exc()
        props.add_fail('sim', 'crashes', cloth_name)
    else:  # Other quality checks
        # Reaching the very last allowed frame means equilibrium was never hit
        if garment.frame == config.max_sim_steps - 1:
            _, non_st_count = garment.is_static()
            print('\nFailed to achieve static equilibrium for {} with {} non-static vertices out of {}'.format(
                cloth_name, non_st_count, len(garment.current_verts)))
            props.add_fail('sim', 'static_equilibrium', cloth_name)

        if time.time() - start_time < 0.5:  # 0.5 sec -- finished suspiciously fast
            props.add_fail('sim', 'fast_finish', cloth_name)

        # 3D penetrations
        num_body_collisions = garment.count_body_intersections()
        print("BODY CLOTH INTERSECTIONS: ", num_body_collisions)
        num_self_collisions = garment.count_self_intersections()

        sim_props['stats']['body_collisions'][cloth_name] = num_body_collisions
        sim_props['stats']['self_collisions'][cloth_name] = num_self_collisions

        if num_body_collisions > config.max_body_collisions:
            props.add_fail('sim', 'cloth_body_intersection', cloth_name)
        if num_self_collisions:
            print(f'Self-Intersecting with {num_self_collisions}, '
                  f'is fail: {num_self_collisions > config.max_self_collisions}')
            if num_self_collisions > config.max_self_collisions:
                props.add_fail('sim', 'cloth_self_intersection', cloth_name)
        else:
            print('Not self-intersecting!!!')

    # ---- Postprocessing ----
    # NOTE: Attempt even on failures for accurate picture and post-analysis
    frame = garment.frame
    print(f"\nSimulation took #frames={frame + 1}")

    sim_time = time.time() - start_time
    sim_props['stats']['sim_time'][cloth_name] = sim_time
    # Guard against division by zero when the sim stopped on frame 0
    sim_props['stats']['spf'][cloth_name] = sim_time / frame if frame else sim_time
    sim_props['stats']['fin_frame'][cloth_name] = frame

    garment.save_frame(save_v_norms=save_v_norms)  # saving after stats

    # Render images
    s_time = time.time()
    render_images(paths, garment.v_body, garment.f_body, render_props['config'])
    render_image_time = time.time() - s_time
    render_props['stats']['render_time'][cloth_name] = render_image_time
    print(f"Rendering {cloth_name} took {render_image_time}s")

    if optimize_storage:
        optimize_garment_storage(paths)

    # Final info output (renamed local from `min` -- it shadowed the builtin)
    sec = round(time.time() - start_time, 3)
    minutes = int(sec / 60)
    print(f"\nSimulation pipeline took: {minutes} m {sec - minutes * 60} s")
|
||||
313
pygarment/meshgen/triangulation_utils.py
Normal file
313
pygarment/meshgen/triangulation_utils.py
Normal file
@@ -0,0 +1,313 @@
|
||||
"""Helper functions for the triangulation of the panels"""
|
||||
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
# CGAL 2D
|
||||
import CGAL.CGAL_Kernel
|
||||
from CGAL.CGAL_Kernel import Point_2
|
||||
from CGAL.CGAL_Mesh_2 import Mesh_2_Constrained_Delaunay_triangulation_2
|
||||
from CGAL.CGAL_Mesh_2 import Delaunay_mesh_size_criteria_2
|
||||
from CGAL import CGAL_Mesh_2
|
||||
from CGAL.CGAL_Triangulation_2 import Constrained_Delaunay_triangulation_2
|
||||
|
||||
|
||||
class FaceInfo2(object):
    """Per-face bookkeeping for the nesting-level flood fill.

    https://github.com/CGAL/cgal-swig-bindings/blob/main/examples/python/polygonal_triangulation.py#L9
    """

    def __init__(self):
        # -1 marks "not yet visited" by mark_domains
        self.nesting_level = -1

    def in_domain(self):
        # mark_domain starts the unbounded outer region at level 1, so
        # faces on even levels are the ones inside the domain
        return self.nesting_level % 2 == 0
|
||||
|
||||
def mark_domains(ct, start_face, index, edge_border, face_info):
    """Breadth-first flood fill: label every face reachable from
    `start_face` without crossing a constrained edge with nesting level
    `index`.  Constrained edges encountered on the way are appended to
    `edge_border`.

    https://github.com/CGAL/cgal-swig-bindings/blob/main/examples/python/polygonal_triangulation.py#L17
    """
    if face_info[start_face].nesting_level != -1:
        return
    pending = [start_face]
    while pending:
        face = pending.pop(0)
        if face_info[face].nesting_level != -1:
            continue  # already labelled via another path
        face_info[face].nesting_level = index
        for i in range(3):
            edge = (face, i)
            neighbor = face.neighbor(i)
            if face_info[neighbor].nesting_level == -1:
                if ct.is_constrained(edge):
                    edge_border.append(edge)
                else:
                    pending.append(neighbor)
|
||||
|
||||
def mark_domain(cdt):
    """Build a face -> FaceInfo2 mapping whose `in_domain()` tells whether a
    face lies inside the constrained region.

    Starting from the faces incident to the infinite vertex (the unbounded
    outside, level 1), each set of faces connected through unconstrained
    edges gets one nesting level; crossing a constrained edge increases the
    level by 1, so alternate levels alternate inside/outside.

    https://github.com/CGAL/cgal-swig-bindings/blob/main/examples/python/polygonal_triangulation.py#L36
    """
    face_info = {face: FaceInfo2() for face in cdt.all_faces()}
    border = []
    # Seed the fill from the unbounded outer region
    mark_domains(cdt, cdt.infinite_face(), 1, border, face_info)
    while border:
        edge = border.pop(0)
        neighbor = edge[0].neighbor(edge[1])
        if face_info[neighbor].nesting_level == -1:
            # One constrained edge deeper than the face we came from
            mark_domains(cdt, neighbor,
                         face_info[edge[0]].nesting_level + 1,
                         border, face_info)
    return face_info
|
||||
|
||||
def plot_triangulation(cdt,face_info):
    """
    Debug helper: draw the triangulation with matplotlib -- constrained
    edges in red, unconstrained in-domain edges in blue.

    https://github.com/CGAL/cgal-swig-bindings/blob/main/examples/python/polygonal_triangulation.py#L77
    """
    def rescale_plot(ax, scale=1.1):
        # Widen both axes by `scale` around their midpoints so the
        # triangulation does not touch the figure border
        xmin, xmax = ax.get_xlim()
        ymin, ymax = ax.get_ylim()
        xmid = (xmin + xmax) / 2.0
        ymid = (ymin + ymax) / 2.0
        xran = xmax - xmid
        yran = ymax - ymid
        ax.set_xlim(xmid - xran * scale, xmid + xran * scale)
        ax.set_ylim(ymid - yran * scale, ymid + yran * scale)

    def plot_edge(edge, *args):
        # A CGAL edge is a (face_handle, index) pair; cdt.segment()
        # resolves it to its two endpoint coordinates
        edge_seg = cdt.segment(edge)
        pts = [edge_seg.source(), edge_seg.target()]
        xs = [pts[0].x(), pts[1].x()]
        ys = [pts[0].y(), pts[1].y()]
        plt.plot(xs, ys, *args)

    for edge in cdt.finite_edges():
        if cdt.is_constrained(edge):
            plot_edge(edge, 'r-')  # constraint (panel boundary)
        else:
            if face_info[edge[0]].in_domain():
                plot_edge(edge, 'b-')  # interior edge
    rescale_plot(plt.gca())
    plt.show()
|
||||
|
||||
def get_edge_vert_ids(edges):
    """
    This function returns a list of index pairs of edge vertices into their corresponding
    panel.panel_vertices defining the border of the panel.
    Input:
        * edges (list): All edges of a panel; each has a `vertex_range` index sequence
    Output:
        * zipped_array (ndarray): ndarray of start and end indices of edge vertices into panel.vertices defining
        the line segments of the panel edges (e.g. [[0,1],[1,2],[2,3],...,[19,20],[20,0]])
    """
    # Collect per-edge pair arrays and concatenate ONCE at the end: the
    # original concatenated inside the loop, which is quadratic in the
    # total number of segments.
    pairs = []
    for edge in edges:
        ids = np.asarray(edge.vertex_range)
        # Consecutive (start, end) index pairs along this edge
        pairs.append(np.stack((ids[:-1], ids[1:]), axis=1))

    if not pairs:
        # No edges at all: keep the original empty (0, 2) int result
        return np.empty((0, 2), dtype=int)
    return np.concatenate(pairs, axis=0).astype(int)
|
||||
|
||||
def create_cdt_points(cdt, points):
    """Insert the edge vertices into `cdt` and return their vertex handles.

    Raw (x, y) pairs are converted to Point_2 on the fly; inputs that are
    already Point_2 instances are inserted as-is.
    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
        * points (list): The edge vertices
    Output:
        * cdt_points (list): Mesh_2_Constrained_Delaunay_triangulation_2_Vertex_handle of the edge vertices
    """
    handles = []
    for point in points:
        if not isinstance(point, CGAL.CGAL_Kernel.Point_2):
            x, y = point
            point = Point_2(float(x), float(y))
        handles.append(cdt.insert(point))
    return handles
|
||||
|
||||
def cdt_insert_constraints(cdt, cdt_points, edge_verts_ids):
    """Insert the panel boundary as a planar straight line graph (PSLG) of
    constraints into `cdt`.

    CGAL may create extra vertices while inserting a constraint; these are
    detected via the vertex count and recorded so later stages can map them
    back onto the original boundary vertices.

    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
        * cdt_points (list): vertex handles of the boundary points
        * edge_verts_ids (ndarray): (s_id, e_id) index pairs into cdt_points
    Output:
        * extra_points (dict): [index into cdt.finite_vertices() of a newly
          inserted point] -> [s_id that should replace it later]
    """
    prev_count = cdt.number_of_vertices()
    extra_points = {}

    for s_id, e_id in edge_verts_ids:
        cdt.insert_constraint(cdt_points[s_id], cdt_points[e_id])

        count = cdt.number_of_vertices()
        if count != prev_count:
            # A vertex appeared during this insertion: remember it under
            # the start index of the segment that produced it
            extra_points[count - 1] = s_id
            prev_count = count
            print('triangulation_utils::INFO::Generated extra boundary points for sdt contraints. Postprocessing will be performed')

    return extra_points
|
||||
|
||||
def get_face_v_ids(cdt, points, new_points, check=False, plot = False):
    """
    This function returns the faces of cdt as a list of ints instead of vertex handles.
    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
        * points (list): Mesh vertices (filtered out newly inserted boundary vertices)
        * new_points (dict): Dict with indices into cdt.finite_vertices() of newly inserted points (if existent)
        as keys. The values of the dict are the indices replacing the indices of the newly inserted points.
        * check (bool): if True checks if coordinates of vertex handle from face vertex equals point coordinates
        * plot (bool): if True shows the triangulation via plot_triangulation
    Output:
        * f (ndarray): (N x 3) array of vertex indices describing the faces

    Note: We first replace the vertex handle's coordinates of all points by their indices into points / cdt_points
    because face_handle stores the vertex coordinates and not their indices into points -> speeds up creation of f.
    WARNING: this mutates the point coordinates stored in cdt -- the
    triangulation's geometry is destroyed by this call.
    """
    face_v_ids = []

    if new_points:
        # Deduplication bookkeeping only needed once indices get remapped
        sorted_faces = []
        new_points_ids = new_points.keys()

    pts = list(cdt.finite_vertices())

    if check:
        len_points = len(points)
        for i, v_h in enumerate(pts):
            first_temp = v_h.point()
            first = [first_temp.x(),first_temp.y()]

            # Only the first len_points vertices have a counterpart in
            # `points`; vertices beyond that are CGAL-inserted extras
            if not new_points or i < len_points:
                second = points[i]

            if (not new_points or i < len_points) and (first[0] != second[0] or first[1] != second[1]):
                raise ValueError("coords of vertex handle from face vertex does not equal point coords")
            # Encode the vertex index in the x coordinate (see Note above)
            v_h.set_point(Point_2(i, 0.0))

    else:
        for i, v_h in enumerate(pts):
            v_h.set_point(Point_2(i, 0.0))

    # Keep faces that are in the domain
    face_info_new = mark_domain(cdt)

    for face in cdt.finite_faces():
        if face_info_new[face].in_domain():
            # x coordinate now holds the vertex index (set above)
            v0_id = int(face.vertex(0).point().x())
            v1_id = int(face.vertex(1).point().x())
            v2_id = int(face.vertex(2).point().x())

            if new_points:
                # Remap CGAL-inserted vertices back onto original ones
                v_ids = [v0_id,v1_id,v2_id]
                for j, v_id in enumerate(v_ids):
                    if v_id in new_points_ids:
                        v_ids[j] = new_points[v_id]

                #check if face now is not an edge/point and not already inserted in faces
                if not (v_ids[0] == v_ids[1] or v_ids[1] == v_ids[2] or v_ids[0] == v_ids[2]) \
                    and not (sorted_faces and np.any(np.all(np.array(sorted_faces) == sorted(v_ids), axis=1))):
                    face_v_ids.append(v_ids)
                    sorted_faces.append(sorted(v_ids))
            else:
                face_v_ids.append([v0_id, v1_id, v2_id])

    if plot:
        plot_triangulation(cdt, face_info_new)

    f = np.array(face_v_ids)
    return f
|
||||
|
||||
def get_faces_sorted(cdt):
    """Return the in-domain faces of `cdt` as (N x 3) *sorted* vertex-index
    triples, plus the vertex coordinates converted to plain floats.

    Each vertex handle's point is overwritten with (index, 0) so faces can
    report vertex *indices* instead of coordinates -- note this destroys
    the triangulation's geometry.

    Output:
        * f (ndarray): (N x 3) sorted vertex-index triples
        * points (list): original [x, y] coordinates per vertex
    """
    # Materialize the handles before mutating their points
    handles = list(cdt.finite_vertices())
    coords = []
    for idx, handle in enumerate(handles):
        coords.append([handle.point().x(), handle.point().y()])
        handle.set_point(Point_2(idx, 0.0))

    # Keep faces that are in the domain
    face_info = mark_domain(cdt)

    triples = []
    for face in cdt.finite_faces():
        if face_info[face].in_domain():
            ids = [int(face.vertex(k).point().x()) for k in range(3)]
            triples.append(sorted(ids))

    return np.array(triples), coords
|
||||
|
||||
def get_keep_vertices(cdt, len_b):
    """Drop boundary vertices that CGAL inserted during mesh generation.

    Indices below `len_b` belong to the original panel-boundary vertices;
    any vertex on the mesh boundary with a larger index was added by the
    mesher and is filtered out.

    Input:
        * cdt (Mesh_2_Constrained_Delaunay_triangulation_2)
        * len_b (int): number of original panel-boundary vertices
    Output:
        * keep_vertices: vertices of cdt without newly inserted boundary points
    """
    faces, points = get_faces_sorted(cdt)

    # Every interior edge is shared by exactly two faces; edges occurring
    # once therefore lie on the mesh boundary
    all_edges = np.concatenate([faces[:, :2], faces[:, 1:], faces[:, ::2]])
    uniq_edges, counts = np.unique(all_edges, axis=0, return_counts=True)
    boundary_v_ids = np.unique(uniq_edges[counts == 1].flatten())

    # Boundary vertices at index >= len_b were inserted by the mesher
    inserted_ids = boundary_v_ids[boundary_v_ids >= len_b]

    # remove new_boundary_vertices
    return list(np.delete(points, inserted_ids, axis=0))
|
||||
|
||||
def is_manifold(face_v_ids: np.ndarray, points: np.ndarray, tol=1e-2):
    """Check that every face is a proper (non-degenerate) triangle.

    Uses the strict triangle inequality with tolerance `tol`: the perimeter
    must exceed twice the longest side by more than `tol`.
    """
    tri = points[face_v_ids]
    side_a = np.linalg.norm(tri[:, 0] - tri[:, 1], axis=1)
    side_b = np.linalg.norm(tri[:, 1] - tri[:, 2], axis=1)
    side_c = np.linalg.norm(tri[:, 0] - tri[:, 2], axis=1)
    lengths = np.stack([side_a, side_b, side_c], axis=-1)

    perimeter = lengths.sum(axis=1)
    longest = lengths.max(axis=1)
    return np.all(perimeter > 2 * longest + tol)
|
||||
3
pygarment/pattern/__init__.py
Normal file
3
pygarment/pattern/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""
|
||||
Package with various 2D garment pattern wrappers when pattern is given in custom .json format
|
||||
"""
|
||||
165
pygarment/pattern/cairo_dlls/cairosvg_LICENSE.txt
Normal file
165
pygarment/pattern/cairo_dlls/cairosvg_LICENSE.txt
Normal file
@@ -0,0 +1,165 @@
|
||||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
|
||||
This version of the GNU Lesser General Public License incorporates
|
||||
the terms and conditions of version 3 of the GNU General Public
|
||||
License, supplemented by the additional permissions listed below.
|
||||
|
||||
0. Additional Definitions.
|
||||
|
||||
As used herein, "this License" refers to version 3 of the GNU Lesser
|
||||
General Public License, and the "GNU GPL" refers to version 3 of the GNU
|
||||
General Public License.
|
||||
|
||||
"The Library" refers to a covered work governed by this License,
|
||||
other than an Application or a Combined Work as defined below.
|
||||
|
||||
An "Application" is any work that makes use of an interface provided
|
||||
by the Library, but which is not otherwise based on the Library.
|
||||
Defining a subclass of a class defined by the Library is deemed a mode
|
||||
of using an interface provided by the Library.
|
||||
|
||||
A "Combined Work" is a work produced by combining or linking an
|
||||
Application with the Library. The particular version of the Library
|
||||
with which the Combined Work was made is also called the "Linked
|
||||
Version".
|
||||
|
||||
The "Minimal Corresponding Source" for a Combined Work means the
|
||||
Corresponding Source for the Combined Work, excluding any source code
|
||||
for portions of the Combined Work that, considered in isolation, are
|
||||
based on the Application, and not on the Linked Version.
|
||||
|
||||
The "Corresponding Application Code" for a Combined Work means the
|
||||
object code and/or source code for the Application, including any data
|
||||
and utility programs needed for reproducing the Combined Work from the
|
||||
Application, but excluding the System Libraries of the Combined Work.
|
||||
|
||||
1. Exception to Section 3 of the GNU GPL.
|
||||
|
||||
You may convey a covered work under sections 3 and 4 of this License
|
||||
without being bound by section 3 of the GNU GPL.
|
||||
|
||||
2. Conveying Modified Versions.
|
||||
|
||||
If you modify a copy of the Library, and, in your modifications, a
|
||||
facility refers to a function or data to be supplied by an Application
|
||||
that uses the facility (other than as an argument passed when the
|
||||
facility is invoked), then you may convey a copy of the modified
|
||||
version:
|
||||
|
||||
a) under this License, provided that you make a good faith effort to
|
||||
ensure that, in the event an Application does not supply the
|
||||
function or data, the facility still operates, and performs
|
||||
whatever part of its purpose remains meaningful, or
|
||||
|
||||
b) under the GNU GPL, with none of the additional permissions of
|
||||
this License applicable to that copy.
|
||||
|
||||
3. Object Code Incorporating Material from Library Header Files.
|
||||
|
||||
The object code form of an Application may incorporate material from
|
||||
a header file that is part of the Library. You may convey such object
|
||||
code under terms of your choice, provided that, if the incorporated
|
||||
material is not limited to numerical parameters, data structure
|
||||
layouts and accessors, or small macros, inline functions and templates
|
||||
(ten or fewer lines in length), you do both of the following:
|
||||
|
||||
a) Give prominent notice with each copy of the object code that the
|
||||
Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
|
||||
b) Accompany the object code with a copy of the GNU GPL and this license
|
||||
document.
|
||||
|
||||
4. Combined Works.
|
||||
|
||||
You may convey a Combined Work under terms of your choice that,
|
||||
taken together, effectively do not restrict modification of the
|
||||
portions of the Library contained in the Combined Work and reverse
|
||||
engineering for debugging such modifications, if you also do each of
|
||||
the following:
|
||||
|
||||
a) Give prominent notice with each copy of the Combined Work that
|
||||
the Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
|
||||
b) Accompany the Combined Work with a copy of the GNU GPL and this license
|
||||
document.
|
||||
|
||||
c) For a Combined Work that displays copyright notices during
|
||||
execution, include the copyright notice for the Library among
|
||||
these notices, as well as a reference directing the user to the
|
||||
copies of the GNU GPL and this license document.
|
||||
|
||||
d) Do one of the following:
|
||||
|
||||
0) Convey the Minimal Corresponding Source under the terms of this
|
||||
License, and the Corresponding Application Code in a form
|
||||
suitable for, and under terms that permit, the user to
|
||||
recombine or relink the Application with a modified version of
|
||||
the Linked Version to produce a modified Combined Work, in the
|
||||
manner specified by section 6 of the GNU GPL for conveying
|
||||
Corresponding Source.
|
||||
|
||||
1) Use a suitable shared library mechanism for linking with the
|
||||
Library. A suitable mechanism is one that (a) uses at run time
|
||||
a copy of the Library already present on the user's computer
|
||||
system, and (b) will operate properly with a modified version
|
||||
of the Library that is interface-compatible with the Linked
|
||||
Version.
|
||||
|
||||
e) Provide Installation Information, but only if you would otherwise
|
||||
be required to provide such information under section 6 of the
|
||||
GNU GPL, and only to the extent that such information is
|
||||
necessary to install and execute a modified version of the
|
||||
Combined Work produced by recombining or relinking the
|
||||
Application with a modified version of the Linked Version. (If
|
||||
you use option 4d0, the Installation Information must accompany
|
||||
the Minimal Corresponding Source and Corresponding Application
|
||||
Code. If you use option 4d1, you must provide the Installation
|
||||
Information in the manner specified by section 6 of the GNU GPL
|
||||
for conveying Corresponding Source.)
|
||||
|
||||
5. Combined Libraries.
|
||||
|
||||
You may place library facilities that are a work based on the
|
||||
Library side by side in a single library together with other library
|
||||
facilities that are not Applications and are not covered by this
|
||||
License, and convey such a combined library under terms of your
|
||||
choice, if you do both of the following:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work based
|
||||
on the Library, uncombined with any other library facilities,
|
||||
conveyed under the terms of this License.
|
||||
|
||||
b) Give prominent notice with the combined library that part of it
|
||||
is a work based on the Library, and explaining where to find the
|
||||
accompanying uncombined form of the same work.
|
||||
|
||||
6. Revised Versions of the GNU Lesser General Public License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions
|
||||
of the GNU Lesser General Public License from time to time. Such new
|
||||
versions will be similar in spirit to the present version, but may
|
||||
differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Library as you received it specifies that a certain numbered version
|
||||
of the GNU Lesser General Public License "or any later version"
|
||||
applies to it, you have the option of following the terms and
|
||||
conditions either of that published version or of any later version
|
||||
published by the Free Software Foundation. If the Library as you
|
||||
received it does not specify a version number of the GNU Lesser
|
||||
General Public License, you may choose any version of the GNU Lesser
|
||||
General Public License ever published by the Free Software Foundation.
|
||||
|
||||
If the Library as you received it specifies that a proxy can decide
|
||||
whether future versions of the GNU Lesser General Public License shall
|
||||
apply, that proxy's public statement of acceptance of any version is
|
||||
permanent authorization for you to choose that version for the
|
||||
Library.
|
||||
BIN
pygarment/pattern/cairo_dlls/libGraphicsMagick++-12.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libGraphicsMagick++-12.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libbrotlicommon.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libbrotlicommon.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libbrotlidec.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libbrotlidec.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libbrotlienc.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libbrotlienc.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libbz2-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libbz2-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libcairo-2.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libcairo-2.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libcairo-gobject-2.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libcairo-gobject-2.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libcairomm-1.0-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libcairomm-1.0-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libexpat-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libexpat-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libffi-8.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libffi-8.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libfontconfig-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libfontconfig-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libfreetype-6.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libfreetype-6.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libfribidi-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libfribidi-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgc-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgc-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgcc_s_seh-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgcc_s_seh-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgdk_pixbuf-2.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgdk_pixbuf-2.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgdkmm-3.0-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgdkmm-3.0-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgirepository-1.0-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgirepository-1.0-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libglib-2.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libglib-2.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libglibmm-2.4-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libglibmm-2.4-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgmodule-2.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgmodule-2.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgobject-2.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgobject-2.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgomp-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgomp-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgraphite2.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgraphite2.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgslcblas-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgslcblas-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libgspell-1-2.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libgspell-1-2.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libharfbuzz-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libharfbuzz-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libheif.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libheif.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libhwy.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libhwy.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libiconv-2.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libiconv-2.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libidn2-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libidn2-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libintl-8.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libintl-8.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libjasper.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libjasper.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libjbig-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libjbig-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libjxl_threads.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libjxl_threads.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/liblcms2-2.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/liblcms2-2.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/liblqr-1-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/liblqr-1-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libltdl-7.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libltdl-7.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/liblzma-5.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/liblzma-5.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libmpdec-2.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libmpdec-2.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libncursesw6.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libncursesw6.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libnghttp2-14.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libnghttp2-14.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libnspr4.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libnspr4.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libopenjp2-7.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libopenjp2-7.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpanelw6.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpanelw6.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpango-1.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpango-1.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpangocairo-1.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpangocairo-1.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpangoft2-1.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpangoft2-1.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpangomm-1.4-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpangomm-1.4-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpangowin32-1.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpangowin32-1.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpcre2-8-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpcre2-8-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpixman-1-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpixman-1-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libplc4.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libplc4.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libplds4.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libplds4.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpng16-16.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpng16-16.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpoppler-glib-8.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpoppler-glib-8.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpotrace-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpotrace-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libpsl-5.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libpsl-5.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libquadmath-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libquadmath-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libraqm-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libraqm-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libreadline8.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libreadline8.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/librevenge-0.0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/librevenge-0.0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/librevenge-stream-0.0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/librevenge-stream-0.0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libsigc-2.0-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libsigc-2.0-0.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libsoup-2.4-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libsoup-2.4-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libssh2-1.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libssh2-1.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libssl-1_1-x64.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libssl-1_1-x64.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libstdc++-6.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libstdc++-6.dll
Normal file
Binary file not shown.
BIN
pygarment/pattern/cairo_dlls/libtermcap-0.dll
Normal file
BIN
pygarment/pattern/cairo_dlls/libtermcap-0.dll
Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user