init_code

This commit is contained in:
sky
2025-07-03 17:03:00 +08:00
parent a710c87a2b
commit 89766fe3d1
220 changed files with 479903 additions and 77 deletions

View File

@@ -0,0 +1,54 @@
"""Aling body models s.t. they stand exactly on the plane y=0 and save as a new data"""
import igl
import numpy as np
from pathlib import Path
import trimesh
from pygarment.data_config import Properties
def load_mesh(path):
    """Read a triangle mesh from *path* via libigl.

    Returns a tuple of (vertices, flattened face indices, face array).
    """
    vertices, faces = igl.read_triangle_mesh(str(path))
    flat_faces = faces.flatten()
    return vertices, flat_faces, faces
def get_shift_param(body_vertices):
    """Return the vertical offset that puts the body's lowest point on y=0.

    Previously only bodies sunk *below* the plane were shifted (up); a body
    floating above y=0 was left untouched, contradicting the module's goal of
    making every body stand exactly on the plane. Now the shift works in both
    directions: positive for bodies below the plane, negative for bodies above.

    Args:
        body_vertices: (N, 3) array-like of vertex positions.

    Returns:
        float: offset to add to the y column; 0.0 when already aligned.
        (-0.0 for an aligned body is falsy, so `if shift_y:` guards still skip it.)
    """
    min_y = float(np.asarray(body_vertices)[:, 1].min())
    # Adding -min_y moves the lowest vertex exactly onto the plane y=0.
    return -min_y
def save_mesh(path, v, f):
    """Write the triangle mesh (v, f) to *path* in binary (non-ASCII) format."""
    out_file = str(path)
    igl.write_triangle_mesh(out_file, v=v, f=f, force_ascii=False)
def process_body(path_in, path_out):
    """Load a body mesh, shift it so it stands on the plane y=0, and save it."""
    body_vertices, _, body_faces = load_mesh(path_in)
    shift_y = get_shift_param(body_vertices)
    # Only touch the vertex data when a shift is actually needed
    if shift_y:
        body_vertices[:, 1] += shift_y
    save_mesh(path_out, body_vertices, body_faces)
if __name__ == "__main__":
    system_paths = Properties('./system.json')
    body_objs_path = Path('./assets/bodies')
    out_path = Path('./assets/bodies_aligned')
    out_path.mkdir(parents=True, exist_ok=True)

    # Align every body mesh and save it under the mirror folder.
    for file in body_objs_path.iterdir():
        # BUGFIX: match on the actual extension instead of a substring test --
        # `'.obj' in file.name` would also pick up e.g. 'mesh.obj.bak'.
        if file.suffix == '.obj':
            process_body(file, out_path / file.name)
            print(file.name)

View File

@@ -0,0 +1,25 @@
"""In simulated dataset, gather all the scene images in one folder"""
import pygarment.data_config as config
from pathlib import Path
import shutil
from pattern_data_sim import gather_renders
system_props = config.Properties('./system.json')

dataset = 'unpacking_test'
# Both body-type subsets of the dataset are processed identically.
datapaths = [
    Path(system_props['output']) / dataset / body_type
    for body_type in ('default_body', 'random_body')
]

for datapath in datapaths:
    # Unpack the archive if the data is still compressed, then remove it.
    archive = datapath / 'data.tar.gz'
    if archive.exists():
        shutil.unpack_archive(archive, datapath)
        archive.unlink()
    gather_renders(datapath)

View File

@@ -0,0 +1,117 @@
"""
Run or Resume simulation of a pattern dataset with MayaPy standalone mode
Note that this module is executed in Maya (or by mayapy)
How to use:
* fill out system.json with appropriate paths
Running itself:
./datasim.py --data <dataset folder name> --minibatch <size> --config <simulation_rendering_configuration.json>
"""
import argparse
import sys
import shutil
from pathlib import Path
# My modules
import pygarment.data_config as data_config
import pygarment.meshgen.datasim_utils as sim
def get_command_args():
    """Parse command line arguments that control the simulation run.

    Returns:
        argparse.Namespace with data / config / minibatch / default_body /
        caching / rewrite_config attributes.
    """
    # https://stackoverflow.com/questions/40001892/reading-named-command-arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--data', '-d', help='name of dataset folder', type=str)
    parser.add_argument('--config', '-c', help='name of .json file with desired simulation&rendering config', type=str,
                        default=None)
    parser.add_argument('--minibatch', '-b', help='number of examples to simulate in this run', type=int, default=None)
    parser.add_argument('--default_body', action='store_true', help='run dataset on default body')
    parser.add_argument('--caching', action='store_true', help='cache intermediate simulation')
    # BUGFIX: help text was copy-pasted from --caching
    parser.add_argument('--rewrite_config', action='store_true',
                        help='overwrite the simulation config stored in dataset properties')

    args = parser.parse_args()
    print(args)

    return args
def gather_renders(out_data_path: Path, verbose=False):
    """Collect every render image under *out_data_path* into a 'renders' subfolder."""
    renders_path = out_data_path / 'renders'
    renders_path.mkdir(exist_ok=True)

    # Materialize the match list before copying so new files in the target
    # folder don't affect iteration
    render_files = list(out_data_path.glob('**/*render*.png'))
    for render_file in render_files:
        try:
            shutil.copy(str(render_file), str(renders_path))
        except shutil.SameFileError:
            # Already collected on a previous run -- nothing to do
            if verbose:
                print(f'File {render_file} already exists')
if __name__ == "__main__":
    command_args = get_command_args()
    system_config = data_config.Properties('./system.json')

    # ------ Dataset ------
    dataset = command_args.data
    datapath = Path(system_config['datasets_path']) / dataset
    init_dataset_file = datapath / 'dataset_properties.yaml'

    # Create dataset_file in correct folder (default_body or random_body)
    body_type = 'default_body' if command_args.default_body else 'random_body'
    datapath = datapath / body_type / 'data'  # Overwrite datapath to specific body type
    output_path = Path(system_config['datasets_sim']) / dataset / body_type
    output_path.mkdir(parents=True, exist_ok=True)
    dataset_file_body = output_path / f'dataset_properties_{body_type}.yaml'
    # Seed the per-body-type properties file from the dataset's base file once
    if not dataset_file_body.exists():
        shutil.copy(str(init_dataset_file), str(dataset_file_body))
    dataset_file = dataset_file_body
    props = data_config.Properties(dataset_file_body)

    # NOTE(review): unclear where the 'frozen' flag is set -- confirm upstream
    if 'frozen' in props and props['frozen']:
        # avoid accidental re-runs of data
        print('Warning: dataset is frozen, processing is skipped')
        sys.exit(0)

    # ------- Defining sim props -----
    props.set_basic(data_folder=dataset)  # in case data properties are from other dataset/folder, update info
    if command_args.config is not None:
        props.merge(
            Path(system_config['sim_configs_path']) / command_args.config,
            re_write=command_args.rewrite_config)  # Re-write sim config only explicitly

    # ----- Main loop: simulate the requested (mini-)batch ----------
    finished = sim.batch_sim(
        datapath,
        output_path,
        props,
        run_default_body=command_args.default_body,
        num_samples=command_args.minibatch,  # run in mini-batch if requested
        caching=command_args.caching, force_restart=False)

    # ----- Try and resim fails once -----
    if finished:
        # NOTE: Could be larger than a regular batch
        finished = sim.resim_fails(
            datapath,
            output_path,
            props,
            run_default_body=command_args.default_body,
            caching=command_args.caching)

    props.add_sys_info()  # Save system information
    props.serialize(dataset_file)

    # ------ Gather renders into one folder for convenience -------
    gather_renders(output_path)

    # -------- fin --------
    # Exit code signals the wrapper script whether to re-launch another batch
    if finished:
        # finished processing the dataset
        print('Dataset processing finished')
        sys.exit(0)
    else:
        sys.exit(1)  # not finished dataset processing

View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Auto-restarting driver for dataset simulation: re-runs pattern_data_sim.py
# in mini-batches until it reports success, surviving crashes along the way.
# Usage: sh ./datasim_runner.sh 3>&1 2>&1 > /path/to/out.txt

dataset_name=my_dataset
config=default_sim_props.yaml
sim_default_bodies=false
batch_size=100

# -- Main loop: keep relaunching until the simulator exits cleanly --
ret_code=1
STARTTIME=$(date +%s)
# FIX: numeric comparison (-ne) instead of string !=, and quoted expansions
# so names containing spaces don't word-split.
while [ "$ret_code" -ne 0 ]   # retry after any failure
do
    if [ "$sim_default_bodies" = "true" ]; then
        python ./pattern_data_sim.py --data "$dataset_name" --default_body --config "$config" -b "$batch_size"
    else
        python ./pattern_data_sim.py --data "$dataset_name" --config "$config" -b "$batch_size"
    fi
    ret_code=$?

    if [ "$ret_code" -eq 0 ]; then
        echo "The execution completed successfully."
    else
        echo "The execution failed with an error (ret_code: $ret_code)."
    fi

    ENDTIME=$(date +%s)
    T=$(($ENDTIME - $STARTTIME))
    echo "It took ${T} seconds to complete this task so far..."
    printf "Pretty format: %02dd %02dh %02dm %02ds\n" "$(($T/86400))" "$(($T/3600%24))" "$(($T/60%60))" "$(($T%60))"
done

View File

@@ -0,0 +1,229 @@
"""
Fitting one sewing pattern design to a set of various body shapes
"""
from datetime import datetime
from pathlib import Path
import yaml
import shutil
import time
import traceback
import argparse
# Custom
from pygarment.data_config import Properties
from assets.garment_programs.meta_garment import MetaGarment
from assets.bodies.body_params import BodyParameters
def get_command_args():
    """Parse the command line controlling design-to-body fitting runs."""
    # https://stackoverflow.com/questions/40001892/reading-named-command-arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'design_file', type=str,
        help='Path to design parameters file to be used to fit to the bodies')
    parser.add_argument(
        '--batch_id', '-b', type=int, default=None,
        help='id of a sampling batch')
    parser.add_argument(
        '--size', '-s', type=int, default=10,
        help='size of a sample')
    parser.add_argument(
        '--name', '-n', type=str, default='design_fit',
        help='Name of the dataset')
    parser.add_argument(
        '--replicate', '-re', type=str, default=None,
        help='Name of the dataset to re-generate. If set, other arguments are ignored')

    parsed = parser.parse_args()
    print('Commandline arguments: ', parsed)
    return parsed
def _create_data_folder(properties, path=Path('')):
""" Create a new directory to put dataset in
& generate appropriate name & update dataset properties
"""
if 'data_folder' in properties: # will this work?
# => regenerating from existing data
properties['name'] = properties['data_folder'] + '_regen'
data_folder = properties['name']
else:
data_folder = properties['name']
# make unique
data_folder += '_' + datetime.now().strftime('%y%m%d-%H-%M-%S')
properties['data_folder'] = data_folder
path_with_dataset = path / data_folder
path_with_dataset.mkdir(parents=True)
default_folder = path_with_dataset / 'default_body'
body_folder = path_with_dataset / 'random_body'
default_folder.mkdir(parents=True, exist_ok=True)
body_folder.mkdir(parents=True, exist_ok=True)
return path_with_dataset, default_folder, body_folder
def _gather_body_options(body_path: Path):
objs_path = body_path / 'measurements'
bodies = []
for file in objs_path.iterdir():
# Get name
b_name = file.stem.split('_')[0]
bodies.append({})
# Get obj options
bodies[-1]['objs'] = dict(
straight=f'meshes/{b_name}_straight.obj',
apart=f'meshes/{b_name}_apart.obj', )
# Get measurements
bodies[-1]['mes'] = f'measurements/{b_name}.yaml'
return bodies
def body_sample(idx, bodies: dict, path: Path, straight=True):
    """Load body parameters for sample *idx*, tagging which mesh pose to use."""
    entry = bodies[idx]
    pose_key = 'straight' if straight else 'apart'
    obj_file = entry['objs'][pose_key]

    body = BodyParameters(path / entry['mes'])
    # Remember which mesh variant this sample corresponds to
    body.params['body_sample'] = (path / obj_file).stem
    return body
def _save_sample(piece, body, new_design, folder, verbose=False):
    """Assemble *piece* and write its pattern, body, and design params to *folder*."""
    pattern = piece.assembly()

    # serialize() returns the actual (sub)folder everything else goes into
    folder = pattern.serialize(
        folder, tag='', to_subfolder=True,
        with_3d=False, with_text=False, view_ids=False)

    body.save(folder)
    with open(Path(folder) / 'design_params.yaml', 'w') as f:
        yaml.dump({'design': new_design}, f, default_flow_style=False, sort_keys=False)

    if verbose:
        print(f'Saved {piece.name}')
def generate(path, properties, sys_paths, verbose=False):
    """Fit one design to the default body and a set of random body shapes.

    Params:
        path : path to folder to put a new dataset into
        properties : an instance of DatasetProperties class with the
            requested properties of the dataset
        sys_paths : system paths configuration (body samples, default bodies)
        verbose : print per-sample progress

    Returns:
        (default_body_path, random_body_path) of the generated dataset folders.
    """
    path = Path(path)
    gen_stats = properties['generator']['stats']
    body_samples_path = Path(sys_paths['body_samples_path']) / properties['body_samples']
    body_options = _gather_body_options(body_samples_path)

    # Create data folder
    data_folder, default_path, body_sample_path = _create_data_folder(properties, path)
    default_sample_data = default_path / 'data'
    body_sample_data = body_sample_path / 'data'

    # Generate data
    start_time = time.time()

    # Load the (fixed) design that all bodies will be fitted with
    with open(properties['design_file'], 'r') as f:
        design = yaml.safe_load(f)['design']

    # On default body
    default_body = BodyParameters(
        Path(sys_paths['bodies_default_path']) / (properties['body_default'] + '.yaml'))
    piece_default = MetaGarment(properties['body_default'], default_body, design)
    _save_sample(piece_default, default_body, design, default_sample_data, verbose=verbose)

    for i in range(properties['size']):
        # Log properties every iteration so partial runs stay inspectable
        properties.serialize(data_folder / 'dataset_properties.yaml')
        # BUGFIX: give `name` a value before the try block -- previously a
        # failure inside body_sample() raised NameError in the except clause,
        # masking the real error.
        name = f'sample_{i}'
        try:
            # On random body shape
            rand_body = body_sample(
                i + properties['body_sample_start_id'],
                body_options,
                body_samples_path,
                straight='Pants' != design['meta']['bottom']['v'])
            name = rand_body.params['body_sample']
            piece_shaped = MetaGarment(name, rand_body, design)

            # Save sample
            _save_sample(piece_shaped, rand_body, design, body_sample_data, verbose=verbose)
        except KeyboardInterrupt:
            # Return immediately with whatever is ready
            return default_path, body_sample_path
        except BaseException as e:
            print(f'{name} failed')
            traceback.print_exc()
            print(e)
            continue

    elapsed = time.time() - start_time
    gen_stats['generation_time'] = f'{elapsed:.3f} s'

    # Final properties log
    properties.serialize(data_folder / 'dataset_properties.yaml')

    return default_path, body_sample_path
def gather_visuals(path, verbose=False):
    """Copy all pattern visualization images under *path* into `patterns_vis`."""
    vis_path = Path(path) / 'patterns_vis'
    vis_path.mkdir(parents=True, exist_ok=True)

    for image in path.rglob("*.png"):
        try:
            shutil.copy(image, vis_path)
        except shutil.SameFileError:
            # Image was gathered on a previous pass
            if verbose:
                print('File {} already exists'.format(image.name))
if __name__ == '__main__':
    system_props = Properties('./system.json')
    args = get_command_args()

    if args.replicate is not None:
        # Re-generate an existing dataset: reuse its recorded properties
        props = Properties(
            Path(system_props['datasets_path']) / args.replicate / 'dataset_properties.yaml',
            True)
    else:
        # Fresh run: assemble dataset properties from command-line arguments
        props = Properties()
        props.set_basic(
            design_file=args.design_file,
            body_default='mean_all',
            body_samples='5000_body_shapes_and_measures',
            body_sample_start_id=0,
            name=f'{args.name}_{args.size}' if not args.batch_id else f'{args.name}_{args.size}_{args.batch_id}',
            size=args.size,
            to_subfolders=True)
        props.set_section_config('generator')

    # Generator
    default_path, body_sample_path = generate(
        system_props['datasets_path'], props, system_props, verbose=False)

    # Gather the pattern images separately
    gather_visuals(default_path)
    gather_visuals(body_sample_path)

    # At the end -- it takes some time to gather the info
    props.add_sys_info()
    print('Data generation completed!')

View File

@@ -0,0 +1,336 @@
"""
Create a random sample of sewing pattern designs and fit each
to a neutral and a random body shape
"""
from datetime import datetime
from pathlib import Path
import yaml
import shutil
import time
import random
import string
import traceback
import argparse
# Custom
from pygarment.data_config import Properties
from assets.garment_programs.meta_garment import MetaGarment, IncorrectElementConfiguration
from assets.bodies.body_params import BodyParameters
import pygarment as pyg
import assets.garment_programs.stats_utils as stats_utils
def get_command_args():
    """Parse the command line controlling random dataset generation."""
    # https://stackoverflow.com/questions/40001892/reading-named-command-arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--batch_id', '-b', type=int, default=None,
        help='id of a sampling batch')
    parser.add_argument(
        '--size', '-s', type=int, default=10,
        help='size of a sample')
    parser.add_argument(
        '--name', '-n', type=str, default='data',
        help='Name of the dataset')
    parser.add_argument(
        '--replicate', '-re', type=str, default=None,
        help='Name of the dataset to re-generate. If set, other arguments are ignored')

    parsed = parser.parse_args()
    print('Commandline arguments: ', parsed)
    return parsed
# Utils
def _create_data_folder(properties, path=Path('')):
""" Create a new directory to put dataset in
& generate appropriate name & update dataset properties
"""
if 'data_folder' in properties: # will this work?
# => regenerating from existing data
properties['name'] = properties['data_folder'] + '_regen'
data_folder = properties['name']
else:
data_folder = properties['name']
# make unique
data_folder += '_' + datetime.now().strftime('%y%m%d-%H-%M-%S')
properties['data_folder'] = data_folder
path_with_dataset = path / data_folder
path_with_dataset.mkdir(parents=True)
default_folder = path_with_dataset / 'default_body'
body_folder = path_with_dataset / 'random_body'
default_folder.mkdir(parents=True, exist_ok=True)
body_folder.mkdir(parents=True, exist_ok=True)
return path_with_dataset, default_folder, body_folder
def gather_body_options(body_path: Path):
    """Map body-sample names to their mesh and measurement file paths.

    Convention: each file in `measurements/` is `<body>....yaml` and the
    matching meshes live in `meshes/` with `_straight` / `_apart` poses.
    """
    bodies = {}
    for mes_file in (body_path / 'measurements').iterdir():
        b_name = mes_file.stem.split('_')[0]
        bodies[b_name] = {
            'objs': {'straight': f'meshes/{b_name}_straight.obj',
                     'apart': f'meshes/{b_name}_apart.obj'},
            'mes': f'measurements/{b_name}.yaml',
        }
    return bodies
def _id_generator(size=10, chars=string.ascii_uppercase + string.digits):
"""Generate a random string of a given size, see
https://stackoverflow.com/questions/2257441/random-string-generation-with-upper-case-letters-and-digits
"""
return ''.join(random.choices(chars, k=size))
def body_sample(bodies: dict, path: Path, straight=True):
    """Pick a random body shape and load its parameters, tagging the mesh pose."""
    # NOTE: keeps random.sample(..., k=1) so seeded runs draw identically
    picked = random.sample(list(bodies.keys()), k=1)
    entry = bodies[picked[0]]
    obj_file = entry['objs']['straight' if straight else 'apart']

    body = BodyParameters(path / entry['mes'])
    body.params['body_sample'] = (path / obj_file).stem
    return body
def _save_sample(piece, body, new_design, folder, verbose=False):
    """Assemble *piece*, save pattern + body + design params; return the pattern."""
    pattern = piece.assembly()

    # serialize() returns the actual (sub)folder the pattern went into
    folder = pattern.serialize(
        folder, tag='', to_subfolder=True,
        with_3d=False, with_text=False, view_ids=False)
    body.save(folder)

    design_file = Path(folder) / 'design_params.yaml'
    with open(design_file, 'w') as f:
        yaml.dump({'design': new_design}, f, default_flow_style=False, sort_keys=False)

    if verbose:
        print(f'Saved {piece.name}')
    return pattern
def has_pants(design):
    """True when the design's bottom element is the Pants garment."""
    return design['meta']['bottom']['v'] == 'Pants'
def gather_visuals(path, verbose=False):
    """Copy every .png under *path* into a single `patterns_vis` folder."""
    vis_path = Path(path) / 'patterns_vis'
    vis_path.mkdir(parents=True, exist_ok=True)

    for png in path.rglob("*.png"):
        try:
            shutil.copy(png, vis_path)
        except shutil.SameFileError:
            # Already collected earlier -- skip quietly
            if verbose:
                print('File {} already exists'.format(png.name))
# Quality filter
def assert_param_combinations(design, filter_belts=True):
    """Check for some known invalid parameter combinations cases.

    Raises IncorrectElementConfiguration for designs known to produce empty,
    singular, or badly-sliding garments, so they can be rejected before the
    (expensive) pattern assembly step.

    Args:
        design: sampled design parameter dictionary.
        filter_belts: also reject belt-only patterns (no upper, no lower).
    """
    upper_name = design['meta']['upper']['v']
    lower_name = design['meta']['bottom']['v']
    belt_name = design['meta']['wb']['v']

    if upper_name:  # No issues with garments that can hang on shoulders
        return

    # Empty patterns and singular belts
    if not lower_name:
        if filter_belts or not belt_name:
            raise IncorrectElementConfiguration('ERROR::IncorrectParams::Empty pattern or singular belt')
        return

    # Cases when lower name is present (and maybe a belt):
    # All pants and pencils are okay
    if lower_name in ['Pants', 'PencilSkirt']:
        return

    # -- Sliding issues --
    # NOTE: Checks are conservative, so some sliding issues might be present nonetheless

    # Skirt2 & skirts on top of it -- uses ruffles and belt is too wide if even present
    if (lower_name == 'Skirt2'
            or lower_name == 'GodetSkirt' and design['godet-skirt']['base']['v'] == 'Skirt2'
            or lower_name == 'SkirtLevels' and design['levels-skirt']['base']['v'] == 'Skirt2'
            ):
        if (design['skirt']['ruffle']['v'] > 1 and (not belt_name or design['waistband']['waist']['v'] > 1.)):
            raise IncorrectElementConfiguration('ERROR::IncorrectParams::Skirt2 ruffles + belt')

    # Flare skirts & skirts on top of it -- no belt + too wide / too long
    flare_skirts = ['SkirtCircle', 'AsymmSkirtCircle', 'SkirtManyPanels']
    if (lower_name in flare_skirts
            or lower_name == 'SkirtLevels' and design['levels-skirt']['base']['v'] in flare_skirts
            ):
        # if Fitted belt of enough width not present -- check if "heavy"
        if (not belt_name
                or design['waistband']['waist']['v'] > 1.
                or design['waistband']['width']['v'] <= 0.25
                ):
            length_param = design['levels-skirt' if lower_name == 'SkirtLevels' else 'flare-skirt']['length']['v']
            # NOTE(review): 'suns' is read from 'flare-skirt' even for SkirtLevels -- confirm intended
            if length_param > 0.5 or design['flare-skirt']['suns']['v'] > 0.75:
                raise IncorrectElementConfiguration('ERROR::IncorrectParams::Flare skirts + belt')
# Generation loop
def generate(path, properties, sys_paths, verbose=False):
    """Generates a synthetic dataset of patterns with given properties.

    Params:
        path : path to folder to put a new dataset into
        properties : an instance of DatasetProperties class with the
            requested properties of the dataset
        sys_paths : system paths configuration (body samples, default bodies)
        verbose : print debug information per sample

    Returns:
        (default_body_path, random_body_path) of the generated dataset.
    """
    path = Path(path)
    gen_config = properties['generator']['config']
    gen_stats = properties['generator']['stats']
    body_samples_path = Path(sys_paths['body_samples_path']) / properties['body_samples']
    body_options = gather_body_options(body_samples_path)

    # Create data folder
    data_folder, default_path, body_sample_path = _create_data_folder(properties, path)
    default_sample_data = default_path / 'data'
    body_sample_data = body_sample_path / 'data'

    # Init random seed (persisted so the run can be replicated)
    if 'random_seed' not in gen_config or gen_config['random_seed'] is None:
        gen_config['random_seed'] = int(time.time())
    print(f'Random seed is {gen_config["random_seed"]}')
    random.seed(gen_config['random_seed'])

    # Generate data
    start_time = time.time()
    default_body = BodyParameters(Path(sys_paths['bodies_default_path']) / (properties['body_default'] + '.yaml'))
    sampler = pyg.DesignSampler(properties['design_file'])
    for i in range(properties['size']):
        # Log properties every time so partial runs remain inspectable
        properties.serialize(data_folder / 'dataset_properties.yaml')

        # Redo sampling until success; cap re-tries to avoid infinite loops
        for _ in range(100):
            new_design = sampler.randomize()
            name = f'rand_{_id_generator()}'
            try:
                if verbose:
                    print(f'{name} saving design params for debug')
                    # NOTE(review): assumes ./Logs exists -- confirm
                    with open(Path('./Logs') / f'{name}_design_params.yaml', 'w') as f:
                        yaml.dump(
                            {'design': new_design},
                            f,
                            default_flow_style=False,
                            sort_keys=False
                        )

                # Preliminary checks
                assert_param_combinations(new_design)

                # On default body
                piece_default = MetaGarment(name, default_body, new_design)
                piece_default.assert_total_length()  # Check final length correctness

                # Straight/apart legs pose
                def_obj_name = properties['body_default']
                if has_pants(new_design):
                    def_obj_name += '_apart'
                default_body.params['body_sample'] = def_obj_name

                # On random body shape
                rand_body = body_sample(
                    body_options,
                    body_samples_path,
                    straight=not has_pants(new_design))
                piece_shaped = MetaGarment(name, rand_body, new_design)
                piece_shaped.assert_total_length()  # Check final length correctness

                if piece_default.is_self_intersecting() or piece_shaped.is_self_intersecting():
                    if verbose:
                        print(f'{piece_default.name} is self-intersecting!!')
                    continue  # Redo the randomization

                # Save samples
                pattern = _save_sample(piece_default, default_body, new_design, default_sample_data, verbose=verbose)
                _save_sample(piece_shaped, rand_body, new_design, body_sample_data, verbose=verbose)

                # BUGFIX: use the `properties` parameter instead of the
                # module-level `props` global -- the original only worked
                # when called from this file's __main__ section.
                stats_utils.count_panels(pattern, properties)
                stats_utils.garment_type(name, new_design, properties)
                break  # Stop generation
            except KeyboardInterrupt:  # Return immediately with whatever is ready
                return default_path, body_sample_path
            except BaseException as e:
                print(f'{name} failed')
                if verbose:
                    traceback.print_exc()
                print(e)
                # Clean up partially-written sample folders
                if (default_sample_data / name).exists():
                    print('Generate::Info::Removed empty folder after unsuccessful sampling attempt', default_sample_data / name)
                    shutil.rmtree(default_sample_data / name, ignore_errors=True)
                if (body_sample_data / name).exists():
                    print('Generate::Info::Removed empty folder after unsuccessful sampling attempt', body_sample_data / name)
                    shutil.rmtree(body_sample_data / name, ignore_errors=True)
                continue

    elapsed = time.time() - start_time
    gen_stats['generation_time'] = f'{elapsed:.3f} s'

    # Log final properties (BUGFIX: stats summary on `properties`, not the global)
    properties.stats_summary()
    properties.serialize(data_folder / 'dataset_properties.yaml')

    return default_path, body_sample_path
if __name__ == '__main__':
    system_props = Properties('./system.json')
    args = get_command_args()

    if args.replicate is not None:
        # Re-generate an existing dataset using its recorded properties
        props = Properties(
            Path(system_props['datasets_path']) / args.replicate / 'dataset_properties.yaml',
            True)
    else:  # New sample
        props = Properties()
        props.set_basic(
            design_file='./assets/design_params/default.yaml',
            body_default='mean_all',
            body_samples='5000_body_shapes_and_measures',
            size=args.size,
            name=f'{args.name}_{args.size}' if not args.batch_id else f'{args.name}_{args.size}_{args.batch_id}',
            to_subfolders=True)
        props.set_section_config('generator')
        # Empty stats containers to be filled during generation
        props.set_section_stats(
            'generator',
            panel_count={},
            garment_types={},
            garment_types_summary=dict(main={}, style={})
        )

    # Generator
    default_path, body_sample_path = generate(
        system_props['datasets_path'], props, system_props, verbose=False)

    # Gather the pattern images separately
    gather_visuals(default_path)
    gather_visuals(body_sample_path)

    print('Data generation completed!')