init_code
This commit is contained in:
1837
lmm_utils/Qwen/qwen2vl_lora_mlp/qwen2vl_modify_modeling_qwen2_vl.py
Normal file
1837
lmm_utils/Qwen/qwen2vl_lora_mlp/qwen2vl_modify_modeling_qwen2_vl.py
Normal file
File diff suppressed because it is too large
Load Diff
0
lmm_utils/__init__.py
Normal file
0
lmm_utils/__init__.py
Normal file
135
lmm_utils/agent.py
Normal file
135
lmm_utils/agent.py
Normal file
@@ -0,0 +1,135 @@
|
||||
|
||||
import copy
|
||||
from lmm_utils.core import MMUA
|
||||
from lmm_utils.projector import input_caption2random_default_cption
|
||||
from lmm_utils.predict_garmentcode_picture import Predictor
|
||||
|
||||
class Agent():
    """Facade tying the multimodal LLM wrapper (MMUA) to the garment
    parameter predictor (Predictor).

    Every public method returns the triple
    ``(gpt_response, gpt_design_params, gpt_design_list)`` where
    ``gpt_response`` is the model's textual reply (or a failure message),
    ``gpt_design_params`` is the design dict produced by
    ``Predictor.caption2yaml`` (or None on failure), and
    ``gpt_design_list`` is the caption list chosen by the model
    (or None on failure).
    """

    def __init__(self, api_key=None, base_url=None, model=None, text_model=None, model_init=True):
        # MMUA wraps the (multimodal) chat-completion endpoints.
        self.mmua = MMUA(api_key=api_key, base_url=base_url, model=model, text_model=text_model)
        # Predictor maps caption lists onto GarmentCode design parameters;
        # model_init=False skips loading the heavy vision model.
        self.dsl_ga = Predictor(model_init=model_init)

    def _call_with_retry(self, call, on_retry, on_fail, fail_response):
        """Run *call* (zero-arg callable returning ``(design_list, response)``).

        On the first exception ``on_retry(exc)`` is invoked and the call is
        attempted once more; if that also fails ``on_fail(exc)`` is invoked
        and ``(None, fail_response, False)`` is returned.  On success the
        result is returned together with ``True``.
        """
        try:
            design_list, response = call()
            return design_list, response, True
        except Exception as exc:
            on_retry(exc)
            try:
                design_list, response = call()
                return design_list, response, True
            except Exception as exc:
                on_fail(exc)
                return None, fail_response, False

    def modify_design(self, design_list, text_prompt, design_params):
        """Apply the textual instruction *text_prompt* to an existing design."""
        gpt_design_params = None
        gpt_design_list, gpt_response, _ = self._call_with_retry(
            lambda: self.mmua.text_forusermodel_gpt(design_list, user_input=text_prompt),
            lambda e: print("modify_fail,trying again.please wait"),
            lambda e: print("modify_fail,pleae input prompt again"),
            "modify_fail,pleae input prompt again",
        )
        if gpt_design_list is not None:
            gpt_design_list = input_caption2random_default_cption(gpt_design_list)
            # Only the captions that are new w.r.t. the current design are
            # re-projected onto the cached design parameters.
            more_caption_list = set(gpt_design_list) - set(design_list)
            gpt_design_params = self.dsl_ga.caption2yaml(
                more_caption_list, modify=True,
                cache_input_design_data=copy.deepcopy(design_params))
        return gpt_response, gpt_design_params, gpt_design_list

    def stress_design(self, design_list, img_url, design_params):
        """Refine an existing design using a reference image (*img_url*)."""
        gpt_design_params = None
        gpt_design_list, gpt_response, _ = self._call_with_retry(
            lambda: self.mmua.picture_caption_gpt_red(img_url, caption=design_list),
            lambda e: print("stress_fail,trying again.please wait"),
            lambda e: print("stress_fail,please input prompt and picture again"),
            "stress_fail,please input prompt and picture again",
        )
        if gpt_design_list is not None:
            gpt_design_list = input_caption2random_default_cption(gpt_design_list)
            more_caption_list = set(gpt_design_list) - set(design_list)
            gpt_design_params = self.dsl_ga.caption2yaml(
                more_caption_list, modify=True,
                cache_input_design_data=copy.deepcopy(design_params))
        return gpt_response, gpt_design_params, gpt_design_list

    def picture_text_design(self, img_url, text_prompt):
        """Create a design from an image plus a textual instruction.

        Stage 1 recognises captions from the picture; stage 2 rewrites them
        according to *text_prompt*.  If stage 2 fails, the stage-1 captions
        are still used to produce design parameters (mirrors the original
        control flow, where the stage-1 list survived a stage-2 failure).
        """
        gpt_design_params = None
        gpt_design_list, gpt_response, recognize_picture_bool = self._call_with_retry(
            lambda: self.mmua.picture_gpt(img_url),
            lambda e: print(f"GPT_UTIL::Generation_FAILURE::Failed for image [I]{img_url} and [T]{text_prompt}"
                            f" due to {str(e)}, trying again..."),
            lambda e: print(f"GPT_UTIL::PARSE_FAILURE::Failed to parse image [I]{img_url} "
                            f"and [T]{text_prompt} again due to {str(e)}, give up."),
            "Generation failed, please try another image or prompt.",
        )
        if recognize_picture_bool:
            captions = gpt_design_list
            authored_list, gpt_response, authored_ok = self._call_with_retry(
                lambda: self.mmua.text_forusermodel_gpt(caption=captions, user_input=text_prompt),
                lambda e: print(f"GPT_UTIL::AUTHORING_FAILURE::Failed to understand instruction {text_prompt} "
                                f"due to {str(e)}, trying again..."),
                lambda e: print(f"GPT_UTIL::AUTHORING_FAILURE::Failed to understand instruction {text_prompt}"
                                f" due to {str(e)}, give up."),
                "Authoring failed, please try another instruction.",
            )
            if authored_ok:
                gpt_design_list = authored_list
        if gpt_design_list is not None:
            gpt_design_list = input_caption2random_default_cption(gpt_design_list)
            gpt_design_params = self.dsl_ga.caption2yaml(gpt_design_list)
        return gpt_response, gpt_design_params, gpt_design_list

    def picture_design(self, img_url):
        """Create a design from an image only."""
        gpt_design_params = None
        gpt_design_list, gpt_response, _ = self._call_with_retry(
            lambda: self.mmua.picture_gpt(img_url),
            lambda e: print(f"GPT_UTIL::PARSE_IMAGE_FAILURE::Failed for image {img_url} due to {str(e)}, trying again..."),
            lambda e: print(f"GPT_UTIL::PARSE_IMAGE_FAILURE::Failed for image {img_url} due to {str(e)}, give up."),
            "Generation failed, please try another image.",
        )
        if gpt_design_list is not None:
            gpt_design_list = input_caption2random_default_cption(gpt_design_list)
            gpt_design_params = self.dsl_ga.caption2yaml(gpt_design_list, image_path=img_url)
        return gpt_response, gpt_design_params, gpt_design_list

    def text_design(self, text_prompt):
        """Create a design from a text prompt only."""
        gpt_design_params = None
        gpt_design_list, gpt_response, _ = self._call_with_retry(
            lambda: self.mmua.text_gpt(text_prompt),
            lambda e: print(f"GPT_UTIL::PARSE_TEXT_FAILURE::Failed to parse {text_prompt} due to {str(e)}, trying again..."),
            lambda e: print(f"GPT_UTIL::PARSE_TEXT_FAILURE::Failed to parse {text_prompt} due to {str(e)}, give up."),
            "Generation failed, please try another prompt.",
        )
        if gpt_design_list is not None:
            gpt_design_list = input_caption2random_default_cption(gpt_design_list)
            gpt_design_params = self.dsl_ga.caption2yaml(gpt_design_list)
        return gpt_response, gpt_design_params, gpt_design_list
|
||||
|
||||
|
||||
|
||||
2260
lmm_utils/core.py
Normal file
2260
lmm_utils/core.py
Normal file
File diff suppressed because it is too large
Load Diff
116
lmm_utils/fintuned_qwen2vl_model.py
Normal file
116
lmm_utils/fintuned_qwen2vl_model.py
Normal file
@@ -0,0 +1,116 @@
|
||||
import torch
|
||||
import torch.nn as nn
|
||||
from transformers import AutoModel, AutoConfig
|
||||
from peft import PeftModel, PeftConfig
|
||||
import torch
|
||||
from datasets import Dataset
|
||||
from modelscope import snapshot_download, AutoTokenizer
|
||||
from qwen_vl_utils import process_vision_info
|
||||
from peft import LoraConfig, TaskType, get_peft_model, PeftModel
|
||||
from transformers import (
|
||||
TrainingArguments,
|
||||
Trainer,
|
||||
DataCollatorForSeq2Seq,
|
||||
# Qwen2VLForConditionalGeneration,
|
||||
AutoProcessor,
|
||||
PreTrainedModel
|
||||
)
|
||||
from lmm_utils.Qwen.qwen2vl_lora_mlp.qwen2vl_modify_modeling_qwen2_vl import Qwen2VLForConditionalGeneration
|
||||
import json
|
||||
# Qwen2VLForConditionalGeneration
|
||||
class LoRAWithMLP(nn.Module):
    """Qwen2-VL backbone with LoRA adapters plus a small MLP regression head.

    The MLP maps the last token's hidden state to a 123-dimensional garment
    parameter vector.  Checkpoints store only the LoRA + MLP weights; the
    frozen base model is always reloaded from disk.
    """

    def __init__(self, base_model_name, mlp_hidden_size=512, num_mlp_layers=2, device='cuda:0'):
        super().__init__()
        self.device = device
        # NOTE(review): the original ignored *base_model_name* and hard-coded
        # "./lmm_utils/Qwen/Qwen2-VL-2B-Instruct/"; every call site passes
        # that same path, so loading from the parameter is backward-compatible.
        self.base_model = Qwen2VLForConditionalGeneration.from_pretrained(
            base_model_name, device_map=device,
            torch_dtype=torch.bfloat16, trust_remote_code=True)
        # Required when gradient checkpointing is enabled: inputs must
        # require grad for the checkpointed segments to backprop.
        self.base_model.enable_input_require_grads()
        lora_config = LoraConfig(
            task_type=TaskType.CAUSAL_LM,
            target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
            inference_mode=False,  # training mode
            r=64,
            lora_alpha=16,
            lora_dropout=0.05,
            bias="none",
        )
        self.lora_model = get_peft_model(self.base_model, lora_config)
        # Regression head: num_mlp_layers x (Linear + ReLU), then a final
        # projection to the 123 garment parameters (bfloat16 to match the
        # backbone's hidden states).
        mlp_layers = []
        input_dim = self.lora_model.config.hidden_size  # inherit backbone width
        for _ in range(num_mlp_layers):
            mlp_layers.append(nn.Linear(input_dim, mlp_hidden_size, dtype=torch.bfloat16))
            mlp_layers.append(nn.ReLU())
            input_dim = mlp_hidden_size
        mlp_layers.append(nn.Linear(mlp_hidden_size, 123, dtype=torch.bfloat16))
        self.mlp = nn.Sequential(*mlp_layers)

    def forward(self, input_ids=None,
                attention_mask=None,
                inputs_embeds=None,
                labels=None,
                output_attentions=None,
                output_hidden_states=None,
                return_dict=None,
                task_ids=None,
                **kwargs,):
        """Run the LoRA-adapted backbone, then regress parameters with the MLP.

        *labels* is accepted for API compatibility but deliberately not
        forwarded: the LM loss is unused, the MLP head provides the objective.

        Returns:
            The MLP output computed from the hidden state of the last token.
        """
        lora_output = self.lora_model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            inputs_embeds=inputs_embeds,
            labels=None,  # LM loss intentionally disabled
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            task_ids=task_ids,
        )
        # Regress from the last token's hidden state.
        mlp_output = self.mlp(lora_output.hidden_states[:, -1])
        return mlp_output

    def _trainable_state_dict(self):
        """State dict restricted to LoRA + MLP weights (frozen base excluded)."""
        return {name: param for name, param in self.state_dict().items()
                if 'base_model' not in name or 'lora' in name}

    def save_checkpoint(self, path, epoch, optimizer, scheduler, best_valid_loss, avg_train_loss):
        """Save a resumable training checkpoint (LoRA + MLP weights only)."""
        checkpoint_dict = {
            'epoch': epoch,
            'model_state_dict': self._trainable_state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
            'best_valid_loss': best_valid_loss,
            'avg_train_loss': avg_train_loss,
            "scheduler_state_dict": scheduler.state_dict(),
        }
        torch.save(checkpoint_dict, path)

    def load_checkpoint(self, path, optimizer, scheduler, device):
        """Load a checkpoint and restore model, optimizer and scheduler state.

        :param path: checkpoint file path
        :param optimizer: optimizer whose state is restored in place
        :param scheduler: LR scheduler whose state is restored in place
        :param device: device used as map_location
        :return: (epoch, best_valid_loss)
        """
        # BUG FIX: the original called torch.load(..., optimizer=None),
        # which is not a valid keyword argument and raises TypeError.
        checkpoint = torch.load(path, map_location=device)
        self.load_state_dict(checkpoint['model_state_dict'], strict=False)
        optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
        scheduler.load_state_dict(checkpoint['scheduler_state_dict'])
        epoch = checkpoint.get('epoch', 0)
        best_valid_loss = checkpoint.get('best_valid_loss', float('inf'))
        return epoch, best_valid_loss

    def save_weights(self, path):
        """Save only the LoRA + MLP weights; the base Qwen2-VL is excluded."""
        torch.save(self._trainable_state_dict(), path)

    def load_weights(self, path):
        """Load LoRA + MLP weights (model must already be initialised).

        strict=False tolerates the missing frozen base-model layers.
        """
        state_dict = torch.load(path, map_location=self.device)
        self.load_state_dict(state_dict, strict=False)
|
||||
153
lmm_utils/helper.py
Normal file
153
lmm_utils/helper.py
Normal file
@@ -0,0 +1,153 @@
|
||||
import os
|
||||
from lmm_utils.core import MMUA
|
||||
import shutil
|
||||
from lmm_utils.projector import input_caption2random_default_cption
|
||||
import json
|
||||
import time
|
||||
from sim_utils import modelandreturn_picture_path, garmentyaml_folder2json_folder
|
||||
from lmm_utils.predict_garmentcode_picture import Predictor
|
||||
|
||||
|
||||
def category2yaml2json(
    category,
    category_data,
    final_json_path=None,
    sim_bool=False,
    id="root",
    api_key=None,
    base_url=None,
    model=None,
    dsl_ga=None,
):
    """Turn user input of a given *category* into a garment-spec JSON file.

    Args:
        category (string): type of input, one of 'picture', 'text', 'list'.
        category_data: list, image path (str) or text (str) matching *category*.
        final_json_path (string): final JSON destination; alongside it the
            caption JSON, the model's reply, the generation time and (when
            sim_bool) the simulated renders and pattern files are written.
        sim_bool (bool): whether cloth simulation is required.
        id (string): a folder user_data/temp_user_folder_for{id}gpt collects
            all data produced for this pattern.
        api_key (string): API key; None falls back to the MMUA default.
        base_url (string): endpoint URL; None falls back to the MMUA default.
        model (string): model name; None falls back to the MMUA default.
        dsl_ga: optional pre-built Predictor (avoids reloading the model).

    Returns:
        json_list (list): caption list selected by the large model.
        gpt_respond (string | None): the model's reply (None for 'list' input).
    """
    mmua_llm = MMUA(api_key=api_key, base_url=base_url, model=model)
    if dsl_ga is None:
        dsl_ga = Predictor()
    gpt_respond = None
    start_time = time.time()
    json_list = []
    picture_path = None
    if category == "picture":  # category_data is the image path
        json_list, gpt_respond = mmua_llm.picture_gpt(category_data)
        picture_path = category_data
    if category == "text":  # category_data is the user's text
        json_list, gpt_respond = mmua_llm.text_gpt(category_data)
    if category == "list":  # category_data already is a caption list
        json_list = category_data
    caption_json_list = json_list
    json_list = input_caption2random_default_cption(json_list)
    dsl_ga.caption_json(caption=json_list, id=id, picture_path=picture_path)
    end_time = time.time()

    pattern_generate_time = end_time - start_time
    print(pattern_generate_time)

    temp_json_file_path = (
        f"user_data/temp_user_folder_for{id}gpt/now_{id}/now_{id}_specification.json"
    )
    if final_json_path is not None:
        final_json_dirname = os.path.dirname(final_json_path)
        os.makedirs(final_json_dirname, exist_ok=True)

        with open(f"{final_json_dirname}/pattern_generate_time.txt", "a") as f:
            f.write(f"pattern_generate_time:{pattern_generate_time:.4f} s\n")

        with open(f"{final_json_dirname}/caption.json", "w") as file:
            json.dump(caption_json_list, file, indent=4)

        with open(f"{final_json_dirname}/gpt_respond.txt", "w") as file:
            # BUG FIX: gpt_respond is None for 'list' input; write an empty
            # string instead of crashing with TypeError.
            file.write(gpt_respond or "")

        print("temp_json_file_path", temp_json_file_path)

        # Copy the generated specification to its final destination,
        # preserving metadata.
        shutil.copy2(temp_json_file_path, final_json_path)

        if category == "text":
            with open(f"{final_json_dirname}/user_input.txt", "w") as file:
                file.write(category_data)
        if category == "picture":
            image_path = category_data
            # Save the original image next to the JSON: same base name as the
            # JSON file, original extension.
            _, picture_file_extension = os.path.splitext(image_path)
            picture_file_path, _ = os.path.splitext(final_json_path)
            picture_path = picture_file_path + picture_file_extension
            shutil.copy2(image_path, picture_path)

        filename = os.path.splitext(os.path.basename(final_json_path))[0]

        if sim_bool:
            modelandreturn_picture_path(temp_json_file_path)
            # Copy the simulated front/back renders next to the JSON.
            model_png = f"user_data/temp_user_folder_for{id}gpt/now_{id}/now_{id}/now_{id}_render_front.png"
            shutil.copy2(model_png, final_json_dirname + "/sim_garment_front.png")
            model_png = f"user_data/temp_user_folder_for{id}gpt/now_{id}/now_{id}/now_{id}_render_back.png"
            shutil.copy2(model_png, final_json_dirname + "/sim_garment_back.png")

            # Copy the pattern PNG, named after the final JSON file.
            # NOTE(review): the scraped source literally contained
            # "/(unknown)_pattern.png" while *filename* was computed but
            # unused — this restores the evident {filename} placeholder;
            # confirm against the original repository.
            pattern_png = (
                f"user_data/temp_user_folder_for{id}gpt/now_{id}/now_{id}_pattern.png"
            )
            shutil.copy2(pattern_png, final_json_dirname + f"/{filename}_pattern.png")

            # Copy the yaml file.
            yaml_file = f"user_data/temp_user_folder_for{id}gpt//now_{id}/now_{id}.yaml"
            shutil.copy2(yaml_file, final_json_dirname + f"/{filename}.yaml")
    if final_json_path is None:
        temp_json_dirname = os.path.dirname(temp_json_file_path)
        with open(f"{temp_json_dirname}/pattern_generate_time.txt", "a") as f:
            f.write(f"pattern_generate_time:{pattern_generate_time:.4f} s\n")

        with open(f"{temp_json_dirname}/caption.json", "w") as file:
            json.dump(caption_json_list, file, indent=4)

        with open(f"{temp_json_dirname}/gpt_respond.txt", "w") as file:
            file.write(gpt_respond or "")

        if category == "text":
            with open(f"{temp_json_dirname}/user_input.txt", "w") as file:
                file.write(category_data)
        if category == "picture":
            image_path = category_data
            # Save the original image next to the temp JSON with the same
            # base name and the original extension.
            _, picture_file_extension = os.path.splitext(image_path)
            picture_file_path, _ = os.path.splitext(temp_json_file_path)
            picture_path = picture_file_path + picture_file_extension
            shutil.copy2(image_path, picture_path)
        if sim_bool:
            modelandreturn_picture_path(temp_json_file_path)

    return json_list, gpt_respond
|
||||
478
lmm_utils/predict_garmentcode_picture.py
Normal file
478
lmm_utils/predict_garmentcode_picture.py
Normal file
@@ -0,0 +1,478 @@
|
||||
from torch.utils.data import DataLoader
|
||||
import torch
|
||||
from modelscope import AutoTokenizer
|
||||
from qwen_vl_utils import process_vision_info
|
||||
from transformers import AutoProcessor
|
||||
import json
|
||||
from lmm_utils.fintuned_qwen2vl_model import LoRAWithMLP
|
||||
from lmm_utils.projector import vec_2_pattern_yaml
|
||||
from lmm_utils.projector import save_design2yaml
|
||||
import yaml
|
||||
import os
|
||||
import numpy as np
|
||||
from lmm_utils.sim_utils import garmentyaml_folder2json_folder
|
||||
from pathlib import Path
|
||||
def load_system_config():
    """Read and parse system.json from the project root.

    The root is resolved as two directory levels above this module.
    """
    project_root = Path(__file__).resolve().parent.parent
    with open(project_root / "system.json", "r") as fh:
        return json.load(fh)
|
||||
|
||||
|
||||
# Project-wide settings (e.g. the 'param_model' checkpoint path) loaded
# once at import time.
_config = load_system_config()
|
||||
|
||||
class Predictor:
|
||||
def __init__(self, model_path="./lmm_utils/Qwen/Qwen2-VL-2B-Instruct", device=None, model_init=True):
    """Load the fine-tuned Qwen2-VL + LoRA + MLP parameter regressor.

    Args:
        model_path: directory of the base Qwen2-VL checkpoint
            (tokenizer/processor are loaded from the same path).
        device: explicit torch device; None auto-selects cuda:0 when
            available, else CPU.
        model_init: when False, skip loading the model entirely
            (lightweight mode — only caption/yaml helpers are usable).
    """
    self.model_init = model_init
    if not model_init:
        return

    if device is None:
        self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    else:
        self.device = device
    # BUG FIX: pass the resolved self.device; the original forwarded the
    # raw *device* argument, which is None when auto-detection was used,
    # so the backbone was first loaded without a device placement.
    self.model = LoRAWithMLP(base_model_name=model_path, mlp_hidden_size=512,
                             num_mlp_layers=2, device=self.device)
    self.tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False, trust_remote_code=True)
    self.processor = AutoProcessor.from_pretrained(model_path)
    # 1 marks slots whose discrete value is decided by the MMUA as the
    # final result; the regression head only fills the complementary
    # (continuous) slots, hence the inversion below.
    mask_list = torch.tensor([1, 1, 1, 1, 0, 0,
                              1,  # fitted
                              0, 0, 0,  # shirt
                              1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,  # collar b_beizer_y
                              1, 1, 1, 0, 1, 0, 0,  # collar
                              1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0,  # sleeve
                              1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
                              1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0,
                              1, 0, 0, 0, 0, 0,  # lfet sleeve _cuff
                              0, 0, 0, 0, 0,  # skirt
                              0, 0, 0, 0, 0, 0, 1, 0, 0, 0,  # flare-skirt
                              1, 0, 0, 1, 0,  # godet-skirt
                              0, 0, 0, 0, 0, 0, 0, 0, 1,  # peicl-skirt
                              1, 1, 1, 0, 0, 0, 0,
                              0, 0, 0, 0, 1, 0, 0, 0, 0, 0]).to(device=self.device)
    # Keep only the regressor-owned (continuous) slots.
    self.mask_list = 1 - mask_list
    # Fine-tuned LoRA + MLP weights; strict=False tolerates the absent
    # frozen base-model entries.
    checkpoint = torch.load(
        _config['param_model'],
        map_location='cpu')
    self.model.load_state_dict(checkpoint['model_state_dict'], strict=False)
    self.model.to(self.device)
|
||||
def predict(self, img_path, caption):
    """Predict the continuous garment-parameter vector for one image.

    Args:
        img_path: path (or URL) of the garment image; resized to 280x280.
        caption: caption list embedded into the text part of the prompt.

    Returns:
        Tensor of parameters with the MMUA-owned (discrete) slots zeroed
        out via self.mask_list.
    """
    messages_list = [[
        {
            "role": "user",
            "content": [
                {
                    "type": "image",
                    "image": f"{img_path}",
                    "resized_height": 280,
                    "resized_width": 280,
                },
                {"type": "text", "text": f"garmentcode Yes:{caption}"},
            ],
        }
    ]]

    # Preprocess exactly like training: chat template + vision features.
    texts = self.processor.apply_chat_template(messages_list, tokenize=False, add_generation_prompt=True)
    image_inputs, video_inputs = process_vision_info(messages_list)
    inputs = self.processor(
        text=texts,
        images=image_inputs,
        videos=video_inputs,
        padding=True,
        return_tensors="pt",
    )
    # labels stays None for inference; non-tensor entries are filtered out
    # before moving the batch to the target device.
    batch = {
        "input_ids": inputs['input_ids'],
        "attention_mask": inputs['attention_mask'],
        "labels": None,
        "pixel_values": inputs['pixel_values'],
        "image_grid_thw": inputs['image_grid_thw'],
    }
    batch = {k: v.to(self.device) for k, v in batch.items() if isinstance(v, torch.Tensor)}
    with torch.no_grad():
        outputs = self.model(**batch)
    # Zero the discrete slots; those values come from the MMUA instead.
    outputs = outputs * self.mask_list
    return outputs
|
||||
|
||||
def caption2yaml(self,caption, yaml_path='assets/design_params/default_text_value.yaml', new_yaml_path=None,
|
||||
return_template_yaml='assets/design_params/default_template.yaml',
|
||||
body_param_files="assets/bodies/mean_all_full.yaml", modify=False, image_path=None,
|
||||
cache_input_design_data=None):
|
||||
'''Map the caption to a yaml file.
|
||||
Args:
|
||||
caption(list): the list generated by the large model.
|
||||
yaml_path (string): The basic YAML file used to fill in the value represented by the caption into the YAML file,
|
||||
which is a specially marked YAML file.
|
||||
new_yaml_path (string): Save this yaml file to a new path
|
||||
modify=False and text_bool=False, which are used to control the processing of the length of the lower body.
|
||||
Return :
|
||||
design(dict): the data of the processed YAML file
|
||||
'''
|
||||
|
||||
with open(body_param_files, 'r', encoding='utf-8') as yaml_file:
|
||||
body_param = yaml.safe_load(yaml_file)
|
||||
body = body_param['body']
|
||||
with open(yaml_path, 'r', encoding='utf-8') as yaml_file:
|
||||
default_yaml = yaml.safe_load(yaml_file)
|
||||
design = default_yaml['design']
|
||||
with open(return_template_yaml, 'r', encoding='utf-8') as yaml_file:
|
||||
default_template_yaml = yaml.safe_load(yaml_file)
|
||||
design_template = default_template_yaml['design']
|
||||
if cache_input_design_data is not None:
|
||||
design_template = cache_input_design_data
|
||||
|
||||
for design_item in caption:
|
||||
design_item_list = design_item.split('__')
|
||||
temp = design
|
||||
template = design_template
|
||||
for i in range(len(design_item_list)):
|
||||
|
||||
if i == (len(design_item_list) - 1):
|
||||
final_text = design_item_list[i]
|
||||
if final_text.isdigit():
|
||||
final_text = int(final_text)
|
||||
if final_text == 'None':
|
||||
final_text = None
|
||||
if final_text == 'True':
|
||||
final_text = True
|
||||
if final_text == 'False':
|
||||
final_text = False
|
||||
if isinstance(temp['range'][0], dict):
|
||||
for item in temp['range']:
|
||||
if final_text in item:
|
||||
final_text = item[final_text]
|
||||
|
||||
temp['v'] = final_text
|
||||
template['v'] = final_text
|
||||
else:
|
||||
try:
|
||||
temp = temp[design_item_list[i]]
|
||||
template = template[design_item_list[i]]
|
||||
except Exception as e:
|
||||
print(temp)
|
||||
|
||||
design = design_template
|
||||
if 'meta__upper__None' in caption:
|
||||
design['skirt']['rise']['v'] = 0.5
|
||||
design['flare-skirt']['rise']['v'] = 0.5
|
||||
design['pencil-skirt']['rise']['v'] = 0.5
|
||||
design['levels-skirt']['rise']['v'] = 0.5
|
||||
|
||||
if "meta__bottom__SkirtManyPanels" in caption:
|
||||
if "flare-skirt__length__micro" in caption:
|
||||
design['flare-skirt']['length']['v'] = 0.15
|
||||
if "flare-skirt__length__mini" in caption:
|
||||
design['flare-skirt']['length']['v'] = 0.2
|
||||
if "flare-skirt__length__above-knee" in caption:
|
||||
design['flare-skirt']['length']['v'] = 0.3
|
||||
|
||||
if "flare-skirt__length__knee-length" in caption:
|
||||
design['flare-skirt']['length']['v'] = 0.35
|
||||
if "flare-skirt__length__midi" in caption:
|
||||
design['flare-skirt']['length']['v'] = 0.45
|
||||
if "flare-skirt__length__floor-length" in caption:
|
||||
design['flare-skirt']['length']['v'] = 0.6
|
||||
|
||||
if not modify:
|
||||
shirt_length = 0
|
||||
waist_length = 0
|
||||
if 'meta__upper__Shirt' in caption:
|
||||
front_frac = (body['bust'] - body['back_width']) / 2 / body['bust']
|
||||
fb_diff = (front_frac - (0.5 - front_frac)) * body['bust']
|
||||
sh_tan = float(np.tan(np.deg2rad(body['_shoulder_incl'])))
|
||||
shirt_length = design['shirt']['length']['v'] * body['waist_line'] - sh_tan * fb_diff
|
||||
|
||||
if 'meta__upper__FittedShirt' in caption:
|
||||
m_bust = body['bust']
|
||||
front_frac = (body['bust'] - body['back_width']) / 2 / body['bust']
|
||||
sh_tan = float(np.tan(np.deg2rad(body['_shoulder_incl'])))
|
||||
width = front_frac * m_bust
|
||||
adjustment = sh_tan * (width - body['shoulder_w'] / 2)
|
||||
fitted_shirt_length = body['waist_over_bust_line'] - adjustment
|
||||
shirt_length = fitted_shirt_length
|
||||
|
||||
if "meta__wb__None" not in caption:
|
||||
waist_length = design['waistband']['width']['v'] * body["hips_line"]
|
||||
if ("meta__bottom__None" not in caption and 'meta__bottom__Pants' not in caption
|
||||
and 'meta__upper__None' not in caption and "meta__connected__True" in caption):
|
||||
if "meta__bottom__Skirt2" in caption:
|
||||
if "skirt__length__micro" in caption:
|
||||
all_length = 63.99739360159472
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
elif "skirt__length__mini" in caption:
|
||||
all_length = 70.38289360159473
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__above-knee" in caption:
|
||||
all_length = 83.15389360159473
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__knee-length" in caption:
|
||||
all_length = 98.05339360159472
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__midi" in caption:
|
||||
all_length = 108.69589360159473
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__floor-length" in caption:
|
||||
all_length = 121.46689360159472
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif ("meta__bottom__SkirtCircle" in caption or "meta__bottom__SkirtManyPanels" in caption
|
||||
or "meta__bottom__AsymmSkirtCircle" in caption):
|
||||
if "flare-skirt__length__micro" in caption:
|
||||
all_length = 63.99739360159472
|
||||
design['flare-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['flare-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "flare-skirt__length__mini" in caption:
|
||||
all_length = 70.38289360159473
|
||||
design['flare-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['flare-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "flare-skirt__length__above-knee" in caption:
|
||||
all_length = 83.15389360159473
|
||||
design['flare-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['flare-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "flare-skirt__length__knee-length" in caption:
|
||||
all_length = 98.05339360159472
|
||||
design['flare-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['flare-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "flare-skirt__length__midi" in caption:
|
||||
all_length = 108.69589360159473
|
||||
design['flare-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['flare-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "flare-skirt__length__floor-length" in caption:
|
||||
all_length = 121.46689360159472
|
||||
design['flare-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['flare-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
elif "meta__bottom__GodetSkirt" in caption:
|
||||
if "godet-skirt__base__Skirt2" in caption:
|
||||
if "skirt__length__micro" in caption:
|
||||
all_length = 63.99739360159472
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__mini" in caption:
|
||||
all_length = 70.38289360159473
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__above-knee" in caption:
|
||||
all_length = 83.15389360159473
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__knee-length" in caption:
|
||||
all_length = 98.05339360159472
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__midi" in caption:
|
||||
all_length = 108.69589360159473
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "skirt__length__floor-length" in caption:
|
||||
all_length = 121.46689360159472
|
||||
design['skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['skirt']['rise']['v'] * body["hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "godet-skirt__base__PencilSkirt" in caption:
|
||||
if "pencil-skirt__length__micro" in caption:
|
||||
all_length = 63.99739360159472
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__mini" in caption:
|
||||
all_length = 70.38289360159473
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__above-knee" in caption:
|
||||
all_length = 83.15389360159473
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__knee-length" in caption:
|
||||
all_length = 98.05339360159472
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__midi" in caption:
|
||||
all_length = 108.69589360159473
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__floor-length" in caption:
|
||||
all_length = 121.46689360159472
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "meta__bottom__PencilSkirt" in caption:
|
||||
|
||||
if "pencil-skirt__length__micro" in caption:
|
||||
all_length = 63.99739360159472
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__mini" in caption:
|
||||
all_length = 70.38289360159473
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
|
||||
elif "pencil-skirt__length__above-knee" in caption:
|
||||
all_length = 83.15389360159473
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length -
|
||||
design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__knee-length" in caption:
|
||||
all_length = 98.05339360159472
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length -
|
||||
design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__midi" in caption:
|
||||
all_length = 108.69589360159473
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length -
|
||||
design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "pencil-skirt__length__floor-length" in caption:
|
||||
all_length = 121.46689360159472
|
||||
design['pencil-skirt']['length']['v'] = (all_length - shirt_length - waist_length -
|
||||
design['pencil-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "meta__bottom__SkirtLevels" in caption:
|
||||
if "levels-skirt__length__micro" in caption:
|
||||
all_length = 63.99739360159472
|
||||
design['levels-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['levels-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "levels-skirt__length__mini" in caption:
|
||||
all_length = 63.99739360159472
|
||||
design['levels-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['levels-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "levels-skirt__length__above-knee" in caption:
|
||||
all_length = 83.15389360159473
|
||||
design['levels-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['levels-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "levels-skirt__length__knee-length" in caption:
|
||||
all_length = 98.05339360159472
|
||||
design['levels-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['levels-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "levels-skirt__length__midi" in caption:
|
||||
all_length = 108.69589360159473
|
||||
design['levels-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['levels-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
elif "levels-skirt__length__floor-length" in caption:
|
||||
all_length = 121.46689360159472
|
||||
design['levels-skirt']['length']['v'] = (all_length - shirt_length - waist_length
|
||||
- design['levels-skirt']['rise']['v'] * body[
|
||||
"hips_line"]) / \
|
||||
body["_leg_length"]
|
||||
|
||||
if image_path and os.path.exists(image_path) and self.model_init:
|
||||
temp_cwd = os.getcwd()
|
||||
image_path = temp_cwd + '/' + image_path
|
||||
param_vec = self.predict(img_path=image_path, caption=caption)
|
||||
param_vec = param_vec[0].float().cpu().numpy()
|
||||
design = vec_2_pattern_yaml(design, param_vec, self.mask_list.tolist())
|
||||
|
||||
if new_yaml_path is not None: save_design2yaml(design, new_yaml_path)
|
||||
return design
|
||||
|
||||
def caption_json(self, caption, id="root", picture_path=None, dsl_ga=None):
    """Turn a caption list into a design YAML and then into a JSON pattern.

    Args:
        caption (list): caption tokens describing the garment design.
        id (str): identifier used to name the per-user temp folder and files.
        picture_path (str | None): optional reference image forwarded to
            ``caption2yaml`` as ``image_path``.
        dsl_ga: unused here; kept for signature compatibility with callers.
    """
    # All intermediate artifacts live in a per-id scratch folder.
    work_dir = f"user_data/temp_user_folder_for{id}gpt"
    os.makedirs(work_dir, exist_ok=True)
    # Step 1: materialize the caption as a design YAML inside the work folder.
    self.caption2yaml(
        caption=caption,
        new_yaml_path=f"{work_dir}/now_{id}.yaml",
        yaml_path="assets/design_params/default_text_value.yaml",
        image_path=picture_path,
    )
    # Step 2: for every YAML in the input folder, the converter creates a
    # sub-folder (named after the YAML file) under the output folder and
    # writes a JSON file with the same stem as the final result — here the
    # now_<id> artifact inside the same work folder.
    garmentyaml_folder2json_folder(
        input_folder=work_dir,
        output_folder=work_dir,
    )
|
||||
785
lmm_utils/projector.py
Normal file
785
lmm_utils/projector.py
Normal file
@@ -0,0 +1,785 @@
|
||||
import os
|
||||
import uuid
|
||||
import json
|
||||
import yaml
|
||||
import random
|
||||
import numpy as np
|
||||
from collections import OrderedDict
|
||||
|
||||
# from predict_garmentcode_picture import predict
|
||||
# Separator used to join hierarchical design-parameter path segments into a
# single caption token (e.g. "meta__upper__Shirt" = meta / upper / Shirt).
CONNECT_TAG = '__'
|
||||
|
||||
all_text_dict={
|
||||
"meta__upper": [
|
||||
"meta__upper__FittedShirt",
|
||||
"meta__upper__Shirt",
|
||||
"meta__upper__None"
|
||||
],
|
||||
"meta__wb": [
|
||||
"meta__wb__StraightWB",
|
||||
"meta__wb__FittedWB",
|
||||
"meta__wb__None"
|
||||
],
|
||||
"meta__bottom": [
|
||||
"meta__bottom__SkirtCircle",
|
||||
"meta__bottom__AsymmSkirtCircle",
|
||||
"meta__bottom__GodetSkirt",
|
||||
"meta__bottom__Pants",
|
||||
"meta__bottom__Skirt2",
|
||||
"meta__bottom__SkirtManyPanels",
|
||||
"meta__bottom__PencilSkirt",
|
||||
"meta__bottom__SkirtLevels",
|
||||
"meta__bottom__None"
|
||||
],
|
||||
"meta__connected": [
|
||||
"meta__connected__True",
|
||||
"meta__connected__False"
|
||||
],
|
||||
"waistband__waist": [
|
||||
"waistband__waist__fitted",
|
||||
"waistband__waist__slightly-loose",
|
||||
"waistband__waist__loose"
|
||||
],
|
||||
"waistband__width": [
|
||||
"waistband__width__narrow",
|
||||
"waistband__width__medium",
|
||||
"waistband__width__wide"
|
||||
],
|
||||
"fitted_shirt__strapless": [
|
||||
"fitted_shirt__strapless__True",
|
||||
"fitted_shirt__strapless__False"
|
||||
],
|
||||
"shirt__length": [
|
||||
"shirt__length__super-cropped",
|
||||
"shirt__length__regular",
|
||||
"shirt__length__long"
|
||||
],
|
||||
"shirt__width": [
|
||||
"shirt__width__normal",
|
||||
"shirt__width__relaxed"
|
||||
],
|
||||
"shirt__flare": [
|
||||
"shirt__flare__tight",
|
||||
"shirt__flare__straight",
|
||||
"shirt__flare__flared",
|
||||
"shirt__flare__very-flared"
|
||||
],
|
||||
"collar__f_collar": [
|
||||
"collar__f_collar__CircleNeckHalf",
|
||||
"collar__f_collar__CurvyNeckHalf",
|
||||
"collar__f_collar__VNeckHalf",
|
||||
"collar__f_collar__SquareNeckHalf",
|
||||
"collar__f_collar__TrapezoidNeckHalf",
|
||||
"collar__f_collar__CircleArcNeckHalf",
|
||||
"collar__f_collar__Bezier2NeckHalf"
|
||||
],
|
||||
"collar__b_collar": [
|
||||
"collar__b_collar__CircleNeckHalf",
|
||||
"collar__b_collar__CurvyNeckHalf",
|
||||
"collar__b_collar__VNeckHalf",
|
||||
"collar__b_collar__SquareNeckHalf",
|
||||
"collar__b_collar__TrapezoidNeckHalf",
|
||||
"collar__b_collar__CircleArcNeckHalf",
|
||||
"collar__b_collar__Bezier2NeckHalf"
|
||||
],
|
||||
"collar__width": [
|
||||
"collar__width__very-narrow",
|
||||
"collar__width__medium",
|
||||
"collar__width__wide"
|
||||
],
|
||||
"collar__fc_depth": [
|
||||
"collar__fc_depth__shallow",
|
||||
"collar__fc_depth__medium",
|
||||
"collar__fc_depth__deep"
|
||||
],
|
||||
"collar__bc_depth": [
|
||||
"collar__bc_depth__shallow",
|
||||
"collar__bc_depth__medium",
|
||||
"collar__bc_depth__deep"
|
||||
],
|
||||
"collar__fc_angle": [
|
||||
"collar__fc_angle__acute",
|
||||
"collar__fc_angle__standard",
|
||||
"collar__fc_angle__obtuse"
|
||||
],
|
||||
"collar__bc_angle": [
|
||||
"collar__bc_angle__acute",
|
||||
"collar__bc_angle__standard",
|
||||
"collar__bc_angle__obtuse"
|
||||
],
|
||||
"collar__f_bezier_x": [
|
||||
"collar__f_bezier_x__left",
|
||||
"collar__f_bezier_x__center",
|
||||
"collar__f_bezier_x__right"
|
||||
],
|
||||
"collar__f_bezier_y": [
|
||||
"collar__f_bezier_y__top",
|
||||
"collar__f_bezier_y__center",
|
||||
"collar__f_bezier_y__bottom"
|
||||
],
|
||||
"collar__b_bezier_x": [
|
||||
"collar__b_bezier_x__left",
|
||||
"collar__b_bezier_x__center",
|
||||
"collar__b_bezier_x__right"
|
||||
],
|
||||
"collar__b_bezier_y": [
|
||||
"collar__b_bezier_y__top",
|
||||
"collar__b_bezier_y__center",
|
||||
"collar__b_bezier_y__bottom"
|
||||
],
|
||||
"collar__f_flip_curve": [
|
||||
"collar__f_flip_curve__True",
|
||||
"collar__f_flip_curve__False"
|
||||
],
|
||||
"collar__b_flip_curve": [
|
||||
"collar__b_flip_curve__True",
|
||||
"collar__b_flip_curve__False"
|
||||
],
|
||||
"collar__component__style": [
|
||||
"collar__component__style__Turtle",
|
||||
"collar__component__style__SimpleLapel",
|
||||
"collar__component__style__Hood2Panels",
|
||||
"collar__component__style__None"
|
||||
],
|
||||
"collar__component__depth": [
|
||||
"collar__component__depth__shallow",
|
||||
"collar__component__depth__medium",
|
||||
"collar__component__depth__deep"
|
||||
],
|
||||
"collar__component__lapel_standing": [
|
||||
"collar__component__lapel_standing__True",
|
||||
"collar__component__lapel_standing__False"
|
||||
],
|
||||
"collar__component__hood_depth": [
|
||||
"collar__component__hood_depth__shallow",
|
||||
"collar__component__hood_depth__medium",
|
||||
"collar__component__hood_depth__deep"
|
||||
],
|
||||
"collar__component__hood_length": [
|
||||
"collar__component__hood_length__short",
|
||||
"collar__component__hood_length__medium",
|
||||
"collar__component__hood_length__long"
|
||||
],
|
||||
"sleeve__sleeveless": [
|
||||
"sleeve__sleeveless__True",
|
||||
"sleeve__sleeveless__False"
|
||||
],
|
||||
"sleeve__armhole_shape": [
|
||||
"sleeve__armhole_shape__ArmholeSquare",
|
||||
"sleeve__armhole_shape__ArmholeAngle",
|
||||
"sleeve__armhole_shape__ArmholeCurve"
|
||||
],
|
||||
"sleeve__length": [
|
||||
"sleeve__length__short",
|
||||
"sleeve__length__half",
|
||||
"sleeve__length__three-quarter",
|
||||
"sleeve__length__long",
|
||||
"sleeve__length__full"
|
||||
],
|
||||
"sleeve__connecting_width": [
|
||||
"sleeve__connecting_width__narrow",
|
||||
"sleeve__connecting_width__medium",
|
||||
"sleeve__connecting_width__loose",
|
||||
"sleeve__connecting_width__very-loose"
|
||||
],
|
||||
"sleeve__end_width": [
|
||||
"sleeve__end_width__closing",
|
||||
"sleeve__end_width__straight",
|
||||
"sleeve__end_width__opening"
|
||||
],
|
||||
"sleeve__sleeve_angle": [
|
||||
"sleeve__sleeve_angle__small",
|
||||
"sleeve__sleeve_angle__medium",
|
||||
"sleeve__sleeve_angle__large"
|
||||
],
|
||||
"sleeve__opening_dir_mix": [
|
||||
"sleeve__opening_dir_mix__negative-twist",
|
||||
"sleeve__opening_dir_mix__standard",
|
||||
"sleeve__opening_dir_mix__positive-twist"
|
||||
],
|
||||
"sleeve__standing_shoulder": [
|
||||
"sleeve__standing_shoulder__True",
|
||||
"sleeve__standing_shoulder__False"
|
||||
],
|
||||
"sleeve__standing_shoulder_len": [
|
||||
"sleeve__standing_shoulder_len__short",
|
||||
"sleeve__standing_shoulder_len__medium",
|
||||
"sleeve__standing_shoulder_len__long"
|
||||
],
|
||||
"sleeve__connect_ruffle": [
|
||||
"sleeve__connect_ruffle__none",
|
||||
"sleeve__connect_ruffle__some",
|
||||
"sleeve__connect_ruffle__obvious"
|
||||
],
|
||||
"sleeve__smoothing_coeff": [
|
||||
"sleeve__smoothing_coeff__very-smooth",
|
||||
"sleeve__smoothing_coeff__moderate",
|
||||
"sleeve__smoothing_coeff__less-smooth"
|
||||
],
|
||||
"sleeve__cuff__type": [
|
||||
"sleeve__cuff__type__CuffBand",
|
||||
"sleeve__cuff__type__CuffSkirt",
|
||||
"sleeve__cuff__type__CuffBandSkirt",
|
||||
"sleeve__cuff__type__None"
|
||||
],
|
||||
"sleeve__cuff__top_ruffle": [
|
||||
"sleeve__cuff__top_ruffle__straight",
|
||||
"sleeve__cuff__top_ruffle__tapered",
|
||||
"sleeve__cuff__top_ruffle__very_tapered"
|
||||
],
|
||||
"sleeve__cuff__cuff_len": [
|
||||
"sleeve__cuff__cuff_len__short",
|
||||
"sleeve__cuff__cuff_len__medium",
|
||||
"sleeve__cuff__cuff_len__long"
|
||||
],
|
||||
"sleeve__cuff__skirt_fraction": [
|
||||
"sleeve__cuff__skirt_fraction__small",
|
||||
"sleeve__cuff__skirt_fraction__medium",
|
||||
"sleeve__cuff__skirt_fraction__large"
|
||||
],
|
||||
"sleeve__cuff__skirt_flare": [
|
||||
"sleeve__cuff__skirt_flare__slight",
|
||||
"sleeve__cuff__skirt_flare__moderate",
|
||||
"sleeve__cuff__skirt_flare__significant"
|
||||
],
|
||||
"sleeve__cuff__skirt_ruffle": [
|
||||
"sleeve__cuff__skirt_ruffle__none",
|
||||
"sleeve__cuff__skirt_ruffle__some"
|
||||
],
|
||||
"left__enable_asym": [
|
||||
"left__enable_asym__True",
|
||||
"left__enable_asym__False"
|
||||
],
|
||||
"left__fitted_shirt__strapless": [
|
||||
"left__fitted_shirt__strapless__True",
|
||||
"left__fitted_shirt__strapless__False"
|
||||
],
|
||||
"left__shirt__width": [
|
||||
"left__shirt__width__normal",
|
||||
"left__shirt__width__relaxed"
|
||||
],
|
||||
"left__shirt__flare": [
|
||||
"left__shirt__flare__tight",
|
||||
"left__shirt__flare__straight",
|
||||
"left__shirt__flare__flared",
|
||||
"left__shirt__flare__very-flared"
|
||||
],
|
||||
"left__collar__f_collar": [
|
||||
"left__collar__f_collar__CircleNeckHalf",
|
||||
"left__collar__f_collar__CurvyNeckHalf",
|
||||
"left__collar__f_collar__VNeckHalf",
|
||||
"left__collar__f_collar__SquareNeckHalf",
|
||||
"left__collar__f_collar__TrapezoidNeckHalf",
|
||||
"left__collar__f_collar__CircleArcNeckHalf",
|
||||
"left__collar__f_collar__Bezier2NeckHalf"
|
||||
],
|
||||
"left__collar__b_collar": [
|
||||
"left__collar__b_collar__CircleNeckHalf",
|
||||
"left__collar__b_collar__CurvyNeckHalf",
|
||||
"left__collar__b_collar__VNeckHalf",
|
||||
"left__collar__b_collar__SquareNeckHalf",
|
||||
"left__collar__b_collar__TrapezoidNeckHalf",
|
||||
"left__collar__b_collar__CircleArcNeckHalf",
|
||||
"left__collar__b_collar__Bezier2NeckHalf"
|
||||
],
|
||||
"left__collar__width": [
|
||||
"left__collar__width__narrow",
|
||||
"left__collar__width__medium",
|
||||
"left__collar__width__wide"
|
||||
],
|
||||
"left__collar__fc_angle": [
|
||||
"left__collar__fc_angle__acute",
|
||||
"left__collar__fc_angle__standard",
|
||||
"left__collar__fc_angle__obtuse"
|
||||
],
|
||||
"left__collar__bc_angle": [
|
||||
"left__collar__bc_angle__acute",
|
||||
"left__collar__bc_angle__standard",
|
||||
"left__collar__bc_angle__obtuse"
|
||||
],
|
||||
"left__collar__f_bezier_x": [
|
||||
"left__collar__f_bezier_x__left",
|
||||
"left__collar__f_bezier_x__center",
|
||||
"left__collar__f_bezier_x__right"
|
||||
],
|
||||
"left__collar__f_bezier_y": [
|
||||
"left__collar__f_bezier_y__top",
|
||||
"left__collar__f_bezier_y__center",
|
||||
"left__collar__f_bezier_y__bottom"
|
||||
],
|
||||
"left__collar__b_bezier_x": [
|
||||
"left__collar__b_bezier_x__left",
|
||||
"left__collar__b_bezier_x__center",
|
||||
"left__collar__b_bezier_x__right"
|
||||
],
|
||||
"left__collar__b_bezier_y": [
|
||||
"left__collar__b_bezier_y__top",
|
||||
"left__collar__b_bezier_y__center",
|
||||
"left__collar__b_bezier_y__bottom"
|
||||
],
|
||||
"left__collar__f_flip_curve": [
|
||||
"left__collar__f_flip_curve__True",
|
||||
"left__collar__f_flip_curve__False"
|
||||
],
|
||||
"left__collar__b_flip_curve": [
|
||||
"left__collar__b_flip_curve__True",
|
||||
"left__collar__b_flip_curve__False"
|
||||
],
|
||||
"left__sleeve__sleeveless": [
|
||||
"left__sleeve__sleeveless__True",
|
||||
"left__sleeve__sleeveless__False"
|
||||
],
|
||||
"left__sleeve__armhole_shape": [
|
||||
"left__sleeve__armhole_shape__ArmholeSquare",
|
||||
"left__sleeve__armhole_shape__ArmholeAngle",
|
||||
"left__sleeve__armhole_shape__ArmholeCurve"
|
||||
],
|
||||
"left__sleeve__length": [
|
||||
"left__sleeve__length__short",
|
||||
"left__sleeve__length__half",
|
||||
"left__sleeve__length__three-quarter",
|
||||
"left__sleeve__length__long",
|
||||
"left__sleeve__length__full"
|
||||
],
|
||||
"left__sleeve__connecting_width": [
|
||||
"left__sleeve__connecting_width__narrow",
|
||||
"left__sleeve__connecting_width__medium",
|
||||
"left__sleeve__connecting_width__loose",
|
||||
"left__sleeve__connecting_width__very-loose"
|
||||
],
|
||||
"left__sleeve__end_width": [
|
||||
"left__sleeve__end_width__closing",
|
||||
"left__sleeve__end_width__straight",
|
||||
"left__sleeve__end_width__opening"
|
||||
],
|
||||
"left__sleeve__sleeve_angle": [
|
||||
"left__sleeve__sleeve_angle__small",
|
||||
"left__sleeve__sleeve_angle__medium",
|
||||
"left__sleeve__sleeve_angle__large"
|
||||
],
|
||||
"left__sleeve__opening_dir_mix": [
|
||||
"left__sleeve__opening_dir_mix__negative-twist",
|
||||
"left__sleeve__opening_dir_mix__standard",
|
||||
"left__sleeve__opening_dir_mix__positive-twist"
|
||||
],
|
||||
"left__sleeve__standing_shoulder": [
|
||||
"left__sleeve__standing_shoulder__True",
|
||||
"left__sleeve__standing_shoulder__False"
|
||||
],
|
||||
"left__sleeve__standing_shoulder_len": [
|
||||
"left__sleeve__standing_shoulder_len__short",
|
||||
"left__sleeve__standing_shoulder_len__medium",
|
||||
"left__sleeve__standing_shoulder_len__long"
|
||||
],
|
||||
"left__sleeve__connect_ruffle": [
|
||||
"left__sleeve__connect_ruffle__none",
|
||||
"left__sleeve__connect_ruffle__some",
|
||||
"left__sleeve__connect_ruffle__obvious"
|
||||
],
|
||||
"left__sleeve__smoothing_coeff": [
|
||||
"left__sleeve__smoothing_coeff__very-smooth",
|
||||
"left__sleeve__smoothing_coeff__moderate",
|
||||
"left__sleeve__smoothing_coeff__less-smooth"
|
||||
],
|
||||
"left__sleeve__cuff__type": [
|
||||
"left__sleeve__cuff__type__CuffBand",
|
||||
"left__sleeve__cuff__type__CuffSkirt",
|
||||
"left__sleeve__cuff__type__CuffBandSkirt",
|
||||
"left__sleeve__cuff__type__None"
|
||||
],
|
||||
"left__sleeve__cuff__top_ruffle": [
|
||||
"left__sleeve__cuff__top_ruffle__none",
|
||||
"left__sleeve__cuff__top_ruffle__moderate",
|
||||
"left__sleeve__cuff__top_ruffle__obvious"
|
||||
],
|
||||
"left__sleeve__cuff__cuff_len": [
|
||||
"left__sleeve__cuff__cuff_len__short",
|
||||
"left__sleeve__cuff__cuff_len__medium",
|
||||
"left__sleeve__cuff__cuff_len__long"
|
||||
],
|
||||
"left__sleeve__cuff__skirt_fraction": [
|
||||
"left__sleeve__cuff__skirt_fraction__small",
|
||||
"left__sleeve__cuff__skirt_fraction__medium",
|
||||
"left__sleeve__cuff__skirt_fraction__large"
|
||||
],
|
||||
"left__sleeve__cuff__skirt_flare": [
|
||||
"left__sleeve__cuff__skirt_flare__slight",
|
||||
"left__sleeve__cuff__skirt_flare__moderate",
|
||||
"left__sleeve__cuff__skirt_flare__significant"
|
||||
],
|
||||
"left__sleeve__cuff__skirt_ruffle": [
|
||||
"left__sleeve__cuff__skirt_ruffle__none",
|
||||
"left__sleeve__cuff__skirt_ruffle__some"
|
||||
],
|
||||
"skirt__length": [
|
||||
"skirt__length__micro",
|
||||
"skirt__length__mini",
|
||||
"skirt__length__above-knee",
|
||||
"skirt__length__knee-length",
|
||||
"skirt__length__midi",
|
||||
"skirt__length__floor-length"
|
||||
],
|
||||
"skirt__rise": [
|
||||
"skirt__rise__low",
|
||||
"skirt__rise__mid",
|
||||
"skirt__rise__high"
|
||||
],
|
||||
"skirt__ruffle": [
|
||||
"skirt__ruffle__none",
|
||||
"skirt__ruffle__moderate",
|
||||
"skirt__ruffle__rich"
|
||||
],
|
||||
"skirt__bottom_cut": [
|
||||
"skirt__bottom_cut__none",
|
||||
"skirt__bottom_cut__shallow",
|
||||
"skirt__bottom_cut__deep"
|
||||
],
|
||||
"skirt__flare": [
|
||||
"skirt__flare__small",
|
||||
"skirt__flare__medium",
|
||||
"skirt__flare__large"
|
||||
],
|
||||
"flare-skirt__length": [
|
||||
"flare-skirt__length__micro",
|
||||
"flare-skirt__length__mini",
|
||||
"flare-skirt__length__above-knee",
|
||||
"flare-skirt__length__knee-length",
|
||||
"flare-skirt__length__midi",
|
||||
"flare-skirt__length__floor-length"
|
||||
],
|
||||
"flare-skirt__rise": [
|
||||
"flare-skirt__rise__low",
|
||||
"flare-skirt__rise__mid",
|
||||
"flare-skirt__rise__high"
|
||||
],
|
||||
"flare-skirt__suns": [
|
||||
"flare-skirt__suns__slight",
|
||||
"flare-skirt__suns__moderate",
|
||||
"flare-skirt__suns__significant"
|
||||
],
|
||||
"flare-skirt__skirt-many-panels__n_panels": [
|
||||
"flare-skirt__skirt-many-panels__n_panels__few",
|
||||
"flare-skirt__skirt-many-panels__n_panels__medium",
|
||||
"flare-skirt__skirt-many-panels__n_panels__many"
|
||||
],
|
||||
"flare-skirt__skirt-many-panels__panel_curve": [
|
||||
"flare-skirt__skirt-many-panels__panel_curve__inward",
|
||||
"flare-skirt__skirt-many-panels__panel_curve__straight",
|
||||
"flare-skirt__skirt-many-panels__panel_curve__outward"
|
||||
],
|
||||
"flare-skirt__asymm__front_length": [
|
||||
"flare-skirt__asymm__front_length__highly-asymmetric",
|
||||
"flare-skirt__asymm__front_length__strongly-asymmetric",
|
||||
"flare-skirt__asymm__front_length__moderately-asymmetric",
|
||||
"flare-skirt__asymm__front_length__slightly-asymmetric",
|
||||
"flare-skirt__asymm__front_length__symmetric"
|
||||
],
|
||||
"flare-skirt__cut__add": [
|
||||
"flare-skirt__cut__add__True",
|
||||
"flare-skirt__cut__add__False"
|
||||
],
|
||||
"flare-skirt__cut__depth": [
|
||||
"flare-skirt__cut__depth__shallow",
|
||||
"flare-skirt__cut__depth__medium",
|
||||
"flare-skirt__cut__depth__deep"
|
||||
],
|
||||
"flare-skirt__cut__width": [
|
||||
"flare-skirt__cut__width__narrow",
|
||||
"flare-skirt__cut__width__medium",
|
||||
"flare-skirt__cut__width__wide"
|
||||
],
|
||||
"flare-skirt__cut__place": [
|
||||
"flare-skirt__cut__place__back_left",
|
||||
"flare-skirt__cut__place__back_center",
|
||||
"flare-skirt__cut__place__back_right",
|
||||
"flare-skirt__cut__place__front_left",
|
||||
"flare-skirt__cut__place__front_center",
|
||||
"flare-skirt__cut__place__front_right",
|
||||
],
|
||||
"godet-skirt__base": [
|
||||
"godet-skirt__base__Skirt2",
|
||||
"godet-skirt__base__PencilSkirt"
|
||||
],
|
||||
"godet-skirt__insert_w": [
|
||||
"godet-skirt__insert_w__narrow",
|
||||
"godet-skirt__insert_w__medium",
|
||||
"godet-skirt__insert_w__wide"
|
||||
],
|
||||
"godet-skirt__insert_depth": [
|
||||
"godet-skirt__insert_depth__shallow",
|
||||
"godet-skirt__insert_depth__medium",
|
||||
"godet-skirt__insert_depth__deep"
|
||||
],
|
||||
"godet-skirt__num_inserts": [
|
||||
"godet-skirt__num_inserts__4",
|
||||
"godet-skirt__num_inserts__6",
|
||||
"godet-skirt__num_inserts__8",
|
||||
"godet-skirt__num_inserts__10",
|
||||
"godet-skirt__num_inserts__12"
|
||||
],
|
||||
"godet-skirt__cuts_distance": [
|
||||
"godet-skirt__cuts_distance__close",
|
||||
"godet-skirt__cuts_distance__medium",
|
||||
"godet-skirt__cuts_distance__far"
|
||||
],
|
||||
"pencil-skirt__length": [
|
||||
"pencil-skirt__length__micro",
|
||||
"pencil-skirt__length__mini",
|
||||
"pencil-skirt__length__above-knee",
|
||||
"pencil-skirt__length__knee-length",
|
||||
"pencil-skirt__length__midi",
|
||||
"pencil-skirt__length__floor-length"
|
||||
],
|
||||
"pencil-skirt__rise": [
|
||||
"pencil-skirt__rise__low",
|
||||
"pencil-skirt__rise__mid",
|
||||
"pencil-skirt__rise__high"
|
||||
],
|
||||
"pencil-skirt__flare": [
|
||||
"pencil-skirt__flare__tight",
|
||||
"pencil-skirt__flare__straight",
|
||||
"pencil-skirt__flare__slight-flare"
|
||||
],
|
||||
"pencil-skirt__low_angle": [
|
||||
"pencil-skirt__low_angle__inward",
|
||||
"pencil-skirt__low_angle__straight",
|
||||
"pencil-skirt__low_angle__outward"
|
||||
],
|
||||
"pencil-skirt__front_slit": [
|
||||
"pencil-skirt__front_slit__none",
|
||||
"pencil-skirt__front_slit__shallow",
|
||||
"pencil-skirt__front_slit__deep"
|
||||
],
|
||||
"pencil-skirt__back_slit": [
|
||||
"pencil-skirt__back_slit__none",
|
||||
"pencil-skirt__back_slit__shallow",
|
||||
"pencil-skirt__back_slit__deep"
|
||||
],
|
||||
"pencil-skirt__left_slit": [
|
||||
"pencil-skirt__left_slit__none",
|
||||
"pencil-skirt__left_slit__shallow",
|
||||
"pencil-skirt__left_slit__deep"
|
||||
],
|
||||
"pencil-skirt__right_slit": [
|
||||
"pencil-skirt__right_slit__none",
|
||||
"pencil-skirt__right_slit__shallow",
|
||||
"pencil-skirt__right_slit__deep"
|
||||
],
|
||||
"pencil-skirt__style_side_cut": [
|
||||
"pencil-skirt__style_side_cut__Sun",
|
||||
"pencil-skirt__style_side_cut__SIGGRAPH_logo",
|
||||
"pencil-skirt__style_side_cut__None"
|
||||
],
|
||||
"levels-skirt__base": [
|
||||
"levels-skirt__base__Skirt2",
|
||||
"levels-skirt__base__PencilSkirt",
|
||||
"levels-skirt__base__SkirtCircle",
|
||||
"levels-skirt__base__AsymmSkirtCircle"
|
||||
],
|
||||
"levels-skirt__level": [
|
||||
"levels-skirt__level__Skirt2",
|
||||
"levels-skirt__level__SkirtCircle",
|
||||
"levels-skirt__level__AsymmSkirtCircle"
|
||||
],
|
||||
"levels-skirt__num_levels": [
|
||||
"levels-skirt__num_levels__1",
|
||||
"levels-skirt__num_levels__2",
|
||||
"levels-skirt__num_levels__3",
|
||||
"levels-skirt__num_levels__4",
|
||||
"levels-skirt__num_levels__5"
|
||||
],
|
||||
"levels-skirt__level_ruffle": [
|
||||
"levels-skirt__level_ruffle__none",
|
||||
"levels-skirt__level_ruffle__moderate",
|
||||
"levels-skirt__level_ruffle__rich"
|
||||
],
|
||||
"levels-skirt__length": [
|
||||
"levels-skirt__length__micro",
|
||||
"levels-skirt__length__mini",
|
||||
"levels-skirt__length__above-knee",
|
||||
"levels-skirt__length__knee-length",
|
||||
"levels-skirt__length__midi",
|
||||
"levels-skirt__length__floor-length"
|
||||
],
|
||||
"levels-skirt__rise": [
|
||||
"levels-skirt__rise__low",
|
||||
"levels-skirt__rise__mid",
|
||||
"levels-skirt__rise__high"
|
||||
],
|
||||
"levels-skirt__base_length_frac": [
|
||||
"levels-skirt__base_length_frac__short",
|
||||
"levels-skirt__base_length_frac__medium",
|
||||
"levels-skirt__base_length_frac__long"
|
||||
],
|
||||
"pants__length": [
|
||||
"pants__length__micro",
|
||||
"pants__length__short",
|
||||
"pants__length__knee-length",
|
||||
"pants__length__capri",
|
||||
"pants__length__ankle-length",
|
||||
"pants__length__full-length"
|
||||
],
|
||||
"pants__width": [
|
||||
"pants__width__fitted",
|
||||
"pants__width__normal",
|
||||
"pants__width__loose"
|
||||
],
|
||||
"pants__flare": [
|
||||
"pants__flare__tapering",
|
||||
"pants__flare__straight",
|
||||
"pants__flare__slight-flare"
|
||||
],
|
||||
"pants__rise": [
|
||||
"pants__rise__low",
|
||||
"pants__rise__mid",
|
||||
"pants__rise__high"
|
||||
],
|
||||
"pants__cuff__type": [
|
||||
"pants__cuff__type__CuffBand",
|
||||
"pants__cuff__type__CuffSkirt",
|
||||
"pants__cuff__type__CuffBandSkirt",
|
||||
"pants__cuff__type__None"
|
||||
],
|
||||
"pants__cuff__top_ruffle": [
|
||||
"pants__cuff__top_ruffle__none",
|
||||
"pants__cuff__top_ruffle__moderate",
|
||||
"pants__cuff__top_ruffle__rich"
|
||||
],
|
||||
"pants__cuff__cuff_len": [
|
||||
"pants__cuff__cuff_len__short",
|
||||
"pants__cuff__cuff_len__medium",
|
||||
"pants__cuff__cuff_len__long"
|
||||
],
|
||||
"pants__cuff__skirt_fraction": [
|
||||
"pants__cuff__skirt_fraction__small",
|
||||
"pants__cuff__skirt_fraction__medium",
|
||||
"pants__cuff__skirt_fraction__large"
|
||||
],
|
||||
"pants__cuff__skirt_flare": [
|
||||
"pants__cuff__skirt_flare__slight",
|
||||
"pants__cuff__skirt_flare__moderate",
|
||||
"pants__cuff__skirt_flare__significant"
|
||||
],
|
||||
"pants__cuff__skirt_ruffle": [
|
||||
"pants__cuff__skirt_ruffle__none",
|
||||
"pants__cuff__skirt_ruffle__some"
|
||||
]
|
||||
}
|
||||
|
||||
def list_to_prefix_dict(strings):
    """Group full caption strings by their prefix.

    The prefix of a caption is everything before its final CONNECT_TAG
    segment.  Returns an OrderedDict mapping prefix -> list of the original
    strings, preserving first-seen order of both prefixes and captions.
    """
    grouped = OrderedDict()
    for caption in strings:
        # rpartition drops the final CONNECT_TAG segment; if the tag is
        # absent the prefix is '' (same as join(split[:-1])).
        prefix, _, _ = caption.rpartition(CONNECT_TAG)
        grouped.setdefault(prefix, []).append(caption)
    return grouped
|
||||
def input_caption2random_default_cption(test_gpt_caption=None):
    '''Complete a partial GPT caption list into a full, ordered caption list.

    ``all_text_dict`` defines the full, ordered text space (the network was
    trained on this order, so it must be preserved).  For every key whose
    caption was supplied by GPT, that caption is kept (when valid); for every
    other key a value is chosen — a fixed default for a handful of keys,
    the "False" variant for boolean keys, and a uniformly random option
    otherwise.  Returns the completed list in ``all_text_dict`` order.
    '''
    # Fixed fallbacks for keys GPT did not mention.
    preferred_defaults = {
        'shirt__length': "shirt__length__super-cropped",
        'pants__cuff__type': "pants__cuff__type__None",
        "sleeve__cuff__type": "sleeve__cuff__type__None",
        "left__sleeve__cuff__type": "left__sleeve__cuff__type__None",
        'collar__component__style': "collar__component__style__None",
        "pencil-skirt__low_angle": "pencil-skirt__low_angle__straight",
        "sleeve__connecting_width": "sleeve__connecting_width__medium",
        "sleeve__end_width": "sleeve__end_width__straight",
        "left__sleeve__connecting_width": "left__sleeve__connecting_width__medium",
        "left__sleeve__end_width": "left__sleeve__end_width__straight",
    }

    gpt_by_prefix = list_to_prefix_dict(test_gpt_caption)
    random_list = []
    for key, value_list in all_text_dict.items():
        if key in gpt_by_prefix:
            # Keep the GPT-provided caption when it is a valid option,
            # otherwise fall back to a random valid one.
            candidate = gpt_by_prefix[key][0]
            if candidate in value_list:
                random_list.append(candidate)
            else:
                random_list.append(value_list[random.randint(0, len(value_list) - 1)])
        elif key in preferred_defaults:
            random_list.append(preferred_defaults[key])
        elif 'False' in value_list[0] or 'True' in value_list[0]:
            # Boolean keys: pick the second (False) variant by convention.
            random_list.append(value_list[1])
        else:
            random_list.append(value_list[random.randint(0, len(value_list) - 1)])

    return random_list
|
||||
def vec_2_pattern_yaml(yaml_data, param_vec, mask_list):
    """Write a normalized parameter vector back into a GarmentCode design tree.

    Walks ``yaml_data`` (nested dicts) depth-first in insertion order.  Each
    parameter dict (one containing a ``'v'`` key) consumes one slot of
    ``param_vec`` / ``mask_list``:

    * masked-out slots (``mask_list[i] == 0``) and parameters whose current
      value is 0 are left untouched;
    * ``select`` / ``select_null`` / ``bool`` parameters are never rewritten;
    * ``int`` / ``float`` parameters are de-normalized from [0, 1] back into
      the parameter's declared ``range``.

    Args:
        yaml_data (dict): design tree loaded from YAML; mutated in place.
        param_vec (array-like): normalized values, clipped into [0, 1].
        mask_list (sequence): per-parameter mask aligned with traversal order.

    Returns:
        dict: the same ``yaml_data`` object, updated in place.
    """
    cont = 0  # index of the next slot in param_vec / mask_list
    param_vec = np.clip(param_vec, 0, 1)

    def extract_new_vec_v2v(data):
        # Recursive in-place walk.  NOTE: correctness relies on dict
        # insertion order matching the order in which param_vec was
        # originally flattened — TODO confirm against the encoder.
        nonlocal cont
        if isinstance(data, dict):
            for key, value in data.items():
                if key == 'v':
                    value_range = data['range']  # renamed: avoid shadowing builtin range
                    if mask_list[cont] == 0 or data['v'] == 0:
                        cont = cont + 1
                        continue
                    elif data['type'] == 'select_null' or data['type'] == 'select':
                        pass  # categorical choice: value left as-is
                    elif data['type'] == 'bool':
                        pass
                    elif data['type'] == 'int':
                        re_normal_value = param_vec[cont] * (value_range[1] - value_range[0]) + value_range[0]
                        data['v'] = int(re_normal_value)
                    elif data['type'] == 'float':
                        re_normal_value = param_vec[cont] * (value_range[1] - value_range[0]) + value_range[0]
                        # mask is non-zero here; multiplying keeps support for
                        # fractional (soft) masks as in the original code.
                        data['v'] = float(re_normal_value * mask_list[cont])

                    cont = cont + 1
                else:
                    # Recurse into nested sub-dicts.
                    extract_new_vec_v2v(value)

    extract_new_vec_v2v(yaml_data)
    return yaml_data
|
||||
|
||||
class NoAliasDumper(yaml.Dumper):
    """YAML dumper that never emits anchors/aliases.

    PyYAML normally replaces repeated object references with ``&anchor`` /
    ``*alias`` markers; forcing ``ignore_aliases`` to True writes every
    occurrence out in full, keeping saved design files human-readable.
    """
    def ignore_aliases(self, data):
        # Treat every node as alias-free, regardless of identity.
        return True
|
||||
|
||||
def save_design2yaml(design, new_yaml_path):
    """Serialize a design dict to ``new_yaml_path`` wrapped as ``{'design': ...}``.

    Parent directories are created on demand.  The dump preserves key order,
    allows unicode and suppresses YAML aliases (via NoAliasDumper).
    """
    os.makedirs(os.path.dirname(new_yaml_path), exist_ok=True)
    with open(new_yaml_path, 'w') as out_file:
        yaml.dump(
            {'design': design},
            out_file,
            default_flow_style=False,
            allow_unicode=True,
            sort_keys=False,
            Dumper=NoAliasDumper,
        )
|
||||
160
lmm_utils/sim_utils.py
Normal file
160
lmm_utils/sim_utils.py
Normal file
@@ -0,0 +1,160 @@
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
import pygarment.data_config as data_config
|
||||
from assets.bodies.body_params import BodyParameters
|
||||
from assets.garment_programs.meta_garment import MetaGarment
|
||||
from pygarment.data_config import Properties
|
||||
from pygarment.meshgen.boxmeshgen import BoxMesh
|
||||
from pygarment.meshgen.sim_config import PathCofig
|
||||
# from pygarment.meshgen.simulation import run_sim
|
||||
|
||||
|
||||
#Convert the yaml file directly into an output plate
|
||||
def garmentyaml_folder2json_folder(input_folder='', output_folder='',
                                   body_to_use='neutral'):
    ''' Convert every design YAML in ``input_folder`` to a serialized pattern.

    For each ``*.yaml`` design file, a sub-folder named after the YAML stem is
    created under ``output_folder`` containing the pattern JSON, the printable
    pattern image, the body measurements, and a copy of the source design file.

    Args:
        input_folder (string): folder containing design ``*.yaml`` files.
        output_folder (string): destination folder for serialized patterns.
        body_to_use (string): key selecting a body-measurement preset below.
    '''
    bodies_measurements = {
        # Our model
        'neutral': './assets/bodies/mean_all.yaml',
        'mean_female': './assets/bodies/mean_female.yaml',
        'mean_male': './assets/bodies/mean_male.yaml',

        # SMPL
        'f_smpl': './assets/bodies/f_smpl_average_A40.yaml',
        'm_smpl': './assets/bodies/m_smpl_average_A40.yaml',
        # t pose
        'mean_all_tpose': './assets/bodies/mean_all_tpose.yaml'
    }
    body = BodyParameters(bodies_measurements[body_to_use])

    pattern_dir = input_folder  # folder holding the input design YAMLs
    design_files_list = [file for file in Path(pattern_dir).iterdir() if file.suffix == '.yaml']

    design_files_stem = [item.stem for item in design_files_list]
    design_files_list = [str(item) for item in design_files_list]
    print(design_files_list)
    # Map YAML stem -> YAML path.  (The original hard-coded t-shirt map and
    # the no-op `body_to_use = body_to_use` were dead code and are removed.)
    design_files = {
        k: v for k, v in zip(design_files_stem, design_files_list)
    }

    designs = {}
    for df in design_files:
        with open(design_files[df], 'r') as f:
            designs[df] = yaml.safe_load(f)['design']

    test_garments = []
    for df in designs:
        try:
            garment = MetaGarment(df, body, designs[df])
            test_garments.append(garment)
        except Exception as e:
            # One malformed design should not abort the whole batch.
            print(f"An error occurred with {df}: {e}")
            continue
    outpath = Path(output_folder)  # output root
    outpath.mkdir(parents=True, exist_ok=True)

    for piece in test_garments:
        pattern = piece.assembly()

        if piece.is_self_intersecting():
            print(f'{piece.name} is Self-intersecting')

        folder = pattern.serialize(
            outpath,
            tag='',
            to_subfolder=True,
            with_3d=False, with_text=False, view_ids=False,
            with_printable=True
        )

        body.save(folder)
        if piece.name in design_files:
            shutil.copy(design_files[piece.name], folder)
        else:
            # NOTE(review): assumes a 'base' design exists when the garment
            # name matches no input YAML — raises KeyError otherwise; confirm.
            shutil.copy(design_files['base'], folder)

        print(f'Success! {piece.name} saved to {folder}')
|
||||
|
||||
|
||||
def json2modelfolder(input_json):
    ''' Simulate a garment specification JSON in place.

    A folder named after the JSON stem (with its trailing ``_...`` suffix,
    normally ``_specification``, removed) is created next to the input file
    and filled with the box mesh, sim properties and simulation output.

    Args:
        input_json (string): path of the specification JSON to simulate.
    '''
    # Load default simulation properties and reset the per-run stat sections.
    props = data_config.Properties('./assets/Sim_props/default_sim_props.yaml')
    props.set_section_stats('sim', fails={}, sim_time={}, spf={}, fin_frame={}, body_collisions={}, self_collisions={})
    props.set_section_stats('render', render_time={})

    input_path = Path(input_json)
    garment_name, _, _ = input_path.stem.rpartition('_')  # assuming ending in '_specification'
    # garment_name = os.path.splitext(os.path.basename(input_path))[0]
    # NOTE(review): sys_props is never used below; the Properties call only
    # reads ./system.json (kept for its side effect / validation?) — confirm.
    sys_props = data_config.Properties('./system.json')
    # Resolve all input/output paths; output lands next to the input JSON.
    paths = PathCofig(
        in_element_path=input_path.parent,
        out_path=input_path.parent,
        in_name=garment_name,
        body_name='mean_all', # 'f_smpl_average_A40'
        smpl_body=False, # NOTE: depends on chosen body model
        add_timestamp=False
    )

    # Generate and save garment box mesh (if not existent)
    print(f"Generate box mesh of {garment_name} with resolution {props['sim']['config']['resolution_scale']}...")
    print('\nGarment load: ', paths.in_g_spec)

    garment_box_mesh = BoxMesh(paths.in_g_spec, props['sim']['config']['resolution_scale'])
    garment_box_mesh.load()
    garment_box_mesh.serialize(
        paths, store_panels=False, uv_config=props['render']['config']['uv_texture'])

    # Persist sim props before the run so a crash still leaves a record.
    props.serialize(paths.element_sim_props)

    # NOTE(review): the `run_sim` import is commented out at the top of this
    # file — calling this function as-is raises NameError; confirm intent.
    run_sim(
        garment_box_mesh.name,
        props,
        paths,
        save_v_norms=False,
        store_usd=False,  # NOTE: False for fast simulation!
        optimize_storage=False,  # props['sim']['config']['optimize_storage'],
        verbose=False
    )

    # Re-serialize with the stats filled in by the simulation run.
    props.serialize(paths.element_sim_props)
|
||||
|
||||
|
||||
def modelandreturn_picture_path(input_json):
    ''' Simulate a specification JSON and return the front-view render path.

    The simulation output is written next to the JSON file, in a folder named
    after the specification stem with its trailing ``_...`` suffix removed.

    Args:
        input_json (string): path of the specification JSON to simulate.

    Returns:
        pathlib.Path: path to ``<name>_render_front.png`` of the simulation.
    '''
    json2modelfolder(input_json)
    spec_path = Path(input_json)
    garment_name = spec_path.stem.rpartition('_')[0]
    return spec_path.parent / garment_name / f"{garment_name}_render_front.png"
|
||||
89
lmm_utils/test_picture_batch.py
Executable file
89
lmm_utils/test_picture_batch.py
Executable file
@@ -0,0 +1,89 @@
|
||||
import os
|
||||
import json
|
||||
import uuid
|
||||
import shutil
|
||||
import argparse
|
||||
import tqdm
|
||||
from functools import partial
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
from helper import category2yaml2json
|
||||
from lmm_utils.predict_garmentcode_picture import Predictor
|
||||
|
||||
|
||||
def search_picture_files(directory):
    """Recursively collect image files (.jpg/.png/.jpeg/.gif) under ``directory``.

    Matching is case-insensitive; paths are returned in os.walk order.
    """
    image_exts = ('.jpg', '.png', '.jpeg', '.gif')
    return [
        os.path.join(root, name)
        for root, _, names in os.walk(directory)
        for name in names
        if name.lower().endswith(image_exts)
    ]
|
||||
|
||||
|
||||
def main(input_folder_path, output_folder_path, sim_bool=False,
         model='Qwen/Qwen2.5-VL-72B-Instruct',
         base_url='https://api-inference.modelscope.cn/v1/',
         api_key='108a28f0-de01-4c43-b189-6cad25d32990'):
    """Batch-convert every image under ``input_folder_path`` to GarmentCode JSON.

    Output mirrors the input tree under ``output_folder_path``; each image
    produces ``<stem>/<stem>.json``.  Work is fanned out over a small thread
    pool, already-existing outputs are skipped (resume support), and failed
    inputs are recorded in ``user_data/fail_<input-folder-name>/fail_picture_list.json``.

    Args:
        input_folder_path (str): root folder scanned recursively for images.
        output_folder_path (str): root folder for the generated JSON files.
        sim_bool (bool): forwarded simulation toggle for category2yaml2json.
        model (str): vision-language model identifier.
        base_url (str): inference API endpoint.
        api_key (str): API credential.

    SECURITY NOTE(review): the ``api_key`` default is a credential committed
    to source control — rotate it and pass the key explicitly (e.g. from an
    environment variable) instead of relying on this default.
    """
    dsl_ga = Predictor()
    all_picture_files = search_picture_files(input_folder_path)
    input_output_list = []
    uuid_list = []
    for input_picture_path in all_picture_files:
        # Mirror the input path under the output root, then nest the JSON in
        # a folder named after the image stem.
        output_json_path = input_picture_path.replace(input_folder_path, output_folder_path)
        output_json_dir = os.path.dirname(output_json_path)
        json_file_name = os.path.splitext(os.path.basename(input_picture_path))[0]
        output_json_path = os.path.join(output_json_dir, json_file_name, json_file_name + '.json')
        input_output_list.append((input_picture_path, output_json_path))
    threadPool = ThreadPoolExecutor(max_workers=5, thread_name_prefix="img_thread")
    futures = []
    for i, (input_picture_path, output_json_path) in tqdm.tqdm(enumerate(input_output_list)):
        if os.path.exists(output_json_path):
            continue  # resume support: skip finished items
        item_id = str(uuid.uuid4())
        uuid_list.append(item_id)
        task = partial(
            category2yaml2json,
            category='picture',
            category_data=input_picture_path,
            final_json_path=output_json_path,
            id=item_id,
            model=model,
            base_url=base_url,
            api_key=api_key,
            dsl_ga=dsl_ga,
            sim_bool=sim_bool
        )
        future = threadPool.submit(task)
        futures.append((future, input_picture_path))
        print(i, input_picture_path, output_json_path)
    fail_picture_list = []
    for future, input_picture_path in futures:
        try:
            result = future.result()
            print(f"Task result: {result}")
        except Exception as e:
            fail_picture_list.append(input_picture_path)
            print(f"Task failed: {e}")

    # Clean up the per-task temporary folders
    for _uuid in uuid_list:
        try:
            shutil.rmtree(f"user_data/temp_user_folder_for{_uuid}gpt")
        except Exception as e:
            # Best-effort cleanup: a missing folder is not an error.
            print(f"Error when deleting temp folder: {e}")

    threadPool.shutdown(wait=True)

    # Save the list of failures
    fail_dir = f"user_data/fail_{os.path.basename(input_folder_path)}"
    os.makedirs(fail_dir, exist_ok=True)
    with open(os.path.join(fail_dir, "fail_picture_list.json"), "w") as file:
        json.dump(fail_picture_list, file, indent=4)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Image-to-GarmentCode generation script")
    parser.add_argument('--input', type=str, required=True, help='Input image folder path')
    parser.add_argument('--output', type=str, required=True, help='Output folder path')
    # BUG FIX: argparse's type=bool treats ANY non-empty string (including
    # "False") as True.  Parse the text explicitly so '--sim False' works.
    parser.add_argument('--sim', type=lambda s: s.strip().lower() in ('1', 'true', 'yes'),
                        default=False, help='Enable simulation mode (default: False)')
    args = parser.parse_args()
    main(args.input, args.output, args.sim)
|
||||
89
lmm_utils/test_text_batch.py
Executable file
89
lmm_utils/test_text_batch.py
Executable file
@@ -0,0 +1,89 @@
|
||||
import os
|
||||
import shutil
|
||||
import traceback
|
||||
import json
|
||||
import uuid
|
||||
import argparse
|
||||
from functools import partial
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
from helper import category2yaml2json
|
||||
from lmm_utils.predict_garmentcode_picture import Predictor
|
||||
import tqdm
|
||||
|
||||
|
||||
def main(input_text_json_path, output_folder_path, sim_bool=False,
         model='Qwen/Qwen2.5-72B-Instruct',
         base_url='https://api-inference.modelscope.cn/v1/',
         api_key='108a28f0-de01-4c43-b189-6cad25d32990'):
    """Batch-convert each text prompt in a JSON list to GarmentCode JSON.

    ``input_text_json_path`` must be a JSON file containing a list of prompt
    strings.  Each prompt ``i`` produces ``<json-stem>/<i>/<i>.json`` under
    ``output_folder_path``.  Work is fanned out over a thread pool,
    already-existing outputs are skipped (resume support), and failed prompts
    are recorded under ``user_data/fail_<json-stem>/fail_picture_list.json``.

    Args:
        input_text_json_path (str): path of the JSON file with the prompt list.
        output_folder_path (str): root folder for the generated JSON files.
        sim_bool (bool): forwarded simulation toggle for category2yaml2json.
        model (str): language model identifier.
        base_url (str): inference API endpoint.
        api_key (str): API credential.

    SECURITY NOTE(review): the ``api_key`` default is a credential committed
    to source control — rotate it and pass the key explicitly (e.g. from an
    environment variable) instead of relying on this default.
    """
    # Load the input JSON prompt list
    with open(input_text_json_path, 'r') as f:
        all_text = json.load(f)

    input_output_list = []
    json_file_name = os.path.splitext(os.path.basename(input_text_json_path))[0]

    for index, input_text in enumerate(all_text):
        output_json_path = os.path.join(output_folder_path, json_file_name, str(index), f"{index}.json")
        input_output_list.append((input_text, output_json_path))

    # Initialize the predictor and thread pool
    dsl_ga = Predictor()
    threadPool = ThreadPoolExecutor(max_workers=10, thread_name_prefix="test_")
    futures = []
    uuid_list = []

    for i, (input_text, output_json_path) in tqdm.tqdm(enumerate(input_output_list)):
        if os.path.exists(output_json_path):
            continue  # resume support: skip finished items
        item_id = str(uuid.uuid4())
        uuid_list.append(item_id)

        task = partial(
            category2yaml2json,
            category='text',
            category_data=input_text,
            final_json_path=output_json_path,
            id=item_id,
            model=model,
            base_url=base_url,
            api_key=api_key,
            dsl_ga=dsl_ga,
            sim_bool=sim_bool
        )
        future = threadPool.submit(task)
        futures.append((future, input_text))
        print(i, input_text, output_json_path)

    # Collect failure information
    fail_picture_list = []
    for future, input_text in futures:
        try:
            result = future.result()
            print(f"Task result: {result}")
        except Exception as e:
            traceback.print_exc()
            fail_picture_list.append(input_text)
            print(f"Task failed: {e}")

    # Clean up the per-task temporary folders
    for _uuid in uuid_list:
        try:
            shutil.rmtree(f"user_data/temp_user_folder_for{_uuid}gpt")
        except Exception as e:
            # Best-effort cleanup: a missing folder is not an error.
            print(f"Error when deleting temp folder: {e}")

    threadPool.shutdown(wait=True)

    # Save a record of failures
    fail_dir = f"user_data/fail_{os.path.basename(input_text_json_path).split('.')[0]}"
    os.makedirs(fail_dir, exist_ok=True)
    with open(os.path.join(fail_dir, "fail_picture_list.json"), "w") as file:
        json.dump(fail_picture_list, file, indent=4)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Text-to-GarmentCode generation script")
    parser.add_argument('--input', type=str, required=True, help='Path to input JSON file with text list')
    parser.add_argument('--output', type=str, required=True, help='Path to output folder for results')
    # BUG FIX: argparse's type=bool treats ANY non-empty string (including
    # "False") as True.  Parse the text explicitly so '--sim False' works.
    parser.add_argument('--sim', type=lambda s: s.strip().lower() in ('1', 'true', 'yes'),
                        default=False, help='Enable simulation mode (default: False)')
    args = parser.parse_args()
    main(args.input, args.output, args.sim)
|
||||
635
lmm_utils/validation.py
Normal file
635
lmm_utils/validation.py
Normal file
@@ -0,0 +1,635 @@
|
||||
_ALL_TEXT =[
|
||||
"meta__upper__FittedShirt",
|
||||
"meta__upper__Shirt",
|
||||
"meta__upper__None",
|
||||
"meta__wb__StraightWB",
|
||||
"meta__wb__FittedWB",
|
||||
"meta__wb__None",
|
||||
"meta__bottom__SkirtCircle",
|
||||
"meta__bottom__AsymmSkirtCircle",
|
||||
"meta__bottom__GodetSkirt",
|
||||
"meta__bottom__Pants",
|
||||
"meta__bottom__Skirt2",
|
||||
"meta__bottom__SkirtManyPanels",
|
||||
"meta__bottom__PencilSkirt",
|
||||
"meta__bottom__SkirtLevels",
|
||||
"meta__bottom__None",
|
||||
"meta__connected__True",
|
||||
"meta__connected__False",
|
||||
"waistband__waist__fitted",
|
||||
"waistband__waist__slightly-loose",
|
||||
"waistband__waist__loose",
|
||||
"waistband__width__narrow",
|
||||
"waistband__width__medium",
|
||||
"waistband__width__wide",
|
||||
"fitted_shirt__strapless__True",
|
||||
"fitted_shirt__strapless__False",
|
||||
"shirt__length__super-cropped",
|
||||
"shirt__length__regular",
|
||||
"shirt__length__long",
|
||||
"shirt__width__normal",
|
||||
"shirt__width__relaxed",
|
||||
"shirt__flare__tight",
|
||||
"shirt__flare__straight",
|
||||
"shirt__flare__flared",
|
||||
"shirt__flare__very-flared",
|
||||
"collar__f_collar__CircleNeckHalf",
|
||||
"collar__f_collar__CurvyNeckHalf",
|
||||
"collar__f_collar__VNeckHalf",
|
||||
"collar__f_collar__SquareNeckHalf",
|
||||
"collar__f_collar__TrapezoidNeckHalf",
|
||||
"collar__f_collar__CircleArcNeckHalf",
|
||||
"collar__f_collar__Bezier2NeckHalf",
|
||||
"collar__b_collar__CircleNeckHalf",
|
||||
"collar__b_collar__CurvyNeckHalf",
|
||||
"collar__b_collar__VNeckHalf",
|
||||
"collar__b_collar__SquareNeckHalf",
|
||||
"collar__b_collar__TrapezoidNeckHalf",
|
||||
"collar__b_collar__CircleArcNeckHalf",
|
||||
"collar__b_collar__Bezier2NeckHalf",
|
||||
"collar__width__very-narrow",
|
||||
"collar__width__medium",
|
||||
"collar__width__wide",
|
||||
"collar__fc_depth__shallow",
|
||||
"collar__fc_depth__medium",
|
||||
"collar__fc_depth__deep",
|
||||
"collar__bc_depth__shallow",
|
||||
"collar__bc_depth__medium",
|
||||
"collar__bc_depth__deep",
|
||||
"collar__fc_angle__acute",
|
||||
"collar__fc_angle__standard",
|
||||
"collar__fc_angle__obtuse",
|
||||
"collar__bc_angle__acute",
|
||||
"collar__bc_angle__standard",
|
||||
"collar__bc_angle__obtuse",
|
||||
"collar__f_bezier_x__left",
|
||||
"collar__f_bezier_x__center",
|
||||
"collar__f_bezier_x__right",
|
||||
"collar__f_bezier_y__top",
|
||||
"collar__f_bezier_y__center",
|
||||
"collar__f_bezier_y__bottom",
|
||||
"collar__b_bezier_x__left",
|
||||
"collar__b_bezier_x__center",
|
||||
"collar__b_bezier_x__right",
|
||||
"collar__b_bezier_y__top",
|
||||
"collar__b_bezier_y__center",
|
||||
"collar__b_bezier_y__bottom",
|
||||
"collar__f_flip_curve__True",
|
||||
"collar__f_flip_curve__False",
|
||||
"collar__b_flip_curve__True",
|
||||
"collar__b_flip_curve__False",
|
||||
"collar__component__style__Turtle",
|
||||
"collar__component__style__SimpleLapel",
|
||||
"collar__component__style__Hood2Panels",
|
||||
"collar__component__style__None",
|
||||
"collar__component__depth__shallow",
|
||||
"collar__component__depth__medium",
|
||||
"collar__component__depth__deep",
|
||||
"collar__component__lapel_standing__True",
|
||||
"collar__component__lapel_standing__False",
|
||||
"collar__component__hood_depth__shallow",
|
||||
"collar__component__hood_depth__medium",
|
||||
"collar__component__hood_depth__deep",
|
||||
"collar__component__hood_length__short",
|
||||
"collar__component__hood_length__medium",
|
||||
"collar__component__hood_length__long",
|
||||
"sleeve__sleeveless__True",
|
||||
"sleeve__sleeveless__False",
|
||||
"sleeve__armhole_shape__ArmholeSquare",
|
||||
"sleeve__armhole_shape__ArmholeAngle",
|
||||
"sleeve__armhole_shape__ArmholeCurve",
|
||||
"sleeve__length__short",
|
||||
"sleeve__length__half",
|
||||
"sleeve__length__three-quarter",
|
||||
"sleeve__length__long",
|
||||
"sleeve__length__full",
|
||||
"sleeve__connecting_width__narrow",
|
||||
"sleeve__connecting_width__medium",
|
||||
"sleeve__connecting_width__loose",
|
||||
"sleeve__connecting_width__very-loose",
|
||||
"sleeve__end_width__closing",
|
||||
"sleeve__end_width__straight",
|
||||
"sleeve__end_width__opening",
|
||||
"sleeve__sleeve_angle__small",
|
||||
"sleeve__sleeve_angle__medium",
|
||||
"sleeve__sleeve_angle__large",
|
||||
"sleeve__opening_dir_mix__negative-twist",
|
||||
"sleeve__opening_dir_mix__standard",
|
||||
"sleeve__opening_dir_mix__positive-twist",
|
||||
"sleeve__standing_shoulder__True",
|
||||
"sleeve__standing_shoulder__False",
|
||||
"sleeve__standing_shoulder_len__short",
|
||||
"sleeve__standing_shoulder_len__medium",
|
||||
"sleeve__standing_shoulder_len__long",
|
||||
"sleeve__connect_ruffle__none",
|
||||
"sleeve__connect_ruffle__some",
|
||||
"sleeve__connect_ruffle__obvious",
|
||||
"sleeve__smoothing_coeff__very-smooth",
|
||||
"sleeve__smoothing_coeff__moderate",
|
||||
"sleeve__smoothing_coeff__less-smooth",
|
||||
"sleeve__cuff__type__CuffBand",
|
||||
"sleeve__cuff__type__CuffSkirt",
|
||||
"sleeve__cuff__type__CuffBandSkirt",
|
||||
"sleeve__cuff__type__None",
|
||||
"sleeve__cuff__top_ruffle__straight",
|
||||
"sleeve__cuff__top_ruffle__tapered",
|
||||
"sleeve__cuff__top_ruffle__very_tapered",
|
||||
"sleeve__cuff__cuff_len__short",
|
||||
"sleeve__cuff__cuff_len__medium",
|
||||
"sleeve__cuff__cuff_len__long",
|
||||
"sleeve__cuff__skirt_fraction__small",
|
||||
"sleeve__cuff__skirt_fraction__medium",
|
||||
"sleeve__cuff__skirt_fraction__large",
|
||||
"sleeve__cuff__skirt_flare__slight",
|
||||
"sleeve__cuff__skirt_flare__moderate",
|
||||
"sleeve__cuff__skirt_flare__significant",
|
||||
"sleeve__cuff__skirt_ruffle__none",
|
||||
"sleeve__cuff__skirt_ruffle__some",
|
||||
"left__enable_asym__True",
|
||||
"left__enable_asym__False",
|
||||
"left__fitted_shirt__strapless__True",
|
||||
"left__fitted_shirt__strapless__False",
|
||||
"left__shirt__width__normal",
|
||||
"left__shirt__width__relaxed",
|
||||
"left__shirt__flare__tight",
|
||||
"left__shirt__flare__straight",
|
||||
"left__shirt__flare__flared",
|
||||
"left__shirt__flare__very-flared",
|
||||
"left__collar__f_collar__CircleNeckHalf",
|
||||
"left__collar__f_collar__CurvyNeckHalf",
|
||||
"left__collar__f_collar__VNeckHalf",
|
||||
"left__collar__f_collar__SquareNeckHalf",
|
||||
"left__collar__f_collar__TrapezoidNeckHalf",
|
||||
"left__collar__f_collar__CircleArcNeckHalf",
|
||||
"left__collar__f_collar__Bezier2NeckHalf",
|
||||
"left__collar__b_collar__CircleNeckHalf",
|
||||
"left__collar__b_collar__CurvyNeckHalf",
|
||||
"left__collar__b_collar__VNeckHalf",
|
||||
"left__collar__b_collar__SquareNeckHalf",
|
||||
"left__collar__b_collar__TrapezoidNeckHalf",
|
||||
"left__collar__b_collar__CircleArcNeckHalf",
|
||||
"left__collar__b_collar__Bezier2NeckHalf",
|
||||
"left__collar__width__narrow",
|
||||
"left__collar__width__medium",
|
||||
"left__collar__width__wide",
|
||||
"left__collar__fc_angle__acute",
|
||||
"left__collar__fc_angle__standard",
|
||||
"left__collar__fc_angle__obtuse",
|
||||
"left__collar__bc_angle__acute",
|
||||
"left__collar__bc_angle__standard",
|
||||
"left__collar__bc_angle__obtuse",
|
||||
"left__collar__f_bezier_x__left",
|
||||
"left__collar__f_bezier_x__center",
|
||||
"left__collar__f_bezier_x__right",
|
||||
"left__collar__f_bezier_y__top",
|
||||
"left__collar__f_bezier_y__center",
|
||||
"left__collar__f_bezier_y__bottom",
|
||||
"left__collar__b_bezier_x__left",
|
||||
"left__collar__b_bezier_x__center",
|
||||
"left__collar__b_bezier_x__right",
|
||||
"left__collar__b_bezier_y__top",
|
||||
"left__collar__b_bezier_y__center",
|
||||
"left__collar__b_bezier_y__bottom",
|
||||
"left__collar__f_flip_curve__True",
|
||||
"left__collar__f_flip_curve__False",
|
||||
"left__collar__b_flip_curve__True",
|
||||
"left__collar__b_flip_curve__False",
|
||||
"left__sleeve__sleeveless__True",
|
||||
"left__sleeve__sleeveless__False",
|
||||
"left__sleeve__armhole_shape__ArmholeSquare",
|
||||
"left__sleeve__armhole_shape__ArmholeAngle",
|
||||
"left__sleeve__armhole_shape__ArmholeCurve",
|
||||
"left__sleeve__length__short",
|
||||
"left__sleeve__length__half",
|
||||
"left__sleeve__length__three-quarter",
|
||||
"left__sleeve__length__long",
|
||||
"left__sleeve__length__full",
|
||||
"left__sleeve__connecting_width__narrow",
|
||||
"left__sleeve__connecting_width__medium",
|
||||
"left__sleeve__connecting_width__loose",
|
||||
"left__sleeve__connecting_width__very-loose",
|
||||
"left__sleeve__end_width__closing",
|
||||
"left__sleeve__end_width__straight",
|
||||
"left__sleeve__end_width__opening",
|
||||
"left__sleeve__sleeve_angle__small",
|
||||
"left__sleeve__sleeve_angle__medium",
|
||||
"left__sleeve__sleeve_angle__large",
|
||||
"left__sleeve__opening_dir_mix__negative-twist",
|
||||
"left__sleeve__opening_dir_mix__standard",
|
||||
"left__sleeve__opening_dir_mix__positive-twist",
|
||||
"left__sleeve__standing_shoulder__True",
|
||||
"left__sleeve__standing_shoulder__False",
|
||||
"left__sleeve__standing_shoulder_len__short",
|
||||
"left__sleeve__standing_shoulder_len__medium",
|
||||
"left__sleeve__standing_shoulder_len__long",
|
||||
"left__sleeve__connect_ruffle__none",
|
||||
"left__sleeve__connect_ruffle__some",
|
||||
"left__sleeve__connect_ruffle__obvious",
|
||||
"left__sleeve__smoothing_coeff__very-smooth",
|
||||
"left__sleeve__smoothing_coeff__moderate",
|
||||
"left__sleeve__smoothing_coeff__less-smooth",
|
||||
"left__sleeve__cuff__type__CuffBand",
|
||||
"left__sleeve__cuff__type__CuffSkirt",
|
||||
"left__sleeve__cuff__type__CuffBandSkirt",
|
||||
"left__sleeve__cuff__type__None",
|
||||
"left__sleeve__cuff__top_ruffle__straight",
|
||||
"left__sleeve__cuff__top_ruffle__tapered",
|
||||
"left__sleeve__cuff__top_ruffle__very_tapered",
|
||||
"left__sleeve__cuff__cuff_len__short",
|
||||
"left__sleeve__cuff__cuff_len__medium",
|
||||
"left__sleeve__cuff__cuff_len__long",
|
||||
"left__sleeve__cuff__skirt_fraction__small",
|
||||
"left__sleeve__cuff__skirt_fraction__medium",
|
||||
"left__sleeve__cuff__skirt_fraction__large",
|
||||
"left__sleeve__cuff__skirt_flare__slight",
|
||||
"left__sleeve__cuff__skirt_flare__moderate",
|
||||
"left__sleeve__cuff__skirt_flare__significant",
|
||||
"left__sleeve__cuff__skirt_ruffle__none",
|
||||
"left__sleeve__cuff__skirt_ruffle__some",
|
||||
"skirt__length__micro",
|
||||
"skirt__length__mini",
|
||||
"skirt__length__above-knee",
|
||||
"skirt__length__knee-length",
|
||||
"skirt__length__midi",
|
||||
"skirt__length__floor-length",
|
||||
"skirt__rise__low",
|
||||
"skirt__rise__mid",
|
||||
"skirt__rise__high",
|
||||
"skirt__ruffle__none",
|
||||
"skirt__ruffle__moderate",
|
||||
"skirt__ruffle__rich",
|
||||
"skirt__bottom_cut__none",
|
||||
"skirt__bottom_cut__shallow",
|
||||
"skirt__bottom_cut__deep",
|
||||
"skirt__flare__none",
|
||||
"skirt__flare__small",
|
||||
"skirt__flare__medium",
|
||||
"skirt__flare__large",
|
||||
"flare-skirt__length__micro",
|
||||
"flare-skirt__length__mini",
|
||||
"flare-skirt__length__above-knee",
|
||||
"flare-skirt__length__knee-length",
|
||||
"flare-skirt__length__midi",
|
||||
"flare-skirt__length__floor-length",
|
||||
"flare-skirt__rise__low",
|
||||
"flare-skirt__rise__mid",
|
||||
"flare-skirt__rise__high",
|
||||
"flare-skirt__suns__slight",
|
||||
"flare-skirt__suns__moderate",
|
||||
"flare-skirt__suns__significant",
|
||||
"flare-skirt__skirt-many-panels__n_panels__few",
|
||||
"flare-skirt__skirt-many-panels__n_panels__medium",
|
||||
"flare-skirt__skirt-many-panels__n_panels__many",
|
||||
"flare-skirt__skirt-many-panels__panel_curve__inward",
|
||||
"flare-skirt__skirt-many-panels__panel_curve__straight",
|
||||
"flare-skirt__skirt-many-panels__panel_curve__outward",
|
||||
"flare-skirt__asymm__front_length__highly-asymmetric",
|
||||
"flare-skirt__asymm__front_length__strongly-asymmetric",
|
||||
"flare-skirt__asymm__front_length__moderately-asymmetric",
|
||||
"flare-skirt__asymm__front_length__slightly-asymmetric",
|
||||
"flare-skirt__asymm__front_length__symmetric",
|
||||
"flare-skirt__cut__add__True",
|
||||
"flare-skirt__cut__add__False",
|
||||
"flare-skirt__cut__depth__shallow",
|
||||
"flare-skirt__cut__depth__medium",
|
||||
"flare-skirt__cut__depth__deep",
|
||||
"flare-skirt__cut__width__narrow",
|
||||
"flare-skirt__cut__width__medium",
|
||||
"flare-skirt__cut__width__wide",
|
||||
"flare-skirt__cut__place__back_left",
|
||||
"flare-skirt__cut__place__back_center",
|
||||
"flare-skirt__cut__place__back_right",
|
||||
"flare-skirt__cut__place__front_left",
|
||||
"flare-skirt__cut__place__front_center",
|
||||
"flare-skirt__cut__place__front_right",
|
||||
"godet-skirt__base__Skirt2",
|
||||
"godet-skirt__base__PencilSkirt",
|
||||
"godet-skirt__insert_w__narrow",
|
||||
"godet-skirt__insert_w__medium",
|
||||
"godet-skirt__insert_w__wide",
|
||||
"godet-skirt__insert_depth__shallow",
|
||||
"godet-skirt__insert_depth__medium",
|
||||
"godet-skirt__insert_depth__deep",
|
||||
"godet-skirt__num_inserts__4",
|
||||
"godet-skirt__num_inserts__6",
|
||||
"godet-skirt__num_inserts__8",
|
||||
"godet-skirt__num_inserts__10",
|
||||
"godet-skirt__num_inserts__12",
|
||||
"godet-skirt__cuts_distance__close",
|
||||
"godet-skirt__cuts_distance__medium",
|
||||
"godet-skirt__cuts_distance__far",
|
||||
"pencil-skirt__length__micro",
|
||||
"pencil-skirt__length__mini",
|
||||
"pencil-skirt__length__above-knee",
|
||||
"pencil-skirt__length__knee-length",
|
||||
"pencil-skirt__length__midi",
|
||||
"pencil-skirt__length__floor-length",
|
||||
"pencil-skirt__rise__low",
|
||||
"pencil-skirt__rise__mid",
|
||||
"pencil-skirt__rise__high",
|
||||
"pencil-skirt__flare__tight",
|
||||
"pencil-skirt__flare__straight",
|
||||
"pencil-skirt__flare__slight-flare",
|
||||
"pencil-skirt__low_angle__inward",
|
||||
"pencil-skirt__low_angle__straight",
|
||||
"pencil-skirt__low_angle__outward",
|
||||
"pencil-skirt__front_slit__none",
|
||||
"pencil-skirt__front_slit__shallow",
|
||||
"pencil-skirt__front_slit__deep",
|
||||
"pencil-skirt__back_slit__none",
|
||||
"pencil-skirt__back_slit__shallow",
|
||||
"pencil-skirt__back_slit__deep",
|
||||
"pencil-skirt__left_slit__none",
|
||||
"pencil-skirt__left_slit__shallow",
|
||||
"pencil-skirt__left_slit__deep",
|
||||
"pencil-skirt__right_slit__none",
|
||||
"pencil-skirt__right_slit__shallow",
|
||||
"pencil-skirt__right_slit__deep",
|
||||
"pencil-skirt__style_side_cut__Sun",
|
||||
"pencil-skirt__style_side_cut__SIGGRAPH_logo",
|
||||
"pencil-skirt__style_side_cut__None",
|
||||
"levels-skirt__base__Skirt2",
|
||||
"levels-skirt__base__PencilSkirt",
|
||||
"levels-skirt__base__SkirtCircle",
|
||||
"levels-skirt__base__AsymmSkirtCircle",
|
||||
"levels-skirt__level__Skirt2",
|
||||
"levels-skirt__level__SkirtCircle",
|
||||
"levels-skirt__level__AsymmSkirtCircle",
|
||||
"levels-skirt__num_levels__1",
|
||||
"levels-skirt__num_levels__2",
|
||||
"levels-skirt__num_levels__3",
|
||||
"levels-skirt__num_levels__4",
|
||||
"levels-skirt__num_levels__5",
|
||||
"levels-skirt__level_ruffle__none",
|
||||
"levels-skirt__level_ruffle__moderate",
|
||||
"levels-skirt__level_ruffle__rich",
|
||||
"levels-skirt__length__micro",
|
||||
"levels-skirt__length__mini",
|
||||
"levels-skirt__length__above-knee",
|
||||
"levels-skirt__length__knee-length",
|
||||
"levels-skirt__length__midi",
|
||||
"levels-skirt__length__floor-length",
|
||||
"levels-skirt__rise__low",
|
||||
"levels-skirt__rise__mid",
|
||||
"levels-skirt__rise__high",
|
||||
"levels-skirt__base_length_frac__short",
|
||||
"levels-skirt__base_length_frac__medium",
|
||||
"levels-skirt__base_length_frac__long",
|
||||
"pants__length__micro",
|
||||
"pants__length__short",
|
||||
"pants__length__knee-length",
|
||||
"pants__length__capri",
|
||||
"pants__length__ankle-length",
|
||||
"pants__length__full-length",
|
||||
"pants__width__fitted",
|
||||
"pants__width__normal",
|
||||
"pants__width__loose",
|
||||
"pants__flare__tapering",
|
||||
"pants__flare__straight",
|
||||
"pants__flare__slight-flare",
|
||||
"pants__rise__low",
|
||||
"pants__rise__mid",
|
||||
"pants__rise__high",
|
||||
"pants__cuff__type__CuffBand",
|
||||
"pants__cuff__type__CuffSkirt",
|
||||
"pants__cuff__type__CuffBandSkirt",
|
||||
"pants__cuff__type__None",
|
||||
"pants__cuff__top_ruffle__straight",
|
||||
"pants__cuff__top_ruffle__tapered",
|
||||
"pants__cuff__top_ruffle__very_tapered",
|
||||
"pants__cuff__cuff_len__short",
|
||||
"pants__cuff__cuff_len__medium",
|
||||
"pants__cuff__cuff_len__long",
|
||||
"pants__cuff__skirt_fraction__small",
|
||||
"pants__cuff__skirt_fraction__medium",
|
||||
"pants__cuff__skirt_fraction__large",
|
||||
"pants__cuff__skirt_flare__slight",
|
||||
"pants__cuff__skirt_flare__moderate",
|
||||
"pants__cuff__skirt_flare__significant",
|
||||
"pants__cuff__skirt_ruffle__none",
|
||||
"pants__cuff__skirt_ruffle__some"
|
||||
]
|
||||
|
||||
# Module-level accumulator of caption categories that failed validation.
# It is reset at the start of bool2condition() and extended by list_in_text()
# (both declare it with `global`), then returned to the caller deduplicated.
fail_text_list = []
def list_in_text(my_list, start_string, connect_tag='__'):
    """Check that ``my_list`` covers every caption category opened by ``start_string``.

    Items of both ``my_list`` and the global vocabulary ``_ALL_TEXT`` that begin
    with ``start_string`` are truncated to their category prefix: the first
    ``start_string.count(connect_tag) + 2`` fields (the prefix path plus one
    extra field).  Any category present in ``_ALL_TEXT`` but missing from
    ``my_list`` is appended to the global ``fail_text_list``.

    Args:
        my_list (list): Predicted caption strings to validate.
        start_string (str): Prefix selecting which caption categories to check.
        connect_tag (str): Separator joining the fields inside each caption.

    Returns:
        bool: ``True`` when no category is missing, ``False`` otherwise.
    """
    global fail_text_list

    # Number of leading fields that identify a category for this prefix.
    keep_fields = start_string.count(connect_tag) + 2
    min_tags = start_string.count(connect_tag) + 1

    def _category_prefixes(items):
        # One shared extractor instead of two copy-pasted comprehensions:
        # keep only items under start_string that carry at least one field
        # beyond the prefix, truncated to their category prefix.
        return {
            connect_tag.join(item.split(connect_tag)[:keep_fields])
            for item in items
            if item.startswith(start_string) and item.count(connect_tag) >= min_tags
        }

    # Sorted for deterministic output; the original set difference produced an
    # arbitrary order in both the printed message and fail_text_list.
    missing = sorted(_category_prefixes(_ALL_TEXT) - _category_prefixes(my_list))
    if missing:
        fail_text_list.extend(missing)
        print(f'my_list is missing these "{start_string}" categories: {missing}')
        return False
    return True
def bool2condition(my_list,connect_tag='__'):
    """Validate that a predicted caption list is complete and self-consistent.

    Reads the garment meta types from ``my_list`` (captions starting with
    ``"meta"``) and, for each component they imply (shirt, waistband, skirt
    variants, pants, sleeves, collars, cuffs...), uses ``list_in_text`` to
    verify the required caption categories are all present.  Missing
    categories accumulate in the module-level global ``fail_text_list``.

    Args:
        my_list (list): Predicted caption strings, fields joined by ``connect_tag``.
        connect_tag (str): Separator between the fields of each caption.

    Returns:
        tuple: ``(bool_flag, fail_list)`` -- ``bool_flag`` is True when nothing
        is missing; ``fail_list`` is the deduplicated missing categories.
        NOTE(review): some early-return paths return the list unsorted while
        others (and the final return) sort it -- presumably harmless, but
        confirm callers do not rely on ordering.
    """
    global fail_text_list
    # Start every validation run with a clean failure accumulator.
    fail_text_list = []
    bool_flag = True
    # Last field of each "meta*" caption, e.g. "meta__upper__Shirt" -> "Shirt".
    meta_list=[item.split(connect_tag)[-1] for item in my_list if item.startswith("meta")]
    # The meta captions themselves (upper / waistband / bottom) must be present.
    if list_in_text(my_list,'meta')==False:
        bool_flag = False
    # NOTE(review): meta_list[0] below raises IndexError when my_list carries
    # no "meta*" caption at all -- confirm upstream always supplies one.
    left_bool="False"
    left_flag = [s for s in my_list if s.startswith("left__enable_asym")]
    # Asymmetric ("left__*") captions are required unless the upper garment is None.
    if meta_list[0]!="None":
        if len(left_flag)==0:
            print("left__enable_asym")
            fail_text_list.append("left__enable_asym")
            bool_flag = False
        else:
            left_bool=left_flag[0].split(connect_tag)[-1]
    left_fittedshirt_strapless_bool=True
    left_fittedshirt_strapless_flag=[s for s in my_list if s.startswith("left__fitted_shirt__strapless")]
    # An asymmetric FittedShirt additionally needs its left strapless flag.
    if meta_list[0] =='FittedShirt' and left_bool=='True' :
        if len(left_fittedshirt_strapless_flag)==0 :
            print("left__fitted_shirt__strapless")
            fail_text_list.append("left__fitted_shirt__strapless")
            bool_flag = False
            return bool_flag, sorted(list(set(fail_text_list)))
        if len(left_fittedshirt_strapless_flag) > 0:
            if left_fittedshirt_strapless_flag[0].split(connect_tag)[-1] == 'False':
                left_fittedshirt_strapless_bool = False
    # Per-component requirements implied by each meta type.
    for meta_item in meta_list:
        if meta_item =='FittedShirt':
            if list_in_text(my_list,'fitted_shirt')==False:
                bool_flag = False
        if meta_item =='Shirt':
            if list_in_text(my_list,'shirt')==False:
                bool_flag=False
            if left_bool == 'True':
                if list_in_text(my_list, 'left__shirt') == False:
                    bool_flag = False
        if meta_item=="StraightWB" or meta_item=="FittedWB":
            if list_in_text(my_list,'waistband')==False:
                bool_flag=False
        # Circle-style skirts all share the flare-skirt captions, plus extras
        # specific to each variant (cut / asymmetry / panel parameters).
        if meta_item =="SkirtCircle" or meta_item =="SkirtManyPanels" or meta_item=='AsymmSkirtCircle':
            if list_in_text(my_list,"flare-skirt")==False:
                bool_flag=False
            if meta_item=='SkirtCircle':
                if list_in_text(my_list,'flare-skirt__cut')==False:
                    bool_flag=False
            if meta_item=='AsymmSkirtCircle':
                if list_in_text(my_list,"flare-skirt__asymm__front_length")==False:
                    bool_flag=False
                if list_in_text(my_list,'flare-skirt__cut')==False:
                    bool_flag=False
            if meta_item=='SkirtManyPanels':
                if list_in_text(my_list,"flare-skirt__skirt-many-panels")==False:
                    bool_flag=False
        # A godet skirt also needs the captions of its declared base skirt.
        if meta_item=='GodetSkirt':
            if list_in_text(my_list,"godet-skirt")==False:
                bool_flag=False
            godet_base_item = [item.split(connect_tag)[-1] for item in my_list if item.startswith("godet-skirt__base")]
            if len(godet_base_item) == 0:
                bool_flag = False
                print("godet-skirt__base")
                fail_text_list.append("godet-skirt__base")
                return bool_flag, sorted(list(set(fail_text_list)))
            else:
                godet_base_item = godet_base_item[0]
                if godet_base_item == "Skirt2":
                    if list_in_text(my_list, "skirt") == False:
                        bool_flag = False
                if godet_base_item == "PencilSkirt":
                    if list_in_text(my_list, "pencil-skirt") == False:
                        bool_flag = False
        # A levels skirt needs its own captions plus those of its base and level parts.
        if meta_item=='SkirtLevels':
            if list_in_text(my_list,"levels-skirt")==False:
                bool_flag=False
            base_item = [item.split(connect_tag)[-1] for item in my_list if item.startswith("levels-skirt__base")]
            if len(base_item) == 0:
                bool_flag = False
                print("levels-skirt__base")
                fail_text_list.append("levels-skirt__base")
                return bool_flag, sorted(list(set(fail_text_list)))
            else:
                base_item = base_item[0]
                if base_item == "Skirt2":
                    if list_in_text(my_list, "skirt") == False:
                        bool_flag = False
                if base_item == "PencilSkirt":
                    if list_in_text(my_list, "pencil-skirt") == False:
                        bool_flag = False
                if base_item=="SkirtCircle" or base_item=="AsymmSkirtCircle":
                    if list_in_text(my_list, "flare-skirt") == False:
                        bool_flag = False
            level_item = [item.split(connect_tag)[-1] for item in my_list if item.startswith("levels-skirt__level")]
            if len(level_item) == 0:
                bool_flag = False
                print("levels-skirt__level")
                fail_text_list.append("levels-skirt__level")
                return bool_flag, sorted(list(set(fail_text_list)))
            else:
                level_item = level_item[0]
                if level_item == "Skirt2":
                    if list_in_text(my_list, "skirt") == False:
                        bool_flag = False
                if level_item == "SkirtCircle" or level_item == "AsymmSkirtCircle":
                    if list_in_text(my_list, "flare-skirt") == False:
                        bool_flag = False
        # Pants need general pants captions and, unless the cuff type is "None",
        # the full cuff caption group as well.
        if meta_item=='Pants':
            if list_in_text(my_list,"pants")==False:
                bool_flag=False
            cuff_type = [item.split(connect_tag)[-1] for item in my_list if item.startswith("pants__cuff__type")]
            if len(cuff_type) == 0:
                bool_flag = False
                print("pants__cuff__type")
                fail_text_list.append("pants__cuff__type")
                return bool_flag, sorted(list(set(fail_text_list)))
            else:
                if cuff_type[0] != "None":
                    if list_in_text(my_list, "pants__cuff") == False:
                        bool_flag = False
        if meta_item=='Skirt2':
            if list_in_text(my_list,"skirt")==False:
                bool_flag=False
        if meta_item=='PencilSkirt':
            if list_in_text(my_list,"pencil-skirt")==False:
                bool_flag=False
    # Upper-garment specific checks: collar and sleeve captions depend on the
    # upper type and (for FittedShirt) on the strapless flag.
    meta_upper = [item.split(connect_tag)[-1] for item in my_list if item.startswith("meta__upper")]
    if len(meta_upper)==0:
        print("meta__upper")
        fail_text_list.append("meta__upper")
        bool_flag = False
        return bool_flag,list(set(fail_text_list))
    meta_upper=meta_upper[0]
    strapless=None
    if meta_upper =='FittedShirt':
        strapless=[item.split(connect_tag)[-1] for item in my_list if item.startswith("fitted_shirt__strapless")]
        if len(strapless)==0:
            bool_flag=False
            print("fitted_shirt__strapless")
            fail_text_list.append("fitted_shirt__strapless")
            return bool_flag,list(set(fail_text_list))
        strapless=strapless[0]
    # Collars and sleeves exist for a Shirt, or for a FittedShirt that is not strapless.
    if (strapless==str(False) and meta_upper =='FittedShirt')or meta_upper=="Shirt":
        if list_in_text(my_list,"collar")==False:
            bool_flag=False
        if left_bool == 'True' and (left_fittedshirt_strapless_bool ==False or meta_upper=="Shirt") :
            if list_in_text(my_list, "left__collar") == False:
                bool_flag = False
        sleeveless=[item.split(connect_tag)[-1] for item in my_list if item.startswith("sleeve__sleeveless")]
        if len(sleeveless)==0:
            bool_flag=False
            print("sleeve__sleeveless")
            fail_text_list.append("sleeve__sleeveless")
            return bool_flag,list(set(fail_text_list))
        sleeveless=sleeveless[0]
        # A garment with sleeves needs the sleeve captions and, unless the cuff
        # type is "None", the cuff caption group too.
        if sleeveless==str(False):
            if list_in_text(my_list,"sleeve")==False :
                bool_flag=False
            cuff_type = [item.split(connect_tag)[-1] for item in my_list if item.startswith("sleeve__cuff__type")]
            if len(cuff_type)==0:
                bool_flag = False
                print("sleeve__cuff__type")
                fail_text_list.append("sleeve__cuff__type")
                return bool_flag, sorted(list(set(fail_text_list)))
            else:
                if cuff_type[0]!="None":
                    if list_in_text(my_list,"sleeve__cuff")==False:
                        bool_flag = False
        if left_bool == 'True' and (left_fittedshirt_strapless_bool == False or meta_upper == "Shirt"):
            if list_in_text(my_list,"left__sleeve")==False:
                bool_flag=False
            # Left-sleeve cuff: find its cuff type and, unless it is "None",
            # require the left-sleeve cuff caption group.
            cuff_type = [item.split(connect_tag)[-1] for item in my_list if item.startswith("left__sleeve__cuff__type")]
            if len(cuff_type) == 0:
                bool_flag = False
                print("left__sleeve__cuff__type")
                fail_text_list.append("left__sleeve__cuff__type")
                return bool_flag, sorted(list(set(fail_text_list)))
            else:
                if cuff_type[0] != "None":
                    if list_in_text(my_list, "left__sleeve__cuff") == False:
                        bool_flag = False
    return bool_flag,sorted(list(set(fail_text_list)))
def bool2text_alltext(text_list):
    """Verify every caption in ``text_list`` belongs to the global ``_ALL_TEXT`` vocabulary.

    Args:
        text_list (list): Caption strings to check.

    Returns:
        tuple: ``(ok, unknown)`` where ``ok`` is True when all captions are
        known and ``unknown`` lists the captions absent from ``_ALL_TEXT``.
    """
    unknown_captions = list(set(text_list).difference(_ALL_TEXT))
    return (not unknown_captions), unknown_captions
Reference in New Issue
Block a user