import os
import copy

import ayon_api
import pyblish.api
import substance_painter.textureset

# NOTE: The exact module paths of these helper imports are assumed from the
# usual AYON core / Substance Painter addon layout.
from ayon_core.pipeline import tempdir
from ayon_core.pipeline.create import get_product_name
from ayon_substancepainter.api.lib import (
    get_filtered_export_preset,
    get_parsed_export_maps,
    strip_template,
)


class CollectTextureSet(pyblish.api.InstancePlugin):
"""Extract Textures using an output template config"""
# TODO: Production-test usage of color spaces
# TODO: Detect what source data channels end up in each file
label = "Collect Texture Set images"
hosts = ["substancepainter"]
families = ["textureSet"]
order = pyblish.api.CollectorOrder + 0.01

    def process(self, instance):
        config = self.get_export_config(instance)
        project_name = instance.context.data["projectName"]
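        # Resolve the AYON folder and task entities so that product names
        # can be generated for the instance's task context.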
        folder_entity = ayon_api.get_folder_by_path(
            project_name,
            instance.data["folderPath"]
        )
        task_name = instance.data.get("task")
        task_entity = None
        if folder_entity and task_name:
            task_entity = ayon_api.get_task_by_name(
                project_name, folder_entity["id"], task_name
            )

        instance.data["exportConfig"] = config
        strip_texture_set = instance.data["creator_attributes"].get(
            "flattenTextureSets", False)
        maps = get_parsed_export_maps(config, strip_texture_set)
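        # `maps` is structured as:
        #     {(texture_set_name, stack_name): {template: [output, ...]}}
        # where each output dict holds at least a "filepath" and, where
        # applicable, "udim" and "colorSpace" entries (see usage below).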
        # Let's break the instance into multiple instances to integrate
        # a product per generated texture or texture UDIM sequence
        for (texture_set_name, stack_name), template_maps in maps.items():
            self.log.info(f"Processing {texture_set_name}/{stack_name}")
            for template, outputs in template_maps.items():
                self.log.info(f"Processing {template}")
                self.create_image_instance(
                    instance, template, outputs,
                    task_entity=task_entity,
                    texture_set_name=texture_set_name,
                    stack_name=stack_name,
                    strip_texture_set=strip_texture_set)

    def create_image_instance(self, instance, template, outputs,
                              task_entity, texture_set_name, stack_name,
                              strip_texture_set=False):
        """Create a new instance per image or UDIM sequence.

        The new instances will be of product type `image`.
        """
        context = instance.context
        first_filepath = outputs[0]["filepath"]
        fnames = [os.path.basename(output["filepath"]) for output in outputs]
        ext = os.path.splitext(first_filepath)[1]
        assert ext.lstrip("."), f"No extension: {ext}"

        # all_texture_sets = substance_painter.textureset.all_texture_sets()

        # Define the suffix we want to give this particular texture
        # set and set up a remapped product naming for it.
        suffix = ""
        if not strip_texture_set:
            texture_set = substance_painter.textureset.TextureSet.from_name(
                texture_set_name
            )
            # More than one texture set, include texture set name
            suffix += f".{texture_set_name}"
            if texture_set.is_layered_material() and stack_name:
                # More than one stack, include stack name
                suffix += f".{stack_name}"

        # Always include the map identifier
        map_identifier = strip_template(template)
        suffix += f".{map_identifier}"

        task_name = task_type = None
        if task_entity:
            task_name = task_entity["name"]
            task_type = task_entity["taskType"]

        # TODO: The product type actually isn't 'texture' currently but
        #   for now this is only done so the product name starts with
        #   'texture'
        image_product_name = get_product_name(
            context.data["projectName"],
            task_name,
            task_type,
            context.data["hostName"],
            product_type="texture",
            variant=instance.data["variant"] + suffix,
            project_settings=context.data["project_settings"]
        )
        image_product_group_name = get_product_name(
            context.data["projectName"],
            task_name,
            task_type,
            context.data["hostName"],
            product_type="texture",
            variant=instance.data["variant"],
            project_settings=context.data["project_settings"]
        )

        # Prepare representation
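        # 'files' is a single filename (str) for a single file, or a list of
        # filenames for multi-file outputs such as UDIM sequences.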
        representation = {
            "name": ext.lstrip("."),
            "ext": ext.lstrip("."),
            "files": fnames if len(fnames) > 1 else fnames[0],
        }

        # Mark as UDIM explicitly if it has UDIM tiles.
        if bool(outputs[0].get("udim")):
            # The representation for a UDIM sequence should have a `udim` key
            # that is a list of all udim tiles (str) like: ["1001", "1002"]
            # strings. See CollectTextures plug-in and Integrators.
            representation["udim"] = [output["udim"] for output in outputs]

        # Set up the representation for thumbnail generation
        # TODO: Simplify this once thumbnail extraction is refactored
        staging_dir = os.path.dirname(first_filepath)
        representation["tags"] = ["review"]
        representation["stagingDir"] = staging_dir

        # Clone the instance
        product_type = "image"
        image_instance = context.create_instance(image_product_name)
        image_instance[:] = instance[:]
        image_instance.data.update(copy.deepcopy(dict(instance.data)))
        image_instance.data["name"] = image_product_name
        image_instance.data["label"] = image_product_name
        image_instance.data["productName"] = image_product_name
        image_instance.data["productType"] = product_type
        image_instance.data["family"] = product_type
        image_instance.data["families"] = [product_type, "textures"]
        if instance.data["creator_attributes"].get("review"):
            image_instance.data["families"].append("review")

        image_instance.data["representations"] = [representation]

        # Group the textures together in the loader
        image_instance.data["productGroup"] = image_product_group_name

        # Store the texture set name and stack name on the instance
        image_instance.data["textureSetName"] = texture_set_name
        image_instance.data["textureStackName"] = stack_name

        # Store color space with the instance
        # Note: The extractor will assign it to the representation
        colorspace = outputs[0].get("colorSpace")
        if colorspace:
            self.log.debug(f"{image_product_name} colorspace: {colorspace}")
            image_instance.data["colorspace"] = colorspace

        # Store the instance in the original instance as a member
        instance.append(image_instance)

    def get_export_config(self, instance):
        """Return an export configuration dict for texture exports.

        This config can be supplied to:
            - `substance_painter.export.export_project_textures`
            - `substance_painter.export.list_project_textures`

        See documentation on the substance_painter.export module about the
        formatting of the configuration dictionary.

        Args:
            instance (pyblish.api.Instance): Texture Set instance to be
                published.

        Returns:
            dict: Export config

        """
        creator_attrs = instance.data["creator_attributes"]
        preset_url = creator_attrs["exportPresetUrl"]
        is_single_output = creator_attrs.get(
            "flattenTextureSets", False)

        # Temporary directory purely for 'collecting' the expected output
        # files which is replaced in the export config by the
        # `CollectTextureSetStagingDir` plug-in below at a later collector
        # order that has correctly defined anatomy data for the instance's
        # custom staging dir.
        temp_dir = tempdir.get_temp_dir(
            instance.context.data["projectName"],
            use_local_temp=True)

        # See: https://substance3d.adobe.com/documentation/ptpy/api/substance_painter/export  # noqa
        config = {
            "exportShaderParams": True,
            "exportPath": temp_dir,
            "defaultExportPreset": preset_url,
            # Custom overrides to the exporter
            "exportParameters": [
                {
                    "parameters": {
                        "fileFormat": creator_attrs["exportFileFormat"],
                        "sizeLog2": creator_attrs["exportSize"],
                        "paddingAlgorithm": creator_attrs["exportPadding"],
                        "dilationDistance": creator_attrs["exportDilationDistance"]  # noqa
                    }
                }
            ]
        }

        # Create the list of Texture Sets to export.
        export_texture_sets = creator_attrs.get("exportTextureSets", [])
        if not export_texture_sets:
            # Export all texture sets
            export_texture_sets = [
                texture_set.name() for texture_set in
                substance_painter.textureset.all_texture_sets()
            ]
        config["exportList"] = [
            {"rootPath": texture_set_name}
            for texture_set_name in export_texture_sets
        ]
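        # Remove parameter overrides that were left unset (None) so that
        # only explicitly set values override the export preset.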
        for override in config["exportParameters"]:
            parameters = override.get("parameters")
            for key, value in dict(parameters).items():
                if value is None:
                    parameters.pop(key)
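        # Merge the (channel-filtered) export preset maps into the config.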
        channel_layer = creator_attrs.get("exportChannel", [])
        maps = get_filtered_export_preset(
            preset_url, channel_layer, is_single_output
        )
        config.update(maps)

        return config