Skip to content

collect_usd_layers

CollectUsdLayers

Bases: HoudiniInstancePlugin

Collect the USD Layers that have configured save paths.

Source code in client/ayon_houdini/plugins/publish/collect_usd_layers.py
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
class CollectUsdLayers(plugin.HoudiniInstancePlugin):
    """Collect the USD Layers that have configured save paths.

    Traverses the layer stack of a ``usdrop`` instance (including sublayers
    and external references) to find layers that Solaris configured with an
    explicit save path, then creates one child publish instance per such
    layer so the artist can include or exclude each layer individually.
    """

    order = pyblish.api.CollectorOrder + 0.25
    label = "Collect USD Layers"
    families = ["usdrop"]

    def process(self, instance):
        # TODO: Replace this with a Hidden Creator so we collect these BEFORE
        #   starting the publish so the user sees them before publishing
        #   - however user should not be able to individually enable/disable
        #   this from the main ROP its created from?

        output = instance.data.get("output_node")
        if not output:
            self.log.debug("No output node found..")
            return

        rop_node = hou.node(instance.data["instance_node"])

        # Layers with an explicit save path, collected as tuples of
        # (Sdf.Layer, save path, creator hou.Node or None)
        save_layers = []
        stack: list[Sdf.Layer] = list(instance.data.get("layers", []))
        # Absolute layer identifiers already queued, to avoid revisiting the
        # same layer reached via multiple paths
        processed: set[str] = set(layer.identifier for layer in stack)

        def _add_layer(layer_identifier: str, relative_to: Sdf.Layer):
            """Add a child layer to track in the stack"""
            # Resolve to an absolute identifier so deduplication works even
            # when the same layer is referenced via different relative paths
            layer_identifier = relative_to.ComputeAbsolutePath(
                layer_identifier
            )
            if layer_identifier in processed:
                return
            processed.add(layer_identifier)
            child_layer = Sdf.Layer.FindOrOpen(layer_identifier)
            if child_layer is None:
                # The layer may not exist yet, if e.g. it has not been computed
                # or saved by Solaris yet.
                # TODO: We'll need to pinpoint which layers this may happen for
                self.log.warning(
                    "Unable to find Sdf Layer for %s", layer_identifier
                )
                return
            stack.append(child_layer)

        # NOTE: `_add_layer` appends to `stack` while we iterate it. Python
        # list iteration picks up items appended during iteration, so this
        # loop effectively walks the full (deduplicated) layer tree.
        for layer in stack:
            # We need to proceed into sublayers and external references
            # because these can also have configured save paths that we need
            # to collect. This logic mimics Houdini's `scenegraphlayers` model
            # used by the Scene Graph Layers panel in Solaris. Fix: #361
            # Note: We use `list` over `set` here just to match the behavior of
            #  Houdini's Scene Graph Layers panel to increase our chances the
            #  sorting of the layers is somewhat similar to what artists see in
            #  the panel. (It's purely for cosmetic reasons though)
            child_sublayers = list(layer.subLayerPaths)
            child_refs = list(ref for ref in layer.externalReferences
                              if ref and ref not in layer.subLayerPaths)
            for child_layer in child_sublayers + child_refs:
                _add_layer(child_layer, relative_to=layer)

            # Houdini stores its save configuration on a special prim
            info = layer.GetPrimAtPath("/HoudiniLayerInfo")
            if not info:
                continue
            save_path = info.customData.get("HoudiniSavePath")
            creator = info.customData.get("HoudiniCreatorNode")
            save_control = info.customData.get("HoudiniSaveControl")
            if save_control != "Explicit":
                continue

            self.log.debug("Found configured save path: "
                           "%s -> %s", layer, save_path)

            # Log node that configured this save path
            creator_node = hou.nodeBySessionId(creator) if creator else None
            if creator_node:
                self.log.debug(
                    "Created by: %s", creator_node.path()
                )

            # Skip any explicit save layer that is created by a geoclipsequence
            # node, because this will be the topology layer - which will be
            # included with a USD instance relatively by the
            # CollectUSDValueClips plug-in
            # Note: `geoclipsequence` nodes do not have explicit save control.
            # If explicit save controls are present, they are most likely
            # created by another node.
            # NOTE(review): `save_control != "Explicit"` can never be true
            # here because non-"Explicit" layers were already skipped above,
            # so this branch is dead code. Confirm whether the intent was to
            # skip geoclipsequence layers regardless of save control.
            if (
                creator_node
                and creator_node.type().name() == "geoclipsequence"
                and save_control != "Explicit"
            ):
                continue

            save_layers.append((layer, save_path, creator_node))

        # Store on the instance
        instance.data["usdConfiguredSavePaths"] = save_layers

        context = instance.context
        # In the ideal case this plugin would run after
        #   "CollectAnatomyInstanceData", but because it runs before it,
        #   the entities have to be fetched here
        project_name = context.data["projectName"]
        folder_path = instance.data["folderPath"]
        task_name = instance.data.get("task")
        folder_entity = instance.data.get("folderEntity")
        task_entity = instance.data.get("taskEntity")
        if not folder_entity and folder_path:
            folder_entity = ayon_api.get_folder_by_path(
                project_name, folder_path
            )
            instance.data["folderEntity"] = folder_entity

        if not task_entity and folder_entity and task_name:
            task_entity = ayon_api.get_task_by_name(
                project_name,
                folder_entity["id"],
                task_name
            )
            instance.data["taskEntity"] = task_entity

        # Create configured layer instances so User can disable updating
        # specific configured layers for publishing.
        for layer, save_path, creator_node in save_layers:
            name = os.path.basename(save_path)
            layer_inst = context.create_instance(name)

            # include same USD ROP
            layer_inst.append(rop_node)

            staging_dir, fname_with_args = os.path.split(save_path)

            # The save path may include :SDF_FORMAT_ARGS: which will conflict
            # with how we end up integrating these files because those will
            # NOT be included in the actual output filename on disk, so we
            # remove the SDF_FORMAT_ARGS from the filename.
            fname = Sdf.Layer.SplitIdentifier(fname_with_args)[0]
            fname_no_ext, ext = os.path.splitext(fname)

            variant = fname_no_ext

            # Strip off any trailing version number in the form of _v[0-9]+
            variant = re.sub("_v[0-9]+$", "", variant)

            layer_inst.data["usd_layer"] = layer
            layer_inst.data["usd_layer_save_path"] = save_path

            product_base_type = "usd"
            variant_base = instance.data["variant"]

            # Presumably newer `get_product_name` versions accept entity
            # arguments (signaled by their `use_entities` flag); fall back to
            # passing task name/type otherwise - TODO confirm against ayon-core
            get_product_name_kwargs = {}
            if getattr(get_product_name, "use_entities", False):
                get_product_name_kwargs.update({
                    "folder_entity": folder_entity,
                    "task_entity": task_entity,
                    "product_base_type": product_base_type,
                })
            else:
                task_name = task_type = None
                if task_entity:
                    task_name = task_entity["name"]
                    task_type = task_entity["taskType"]
                get_product_name_kwargs.update({
                    "task_name": task_name,
                    "task_type": task_type,
                })

            product_name = get_product_name(
                project_name=instance.context.data["projectName"],
                host_name=instance.context.data["hostName"],
                product_type=instance.data["productType"],
                variant=variant_base + "_" + variant,
                project_settings=context.data["project_settings"],
                project_entity=context.data["projectEntity"],
                dynamic_data={
                    "folder": {
                        "label": folder_entity["label"],
                        "name": folder_entity["name"],
                        "type": folder_entity["folderType"]
                    },
                },
                **get_product_name_kwargs,
            )

            label = "{0} -> {1}".format(instance.data["name"], product_name)

            layer_inst.data["family"] = product_base_type
            layer_inst.data["families"] = [product_base_type]
            layer_inst.data["productBaseType"] = product_base_type
            layer_inst.data["productType"] = product_base_type
            layer_inst.data["productName"] = product_name
            layer_inst.data["label"] = label
            layer_inst.data["folderPath"] = instance.data["folderPath"]
            layer_inst.data["task"] = instance.data.get("task")
            layer_inst.data["instance_node"] = instance.data["instance_node"]
            layer_inst.data["render"] = False
            layer_inst.data["output_node"] = creator_node

            # Inherit "use handles" from the source instance
            # TODO: Do we want to maybe copy full `publish_attributes` instead?
            copy_instance_data(
                instance, layer_inst,
                attr="publish_attributes.CollectRopFrameRange.use_handles"
            )

            # Allow this product to be grouped into a USD Layer on creation
            layer_inst.data["productGroup"] = (
                instance.data.get("productGroup") or "USD Layer"
            )
            # For now just assume the representation will get published
            representation = {
                "name": "usd",
                "ext": ext.lstrip("."),
                "stagingDir": staging_dir,
                "files": fname,

                # Store an additional key with filenames including the
                # SDF_FORMAT_ARGS so we can use this to remap paths
                # accurately later.
                "files_raw": fname_with_args
            }
            layer_inst.data.setdefault("representations", []).append(
                representation)

copy_instance_data(instance_src, instance_dest, attr)

Copy instance data from src instance to dest instance.

Examples:

>>> copy_instance_data(instance_src, instance_dest,
...                    attr="publish_attributes.CollectRopFrameRange")

Parameters:

Name Type Description Default
instance_src Instance

Source instance to copy from

required
instance_dest Instance

Target instance to copy to

required
attr str

Attribute on the source instance to copy. This can be a nested key joined by . to only copy sub entries of dictionaries in the source instance's data.

required

Raises:

Type Description
KnownPublishError

If a parent key already exists on the destination instance but is not of the correct type (= is not a dict)

Source code in client/ayon_houdini/plugins/publish/collect_usd_layers.py
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
def copy_instance_data(instance_src, instance_dest, attr):
    """Copy instance data from `src` instance to `dest` instance.

    Examples:
        >>> copy_instance_data(instance_src, instance_dest,
        ...                    attr="publish_attributes.CollectRopFrameRange")

    Arguments:
        instance_src (pyblish.api.Instance): Source instance to copy from
        instance_dest (pyblish.api.Instance): Target instance to copy to
        attr (str): Attribute on the source instance to copy. This can be
            a nested key joined by `.` to only copy sub entries of dictionaries
            in the source instance's data.

    Raises:
        KnownPublishError: If a parent key already exists on the destination
            instance but is not of the correct type (= is not a dict)

    """

    src_data = instance_src.data
    dest_data = instance_dest.data
    keys = attr.split(".")
    last_index = len(keys) - 1
    for i, key in enumerate(keys):
        if key not in src_data:
            # Nothing to copy - the (nested) key is absent on the source
            break

        src_value = src_data[key]
        # Bugfix: compare against the index of the final key. The original
        # compared `i` to `len(key)` (the key string's length), which made
        # the final assignment branch effectively unreachable.
        if i != last_index:
            if not isinstance(src_value, dict):
                # Cannot descend into a non-dict source value - nothing
                # deeper to copy
                break
            dest_data = dest_data.setdefault(key, {})
            if not isinstance(dest_data, dict):
                raise KnownPublishError("Destination must be a dict.")
            src_data = src_value
        else:
            # Last iteration - deep copy the value so the destination does
            # not share mutable state with the source
            dest_data[key] = copy.deepcopy(src_value)