Skip to content

collect_usd_layers

CollectUsdLayers

Bases: HoudiniInstancePlugin

Collect the USD Layers that have configured save paths.

Source code in client/ayon_houdini/plugins/publish/collect_usd_layers.py
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
class CollectUsdLayers(plugin.HoudiniInstancePlugin):
    """Collect the USD Layers that have configured save paths.

    For every layer of the USD ROP that has a "HoudiniSavePath" configured,
    an additional pyblish instance is created in the context so that each
    configured layer can be enabled/disabled and published individually.
    """

    order = pyblish.api.CollectorOrder + 0.25
    label = "Collect USD Layers"
    families = ["usdrop"]

    def process(self, instance):
        # TODO: Replace this with a Hidden Creator so we collect these BEFORE
        #   starting the publish so the user sees them before publishing
        #   - however user should not be able to individually enable/disable
        #   this from the main ROP it's created from?

        # Without an output node there are no layers to inspect.
        output = instance.data.get("output_node")
        if not output:
            self.log.debug("No output node found..")
            return

        rop_node = hou.node(instance.data["instance_node"])

        # Gather a (layer, save path, creator node) tuple per configured
        # save layer of the ROP.
        save_layers = []
        for layer in usdlib.get_configured_save_layers(rop_node):

            # Houdini stores per-layer metadata as custom data on the
            # special "HoudiniLayerInfo" root prim.
            info = layer.rootPrims.get("HoudiniLayerInfo")
            save_path = info.customData.get("HoudiniSavePath")
            creator = info.customData.get("HoudiniCreatorNode")

            self.log.debug("Found configured save path: "
                           "%s -> %s", layer, save_path)

            # Log node that configured this save path
            creator_node = hou.nodeBySessionId(creator) if creator else None
            if creator_node:
                self.log.debug(
                    "Created by: %s", creator_node.path()
                )

            save_layers.append((layer, save_path, creator_node))

        # Store on the instance
        instance.data["usdConfiguredSavePaths"] = save_layers

        # Create configured layer instances so User can disable updating
        # specific configured layers for publishing.
        context = instance.context
        for layer, save_path, creator_node in save_layers:
            name = os.path.basename(save_path)
            layer_inst = context.create_instance(name)

            # include same USD ROP
            layer_inst.append(rop_node)

            staging_dir, fname_with_args = os.path.split(save_path)

            # The save path may include :SDF_FORMAT_ARGS: which will conflict
            # with how we end up integrating these files because those will
            # NOT be included in the actual output filename on disk, so we
            # remove the SDF_FORMAT_ARGS from the filename.
            fname = Sdf.Layer.SplitIdentifier(fname_with_args)[0]
            fname_no_ext, ext = os.path.splitext(fname)

            variant = fname_no_ext

            # Strip off any trailing version number in the form of _v[0-9]+
            variant = re.sub("_v[0-9]+$", "", variant)

            layer_inst.data["usd_layer"] = layer
            layer_inst.data["usd_layer_save_path"] = save_path

            # Build the product name from the source instance's variant plus
            # the (version-stripped) layer filename as a sub-variant.
            project_name = context.data["projectName"]
            variant_base = instance.data["variant"]
            product_name = get_product_name(
                project_name=project_name,
                # TODO: This should use task from `instance`
                task_name=context.data["anatomyData"]["task"]["name"],
                task_type=context.data["anatomyData"]["task"]["type"],
                host_name=context.data["hostName"],
                product_type="usd",
                variant=variant_base + "_" + variant,
                project_settings=context.data["project_settings"]
            )

            # Label makes the relation to the source instance visible
            # in the publisher UI.
            label = "{0} -> {1}".format(instance.data["name"], product_name)
            family = "usd"
            layer_inst.data["family"] = family
            layer_inst.data["families"] = [family]
            layer_inst.data["productName"] = product_name
            layer_inst.data["productType"] = instance.data["productType"]
            layer_inst.data["label"] = label
            layer_inst.data["folderPath"] = instance.data["folderPath"]
            layer_inst.data["task"] = instance.data.get("task")
            layer_inst.data["instance_node"] = instance.data["instance_node"]
            # The layer instance itself does not trigger a render; the
            # source USD ROP instance does.
            layer_inst.data["render"] = False
            layer_inst.data["output_node"] = creator_node

            # Inherit "use handles" from the source instance
            # TODO: Do we want to maybe copy full `publish_attributes` instead?
            copy_instance_data(
                instance, layer_inst,
                attr="publish_attributes.CollectRopFrameRange.use_handles"
            )

            # Allow this subset to be grouped into a USD Layer on creation
            layer_inst.data["productGroup"] = (
                instance.data.get("productGroup") or "USD Layer"
            )
            # For now just assume the representation will get published
            representation = {
                "name": "usd",
                "ext": ext.lstrip("."),
                "stagingDir": staging_dir,
                "files": fname,

                # Store an additional key with filenames including the
                # SDF_FORMAT_ARGS so we can use this to remap paths
                # accurately later.
                "files_raw": fname_with_args
            }
            layer_inst.data.setdefault("representations", []).append(
                representation)

copy_instance_data(instance_src, instance_dest, attr)

Copy instance data from src instance to dest instance.

Examples:

>>> copy_instance_data(instance_src, instance_dest,
...                    attr="publish_attributes.CollectRopFrameRange")

Parameters:

Name Type Description Default
instance_src Instance

Source instance to copy from

required
instance_dest Instance

Target instance to copy to

required
attr str

Attribute on the source instance to copy. This can be a nested key joined by . to only copy sub entries of dictionaries in the source instance's data.

required

Raises:

Type Description
KnownPublishError

If a parent key already exists on the destination instance but is not of the correct type (= is not a dict)

Source code in client/ayon_houdini/plugins/publish/collect_usd_layers.py
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
def copy_instance_data(instance_src, instance_dest, attr):
    """Copy instance data from `src` instance to `dest` instance.

    Examples:
        >>> copy_instance_data(instance_src, instance_dest,
        ...                    attr="publish_attributes.CollectRopFrameRange")

    Arguments:
        instance_src (pyblish.api.Instance): Source instance to copy from
        instance_dest (pyblish.api.Instance): Target instance to copy to
        attr (str): Attribute on the source instance to copy. This can be
            a nested key joined by `.` to only copy sub entries of dictionaries
            in the source instance's data.

    Raises:
        KnownPublishError: If a parent key already exists on the destination
            instance but is not of the correct type (= is not a dict)

    """

    src_data = instance_src.data
    dest_data = instance_dest.data
    keys = attr.split(".")
    last_index = len(keys) - 1
    for i, key in enumerate(keys):
        # Nothing to copy if the source doesn't have the key at all.
        if key not in src_data:
            break

        src_value = src_data[key]
        # BUGFIX: this previously compared against `len(key)` (the length of
        # the key *string*), so the final assignment branch was effectively
        # never reached and the value was never copied. Compare against the
        # last index of the split key path instead.
        if i != last_index:
            # Intermediate key: descend into (or create) the nested dict
            # on the destination.
            dest_data = dest_data.setdefault(key, {})
            if not isinstance(dest_data, dict):
                raise KnownPublishError("Destination must be a dict.")
            src_data = src_value
        else:
            # Last iteration - assign the value. Deep copy so later
            # mutations on the source do not leak into the destination.
            dest_data[key] = copy.deepcopy(src_value)