pipeline

Basic avalon integration

ResolveHost

Bases: HostBase, IWorkfileHost, ILoadHost, IPublishHost

Source code in client/ayon_resolve/api/pipeline.py (lines 50-108)
class ResolveHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
    name = "resolve"

    def install(self):
        """Install resolve-specific functionality of avalon-core.

        This is where you install menus and register families, data
        and loaders into resolve.

        It is called automatically when installing via `api.install(resolve)`.

        See the Maya equivalent for inspiration on how to implement this.

        """

        log.info("ayon_resolve installed")

        pyblish.register_host(self.name)
        pyblish.register_plugin_path(PUBLISH_PATH)
        print("Registering DaVinci Resolve plug-ins..")

        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)
        register_inventory_action_path(INVENTORY_PATH)

        # register callback for switching publishable
        pyblish.register_callback("instanceToggled",
                                  on_pyblish_instance_toggled)

        get_resolve_module()

    def open_workfile(self, filepath):
        return open_file(filepath)

    def save_workfile(self, filepath=None):
        return save_file(filepath)

    def work_root(self, session):
        return work_root(session)

    def get_current_workfile(self):
        return current_file()

    def workfile_has_unsaved_changes(self):
        return has_unsaved_changes()

    def get_workfile_extensions(self):
        return file_extensions()

    def get_containers(self):
        return ls()

    def get_context_data(self):
        # TODO: implement to support persisting context attributes
        return {}

    def update_context_data(self, data, changes):
        # TODO: implement to support persisting context attributes
        pass

install()

Install resolve-specific functionality of avalon-core.

This is where you install menus and register families, data and loaders into resolve.

It is called automatically when installing via api.install(resolve).

See the Maya equivalent for inspiration on how to implement this.

Source code in client/ayon_resolve/api/pipeline.py (lines 53-79)
def install(self):
    """Install resolve-specific functionality of avalon-core.

    This is where you install menus and register families, data
    and loaders into resolve.

    It is called automatically when installing via `api.install(resolve)`.

    See the Maya equivalent for inspiration on how to implement this.

    """

    log.info("ayon_resolve installed")

    pyblish.register_host(self.name)
    pyblish.register_plugin_path(PUBLISH_PATH)
    print("Registering DaVinci Resolve plug-ins..")

    register_loader_plugin_path(LOAD_PATH)
    register_creator_plugin_path(CREATE_PATH)
    register_inventory_action_path(INVENTORY_PATH)

    # register callback for switching publishable
    pyblish.register_callback("instanceToggled",
                              on_pyblish_instance_toggled)

    get_resolve_module()
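
As a rough illustration of how this class is typically wired up, the sketch below registers the host through ayon_core's install_host entry point; the exact bootstrap call used by your launcher may differ, and it must run inside Resolve's scripting environment for get_resolve_module() to succeed:

from ayon_core.pipeline import install_host
from ayon_resolve.api.pipeline import ResolveHost

# install_host() stores the host instance globally and invokes its
# install() method, which registers the pyblish, loader, creator and
# inventory plug-in paths shown above.
host = ResolveHost()
install_host(host)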

containerise(timeline_item, name, namespace, context, loader=None, data=None)

Bundle a Resolve object into an assembly and imprint it with metadata.

Containerization enables tracking of version, author and origin for loaded assets.

Parameters:

    timeline_item (TimelineItem): The object to containerise. Required.
    name (str): Name of resulting assembly. Required.
    namespace (str): Namespace under which to host container. Required.
    context (dict): Asset information. Required.
    loader (str, optional): Name of node used to produce this container. Default: None.

Returns:

    timeline_item (TimelineItem): Containerized object.

Source code in client/ayon_resolve/api/pipeline.py (lines 111-148)
def containerise(timeline_item,
                 name,
                 namespace,
                 context,
                 loader=None,
                 data=None):
    """Bundle Resolve's object into an assembly and imprint it with metadata

    Containerization enables a tracking of version, author and origin
    for loaded assets.

    Arguments:
        timeline_item (resolve.TimelineItem): The object to containerise
        name (str): Name of resulting assembly
        namespace (str): Namespace under which to host container
        context (dict): Asset information
        loader (str, optional): Name of node used to produce this container.

    Returns:
        timeline_item (resolve.TimelineItem): containerized object

    """

    data_imprint = OrderedDict({
        "schema": "openpype:container-2.0",
        "id": AVALON_CONTAINER_ID,
        "name": str(name),
        "namespace": str(namespace),
        "loader": str(loader),
        "representation": context["representation"]["id"],
    })

    if data:
        data_imprint.update(data)

    lib.set_timeline_item_ayon_tag(timeline_item, data_imprint)

    return timeline_item
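
A hedged usage sketch: inside a loader's load() implementation one might containerise a freshly placed timeline item roughly like this (timeline_item, context and the literal names below are placeholders, not values defined by this module):

from ayon_resolve.api.pipeline import containerise

containerise(
    timeline_item,                 # resolve.TimelineItem created by the loader
    name="reviewMain",             # hypothetical product name
    namespace="sh010",             # hypothetical namespace
    context=context,               # representation context from the load
    loader="LoadClip",             # name of the loader class
    data={"folderPath": "/shots/sh010"},  # optional extra keys to imprint
)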

ls()

List available containers.

This function is used by the Container Manager in Nuke. You'll need to implement a for-loop that then yields one Container at a time.

See the container.json schema for details on how it should look, and the Maya equivalent, which is in avalon.maya.pipeline

Source code in client/ayon_resolve/api/pipeline.py (lines 151-204)
def ls():
    """List available containers.

    This function is used by the Container Manager in Nuke. You'll
    need to implement a for-loop that then *yields* one Container at
    a time.

    See the `container.json` schema for details on how it should look,
    and the Maya equivalent, which is in `avalon.maya.pipeline`
    """

    # Media Pool instances from Load Media loader
    for clip in lib.iter_all_media_pool_clips():
        data = clip.GetMetadata(constants.AYON_TAG_NAME)
        if not data:
            continue

        try:
            data = json.loads(data)
        except json.JSONDecodeError:
            log.warning(
                f"Failed to parse json data from media pool item: "
                f"{clip.GetName()}"
            )
            continue

        # Treat the data as a container. A clip's metadata may carry
        # additional keys because it needs to store both 'load' and
        # 'publish' data; in that case only the 'load' data describes
        # the container.
        if data.get("load"):
            data = data["load"]

        # If not all required data, skip it
        required = ['schema', 'id', 'loader', 'representation']
        if not all(key in data for key in required):
            continue

        container = {key: data[key] for key in required}
        container["objectName"] = clip.GetName()  # Get path in folders
        container["namespace"] = clip.GetName()
        container["name"] = clip.GetUniqueId()
        container["_item"] = clip
        yield container

    # Timeline instances from Load Clip loader
    # get all track items from current timeline
    all_timeline_items = lib.get_current_timeline_items(filter=False)

    for timeline_item_data in all_timeline_items:
        timeline_item = timeline_item_data["clip"]["item"]
        container = parse_container(timeline_item)
        if container:
            yield container
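
For example, a scene-inventory style listing could consume this generator as in the minimal sketch below (the dictionary keys are those assembled by the function above):

from ayon_resolve.api.pipeline import ls

for container in ls():
    print(
        container["name"],
        container["loader"],
        container["representation"],
    )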

maintained_selection()

Maintain selection during context

Example:

    >>> with maintained_selection():
    ...     node['selected'].setValue(True)
    >>> print(node['selected'].value())
    False

Source code in client/ayon_resolve/api/pipeline.py (lines 269-283)
@contextlib.contextmanager
def maintained_selection():
    """Maintain selection during context

    Example:
        >>> with maintained_selection():
        ...     node['selected'].setValue(True)
        >>> print(node['selected'].value())
        False
    """
    try:
        # do the operation
        yield
    finally:
        pass

on_pyblish_instance_toggled(instance, old_value, new_value)

Toggle node passthrough states on instance toggles.

Source code in client/ayon_resolve/api/pipeline.py (lines 292-302)
def on_pyblish_instance_toggled(instance, old_value, new_value):
    """Toggle node passthrough states on instance toggles."""

    log.info("instance toggle: {}, old_value: {}, new_value:{} ".format(
        instance, old_value, new_value))

    from ayon_resolve.api import set_publish_attribute

    # Whether instances should be passthrough based on new value
    timeline_item = instance.data["item"]
    set_publish_attribute(timeline_item, new_value)

parse_container(timeline_item, validate=True)

Return container data from timeline_item's marker data.

Parameters:

    timeline_item (TimelineItem): A containerized track item. Required.
    validate (bool, optional): Validate the data against the avalon schema. Default: True.

Returns:

    dict: The container schema data for the input containerized track item.

Source code in client/ayon_resolve/api/pipeline.py (lines 207-241)
def parse_container(timeline_item, validate=True):
    """Return container data from timeline_item's marker data.

    Args:
        timeline_item (resolve.TimelineItem): A containerized track item.
        validate (bool)[optional]: validating with avalon scheme

    Returns:
        dict: The container schema data for input containerized track item.

    """
    # convert tag metadata to normal keys names
    data = lib.get_timeline_item_ayon_tag(timeline_item)

    if validate and data and data.get("schema"):
        schema.validate(data)

    if not isinstance(data, dict):
        return

    # If not all required data return the empty container
    required = ['schema', 'id', 'name',
                'namespace', 'loader', 'representation']

    if not all(key in data for key in required):
        return

    container = {key: data[key] for key in required}

    container["objectName"] = timeline_item.GetName()

    # Store reference to the node object
    container["_timeline_item"] = timeline_item

    return container
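
A minimal sketch of inspecting the containers on the current timeline with this helper; lib.get_current_timeline_items is the same helper already used by ls() above, and items without an AYON tag simply yield no container:

from ayon_resolve.api import lib
from ayon_resolve.api.pipeline import parse_container

for item_data in lib.get_current_timeline_items(filter=False):
    container = parse_container(item_data["clip"]["item"])
    if container:
        print(container["namespace"], container["representation"])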

reset_selection()

Deselect all selected nodes

Source code in client/ayon_resolve/api/pipeline.py (lines 286-289)
def reset_selection():
    """Deselect all selected nodes
    """
    pass

update_container(timeline_item, data=None)

Update the container data stored in the input timeline_item's AYON marker data.

Parameters:

    timeline_item (TimelineItem): A containerized track item. Required.
    data (dict, optional): Dictionary with data to be updated. Default: None.

Returns:

    bool: True if container was updated correctly.

Source code in client/ayon_resolve/api/pipeline.py (lines 244-266)
def update_container(timeline_item, data=None):
    """Update container data to input timeline_item's ayon marker data.

    Args:
        timeline_item (resolve.TimelineItem): A containerized track item.
        data (dict)[optional]: dictionary with data to be updated

    Returns:
        bool: True if container was updated correctly

    """
    data = data or {}

    container = lib.get_timeline_item_ayon_tag(timeline_item)

    for _key, _value in container.items():
        try:
            container[_key] = data[_key]
        except KeyError:
            pass

    log.info("Updating container: `{}`".format(timeline_item))
    return bool(lib.set_timeline_item_ayon_tag(timeline_item, container))
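
For instance, a loader's update step might refresh only the stored representation id, leaving the rest of the marker data untouched; timeline_item and new_representation_id are placeholders for values supplied by the update context, not names defined in this module:

from ayon_resolve.api.pipeline import update_container

# Only keys that already exist in the marker data are overwritten,
# so passing a single key updates just that field.
updated = update_container(
    timeline_item,
    data={"representation": new_representation_id},
)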