
pipeline

BlenderHost

Bases: HostBase, IWorkfileHost, IPublishHost, ILoadHost

Source code in client/ayon_blender/api/pipeline.py
class BlenderHost(HostBase, IWorkfileHost, IPublishHost, ILoadHost):
    name = "blender"

    def get_app_information(self):
        from ayon_core.host import ApplicationInformation

        return ApplicationInformation(
            app_name="Blender",
            app_version=bpy.app.version_string,
        )

    def install(self):
        """Override install method from HostBase.
        Install Blender host functionality."""
        install()

    def get_containers(self) -> Iterator:
        """List containers from active Blender scene."""
        return ls()

    def get_workfile_extensions(self) -> List[str]:
        """Override get_workfile_extensions method from IWorkfileHost.
        Get workfile possible extensions.

        Returns:
            List[str]: Workfile extensions.
        """
        return file_extensions()

    def save_workfile(self, dst_path: str = None):
        """Override save_workfile method from IWorkfileHost.
        Save currently opened workfile.

        Args:
            dst_path (str): Where the current scene should be saved. Or use
                current path if `None` is passed.
        """
        save_file(dst_path if dst_path else bpy.data.filepath)

    def open_workfile(self, filepath: str):
        """Override open_workfile method from IWorkfileHost.
        Open workfile at specified filepath in the host.

        Args:
            filepath (str): Path to workfile.
        """
        open_file(filepath)

    def get_current_workfile(self) -> str:
        """Override get_current_workfile method from IWorkfileHost.
        Retrieve currently opened workfile path.

        Returns:
            str: Path to currently opened workfile.
        """
        return current_file()

    def workfile_has_unsaved_changes(self) -> bool:
        """Override wokfile_has_unsaved_changes method from IWorkfileHost.
        Returns True if opened workfile has no unsaved changes.

        Returns:
            bool: True if scene is saved and False if it has unsaved
                modifications.
        """
        return has_unsaved_changes()

    def work_root(self, session) -> str:
        """Override work_root method from IWorkfileHost.
        Modify workdir per host.

        Args:
            session (dict): Session context data.

        Returns:
            str: Path to new workdir.
        """
        return work_root(session)

    def get_context_data(self) -> dict:
        """Override abstract method from IPublishHost.
        Get global data related to creation-publishing from workfile.

        Returns:
            dict: Context data stored using 'update_context_data'.
        """
        property = bpy.context.scene.get(AYON_PROPERTY)
        if property:
            return property.to_dict()
        return {}

    def update_context_data(self, data: dict, changes: dict):
        """Override abstract method from IPublishHost.
        Store global context data to workfile.

        Args:
            data (dict): New data as are.
            changes (dict): Only data that has been changed. Each value has
                tuple with '(<old>, <new>)' value.
        """
        bpy.context.scene[AYON_PROPERTY] = data
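
In practice the class above is handed to AYON's host registration on Blender startup. The snippet below is a minimal sketch of that wiring; it assumes ayon_core.pipeline.install_host is available in Blender's Python environment and is not part of this module.

from ayon_core.pipeline import install_host
from ayon_blender.api.pipeline import BlenderHost

def register_ayon_host():
    # Registers the host globally; install_host() ends up calling
    # BlenderHost.install(), which wires up plugin paths and callbacks.
    install_host(BlenderHost())

register_ayon_host()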

get_containers()

List containers from active Blender scene.

Source code in client/ayon_blender/api/pipeline.py
def get_containers(self) -> Iterator:
    """List containers from active Blender scene."""
    return ls()

get_context_data()

Override abstract method from IPublishHost. Get global data related to creation-publishing from workfile.

Returns:

    dict: Context data stored using 'update_context_data'.

Source code in client/ayon_blender/api/pipeline.py
def get_context_data(self) -> dict:
    """Override abstract method from IPublishHost.
    Get global data related to creation-publishing from workfile.

    Returns:
        dict: Context data stored using 'update_context_data'.
    """
    property = bpy.context.scene.get(AYON_PROPERTY)
    if property:
        return property.to_dict()
    return {}

get_current_workfile()

Override get_current_workfile method from IWorkfileHost. Retrieve currently opened workfile path.

Returns:

    str: Path to currently opened workfile.

Source code in client/ayon_blender/api/pipeline.py
def get_current_workfile(self) -> str:
    """Override get_current_workfile method from IWorkfileHost.
    Retrieve currently opened workfile path.

    Returns:
        str: Path to currently opened workfile.
    """
    return current_file()

get_workfile_extensions()

Override get_workfile_extensions method from IWorkfileHost. Get workfile possible extensions.

Returns:

    List[str]: Workfile extensions.

Source code in client/ayon_blender/api/pipeline.py
def get_workfile_extensions(self) -> List[str]:
    """Override get_workfile_extensions method from IWorkfileHost.
    Get workfile possible extensions.

    Returns:
        List[str]: Workfile extensions.
    """
    return file_extensions()

install()

Override install method from HostBase. Install Blender host functionality.

Source code in client/ayon_blender/api/pipeline.py
def install(self):
    """Override install method from HostBase.
    Install Blender host functionality."""
    install()

open_workfile(filepath)

Override open_workfile method from IWorkfileHost. Open workfile at specified filepath in the host.

Parameters:

    filepath (str): Path to workfile. Required.
Source code in client/ayon_blender/api/pipeline.py
def open_workfile(self, filepath: str):
    """Override open_workfile method from IWorkfileHost.
    Open workfile at specified filepath in the host.

    Args:
        filepath (str): Path to workfile.
    """
    open_file(filepath)

save_workfile(dst_path=None)

Override save_workfile method from IWorkfileHost. Save currently opened workfile.

Parameters:

    dst_path (str, optional): Where the current scene should be saved. Uses the current path if None is passed. Defaults to None.
Source code in client/ayon_blender/api/pipeline.py
def save_workfile(self, dst_path: str = None):
    """Override save_workfile method from IWorkfileHost.
    Save currently opened workfile.

    Args:
        dst_path (str): Where the current scene should be saved. Or use
            current path if `None` is passed.
    """
    save_file(dst_path if dst_path else bpy.data.filepath)
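
A short sketch of how the workfile methods combine; the paths are placeholders, not real project files.

from ayon_blender.api.pipeline import BlenderHost

host = BlenderHost()

print(host.get_workfile_extensions())   # e.g. [".blend"]

# Open an existing workfile, then save in place and to a new path.
host.open_workfile("/projects/demo/work/shot010_layout_v001.blend")
print(host.get_current_workfile())

host.save_workfile()                     # None -> reuse bpy.data.filepath
host.save_workfile("/projects/demo/work/shot010_layout_v002.blend")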

update_context_data(data, changes)

Override abstract method from IPublishHost. Store global context data to workfile.

Parameters:

    data (dict): The new data, as-is. Required.
    changes (dict): Only data that has been changed. Each value is a
        tuple of '(<old>, <new>)'. Required.
Source code in client/ayon_blender/api/pipeline.py
def update_context_data(self, data: dict, changes: dict):
    """Override abstract method from IPublishHost.
    Store global context data to workfile.

    Args:
        data (dict): New data as are.
        changes (dict): Only data that has been changed. Each value has
            tuple with '(<old>, <new>)' value.
    """
    bpy.context.scene[AYON_PROPERTY] = data
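
A minimal sketch of the round trip through the scene property; the 'publish_attributes' payload is a made-up example value, and `changes` is accepted but unused by this implementation.

from ayon_blender.api.pipeline import BlenderHost

host = BlenderHost()

data = host.get_context_data()          # {} in a fresh scene
data["publish_attributes"] = {"example_plugin": {"active": True}}

host.update_context_data(data, changes={})
assert host.get_context_data() == data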

work_root(session)

Override work_root method from IWorkfileHost. Modify workdir per host.

Parameters:

    session (dict): Session context data. Required.

Returns:

    str: Path to new workdir.

Source code in client/ayon_blender/api/pipeline.py
def work_root(self, session) -> str:
    """Override work_root method from IWorkfileHost.
    Modify workdir per host.

    Args:
        session (dict): Session context data.

    Returns:
        str: Path to new workdir.
    """
    return work_root(session)

workfile_has_unsaved_changes()

Override workfile_has_unsaved_changes method from IWorkfileHost. Return whether the opened workfile has unsaved changes.

Returns:

    bool: True if the scene has unsaved modifications, False if it is saved.

Source code in client/ayon_blender/api/pipeline.py
def workfile_has_unsaved_changes(self) -> bool:
    """Override wokfile_has_unsaved_changes method from IWorkfileHost.
    Returns True if opened workfile has no unsaved changes.

    Returns:
        bool: True if scene is saved and False if it has unsaved
            modifications.
    """
    return has_unsaved_changes()

add_to_ayon_container(container)

Add the container (object or collection) to the AYON container.

Source code in client/ayon_blender/api/pipeline.py
def add_to_ayon_container(
    container: Union[bpy.types.Collection, bpy.types.Object]
):
    """Add the container (object or collection) to the AYON container."""
    ayon_container = get_ayon_container()
    if isinstance(container, bpy.types.Collection):
        ayon_container.children.link(container)
    elif isinstance(container, bpy.types.Object):
        ayon_container.objects.link(container)

containerise(name, namespace, nodes, context, loader=None, suffix='CON')

Bundle nodes into an assembly and imprint it with metadata.

Containerisation enables tracking of the version, author and origin of loaded assets.

Parameters:

    name (str): Name of resulting assembly. Required.
    namespace (str): Namespace under which to host container. Required.
    nodes (List): Long names of nodes to containerise. Required.
    context (Dict): Asset information. Required.
    loader (Optional[str]): Name of loader used to produce this container. Defaults to None.
    suffix (Optional[str]): Suffix of container, applied as '_CON'. Defaults to 'CON'.

Returns:

    bpy.types.Collection: The container assembly.

Source code in client/ayon_blender/api/pipeline.py
def containerise(name: str,
                 namespace: str,
                 nodes: List,
                 context: Dict,
                 loader: Optional[str] = None,
                 suffix: Optional[str] = "CON") -> bpy.types.Collection:
    """Bundle `nodes` into an assembly and imprint it with metadata

    Containerisation enables a tracking of version, author and origin
    for loaded assets.

    Arguments:
        name: Name of resulting assembly
        namespace: Namespace under which to host container
        nodes: Long names of nodes to containerise
        context: Asset information
        loader: Name of loader used to produce this container.
        suffix: Suffix of container, defaults to `_CON`.

    Returns:
        The container assembly

    """

    node_name = get_container_name(name, namespace, context, suffix)
    container = bpy.data.collections.new(name=node_name)
    # Link the children nodes
    for obj in nodes:
        if isinstance(obj, bpy.types.Object):
            container.objects.link(obj)
        elif isinstance(obj, bpy.types.Collection):
            container.children.link(obj)
        else:
            raise TypeError(f"Unsupported type {type(obj)} in nodes list.")

    data = {
        "schema": "ayon:container-3.0",
        "id": AYON_CONTAINER_ID,
        "name": name,
        "namespace": namespace or '',
        "loader": str(loader),
        "representation": context["representation"]["id"],
        "project_name": context["project"]["name"],
    }

    metadata_update(container, data)
    add_to_ayon_container(container)

    return container
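
A hypothetical loader snippet showing the expected call shape; the object and the `context` values are placeholders standing in for what a real load produces.

import bpy
from ayon_blender.api.pipeline import containerise

# Pretend this empty object was created by the load step.
obj = bpy.data.objects.new("hero_GEO", None)
bpy.context.scene.collection.objects.link(obj)

context = {
    "project": {"name": "demo"},
    "folder": {"name": "hero"},
    "representation": {"id": "1234abcd"},
}

container = containerise(
    name="modelMain",
    namespace="hero_01",
    nodes=[obj],
    context=context,
    loader="BlendModelLoader",
)
print(container.name)   # "hero_01:hero_modelMain_CON"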

containerise_existing(container, name, namespace, context, loader=None, suffix='CON')

Imprint or update container with metadata.

Parameters:

    container (bpy.types.Collection): Existing collection to imprint or update. Required.
    name (str): Name of resulting assembly. Required.
    namespace (str): Namespace under which to host container. Required.
    context (Dict): Asset information. Required.
    loader (Optional[str]): Name of loader used to produce this container. Defaults to None.
    suffix (Optional[str]): Suffix of container, applied as '_CON'. Defaults to 'CON'.

Returns:

    bpy.types.Collection: The container assembly.

Source code in client/ayon_blender/api/pipeline.py
def containerise_existing(
        container: bpy.types.Collection,
        name: str,
        namespace: str,
        context: Dict,
        loader: Optional[str] = None,
        suffix: Optional[str] = "CON") -> bpy.types.Collection:
    """Imprint or update container with metadata.

    Arguments:
        name: Name of resulting assembly
        namespace: Namespace under which to host container
        context: Asset information
        loader: Name of loader used to produce this container.
        suffix: Suffix of container, defaults to `_CON`.

    Returns:
        The container assembly
    """

    node_name = container.name
    if suffix:
        node_name = f"{node_name}_{suffix}"
    container.name = node_name
    data = {
        "schema": "ayon:container-3.0",
        "id": AYON_CONTAINER_ID,
        "name": name,
        "namespace": namespace or '',
        "loader": str(loader),
        "representation": context["representation"]["id"],
        "project_name": context["project"]["name"],
    }

    metadata_update(container, data)
    add_to_ayon_container(container)

    return container

ensure_ayon_container()

Ensure AYON_CONTAINERS exists and is ready for use.

Source code in client/ayon_blender/api/pipeline.py
def ensure_ayon_container() -> bpy.types.Collection:
    """Ensure AYON_CONTAINERS exists and is ready for use."""
    ayon_container = bpy.data.collections.get(AYON_CONTAINERS)
    if ayon_container:
        return ayon_container

    # Create and configure container
    ayon_container = bpy.data.collections.new(name=AYON_CONTAINERS)
    bpy.context.scene.collection.children.link(ayon_container)
    ayon_container.color_tag = "COLOR_02"
    ayon_container.use_fake_user = True
    return ayon_container

get_ayon_container()

Get Ayon Container

Returns:

    bpy.types.Collection: AYON containers collection.

Source code in client/ayon_blender/api/pipeline.py
def get_ayon_container() -> bpy.types.Collection:
    """Get Ayon Container

    Returns:
         bpy.types.Collection: Ayon containers collection
    """
    ayon_container = bpy.data.collections.get(AYON_CONTAINERS)
    if ayon_container:
        return ayon_container

    # Backwards compatibility, check for legacy Avalon container
    avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
    if avalon_container:
        # Convert legacy Avalon container to Ayon container
        log.debug(
            "Converting legacy Avalon container to AYON container."
        )
        # Rename the collection
        avalon_container.name = AYON_CONTAINERS
        return avalon_container

    # Create a new AYON container if it does not exist
    return ensure_ayon_container()

get_container_name(name, namespace, context, suffix)

Function to get container name

Parameters:

    name (str): Name of resulting assembly. Required.
    namespace (str): Namespace under which to host container. Required.
    context (Dict): Asset information. Required.
    suffix (str): Suffix of container. Required.

Returns:

    str: The name of the container assembly.

Source code in client/ayon_blender/api/pipeline.py
def get_container_name(name: str,
                       namespace: str,
                       context: Dict,
                       suffix: str):
    """Function to get container name

    Args:
        name: Name of resulting assembly
        namespace: Namespace under which to host container
        context: Asset information
        suffix: Suffix of container

    Returns:
        The name of the container assembly
    """
    node_name = f"{context['folder']['name']}_{name}"
    if namespace:
        node_name = f"{namespace}:{node_name}"
    if suffix:
        node_name = f"{node_name}_{suffix}"

    return node_name
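
How the pieces combine, following the code above (only the folder name is read from `context`):

from ayon_blender.api.pipeline import get_container_name

context = {"folder": {"name": "hero"}}

print(get_container_name("modelMain", "", context, "CON"))
# -> "hero_modelMain_CON"
print(get_container_name("modelMain", "hero_01", context, "CON"))
# -> "hero_01:hero_modelMain_CON"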

get_frame_range(task_entity=None)

Get the task entity's frame range and handles

Parameters:

    task_entity (Optional[dict]): Task entity. When not provided, defaults to the current context task. Defaults to None.

Returns:

    Union[Dict[str, int], None]: Dictionary with frame start, frame end, handle start and handle end.

Source code in client/ayon_blender/api/pipeline.py
def get_frame_range(task_entity=None) -> Union[Dict[str, int], None]:
    """Get the task entity's frame range and handles

    Args:
        task_entity (Optional[dict]): Task Entity.
            When not provided defaults to current context task.

    Returns:
        Union[Dict[str, int], None]: Dictionary with
            frame start, frame end, handle start, handle end.
    """
    # Set frame start/end
    if task_entity is None:
        task_entity = get_current_task_entity(fields={"attrib"})
    task_attributes = task_entity["attrib"]
    frame_start = int(task_attributes["frameStart"])
    frame_end = int(task_attributes["frameEnd"])
    handle_start = int(task_attributes["handleStart"])
    handle_end = int(task_attributes["handleEnd"])
    frame_start_handle = frame_start - handle_start
    frame_end_handle = frame_end + handle_end

    return {
        "frameStart": frame_start,
        "frameEnd": frame_end,
        "handleStart": handle_start,
        "handleEnd": handle_end,
        "frameStartHandle": frame_start_handle,
        "frameEndHandle": frame_end_handle,
    }
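
A small sketch of applying the returned range, including handles, to the current scene:

import bpy
from ayon_blender.api.pipeline import get_frame_range

frame_range = get_frame_range()  # defaults to the current context task
if frame_range:
    scene = bpy.context.scene
    scene.frame_start = frame_range["frameStartHandle"]
    scene.frame_end = frame_range["frameEndHandle"]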

install()

Install Blender configuration for AYON.

Source code in client/ayon_blender/api/pipeline.py
def install():
    """Install Blender configuration for AYON."""
    sys.excepthook = pype_excepthook_handler

    pyblish.api.register_host("blender")
    pyblish.api.register_plugin_path(str(PUBLISH_PATH))

    register_loader_plugin_path(str(LOAD_PATH))
    register_creator_plugin_path(str(CREATE_PATH))

    lib.append_user_scripts()
    lib.set_app_templates_path()

    register_event_callback("new", on_new)
    register_event_callback("open", on_open)
    register_event_callback("before.save", on_before_save)

    _register_callbacks()

    if not IS_HEADLESS:
        ops.register()

ls()

List containers from active Blender scene.

This is the host-equivalent of api.ls(), but instead of listing assets on disk, it lists assets already loaded in Blender; once loaded they are called containers.

Source code in client/ayon_blender/api/pipeline.py
def ls() -> Iterator:
    """List containers from active Blender scene.

    This is the host-equivalent of api.ls(), but instead of listing assets on
    disk, it lists assets already loaded in Blender; once loaded they are
    called containers.
    """
    container_ids = {
        AYON_CONTAINER_ID,
        # Backwards compatibility
        AVALON_CONTAINER_ID
    }

    for id_type in container_ids:
        for container in lib.lsattr("id", id_type):
            yield parse_container(container)

    # Compositor nodes are not in `bpy.data` that `lib.lsattr` looks in.
    node_tree = bpy.context.scene.node_tree
    if node_tree:
        for node in node_tree.nodes:
            ayon_prop = node.get(AYON_PROPERTY)
            if not ayon_prop:
                avalon_prop = node.get(AVALON_PROPERTY)
                if not avalon_prop:
                    continue
                else:
                    node[AYON_PROPERTY] = avalon_prop
                    ayon_prop = avalon_prop
                    del node[AVALON_PROPERTY]

            if ayon_prop.get("id") not in container_ids:
                continue

            yield parse_container(node)

    # Shader nodes are not available in a way that `lib.lsattr` can find.
    for material in bpy.data.materials:
        material_node_tree = material.node_tree
        if not material_node_tree:
            continue

        for shader_node in material_node_tree.nodes:
            ayon_shader_node = get_ayon_property(shader_node)
            if not ayon_shader_node:
                continue

            if ayon_shader_node.get("id") not in container_ids:
                continue

            yield parse_container(shader_node)
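
Typical usage is simply iterating the generator; each yielded item is the parsed container data (see parse_container below).

from ayon_blender.api.pipeline import ls

for container in ls():
    print(
        container["objectName"],
        container.get("loader"),
        container.get("representation"),
    )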

metadata_update(node, data)

Imprint the node with metadata.

Existing metadata will be updated.

Source code in client/ayon_blender/api/pipeline.py
def metadata_update(node: bpy.types.bpy_struct_meta_idprop, data: Dict):
    """Imprint the node with metadata.

    Existing metadata will be updated.
    """

    if not node.get(AYON_PROPERTY):
        node[AYON_PROPERTY] = dict()
    for key, value in data.items():
        if value is None:
            continue
        node[AYON_PROPERTY][key] = value
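
A minimal sketch of imprinting a collection; note that None values are skipped, and the data is read back with .to_dict() (assuming AYON_PROPERTY is importable from this module, where it is used).

import bpy
from ayon_blender.api.pipeline import AYON_PROPERTY, metadata_update

collection = bpy.data.collections.new("demo_CON")
metadata_update(collection, {"name": "demo", "namespace": None})

print(collection[AYON_PROPERTY].to_dict())   # {"name": "demo"}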

on_before_save(event)

Handle the event before saving a Blender file.

When saving to a new scene name, e.g. when incrementing the workfile, the render paths are updated to match the new scene name by replacing the old scene name with the new one in all render paths.

Source code in client/ayon_blender/api/pipeline.py
def on_before_save(event):
    """Handle the event before saving a Blender file.

    When saving to a new scene name, e.g. on incrementing the workfile then
    update the render paths to match the new scene name by replacing the old
    scene name with the new one in all render paths.
    """
    blend_path_before: str = bpy.data.filepath
    blend_path_after: str = event.get("filename")

    if not blend_path_before:
        # Saving from a new unsaved file, no need to check for changes.
        return

    blend_name_before: str = os.path.splitext(
        os.path.basename(blend_path_before))[0]
    blend_name_after: str = os.path.splitext(
        os.path.basename(blend_path_after))[0]
    if blend_name_before != blend_name_after:
        print(f"Detected scene name change from {blend_name_before} to "
              f"{blend_name_after}")
        # TODO: We may want to allow disabling this feature, especially after
        #  Blender 4.5+ supporting the `{blend_name}` in templates in render
        #  paths to avoid needing to hardcode the blender scene filename into
        #  those paths.
        # Update any render paths if they previously contained the scene name
        # Warning: if the scene name is `a` before and now `helloworld` then
        #  this may easily get out of hand by turning `asset` into
        #  `helloworldsset`, but since filenames tend to be longer and
        #  contain version numbers, this is not expected to happen often.
        search_replace_render_paths(blend_name_before, blend_name_after)

parse_container(container, validate=True)

Return the container node's full container data.

Parameters:

    container (bpy.types.Collection): A container node. Required.
    validate (bool): Turn validation of the container on or off. Defaults to True.

Returns:

    Dict: The container schema data for this container node.

Source code in client/ayon_blender/api/pipeline.py
def parse_container(container: bpy.types.Collection,
                    validate: bool = True) -> Dict:
    """Return the container node's full container data.

    Args:
        container: A container node name.
        validate: turn the validation for the container on or off

    Returns:
        The container schema data for this container node.

    """

    data = lib.read(container)

    # Append transient data
    data["objectName"] = container.name
    data["node"] = container  # store parsed object for easy access in loader

    if validate:
        schema.validate(data)

    return data

publish()

Shorthand to publish from within host.

Source code in client/ayon_blender/api/pipeline.py
def publish():
    """Shorthand to publish from within host."""

    return pyblish.util.publish()

uninstall()

Uninstall Blender configuration for AYON.

Source code in client/ayon_blender/api/pipeline.py
def uninstall():
    """Uninstall Blender configuration for AYON."""
    sys.excepthook = ORIGINAL_EXCEPTHOOK

    pyblish.api.deregister_host("blender")
    pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))

    deregister_loader_plugin_path(str(LOAD_PATH))
    deregister_creator_plugin_path(str(CREATE_PATH))

    if not IS_HEADLESS:
        ops.unregister()