pipeline

Pipeline tools for AYON Speedtree integration.

SpeedtreeHost

Bases: HostBase, IWorkfileHost, ILoadHost, IPublishHost

Source code in client/ayon_speedtree/api/pipeline.py
class SpeedtreeHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
    name = "speedtree"

    @staticmethod
    def show_tools_dialog():
        """Show tools dialog with actions leading to show other tools."""
        show_tools_dialog()

    def install(self):

        plugins_dir = os.path.join(SPTREE_ADDON_ROOT, "plugins")
        publish_dir = os.path.join(plugins_dir, "publish")
        load_dir = os.path.join(plugins_dir, "load")
        create_dir = os.path.join(plugins_dir, "create")

        pyblish.api.register_host("speedtree")
        pyblish.api.register_plugin_path(publish_dir)
        register_loader_plugin_path(load_dir)
        register_creator_plugin_path(create_dir)

        register_event_callback(
            "application.launched", self.initial_app_launch
        )
        register_event_callback("application.exit", self.application_exit)

    def get_current_project_name(self):
        """
        Returns:
            Union[str, None]: Current project name.
        """

        return self.get_current_context().get("project_name")

    def get_current_folder_path(self):
        """
        Returns:
            Union[str, None]: Current folder path.
        """

        return self.get_current_context().get("folder_path")

    def get_current_task_name(self):
        """
        Returns:
            Union[str, None]: Current task name.
        """

        return self.get_current_context().get("task_name")

    def get_current_context(self):
        context = get_current_workfile_context()
        if not context:
            return get_global_context()

        return context

    def get_current_workfile(self):
        return os.environ["CURRENT_SPM"]

    def workfile_has_unsaved_changes(self):
        # A pop-up dialog could be shown here to ask users to save
        # the scene if it has unsaved changes.
        return True

    def get_workfile_extensions(self):
        return [".spm"]

    def open_workfile(self, filepath):
        os.environ["CURRENT_SPM"] = filepath
        load_spm_file(filepath)
        return filepath

    def save_workfile(self, filepath=None):
        with save_scene("Work Files"):
            context = open_workfile()
            filepath = save_workfile(filepath, context)
        print(f"Saving Spm file: {filepath}")
        copy_ayon_data(filepath)
        load_spm_file(filepath)
        return filepath

    def list_instances(self):
        """Get all AYON instances."""
        # Figure out how to deal with this
        return get_instance_workfile_metadata()

    def write_instances(self, data):
        """Write all AYON instances"""
        return write_workfile_metadata(SPTREE_SECTION_NAME_INSTANCES, data)

    def get_containers(self):
        """Get the data of the containers

        Returns:
            list: the list which stores the data of the containers
        """
        return get_containers()

    def initial_app_launch(self):
        """Triggers on launch of the communication server for Speedtree.

        Usually this aligns roughly with the start of Speedtree.
        """
        context = get_global_context()
        save_current_workfile_context(context)
        # Initialize the SpeedTree system
        SpeedTree.StpInit()

    def application_exit(self):
        """Event action when the application exit
        """
        remove_tmp_data()
        SpeedTree.StpShutDown()

    def update_context_data(self, data, changes):
        return write_workfile_metadata(SPTREE_METADATA_CREATE_CONTEXT, data)

    def get_context_data(self):
        return get_load_workfile_metadata(SPTREE_METADATA_CREATE_CONTEXT)
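
A minimal registration sketch (assumption: AYON core's install_host entry point is used to bootstrap the host; the actual SpeedTree launch hook may wire this up differently):

from ayon_core.pipeline import install_host
from ayon_speedtree.api.pipeline import SpeedtreeHost

# Registering the host typically triggers SpeedtreeHost.install(), which
# registers the pyblish/loader/creator plugin paths and the
# application.launched / application.exit callbacks shown above.
host = SpeedtreeHost()
install_host(host)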

application_exit()

Event action when the application exits.

Source code in client/ayon_speedtree/api/pipeline.py
def application_exit(self):
    """Event action when the application exit
    """
    remove_tmp_data()
    SpeedTree.StpShutDown()

get_containers()

Get the data of the containers

Returns:

    list: the list which stores the data of the containers

Source code in client/ayon_speedtree/api/pipeline.py
def get_containers(self):
    """Get the data of the containers

    Returns:
        list: the list which stores the data of the containers
    """
    return get_containers()
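
A hedged usage sketch, assuming the host has been installed and that registered_host() (from ayon_core.pipeline) returns the SpeedtreeHost instance:

from ayon_core.pipeline import registered_host

host = registered_host()
for container in host.get_containers():
    # each container is a dict read back from the .sptree_metadata JSON files
    print(container.get("objectName"), container.get("representation"))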

get_current_folder_path()

Returns:

    Union[str, None]: Current folder path.

Source code in client/ayon_speedtree/api/pipeline.py
def get_current_folder_path(self):
    """
    Returns:
        Union[str, None]: Current folder path.
    """

    return self.get_current_context().get("folder_path")

get_current_project_name()

Returns:

    Union[str, None]: Current project name.

Source code in client/ayon_speedtree/api/pipeline.py
def get_current_project_name(self):
    """
    Returns:
        Union[str, None]: Current project name.
    """

    return self.get_current_context().get("project_name")

get_current_task_name()

Returns:

    Union[str, None]: Current task name.

Source code in client/ayon_speedtree/api/pipeline.py
def get_current_task_name(self):
    """
    Returns:
        Union[str, None]: Current task name.
    """

    return self.get_current_context().get("task_name")

initial_app_launch()

Triggers on launch of the communication server for Speedtree.

Usually this aligns roughly with the start of Speedtree.

Source code in client/ayon_speedtree/api/pipeline.py
def initial_app_launch(self):
    """Triggers on launch of the communication server for Speedtree.

    Usually this aligns roughly with the start of Speedtree.
    """
    context = get_global_context()
    save_current_workfile_context(context)
    # Initialize the SpeedTree system
    SpeedTree.StpInit()

list_instances()

Get all AYON instances.

Source code in client/ayon_speedtree/api/pipeline.py
def list_instances(self):
    """Get all AYON instances."""
    # Figure out how to deal with this
    return get_instance_workfile_metadata()

show_tools_dialog() staticmethod

Show tools dialog with actions leading to show other tools.

Source code in client/ayon_speedtree/api/pipeline.py
@staticmethod
def show_tools_dialog():
    """Show tools dialog with actions leading to show other tools."""
    show_tools_dialog()

write_instances(data)

Write all AYON instances

Source code in client/ayon_speedtree/api/pipeline.py
def write_instances(self, data):
    """Write all AYON instances"""
    return write_workfile_metadata(SPTREE_SECTION_NAME_INSTANCES, data)

copy_ayon_data(filepath)

Copy any AYON-related data (such as instances, create context and containers) from the previous workfile to the new one when incrementing and saving the workfile.

Parameters:

    filepath (str): the workfile path to be saved (required)
Source code in client/ayon_speedtree/api/pipeline.py
def copy_ayon_data(filepath):
    """Copy any ayon-related data(
        such as instances, create-context, cotnainers)
        from the previous workfile to the new one
        when incrementing and saving workfile.

    Args:
        filepath (str): the workfile path to be saved
    """
    filename = os.path.splitext(os.path.basename(filepath))[0].strip()
    current_file = registered_host().get_current_workfile()
    if current_file:
        current_file = os.path.splitext(
            os.path.basename(current_file))[0].strip()
    work_dir = get_workdir()
    for name in [SPTREE_METADATA_CREATE_CONTEXT,
                 SPTREE_SECTION_NAME_INSTANCES,
                 SPTREE_SECTION_NAME_CONTAINERS]:
        src_json_dir = os.path.join(
            work_dir, ".sptree_metadata", current_file, name).replace(
                "\\", "/"
            )
        if not os.path.exists(src_json_dir):
            continue
        dst_json_dir = os.path.join(
            work_dir, ".sptree_metadata", filename, name).replace(
                "\\", "/"
            )
        os.makedirs(dst_json_dir, exist_ok=True)
        all_fname_list = [jfile for jfile in os.listdir(src_json_dir)
                        if jfile.endswith("json")]
        if all_fname_list:
            for fname in all_fname_list:
                src_json = f"{src_json_dir}/{fname}"
                dst_json = f"{dst_json_dir}/{fname}"
                shutil.copy(src_json, dst_json)
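
An illustrative call with hypothetical paths, showing which folders get mirrored when a workfile is versioned up:

# With "tree_v001.spm" currently open and "tree_v002.spm" about to be saved,
# the per-workfile metadata folders are copied, e.g.:
#   {workdir}/.sptree_metadata/tree_v001/containers/*.json
#     -> {workdir}/.sptree_metadata/tree_v002/containers/*.json
copy_ayon_data("/projects/demo/work/trees/tree_v002.spm")  # hypothetical path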

get_containers()

Function to get the container data

Returns:

    list: list of container data

Source code in client/ayon_speedtree/api/pipeline.py
def get_containers():
    """Function to get the container data

    Returns:
        list: list of container data
    """
    output = get_load_workfile_metadata(SPTREE_SECTION_NAME_CONTAINERS)
    if output:
        for item in output:
            if "objectName" not in item and "name" in item:
                members = item["name"]
                if isinstance(members, list):
                    members = "|".join([str(member) for member in members])
                item["objectName"] = members

    return output

get_current_workfile_context()

Function to get the current context data from the related json file in the .sptree_metadata/context folder.

The current context data includes things like the project name, folder path and task name.

Returns:

    dict: context data

Source code in client/ayon_speedtree/api/pipeline.py
def get_current_workfile_context():
    """Function to get the current context data from the related
    json file in .sptree_metadata/context folder

    The current context data includes things like the project name,
    folder path and task name.

    Returns:
        dict: context data
    """
    return get_load_context_metadata()
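
A small usage sketch; the keys follow the context data described above:

context = get_current_workfile_context()
if context:
    print(context.get("project_name"))
    print(context.get("folder_path"))
    print(context.get("task_name"))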

get_instance_workfile_metadata()

Get instance data from the related metadata json ("instances.json"), which is stored in the .sptree_metadata/{workfile}/instances folder in the project work directory.

Instance data includes info like the workfile instance and any instances created by users for publishing.

Returns:

    dict: instance data

Source code in client/ayon_speedtree/api/pipeline.py
def get_instance_workfile_metadata():
    """Get instance data from the related metadata json("instances.json")
    which stores in .sptree_metadata/{workfile}/instances folder
    in the project work directory.

    Instance data includes the info like the workfile instance
    and any instances created by the users for publishing.

    Returns:
        dict: instance data
    """
    file_content = []
    current_file = registered_host().get_current_workfile()
    if current_file:
        current_file = os.path.splitext(
            os.path.basename(current_file))[0].strip()
    work_dir = get_workdir()
    json_dir = os.path.join(
        work_dir, ".sptree_metadata",
        current_file, SPTREE_SECTION_NAME_INSTANCES).replace(
            "\\", "/"
        )
    if not os.path.exists(json_dir) or not os.listdir(json_dir):
        return file_content
    for file in os.listdir(json_dir):
        with open (f"{json_dir}/{file}", "r") as data:
            file_content = json.load(data)

    return file_content
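
A hedged consumption example; the exact keys inside each instance depend on the creators that wrote instances.json:

instances = get_instance_workfile_metadata()
# `instances` mirrors whatever write_instances() last persisted for this
# workfile; print it to inspect the raw structure.
print(instances)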

get_load_context_metadata()

Get the context data from the related json file ("context.json"), which is stored in the .sptree_metadata/context folder in the project work directory.

The context data includes the project name, folder path and task name.

Returns:

    dict: context data

Source code in client/ayon_speedtree/api/pipeline.py
def get_load_context_metadata():
    """Get the context data from the related json file
    ("context.json") which stores in .sptree_metadata/context
    folder in the project work directory.

    The context data includes the project name, folder path and
    task name.

    Returns:
        list: context data
    """
    file_content = {}
    work_dir = get_workdir()
    json_dir = os.path.join(
        work_dir, ".sptree_metadata", SPTREE_SECTION_NAME_CONTEXT).replace(
            "\\", "/"
        )
    if not os.path.exists(json_dir):
        return file_content
    file_list = os.listdir(json_dir)
    if not file_list:
        return file_content
    for file in file_list:
        with open (f"{json_dir}/{file}", "r") as data:
            content = ast.literal_eval(str(data.read().strip()))
            file_content.update(content)
            data.close()
    return file_content

get_load_workfile_metadata(metadata_key)

Load the workfile JSON metadata (such as the creator's context data and container data) stored in the .sptree_metadata/{workfile}/{metadata_key} folder in the project work directory. It mainly supports the metadata keys below: SPTREE_METADATA_CREATE_CONTEXT loads create_context.json, which stores the data with publish_attributes (e.g. whether an optional validator is enabled); SPTREE_SECTION_NAME_CONTAINERS loads {subset_name}.json, which includes all the asset data loaded into the SpeedTree scene.

Parameters:

    metadata_key (str): name of the metadata key (required)

Returns:

    list: list of metadata (create-context data or container data)

Source code in client/ayon_speedtree/api/pipeline.py
def get_load_workfile_metadata(metadata_key):
    """Get to load the workfile json metadata(such as
    creator's context data and container data) which stores in
    .sptree_metadata/{workfile}/{metadata_key} folder in the project
    work directory.
    It mainly supports to the metadata_key below:
    SPTREE_METADATA_CREATE_CONTEXT: loading create_context.json where
        stores the data with publish_attributes(e.g. whether the
        optional validator is enabled.)
    SPTREE_SECTION_NAME_CONTAINERS: loading {subset_name}.json where
        includes all the loaded asset data to the zbrush scene.

    Args:
        metadata_key (str): name of the metadata key

    Returns:
        list: list of metadata(create-context data or container data)
    """
    file_content = []
    current_file = registered_host().get_current_workfile()
    if current_file:
        current_file = os.path.splitext(
            os.path.basename(current_file))[0].strip()
    work_dir = get_workdir()
    json_dir = os.path.join(
        work_dir, ".sptree_metadata",
        current_file, metadata_key).replace(
            "\\", "/"
        )
    if not os.path.exists(json_dir):
        return file_content
    file_list = os.listdir(json_dir)
    if not file_list:
        return file_content
    for file in file_list:
        with open (f"{json_dir}/{file}", "r") as data:
            content = json.load(data)
            file_content.extend(content)
            data.close()
    return file_content
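
Typical calls, assuming the section-name constants are imported from the same api package:

create_context = get_load_workfile_metadata(SPTREE_METADATA_CREATE_CONTEXT)
containers = get_load_workfile_metadata(SPTREE_SECTION_NAME_CONTAINERS)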

imprint(container, representation_id)

Function to update the container data in the related json file in the .sptree_metadata/{workfile}/containers folder when updating or switching asset(s).

Parameters:

    container (dict): container data (required)
    representation_id (str): representation id (required)
Source code in client/ayon_speedtree/api/pipeline.py
def imprint(container, representation_id):
    """Function to update the container data from
    the related json file in .sptree_metadata/{workfile}/container
    when updating or switching asset(s)

    Args:
        container (str): container
        representation_id (str): representation id
    """
    old_container_data = []
    data = {}
    name = container["objectName"]
    current_file = registered_host().get_current_workfile()
    if current_file:
        current_file = os.path.splitext(
            os.path.basename(current_file))[0].strip()
    work_dir = get_workdir()
    json_dir = os.path.join(
        work_dir, ".sptree_metadata",
        current_file, SPTREE_SECTION_NAME_CONTAINERS).replace(
            "\\", "/"
        )
    js_fname = next((jfile for jfile in os.listdir(json_dir)
                     if jfile.endswith(f"{name}.json")), None)
    if js_fname:
        with open(f"{json_dir}/{js_fname}", "r") as file:
            old_container_data = json.load(file)
            print(f"data: {type(old_container_data)}")
            file.close()

        open(f"{json_dir}/{js_fname}", 'w').close()
        for item in old_container_data:
            item["representation"] = representation_id
            data.update(item)
        with open(f"{json_dir}/{js_fname}", "w") as file:
            new_container_data = json.dumps([data])
            file.write(new_container_data)
            file.close()
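
An illustrative update flow; the container dict and representation id below are hypothetical:

container = {"objectName": "treeMain"}   # hypothetical loaded container
new_repre_id = "0123456789abcdef"        # hypothetical representation id
# Rewrites the matching {name}.json with the new representation id.
imprint(container, new_repre_id)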

remove_container_data(name)

Function to remove the specific container data ({subset_name}.json) from the .sptree_metadata/{workfile}/containers folder.

Parameters:

    name (str): object name stored in the container (required)
Source code in client/ayon_speedtree/api/pipeline.py
def remove_container_data(name):
    """Function to remove the specific container data from
    {subset_name}.json in .sptree_metadata/{workfile}/containers folder

    Args:
        name (str): object name stored in the container
    """
    current_file = registered_host().get_current_workfile()
    if current_file:
        current_file = os.path.splitext(
            os.path.basename(current_file))[0].strip()
    work_dir = get_workdir()
    json_dir = os.path.join(
        work_dir, ".sptree_metadata",
        current_file, SPTREE_SECTION_NAME_CONTAINERS).replace(
            "\\", "/"
        )
    all_fname_list = os.listdir(json_dir)
    json_file = next((jfile for jfile in all_fname_list
                               if jfile == f"{name}.json"), None)
    if json_file:
        os.remove(f"{json_dir}/{json_file}")
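
For example (the object name is hypothetical):

# Deletes .sptree_metadata/{workfile}/containers/treeMain.json, if present.
remove_container_data("treeMain")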

remove_tmp_data()

Remove all temporary data created by AYON without saving changes when Speedtree is launched without enabling `skip opening last workfile`.

Source code in client/ayon_speedtree/api/pipeline.py
def remove_tmp_data():
    """Remove all temporary data which is created by AYON without
    saving changes when launching Speedtree without enabling `skip
    opening last workfile`

    """
    work_dir = get_workdir()
    for name in [SPTREE_METADATA_CREATE_CONTEXT,
                 SPTREE_SECTION_NAME_INSTANCES,
                 SPTREE_SECTION_NAME_CONTAINERS]:
        json_dir = os.path.join(
            work_dir, ".sptree_metadata", name).replace(
                "\\", "/"
            )
        if not os.path.exists(json_dir):
            continue
        all_fname_list = [jfile for jfile in os.listdir(json_dir)
                          if jfile.endswith("json")]
        for fname in all_fname_list:
            os.remove(f"{json_dir}/{fname}")

save_current_workfile_context(context)

Save current workfile context data to .sptree_metadata/{workfile}/key

This persists the current in-memory context for a specific workfile on disk. Usually used on save to persist the local session's workfile context.

The context data includes things like the project name, folder path, etc.

Parameters:

    context (dict): context data (required)
Source code in client/ayon_speedtree/api/pipeline.py
def save_current_workfile_context(context):
    """Save current workfile context data to `.sptree_metadata/{workfile}/key`

    This persists the current in-memory context for a specific
    workfile on disk. Usually used on save to persist the local
    session's workfile context.

    The context data includes things like the project name, folder path,
    etc.

    Args:
        context (dict): context data

    """
    return write_context_metadata(SPTREE_SECTION_NAME_CONTEXT, context)
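
A minimal sketch, assuming get_global_context() is importable from ayon_core (the same helper SpeedtreeHost.initial_app_launch uses):

from ayon_core.pipeline.context_tools import get_global_context

context = get_global_context()
save_current_workfile_context(context)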

show_tools_dialog()

Show dialog with tools.

Dialog will stay visible.

Source code in client/ayon_speedtree/api/pipeline.py
def show_tools_dialog():
    """Show dialog with tools.

    Dialog will stay visible.
    """
    from ayon_speedtree.api import tools_ui

    tools_ui.show_tools_dialog()

write_context_metadata(metadata_key, context)

Write context data into the related json, which is stored in the .sptree_metadata/{metadata_key} folder in the project work directory.

The context data includes the project name, folder path and task name.

Parameters:

    metadata_key (str): metadata key (required)
    context (dict): context data (required)
Source code in client/ayon_speedtree/api/pipeline.py
def write_context_metadata(metadata_key, context):
    """Write context data into the related json
    which stores in .sptree_metadata/key folder
    in the project work directory.

    The context data includes the project name, folder path
    and task name.

    Args:
        metadata_key (str): metadata key
        context (dict): context data
    """
    work_dir = get_workdir()
    json_dir = os.path.join(
        work_dir, ".sptree_metadata", metadata_key).replace(
            "\\", "/"
        )
    os.makedirs(json_dir, exist_ok=True)
    json_file = f"{json_dir}/{metadata_key}.json"
    if os.path.exists(json_file):
        with open (json_file, "r") as file:
            value = json.load(file)
            if value == context:
                return
    with open (json_file, "w") as file:
        value = json.dumps(context)
        file.write(value)
        file.close()

write_load_metadata(data)

Write/edit the container data into the related json file ("{subset_name}.json"), which is stored in the .sptree_metadata/{workfile}/containers folder. This persists the current in-memory container data used when updating and switching assets in the scene inventory.

Parameters:

    data (list): list of container data (required)
Source code in client/ayon_speedtree/api/pipeline.py
def write_load_metadata(data):
    """Write/Edit the container data into the related json file
    ("{subset_name}.json")
    which stores in .sptree_metadata/{workfile}/containers folder.
    This persists the current in-memory containers data
    to be set for updating and switching assets in scene inventory.

    Args:
        metadata_key (str): metadata key for container
        data (list): list of container data
    """
    current_file = registered_host().get_current_workfile()
    if current_file:
        current_file = os.path.splitext(
            os.path.basename(current_file))[0].strip()
    work_dir = get_workdir()
    name = next((d["name"] for d in data), None)
    json_dir = os.path.join(
        work_dir, ".sptree_metadata",
        current_file, SPTREE_SECTION_NAME_CONTAINERS).replace(
            "\\", "/"
        )
    os.makedirs(json_dir, exist_ok=True)
    json_file = f"{json_dir}/{name}.json"
    if os.path.exists(json_file):
        with open(json_file, "w"):
            pass

    with open(json_file, "w") as file:
        value = json.dumps(data)
        file.write(value)
        file.close()
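
An illustrative payload; real container dicts are produced by the load plugins, so the keys below are hypothetical:

data = [{
    "name": "treeMain",                    # hypothetical product/subset name
    "objectName": "treeMain",
    "representation": "0123456789abcdef",  # hypothetical representation id
}]
# Writes .sptree_metadata/{workfile}/containers/treeMain.json
write_load_metadata(data)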

write_workfile_metadata(metadata_key, data=None)

Function to write workfile metadata (such as the creator's context data and instance data) into the .sptree_metadata/{workfile}/{metadata_key} folder. This persists the current in-memory instance/creator's context data for a specific workfile on disk. Usually used on save to persist updated instance data and context data used in the publisher.

Parameters:

    metadata_key (str): metadata key (required)
    data (list, optional): metadata. Defaults to None.
Source code in client/ayon_speedtree/api/pipeline.py
def write_workfile_metadata(metadata_key, data=None):
    """Function to write workfile metadata(such as creator's context data
    and instance data) in .sptree_metadata/{workfile}/{metadata_key} folder
    This persists the current in-memory instance/creator's context data
    to be set for a specific workfile on disk. Usually used on save to
    persist updating instance data and context data used in publisher.

    Args:
        metadata_key (str): metadata key
        data (list, optional): metadata. Defaults to None.
    """
    if data is None:
        data = []
    current_file = registered_host().get_current_workfile()
    if current_file:
        current_file = os.path.splitext(
            os.path.basename(current_file))[0].strip()
    work_dir = get_workdir()
    json_dir = os.path.join(
        work_dir, ".sptree_metadata",
        current_file, metadata_key).replace(
            "\\", "/"
        )
    os.makedirs(json_dir, exist_ok=True)
    with open (f"{json_dir}/{metadata_key}.json", "w") as file:
        value = json.dumps(data)
        file.write(value)
        file.close()
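
For example, persisting an empty instance list for the current workfile (the constant is assumed to be imported from the same module):

write_workfile_metadata(SPTREE_SECTION_NAME_INSTANCES, data=[])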