Skip to content

workfile

BuildWorkfile

Wrapper for build workfile process.

Load representations for current context by build presets. Build presets are host related, since each host has its own loaders.

Source code in client/ayon_core/pipeline/workfile/build_workfile.py
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
class BuildWorkfile:
    """Wrapper for build workfile process.

    Load representations for current context by build presets. Build presets
    are host related, since each host has its own loaders.
    """

    # Lazily created logger shared by instances (see 'log' property).
    _log = None

    @property
    def log(self):
        """Logger instance, created on first access."""
        if self._log is None:
            self._log = Logger.get_logger(self.__class__.__name__)
        return self._log

    @staticmethod
    def map_products_by_type(product_entities):
        """Group product entities by their product type.

        Args:
            product_entities (Iterable[Dict[str, Any]]): Product entities.

        Returns:
            Dict[str, List[Dict[str, Any]]]: Product entities grouped by
                their 'productType' value.
        """
        products_by_type = collections.defaultdict(list)
        for product_entity in product_entities:
            product_type = product_entity["productType"]
            products_by_type[product_type].append(product_entity)
        return products_by_type

    def process(self):
        """Main method of this wrapper.

        Building of workfile is triggered and is possible to implement
        post processing of loaded containers if necessary.

        Returns:
            List[Dict[str, Any]]: Loaded containers during build.
        """

        return self.build_workfile()

    def build_workfile(self):
        """Prepares and load containers into workfile.

        Loads latest versions of current and linked folders to workfile by
        logic stored in Workfile profiles from presets. Profiles are set
        by host, filtered by current task name and used by families.

        Each product type can specify representation names and loaders for
        representations and first available and successful loaded
        representation is returned as container.

        At the end you'll get list of loaded containers per each folder.

        loaded_containers [{
            "folder_entity": <FolderEntity1>,
            "containers": [<Container1>, <Container2>, ...]
        }, {
            "folder_entity": <FolderEntity2>,
            "containers": [<Container3>, ...]
        }, {
            ...
        }]

        Returns:
            List[Dict[str, Any]]: Loaded containers during build.
        """

        # Local import to avoid circular dependency with context_tools.
        from ayon_core.pipeline.context_tools import get_current_context

        loaded_containers = []

        # Get current folder and task entities
        context = get_current_context()
        project_name = context["project_name"]
        current_folder_path = context["folder_path"]
        current_task_name = context["task_name"]

        current_folder_entity = ayon_api.get_folder_by_path(
            project_name, current_folder_path
        )
        # Skip if folder was not found
        if not current_folder_entity:
            self.log.warning("Folder entity `{}` was not found".format(
                current_folder_path
            ))
            return loaded_containers

        # Prepare available loaders
        loaders_by_name = {}
        for loader in discover_loader_plugins():
            if not loader.enabled:
                continue
            loader_name = loader.__name__
            if loader_name in loaders_by_name:
                raise KeyError(
                    "Duplicated loader name {0}!".format(loader_name)
                )
            loaders_by_name[loader_name] = loader

        # Skip if there are no registered loaders
        if not loaders_by_name:
            self.log.warning("There are no registered loaders.")
            return loaded_containers

        # Load workfile presets for task
        self.build_presets = self.get_build_presets(
            current_task_name, current_folder_entity["id"]
        )

        # Skip if there are no presets for the task
        if not self.build_presets:
            self.log.warning(
                "Current task `{}` does not have any loading preset.".format(
                    current_task_name
                )
            )
            return loaded_containers

        # Get presets for loading current folder
        current_context_profiles = self.build_presets.get("current_context")
        # Get presets for loading linked folders
        link_context_profiles = self.build_presets.get("linked_assets")
        # Skip if both are missing
        if not current_context_profiles and not link_context_profiles:
            self.log.warning(
                "Current task `{}` has empty loading preset.".format(
                    current_task_name
                )
            )
            return loaded_containers

        elif not current_context_profiles:
            self.log.warning((
                "Current task `{}` doesn't have any loading"
                " preset for it's context."
            ).format(current_task_name))

        elif not link_context_profiles:
            self.log.warning((
                "Current task `{}` doesn't have any"
                "loading preset for it's linked folders."
            ).format(current_task_name))

        # Prepare folders to process by workfile presets
        folder_entities = []
        current_folder_id = None
        if current_context_profiles:
            # Add current folder entity if preset has current context set
            folder_entities.append(current_folder_entity)
            current_folder_id = current_folder_entity["id"]

        if link_context_profiles:
            # Find and append linked folders if preset has set linked mapping
            linked_folder_entities = self._get_linked_folder_entities(
                project_name, current_folder_entity["id"]
            )
            if linked_folder_entities:
                folder_entities.extend(linked_folder_entities)

        # Skip if there are no folders. This can happen if only linked mapping
        # is set and there are no links for this folder.
        if not folder_entities:
            self.log.warning(
                "Folder does not have linked folders. Nothing to process."
            )
            return loaded_containers

        # Prepare entities from database for folders
        prepared_entities = self._collect_last_version_repres(
            folder_entities
        )

        # Load containers by prepared entities and presets
        # - Current folder containers
        if current_folder_id and current_folder_id in prepared_entities:
            current_context_data = prepared_entities.pop(current_folder_id)
            loaded_data = self.load_containers_by_folder_data(
                current_context_data,
                current_context_profiles,
                loaders_by_name
            )
            if loaded_data:
                loaded_containers.append(loaded_data)

        # - Linked assets container
        for linked_folder_data in prepared_entities.values():
            loaded_data = self.load_containers_by_folder_data(
                linked_folder_data,
                link_context_profiles,
                loaders_by_name
            )
            if loaded_data:
                loaded_containers.append(loaded_data)

        # Return list of loaded containers
        return loaded_containers

    def get_build_presets(self, task_name, folder_id):
        """ Returns presets to build workfile for task name.

        Presets are loaded for current project received by
        'get_current_project_name', filtered by registered host
        and entered task name.

        Args:
            task_name (str): Task name used for filtering build presets.
            folder_id (str): Folder id.

        Returns:
            Dict[str, Any]: preset per entered task name
        """

        from ayon_core.pipeline.context_tools import (
            get_current_host_name,
            get_current_project_name,
        )

        project_name = get_current_project_name()
        host_name = get_current_host_name()
        project_settings = get_project_settings(project_name)

        host_settings = project_settings.get(host_name) or {}
        # Get presets for host
        wb_settings = host_settings.get("workfile_builder")
        if not wb_settings:
            # backward compatibility
            wb_settings = host_settings.get("workfile_build") or {}

        builder_profiles = wb_settings.get("profiles")
        if not builder_profiles:
            return None

        # Task type is needed for profile filtering
        task_entity = ayon_api.get_task_by_name(
            project_name,
            folder_id,
            task_name,
        )
        task_type = None
        if task_entity:
            task_type = task_entity["taskType"]

        filter_data = {
            "task_types": task_type,
            "tasks": task_name
        }
        return filter_profiles(builder_profiles, filter_data)

    def _filter_build_profiles(self, build_profiles, loaders_by_name):
        """ Filter build profiles by loaders and prepare process data.

        Valid profile must have "loaders", "families" and "repre_names" keys
        with valid values.
        - "loaders" expects list of strings representing possible loaders.
        - "families" expects list of strings for filtering
                     by product type.
        - "repre_names" expects list of strings for filtering by
                        representation name.

        Lowered "families" and "repre_names" are prepared for each profile with
        all required keys.

        Args:
            build_profiles (Dict[str, Any]): Profiles for building workfile.
            loaders_by_name (Dict[str, LoaderPlugin]): Available loaders
                per name.

        Returns:
            List[Dict[str, Any]]: Filtered and prepared profiles.
        """

        valid_profiles = []
        for profile in build_profiles:
            # Check loaders
            profile_loaders = profile.get("loaders")
            if not profile_loaders:
                self.log.warning((
                    "Build profile has missing loaders configuration: {0}"
                ).format(json.dumps(profile, indent=4)))
                continue

            # Check if any loader is available
            loaders_match = False
            for loader_name in profile_loaders:
                if loader_name in loaders_by_name:
                    loaders_match = True
                    break

            if not loaders_match:
                self.log.warning((
                    "All loaders from Build profile are not available: {0}"
                ).format(json.dumps(profile, indent=4)))
                continue

            # Check product types
            profile_product_types = profile.get("product_types")
            if not profile_product_types:
                self.log.warning((
                    "Build profile is missing families configuration: {0}"
                ).format(json.dumps(profile, indent=4)))
                continue

            # Check representation names
            profile_repre_names = profile.get("repre_names")
            if not profile_repre_names:
                self.log.warning((
                    "Build profile is missing"
                    " representation names filtering: {0}"
                ).format(json.dumps(profile, indent=4)))
                continue

            # Prepare lowered families and representation names
            profile["product_types_lowered"] = [
                product_type.lower()
                for product_type in profile_product_types
            ]
            profile["repre_names_lowered"] = [
                name.lower() for name in profile_repre_names
            ]

            valid_profiles.append(profile)

        return valid_profiles

    def _get_linked_folder_entities(self, project_name, folder_id):
        """Get linked folder entities for entered folder.

        Args:
            project_name (str): Project name.
            folder_id (str): Folder id.

        Returns:
            list[dict[str, Any]]: Linked folder entities.

        """
        links = ayon_api.get_folder_links(
            project_name, folder_id, link_direction="in"
        )
        linked_folder_ids = {
            link["entityId"]
            for link in links
            if link["entityType"] == "folder"
        }
        if not linked_folder_ids:
            return []
        return list(ayon_api.get_folders(
            project_name, folder_ids=linked_folder_ids
        ))

    def _prepare_profile_for_products(self, product_entities, profiles):
        """Select profile for each product by it's data.

        Profiles are filtered for each product individually.
        Profile is filtered by product type, optionally by name regex and
        representation names set in profile.
        It is possible to not find matching profile for product, in that case
        product is skipped and it is possible that none of products have
        matching profile.

        Args:
            product_entities (List[Dict[str, Any]]): product entities.
            profiles (List[Dict[str, Any]]): Build profiles.

        Returns:
            Dict[str, Any]: Profile by product id.
        """

        # Prepare products
        products_by_type = self.map_products_by_type(product_entities)

        profiles_by_product_id = {}
        # NOTE: do not shadow the 'product_entities' parameter here
        for product_type, type_product_entities in products_by_type.items():
            product_type_low = product_type.lower()
            for profile in profiles:
                # Skip profile if does not contain product type
                if product_type_low not in profile["product_types_lowered"]:
                    continue

                # Precompile name filters as regexes (once per profile,
                #   not once per product)
                profile_regexes = profile.get("product_name_filters")
                if profile_regexes:
                    profile_regexes = [
                        re.compile(regex)
                        for regex in profile_regexes
                    ]

                for product_entity in type_product_entities:
                    # Verify regex filtering (optional)
                    if profile_regexes:
                        valid = any(
                            pattern.match(product_entity["name"])
                            for pattern in profile_regexes
                        )
                        if not valid:
                            continue

                    profiles_by_product_id[product_entity["id"]] = profile

                # break profiles loop on finding the first matching profile
                break
        return profiles_by_product_id

    def load_containers_by_folder_data(
        self, linked_folder_data, build_profiles, loaders_by_name
    ):
        """Load containers for entered folder entity by Build profiles.

        Args:
            linked_folder_data (Dict[str, Any]): Prepared data with products,
                last versions and representations for specific folder.
            build_profiles (Dict[str, Any]): Build profiles.
            loaders_by_name (Dict[str, LoaderPlugin]): Available loaders
                per name.

        Returns:
            Dict[str, Any]: Output contains folder entity
                and loaded containers.
        """

        # Make sure all data are not empty
        if not linked_folder_data or not build_profiles or not loaders_by_name:
            return

        folder_entity = linked_folder_data["folder_entity"]

        valid_profiles = self._filter_build_profiles(
            build_profiles, loaders_by_name
        )
        if not valid_profiles:
            self.log.warning(
                "There are not valid Workfile profiles. Skipping process."
            )
            return

        self.log.debug("Valid Workfile profiles: {}".format(valid_profiles))

        products_by_id = {}
        version_by_product_id = {}
        repres_by_version_id = {}
        for product_id, in_data in linked_folder_data["products"].items():
            product_entity = in_data["product_entity"]
            products_by_id[product_entity["id"]] = product_entity

            version_data = in_data["version"]
            version_entity = version_data["version_entity"]
            version_by_product_id[product_id] = version_entity
            repres_by_version_id[version_entity["id"]] = (
                version_data["repres"]
            )

        if not products_by_id:
            self.log.warning("There are not products for folder {}".format(
                folder_entity["path"]
            ))
            return

        profiles_by_product_id = self._prepare_profile_for_products(
            products_by_id.values(), valid_profiles
        )
        if not profiles_by_product_id:
            self.log.warning("There are not valid products.")
            return

        # Keep only representations whose name matches the profile filter
        valid_repres_by_product_id = collections.defaultdict(list)
        for product_id, profile in profiles_by_product_id.items():
            profile_repre_names = profile["repre_names_lowered"]

            version_entity = version_by_product_id[product_id]
            version_id = version_entity["id"]
            repres = repres_by_version_id[version_id]
            for repre in repres:
                repre_name_low = repre["name"].lower()
                if repre_name_low in profile_repre_names:
                    valid_repres_by_product_id[product_id].append(repre)

        # DEBUG message
        msg = "Valid representations for Folder: `{}`".format(
            folder_entity["path"]
        )
        for product_id, repres in valid_repres_by_product_id.items():
            product_entity = products_by_id[product_id]
            msg += "\n# Product Name/ID: `{}`/{}".format(
                product_entity["name"], product_id
            )
            for repre in repres:
                msg += "\n## Repre name: `{}`".format(repre["name"])

        self.log.debug(msg)

        containers = self._load_containers(
            valid_repres_by_product_id, products_by_id,
            profiles_by_product_id, loaders_by_name
        )

        return {
            "folder_entity": folder_entity,
            "containers": containers
        }

    def _load_containers(
        self, repres_by_product_id, products_by_id,
        profiles_by_product_id, loaders_by_name
    ):
        """Real load by collected data happens here.

        Loading of representations per product happens here. Each product can
        load one representation. Loading is tried in specific order.
        Representations are tried to load by names defined in configuration.
        If product has representation matching representation name each loader
        is tried to load it until any is successful. If none of them was
        successful then next representation name is tried.
        Product process loop ends when any representation is loaded or
        all matching representations were already tried.

        Args:
            repres_by_product_id (Dict[str, Dict[str, Any]]): Available
                representations mapped by their parent (product) id.
            products_by_id (Dict[str, Dict[str, Any]]): Product entities
                mapped by their id.
            profiles_by_product_id (Dict[str, Dict[str, Any]]): Build profiles
                mapped by product id.
            loaders_by_name (Dict[str, LoaderPlugin]): Available loaders
                per name.

        Returns:
            List[Dict[str, Any]]: Objects of loaded containers.
        """

        loaded_containers = []

        # Get product id order from build presets.
        build_presets = self.build_presets.get("current_context", [])
        build_presets += self.build_presets.get("linked_assets", [])
        product_ids_ordered = []
        for preset in build_presets:
            for product_type in preset["product_types"]:
                for product_id, product_entity in products_by_id.items():
                    # TODO 'families' is not available on product
                    families = product_entity["data"].get("families") or []
                    if product_type not in families:
                        continue

                    product_ids_ordered.append(product_id)

        # Order representations from products.
        self.log.debug(
            "repres_by_product_id: %s", repres_by_product_id
        )
        representations_ordered = []
        representations = []
        for ordered_product_id in product_ids_ordered:
            for product_id, repres in repres_by_product_id.items():
                if repres in representations:
                    continue

                if ordered_product_id == product_id:
                    representations_ordered.append((product_id, repres))
                    representations.append(repres)

        self.log.debug("representations: %s", representations)

        # Load ordered representations.
        for product_id, repres in representations_ordered:
            product_name = products_by_id[product_id]["name"]

            profile = profiles_by_product_id[product_id]
            loaders_last_idx = len(profile["loaders"]) - 1
            repre_names_last_idx = len(profile["repre_names_lowered"]) - 1

            repre_by_low_name = {
                repre["name"].lower(): repre for repre in repres
            }

            is_loaded = False
            for repre_name_idx, profile_repre_name in enumerate(
                profile["repre_names_lowered"]
            ):
                # Break iteration if representation was already loaded
                if is_loaded:
                    break

                repre = repre_by_low_name.get(profile_repre_name)
                if not repre:
                    continue

                for loader_idx, loader_name in enumerate(profile["loaders"]):
                    if is_loaded:
                        break

                    loader = loaders_by_name.get(loader_name)
                    if not loader:
                        continue
                    try:
                        container = load_container(
                            loader,
                            repre["id"],
                            name=product_name
                        )
                        loaded_containers.append(container)
                        is_loaded = True

                    except Exception as exc:
                        # NOTE: must be an isinstance check - comparing the
                        #   exception instance to the class with '==' is
                        #   always False.
                        if isinstance(exc, IncompatibleLoaderError):
                            self.log.info((
                                "Loader `{}` is not compatible with"
                                " representation `{}`"
                            ).format(loader_name, repre["name"]))

                        else:
                            self.log.error(
                                "Unexpected error happened during loading",
                                exc_info=True
                            )

                        msg = "Loading failed."
                        if loader_idx < loaders_last_idx:
                            msg += " Trying next loader."
                        elif repre_name_idx < repre_names_last_idx:
                            # More representation names remain to try
                            msg += " Trying next representation."
                        else:
                            # All loaders and representation names exhausted
                            msg += (
                                " Loading of product `{}` was not successful."
                            ).format(product_name)
                        self.log.info(msg)

        return loaded_containers

    def _collect_last_version_repres(self, folder_entities):
        """Collect products, versions and representations for folder_entities.

        Args:
            folder_entities (List[Dict[str, Any]]): Folder entities for which
                want to find data.

        Returns:
            Dict[str, Any]: collected entities

        Example output:
        ```
        {
            <folder id>: {
                "folder_entity": <dict[str, Any]>,
                "products": {
                    <product id>: {
                        "product_entity": <dict[str, Any]>,
                        "version": {
                            "version_entity": <VersionEntity>,
                            "repres": [
                                <RepreEntity1>, <RepreEntity2>, ...
                            ]
                        }
                    },
                    ...
                }
            },
            ...
        }
        output[folder_id]["products"][product_id]["version"]["repres"]
        ```
        """

        from ayon_core.pipeline.context_tools import get_current_project_name

        output = {}
        if not folder_entities:
            return output

        folder_entities_by_id = {
            folder_entity["id"]: folder_entity
            for folder_entity in folder_entities
        }

        project_name = get_current_project_name()
        product_entities = list(ayon_api.get_products(
            project_name, folder_ids=folder_entities_by_id.keys()
        ))
        product_entities_by_id = {
            product_entity["id"]: product_entity
            for product_entity in product_entities
        }

        last_version_by_product_id = ayon_api.get_last_versions(
            project_name, product_entities_by_id.keys()
        )
        last_version_entities_by_id = {
            version_entity["id"]: version_entity
            for version_entity in last_version_by_product_id.values()
        }
        repre_entities = ayon_api.get_representations(
            project_name, version_ids=last_version_entities_by_id.keys()
        )

        # Build the nested output structure from representations upwards
        for repre_entity in repre_entities:
            version_id = repre_entity["versionId"]
            version_entity = last_version_entities_by_id[version_id]

            product_id = version_entity["productId"]
            product_entity = product_entities_by_id[product_id]

            folder_id = product_entity["folderId"]
            folder_entity = folder_entities_by_id[folder_id]

            if folder_id not in output:
                output[folder_id] = {
                    "folder_entity": folder_entity,
                    "products": {}
                }

            if product_id not in output[folder_id]["products"]:
                output[folder_id]["products"][product_id] = {
                    "product_entity": product_entity,
                    "version": {
                        "version_entity": version_entity,
                        "repres": []
                    }
                }

            (
                output
                [folder_id]
                ["products"]
                [product_id]
                ["version"]
                ["repres"]
            ).append(repre_entity)

        return output

build_workfile()

Prepares and load containers into workfile.

Loads latest versions of current and linked folders to workfile by logic stored in Workfile profiles from presets. Profiles are set by host, filtered by current task name and used by families.

Each product type can specify representation names and loaders for representations and first available and successful loaded representation is returned as container.

At the end you'll get list of loaded containers per each folder.

loaded_containers [{ "folder_entity": &lt;FolderEntity1&gt;, "containers": [&lt;Container1&gt;, &lt;Container2&gt;, ...] }, { "folder_entity": &lt;FolderEntity2&gt;, "containers": [&lt;Container3&gt;, ...] }, { ... }]

Returns:

Type Description

List[Dict[str, Any]]: Loaded containers during build.

Source code in client/ayon_core/pipeline/workfile/build_workfile.py
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
def build_workfile(self):
    """Prepares and load containers into workfile.

    Loads latest versions of current and linked folders to workfile by
    logic stored in Workfile profiles from presets. Profiles are set
    by host, filtered by current task name and used by families.

    Each product type can specify representation names and loaders for
    representations and first available and successful loaded
    representation is returned as container.

    At the end you'll get list of loaded containers per each folder.

    loaded_containers [{
        "folder_entity": <FolderEntity1>,
        "containers": [<Container1>, <Container2>, ...]
    }, {
        "folder_entity": <FolderEntity2>,
        "containers": [<Container3>, ...]
    }, {
        ...
    }]

    Returns:
        List[Dict[str, Any]]: Loaded containers during build.
    """

    from ayon_core.pipeline.context_tools import get_current_context

    loaded_containers = []

    # Get current folder and task entities
    context = get_current_context()
    project_name = context["project_name"]
    current_folder_path = context["folder_path"]
    current_task_name = context["task_name"]

    current_folder_entity = ayon_api.get_folder_by_path(
        project_name, current_folder_path
    )
    # Skip if folder was not found
    if not current_folder_entity:
        # Use logger instead of bare 'print' to stay consistent with
        #   the rest of the method
        self.log.warning("Folder entity `{}` was not found".format(
            current_folder_path
        ))
        return loaded_containers

    # Prepare available loaders
    loaders_by_name = {}
    for loader in discover_loader_plugins():
        if not loader.enabled:
            continue
        loader_name = loader.__name__
        if loader_name in loaders_by_name:
            raise KeyError(
                "Duplicated loader name {0}!".format(loader_name)
            )
        loaders_by_name[loader_name] = loader

    # Skip if there are no loaders
    if not loaders_by_name:
        self.log.warning("There are no registered loaders.")
        return loaded_containers

    # Load workfile presets for task
    self.build_presets = self.get_build_presets(
        current_task_name, current_folder_entity["id"]
    )

    # Skip if there are no presets for task
    if not self.build_presets:
        self.log.warning(
            "Current task `{}` does not have any loading preset.".format(
                current_task_name
            )
        )
        return loaded_containers

    # Get presets for loading current folder
    current_context_profiles = self.build_presets.get("current_context")
    # Get presets for loading linked folders
    link_context_profiles = self.build_presets.get("linked_assets")
    # Skip if both are missing
    if not current_context_profiles and not link_context_profiles:
        self.log.warning(
            "Current task `{}` has empty loading preset.".format(
                current_task_name
            )
        )
        return loaded_containers

    elif not current_context_profiles:
        self.log.warning((
            "Current task `{}` doesn't have any loading"
            " preset for it's context."
        ).format(current_task_name))

    elif not link_context_profiles:
        # NOTE: A space was missing between the concatenated string
        #   parts which produced "anyloading" in the log message.
        self.log.warning((
            "Current task `{}` doesn't have any"
            " loading preset for it's linked folders."
        ).format(current_task_name))

    # Prepare folders to process by workfile presets
    folder_entities = []
    current_folder_id = None
    if current_context_profiles:
        # Add current folder entity if preset has current context set
        folder_entities.append(current_folder_entity)
        current_folder_id = current_folder_entity["id"]

    if link_context_profiles:
        # Find and append linked folders if preset has set linked mapping
        linked_folder_entities = self._get_linked_folder_entities(
            project_name, current_folder_entity["id"]
        )
        if linked_folder_entities:
            folder_entities.extend(linked_folder_entities)

    # Skip if there are no folders. This can happen if only linked mapping
    # is set and there are no links for this folder.
    if not folder_entities:
        self.log.warning(
            "Folder does not have linked folders. Nothing to process."
        )
        return loaded_containers

    # Prepare entities from database for folders
    prepared_entities = self._collect_last_version_repres(
        folder_entities
    )

    # Load containers by prepared entities and presets
    # - Current folder containers
    if current_folder_id and current_folder_id in prepared_entities:
        current_context_data = prepared_entities.pop(current_folder_id)
        loaded_data = self.load_containers_by_folder_data(
            current_context_data,
            current_context_profiles,
            loaders_by_name
        )
        if loaded_data:
            loaded_containers.append(loaded_data)

    # - Linked assets container
    for linked_folder_data in prepared_entities.values():
        loaded_data = self.load_containers_by_folder_data(
            linked_folder_data,
            link_context_profiles,
            loaders_by_name
        )
        if loaded_data:
            loaded_containers.append(loaded_data)

    # Return list of loaded containers
    return loaded_containers

get_build_presets(task_name, folder_id)

Returns presets to build workfile for task name.

Presets are loaded for current project received by 'get_current_project_name', filtered by registered host and entered task name.

Parameters:

Name Type Description Default
task_name str

Task name used for filtering build presets.

required
folder_id str

Folder id.

required

Returns:

Type Description

Dict[str, Any]: preset per entered task name

Source code in client/ayon_core/pipeline/workfile/build_workfile.py
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
def get_build_presets(self, task_name, folder_id):
    """Return workfile build presets for entered task name.

    Presets are read from project settings of the current project
    (resolved with 'get_current_project_name') and narrowed down by the
    registered host and the passed task name.

    Args:
        task_name (str): Task name used for filtering build presets.
        folder_id (str): Folder id.

    Returns:
        Dict[str, Any]: preset per entered task name
    """

    from ayon_core.pipeline.context_tools import (
        get_current_host_name,
        get_current_project_name,
    )

    project_name = get_current_project_name()
    project_settings = get_project_settings(project_name)
    host_settings = project_settings.get(get_current_host_name()) or {}

    # Settings key changed over time -> try the new key first and fall
    #   back to the old one for backward compatibility
    wb_settings = (
        host_settings.get("workfile_builder")
        or host_settings.get("workfile_build")
        or {}
    )

    builder_profiles = wb_settings.get("profiles")
    if not builder_profiles:
        return None

    # Resolve task type from task entity (if task exists)
    task_type = None
    task_entity = ayon_api.get_task_by_name(
        project_name,
        folder_id,
        task_name,
    )
    if task_entity:
        task_type = task_entity["taskType"]

    return filter_profiles(
        builder_profiles,
        {"task_types": task_type, "tasks": task_name}
    )

load_containers_by_folder_data(linked_folder_data, build_profiles, loaders_by_name)

Load containers for entered folder entity by Build profiles.

Parameters:

Name Type Description Default
linked_folder_data Dict[str, Any]

Prepared data with products, last versions and representations for specific folder.

required
build_profiles Dict[str, Any]

Build profiles.

required
loaders_by_name Dict[str, LoaderPlugin]

Available loaders per name.

required

Returns:

Type Description

Dict[str, Any]: Output contains folder entity and loaded containers.

Source code in client/ayon_core/pipeline/workfile/build_workfile.py
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
def load_containers_by_folder_data(
    self, linked_folder_data, build_profiles, loaders_by_name
):
    """Load containers for entered folder entity by Build profiles.

    Args:
        linked_folder_data (Dict[str, Any]): Prepared data with products,
            last versions and representations for specific folder.
        build_profiles (Dict[str, Any]): Build profiles.
        loaders_by_name (Dict[str, LoaderPlugin]): Available loaders
            per name.

    Returns:
        Dict[str, Any]: Output contains folder entity
            and loaded containers.
    """

    # Bail out early when any of the inputs is empty
    if not (linked_folder_data and build_profiles and loaders_by_name):
        return

    folder_entity = linked_folder_data["folder_entity"]

    valid_profiles = self._filter_build_profiles(
        build_profiles, loaders_by_name
    )
    if not valid_profiles:
        self.log.warning(
            "There are not valid Workfile profiles. Skipping process."
        )
        return

    self.log.debug("Valid Workfile profiles: {}".format(valid_profiles))

    # Map products by id, last versions by product id and
    #   representations by version id
    products_by_id = {}
    version_by_product_id = {}
    repres_by_version_id = {}
    for product_id, product_data in linked_folder_data["products"].items():
        product_entity = product_data["product_entity"]
        version_info = product_data["version"]
        version_entity = version_info["version_entity"]

        products_by_id[product_entity["id"]] = product_entity
        version_by_product_id[product_id] = version_entity
        repres_by_version_id[version_entity["id"]] = version_info["repres"]

    if not products_by_id:
        self.log.warning("There are not products for folder {}".format(
            folder_entity["path"]
        ))
        return

    profiles_by_product_id = self._prepare_profile_for_products(
        products_by_id.values(), valid_profiles
    )
    if not profiles_by_product_id:
        self.log.warning("There are not valid products.")
        return

    # Keep only representations whose lowered name is allowed by the
    #   matched profile
    valid_repres_by_product_id = collections.defaultdict(list)
    for product_id, profile in profiles_by_product_id.items():
        allowed_repre_names = profile["repre_names_lowered"]
        version_id = version_by_product_id[product_id]["id"]
        for repre_entity in repres_by_version_id[version_id]:
            if repre_entity["name"].lower() in allowed_repre_names:
                valid_repres_by_product_id[product_id].append(repre_entity)

    # DEBUG message
    msg_lines = ["Valid representations for Folder: `{}`".format(
        folder_entity["path"]
    )]
    for product_id, repres in valid_repres_by_product_id.items():
        product_entity = products_by_id[product_id]
        msg_lines.append("# Product Name/ID: `{}`/{}".format(
            product_entity["name"], product_id
        ))
        msg_lines.extend(
            "## Repre name: `{}`".format(repre_entity["name"])
            for repre_entity in repres
        )
    self.log.debug("\n".join(msg_lines))

    containers = self._load_containers(
        valid_repres_by_product_id, products_by_id,
        profiles_by_product_id, loaders_by_name
    )

    return {
        "folder_entity": folder_entity,
        "containers": containers
    }

process()

Main method of this wrapper.

Building of workfile is triggered and is possible to implement post processing of loaded containers if necessary.

Returns:

Type Description

List[Dict[str, Any]]: Loaded containers during build.

Source code in client/ayon_core/pipeline/workfile/build_workfile.py
53
54
55
56
57
58
59
60
61
62
63
def process(self):
    """Main method of this wrapper.

    Triggers the workfile build. Post processing of loaded containers
    can be implemented here if necessary.

    Returns:
        List[Dict[str, Any]]: Loaded containers during build.
    """

    loaded_containers = self.build_workfile()
    return loaded_containers

create_workdir_extra_folders(workdir, host_name, task_type, task_name, project_name, project_settings=None)

Create extra folders in work directory based on context.

Parameters:

Name Type Description Default
workdir str

Path to workdir where workfiles is stored.

required
host_name str

Name of host implementation.

required
task_type str

Type of task for which extra folders should be created.

required
task_name str

Name of task for which extra folders should be created.

required
project_name str

Name of project on which task is.

required
project_settings dict

Prepared project settings. Are loaded if not passed.

None
Source code in client/ayon_core/pipeline/workfile/path_resolving.py
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
def create_workdir_extra_folders(
    workdir,
    host_name,
    task_type,
    task_name,
    project_name,
    project_settings=None
):
    """Create extra folders in work directory based on context.

    Args:
        workdir (str): Path to workdir where workfiles is stored.
        host_name (str): Name of host implementation.
        task_type (str): Type of task for which extra folders should be
            created.
        task_name (str): Name of task for which extra folders should be
            created.
        project_name (str): Name of project on which task is.
        project_settings (dict): Prepared project settings. Are loaded if not
            passed.
    """

    # Load project settings if not set
    if not project_settings:
        project_settings = get_project_settings(project_name)

    # Load extra folders profiles
    extra_folders_profiles = (
        project_settings["core"]["tools"]["Workfiles"]["extra_folders"]
    )
    # Skip if are empty
    if not extra_folders_profiles:
        return

    # Prepare profiles filters
    filter_data = {
        "task_types": task_type,
        "task_names": task_name,
        "hosts": host_name
    }
    profile = filter_profiles(extra_folders_profiles, filter_data)
    if profile is None:
        return

    for subfolder in profile["folders"]:
        # Make sure backslashes are converted to forwards slashes
        #   and does not start with slash
        subfolder = subfolder.replace("\\", "/").lstrip("/")
        # Skip empty strings
        if not subfolder:
            continue

        # 'exist_ok' avoids the race between an existence check and the
        #   creation when multiple processes prepare the same workdir
        os.makedirs(os.path.join(workdir, subfolder), exist_ok=True)

get_custom_workfile_template(project_entity, folder_entity, task_entity, host_name, anatomy=None, project_settings=None)

Filter and fill workfile template profiles by passed context.

Custom workfile template can be used as first version of workfiles. Template is a file on a disk which is set in settings. Expected settings structure to have this feature enabled is: project settings |- &lt;host name&gt; |- workfile_builder |- create_first_version - a bool which must be set to 'True' |- custom_templates - profiles based on task name/type which point to a file which is copied as first workfile

It is expected that passed argument are already queried entities of project and folder as parents of processing task name.

Parameters:

Name Type Description Default
project_entity Dict[str, Any]

Project entity.

required
folder_entity Dict[str, Any]

Folder entity.

required
task_entity Dict[str, Any]

Task entity.

required
host_name str

Name of host.

required
anatomy Anatomy

Optionally passed anatomy object for passed project name.

None
project_settings(Dict[str, Any]

Preloaded project settings.

required

Returns:

Name Type Description
str

Path to template or None if none of profiles match current context. Existence of formatted path is not validated.

None

If no profile is matching context.

Source code in client/ayon_core/pipeline/workfile/path_resolving.py
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
def get_custom_workfile_template(
    project_entity,
    folder_entity,
    task_entity,
    host_name,
    anatomy=None,
    project_settings=None
):
    """Filter and fill workfile template profiles by passed context.

    Custom workfile template can be used as first version of workfiles.
    Template is a file on a disk which is set in settings. Expected settings
    structure to have this feature enabled is:
    project settings
    |- <host name>
      |- workfile_builder
        |- create_first_version   - a bool which must be set to 'True'
        |- custom_templates       - profiles based on task name/type which
                                    points to a file which is copied as
                                    first workfile

    It is expected that passed argument are already queried entities of
    project and folder as parents of processing task name.

    Args:
        project_entity (Dict[str, Any]): Project entity.
        folder_entity (Dict[str, Any]): Folder entity.
        task_entity (Dict[str, Any]): Task entity.
        host_name (str): Name of host.
        anatomy (Anatomy): Optionally passed anatomy object for passed project
            name.
        project_settings (Dict[str, Any]): Preloaded project settings.

    Returns:
        str: Path to template or None if none of profiles match current
            context. Existence of formatted path is not validated.
        None: If no profile is matching context.
    """

    log = Logger.get_logger("CustomWorkfileResolve")

    project_name = project_entity["name"]
    # Load settings lazily only when they were not passed in
    if project_settings is None:
        project_settings = get_project_settings(project_name)

    host_settings = project_settings.get(host_name)
    if not host_settings:
        log.info("Host \"{}\" doesn't have settings".format(host_name))
        return None

    # Feature lives under 'workfile_builder' key - missing key means old
    #   settings without this feature
    workfile_builder_settings = host_settings.get("workfile_builder")
    if not workfile_builder_settings:
        log.info((
            "Seems like old version of settings is used."
            " Can't access custom templates in host \"{}\"."
        ).format(host_name))
        return

    # Feature must be explicitly enabled in settings
    if not workfile_builder_settings["create_first_version"]:
        log.info((
            "Project \"{}\" has turned off to create first workfile for"
            " host \"{}\""
        ).format(project_name, host_name))
        return

    # Backwards compatibility
    template_profiles = workfile_builder_settings.get("custom_templates")
    if not template_profiles:
        log.info(
            "Custom templates are not filled. Skipping template copy."
        )
        return

    # Anatomy creation may be expensive -> create only when needed
    if anatomy is None:
        anatomy = Anatomy(project_name)

    # get project, folder, task anatomy context data
    anatomy_context_data = get_template_data(
        project_entity, folder_entity, task_entity, host_name
    )
    # add root dict
    anatomy_context_data["root"] = anatomy.roots

    # get task type for the task in context
    current_task_type = anatomy_context_data["task"]["type"]

    # get path from matching profile
    matching_item = filter_profiles(
        template_profiles,
        {"task_types": current_task_type}
    )
    # when path is available try to format it in case
    # there are some anatomy template strings
    if matching_item:
        # extend anatomy context with os.environ to
        # also allow formatting against env
        full_context_data = os.environ.copy()
        full_context_data.update(anatomy_context_data)

        # Profile path is stored per platform ('windows'/'linux'/'darwin')
        template = matching_item["path"][platform.system().lower()]
        return StringTemplate.format_strict_template(
            template, full_context_data
        ).normalized()

    return None

get_custom_workfile_template_by_string_context(project_name, folder_path, task_name, host_name, anatomy=None, project_settings=None)

Filter and fill workfile template profiles by passed context.

Passed context are string representations of project, folder and task. Function will query documents of project and folder to be able to use get_custom_workfile_template for rest of logic.

Parameters:

Name Type Description Default
project_name str

Project name.

required
folder_path str

Folder path.

required
task_name str

Task name.

required
host_name str

Name of host.

required
anatomy Anatomy

Optionally prepared anatomy object for passed project.

None
project_settings Dict[str, Any]

Preloaded project settings.

None

Returns:

Type Description

Union[str, None]: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.)

Source code in client/ayon_core/pipeline/workfile/path_resolving.py
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
def get_custom_workfile_template_by_string_context(
    project_name,
    folder_path,
    task_name,
    host_name,
    anatomy=None,
    project_settings=None
):
    """Filter and fill workfile template profiles by passed context.

    Context is defined by string names of project, folder and task.
    Entities of project, folder and task are queried from server so that
    'get_custom_workfile_template' can be used for the rest of the logic.

    Args:
        project_name (str): Project name.
        folder_path (str): Folder path.
        task_name (str): Task name.
        host_name (str): Name of host.
        anatomy (Anatomy): Optionally prepared anatomy object for passed
            project.
        project_settings (Dict[str, Any]): Preloaded project settings.

    Returns:
        Union[str, None]: Path to template or None if none of profiles match
            current context. (Existence of formatted path is not validated.)

    """

    # Query entities needed by 'get_custom_workfile_template'
    project_entity = ayon_api.get_project(project_name)
    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
    task_entity = ayon_api.get_task_by_name(
        project_name, folder_entity["id"], task_name
    )

    return get_custom_workfile_template(
        project_entity,
        folder_entity,
        task_entity,
        host_name,
        anatomy=anatomy,
        project_settings=project_settings,
    )

get_last_workfile(workdir, file_template, fill_data, extensions, full_path=False)

Return last workfile filename.

Returns file with version 1 if there is no workfile yet.

Parameters:

Name Type Description Default
workdir str

Path to dir where workfiles are stored.

required
file_template str

Template of file name.

required
fill_data Dict[str, Any]

Data for filling template.

required
extensions Iterable[str]

All allowed file extensions of workfile.

required
full_path Optional[bool]

Full path to file is returned if set to True.

False

Returns:

Name Type Description
str

Last or first workfile as filename of full path to filename.

Source code in client/ayon_core/pipeline/workfile/path_resolving.py
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
def get_last_workfile(
    workdir, file_template, fill_data, extensions, full_path=False
):
    """Return last workfile filename.

    Returns file with version 1 if there is no workfile yet.

    Args:
        workdir (str): Path to dir where workfiles are stored.
        file_template (str): Template of file name.
        fill_data (Dict[str, Any]): Data for filling template.
        extensions (Iterable[str]): All allowed file extensions of workfile.
        full_path (Optional[bool]): Full path to file is returned if
            set to True.

    Returns:
        str: Last or first workfile as filename of full path to filename.
    """

    # Materialize extensions so any iterable (set, generator) can be
    #   used both for the version lookup and as fallback extension source
    #   ('extensions[0]' would fail on non-indexable iterables)
    extensions = list(extensions)

    filename, version = get_last_workfile_with_version(
        workdir, file_template, fill_data, extensions
    )
    if filename is None:
        # No existing workfile found -> build a first version filename
        data = copy.deepcopy(fill_data)
        data["version"] = version_start.get_versioning_start(
            data["project"]["name"],
            data["app"],
            task_name=data["task"]["name"],
            task_type=data["task"]["type"],
            product_type="workfile"
        )
        # Comment should not be part of a fresh first-version filename
        data.pop("comment", None)
        # Fall back to first allowed extension when none is filled
        if not data.get("ext"):
            data["ext"] = extensions[0]
        data["ext"] = data["ext"].lstrip(".")
        filename = StringTemplate.format_strict_template(file_template, data)

    if full_path:
        return os.path.normpath(os.path.join(workdir, filename))

    return filename

get_last_workfile_with_version(workdir, file_template, fill_data, extensions)

Return last workfile version.

Using workfile template and its filling data, find the most probable last version of workfile which was created for the context.

Functionality is fully based on knowing which keys are optional or what values are expected as value.

The last modified file is used if more files can be considered as last workfile.

Parameters:

Name Type Description Default
workdir str

Path to dir where workfiles are stored.

required
file_template str

Template of file name.

required
fill_data Dict[str, Any]

Data for filling template.

required
extensions Iterable[str]

All allowed file extensions of workfile.

required

Returns:

Type Description

Tuple[Union[str, None], Union[int, None]]: Last workfile with version if there is any workfile otherwise None for both.

Source code in client/ayon_core/pipeline/workfile/path_resolving.py
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
def get_last_workfile_with_version(
    workdir, file_template, fill_data, extensions
):
    """Return last workfile version.

    Using workfile template and its filling data find most possible last
    version of workfile which was created for the context.

    Functionality is fully based on knowing which keys are optional or what
    values are expected as value.

    The last modified file is used if more files can be considered as
    last workfile.

    Args:
        workdir (str): Path to dir where workfiles are stored.
        file_template (str): Template of file name.
        fill_data (Dict[str, Any]): Data for filling template.
        extensions (Iterable[str]): All allowed file extensions of workfile.

    Returns:
        Tuple[Union[str, None], Union[int, None]]: Last workfile with version
            if there is any workfile otherwise None for both.
    """

    # Nothing to look for when the work directory does not exist yet
    if not os.path.exists(workdir):
        return None, None

    # Normalize extensions to always contain leading dot
    dotted_extensions = set()
    for ext in extensions:
        if not ext.startswith("."):
            ext = ".{}".format(ext)
        dotted_extensions.add(ext)

    # Fast match on extension
    filenames = [
        filename
        for filename in os.listdir(workdir)
        if os.path.splitext(filename)[-1] in dotted_extensions
    ]

    # Build template without optionals, version to digits only regex
    # and comment to any definable value.
    # Escape extensions dot for regex
    regex_exts = [
        "\\" + ext
        for ext in dotted_extensions
    ]
    ext_expression = "(?:" + "|".join(regex_exts) + ")"

    # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end
    file_template = re.sub(r"\.?{ext}", ext_expression, file_template)
    # Replace optional keys with optional content regex
    file_template = re.sub(r"<.*?>", r".*?", file_template)
    # Replace `{version}` with group regex
    #   - this is the only capture group; group(1) below reads the version
    file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template)
    file_template = re.sub(r"{comment.*?}", r".+?", file_template)
    file_template = StringTemplate.format_strict_template(
        file_template, fill_data
    )

    # Match with ignore case on Windows due to the Windows
    # OS not being case-sensitive. This avoids later running
    # into the error that the file did exist if it existed
    # with a different upper/lower-case.
    kwargs = {}
    if platform.system().lower() == "windows":
        kwargs["flags"] = re.IGNORECASE

    # Get highest version among existing matching files
    version = None
    output_filenames = []
    for filename in sorted(filenames):
        match = re.match(file_template, filename, **kwargs)
        if not match:
            continue

        # Template without '{version}' produces no capture group
        #   -> every matching file is a candidate
        if not match.groups():
            output_filenames.append(filename)
            continue

        file_version = int(match.group(1))
        if version is None or file_version > version:
            # Higher version found -> previous candidates are obsolete
            output_filenames[:] = []
            version = file_version

        if file_version == version:
            output_filenames.append(filename)

    # Prefer the most recently modified file when multiple files share
    #   the same (highest) version
    output_filename = None
    if output_filenames:
        if len(output_filenames) == 1:
            output_filename = output_filenames[0]
        else:
            last_time = None
            for _output_filename in output_filenames:
                full_path = os.path.join(workdir, _output_filename)
                mod_time = os.path.getmtime(full_path)
                if last_time is None or last_time < mod_time:
                    output_filename = _output_filename
                    last_time = mod_time

    return output_filename, version

get_workdir(project_entity, folder_entity, task_entity, host_name, anatomy=None, template_key=None, project_settings=None)

Fill workdir path from entered data and project's anatomy.

Parameters:

Name Type Description Default
project_entity Dict[str, Any]

Project entity.

required
folder_entity Dict[str, Any]

Folder entity.

required
task_entity dict[str, Any]

Task entity.

required
host_name str

Host which is used to workdir. This is required because workdir template may contain {app} key. In Session is stored under AYON_HOST_NAME key.

required
anatomy Anatomy

Optional argument. Anatomy object is created using project name from project_entity. It is preferred to pass this argument as initialization of a new Anatomy object may be time consuming.

None
template_key str

Key of work templates in anatomy templates. Default value is defined in get_workdir_with_workdir_data.

None
project_settings(Dict[str, Any]

Prepared project settings for project name. Optional to make processing faster. Ans id used only if 'template_key' is not passed.

required

Returns:

Name Type Description
TemplateResult

Workdir path.

Source code in client/ayon_core/pipeline/workfile/path_resolving.py
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
def get_workdir(
    project_entity,
    folder_entity,
    task_entity,
    host_name,
    anatomy=None,
    template_key=None,
    project_settings=None
):
    """Fill workdir path from entered data and project's anatomy.

    Args:
        project_entity (Dict[str, Any]): Project entity.
        folder_entity (Dict[str, Any]): Folder entity.
        task_entity (dict[str, Any]): Task entity.
        host_name (str): Host for which the workdir is resolved. Required
            because the workdir template may contain the `{app}` key. In
            `Session` it is stored under the `AYON_HOST_NAME` key.
        anatomy (Anatomy): Optional Anatomy object. When not passed a new
            one is created from the project name in `project_entity`, which
            may be time consuming, so passing it is preferred.
        template_key (str): Key of work templates in anatomy templates.
            Default value is defined in `get_workdir_with_workdir_data`.
        project_settings (Dict[str, Any]): Prepared project settings for
            the project. Optional, speeds up processing. Used only when
            'template_key' is not passed.

    Returns:
        TemplateResult: Workdir path.
    """

    # Creating Anatomy is expensive, do it only when caller did not pass one
    if not anatomy:
        anatomy = Anatomy(
            project_entity["name"], project_entity=project_entity
        )

    fill_data = get_template_data(
        project_entity,
        folder_entity,
        task_entity,
        host_name,
    )
    # Returned TemplateResult object carries useful data beyond the path
    return get_workdir_with_workdir_data(
        fill_data,
        anatomy.project_name,
        anatomy,
        template_key,
        project_settings,
    )

get_workdir_with_workdir_data(workdir_data, project_name, anatomy=None, template_key=None, project_settings=None)

Fill workdir path from entered data and project's anatomy.

It is possible to pass only project's name instead of project's anatomy but one of them must be entered. It is preferred to enter anatomy if is available as initialization of a new Anatomy object may be time consuming.

Parameters:

Name Type Description Default
workdir_data Dict[str, Any]

Data to fill workdir template.

required
project_name str

Project's name.

required
anatomy Anatomy

Anatomy object for specific project. Faster processing if is passed.

None
template_key str

Key of work templates in anatomy templates. If not passed get_workfile_template_key_from_context is used to get it.

None
project_settings Dict[str, Any]

Prepared project settings for project name. Optional to make processing faster. And is used only if 'template_key' is not passed.

required

Returns:

Name Type Description
TemplateResult

Workdir path.

Source code in client/ayon_core/pipeline/workfile/path_resolving.py
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
def get_workdir_with_workdir_data(
    workdir_data,
    project_name,
    anatomy=None,
    template_key=None,
    project_settings=None
):
    """Fill workdir path from entered data and project's anatomy.

    It is possible to pass only project's name instead of project's anatomy
    but one of them **must** be entered. It is preferred to pass anatomy when
    available as initialization of a new Anatomy object may be time consuming.

    Args:
        workdir_data (Dict[str, Any]): Data to fill workdir template.
        project_name (str): Project's name.
        anatomy (Anatomy): Anatomy object for specific project. Faster
            processing when passed.
        template_key (str): Key of work templates in anatomy templates. If not
            passed `get_workfile_template_key` resolves it from the
            workdir data.
        project_settings (Dict[str, Any]): Prepared project settings for
            the project. Optional, speeds up processing. Used only when
            'template_key' is not passed.

    Returns:
        TemplateResult: Workdir path.
    """

    anatomy = anatomy or Anatomy(project_name)

    if not template_key:
        template_key = get_workfile_template_key(
            workdir_data["project"]["name"],
            workdir_data["task"]["type"],
            workdir_data["app"],
            project_settings
        )

    directory_template = anatomy.get_template_item(
        "work", template_key, "directory"
    )
    # TemplateResult object which contains useful data beyond the path
    output = directory_template.format_strict(workdir_data)
    # Normalize path separators only when formatting produced a result
    return output.normalized() if output else output

get_workfile_template_key(project_name, task_type, host_name, project_settings=None)

Workfile template key which should be used to get workfile template.

Function is using profiles from project settings to return right template for passed task type and host name.

Parameters:

Name Type Description Default
project_name(str)

Project name.

required
task_type(str)

Task type.

required
host_name(str)

Host name (e.g. "maya", "nuke", ...)

required
project_settings(Dict[str, Any]

Prepared project settings for project name. Optional to make processing faster.

required
Source code in client/ayon_core/pipeline/workfile/path_resolving.py
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
def get_workfile_template_key(
    project_name, task_type, host_name, project_settings=None
):
    """Workfile template key which should be used to get workfile template.

    Function is using profiles from project settings to return right template
    for passed task type and host name.

    Args:
        project_name (str): Project name.
        task_type (str): Task type.
        host_name (str): Host name (e.g. "maya", "nuke", ...)
        project_settings (Dict[str, Any]): Prepared project settings for
            project name. Optional to make processing faster.

    Returns:
        str: Key of work templates in anatomy templates.
    """

    default = "work"
    # Profiles cannot be filtered without task type and host name
    if not task_type or not host_name:
        return default

    if not project_settings:
        project_settings = get_project_settings(project_name)

    try:
        profiles = (
            project_settings
            ["core"]
            ["tools"]
            ["Workfiles"]
            ["workfile_template_profiles"]
        )
    except (KeyError, TypeError):
        # Settings do not have the expected structure
        profiles = []

    if not profiles:
        return default

    profile_filter = {
        "task_types": task_type,
        "hosts": host_name
    }
    profile = filter_profiles(profiles, profile_filter)
    if profile:
        # Matched profile may have template name unfilled
        return profile["workfile_template"] or default
    return default

get_workfile_template_key_from_context(project_name, folder_path, task_name, host_name, project_settings=None)

Helper function to get template key for workfile template.

Does the same as get_workfile_template_key but returns the value for a "session context".

Parameters:

Name Type Description Default
project_name str

Project name.

required
folder_path str

Folder path.

required
task_name str

Task name.

required
host_name str

Host name.

required
project_settings Dict[str, Any]

Project settings for passed 'project_name'. Not required at all but makes function faster.

None

Returns:

Name Type Description
str str

Workfile template name.

Source code in client/ayon_core/pipeline/workfile/path_resolving.py
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
def get_workfile_template_key_from_context(
    project_name: str,
    folder_path: str,
    task_name: str,
    host_name: str,
    project_settings: Optional[Dict[str, Any]] = None,
) -> str:
    """Helper function to get template key for workfile template.

    Does the same as `get_workfile_template_key` but resolves task type
    from a "session context" (folder path and task name).

    Args:
        project_name (str): Project name.
        folder_path (str): Folder path.
        task_name (str): Task name.
        host_name (str): Host name.
        project_settings (Dict[str, Any]): Project settings for passed
            'project_name'. Not required at all but makes function faster.

    Returns:
        str: Workfile template name.

    Raises:
        ValueError: When folder or task was not found for passed context.

    """
    folder_entity = ayon_api.get_folder_by_path(
        project_name,
        folder_path,
        fields={"id"},
    )
    # Fail with explicit message instead of 'TypeError' on 'None' subscript
    if not folder_entity:
        raise ValueError(
            f"Folder '{folder_path}' was not found"
            f" in project '{project_name}'."
        )
    task_entity = ayon_api.get_task_by_name(
        project_name,
        folder_entity["id"],
        task_name,
        fields={"taskType"},
    )
    # Same guard for task - 'None' has no 'get' attribute
    if not task_entity:
        raise ValueError(
            f"Task '{task_name}' was not found"
            f" under folder '{folder_path}'."
        )
    task_type = task_entity.get("taskType")

    return get_workfile_template_key(
        project_name, task_type, host_name, project_settings
    )

should_open_workfiles_tool_on_launch(project_name, host_name, task_name, task_type, default_output=False, project_settings=None)

Define if host should start workfile tool at host launch.

Default output is False. Can be overridden with environment variable AYON_WORKFILE_TOOL_ON_START, valid values without case sensitivity are "0", "1", "true", "false", "yes", "no".

Parameters:

Name Type Description Default
project_name str

Name of project.

required
host_name str

Name of host which is launched. In avalon's application context it's value stored in app definition under key "application_dir". Is not case sensitive.

required
task_name str

Name of task which is used for launching the host. Task name is not case sensitive.

required
task_type str

Task type.

required
default_output Optional[bool]

Default output value if no profile is found.

False
project_settings Optional[dict[str, Any]]

Project settings.

None

Returns:

Name Type Description
bool

True if host should start workfile.

Source code in client/ayon_core/pipeline/workfile/utils.py
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
def should_open_workfiles_tool_on_launch(
    project_name,
    host_name,
    task_name,
    task_type,
    default_output=False,
    project_settings=None,
):
    """Define if host should start workfile tool at host launch.

    Default output is `False`. Can be overridden with environment variable
    `AYON_WORKFILE_TOOL_ON_START`, valid values without case sensitivity are
    `"0", "1", "true", "false", "yes", "no"`.

    Args:
        project_name (str): Name of project.
        host_name (str): Name of host which is launched. In avalon's
            application context it's value stored in app definition under
            key `"application_dir"`. Is not case sensitive.
        task_name (str): Name of task which is used for launching the host.
            Task name is not case sensitive.
        task_type (str): Task type.
        default_output (Optional[bool]): Default output value if no profile
            is found.
        project_settings (Optional[dict[str, Any]]): Project settings.

    Returns:
        bool: True if host should start workfile.

    """

    if project_settings is None:
        project_settings = get_project_settings(project_name)
    workfiles_settings = project_settings["core"]["tools"]["Workfiles"]
    profiles = workfiles_settings["open_workfile_tool_on_startup"]

    # Without any profile fall back to the requested default
    if not profiles:
        return default_output

    matching_item = filter_profiles(
        profiles,
        {
            "tasks": task_name,
            "task_types": task_type,
            "hosts": host_name,
        },
    )

    enabled = matching_item.get("enabled") if matching_item else None
    # Profile without explicit value also falls back to the default
    return default_output if enabled is None else enabled

should_use_last_workfile_on_launch(project_name, host_name, task_name, task_type, default_output=False, project_settings=None)

Define if host should start last version workfile if possible.

Default output is False. Can be overridden with environment variable AYON_OPEN_LAST_WORKFILE, valid values without case sensitivity are "0", "1", "true", "false", "yes", "no".

Parameters:

Name Type Description Default
project_name str

Name of project.

required
host_name str

Name of host which is launched. In avalon's application context it's value stored in app definition under key "application_dir". Is not case sensitive.

required
task_name str

Name of task which is used for launching the host. Task name is not case sensitive.

required
task_type str

Task type.

required
default_output Optional[bool]

Default output value if no profile is found.

False
project_settings Optional[dict[str, Any]]

Project settings.

None

Returns:

Name Type Description
bool

True if host should start workfile.

Source code in client/ayon_core/pipeline/workfile/utils.py
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
def should_use_last_workfile_on_launch(
    project_name,
    host_name,
    task_name,
    task_type,
    default_output=False,
    project_settings=None,
):
    """Define if host should start last version workfile if possible.

    Default output is `False`. Can be overridden with environment variable
    `AYON_OPEN_LAST_WORKFILE`, valid values without case sensitivity are
    `"0", "1", "true", "false", "yes", "no"`.

    Args:
        project_name (str): Name of project.
        host_name (str): Name of host which is launched. In avalon's
            application context it's value stored in app definition under
            key `"application_dir"`. Is not case sensitive.
        task_name (str): Name of task which is used for launching the host.
            Task name is not case sensitive.
        task_type (str): Task type.
        default_output (Optional[bool]): Default output value if no profile
            is found.
        project_settings (Optional[dict[str, Any]]): Project settings.

    Returns:
        bool: True if host should start workfile.

    """
    if project_settings is None:
        project_settings = get_project_settings(project_name)
    workfiles_settings = project_settings["core"]["tools"]["Workfiles"]
    profiles = workfiles_settings["last_workfile_on_startup"]

    # Without any profile fall back to the requested default
    if not profiles:
        return default_output

    matching_item = filter_profiles(
        profiles,
        {
            "tasks": task_name,
            "task_types": task_type,
            "hosts": host_name,
        },
    )

    enabled = matching_item.get("enabled") if matching_item else None
    # Profile without explicit value also falls back to the default
    return default_output if enabled is None else enabled