extract_gpu_cache

ExtractGPUCache

Bases: MayaExtractorPlugin, OptionalPyblishPluginMixin

Extract the content of the instance to a GPU cache file.
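
The extraction boils down to a single cmds.gpuCache call built from the class attributes shown in the source below. A rough stand-alone sketch of that call follows; the "pCube1" node and the temporary directory are placeholder assumptions, not values used by the plugin itself.

# Rough stand-alone sketch of the export this extractor performs.
# "pCube1" and the temporary directory are placeholders.
import tempfile

from maya import cmds

cmds.loadPlugin("gpuCache", quiet=True)

staging_dir = tempfile.mkdtemp()
cmds.gpuCache(
    "pCube1",
    directory=staging_dir,
    fileName="pCube1_gpu_cache",
    saveMultipleFiles=False,
    optimize=True,
    writeMaterials=True,
)
# Writes <staging_dir>/pCube1_gpu_cache.abc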

Source code in client/ayon_maya/plugins/publish/extract_gpu_cache.py
# Module-level imports (assumed; the rendered snippet starts at the class).
import json

from maya import cmds

from ayon_core.pipeline import publish
from ayon_maya.api import plugin


class ExtractGPUCache(plugin.MayaExtractorPlugin,
                      publish.OptionalPyblishPluginMixin):
    """Extract the content of the instance to a GPU cache file."""

    label = "GPU Cache"
    families = ["model", "animation", "pointcache"]
    step = 1.0
    stepSave = 1
    optimize = True
    optimizationThreshold = 40000
    optimizeAnimationsForMotionBlur = True
    writeMaterials = True
    useBaseTessellation = True

    def process(self, instance):
        # Skip when the artist disabled this optional extractor.
        if not self.is_active(instance.data):
            return

        # The gpuCache command requires Maya's gpuCache plug-in.
        cmds.loadPlugin("gpuCache", quiet=True)

        staging_dir = self.staging_dir(instance)
        filename = "{}_gpu_cache".format(instance.name)

        # Write out GPU cache file.
        kwargs = {
            "directory": staging_dir,
            "fileName": filename,
            "saveMultipleFiles": False,
            "simulationRate": self.step,
            "sampleMultiplier": self.stepSave,
            "optimize": self.optimize,
            "optimizationThreshold": self.optimizationThreshold,
            "optimizeAnimationsForMotionBlur": (
                self.optimizeAnimationsForMotionBlur
            ),
            "writeMaterials": self.writeMaterials,
            "useBaseTessellation": self.useBaseTessellation
        }
        self.log.debug(
            "Extract {} with:\n{}".format(
                instance[:], json.dumps(kwargs, indent=4, sort_keys=True)
            )
        )
        cmds.gpuCache(instance[:], **kwargs)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            "name": "gpu_cache",
            "ext": "abc",
            "files": filename + ".abc",
            "stagingDir": staging_dir,
            "outputName": "gpu_cache"
        }

        instance.data["representations"].append(representation)

        self.log.debug(
            "Extracted instance {} to: {}".format(instance.name, staging_dir)
        )
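
For reference, the resulting Alembic file can be attached to a gpuCache shape in Maya to preview it. A minimal sketch, with the file path standing in for <stagingDir>/<instanceName>_gpu_cache.abc:

from maya import cmds

cmds.loadPlugin("gpuCache", quiet=True)

# Placeholder path: substitute the stagingDir and file name from the
# representation produced above.
cache_shape = cmds.createNode("gpuCache", name="gpu_cache_previewShape")
cmds.setAttr(cache_shape + ".cacheFileName",
             "/path/to/stagingDir/instanceName_gpu_cache.abc",
             type="string")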