[{"data":1,"prerenderedAt":2122},["ShallowReactive",2],{"tag-count-pipeline-en":3,"tag-pipeline-en":4,"tags-sidebar-en":19,"posts-tag-pipeline-en-1":90,"tags-header-en":2064,"tags-footer-en":2093},77,{"id":5,"title":6,"body":7,"description":7,"extension":8,"meta":9,"name":13,"navigation":14,"path":15,"seo":16,"slug":17,"stem":17,"__hash__":18},"tag/pipeline.json","Pipeline",null,"json",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":11,"url":12},"public",{"posts":3},"https://blog.cg-wire.com/tag/pipeline/","Pipeline Automation",true,"/pipeline",{"description":7},"pipeline","qa7lmThepbMYAJ--m7WHgcY7p9lpC51BDn7imjnLoHY",[20,31,43,54,65,69,80],{"id":21,"title":22,"body":7,"description":7,"extension":8,"meta":23,"name":22,"navigation":14,"path":27,"seo":28,"slug":29,"stem":29,"__hash__":30},"tag/blender.json","Blender",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":24,"url":26},{"posts":25},15,"https://blog.cg-wire.com/tag/blender/","/blender",{"description":7},"blender","NGhuNL5GEEpGrAt0Y1hoiAFOBRkB8zKBFq90XcJR47E",{"id":32,"title":33,"body":7,"description":7,"extension":8,"meta":34,"name":38,"navigation":14,"path":39,"seo":40,"slug":41,"stem":41,"__hash__":42},"tag/company.json","Company",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":35,"url":37},{"posts":36},35,"https://blog.cg-wire.com/tag/company/","Company 
News","/company",{"description":7},"company","CSg2BLNemwEASf_RYxGHsJOXTxg3xNUldTg2Upc7ZC0",{"id":44,"title":45,"body":7,"description":7,"extension":8,"meta":46,"name":45,"navigation":14,"path":50,"seo":51,"slug":52,"stem":52,"__hash__":53},"tag/customer-stories.json","Customer Stories",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":47,"url":49},{"posts":48},3,"https://blog.cg-wire.com/tag/customer-stories/","/customer-stories",{"description":7},"customer-stories","vO2w4OuionBXR7-dsFeWvCucjpG7VuCqGV3NZOYyVw0",{"id":55,"title":56,"body":7,"description":7,"extension":8,"meta":57,"name":60,"navigation":14,"path":61,"seo":62,"slug":63,"stem":63,"__hash__":64},"tag/glossary.json","Glossary",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":58,"url":59},{"posts":25},"https://blog.cg-wire.com/tag/glossary/","Animation 
Glossary","/glossary",{"description":7},"glossary","ahYw1ulGqHh4X1VqtWmRXHQzLH25NsXPHgKJ8kwOMwA",{"id":5,"title":6,"body":7,"description":7,"extension":8,"meta":66,"name":13,"navigation":14,"path":15,"seo":68,"slug":17,"stem":17,"__hash__":18},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":67,"url":12},{"posts":3},{"description":7},{"id":70,"title":71,"body":7,"description":7,"extension":8,"meta":72,"name":71,"navigation":14,"path":76,"seo":77,"slug":78,"stem":78,"__hash__":79},"tag/production-management.json","Production Management",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":73,"url":75},{"posts":74},52,"https://blog.cg-wire.com/tag/production-management/","/production-management",{"description":7},"production-management","CK3g20iyLvLAN6TiR91N008bRCUY5R5T0A-dnAm-nfI",{"id":81,"title":82,"body":7,"description":7,"extension":8,"meta":83,"name":82,"navigation":14,"path":86,"seo":87,"slug":88,"stem":88,"__hash__":89},"tag/resources.json","Resources",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":84,"url":85},{"posts":48},"https://blog.cg-wire.com/tag/resources/","/resources",{"description":7},"resources","uMVK_T3_oD87qJ7NOx5cVBCT5uXC9zFj44ZZatYH5RQ",[91,124,149,174,198,224,250,275,302,327,353,379,406,431,458,484,510,533,558,584,617,641,666,691,716,741,766,791,816,841,866,891,916,941,966,991,1016,1041,1065,1090,1115,1141,116
6,1191,1216,1241,1266,1291,1316,1349,1374,1399,1424,1449,1474,1500,1527,1551,1577,1603,1628,1655,1680,1704,1731,1757,1782,1808,1834,1860,1885,1910,1936,1963,1988,2015,2041],{"id":92,"title":93,"authors":94,"body":7,"description":7,"extension":8,"html":100,"meta":101,"navigation":14,"path":117,"published_at":107,"seo":118,"slug":119,"stem":120,"tags":121,"__hash__":123,"uuid":102,"comment_id":103,"feature_image":104,"featured":105,"visibility":10,"created_at":106,"updated_at":107,"custom_excerpt":108,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":111,"primary_tag":112,"url":114,"excerpt":108,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":116},"ghost/posts:blender-python-event-automation.json","Automating Blender with Python Event Handlers",[95],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"630632b2ca5910003d4a70af","Basile Samel","basile","https://blog.cg-wire.com/author/basile/","\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⚙️\u003C/div>\u003Cdiv class=\"kg-callout-text\">Blender events let you automate workflows without adding extra steps for artists.\u003C/div>\u003C/div>\u003Cp>A render finishes at 2am, no one is watching, and the output sits in a temporary folder until someone remembers to move it. An artist exports a file with the wrong name. A camera gets left at the wrong focal length before a client delivery.\u003C/p>\u003Cp>All these issues add up. 
Fortunately, there is a simple solution for all of them: Blender's Python API gives you direct access to the events that drive the application. You can write code that listens for those events and acts on them automatically, without any artist involvement. By the end of this article, you will have two working examples you can adapt in your own pipeline.\u003C/p>\u003Chr>\u003Ch2 id=\"3-ways-to-listen-to-events-in-blender\">3 Ways To Listen to Events in Blender\u003C/h2>\u003Cp>Blender exposes three main mechanisms for responding to events through its Python API:\u003C/p>\u003Cul>\u003Cli>\u003Ccode>app.handlers\u003C/code> are passive listeners that fire when Blender performs a specific action: a render completes, a file loads, a frame changes. Your code registers a function and Blender calls it when the moment arrives. The artist does not need to do anything, so this is often the right tool for automating background pipeline tasks.\u003C/li>\u003Cli>Modal operators are active listeners. They take over Blender's event loop for a given window and intercept everything the artist does in real time, mouse clicks, key presses, cursor movement, until the operator finishes or is cancelled. This is the right tool when you want to build interactive tools that respond to what an artist is physically doing inside the viewport.\u003C/li>\u003Cli>The third way to listen to events, \u003Ccode>msgbus\u003C/code>, lets you subscribe to changes on specific data properties, like the active object or a scene setting. It is useful but narrower in scope. This article does not cover it.\u003C/li>\u003C/ul>\u003Cp>The two examples this article builds cover the most common studio automation needs: the first removes a background task from your artists entirely with a handler, the other replaces a slow, manual workflow with a single click with a modal operator.\u003C/p>\u003Chr>\u003Ch2 id=\"1-auto-export-on-render-complete\">1. 
Auto-Export on Render Complete\u003C/h2>\u003Cp>There are many useful handlers available, among them:\u003C/p>\u003Cul>\u003Cli>\u003Ccode>render_init\u003C/code> - fires when a render job starts\u003C/li>\u003Cli>\u003Ccode>render_pre\u003C/code> - fires before each frame renders\u003C/li>\u003Cli>\u003Ccode>render_post\u003C/code> - fires after each frame renders\u003C/li>\u003Cli>\u003Ccode>load_pre\u003C/code> / \u003Ccode>load_post\u003C/code> - before/after a \u003Ccode>.blend\u003C/code> file is loaded\u003C/li>\u003Cli>\u003Ccode>save_pre\u003C/code> / \u003Ccode>save_post\u003C/code> - before/after a \u003Ccode>.blend\u003C/code> file is saved\u003C/li>\u003C/ul>\u003Cp>Open Blender and switch to the Scripting workspace from the top tab bar. You will see the Python console on the left and the Text Editor on the right. Write your code in the Text Editor and run it with Alt+P.\u003C/p>\u003Cp>You can also \u003Ca href=\"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/\">use an addon to keep the script persistent\u003C/a>.\u003C/p>\u003Cp>Instead of building a full render pipeline tool, we'll start with something small to understand the main pattern: a minimal handler that fires the moment a render finishes and writes a timestamped confirmation to a file. 
It's a useful starting point for verifying that your handler is working correctly before building out more complex post-render logic:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\nfrom datetime import datetime\n\n@bpy.app.handlers.persistent\ndef on_render_complete(scene, depsgraph):\n    timestamp = datetime.now().strftime(\"%Y%m%d_%H%M%S\")\n    open(\"test.txt\", \"w\").write(f\"Completed: {timestamp}\\n\")\n\nbpy.app.handlers.render_complete.append(on_render_complete)\n\u003C/code>\u003C/pre>\u003Cp>The \u003Ccode>@bpy.app.handlers.persistent\u003C/code> decorator keeps the handler registered across file loads, so it survives scene changes during a session.\u003C/p>\u003Cp>On render complete, \u003Ccode>datetime.now()\u003C/code> captures the finish time and formats it as a compact timestamp string. That string is written directly to a hardcoded path, overwriting the file on each render.\u003C/p>\u003Cp>Lastly, \u003Ccode>bpy.app.handlers.render_complete.append\u003C/code> registers the function so Blender calls it automatically when a render finishes.\u003C/p>\u003Cp>To test this without waiting for a full render, use Render Single Frame and then check that \u003Ccode>test.txt\u003C/code> exists at the target path and contains the expected timestamp.\u003C/p>\u003Cp>You can then extend the handler to copy output files, record scene metadata, or trigger downstream workflows.\u003C/p>\u003Cp>The pattern is always the same as in the example: define a function, optionally decorate it with \u003Ccode>@bpy.app.handlers.persistent\u003C/code>, then append it to the relevant list.\u003C/p>\u003Chr>\u003Ch2 id=\"2-modal-operators\">2. Modal Operators\u003C/h2>\u003Cp>\u003Ccode>app.handlers\u003C/code> cannot help you when the task involves responding to what an artist is actively doing in the viewport. 
You need a modal operator instead.\u003C/p>\u003Cp>The use case here is a one-click camera framer: an artist clicks an object and the active camera repositions and reframes to a studio-standard composition. No manual camera adjustment and no guessing at focal length, so no inconsistency between artists.\u003C/p>\u003Cp>A modal operator is a class with two key methods:\u003C/p>\u003Cul>\u003Cli>\u003Ccode>invoke()\u003C/code> starts the operator and registers it with the window manager.\u003C/li>\u003Cli>\u003Ccode>modal()\u003C/code> receives every event that occurs after that and decides what to do with it. The operator stays active and keeps receiving events until it returns \u003Ccode>FINISHED\u003C/code> or \u003Ccode>CANCELLED\u003C/code>.\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nclass AutoFrameOperator(bpy.types.Operator):\n    bl_idname = \"studio.auto_frame\"\n    bl_label = \"Auto Frame Selected\"\n\n    def invoke(self, context, event):\n        context.window_manager.modal_handler_add(self)\n        return {'RUNNING_MODAL'}\n\n    def modal(self, context, event):\n        if event.type == 'LEFTMOUSE' and event.value == 'PRESS':\n            target = context.active_object\n            if target:\n                self.frame_camera_to(context, target)\n            return {'FINISHED'}\n\n        if event.type in {'RIGHTMOUSE', 'ESC'}:\n            return {'CANCELLED'}\n\n        return {'RUNNING_MODAL'}\n\n    def frame_camera_to(self, context, target):\n        camera = context.scene.camera\n        if not camera:\n            return\n        focal_length = 85\n        camera.data.lens = focal_length\n        \n        print(f\"Framed camera on: {target.name}\")\n\ndef register():\n    bpy.utils.register_class(AutoFrameOperator)\n\ndef unregister():\n    bpy.utils.unregister_class(AutoFrameOperator)\n\u003C/code>\u003C/pre>\u003Cp>We define a Blender operator called \u003Ccode>AutoFrameOperator\u003C/code>, a reusable 
action that Blender exposes under the ID \u003Ccode>studio.auto_frame\u003C/code>. When triggered, \u003Ccode>invoke\u003C/code> registers it as a modal handler, meaning it stays active and listens for user input rather than executing immediately.\u003C/p>\u003Cp>The \u003Ccode>modal\u003C/code> method is the event loop that runs on every interaction. A left click grabs the currently active object and passes it to \u003Ccode>frame_camera_to\u003C/code>, then exits. Right-click or Escape cancels cleanly, and anything else keeps the operator waiting.\u003C/p>\u003Cp>The \u003Ccode>RUNNING_MODAL\u003C/code> return value is what keeps the operator alive and listening. Any event that does not match a condition you handle should return \u003Ccode>RUNNING_MODAL\u003C/code> so the operator stays active. Returning \u003Ccode>PASS_THROUGH\u003C/code> instead tells Blender to process the event normally in addition to passing it to your operator, which is useful when you want the artist to still be able to navigate the viewport while the operator is running.\u003C/p>\u003Cp>\u003Ccode>frame_camera_to\u003C/code> is the core logic. It retrieves the scene's active camera and sets its focal length to 85mm, though the actual math to reposition the camera and properly frame the target object isn't implemented as it's out of the scope of this article.\u003C/p>\u003Cp>\u003Ccode>register\u003C/code> and \u003Ccode>unregister\u003C/code> are standard Blender add-on boilerplate that make the operator available when the script loads and remove it cleanly when it unloads.\u003C/p>\u003Cp>To invoke the operator after installing the script as an addon, we open the search menu with F3 and type \"Auto Frame Selected\". 
To bind it to a shortcut, we can simply add the following snippet inside the \u003Ccode>register()\u003C/code> function:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">wm = bpy.context.window_manager\nkc = wm.keyconfigs.addon\nif kc:\n    km = kc.keymaps.new(name='3D View', space_type='VIEW_3D')\n    kmi = km.keymap_items.new(\"studio.auto_frame\", type='F', value='PRESS', ctrl=True)\n\u003C/code>\u003C/pre>\u003Cp>It's important to namespace your shortcuts carefully. \u003Ccode>Ctrl+F\u003C/code> in the 3D viewport has no default binding in Blender, but check against your studio's existing configuration before deploying. A shortcut conflict that silently overrides a default Blender action is hard to debug and will frustrate your artists.\u003C/p>\u003Cp>One more rule to follow: keep the \u003Ccode>modal()\u003C/code> method lean. Heavy computation inside \u003Ccode>modal()\u003C/code> runs on every single event, which means every mouse movement. If your framing logic is expensive, offload it to a separate method and only call it when the relevant event fires, as shown above with \u003Ccode>frame_camera_to\u003C/code>.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>You now have two examples of tools that address real studio problems without adding steps to your artists' workflow.\u003C/p>\u003Cp>The render handler can remove a manual, error-prone handoff from your pipeline entirely. And the modal operator gives artists a consistent, one-click way to frame a camera to your studio standard.\u003C/p>\u003Cp>The same patterns extend further. A \u003Ccode>load_post\u003C/code> handler could enforce naming conventions the moment a file opens. A \u003Ccode>depsgraph_update_post\u003C/code> handler might flag objects that violate your scene budget. 
A render complete handler can fire an HTTP request to a webhook and post a Slack notification to your production channel when a shot is done.\u003C/p>\u003Cp>The event system is already there: you just have to start listening!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":102,"comment_id":103,"feature_image":104,"featured":105,"visibility":10,"created_at":106,"updated_at":107,"custom_excerpt":108,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":111,"primary_tag":112,"url":114,"excerpt":108,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":116},"3d817b37-7ce7-4d96-b479-e6915371fade","69d4d1fdc037da0001fce81f","https://images.unsplash.com/photo-1686157251060-3ea1f90857aa?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDExfHwzZCUyMGFuaW1hdGlvbiUyMGF1dG9tYXRpb258ZW58MHx8fHwxNzc1NTU1Mzc0fDA&ixlib=rb-4.1.0&q=80&w=2000",false,"2026-04-07T11:44:29.000+02:00","2026-04-07T11:54:18.000+02:00","Learn how to use Blender’s Python API to listen to events and automate workflows. 
This guide covers handlers and modal operators with practical examples for production pipelines.","\u003C!-- Prism.js theme (syntax colors) -->\n\u003Clink rel=\"stylesheet\" href=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/themes/prism.min.css\">\n\n\u003C!-- Toolbar plugin styles (for the Copy button) -->\n\u003Clink rel=\"stylesheet\" href=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/toolbar/prism-toolbar.min.css\">\n\n\u003C!-- (Optional) Line-numbers styles -->\n\u003C!-- \u003Clink rel=\"stylesheet\" href=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/line-numbers/prism-line-numbers.min.css\"> -->\n\n\u003Cstyle>\n/* Tweak code block appearance a bit (keeps theme styles intact) */\npre[class*=\"language-\"] {\n  border-radius: 8px;\n  overflow: auto;\n}\n\n/* ✅ Always wrap long lines (no horizontal scroll needed) */\npre[class*=\"language-\"],\npre[class*=\"language-\"] code {\n  white-space: pre-wrap;    /* preserve indentation but allow wrapping */\n  word-break: break-word;   /* break long tokens if needed */\n  overflow-wrap: anywhere;  /* last-resort wrapping */\n}\n\n/* Improve toolbar (Copy button) spacing/looks */\ndiv.code-toolbar > .toolbar {\n  opacity: 1;\n  right: 6px;\n  top: 6px;\n}\ndiv.code-toolbar > .toolbar .toolbar-item > button {\n  background: #1f2937;\n  color: #fff;\n  border-radius: 6px;\n  padding: 6px 10px;\n  font-size: 12px;\n}\ndiv.code-toolbar > .toolbar .toolbar-item > button:hover {\n  filter: brightness(1.1);\n}\n\n/* (Optional) Auto line numbers on all code blocks\n   If you want line numbers, uncomment both this and the CSS/JS includes above/below. 
*/\n/*\npre[class*=\"language-\"] {\n  padding-left: 3.25em;\n}\n*/\n\u003C/style>","\u003C!-- Prism core -->\n\u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/components/prism-core.min.js\">\u003C/script>\n\u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/autoloader/prism-autoloader.min.js\">\u003C/script>\n\n\u003C!-- Toolbar + Copy-to-Clipboard plugins -->\n\u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/toolbar/prism-toolbar.min.js\">\u003C/script>\n\u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/copy-to-clipboard/prism-copy-to-clipboard.min.js\">\u003C/script>\n\n\u003C!-- (Optional) Line-numbers plugin -->\n\u003C!-- \u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/line-numbers/prism-line-numbers.min.js\">\u003C/script> -->\n\n\u003Cscript>\n  // Configure autoloader to fetch language definitions (bash, python, etc.)\n  window.Prism = window.Prism || {};\n  Prism.plugins = Prism.plugins || {};\n  Prism.plugins.autoloader = Prism.plugins.autoloader || {};\n  Prism.plugins.autoloader.languages_path = 'https://cdn.jsdelivr.net/npm/prismjs@1.29.0/components/';\n\n  // OPTIONAL: If you want line numbers on every block automatically, uncomment:\n  /*\n  document.addEventListener('DOMContentLoaded', function () {\n    document.querySelectorAll('pre > code').forEach(function (code) {\n      const pre = code.parentElement;\n      pre.classList.add('line-numbers');\n    });\n  });\n  
*/\n\u003C/script>",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"5fff0e54653a0c003924f7f2","https://blog.cg-wire.com/blender-python-event-automation/",5,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@hiestudio?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">HI! ESTUDIO\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-python-event-automation",{"title":93},"blender-python-event-automation","posts/blender-python-event-automation",[122],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"9JCrUEGsAvw10rXO5Icp6lhnx5Mume3AbiZ5__SpE9U",{"id":125,"title":126,"authors":127,"body":7,"description":7,"extension":8,"html":129,"meta":130,"navigation":14,"path":142,"published_at":135,"seo":143,"slug":144,"stem":145,"tags":146,"__hash__":148,"uuid":131,"comment_id":132,"feature_image":133,"featured":105,"visibility":10,"created_at":134,"updated_at":135,"custom_excerpt":136,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":137,"primary_tag":138,"url":139,"excerpt":136,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":141},"ghost/posts:kitsu-telegram-bot-integration.json","Integrating Messaging Platforms with Kitsu Production Data",[128],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">💬\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn production events into instant chat notifications and commands with a Kitsu messaging bot.\u003C/div>\u003C/div>\u003Cp>Chat interfaces dominate the modern workplace: production 
teams coordinate in threads, approvals happen in emails, and LLM-powered assistants are becoming part of daily operations.\u003C/p>\u003Cp>The real problem is proper integration. A message that says \"Shot ready for review\" should let a supervisor approve that shot and update the status in Kitsu under the correct user in an ideal world, but this would require a small backend service, a secure API connection to Kitsu, and a reliable mapping between chat users and Kitsu users. The good news is, you can already do so with Kitsu!\u003C/p>\u003Cp>A simple starting point is a Telegram bot with one command like /hello. The bot links the chat user to their Kitsu account once, then replies through the API and displays them in chat. Whenever an event happens in Kitsu, the bot notified you. That small integration proves the concept, and that's exactly what we're going to build in this article.\u003C/p>\u003Chr>\u003Ch2 id=\"why-custom-messaging-integrations\">Why Custom Messaging Integrations\u003C/h2>\u003Cp>Custom messaging integrations centralize communication around a single source of truth. Instead of supervisors forwarding emails about a task status change, the update can be pushed automatically to the relevant team channel. For example, when a lighting task switches to \"retake\" in Kitsu, the lighting Telegram group instantly receives a structured message with the shot name, assignee, and deadline. The production tracker becomes proactive.\u003C/p>\u003Cp>User experience improves when raw database events are reshaped into readable summaries. Artists should not need to dig through activity logs to understand what changed. A daily digest sent to a Telegram channel can summarize approvals, new assignments, and upcoming deadlines in plain language. 
That digest can be generated directly from the Kitsu API and delivered automatically every evening to turn production data into something people actually consume.\u003C/p>\u003Cp>Automation is where this approach truly pays off, however. Messaging platforms can act as lightweight command interfaces. A coordinator typing \"/late_shots\" in Telegram can trigger a query against Kitsu and receive an instant report of overdue tasks. A lead typing \"/assign SH010 alice\" can trigger a backend call that updates the assignment in Kitsu. Chat becomes an operational surface for the production database.\u003C/p>\u003Cp>But as we said, let's start simple with a Telegram bot that interacts with Kitsu.\u003C/p>\u003Chr>\u003Ch2 id=\"1-create-a-new-telegram-bot\">1. Create a New Telegram Bot\u003C/h2>\u003Cp>Start by creating a dedicated bot in Telegram. Separation keeps credentials clean and avoids future security headaches when the integration is handed over to production IT.\u003C/p>\u003Cp>Open Telegram and search for BotFather, which is the official bot for managing other bots.\u003C/p>\u003Cp>Initiate a chat and send \u003Ccode>/newbot\u003C/code>. The flow is straightforward: provide a human-readable name like “Kitsu Notifications” and then a unique username such as \u003Ccode>kitsu_pipeline_bot\u003C/code>. The username must end with “bot,” and it has to be globally unique, so expect to try a few variations in a studio environment.\u003C/p>\u003Cp>BotFather returns an API token. Treat this token as a production secret, not as a convenience string to paste into Slack or commit to Git. Store it in your environment configuration system. 
If this token leaks, anyone can send messages as your production bot, which quickly turns from amusing to catastrophic when producers start receiving spam.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-10.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"976\" height=\"925\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/03/image-10.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-10.png 976w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Before wiring this into Kitsu’s event system, let's validate the token manually.\u003C/p>\u003Cp>Search for your newly created bot by its username inside Telegram and start a conversation with it. Send a simple \"/start\" so Telegram registers your chat.\u003C/p>\u003Cp>To retrieve your client (chat) ID, call the \u003Ccode>getUpdates\u003C/code> endpoint with curl using the token. For example:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">curl https://api.telegram.org/bot&lt;TOKEN&gt;/getUpdates\n\u003C/code>\u003C/pre>\u003Cp>The response will contain a JSON payload with a \u003Ccode>chat\u003C/code> object and an \u003Ccode>id\u003C/code> field. That numeric ID is what your integration will target. In a real pipeline scenario, this might be the chat ID of a supervisors group rather than an individual user.\u003C/p>\u003Cp>Now test outbound messaging directly. Use curl to send a message to yourself:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">curl -X POST https://api.telegram.org/bot&lt;TOKEN&gt;/sendMessage -d chat_id=&lt;CHAT_ID&gt; -d text=\"Kitsu integration test\"\n\u003C/code>\u003C/pre>\u003Cp>If the message appears in Telegram, the token and chat ID are valid. 
This manual verification step saves hours of debugging later when you plug the same call into a Kitsu event hook and something silently fails.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-11.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"976\" height=\"925\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/03/image-11.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-11.png 976w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>With the bot validated, the next step is to connect it to Kitsu’s event system so that, for example, when a new asset is created, a message is pushed automatically to the supervisors’ Telegram group.\u003C/p>\u003Cp>The exact same \u003Ccode>sendMessage\u003C/code> endpoint you tested with curl becomes part of a small service or serverless function triggered by Kitsu.\u003C/p>\u003Chr>\u003Ch2 id=\"2-set-a-kitsu-event-listener\">2. Set a Kitsu Event Listener\u003C/h2>\u003Cp>Next, we need to subscribe to real-time events from Kitsu. 
The objective is simple: react the moment production data changes.\u003C/p>\u003Cp>We can use Kitsu's \u003Ccode>gazu\u003C/code> Python SDK to open a websocket connection and listen for task update events.\u003C/p>\u003Cp>For example, connect to the Kitsu event stream and filter for asset creation events:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu \n\ngazu.set_host(\"http://localhost:80/api\")\ngazu.set_event_host(\"http://localhost:80/api\")\ngazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\n\ndef my_callback(data):\n    print(\"Asset created %s\" % data[\"asset_id\"])\n\nevent_client = gazu.events.init()\ngazu.events.add_listener(event_client, \"asset:new\", my_callback)\ngazu.events.run_client(event_client)\n\u003C/code>\u003C/pre>\u003Cp>We use the \u003Ccode>gazu\u003C/code> library to connect to a locally hosted Kitsu API server at \u003Ccode>http://localhost:80/api\u003C/code>, authenticate with the provided admin credentials, and then listen for real-time events.\u003C/p>\u003Cp>The snippet defines a callback function \u003Ccode>my_callback\u003C/code> that prints the ID of a newly created asset whenever it is triggered.\u003C/p>\u003Cp>After initializing an event client with \u003Ccode>gazu.events.init()\u003C/code>, the script registers the callback to listen for the \u003Ccode>\"asset:new\"\u003C/code> event (which fires whenever a new asset is created in the system).\u003C/p>\u003Cp>\u003Ccode>gazu.events.run_client(event_client)\u003C/code> starts the event loop that keeps the script running so that each time a new asset is added to Kitsu, the callback executes and prints its \u003Ccode>asset_id\u003C/code>.\u003C/p>\u003Chr>\u003Ch2 id=\"3-use-the-telegram-api-to-send-a-message\">3. Use the Telegram API to Send a Message\u003C/h2>\u003Cp>With events flowing in, push messages out using Telegram’s \u003Ccode>sendMessage\u003C/code> endpoint like we did earlier for testing. 
The API is just an HTTP POST that includes the bot token, chat ID, and text payload.\u003C/p>\u003Cp>Encapsulate that in a small utility function:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import requests\nimport os\n\nTELEGRAM_BOT_TOKEN = os.getenv('TELEGRAM_BOT_TOKEN')\nTELEGRAM_CHAT_ID = os.getenv('TELEGRAM_CHAT_ID')\n\ndef send_telegram_message(text):\n    url = f\"https://api.telegram.org/bot{TELEGRAM_BOT_TOKEN}/sendMessage\"\n    payload = {\n        \"chat_id\": TELEGRAM_CHAT_ID,\n        \"text\": text,\n        \"parse_mode\": \"Markdown\"\n    }\n\n    response = requests.post(url, json=payload, timeout=5)\n\n    if not response.ok:\n        raise RuntimeError(\n            f\"Telegram API error {response.status_code}: {response.text}\"\n        )\n\u003C/code>\u003C/pre>\u003Cp>Note that we defined secret environment variables to prevent persisting them in a Git repository.\u003C/p>\u003Cp>Then call it from the event callback:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">from your_telegram_module import send_telegram_message\n\ndef my_callback(data):\n    send_telegram_message(\"Asset created %s\" % data[\"asset_id\"])\n\u003C/code>\u003C/pre>\u003Cp>To test our event listener:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">TELEGRAM_BOT_TOKEN=&lt;TELEGRAM_BOT_TOKEN&gt; TELEGRAM_CHAT_ID=&lt;CHAT_ID&gt; python server.py\n\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"4-receiving-messages-with-a-custom-kitsu-api-endpoint\">4. Receiving Messages with a Custom Kitsu API Endpoint\u003C/h2>\u003Cp>Notifications are useful, but bidirectional communication is where the integration becomes truly useful.\u003C/p>\u003Cp>To do so, we need to extend the Kitsu backend with a custom plugin that registers a new route like \u003Ccode>/plugins/telegram/webhook\u003C/code>. 
Please refer to our official guide on Developing Kitsu Plugins for in-depth steps.\u003C/p>\u003Cp>The manifest will look like this:\u003C/p>\u003Cpre>\u003Ccode class=\"language-toml\">id = \"telegram\"\nname = \"Telegram Bot\"\ndescription = \"Telegram Bot\"\nversion = \"0.1.0\"\nmaintainer = \"Frank Rousseau &lt;frank@cg-wire.com&gt;\"\nwebsite = \"kitsu.cloud\"\nlicense = \"AGPL-3.0-only\"\nmaintainer_name = \"Frank Rousseau\"\nmaintainer_email = \"frank@cg-wire.com\"\nfrontend_project_enabled = true\nfrontend_studio_enabled = true\nicon = \"telegram\"\n\u003C/code>\u003C/pre>\u003Cp>And our custom route will parse incoming commands and map them to explicit backend actions:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">from flask_restful import Resource\n\nclass WebhookResource(Resource):\n    def post(self):\n        args = self.get_args([\n            (\"message\", {}, True),\n            (\"chat\", {}, True),\n        ])\n        \n        message = args['message']\n        chat_id = args['chat'].get(\"id\")\n        text = message.get(\"text\", \"\")\n    \n        if text == \"/hello\":    \n            send_telegram_message(\"it works\")\n    \n        return jsonify({\"status\": \"ok\"})\n\u003C/code>\u003C/pre>\u003Cp>For the sake of simplicity we define a single command \u003Ccode>/hello\u003C/code>, but you can create many more and use Kitsu services to query production data.\u003C/p>\u003Cp>Deterministic commands are easier to test, log, and secure. You can go a step further and call an LLM to map a natural language request into a command.\u003C/p>\u003Cp>We just need to register the route in the main entrypoint \u003Ccode>__init__.py\u003C/code>:\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">from . 
import resources\n\n\nroutes = [(f\"/telegram/webhook\", resources.WebhookResource)]\n\u003C/code>\u003C/pre>\u003Cp>After packaging and installing your plugin on your Kitsu server instance, it's time to tell your Telegram bot how to reach it.\u003C/p>\u003Cp>If you use a local development environment, you can expose the server via tunnel. With ngrok for example, if your server runs on port 5000:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ngrok http 5000\n\u003C/code>\u003C/pre>\u003Cp>You then need to configure your Telegram bot webhook to point to that URL:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">curl -X POST \"https://api.telegram.org/bot&lt;YOUR_BOT_TOKEN&gt;/setWebhook\" \\\n     -H \"Content-Type: application/json\" \\\n     -d '{\"url\": \"https://&lt;random&gt;.ngrok-free.app/plugins/telegram/webhook\"}'\n\u003C/code>\u003C/pre>\u003Cp>Now send \u003Ccode>/hello\u003C/code> to your bot in your Telegram chat and see the result:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-12.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"525\" height=\"560\">\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>A custom messaging integration with Kitsu always follows a similar pattern: create a bot on a messaging platform, subscribe to Kitsu events, send structured notifications, and expose backend routes to handle incoming messages.\u003C/p>\u003Cp>But that's not all: consider extending your Kitsu plugin with views!\u003C/p>\u003Cp>For example, to display bot activity or recent interactions directly in the dashboard. Supervisors working inside Kitsu will be able to see which alerts were sent and which commands were triggered. 
The possibilities are limitless!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":131,"comment_id":132,"feature_image":133,"featured":105,"visibility":10,"created_at":134,"updated_at":135,"custom_excerpt":136,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":137,"primary_tag":138,"url":139,"excerpt":136,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":141},"16ecaf7a-bc5a-4d86-b08b-bf62ac7701e4","69ae62c591be760001bf7d81","https://images.unsplash.com/photo-1577563908411-5077b6dc7624?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fG1lc3NhZ2V8ZW58MHx8fHwxNzczMDM5MzU5fDA&ixlib=rb-4.1.0&q=80&w=2000","2026-03-09T07:03:49.000+01:00","2026-03-09T08:00:23.000+01:00","Learn how to integrate Kitsu with Telegram by building a bot that listens to production events and sends notifications. 
This guide explains how to connect Kitsu events, trigger messages, and create simple chat commands for production workflows.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/kitsu-telegram-bot-integration/",7,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@lunarts?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Volodymyr Hryshchenko\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/kitsu-telegram-bot-integration",{"title":126},"kitsu-telegram-bot-integration","posts/kitsu-telegram-bot-integration",[147],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"CSt4jGbywZgG5gwq1J_TVNugmU5Ed34skMslwCBWcmM",{"id":150,"title":151,"authors":152,"body":7,"description":7,"extension":8,"html":154,"meta":155,"navigation":14,"path":167,"published_at":160,"seo":168,"slug":169,"stem":170,"tags":171,"__hash__":173,"uuid":156,"comment_id":157,"feature_image":158,"featured":105,"visibility":10,"created_at":159,"updated_at":160,"custom_excerpt":161,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":162,"primary_tag":163,"url":164,"excerpt":161,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":166},"ghost/posts:retopology-animation-blender-guide.json","Why Retopology Matters for Animation Pipelines",[153],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧩\u003C/div>\u003Cdiv class=\"kg-callout-text\">&nbsp;Retopology turns messy 3D meshes into animation-ready assets.\u003C/div>\u003C/div>\u003Cp>AI tools can now generate 3D models in minutes, but they usually produce messy topology, 
meaning the way polygons are arranged across the surface is uneven and poorly structured. It might look fine on the surface, but it'll break the moment you start trying to animate it.\u003C/p>\u003Cp>If you're doing any kind of animation or rendering, assume \u003Cstrong>you will need retopology\u003C/strong>.\u003C/p>\u003Cp>If you don't know where to start, we've got you covered. In this article, we'll go through the process step-by-step and explain different tools you can use to make it easier.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-retopology\">What's Retopology\u003C/h2>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Retopology is the process of rebuilding the surface topology of a 3D model to create a cleaner arrangement of polygons over an existing sculpt\u003C/strong>\u003C/b> so it deforms correctly in animation.\u003C/div>\u003C/div>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog/blob/main/drafts/retopology/index.md?ref=blog.cg-wire.com#whats-retopology\">\u003C/a>\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-5.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"560\" height=\"220\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Manual\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>For example, we don't usually animate the dense sculpt that comes out of ZBrush directly. 
Instead, we build a lighter, structured mesh on top of it.\u003C/p>\u003Cp>A mesh is a 3D object made of vertices (points), edges (lines between points), and faces (surfaces).\u003C/p>\u003Cp>Before we even think about rigging, we inspect the mesh in wireframe mode and identify dense clusters, stretched polygons, and chaotic edge flow (the direction edges follow across the surface).\u003C/p>\u003Cp>For a character, for example, we could rebuild the shoulder using evenly spaced quads (four-sided polygons) instead of triangles so that the arm could rotate without pinching. This is retopology.\u003C/p>\u003Chr>\u003Ch2 id=\"why-retopology-is-key\">Why Retopology Is Key\u003C/h2>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog/blob/main/drafts/retopology/index.md?ref=blog.cg-wire.com#why-retopology-is-key\">\u003C/a>\u003C/p>\u003Cp>Retopology rebuilds a model's surface with clean geometry, and you need it if you want \u003Cstrong>assets that are maintainable and reusable\u003C/strong> across productions. Animators don't ship dense sculpt topology downstream. Instead, they rebuild it with clean edge loops so that the next animator or rigger can understand and modify it quickly.\u003C/p>\u003Cp>\u003Cstrong>Good retopology also makes animation easier because deformation becomes predictable.\u003C/strong> Deformation is how a mesh changes shape when a joint rotates, and support it with evenly spaced quads around elbows, knees, and mouths. 
If you place five to seven radial edge loops around a joint, you give the skin enough geometry to bend without collapsing.\u003C/p>\u003Cp>Lastly, \u003Cstrong>controlling polygon density reduces rendering cost.\u003C/strong> A polygon is a single face of geometry, and more polygons means more data to process, so we usually concentrate on details where silhouettes change and keep flat areas lightweight to cut costs.\u003C/p>\u003Cp>\u003Cstrong>Retopology always comes in handy at some point\u003C/strong>, whether it's to fix a 3D model or create different levels of detail (LOD), so roll up your sleeves and let's dive in.\u003C/p>\u003Chr>\u003Ch2 id=\"1-back-up-your-3d-model\">1. Back Up Your 3D Model\u003C/h2>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog/blob/main/drafts/retopology/index.md?ref=blog.cg-wire.com#1-back-up-your-3d-model\">\u003C/a>\u003C/p>\u003Cp>First, \u003Cstrong>it's important you back up your model before you touch retopology\u003C/strong>, every single time.\u003C/p>\u003Cp>Automated retopology tools rebuild topology from scratch, which means they overwrite or delete the original mesh data. It happens that artists run an auto-retopo pass at the end of a long day, only to realize the new edge flow breaks deformation around the shoulders and the original sculpt is gone.\u003C/p>\u003Cp>Don't rely on undo. Save a clean duplicate of the file and archive the current mesh in your scene before running anything destructive.\u003C/p>\u003Cp>In production, also create a new version in Kitsu to keep changes traceable and recoverable. 
That way, if the new topology fails in rigging tests, you can roll back in minutes instead of asking IT for a file restore.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-6.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1438\" height=\"809\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/03/image-6.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/03/image-6.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-6.png 1438w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Treat backups as part of the retopology process itself! A two-minute version bump and duplicate save can protect days of sculpting and keep the pipeline moving when supervisors ask to compare \"before\" and \"after\" meshes.\u003C/p>\u003Chr>\u003Ch2 id=\"2-general-process\">2. General Process\u003C/h2>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog/blob/main/drafts/retopology/index.md?ref=blog.cg-wire.com#2-general-process\">\u003C/a>\u003C/p>\u003Cp>The general workflow is simple: clean the sculpt, voxel remesh for stability, quad remesh for structure, then manually refine deformation areas like shoulders and hips.\u003C/p>\u003Cp>Always test with quick skin weights and extreme poses early.\u003C/p>\u003Chr>\u003Ch2 id=\"3-automated-retopology-with-remeshing\">3. Automated Retopology With Remeshing\u003C/h2>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog/blob/main/drafts/retopology/index.md?ref=blog.cg-wire.com#3-automated-retopology-with-remeshing\">\u003C/a>\u003C/p>\u003Cp>If a creature comes in with 8 million polygons and chaotic triangles, \u003Cstrong>we don't start hand-retopo immediately\u003C/strong>. 
Instead, we run an automated remesh pass to establish structure first.\u003C/p>\u003Cp>To do so, Blender proposes two remeshing algorithms: Voxel and quad.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-7.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1280\" height=\"720\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/03/image-7.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/03/image-7.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-7.png 1280w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Sofia Pahaoja on Medium\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>\u003Cstrong>Voxel remeshing\u003C/strong> (VDB Remesh) works by converting the mesh into a 3D grid of tiny cubes (voxels), rebuilding the surface based on volume rather than original edge flow.\u003C/p>\u003Cp>The produced evenly distributed geometry is why it's great for fixing holes, non-manifold geometry (a structure that cannot be unfolded into a 2D plane with consistent surface normals), and intersecting parts. You use voxel when you need a fresh base mesh and don't care much about preserving existing topology, so the result can be messy.\u003C/p>\u003Cp>On the other hand, you can use \u003Cstrong>quad remeshing\u003C/strong> when you want animation-friendly edge loops. Quad remeshing analyzes surface curvature and generates quads which deform predictably under skinning. QuadriFlow follows the shape of your model.\u003C/p>\u003Cp>Naturally, you can combine the two. 
On a facial rig for example, you could run quad remesh after voxel cleanup, then adjust guides to force loops around the eyes and mouth.\u003C/p>\u003Cp>It's important to keep in mind that \u003Cstrong>automated retopology is more often than not a starting point, not a final deliverable.\u003C/strong>\u003C/p>\u003Chr>\u003Ch2 id=\"4-manual-retopology-with-poly-build\">4. Manual Retopology With Poly Build\u003C/h2>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog/blob/main/drafts/retopology/index.md?ref=blog.cg-wire.com#4-manual-retopology-with-poly-build\">\u003C/a>\u003C/p>\u003Cp>\u003Cstrong>Manual retopology with the Poly Build tool\u003C/strong> is what you reach for when deformation quality is key, especially on hero characters that will carry close-ups.\u003C/p>\u003Cp>In Blender, the Poly Build tool lets you draw new polygons directly on the surface of a dense mesh, snapping every vertex to the sculpt.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-8.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1078\" height=\"516\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/03/image-8.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/03/image-8.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-8.png 1078w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Nation\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>To keep the example of the facial rig, an artist could rebuild the mouth area by placing quads (four-sided polygons) around the lips first to make sure edge loops follow the smile lines. 
It would give the rigger predictable loops for blendshapes and avoid collapsing geometry during extreme phonemes.\u003C/p>\u003Cp>\u003Cstrong>You can also use other modifiers like the Subdivision Surface Modifier or the Multiresolution Modifier\u003C/strong> to perform specific jobs.\u003C/p>\u003Cp>In this step, experience matters a lot. Most animators learn by studying the topology of high-quality models and re-applying the same principles to their own models. It's tacit knowledge, so practice is key!\u003C/p>\u003Chr>\u003Ch2 id=\"5-measuring-retopology-performance\">5. Measuring Retopology Performance\u003C/h2>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog/blob/main/drafts/retopology/index.md?ref=blog.cg-wire.com#5-measuring-retopology-performance\">\u003C/a>\u003C/p>\u003Cp>Retopology is all about aesthetics, but \u003Cstrong>it's good practice to measure retopology performance with numbers\u003C/strong> by counting meshes in your scene. This way you can assess the amount of work a retopology requires and track your progress.\u003C/p>\u003Cp>In Blender, open the Outliner and check how many mesh objects are present, then enable Statistics in the viewport overlays to see vertex and face counts in real time.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-9.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"317\" height=\"159\">\u003C/figure>\u003Cp>A character model can look light, but the stats could show 120k faces across separate clothing meshes and simply merging static accessories and removing hidden interior faces could drop the count substantially before starting more complex retopology operations.\u003C/p>\u003Cp>It's also important to consider separate mesh counts depending on LOD strategies.\u003C/p>\u003Cp>LOD, or Level of Detail, means creating multiple versions of the same asset at different resolutions so the 
engine swaps them based on camera distance.\u003C/p>\u003Cp>Reducing mesh count is also about optimizing LODs for performance at runtime, so we can retopologize key deformation areas like shoulders and hips so the lower LOD still bends correctly during animation without spending too much time on details. Context is important.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog/blob/main/drafts/retopology/index.md?ref=blog.cg-wire.com#conclusion\">\u003C/a>\u003C/p>\u003Cp>AI-generated 3D models have made it incredibly fast to go from idea to mesh. But speed without structure comes at a cost. Clean topology is what transforms a raw, messy asset into something production-ready.\u003C/p>\u003Cp>In this guide, we covered what retopology is, why it matters for maintainability, animation, and rendering performance, and how to approach it step by step inside Blender.\u003C/p>\u003Cp>You've seen why backing up your original mesh is critical. From there, we explored automated retopology using remeshing tools like Voxel and Quad methods for fast results, as well as manual retopology with modifiers when precision matters most. Finally, we looked at how to measure performance by analyzing mesh counts and understanding the trade-offs between LODs and topology.\u003C/p>\u003Cp>Retopology isn't just a cleanup step. And while we demonstrated the process in Blender, the same principles apply across all major DCC tools: whether you're working in Blender, Maya, Houdini, or any other 3D software, the fundamentals remain the same.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! 
We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":156,"comment_id":157,"feature_image":158,"featured":105,"visibility":10,"created_at":159,"updated_at":160,"custom_excerpt":161,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":162,"primary_tag":163,"url":164,"excerpt":161,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":166},"05e17976-e873-4ea1-b896-fef84f99fcd7","69ae62ca91be760001bf7d8d","https://images.unsplash.com/photo-1590285359328-dce54ee24c1c?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDQwfHxhbmltYXRpb24lMjBtb2RlbHxlbnwwfHx8fDE3NzMwMzgxMDN8MA&ixlib=rb-4.1.0&q=80&w=2000","2026-03-09T07:03:54.000+01:00","2026-03-09T07:41:49.000+01:00","Learn what retopology is and why it’s essential for animation. 
This guide walks through the retopology workflow in Blender, from automated remeshing to manual topology cleanup for production-ready 3D models.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/retopology-animation-blender-guide/",6,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@jhc?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">James Coleman\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/retopology-animation-blender-guide",{"title":151},"retopology-animation-blender-guide","posts/retopology-animation-blender-guide",[172],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"1nVpTXkQokkNvujUo-ojOWyHqGGI1Qw-Q6oHLAzOgBg",{"id":175,"title":176,"authors":177,"body":7,"description":7,"extension":8,"html":179,"meta":180,"navigation":14,"path":191,"published_at":185,"seo":192,"slug":193,"stem":194,"tags":195,"__hash__":197,"uuid":181,"comment_id":182,"feature_image":183,"featured":105,"visibility":10,"created_at":184,"updated_at":185,"custom_excerpt":186,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":187,"primary_tag":188,"url":189,"excerpt":186,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":190},"ghost/posts:kitsu-webhooks-pipeline-automation.json","Using Kitsu Webhooks to Trigger Pipeline Actions",[178],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⚡\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn production events into instant pipeline actions with Kitsu webhooks.\u003C/div>\u003C/div>\u003Cp>As a studio grows, the cracks in a manual pipeline get louder: an artist 
publishes an asset, a supervisor approves a shot, a task flips to \u003Cem>Done\u003C/em>, but somewhere down the line, another tool is still waiting to be told. Those delays add up.\u003C/p>\u003Cp>Kitsu's Event API changes the game by broadcasting what's happening in production the moment it happens. No polling, no guesswork. Just real-time signals you can act upon.\u003C/p>\u003Cp>With webhooks, you can trigger automated actions the instant production data changes, like \u003Ca href=\"https://blog.cg-wire.com/blender-programmatic-rendering/\">launching renders\u003C/a>, syncing tracking tools, notifying teams, or updating downstream systems without human hand-offs.\u003C/p>\u003Cp>In this article, we'll break down how to set them up and put them to work, with a practical, studio-tested example you can drop into a real pipeline.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/kitsu-webhooks%20?ref=blog.cg-wire.com\">https://github.com/cgwire/blog-tutorials/tree/main/kitsu-webhooks%20\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"why-webhooks\">Why Webhooks\u003C/h2>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/02/data-src-image-9c7a79f2-b129-45df-bea5-52e3d0e07988.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"900\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/02/data-src-image-9c7a79f2-b129-45df-bea5-52e3d0e07988.png 600w, 
https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/02/data-src-image-9c7a79f2-b129-45df-bea5-52e3d0e07988.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/02/data-src-image-9c7a79f2-b129-45df-bea5-52e3d0e07988.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Polling the API every few minutes is like asking production for updates by shouting across the floor: it's slow, noisy, and easy to miss at the exact moment something matters.\u003C/p>\u003Cp>Webhooks flip that model: instead of checking whether Kitsu changed, Kitsu tells your pipeline immediately when it does.\u003C/p>\u003Cp>This brings several production benefits in practice: a modeler creates a new prop in Kitsu, and within seconds, your asset build system spins up the correct directory structure on the server, registers the asset in your DCC tools, and makes it visible to layout. No artist has to copy a name or click a button.\u003C/p>\u003Cp>Later in the schedule, a lighting task moves to Done. That single status change can trigger your render management system to submit the shot automatically, using the latest approved files and the correct render settings for the show. By the time anyone notices the task is finished, frames are already rendering.\u003C/p>\u003Cp>When an artist publishes a file, the webhook can push that version straight into your review stack. The media is transcoded, uploaded, and attached to the correct shot before the supervisor opens their inbox. Reviews happen sooner, notes come back faster, and work keeps flowing instead of waiting for someone to remember the next step.\u003C/p>\u003Cp>This is what webhooks buy you: production data turning directly into action. 
Fewer hand-offs, tighter feedback loops, and a pipeline that reacts at the same speed your artists work.\u003C/p>\u003Chr>\u003Ch2 id=\"available-events\">Available events\u003C/h2>\u003Cp>Kitsu emits events for all production actions covered by \u003Ca href=\"https://gazu.cg-wire.com/data?ref=blog.cg-wire.com\">available data models\u003C/a>:\u003C/p>\u003Cul>\u003Cli>Asset creation and updates\u003C/li>\u003Cli>Shot creation and updates\u003C/li>\u003Cli>Task status changes\u003C/li>\u003Cli>Preview file creation and publication\u003C/li>\u003Cli>People management\u003C/li>\u003Cli>Organization changes\u003C/li>\u003Cli>Shot and sequence updates\u003C/li>\u003C/ul>\u003Cp>Each event carries structured data (IDs, timestamps, user information) so you can precisely identify what changed and react accordingly: a real-time production log you can subscribe to!\u003C/p>\u003Chr>\u003Ch2 id=\"1-create-an-event-listener\">1. Create an event listener\u003C/h2>\u003Cp>The first step is to register an event listener using the Kitsu Python client (\u003Ccode>gazu\u003C/code>). This listener acts like a webhook endpoint: it waits for events and calls your callback function when they occur.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">import gazu\n\ngazu.set_host(\"http://localhost/api\")\ngazu.set_event_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\ndef my_callback(data):\n    print(\"Asset created %s\" % data[\"asset_id\"])\n\nevent_client = gazu.events.init()\ngazu.events.add_listener(event_client, \"asset:new\", my_callback)\ngazu.events.run_client(event_client)\n\u003C/code>\u003C/pre>\u003Cp>First, we import Gazu, the official Python client for Kitsu, and configure it to talk to a Kitsu server running locally. 
Both \u003Ccode>set_host\u003C/code> and \u003Ccode>set_event_host\u003C/code> point to the same API URL: the first is used for standard REST calls, while the second is specifically for the event (websocket) endpoint. In production, it's recommended to set up the two in different threads because listening to events is blocking. But for the sake of simplicity, we do it all in one endpoint in this tutorial.\u003C/p>\u003Cp>Next, we authenticate as a user. Calling \u003Ccode>gazu.log_in\u003C/code> logs in with the provided credentials and establishes a session so the client is authorized to receive events from Kitsu.\u003C/p>\u003Cp>The \u003Ccode>my_callback\u003C/code> function defines how your pipeline reacts when an event is received. It takes the event payload as input and, in this case, simply prints the ID of the newly created asset. In a mid-size animation studio, this callback could, for example, trigger a script that creates a standardized directory structure on the file server whenever a new asset is added in Kitsu. Artists no longer need to set this up manually, and naming conventions stay consistent.\u003C/p>\u003Cp>After that, the script initializes an event client with \u003Ccode>gazu.events.init()\u003C/code>. This client maintains a persistent connection to Kitsu's event system.\u003C/p>\u003Cp>The call to \u003Ccode>gazu.events.add_listener\u003C/code> registers the callback function for a specific event type: \u003Ccode>\"asset:new\"\u003C/code>. This tells Gazu, \"Whenever Kitsu emits an event indicating that a new asset was created, call \u003Ccode>my_callback\u003C/code> with the event data.\"\u003C/p>\u003Cp>Finally, \u003Ccode>gazu.events.run_client(event_client)\u003C/code> starts the event loop. From this point on, the script blocks and listens continuously via a WebSocket connection. 
As soon as someone creates an asset in Kitsu, Kitsu emits an \u003Ccode>asset:new\u003C/code> event, Gazu receives it, and \u003Ccode>my_callback\u003C/code> is executed immediately.\u003C/p>\u003Chr>\u003Ch2 id=\"2-send-test-events\">2. Send test events\u003C/h2>\u003Cp>To validate your setup, you need to generate real events. The easiest way is to perform standard API actions that you already use in production. For example, by creating an asset programmatically:\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">import gazu\n\ngazu.set_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\nprojects = gazu.project.all_projects()\nproject = projects[0]\n\nasset_types = gazu.asset.all_asset_types()\nasset_type = asset_types[0]\n\nasset = gazu.asset.new_asset(\n    project,\n    asset_type,\n    \"My new asset\",\n    \"My asset description\"\n)\n\u003C/code>\u003C/pre>\u003Cp>After authentication, we retrieve the list of all projects visible to the logged-in user by calling \u003Ccode>gazu.project.all_projects()\u003C/code>. From that list, we select the first project. In a real production tool, you'd usually look up a specific project by name or ID, but this keeps the example simple.\u003C/p>\u003Cp>The same pattern is used for asset types. The script queries all available asset types, then picks the first one. Asset types define what kind of asset is being created (character, prop, environment, and so on), and Kitsu requires one to be specified when creating a new asset.\u003C/p>\u003Cp>With a project and an asset type in hand, we create a new asset by calling \u003Ccode>gazu.asset.new_asset\u003C/code>. The function takes the target project, the asset type, a name, and a description. When this call succeeds, Kitsu immediately creates the asset in its database and returns the newly created asset object.\u003C/p>\u003Cp>At this point, the asset exists in Kitsu exactly as if it had been created through the web interface. 
This action also emits an \u003Ccode>asset:new\u003C/code> event, allowing the rest of your pipeline to react automatically.\u003C/p>\u003Cp>Before rolling this out studio-wide, a pipeline TD can create assets in a staging project to confirm that the event triggers downstream automation without touching real production data.\u003C/p>\u003Chr>\u003Ch2 id=\"3-react-to-events-with-callbacks\">3. React to events with callbacks\u003C/h2>\u003Cp>Callbacks are the point where Kitsu events turn into concrete pipeline actions. When a callback is executed, it receives a payload describing exactly what changed: an asset was created, a task moved to a new status, or a file was published. That payload becomes your entry point for driving automation.\u003C/p>\u003Cp>A common first step inside a callback is to use the IDs in the event data to pull full context from Kitsu. For example, when you receive a task update event, you can fetch the complete task, the linked shot, and the associated project to understand where in the show this change happened and what rules should apply.\u003C/p>\u003Cp>From there, callbacks typically perform side effects that would otherwise require manual intervention. An asset creation event could, for example, result in a standardized folder tree being created on disk. A file publish event can push media into your review system, attach metadata, and make it visible to supervisors immediately.\u003C/p>\u003Cp>The key idea is that callbacks let production state drive behavior. Instead of people reacting to updates, your pipeline does, consistently and instantly, using the same rules every time.\u003C/p>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/kitsu-webhooks?ref=blog.cg-wire.com\" rel=\"noreferrer\">Fork our example GitHub repository\u003C/a> to try it out for yourself.\u003C/p>\u003Chr>\u003Ch2 id=\"4-search-events\">4. Search events\u003C/h2>\u003Cp>Live events are only half the story. 
Kitsu also keeps a record of past events, which gives you a reliable paper trail of what actually happened in production. When something goes wrong or when you need to prove that something worked, this event history is an essential debugging tool.\u003C/p>\u003Cp>Through the API, you can query recent events and filter them by time range or event type. Pulling the last hundred events is often enough to get immediate context after a failure. Narrowing the query to a specific date range lets you inspect what happened during a particular shift or overnight run. Filtering to file-related events is especially useful when tracking publishes and media ingestion issues.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">events = gazu.client.get(\"data/events/last?page_size=100\")\nevents = gazu.client.get(\"data/events/last?page_size=100&amp;before=2019-02-01\")\nevents = gazu.client.get(\"data/events/last?page_size=100&amp;before=2019-02-01&amp;after=2019-01-01\")\nevents = gazu.client.get(\"data/events/last?page_size=100&amp;only_files=true\")\n\u003C/code>\u003C/pre>\u003Cp>In practice, this is how you reconstruct a broken automation. Imagine a publishing script fails sometime during the night, and the morning team finds missing media in the review system. Instead of asking artists when they published or digging through logs across multiple machines, you can query Kitsu for all file events from the previous day. That gives you an exact sequence of publishes, timestamps, users, and linked entities.\u003C/p>\u003Cp>You can also keep track of specific events in your pipeline for productivity reports. For example, you could compile the activity log of your animation team to know who did what.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>Kitsu API events give you a clean, reliable way to build reactive pipelines. 
By listening to production changes instead of polling for them, you reduce latency, eliminate manual steps, and make your studio more resilient as it scales.\u003C/p>\u003Cp>Of course, webhooks only go as far as your knowledge of Kitsu scripting, so make sure to have a look at more technical tutorials from our blog to get a better idea of what you can build!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":181,"comment_id":182,"feature_image":183,"featured":105,"visibility":10,"created_at":184,"updated_at":185,"custom_excerpt":186,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":187,"primary_tag":188,"url":189,"excerpt":186,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":190},"0ba0384a-27a2-4189-ac13-8aca0933041c","6980b67a4304f600017051ef","https://images.unsplash.com/photo-1644088379091-d574269d422f?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fGNvbm5lY3Rpb25zfGVufDB8fHx8MTc3MDA0NTM2OXww&ixlib=rb-4.1.0&q=80&w=2000","2026-02-02T15:36:42.000+01:00","2026-02-23T10:00:39.000+01:00","Learn how to use the Kitsu 
Event API and webhooks to build reactive animation pipelines. Trigger automation instantly on asset creation, task updates, and publishes without polling or manual hand-offs.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/kitsu-webhooks-pipeline-automation/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@choys_?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Conny Schneider\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/kitsu-webhooks-pipeline-automation",{"title":176},"kitsu-webhooks-pipeline-automation","posts/kitsu-webhooks-pipeline-automation",[196],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"_WyfIj_ToGV8wcQxWVnE_qvsU0aULS6YDQRThnH1vO0",{"id":199,"title":200,"authors":201,"body":7,"description":7,"extension":8,"html":203,"meta":204,"navigation":14,"path":216,"published_at":217,"seo":218,"slug":219,"stem":220,"tags":221,"__hash__":223,"uuid":205,"comment_id":206,"feature_image":207,"featured":105,"visibility":10,"created_at":208,"updated_at":209,"custom_excerpt":210,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":211,"primary_tag":212,"url":213,"excerpt":210,"reading_time":214,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":215},"ghost/posts:flamenco-without-nas-kitsu.json","NAS-Free Flamenco Rendering with Kitsu Integration (2026)",[202],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧠\u003C/div>\u003Cdiv class=\"kg-callout-text\">Run Flamenco without shared storage by letting Kitsu drive render context and files.\u003C/div>\u003C/div>\u003Cp>You want to use Flamenco, but you don't want to buy a 
NAS.\u003C/p>\u003Cp>If you're a solo artist or a micro animation studio, that's a completely rational decision: shared storage can be expensive, adds maintenance overhead, and solves problems you may not actually have until you try to run a render farm.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/self-hosted-blender-render-farm\">Flamenco assumes a traditional studio setup\u003C/a>: shared files, shared paths, instant access. Without a NAS, that assumption is hard to circumvent. Flamenco has no concept of production context, so it doesn't know which shot you want rendered, which version is approved, or where the job files live. And without that knowledge, it can't safely operate in a NAS-less environment.\u003C/p>\u003Cp>That's where Kitsu comes in.\u003C/p>\u003Cp>Kitsu already knows what Flamenco doesn't: tasks, shots, versions, approvals. By treating Kitsu as asynchronous network storage, you can move data to a Flamenco manager when it's needed, render, and avoid hard shared storage entirely.\u003C/p>\u003Cp>Flamenco doesn't support this workflow out of the box. To make it work, you need to build a custom Flamenco job type that pulls context and files from Kitsu, stages them locally, and controls when and how renders run. 
This article shows you how to build exactly that.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/flamenco-kitsu-render-farm?ref=blog.cg-wire.com\">https://github.com/cgwire/blog-tutorials/tree/main/flamenco-kitsu-render-farm\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"high-level-architecture\">High-level architecture\u003C/h2>\u003Cp>Our setup is built around a simple idea: Flamenco does the rendering, Kitsu provides the truth.\u003C/p>\u003Cpre>\u003Ccode>Kitsu\n  ↑↓ (REST API)\nCustom Flamenco Job Type\n  ├── Pre-task Python (fetch task data &amp; files)\n  ├── Blender render tasks (Flamenco-managed)\n  └── Post-task Python (upload renders back to Kitsu)\nFlamenco Manager\n  ↓\nFlamenco Worker(s)\n\u003C/code>\u003C/pre>\u003Cp>Flamenco runs exactly as intended, with a Manager scheduling work and Workers executing Blender tasks. What changes is how jobs are defined. Instead of pointing Flamenco at a shared folder and hoping every machine sees the same files, we introduce a custom Flamenco job type that understands production data and knows how to talk to Kitsu.\u003C/p>\u003Cp>Kitsu sits outside the farm and exposes everything through its REST API: shots, tasks, versions, and file locations. When a render job is started—either manually or through automation—the custom job type queries Kitsu to figure out exactly what should be rendered. 
For example, it might ask: \"Give me the latest approved lighting version for shot 020.\" Kitsu answers, and that answer becomes the render job.\u003C/p>\u003Cp>On the Flamenco side, the Manager doesn't poll Kitsu or track production state. It simply runs the job definition it's given. The custom job type uses a small Python pre-task to fetch metadata and files from Kitsu, stage them locally in a job folder, and then hand them off to standard Blender render tasks that Flamenco already knows how to manage efficiently.\u003C/p>\u003Cp>When rendering is done, a post-task Python step pushes the results back to Kitsu to upload rendered frames, create a new version, or update task status. At no point do workers need shared storage or permanent access to the same filesystem. Each worker pulls what it needs, renders locally, and pushes results back asynchronously.\u003C/p>\u003Chr>\u003Ch2 id=\"1-creating-a-new-job-type\">1. Creating a new job type\u003C/h2>\u003Cp>A Flamenco job type defines how a job turns into actual work. It's the translation layer between \"I want to render this\" and the concrete tasks that Flamenco schedules across the farm. Conceptually, a job type declares what information it needs and how to compile that information into tasks.\u003C/p>\u003Cp>At its simplest, a job type describes a label and a set of settings, then provides a function that receives those settings and builds the job. In code, it looks something like this:\u003C/p>\u003Cpre>\u003Ccode class=\"language-js\">const JOB_TYPE = {\n  label: \"Kitsu Render\",\n  settings: [\n    // { key: \"message\", type: \"string\", required: true },\n    // { key: \"sleep_duration_seconds\", type: \"int32\", default: 1 },\n  ],\n};\n\nfunction compileJob(job) {\n  const settings = job.settings;\n}\n\u003C/code>\u003C/pre>\u003Cp>This code defines the skeleton of a custom Flamenco job type. 
The \u003Ccode>JOB_TYPE\u003C/code> object declares how the job appears in Flamenco: its human-readable label and the settings it expects when a job is created.\u003C/p>\u003Cp>Those settings act as typed inputs, with validation handled by Flamenco: in this example, a required string and an optional integer with a default value.\u003C/p>\u003Cp>The \u003Ccode>compileJob\u003C/code> function is where the job is turned into executable tasks; it receives the submitted job, reads the resolved settings, and would normally use them to generate render, pre-task, and post-task steps. As written, the function doesn't do any work yet, but it establishes the entry point where production logic will live.\u003C/p>\u003Cp>In a real production setup, instead of a generic message, you pass in a Kitsu task ID, a shot name, the desired output location, or even the Blender version that should be used.\u003C/p>\u003Cp>Where this logic lives matters. Custom Flamenco job types run on the \u003Cstrong>Flamenco Manager\u003C/strong>, not on the workers. On disk, they sit alongside the manager program, for example:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">$ flamenco\n└── flamenco-manager\n└── scripts/\n    └── kitsu-render.js\n\u003C/code>\u003C/pre>\u003Cp>In practice, studios treat these job type scripts as part of their pipeline codebase. They live in version control, evolve over time, and get deployed together with Flamenco updates. That way, you can change how jobs are built and how Kitsu is queried without redeploying or reconfiguring every worker machine on the farm.\u003C/p>\u003Cp>For worker scripts called by custom job types as commands, we put them next to our flamenco-worker program:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">$ flamenco\n└── flamenco-worker\n└── kitsu-render.py\n\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"2-adding-tasks\">2. 
Adding tasks\u003C/h2>\u003Cp>Inside \u003Ccode>compileJob\u003C/code>, you explicitly define the tasks that make up the job. This is where a high-level \"render this shot\" request turns into concrete, schedulable work that Flamenco can hand off to workers.\u003C/p>\u003Cp>The example below shows the simplest possible task. An \u003Ccode>echo\u003C/code> task is created using Flamenco's task authoring API, given a category, and then assigned a single command. That command passes the resolved job setting into the task, which will simply print the message when it runs. Finally, the task is added to the job so the Manager can schedule it.\u003C/p>\u003Cpre>\u003Ccode class=\"language-js\">const echoTask = author.Task(\"echo\", \"misc\");\nechoTask.addCommand(\n  author.Command(\"echo\", {\n    message: settings.message,\n  }),\n);\njob.addTask(echoTask);\n\u003C/code>\u003C/pre>\u003Cp>While this task doesn't do anything useful by itself, the pattern is the important part. The same mechanism is used to run Python scripts, launch Blender in background mode for rendering, or perform validation checks before a task is marked complete. Each task is designed to be atomic and restartable, which means if a worker crashes or a render fails at 3 a.m., Flamenco can retry just that task without derailing the entire job. That reliability is what makes this approach scale when you're running hundreds of shots overnight.\u003C/p>\u003Cp>Now, let's get into the meaty part of the tutorial and code a task to download assets from Kitsu, render with Blender, and re-upload the result to Kitsu.\u003C/p>\u003Chr>\u003Ch2 id=\"3-subcommand-1-downloading-assets-from-kitsu\">3. Subcommand 1: Downloading assets from Kitsu\u003C/h2>\u003Cp>The first real task in our Kitsu-driven job is to pull the exact data we need from Kitsu and set up a clean local workspace on the worker. 
Before Blender ever starts, the worker needs to know which task it's rendering and where the job files live.\u003C/p>\u003Cp>Instead of writing the logic in Javascript, we use the much simpler gazu Python SDK to create a \u003Ccode>kitsu-render\u003C/code> script, then call it in Javascript. If you don't have Python installed in your worker environment, consider \u003Ca href=\"https://blog.cg-wire.com/kitsu-cli-single-binary/\">creating a binary executable from the Python script\u003C/a>.\u003C/p>\u003Cpre>\u003Ccode class=\"language-js\">function compileJob(job) {\n  const settings = job.settings;\n\n  const task = author.Task(\"kitsu-render\", \"misc\");\n\n  task.addCommand(\n    author.Command(\"exec\", { exe: \"python3\", args: [\"kitsu-render.py\"] }),\n  );\n\n  job.addTask(task);\n}\n\u003C/code>\u003C/pre>\u003Cp>The Python script authenticates against the Kitsu API, looks for TODO rendering tasks, and downloads the associated preview file containing a .blend project to render.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import os\nimport gazu\n\ngazu.set_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\nprojects = gazu.project.all_projects()\nproject = projects[0]\n\ntasks = gazu.task.all_tasks_for_project(project)\n\nrendering = gazu.task.get_task_type_by_name(\"Rendering\")\ntodo = gazu.task.get_task_status_by_name(\"todo\")\n\nrender_tasks = [\n    t\n    for t in tasks\n    if t[\"task_type_id\"] == rendering[\"id\"] and t[\"task_status_id\"] == todo[\"id\"]\n]\n\nfor task in render_tasks:\n    files = gazu.files.get_all_preview_files_for_task(task)\n    if not files:\n        continue\n\n    latest = files[-1]\n    if latest[\"extension\"] == \"blend\":\n        task_to_render = task\n        latest_blend = latest\n        break\n\nif task_to_render is None:\n    raise RuntimeError(\"No render task with a .blend preview found\")\n\ntarget_path = os.path.join(\n    \"/tmp\", 
latest_blend[\"original_name\"] + \".\" + latest_blend[\"extension\"]\n)\n\ngazu.files.download_preview_file(latest_blend, target_path)\n\u003C/code>\u003C/pre>\u003Cp>This step is what makes a NAS-less workflow viable. Each worker pulls only the files it needs for the specific task it's running, instead of mounting or syncing an entire production tree. If the download fails, Flamenco can retry the task automatically without human intervention.\u003C/p>\u003Chr>\u003Ch2 id=\"4-subcommand-2-blender-render\">4. Subcommand 2: Blender render\u003C/h2>\u003Cp>Once the blend file to render is staged locally on the worker, we can \u003Ca href=\"https://blog.cg-wire.com/blender-programmatic-rendering/\">render it programmatically\u003C/a> with the \u003Ccode>bpy\u003C/code> library:\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">bpy.ops.wm.open_mainfile(filepath=target_path)\n\noutput_path = os.path.join(\n    \"/tmp\", latest_blend[\"name\"] + \".mp4\"\n)\n\nbpy.context.scene.render.image_settings.file_format = \"FFMPEG\"\nbpy.context.scene.render.ffmpeg.format = \"MPEG4\"\nbpy.context.scene.render.ffmpeg.codec = \"H264\"\nbpy.context.scene.render.ffmpeg.constant_rate_factor = \"HIGH\"\nbpy.context.scene.render.ffmpeg.gopsize = 12\nbpy.context.scene.render.ffmpeg.audio_codec = \"AAC\"\nbpy.context.scene.render.filepath = output_path\n\nbpy.ops.render.render(animation=True)\n\u003C/code>\u003C/pre>\u003Cp>A more advanced pipeline would leverage Flamenco's native 'blender-render' command to automatically split the frame range into smaller units of work and distribute them across available workers. If a machine drops out or a frame fails, only those frames are retried, so there's no need to restart the entire shot or build custom queue logic to handle parallelism.\u003C/p>\u003Cp>But to keep our example simple, we just render the whole video in one worker.\u003C/p>\u003Chr>\u003Ch2 id=\"5-subcommand-3-uploading-results-back-to-kitsu\">5. 
Subcommand 3: Uploading results back to Kitsu\u003C/h2>\u003Cp>The final step in the job is \u003Ca href=\"https://blog.cg-wire.com/blender-kitsu-low-res-preview/\">a post-render subcommand that pushes the render results back to Kitsu\u003C/a>. At this point, the worker has finished its frame range locally, and the farm's responsibility shifts from computation to publishing. This is where rendered output becomes visible to the rest of the production.\u003C/p>\u003Cp>The example below shows a minimal Python instruction that uploads the resulting video file to Kitsu as an attachment on the original task.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">result = gazu.task.publish_preview(\n    task_to_render,\n    todo,\n    comment=\"rendered\",\n    preview_file_path=output_path,\n)\n\u003C/code>\u003C/pre>\u003Cp>In a real production pipeline, this step usually does more than just upload files. We can create a new version in Kitsu, update the task status to something like Done, and trigger review or notification workflows so supervisors know new output is ready. Because this logic is just Python running inside a Flamenco task, it's easy to evolve as production needs change without touching the render farm itself.\u003C/p>\u003Chr>\u003Ch2 id=\"6-triggering-the-workflow\">6. Triggering the workflow\u003C/h2>\u003Cp>Once the custom job type is in place, the workflow is triggered by submitting a job request to the Flamenco Manager. During development, this is often done manually by calling the Manager's REST API directly. It's a fast way to validate that job compilation works, settings are wired correctly, and tasks behave as expected before any automation is layered on top.\u003C/p>\u003Cp>The example below submits a job of type \u003Ccode>kitsu-render\u003C/code> to the Manager. 
Along with basic metadata for tracking and attribution, the request includes a priority value and an empty \u003Ccode>settings\u003C/code> object, which would normally carry production-specific inputs like a Kitsu production ID. When the job is accepted, the Manager invokes the custom job type, compiles tasks, and schedules them across available workers.\u003C/p>\u003Cpre>\u003Ccode class=\"language-sh\">curl -X 'POST' \\\n  'http://172.17.0.1:8080/api/v3/jobs' \\\n  -H 'accept: application/json' \\\n  -H 'Content-Type: application/json' \\\n  -d '{\n  \"metadata\": {\n    \"project\": \"kitsu\",\n    \"user.email\": \"basunako@gmail.com\",\n    \"user.name\": \"kitsu\"\n  },\n  \"name\": \"Kitsu Render\",\n  \"priority\": 50,\n  \"settings\": {},\n  \"submitter_platform\": \"linux\",\n  \"type\": \"kitsu-render\"\n}'\n\u003C/code>\u003C/pre>\u003Cp>We can see the manager received the job request and assigned it to a worker:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/02/data-src-image-8815284e-9d0e-49a0-bdd8-ff4ada8a8961.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"900\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/02/data-src-image-8815284e-9d0e-49a0-bdd8-ff4ada8a8961.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/02/data-src-image-8815284e-9d0e-49a0-bdd8-ff4ada8a8961.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/02/data-src-image-8815284e-9d0e-49a0-bdd8-ff4ada8a8961.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>This manual trigger is primarily a development tool. 
It lets you iterate on job logic, test edge cases, and rerun jobs without involving artists or production tools.\u003C/p>\u003Cp>In production, studios always automate this step. A small service (often a cron job or lightweight webhook listener) periodically queries Kitsu for tasks that are ready to render, like shots that were just approved or published. When it finds one, it submits a corresponding job to the Flamenco Manager using the same API call.\u003C/p>\u003Cp>With this in place, Flamenco becomes a production-aware render backend instead of waiting for humans to push buttons, reacting automatically to changes in Kitsu and keeping the farm in sync with the state of the production.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>What you've built in this article is a fundamentally different way to think about rendering in small studios.\u003C/p>\u003Cp>By using a custom Flamenco job type to pull context and data from Kitsu, stage work locally, render through Flamenco's native scheduler, and push results back asynchronously, you've removed the need for shared storage without sacrificing reliability or scale.\u003C/p>\u003Cp>Each piece has a clear responsibility: Kitsu defines what is true in production, Flamenco decides how work runs, and your custom job type is the glue that keeps them in sync. 
That separation is what makes the system resilient, debuggable, and adaptable as your pipeline grows.\u003C/p>\u003Cp>Understanding this pattern is important because it lets you build render infrastructure that matches the reality of solo artists and micro-studios.\u003C/p>\u003Cp>But don't just leave here, \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/flamenco-kitsu-render-farm?ref=blog.cg-wire.com\">clone our example Github repository\u003C/a> for this article and start rendering today!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>",{"uuid":205,"comment_id":206,"feature_image":207,"featured":105,"visibility":10,"created_at":208,"updated_at":209,"custom_excerpt":210,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":211,"primary_tag":212,"url":213,"excerpt":210,"reading_time":214,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":215},"e864ab4c-75a4-40e8-b787-3d0f5937eac3","6980b6744304f600017051e3","https://images.unsplash.com/photo-1666858452715-1399b952befb?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDR8fHJlbmRlcmluZ3xlbnwwfHx8fDE3NzAwNDMxNzB8MA&ixlib=rb-4.1.0&q=80&w=2000","2026-02-02T15:36:36.000+01:00","2026-02-20T06:04:25.000+01:00","Learn how to run Blender Flamenco without a NAS by using Kitsu as asynchronous storage. 
This guide explains custom Flamenco job types that fetch assets from Kitsu, render locally, and upload results back automatically.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/flamenco-without-nas-kitsu/",9,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@fachrizalm?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Fachrizal Maulana\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/flamenco-without-nas-kitsu","2026-02-09T10:00:32.000+01:00",{"title":200},"flamenco-without-nas-kitsu","posts/flamenco-without-nas-kitsu",[222],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"jc6jt91jTF5PCUsPSQcrVR_U5Arx-eKplKRNnp9hexM",{"id":225,"title":226,"authors":227,"body":7,"description":7,"extension":8,"html":229,"meta":230,"navigation":14,"path":242,"published_at":243,"seo":244,"slug":245,"stem":246,"tags":247,"__hash__":249,"uuid":231,"comment_id":232,"feature_image":233,"featured":105,"visibility":10,"created_at":234,"updated_at":235,"custom_excerpt":236,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":237,"primary_tag":238,"url":239,"excerpt":236,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":241},"ghost/posts:automated-kitsu-pdf-reports.json","Automating Kitsu Reports with Python and Gazu (2026)",[228],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📊\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn hours of manual status reporting into a fully automated Kitsu PDF in seconds.\u003C/div>\u003C/div>\u003Cp>How many hours do you spend each week pulling data and 
generating reports?\u003C/p>\u003Cp>Animation studios use Kitsu to track progress, yet we still see supervisors spend hours manually compiling that data into PDFs just to keep producers and directors in the loop. It's a massive drain on creative energy and a manual point of failure that a senior team shouldn't have to deal with. If the data already exists in our tracking software, sharing it shouldn't be a struggle.\u003C/p>\u003Cp>As a technical lead, your job is to automate mundane tasks so the artists can focus on the art. And by using the Gazu Python client, we can bridge the gap between Kitsu's database and the final stakeholder report.\u003C/p>\u003Cp>Today, we're going to build a script that programmatically pulls project metrics and generates a custom PDF, turning a 2-hour manual chore into a five-second automated task.\u003C/p>\u003Chr>\u003Ch2 id=\"why-custom-reports\">Why Custom Reports?\u003C/h2>\u003Cp>Kitsu is a lifesaver for keeping the chaos of a production organized. The built-in dashboard covers most use cases, even multi-production analysis. 
But sometimes, \"standard\" doesn't cut it.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/02/data-src-image-4807effb-72e4-4fe8-9684-7f8a44579c42.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"900\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/02/data-src-image-4807effb-72e4-4fe8-9684-7f8a44579c42.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/02/data-src-image-4807effb-72e4-4fe8-9684-7f8a44579c42.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/02/data-src-image-4807effb-72e4-4fe8-9684-7f8a44579c42.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>For example, clients might want to feel like they're paying for a premium service. Sending them a raw software screenshot or a generic link feels a bit amateur. By using custom reports, you can deliver progress updates wrapped in your studio's branding, ensuring the presentation looks as polished as the frames you're delivering.\u003C/p>\u003Cp>Then there is the struggle of finding a producer-friendly format. A producer asks for a very specific Excel pivot table or a legacy PDF for the archives that follows a bizarre internal logic only they understand. If you need to export a filtered list of every shot in Sequence 02 that's currently \"In Progress\" but stuck with \"Overdue\" retakes, a custom report gets you that data instantly. \u003Ca href=\"https://blog.cg-wire.com/reduce-rework-animation/\">It saves you from the manual copy-pasting nightmare\u003C/a> and lets you get back to animating.\u003C/p>\u003Cp>Some studios also need custom views for advanced tracking. 
Custom data can help you spot department bottlenecks, like when the lighting team is consistently stalled because the FX cache is lagging, allowing you to solve the friction before it turns into a Friday night crunch.\u003C/p>\u003Cp>Fortunately, Kitsu is extremely easy to build upon.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/custom-kitsu-reports?ref=blog.cg-wire.com\">https://github.com/cgwire/blog-tutorials/tree/main/custom-kitsu-reports\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-kitsu-setup-authentication\">1. Kitsu Setup &amp; Authentication\u003C/h2>\u003Cp>First, you need to talk to your Kitsu instance.\u003C/p>\u003Cp>If you don't have a studio URL yet and want to run Kitsu on your own machine, Docker is the fastest way to get a production-ready environment up and running:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">docker run --init -ti --rm -p 80:80 -p 1080:1080 --name cgwire cgwire/cgwire\n\u003C/code>\u003C/pre>\u003Cp>For scripting, we will use the official Kitsu Python SDK, \u003Ccode>gazu\u003C/code>.\u003C/p>\u003Cp>You can authenticate using your user credentials, which is fine for local testing:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu\n\ngazu.set_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"2-fetch-production-data\">2. Fetch Production Data\u003C/h2>\u003Cp>Before we write a single line of code, we need to talk about the data Kitsu exposes. 
If it exists in the UI, you can probably grab it via Gazu.\u003C/p>\u003Cp>The API is surprisingly deep. \u003Ca href=\"https://blog.cg-wire.com/how-to-track-properly-the-cg-artist-progress/\">For a solid production report\u003C/a>, you could typically be pulling:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Progress Metrics:\u003C/strong> Status changes (e.g., moving from \"WIP\" to \"Internal Review\" using events).\u003C/li>\u003Cli>\u003Cstrong>Time Tracking:\u003C/strong> How long a shot has been \"In Progress\" versus the original estimate.\u003C/li>\u003Cli>\u003Cstrong>Cast Lists:\u003C/strong> Every Character, Environment, and Prop associated with a specific Episode or Sequence.\u003C/li>\u003Cli>\u003Cstrong>Workload:\u003C/strong> The exact number of frames or assets currently assigned to a specific artist.\u003C/li>\u003Cli>\u003Cstrong>Budget:\u003C/strong> How the team quota evolves over time.\u003C/li>\u003Cli>And many more resources you can read about in \u003Ca href=\"https://gazu.cg-wire.com/data?ref=blog.cg-wire.com\">our detailed developer documentation\u003C/a>.\u003C/li>\u003C/ul>\u003Cp>Let's look at a common scenario: you need a quick rundown of every task currently assigned to your team members for a specific project. 
This is the foundation of any \"Who is doing what?\" report.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">projects = gazu.project.all_projects()\nproject = projects[0]\n\ntasks = gazu.task.all_tasks_for_project(project)\n\nreport = []\n\nfor task in tasks:\n    assignees = [gazu.person.get_person(p_id)[\"full_name\"] for p_id in task[\"assignees\"]]\n\n    task_info = {\n        \"date\": task[\"updated_at\"],\n        \"entity\": gazu.entity.get_entity(task[\"entity_id\"])[\"name\"],\n        \"type\": gazu.task.get_task_type(task[\"task_type_id\"])[\"name\"],\n        \"status\": gazu.task.get_task_status(task[\"task_status_id\"])[\"name\"]\n    }\n\n    for artist in assignees:\n        report.append({**task_info, \"artist\": artist})\n\u003C/code>\u003C/pre>\u003Cp>Gazu returns dictionaries. When you're fetching \u003Ccode>all_tasks_for_project\u003C/code>, keep in mind that on a feature-length production, this can be a massive amount of data. Always try to filter your data. For example, by \u003Ccode>task_status\u003C/code> or \u003Ccode>entity_type\u003C/code>, if you only need to see, say, active Animation shots.\u003C/p>\u003Chr>\u003Ch2 id=\"3-creating-a-reusable-template\">3. Creating a Reusable Template\u003C/h2>\u003Cp>Now you need to decide how to render the PDF. There are two main options here.\u003C/p>\u003Cp>You can use ReportLab. This is the barebones method. It is fast and requires no external non-Python dependencies. Best for internal tech reports, simple line-item tables, and high-speed batch automation.\u003C/p>\u003Cp>Or you can create an HTML to PDF rendering pipeline using Jinja2 (templating) and WeasyPrint. This is often the preferred method because you can use CSS to style the report. If you can make a webpage, you can make a report. 
It's best for client-facing deliverables, heavy branding, and complex layouts.\u003C/p>\u003Cp>Let's define your configuration and template:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">STUDIO_NAME = \"My Animation Studio\"\nSTUDIO_LOGO = \"studio_logo.png\"  # local file path\nPROJECT_NAME = \"My Project\"\nOUTPUT_PDF = \"activity_report.pdf\"\n\u003C/code>\u003C/pre>\u003Cp>You use Jinja2 syntax (\u003Ccode>{{ variable }}\u003C/code>) to inject your Python data into standard HTML.\u003C/p>\u003Cpre>\u003Ccode class=\"language-html\">&lt;!doctype html&gt;\n&lt;html&gt;\n    &lt;head&gt;\n        &lt;meta charset=\"utf-8\" /&gt;\n        &lt;style&gt;\n            body {\n                font-family: Arial, sans-serif;\n                margin: 40px;\n            }\n            header {\n                display: flex;\n                align-items: center;\n                margin-bottom: 30px;\n            }\n            header img {\n                height: 50px;\n                margin-right: 20px;\n            }\n            h1 {\n                color: #2a2a2a;\n            }\n            table {\n                width: 100%;\n                border-collapse: collapse;\n                margin-top: 20px;\n            }\n            th {\n                background: #222;\n                color: white;\n                padding: 8px;\n                text-align: left;\n            }\n            td {\n                padding: 8px;\n                border-bottom: 1px solid #ccc;\n            }\n            .footer {\n                margin-top: 40px;\n                font-size: 10px;\n                color: #777;\n                text-align: center;\n            }\n        &lt;/style&gt;\n    &lt;/head&gt;\n\n    &lt;body&gt;\n        &lt;header&gt;\n            &lt;img src=\"{{ studio_logo }}\" /&gt;\n            &lt;h1&gt;{{ studio_name }} – Activity Report&lt;/h1&gt;\n        &lt;/header&gt;\n\n        &lt;p&gt;\n            
&lt;strong&gt;Project:&lt;/strong&gt; {{ project_name }}&lt;br /&gt;\n            &lt;strong&gt;Report Date:&lt;/strong&gt; {{ report_date }}\n        &lt;/p&gt;\n\n        &lt;table&gt;\n            &lt;tr&gt;\n                &lt;th&gt;Date&lt;/th&gt;\n                &lt;th&gt;Artist&lt;/th&gt;\n                &lt;th&gt;Task&lt;/th&gt;\n                &lt;th&gt;Entity&lt;/th&gt;\n                &lt;th&gt;Status&lt;/th&gt;\n            &lt;/tr&gt;\n            {% for row in rows %}\n            &lt;tr&gt;\n                &lt;td&gt;{{ row.date }}&lt;/td&gt;\n                &lt;td&gt;{{ row.artist }}&lt;/td&gt;\n                &lt;td&gt;{{ row.entity }}&lt;/td&gt;\n                &lt;td&gt;{{ row.type }}&lt;/td&gt;\n                &lt;td&gt;{{ row.status }}&lt;/td&gt;\n            &lt;/tr&gt;\n            {% endfor %}\n        &lt;/table&gt;\n\n        &lt;div class=\"footer\"&gt;Generated automatically by {{ studio_name }}&lt;/div&gt;\n    &lt;/body&gt;\n&lt;/html&gt;\n\u003C/code>\u003C/pre>\u003Cp>This HTML file acts as a Jinja2 template that defines the visual structure and styling of the report, including page layout, fonts, colors, and a table for displaying activity data. The \u003Ccode>{{ ... }}\u003C/code> expressions mark placeholders for values such as the studio name, logo URL, project name, and report date, while the embedded CSS ensures the document looks polished and print-ready when rendered or converted to PDF.\u003C/p>\u003Cp>When the Python code renders this template, Jinja2 replaces all placeholders with the actual values passed in from the script and executes the \u003Ccode>{% for row in rows %}\u003C/code> loop to generate one table row per activity record. 
Each \u003Ccode>row\u003C/code> dictionary supplies the date, artist, task, entity, and status values, producing a complete HTML document with a fully populated table.\u003C/p>\u003Cp>The rendered HTML is given to WeasyPrint, which interprets both the HTML structure and the inline CSS to lay out the content as a printable document. The studio logo is loaded via its URL or relative path, the table and text are styled exactly as defined in the template, and everything is rendered into a PDF file that visually matches the HTML design, ending with the footer that confirms the report was generated automatically.\u003C/p>\u003Chr>\u003Ch2 id=\"4-rendering\">4. Rendering\u003C/h2>\u003Cp>Finally, you glue it all together. You use \u003Ccode>jinja2\u003C/code> to fill in the placeholders in the HTML with your data, and then \u003Ccode>WeasyPrint\u003C/code> converts that HTML string into a PDF file:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">from jinja2 import Environment, FileSystemLoader\nfrom weasyprint import HTML\nfrom datetime import date\n\nenv = Environment(loader=FileSystemLoader(\".\"))\ntemplate = env.get_template(\"report.html\")\n\nhtml = template.render(\n    studio_name=STUDIO_NAME,\n    studio_logo=STUDIO_LOGO,\n    project_name=PROJECT_NAME,\n    report_date=date.today().isoformat(),\n    rows=report,\n)\n\nHTML(string=html, base_url=\".\").write_pdf(OUTPUT_PDF)\n\nprint(f\"PDF generated: {OUTPUT_PDF}\")\n\u003C/code>\u003C/pre>\u003Cp>The first part of the code sets up Jinja2 to load an HTML template from the current directory and then retrieves the aforementioned \u003Ccode>report.html\u003C/code>.\u003C/p>\u003Cp>Next, the template is rendered into a complete HTML document by injecting runtime data into those placeholders. Studio and project metadata are passed in, and the current date is generated in ISO format. 
The result of this step is a plain HTML string with all dynamic values resolved.\u003C/p>\u003Cp>Finally, the rendered HTML is handed to WeasyPrint, which parses the HTML and any associated CSS and assets, then converts it into a PDF file. The \u003Ccode>base_url\u003C/code> parameter ensures relative paths to images or stylesheets work correctly, and the finished PDF is written to the output path before printing a confirmation message.\u003C/p>\u003Cp>We obtain this final result:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/02/data-src-image-13e6f8e7-6700-4219-a7ed-6bbdb4850aab.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"900\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/02/data-src-image-13e6f8e7-6700-4219-a7ed-6bbdb4850aab.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/02/data-src-image-13e6f8e7-6700-4219-a7ed-6bbdb4850aab.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/02/data-src-image-13e6f8e7-6700-4219-a7ed-6bbdb4850aab.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You can try running the script yourself in a minute by \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/custom-kitsu-reports?ref=blog.cg-wire.com\">cloning our corresponding Github repository\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"5-automation-tips\">5. 
Automation Tips\u003C/h2>\u003Cp>Automation is where this workflow actually pays off the biggest dividends: once your report script works locally, the next step is making sure it runs reliably without human intervention, and that the output ends up where people already look.\u003C/p>\u003Cp>Instead of manually running the script, set up a cron job on your server to execute it at a predictable time. For example, running the script every weekday at 6:00 PM ensures the PDF is generated overnight and ready when producers start their day. This is especially useful for daily burn-downs or shot status summaries.\u003C/p>\u003Cp>Once the PDF is generated, use \u003Ccode>gazu\u003C/code> to attach it directly to a relevant entity in Kitsu, like a Production, Episode, or a recurring Task. This turns your report into a first-class production artifact with a permanent history. For example, uploading each day's report to a \"Daily Production Report\" task makes it easy to audit changes over time or reference past decisions. A practical tip: include the date in both the filename and the attachment comment so reports are easy to scan in the Kitsu UI without downloading each one.\u003C/p>\u003Cp>To push the report directly to stakeholders, use Python's built-in \u003Ccode>smtplib\u003C/code> (or a transactional email service) to send the PDF as an attachment. This is ideal for \u003Ca href=\"https://blog.cg-wire.com/collaborative-animation-production/\">producers or clients who don't live in Kitsu\u003C/a> all day. A concrete pattern is to email a short summary in the body—\"Shots blocked: 12, shots finaled: 3\"—and attach the full PDF for details.\u003C/p>\u003Cp>Instead of hardcoding a single HTML layout, store multiple Jinja2 templates like \u003Ccode>client_report.html\u003C/code> and \u003Ccode>internal_audit.html\u003C/code> to generate different report styles from the same Kitsu data. 
For example, clean, high-level summaries for clients and more detailed tables for internal tracking. A useful approach is to share base templates and macros (headers, tables, status badges) so changes to branding or layout propagate across all report types. Version these templates alongside your code so you can reproduce older reports exactly if needed.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>The bigger idea here isn't just about PDFs: it's about reclaiming time and attention for the work that actually moves a production forward!\u003C/p>\u003Cp>By pulling structured data out of Kitsu with Gazu, shaping it with Python, and rendering it into polished, automated reports, you replace a fragile, manual ritual with a repeatable system that runs quietly in the background. What used to be hours of copy-pasting, formatting, and double-checking becomes a dependable pipeline: accurate data, delivered on time, in a format producers and clients actually want to read. Custom reports let you communicate progress with confidence, surface problems before they become crunch, and present your studio as both creatively sharp and technically disciplined.\u003C/p>\u003Cp>The more complex your pipeline is, the more important it becomes to create custom reports, so make sure to read more of our scripting guides for inspiration!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":231,"comment_id":232,"feature_image":233,"featured":105,"visibility":10,"created_at":234,"updated_at":235,"custom_excerpt":236,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":237,"primary_tag":238,"url":239,"excerpt":236,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":241},"d3e7cb7f-7151-4881-acef-25bc18bf3edc","69805d244304f600017051c5","https://images.unsplash.com/photo-1666875753105-c63a6f3bdc86?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDR8fGRhc2hib2FyZHxlbnwwfHx8fDE3NzAwMjAyODZ8MA&ixlib=rb-4.1.0&q=80&w=2000","2026-02-02T09:15:32.000+01:00","2026-02-20T06:03:57.000+01:00","Learn how to use the Gazu Python SDK to extract production data from Kitsu and generate custom, branded PDF reports. 
Automate progress tracking, task summaries, and stakeholder updates without manual copy-paste work.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/automated-kitsu-pdf-reports/",8,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@dengxiangs?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Deng Xiang\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/automated-kitsu-pdf-reports","2026-02-02T10:00:12.000+01:00",{"title":226},"automated-kitsu-pdf-reports","posts/automated-kitsu-pdf-reports",[248],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"YJ-_wQpipngnWEzDu9U0RCIsq_Blt5_8TSOGLX9c1zw",{"id":251,"title":252,"authors":253,"body":7,"description":7,"extension":8,"html":255,"meta":256,"navigation":14,"path":267,"published_at":268,"seo":269,"slug":270,"stem":271,"tags":272,"__hash__":274,"uuid":257,"comment_id":258,"feature_image":259,"featured":105,"visibility":10,"created_at":260,"updated_at":261,"custom_excerpt":262,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":263,"primary_tag":264,"url":265,"excerpt":262,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":266},"ghost/posts:share-kitsu-playlists.json","(2026) How to Export and Share Kitsu Playlists with Python",[254],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📥\u003C/div>\u003Cdiv class=\"kg-callout-text\">Share Kitsu playlists clearly, even when clients can’t access Kitsu directly.\u003C/div>\u003C/div>\u003Cp>Early in your career as an animator, you'll likely learn a hard 
truth—sometimes the painful way: \u003Cstrong>doing great work is only half the job, sharing it clearly is the other half\u003C/strong>. You might remember a short film project where the animation itself was solid, but the review process was pure chaos. QuickTimes flying back and forth over email, files named things like \u003Ccode>shot_final_v3_really_final.mov\u003C/code>, and no one was quite sure which notes applied to which version. Clients were confused, supervisors were frustrated, and you were spending more time managing files than animating.\u003C/p>\u003Cp>Fast forward a few years, and tools like \u003Cstrong>Kitsu playlists\u003C/strong> completely change how studios review animation.\u003C/p>\u003Cp>They give you structure, traceability, and a clean way to present work. You can group shots, track versions, and centralize feedback. For most teams, that alone is a huge win.\u003C/p>\u003Cp>But here's the thing you learn after years in production: \u003Ca href=\"https://blog.cg-wire.com/how-to-give-efficient-animation-feedback/\">no two studios or clients share the exact same review workflow\u003C/a>. Sometimes you need to send assets offline. Sometimes a client wants everything neatly packaged by sequence. Sometimes legal or security constraints mean you can't give direct Kitsu access. In those cases, you still want to leverage Kitsu's strengths without being locked into a single way of sharing.\u003C/p>\u003Cp>That's exactly what this article is about.\u003C/p>\u003Cp>By the end, you'll know how to \u003Cstrong>create a Kitsu playlist, extract its data with Python, download all related assets in a clean folder structure, and compress everything for easy sharing\u003C/strong>. 
This approach can save you hours on real productions and make reviews smoother for both artists and clients.\u003C/p>\u003Cp>Let's break it down step by step.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/share-kitsu-playlist?ref=blog.cg-wire.com\">https://github.com/cgwire/blog-tutorials/tree/main/share-kitsu-playlist\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-create-a-kitsu-playlist\">1. Create a Kitsu Playlist\u003C/h2>\u003Cp>\u003Cstrong>Every solid review workflow starts with a clear intention\u003C/strong>: what exactly do you want feedback on? Kitsu playlists are built for that purpose.\u003C/p>\u003Cp>Creating a playlist from the Kitsu dashboard is straightforward. Navigate to your project, head into the Shots or Assets section, and start selecting the items you want reviewed. It helps to think of playlists as review narratives. 
Instead of dumping everything in, ask yourself:\u003C/p>\u003Cul>\u003Cli>Is this a blocking review?\u003C/li>\u003Cli>Is this a polishing pass?\u003C/li>\u003Cli>Is this focused on animation, lighting, or comp?\u003C/li>\u003C/ul>\u003Cp>For example, on a short cinematic project, you might create separate playlists for:\u003C/p>\u003Cul>\u003Cli>\"Animation Blocking – Act 1\"\u003C/li>\u003Cli>\"Facial Polish – Key Shots\"\u003C/li>\u003Cli>\"Final Lighting Review\"\u003C/li>\u003C/ul>\u003Cp>That small bit of organization can make client reviews dramatically more focused.\u003C/p>\u003Cp>In Kitsu, once your shots are selected, you can create a new playlist, name it clearly, and order the shots in a way that tells a story. Order matters more than people think. \u003Ca href=\"https://blog.cg-wire.com/client-communication-animation/\">When a client presses play, they can judge the art, timing, and revisions in one place.\u003C/a>\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-712558f4-4b58-4b1e-8bb1-7bfa2fee1c74.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"821\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-712558f4-4b58-4b1e-8bb1-7bfa2fee1c74.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-712558f4-4b58-4b1e-8bb1-7bfa2fee1c74.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-712558f4-4b58-4b1e-8bb1-7bfa2fee1c74.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"2-get-the-playlist-data\">2. 
Get the Playlist Data\u003C/h2>\u003Cp>Now that we have a playlist ready, it's time to code.\u003C/p>\u003Cp>We start by \u003Cstrong>authenticating with Kitsu\u003C/strong> using the Gazu API client:\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">import gazu\n\ngazu.set_host(\"http://localhost/api\")\ngazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\u003C/code>\u003C/pre>\u003Cp>We can then \u003Cstrong>query Kitsu for available projects\u003C/strong> and present them in the terminal. The user selects a project, and that choice defines the scope of everything that follows. Because projects are fetched dynamically, the script works across productions without modification:\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">productions = gazu.project.all_projects()\n\nfor i, p in enumerate(productions):\n    print(f\"[{i}] {p['name']}\")\n\nproduction = productions[int(input(\"Select project: \"))]\n\u003C/code>\u003C/pre>\u003Cp>From there, \u003Cstrong>playlists are queried from the selected project\u003C/strong> and shown the same way. When a playlist is chosen, the script retrieves the full playlist object from the API.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">playlists = gazu.playlist.all_playlists_for_project(production)\n\nfor i, pl in enumerate(playlists):\n    print(f\"[{i}] {pl['name']}\")\n\nplaylist = gazu.playlist.get_playlist(playlists[int(input(\"Select playlist: \"))])\n\u003C/code>\u003C/pre>\u003Cp>\u003Ccode>playlist\u003C/code> contains the full editorial selection reference: shots, versions, ordering, and linked files are all accessible through this object.\u003C/p>\u003Chr>\u003Ch2 id=\"3-download-related-assets\">3. 
Download Related Assets\u003C/h2>\u003Cp>The next step is turning the playlist data into something reviewable on disk.\u003C/p>\u003Cp>\u003Cstrong>The output is a folder hierarchy that mirrors production reality\u003C/strong>: playlist at the top, sequences underneath, shots inside those, and the actual media sitting where anyone expects to find it.\u003C/p>\u003Cpre>\u003Ccode>Playlist_Name/\n└── Seq_010/\n    ├── Shot_010_001/\n    │   ├── anim_v003.mov\n    │   └── anim_v003.png\n    └── Shot_010_002/\n└── Seq_020/\n    └── Shot_020_005/\n\u003C/code>\u003C/pre>\u003Cp>That structure is the point. It removes ambiguity, avoids flat dumps of files, and lets supervisors and clients navigate by context instead of filenames.\u003C/p>\u003Cp>The playlist name is used as the root folder, so every export stays self-contained and re-runnable.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">playlist_name = playlist[\"name\"]\n\u003C/code>\u003C/pre>\u003Cp>We then iterate over each playlist entry and fetch the full shot record because the playlist itself does not include sequence data.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">for shot in playlist[\"shots\"]:\n    shot_data = gazu.shot.get_shot(shot[\"entity_id\"])\n\u003C/code>\u003C/pre>\u003Cp>We use the sequence name and shot name to build a deterministic directory path. This enforces a consistent \u003Ccode>playlist/sequence/shot\u003C/code> layout on disk.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">shot_name = shot_data[\"name\"]\nsequence_name = shot_data[\"sequence_name\"]\n\nshot_dir = os.path.join(\n    playlist_name,\n    sequence_name,\n    shot_name,\n)\n\u003C/code>\u003C/pre>\u003Cp>If the directory doesn't exist, we create it. 
This lets the script run multiple times without failing or overwriting partial downloads.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">os.makedirs(shot_dir, exist_ok=True)\n\u003C/code>\u003C/pre>\u003Cp>We can then fetch the preview file information corresponding to each shot. Typically, a picture or video:\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">preview = gazu.files.get_preview_file(shot[\"preview_file_id\"])\n\u003C/code>\u003C/pre>\u003Cp>We preserve the original filename and extension so the output matches what artists and supervisors expect to see.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">preview_filename = f\"{preview['original_name']}.{preview['extension']}\"\npreview_path = os.path.join(shot_dir, preview_filename)\n\u003C/code>\u003C/pre>\u003Cp>We download the preview media directly into the shot folder. At this point, the playlist exists on disk as a clean, review-ready directory tree.\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">gazu.files.download_preview_file(preview, preview_path)\n\u003C/code>\u003C/pre>\u003Cp>The result is a local mirror of the playlist that can be zipped, sent, archived, or reviewed without explanation.\u003C/p>\u003Chr>\u003Ch2 id=\"4-compress-the-folder\">4. Compress the Folder\u003C/h2>\u003Cp>Once everything is downloaded, the final step is making it easy to share. \u003Cstrong>Your script should automatically compress the root playlist folder into a single archive\u003C/strong>:\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">import shutil\n\nshutil.make_archive(\n    base_name=playlist_name,\n    format=\"zip\",\n    root_dir=os.path.dirname(playlist_name),\n    base_dir=os.path.basename(playlist_name),\n)\n\u003C/code>\u003C/pre>\u003Cp>This archive becomes your handoff artifact. You can upload it to cloud storage, send it through a secure client portal, or archive it internally as a backup folder.\u003C/p>\u003Cp>\u003Cstrong>Clients don't worry about missing files or broken structures. 
They download once, unzip once, and everything just works.\u003C/strong>\u003C/p>\u003Cp>Include the playlist name and date in the archive filename. Six months later, when someone asks, \"Which version did we send?\", you'll be glad you did.\u003C/p>\u003Chr>\u003Ch2 id=\"onboard-clients-in-kitsu\">Onboard Clients In Kitsu\u003C/h2>\u003Cp>At some point, exporting Kitsu playlists just starts getting in the way. It’s fine when you’re sending a quick snapshot or getting a one-off note pass, but once the project goes into real iteration, things get messy fast. You’re re-exporting for every tweak, clients are commenting on outdated cuts, and feedback ends up split between emails, PDFs, and chat threads. \u003Cstrong>A lot of energy goes into figuring out what the note is referring to instead of actually fixing the shot.\u003C/strong>\u003C/p>\u003Cp>\u003Cstrong>That’s usually when it makes sense to bring clients directly into Kitsu.\u003C/strong> They’re always looking at the current version, they can draw or comment right on the frame, and everyone sees the notes in context. Version history stays intact, so when a client asks about something “from two versions ago,” you can actually see it. 
For the team, it means fewer guesswork moments and less time copying notes from one place to another.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-1b596b1f-9757-47e5-a893-2c41164a1eab.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1438\" height=\"809\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-1b596b1f-9757-47e5-a893-2c41164a1eab.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-1b596b1f-9757-47e5-a893-2c41164a1eab.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-1b596b1f-9757-47e5-a893-2c41164a1eab.png 1438w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Exports are good for quick check-ins, but they don’t scale with real production. \u003Cstrong>Having clients in Kitsu keeps everyone grounded in the same reality.\u003C/strong>\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>After years in animation, one lesson keeps repeating itself: the smoother your review workflow, the better your creative output. Kitsu already gives you a powerful foundation with playlists, versioning, and centralized feedback. \u003Cstrong>By tapping into its data and building small automation tools, you can adapt it to almost any review scenario.\u003C/strong>\u003C/p>\u003Cp>But you can also extract playlist data from Kitsu and reshape it to fit your custom review workflows. 
Whether you're sending offline packages, organizing assets for external partners, or just trying to make life easier for your clients, this approach puts you in control.\u003C/p>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/share-kitsu-playlist?ref=blog.cg-wire.com\">Check out the public GitHub repository\u003C/a> to clone and modify our code to match your workflow!\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-5c610ee3-e726-4198-8b9b-480d3546530c.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"821\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-5c610ee3-e726-4198-8b9b-480d3546530c.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-5c610ee3-e726-4198-8b9b-480d3546530c.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-5c610ee3-e726-4198-8b9b-480d3546530c.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>And if there's one final piece of advice worth following: \u003Cstrong>onboard your clients directly onto Kitsu whenever possible!\u003C/strong> Once they \u003Ca href=\"https://www.cg-wire.com/review-engine?ref=blog.cg-wire.com\">experience real-time review rooms\u003C/a>, annotated notes, and version history, most never want to go back to messy email threads again.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":257,"comment_id":258,"feature_image":259,"featured":105,"visibility":10,"created_at":260,"updated_at":261,"custom_excerpt":262,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":263,"primary_tag":264,"url":265,"excerpt":262,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":266},"503be05a-cfc1-4b66-8010-a46dab1bd231","695bb6ffc665470001df4dc7","https://images.unsplash.com/photo-1727142073871-d40f5a7c76d8?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fHZpZGVvJTIwZWRpdCUyMHN1aXRlfGVufDB8fHx8MTc2NzYyMDEwNnww&ixlib=rb-4.1.0&q=80&w=2000","2026-01-05T14:05:03.000+01:00","2026-02-20T06:04:53.000+01:00","Learn how to create, export, and share Kitsu playlists using Python. 
This guide shows how to extract playlist data, download previews into a clean folder structure, and package everything for offline or client-friendly reviews.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/share-kitsu-playlists/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@mdesign85?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">MD Duran\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/share-kitsu-playlists","2026-01-26T10:00:19.000+01:00",{"title":252},"share-kitsu-playlists","posts/share-kitsu-playlists",[273],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"-jbM2U_O1PNcpV8f4TxvrmeMLejonGVTVrjVX5RZW_E",{"id":276,"title":277,"authors":278,"body":7,"description":7,"extension":8,"html":280,"meta":281,"navigation":14,"path":292,"published_at":293,"seo":294,"slug":295,"stem":296,"tags":297,"__hash__":301,"uuid":282,"comment_id":283,"feature_image":284,"featured":105,"visibility":10,"created_at":285,"updated_at":286,"custom_excerpt":287,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":288,"primary_tag":289,"url":290,"excerpt":287,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":291},"ghost/posts:self-hosted-blender-render-farm.json","Self-Hosting a Blender Render Farm Using Flamenco In 2026",[279],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🖥️\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn idle machines into a powerful Blender render farm without touching the cloud.\u003C/div>\u003C/div>\u003Cp>When was the last time you almost missed a deadline because of 
rendering?\u003C/p>\u003Cp>Every time you open Blender, your workstation sounds like a jet engine preparing for takeoff, and your entire film worth months of work is held hostage by a single progress bar.\u003C/p>\u003Cp>Meanwhile, your old college laptop sits in a box gathering dust. It's not a powerhouse, but it has a GPU. It has RAM. It's a perfectly functional computer doing absolutely nothing while you panic.\u003C/p>\u003Cp>The concept of a \"render farm\" can sound intimidating to one-person studios. You might imagine server racks in a chilled room, expensive licenses, and IT professionals shouting about IP addresses.\u003C/p>\u003Cp>But in the modern Blender ecosystem, that's no longer the reality.\u003C/p>\u003Cp>In this article, \u003Cstrong>I'm going to walk you through how to turn old devices into a unified rendering system using \u003Cem>Flamenco\u003C/em>.\u003C/strong> We will demystify the network setup and get you rendering on multiple machines in a few hours.\u003C/p>\u003Chr>\u003Ch2 id=\"why-self-host-a-render-farm\">Why Self-Host a Render Farm?\u003C/h2>\u003Cp>Before we start plugging in Ethernet cables, let's talk about why you should bother. You might think, \"Why not just send everything to a cloud farm?\" Cloud farms are amazing, but having a local, self-hosted render farm changes your workflow in three fundamental ways.\u003C/p>\u003Cp>When you pay for a cloud farm, you are paying for the final output. \u003Ca href=\"https://blog.cg-wire.com/blender-kitsu-low-res-preview/\">This psychologically discourages you from test rendering\u003C/a>. \u003Cstrong>You become afraid to hit \"Render\" until you are 100% sure everything is perfect.\u003C/strong>\u003C/p>\u003Cp>When you own the farm, the cost of a render is electricity. \u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\">You can render a rough animation\u003C/a> at 50% resolution just to check the timing or lighting. 
\u003Cstrong>This freedom allows you to iterate faster.\u003C/strong> You stop guessing and start testing.\u003C/p>\u003Cp>Sometimes you're working on a commercial project for a tech client whose NDA is so strict you aren't allowed to even whisper the product name. \u003Cstrong>Uploading those assets to a third-party cloud server - even a secure one - can sometimes violate strict NDA contracts.\u003C/strong> Keeping your data on your local network (LAN) ensures that no pixels leave your studio until you say so.\u003C/p>\u003Cp>There is a specific kind of agony in uploading a 2GB project file to the cloud, waiting for it to render, downloading the frames, and realizing you left a physics cache unbaked. \u003Cstrong>With a local farm like Flamenco, if you spot a mistake, you just hit \"Cancel,\" fix it, and hit \"Render\" again. No upload times, no download times.\u003C/strong> It feels like an extension of your workstation.\u003C/p>\u003Chr>\u003Ch2 id=\"what-is-blender-flamenco\">What is Blender Flamenco?\u003C/h2>\u003Cp>Setting up a render farm from scratch \u003Ca href=\"https://blog.cg-wire.com/blender-programmatic-rendering/\">used to involve complex scripting\u003C/a> or expensive third-party software. Now, we have Blender Flamenco.\u003C/p>\u003Cp>\u003Cstrong>Flamenco is Blender's open-source render farm.\u003C/strong> It's extremely easy to set up: the manager is the brain holding the list of tasks (frames to render) and tells the other computers what to do. The workers are your extra laptops or desktops. They listen to the Manager, ask for a frame, render it, save it, and ask for another.\u003C/p>\u003Cp>Flamenco is designed to be zero-config. It practically discovers itself on your network. If you can install Blender, you can set up Flamenco.\u003C/p>\u003Chr>\u003Ch2 id=\"1-the-setup\">1. The Setup\u003C/h2>\u003Cp>For this tutorial, we start with the simplest configuration possible with our desktop computer acting both as manager and worker. 
We'll later see how to add our laptop.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Install Blender\u003C/strong> - Ensure your computer has Blender installed.\u003C/li>\u003Cli>\u003Cstrong>Download Flamenco\u003C/strong> - Go to the Flamenco website and download the package for your OS. Extract it to a folder.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-cec7140f-c6aa-4e18-83fb-be86e5a39ac7.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1064\" height=\"721\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-cec7140f-c6aa-4e18-83fb-be86e5a39ac7.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-cec7140f-c6aa-4e18-83fb-be86e5a39ac7.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-cec7140f-c6aa-4e18-83fb-be86e5a39ac7.png 1064w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"2-run-flamenco-manager\">2. Run Flamenco Manager\u003C/h2>\u003Col>\u003Cli>Open the Flamenco folder you extracted.\u003C/li>\u003Cli>Double-click \u003Ccode>flamenco-manager\u003C/code>.\u003C/li>\u003Cli>A terminal window will pop up with some text logs.\u003C/li>\u003Cli>Go through the configuration wizard to set up the job folder where you'll upload your blend files to render.\u003C/li>\u003Cli>Shortly after, your web browser should open automatically to \u003Ccode>http://localhost:8080\u003C/code>. This is the Flamenco web interface.\u003C/li>\u003C/ol>\u003Cp>If you see a friendly, dark-themed dashboard, congratulations. You are half a server admin already. 
The Manager is alive.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-ac803a05-e189-4c17-9fe9-d5749f916aa0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"821\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-ac803a05-e189-4c17-9fe9-d5749f916aa0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-ac803a05-e189-4c17-9fe9-d5749f916aa0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-ac803a05-e189-4c17-9fe9-d5749f916aa0.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>The manager will tell you to download the addon. Do it now as we'll need it for step 4.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-ccd6a3fb-4abd-469e-a566-5adfddf76196.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1064\" height=\"721\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-ccd6a3fb-4abd-469e-a566-5adfddf76196.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-ccd6a3fb-4abd-469e-a566-5adfddf76196.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-ccd6a3fb-4abd-469e-a566-5adfddf76196.png 1064w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-the-worker\">3. 
The Worker\u003C/h2>\u003Cp>Now, leave the manager running and double-click \u003Ccode>flamenco-worker\u003C/code>.\u003C/p>\u003Cp>That's it.\u003C/p>\u003Cp>The Worker will scan your local network, find the Manager running on the same computer, and introduce itself. If you look back at your Desktop's web browser (the Manager interface), you should see it appear in the \"Workers\" tab, listed as \"Idle\" and ready for duty.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-6bad58f1-615a-4a7b-8aff-38f07279ebe0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"821\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-6bad58f1-615a-4a7b-8aff-38f07279ebe0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-6bad58f1-615a-4a7b-8aff-38f07279ebe0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-6bad58f1-615a-4a7b-8aff-38f07279ebe0.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You should also run \u003Ccode>flamenco-worker\u003C/code> on your Desktop! Your main computer can render and manage at the same time.\u003C/p>\u003Chr>\u003Ch2 id=\"4-add-the-blend-file-and-render\">4. Add the Blend File and Render\u003C/h2>\u003Cp>The stage is set. Now, we can get to work!\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Open Blender\u003C/strong> on your Desktop.\u003C/li>\u003Cli>\u003Cstrong>Enable the Addon\u003C/strong> - Go to Edit &gt; Preferences &gt; Add-ons &gt; Install from Disk. 
Search for the flamenco zip file you downloaded during the manager setup.\u003C/li>\u003Cli>\u003Cstrong>Link the Manager\u003C/strong> - In the Flamenco add-on preferences, copy/paste the manager's URL address.\u003C/li>\u003Cli>\u003Cstrong>Save Your File\u003C/strong> - Save your \u003Ccode>.blend\u003C/code> file in the configured job folder.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-88504c81-44cf-4d32-a374-0b2dc6746b56.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"724\" height=\"732\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-88504c81-44cf-4d32-a374-0b2dc6746b56.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-88504c81-44cf-4d32-a374-0b2dc6746b56.png 724w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>In the Render Properties tab in Blender, scroll down to the \u003Cstrong>Flamenco\u003C/strong> panel.\u003C/p>\u003Col>\u003Cli>Click \u003Cstrong>\"Fetch Job Types\"\u003C/strong>.\u003C/li>\u003Cli>Select \u003Cstrong>\"Simple Render\"\u003C/strong>.\u003C/li>\u003Cli>Hit \u003Cstrong>\"Submit to Flamenco\"\u003C/strong>.\u003C/li>\u003C/ol>\u003Cp>Now, tab over to your web browser. You will see the job pop up. The status bars on your \"Workers\" list will turn green. 
Your Desktop will grab one frame to render at a time.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-6e7fa2fb-b997-4f6f-ba60-bcc3c70d5bb0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"918\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-6e7fa2fb-b997-4f6f-ba60-bcc3c70d5bb0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-6e7fa2fb-b997-4f6f-ba60-bcc3c70d5bb0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-6e7fa2fb-b997-4f6f-ba60-bcc3c70d5bb0.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"5-bringing-in-the-laptop\">5. Bringing in the Laptop\u003C/h2>\u003Cp>Now, to add your dusty laptop to the farm.\u003C/p>\u003Cp>Here is the single most actionable piece of advice I can give you, and it is where 90% of beginners fail: \u003Cstrong>All computers must see the files in the exact same place.\u003C/strong>\u003C/p>\u003Cp>If your texture is located at \u003Ccode>C:\\Users\\Dave\\Texture.png\u003C/code> on your desktop, your laptop \u003Cem>cannot\u003C/em> access that path. The laptop doesn't have a user named Dave, and it doesn't have the file on its C drive.\u003C/p>\u003Cp>You need a shared network folder, typically through a NAS. Depending on your operating system, the steps are similar but will slightly differ:\u003C/p>\u003Col>\u003Cli>Connect your desktop and laptop via Ethernet cable\u003C/li>\u003Cli>Create a NAS folder on your Desktop called \u003Ccode>RenderFarm\u003C/code>.\u003C/li>\u003Cli>Right-click it &gt; \u003Cstrong>Properties\u003C/strong> &gt; \u003Cstrong>Sharing\u003C/strong> &gt; \u003Cstrong>Share\u003C/strong>. 
Give read/write permission to your user.\u003C/li>\u003Cli>\u003Cstrong>Map the Network Drive:\u003C/strong> On your Desktop, map this folder to a drive letter, say \u003Ccode>Z:\u003C/code>. On your Laptop, navigate to the Desktop's network share and map it to \u003Cstrong>the same letter \u003Ccode>Z:\u003C/code>\u003C/strong>.\u003C/li>\u003C/ol>\u003Cp>Now, when you save your Blender file to \u003Ccode>Z:\\RenderFarm\\MyProject.blend\u003C/code>, both computers see it at \u003Ccode>Z:\\RenderFarm\\MyProject.blend\u003C/code>. The path is absolute and identical.\u003C/p>\u003Cp>Now, leave the Desktop running and move over to \u003Cstrong>Computer B (Laptop)\u003C/strong>.\u003C/p>\u003Col>\u003Cli>Make sure your \u003Ccode>Z:\u003C/code> drive (or whatever shared storage you set up) is accessible. Open a file inside it just to be sure.\u003C/li>\u003Cli>Install and open the Flamenco folder on the laptop.\u003C/li>\u003Cli>Make sure you have the same Blender version installed as the one on your desktop.\u003C/li>\u003Cli>Double-click \u003Ccode>flamenco-worker\u003C/code>.\u003C/li>\u003C/ol>\u003Cp>That's it.\u003C/p>\u003Cp>The Worker will scan your local network and find the Manager running on the Desktop.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-90501d50-29c3-4d8f-9b54-511e6c674739.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1504\" height=\"932\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-90501d50-29c3-4d8f-9b54-511e6c674739.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-90501d50-29c3-4d8f-9b54-511e6c674739.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-90501d50-29c3-4d8f-9b54-511e6c674739.png 
1504w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Flamenco will now automatically orchestrate jobs between your computers.\u003C/p>\u003Cp>If you do not have access to a NAS or do not wish to purchase one, you can have a look at installing a free Samba server on a Linux workstation. Using cloud storage isn't possible because Flamenco doesn't handle asynchronous services, unless you create your own custom job type. We'll see how to do that \u003Ca href=\"https://blog.cg-wire.com/\">in a future article\u003C/a>, using Kitsu as an asynchronous \u003Ca href=\"https://blog.cg-wire.com/animation-asset-storage/\">asset storage server\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion-knowing-when-to-scale\">Conclusion: Knowing When to Scale\u003C/h2>\u003Cp>We have covered the hardware setup, the crucial shared storage logic, and the software installation. If you have followed along, \u003Cstrong>you have a functioning render farm in your house and your dusty laptop is now a productive member of your team.\u003C/strong>\u003C/p>\u003Cp>Flamenco makes the barrier to entry for self-hosted rendering incredibly low. It respects your privacy, costs nothing but electricity, and allows you to squeeze every ounce of performance out of the hardware you already own.\u003C/p>\u003Cp>But there is a limit on what you can achieve by yourself.\u003C/p>\u003Cp>Eventually, you will hit a deadline where even your Desktop + Laptop combo isn't enough. Maybe you need to render a 4K sequence with heavy volumetrics in 24 hours and your home farm estimates a completion time of 3 weeks. This is the ceiling of self-hosting.\u003C/p>\u003Cp>When you hit this wall, you don't need to buy five more computers. \u003Cstrong>That's when you transition to a service like Ranch Computing\u003C/strong> that allows you to access hundreds of CPU/GPU nodes instantly. 
Your home farm is a great daily driver that's perfect for tests, previews, and lighter projects, while a cloud render farm is invaluable for quickly rendering high-quality deliverables to your clients.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":282,"comment_id":283,"feature_image":284,"featured":105,"visibility":10,"created_at":285,"updated_at":286,"custom_excerpt":287,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":288,"primary_tag":289,"url":290,"excerpt":287,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":291},"80ad6c13-1312-46ac-a74b-94e022668680","695bb702c665470001df4dcd","https://images.unsplash.com/photo-1683322499436-f4383dd59f5a?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDd8fGRhdGElMjBjZW50ZXJ8ZW58MHx8fHwxNzY3NjE4NDAxfDA&ixlib=rb-4.1.0&q=80&w=2000","2026-01-05T14:05:06.000+01:00","2026-02-20T06:04:52.000+01:00","Learn how to build a self-hosted Blender render farm using Flamenco. 
This guide walks through setup, shared storage, workers, and scaling strategies to help artists render faster using the hardware they already own.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/self-hosted-blender-render-farm/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@scottrodgerson?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Scott Rodgerson\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/self-hosted-blender-render-farm","2026-01-19T10:00:41.000+01:00",{"title":277},"self-hosted-blender-render-farm","posts/self-hosted-blender-render-farm",[298,299],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"69c20ddbcb09d8000107cfe5","DCPLn1PWShGHKlv5NXuil2qtBDL7tnabWDmi33KjLoc",{"id":303,"title":304,"authors":305,"body":7,"description":7,"extension":8,"html":307,"meta":308,"navigation":14,"path":319,"published_at":320,"seo":321,"slug":322,"stem":323,"tags":324,"__hash__":326,"uuid":309,"comment_id":310,"feature_image":311,"featured":105,"visibility":10,"created_at":312,"updated_at":313,"custom_excerpt":314,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":315,"primary_tag":316,"url":317,"excerpt":314,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":318},"ghost/posts:kitsu-cli-single-binary.json","Building a Portable Kitsu CLI with Python and Gazu 
(2026)",[306],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧰\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn fragile Python scripts into a single reliable tool that just runs.\u003C/div>\u003C/div>\u003Cp>It's late in production, the schedule is tight, and you need to roll out a critical pipeline tool on a new machine—something to sync shot statuses, publish playblasts, or \u003Ca href=\"https://blog.cg-wire.com/dcc-integration-blender-kitsu/\">automate a Kitsu workflow\u003C/a>. The tool itself isn't complicated. It's just Python. You already wrote it.\u003C/p>\u003Cp>\u003Cstrong>The problem is everything around it.\u003C/strong>\u003C/p>\u003Cp>The machine you're deploying to doesn't have Python installed. Or it has the wrong version. The studio's Linux server is locked down. A freelancer's Windows box can't compile dependencies. Someone asks whether they need \u003Ccode>pip\u003C/code>, a virtual environment, or the Gazu SDK. Suddenly, a \"simple script\" turns into documentation, troubleshooting, and lost time.\u003C/p>\u003Cp>Instead of building pipeline tools, you're managing environments.\u003C/p>\u003Cp>This is the part no one enjoys: installing Python, pinning versions, chasing missing libraries, and hoping nothing breaks when the OS updates. 
And when your tool needs to run on artist workstations, render nodes, or CI servers, that fragility becomes a real production risk.\u003C/p>\u003Cp>What you actually want is simple: one tool, one command, that just runs.\u003C/p>\u003Cp>\u003Cstrong>In this article, you'll learn how to package your Kitsu workflows by wrapping the Kitsu Python SDK (Gazu) into a Command Line Interface (CLI) and compiling it into a single binary executable.\u003C/strong> No Python installs. No dependency management. Just a reliable executable you can drop onto any machine and use immediately.\u003C/p>\u003Chr>\u003Ch2 id=\"why-you-need-a-cli\">Why You Need a CLI\u003C/h2>\u003Cp>GUIs are great for creative work, but \u003Cstrong>once you're dealing with pipeline management, a web UI can quickly become a burden\u003C/strong>. When you move the right Kitsu tasks into a CLI, you unlock a faster, more scalable, and more automation-friendly way of working.\u003C/p>\u003Cp>You finish animating five shots and need to update their status and upload previews. In a browser, that means context-switching: Alt-Tab, open Chrome, navigate to Kitsu, drill into the project, find the episode, click the shot, change the status, upload the movie. Then repeat the whole process for every shot. With a CLI, you stay exactly where you are. You type \u003Ccode>kitsu publish --status Review\u003C/code>, hit Enter, and move on. \u003Cstrong>You never leave the keyboard, you never break focus, and you don't pay the cognitive tax of clicking through menus.\u003C/strong>\u003C/p>\u003Cp>A CLI naturally pushes you toward thinking in arguments, lists, and automation, and that's where it starts to compound. \u003Cstrong>If you can update one shot, you can update ten or a hundred using the exact same command.\u003C/strong> You can loop over a sequence, pipe in shot names, or drive the operation directly from a DCC or render output. 
What would be an hour of repetitive clicking in a web UI becomes a few seconds of scripted work. And it's consistent, repeatable, and easy to version-control.\u003C/p>\u003Cp>Lastly, \u003Cstrong>not everything in a pipeline runs on a workstation with a monitor.\u003C/strong> Sometimes tasks need to happen on a render farm node, a build server, or a background process reacting to files on disk. In those environments, there is no browser and no user to click buttons. A CLI works anywhere you have a shell. You can automate publishes, status changes, validations, and sync operations, and Kitsu gets integrated deeper into the pipeline.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/kitsu-cli?ref=blog.cg-wire.com\">https://github.com/cgwire/blog-tutorials/tree/main/kitsu-cli\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-designing-the-cli-interface\">1. Designing the CLI Interface\u003C/h2>\u003Cp>Before we touch the Kitsu API, we need the skeleton of our tool. 
In Python, there are several ways to parse command-line arguments, but for a professional pipeline tool, I highly recommend using libraries like \u003Ccode>Click\u003C/code> or \u003Ccode>Typer\u003C/code>.\u003C/p>\u003Cp>For this walkthrough, let's conceptualize a tool called \u003Ccode>kitsu-cli\u003C/code>.\u003C/p>\u003Cp>\u003Cstrong>Think of your tool like a tree.\u003C/strong> The trunk is the main executable, and the branches are your commands and subcommands:\u003C/p>\u003Cpre>\u003Ccode class=\"language-text\">kitsu-cli (root)\n└── production (commands related to productions)\n    └── list (list all productions)\n\u003C/code>\u003C/pre>\u003Cp>Here is how you structure this logic in Python using \u003Ccode>Click\u003C/code>. This structure is crucial because it allows your tool to be extendable. Today you are managing productions; tomorrow you might be managing assets or playlists.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import click\n\n@click.group()\ndef cli():\n    \"\"\"My Studio Kitsu Tool\"\"\"\n    pass\n\n@cli.group()\ndef production():\n    \"\"\"Commands for managing productions\"\"\"\n    pass\n\n@production.command()\n@click.option('--name', help='Filter by name')\ndef list(name):\n    \"\"\"List productions\"\"\"\n    click.echo(f\"Listing productions: {name}\")\n\nif __name__ == '__main__':\n    cli()\n\u003C/code>\u003C/pre>\u003Cp>This snippet alone gives you a help menu for free. If the user types \u003Ccode>kit-cli --help\u003C/code>, they see the documentation. This is developer empathy, building tools that teach the user how to use them.\u003C/p>\u003Cp>\u003Cstrong>To run the CLI\u003C/strong>, you just use the same command as a regular Python program:\u003C/p>\u003Cpre>\u003Ccode class=\"language-py\">python3 cli.py production list\n\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"2-adding-gazu-features\">2. Adding Gazu Features\u003C/h2>\u003Cp>Now that we have the skeleton, we need the muscle. 
Kitsu provides a fantastic Python client called \u003Cstrong>Gazu\u003C/strong>.\u003C/p>\u003Cp>If you haven't used Gazu before, it is the bridge between your script and your Kitsu server.\u003C/p>\u003Cp>The first hurdle in any pipeline tool is \u003Cstrong>authentication\u003C/strong>. You do not want your artists hard-coding their passwords into scripts. A robust CLI checks if a session already exists. If not, it prompts the user to log in once and saves the token locally. For the sake of simplicity, we'll just hardcode our authentication logic:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu\n\ngazu.set_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\u003C/code>\u003C/pre>\u003Cp>Once authenticated, we can flesh out that \u003Ccode>list\u003C/code> command we wrote earlier. To list productions:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">@production.command()\n@click.option('--name', help='Filter by name')\ndef list(name):\n    \"\"\"List productions\"\"\"\n    click.echo(f\"Listing productions: {name}\")\n\u003C/code>\u003C/pre>\u003Cp>No need to open a browser, wait for the Vue app to load, and filter the view. \u003Cstrong>This script returns raw data instantly.\u003C/strong>\u003C/p>\u003Chr>\u003Ch2 id=\"3-interactive-interface\">3. Interactive Interface\u003C/h2>\u003Cp>While command flags (like \u003Ccode>--name test\u003C/code>) are great, \u003Cstrong>it would be a much better experience to pick productions from an interactive list\u003C/strong>.\u003C/p>\u003Cp>Instead of forcing the user to type the exact name of a sequence (which they will inevitably misspell), we can make our CLI smarter by adding prompts. 
If the user forgets to supply an argument, you just ask them for it.\u003C/p>\u003Cp>A library like \u003Ccode>questionary\u003C/code> is great for this because it adds self-documented, interactive selection lists to the terminal.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import questionary\n\n@production.command()\ndef select():\n    \"\"\"List available productions\"\"\"\n    productions = gazu.project.all_projects()\n\n    selected_project = questionary.select(\n        \"Which project are you working on?\", choices=productions\n    ).ask()\n\n    click.echo(f\"You selected {selected_project}. Loading assets...\")\n\n\u003C/code>\u003C/pre>\u003Cp>This tiny addition changes the user experience from \"scary hacker tool\" to \"helpful assistant.\" It reduces error rates to near zero because the user can only select valid options retrieved directly from Kitsu.\u003C/p>\u003Chr>\u003Ch2 id=\"4-the-single-executable-binary\">4. The Single Executable Binary\u003C/h2>\u003Cp>Last but not least, \u003Cstrong>we need to solve the \"It doesn't work on my laptop\" problem\u003C/strong>. We have a Python script with dependencies:\u003Ccode>gazu\u003C/code>, \u003Ccode>click\u003C/code>, \u003Ccode>questionary\u003C/code>, etc.\u003C/p>\u003Cp>To run this on a freelancer's machine, they would normally need to install Python, or maybe create a virtual environment, and \u003Ccode>pip install\u003C/code> the requirements. 
To eliminate all those steps, we can use \u003Ccode>PyInstaller\u003C/code>.\u003C/p>\u003Cpre>\u003Ccode class=\"language-sh\">python3 -m pip install pyinstaller\n\u003C/code>\u003C/pre>\u003Cp>PyInstaller analyzes your Python script, finds every library you imported, bundles the Python interpreter itself, and wraps it all into a single \u003Ccode>.exe\u003C/code> file (on Windows) or target binary (on Linux/Mac).\u003C/p>\u003Cp>Navigate to your script's folder in your terminal and run:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 -m PyInstaller --onefile --name kitsu-cli cli.py\n\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>--onefile\u003C/code>: This flag tells PyInstaller to bundle everything into a single file, rather than a folder of loose dependencies.\u003C/li>\u003Cli>\u003Ccode>--name\u003C/code>: The name of your final binary file.\u003C/li>\u003C/ul>\u003Cp>After the process finishes, check the \u003Ccode>dist/\u003C/code> folder. You will find a file named \u003Ccode>kitsu-cli\u003C/code> (or \u003Ccode>kitsu-cli.exe\u003C/code>).\u003C/p>\u003Cp>You can now take this file, put it on a USB drive, email it, or put it on a network drive. An artist can drag it to their desktop and run it as long as it's compiled on the same OS architecture (macOS, Windows, etc.). They do not need Python installed. They do not need to install Gazu manually. 
It just works:\u003C/p>\u003Cpre>\u003Ccode class=\"language-sh\">./kitsu-cli production list\n\u003C/code>\u003C/pre>\u003Cp>But don't take my word for it, try it out yourself by \u003Ca>cloning our Github repository\u003C/a>.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-f4c09502-e96e-4692-8fc7-d4dd59d6482c.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"913\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-f4c09502-e96e-4692-8fc7-d4dd59d6482c.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-f4c09502-e96e-4692-8fc7-d4dd59d6482c.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-f4c09502-e96e-4692-8fc7-d4dd59d6482c.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>If you need to cross-compile your CLI to different OS targets, you can use Github Actions.\u003C/p>\u003Chr>\u003Ch2 id=\"cli-example-the-render-fetcher\">CLI Example: The \"Render Fetcher\"\u003C/h2>\u003Cp>Let's switch to a more pipeline-centric scenario.\u003C/p>\u003Cp>Picture a workflow where you're \u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\">managing distributed rendering\u003C/a> across multiple machines. Each render node needs to regularly pull new work from Kitsu: shots marked \u003Cem>TODO\u003C/em> for rendering, along with their corresponding preview \u003Ccode>.blend\u003C/code> files. 
These machines are headless, locked down, and deliberately minimal—no Python installs, no virtual environments, no dependency juggling.\u003C/p>\u003Cp>What you want is a single executable you can drop onto any server and run as a cron job or service:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">./kitsu-cli pull MechaFight /home/user/flamenco/jobs\n\u003C/code>\u003C/pre>\u003Cp>The corresponding code would look like this:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import os\n\nimport click\nimport gazu\nimport questionary\n\ngazu.set_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\n\n@click.group()\ndef cli():\n    \"\"\"My Studio Kitsu Tool\"\"\"\n    pass\n\n\n@cli.command()\n@click.argument(\"project_name\", required=True)\n@click.argument(\"output_path\", required=True)\ndef pull(project_name, output_path):\n    click.echo(f\"Fetching TODO render tasks for project: {project_name}\")\n\n    project = gazu.project.get_project_by_name(project_name)\n\n    tasks = gazu.task.all_tasks_for_project(project)\n\n    rendering = gazu.task.get_task_type_by_name(\"Rendering\")\n    todo = gazu.task.get_task_status_by_name(\"todo\")\n\n    render_tasks = [\n        t\n        for t in tasks\n        if t[\"task_type_id\"] == rendering[\"id\"] and t[\"task_status_id\"] == todo[\"id\"]\n    ]\n\n    for task in render_tasks:\n        files = gazu.files.get_all_preview_files_for_task(task)\n        size = len(files)\n\n        if size &gt; 0:\n            latest = files[size - 1]\n            if latest[\"extension\"] == \"blend\":\n                target_path = os.path.join(\n                    output_path, latest[\"name\"] + \".\" + latest[\"extension\"]\n                )\n                gazu.files.download_preview_file(latest, target_path)\n\n\nif __name__ == \"__main__\":\n    cli()\n\u003C/code>\u003C/pre>\u003Col>\u003Cli>\u003Cstrong>Query Kitsu\u003C/strong> - The CLI connects to Kitsu (via Gazu) 
and retrieves all rendering tasks with a \u003Cem>TODO\u003C/em> status for a given project.\u003C/li>\u003Cli>\u003Cstrong>Filter tasks\u003C/strong> - It filters tasks that are marked \u003Ccode>todo\u003C/code> and have an associated preview file (in this case, a \u003Ccode>.blend\u003C/code> file).\u003C/li>\u003Cli>\u003Cstrong>Download assets\u003C/strong> - For each task, the CLI downloads the corresponding preview \u003Ccode>.blend\u003C/code> file to the specified output path on disk.\u003C/li>\u003Cli>\u003Cstrong>Render\u003C/strong> - Once downloaded, the files are ready for Blender to pick up, manually or via an automated render orchestrator like Flamenco.\u003C/li>\u003C/ol>\u003Cp>When this CLI is compiled into a single binary, it becomes trivial to deploy. You can drop it onto Linux render nodes and run it from cron or systemd without installing Python or dependencies. Every server pulls work the same way. Folder structures are consistent. Task state comes straight from Kitsu. And your render farm stays focused on rendering.\u003C/p>\u003Cp>Again, check it out in \u003Ca>the corresponding Github repository\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>\u003Cstrong>Creating your own Kitsu CLI doesn't have to be complex.\u003C/strong> By wrapping the Gazu library in a user-friendly CLI and freezing it with PyInstaller, you scale your pipeline. 
You remove the technical friction of environment management and let your artists focus on what they do best: creating beautiful animations.\u003C/p>\u003Cp>Learn more about combining Kitsu and Blender scripting by \u003Ca href=\"https://blog.cg-wire.com/\">subscribing to our blog\u003C/a>!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":309,"comment_id":310,"feature_image":311,"featured":105,"visibility":10,"created_at":312,"updated_at":313,"custom_excerpt":314,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":315,"primary_tag":316,"url":317,"excerpt":314,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":318},"8ece75b9-d27d-4edb-b152-e03c93326889","695b8678c665470001df4da3","https://images.unsplash.com/photo-1484417894907-623942c8ee29?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDIxfHxzb2Z0d2FyZSUyMGRldmVsb3BtZW50fGVufDB8fHx8MTc2NzYwNzcwNHww&ixlib=rb-4.1.0&q=80&w=2000","2026-01-05T10:38:00.000+01:00","2026-02-20T06:04:43.000+01:00","Learn how to package Kitsu workflows into a standalone command-line tool using 
Python, Gazu, and PyInstaller. This guide covers CLI design, interactive prompts, and compiling a single executable for reliable deployment across studios and render farms.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/kitsu-cli-single-binary/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@emilep?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Emile Perron\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/kitsu-cli-single-binary","2026-01-12T10:00:37.000+01:00",{"title":304},"kitsu-cli-single-binary","posts/kitsu-cli-single-binary",[325],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"tzpUcwj2c_RrpCjFHR0EJ7oQimUwcACObhVr3BaAavI",{"id":328,"title":329,"authors":330,"body":7,"description":7,"extension":8,"html":332,"meta":333,"navigation":14,"path":344,"published_at":345,"seo":346,"slug":347,"stem":348,"tags":349,"__hash__":352,"uuid":334,"comment_id":335,"feature_image":336,"featured":105,"visibility":10,"created_at":337,"updated_at":338,"custom_excerpt":339,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":340,"primary_tag":341,"url":342,"excerpt":339,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":343},"ghost/posts:blender-shaders-explained.json","Working with Blender Shaders (2026): Nodes & Scripting",[331],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎨\u003C/div>\u003Cdiv class=\"kg-callout-text\">Shaders are not magic, they’re visual recipes you can control and automate.\u003C/div>\u003C/div>\u003Cp>It's easy to panic the first time you hear the word 
\u003Cem>shader\u003C/em>. Someone mentions GLSL, GPUs start sweating, and suddenly you're imagining walls of unreadable code and your computer fan screaming for mercy.\u003C/p>\u003Cp>oHere's the part no one tells you early enough: you don't need to be a mathematician or a graphics programmer to work with shaders. You're not required to write low-level GPU code or understand every equation behind light physics. Blender doesn't expect that from you. Instead, it gives you nodes: visual building blocks that behave more like Lego than code. You plug things together, see the result instantly, and adjust until it feels right.\u003C/p>\u003Cp>Think of shaders less as code and more as recipes. You're mixing values, textures, and logic to describe how a surface should react to light. Sometimes you'll follow a known recipe, sometimes you'll improvise, and sometimes you'll break things just to see what happens. It's how you'll learn.\u003C/p>\u003Cp>\u003Cstrong>In this article, we're going to demystify what shading actually is, strip away the fear around it, and explore how to manipulate shaders procedurally using Blender's node system or a bit of scripting for an animation pipeline.\u003C/strong> By the end, shading won't feel like a forbidden room anymore.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-a-shader\">\u003Cstrong>What's a Shader?\u003C/strong>\u003C/h2>\u003Cp>To understand shaders, we have to stop thinking about \"colors\" and start thinking about \"physics.\"\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/hard-surface-modeling/\">\u003Cu>If you paint a wooden chair red in the real world\u003C/u>\u003C/a>, you aren't just changing its color. You are adding a layer of material that interacts with light. 
That red paint has a specific roughness (how much it scatters light), a specific specularity (how shiny it is), and a specific refractive index.\u003C/p>\u003Cp>\u003Cstrong>A shader is a set of instructions that tells the computer how to simulate that light interaction.\u003C/strong>\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-296bf085-924e-40f9-92fc-346c5dc31de0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"1067\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-296bf085-924e-40f9-92fc-346c5dc31de0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-296bf085-924e-40f9-92fc-346c5dc31de0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-296bf085-924e-40f9-92fc-346c5dc31de0.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: TurboSquid\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>When a ray of light from your digital sun hits the surface of your object, the shader steps in and asks:\u003C/p>\u003Cul>\u003Cli>\"Are you bouncing off?\" (Reflection)\u003C/li>\u003Cli>\"Are you going through?\" (Transmission/Glass)\u003C/li>\u003Cli>\"Are you getting trapped inside?\" (Absorption)\u003C/li>\u003Cli>\"Are you scattering under the skin?\" (Subsurface Scattering)\u003C/li>\u003C/ul>\u003Cp>If you're modeling a wet cobblestone street, a simple image texture makes it look like a flat photo of a street. A shader tells the renderer that the water in the cracks is perfectly reflective and smooth, while the stone is rough and dull. 
It tells the light to bounce differently off the wet parts than the dry parts.\u003Ca href=\"https://blog.cg-wire.com/how-light-shapes-emotion-in-animation/\"> \u003Cu>Light shapes reality.\u003C/u>\u003C/a>\u003C/p>\u003Chr>\u003Ch2 id=\"why-you-must-master-shader-nodes\">\u003Cstrong>Why You Must Master Shader Nodes\u003C/strong>\u003C/h2>\u003Cp>You might ask, \"Why not just download textures?\"\u003C/p>\u003Cp>Photo-scanning is great, but procedural shading gives you three superpowers that static images cannot match.\u003C/p>\u003Cp>When you use an image texture (a JPG or PNG), you are limited by pixels. Zoom in too close to a wall, and it becomes blurry.\u003C/p>\u003Cp>Shaders use math. \u003Cstrong>Math has no resolution limit.\u003C/strong> You can zoom into a procedural scratch on metal until you see the microscopic grooves, and it will remain crisp. Even if you have a model you're proud of, with clean topology and nice proportions, it'll still look flat without shaders.\u003C/p>\u003Cp>Blender's shader nodes make it \u003Cstrong>easy to tweak your textures in a consistent way\u003C/strong>. Let's say you are texturing a spaceship: you paint rust onto the hull using a texture map. Your Art Director walks in and says, \"Great, but the ship looks too old. Reduce the rust by 50%.\" If you hand-painted that, you have to start over or spend hours erasing. With shader nodes, you simply locate the \"Rust Amount\" value you created and slide it from \u003Ccode>1.0\u003C/code> to \u003Ccode>0.5\u003C/code>. Done.\u003C/p>\u003Cp>Static textures look frozen, but \u003Cstrong>shaders can also be animated\u003C/strong>. You can build a shader setup where moss grows on a rock over time based on the frame number, or where a shield glows brighter as it gets hit. 
Shaders allow your materials to react to the environment.\u003C/p>\u003Cp>For all these reasons, learning to master shader nodes is an incredible unlock for professional artists working with tight deadlines.\u003C/p>\u003Chr>\u003Ch2 id=\"the-different-types-of-shader-nodes\">\u003Cstrong>The Different Types of Shader Nodes\u003C/strong>\u003C/h2>\u003Cp>Blender's node system works like a flow chart. You click \u003Ccode>Add\u003C/code> to add nodes and connect them together. Data flows from left to right. To understand how to leverage each feature, you need to understand the different node types available.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-2573386d-adc9-4979-a848-89d1cae3645e.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"900\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-2573386d-adc9-4979-a848-89d1cae3645e.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-2573386d-adc9-4979-a848-89d1cae3645e.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-2573386d-adc9-4979-a848-89d1cae3645e.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Ch3 id=\"1-input-nodes\">\u003Cstrong>1. 
Input Nodes\u003C/strong>\u003C/h3>\u003Cp>Input nodes provide data from the scene, object, geometry, or user-defined values into the shader network.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Texture Coordinate\u003C/strong> - Provides UV, object, generated, and camera coordinates + use UV output to correctly map an image texture onto a UV-unwrapped model\u003C/li>\u003Cli>\u003Cstrong>Geometry\u003C/strong> - Outputs geometric information such as normals and pointiness + use Pointiness to create dirt accumulation in crevices\u003C/li>\u003Cli>\u003Cstrong>Fresnel\u003C/strong> - Calculates view-angle-based reflectivity + use it to create stronger reflections on the edges of glass\u003C/li>\u003Cli>\u003Cstrong>Object Info\u003C/strong> - Supplies per-object data like random values or object color + use Random output to give each object a slightly different color\u003C/li>\u003Cli>\u003Cstrong>Value\u003C/strong> - Outputs a constant numerical value + use it to control roughness with a single slider\u003C/li>\u003Cli>\u003Cstrong>Color\u003C/strong> - Outputs a constant color value + use it as a base color for a stylized material\u003C/li>\u003C/ul>\u003Ch3 id=\"2-output-nodes\">\u003Cstrong>2. Output Nodes\u003C/strong>\u003C/h3>\u003Cp>Output nodes define the final result of a shader and connect the node network to Blender’s rendering system.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Material Output\u003C/strong> - Outputs the final surface, volume, and displacement data + connect a Principled BSDF to the Surface input\u003C/li>\u003C/ul>\u003Ch3 id=\"3-shader-nodes\">\u003Cstrong>3. 
Shader Nodes\u003C/strong>\u003C/h3>\u003Cp>Shader nodes define how light interacts with a surface, including reflection, refraction, and emission.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Principled BSDF\u003C/strong> - Physically based all-in-one surface shader + create realistic metal, plastic, or skin materials\u003C/li>\u003Cli>\u003Cstrong>Diffuse BSDF\u003C/strong> - Produces matte, non-reflective surfaces + use for chalk, clay, or unpolished stone\u003C/li>\u003Cli>\u003Cstrong>Glossy BSDF\u003C/strong> - Produces mirror-like reflections + use for polished metal or mirrors\u003C/li>\u003Cli>\u003Cstrong>Glass BSDF\u003C/strong> - Combines refraction and reflection + use for windows or glass bottles\u003C/li>\u003Cli>\u003Cstrong>Emission\u003C/strong> - Emits light from a surface + use for screens, LEDs, or neon signs\u003C/li>\u003Cli>\u003Cstrong>Mix Shader\u003C/strong> - Blends two shader outputs + mix diffuse and glossy shaders for worn metal\u003C/li>\u003C/ul>\u003Ch3 id=\"4-displacement-nodes\">\u003Cstrong>4. Displacement Nodes\u003C/strong>\u003C/h3>\u003Cp>Displacement nodes alter surface detail by modifying geometry or shading normals.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Displacement\u003C/strong> - Performs true geometric displacement + create real depth in a brick wall using a height map (Cycles)\u003C/li>\u003Cli>\u003Cstrong>Bump\u003C/strong> - Simulates surface detail using normal perturbation + add fine scratches without increasing geometry\u003C/li>\u003Cli>\u003Cstrong>Normal Map\u003C/strong> - Converts normal textures into usable normal data + apply a baked normal map from a game asset\u003C/li>\u003C/ul>\u003Ch3 id=\"5-color-nodes\">\u003Cstrong>5. 
Color Nodes\u003C/strong>\u003C/h3>\u003Cp>Color nodes adjust, blend, and transform color information within the shader network.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Mix Color\u003C/strong> - Blends two colors or textures + mix a dirt texture over a clean base color\u003C/li>\u003Cli>\u003Cstrong>RGB Curves\u003C/strong> - Adjusts contrast and color balance + increase texture contrast without re-editing the image\u003C/li>\u003Cli>\u003Cstrong>Hue/Saturation\u003C/strong> - Modifies hue, saturation, and value + tint a material blue without repainting textures\u003C/li>\u003Cli>\u003Cstrong>Invert\u003C/strong> - Reverses color values + invert a roughness map to create a glossiness map\u003C/li>\u003C/ul>\u003Ch3 id=\"6-texture-nodes\">\u003Cstrong>6. Texture Nodes\u003C/strong>\u003C/h3>\u003Cp>Texture nodes generate or load image and procedural textures for materials.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Image Texture\u003C/strong> - Loads external image files + use an albedo map for a PBR material\u003C/li>\u003Cli>\u003Cstrong>Noise Texture\u003C/strong> - Generates smooth procedural noise + add subtle roughness variation to plastic\u003C/li>\u003Cli>\u003Cstrong>Voronoi Texture\u003C/strong> - Produces cell-based patterns + create cracks, scales, or stone tiles\u003C/li>\u003Cli>\u003Cstrong>Gradient Texture\u003C/strong> - Outputs smooth gradients + use as a mask for blending materials\u003C/li>\u003C/ul>\u003Ch3 id=\"7-utility-nodes\">\u003Cstrong>7. 
Utility Nodes\u003C/strong>\u003C/h3>\u003Cp>Utility nodes perform mathematical operations and data conversions.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Mapping\u003C/strong> - Transforms texture coordinates + scale and rotate a texture pattern\u003C/li>\u003Cli>\u003Cstrong>Math\u003C/strong> - Performs numerical operations + clamp roughness values to prevent extremes\u003C/li>\u003Cli>\u003Cstrong>Vector Math\u003C/strong> - Performs vector-based calculations + modify normal or direction vectors\u003C/li>\u003Cli>\u003Cstrong>Clamp\u003C/strong> - Limits values to a specified range + prevent over-bright emission values\u003C/li>\u003C/ul>\u003Ch3 id=\"8-group-nodes\">\u003Cstrong>8. Group Nodes\u003C/strong>\u003C/h3>\u003Cp>Group nodes package multiple nodes into reusable, organized components.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Node Group\u003C/strong> - Encapsulates complex node setups + create a reusable “Rust Shader” used across multiple assets\u003C/li>\u003C/ul>\u003Ch3 id=\"9-layout-nodes\">\u003Cstrong>9. Layout Nodes\u003C/strong>\u003C/h3>\u003Cp>Layout nodes organize the node graph visually and do not affect rendering output.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Frame\u003C/strong> - Visually groups related nodes + frame all texture-related nodes together\u003C/li>\u003Cli>\u003Cstrong>Reroute\u003C/strong> - Redirects node connections for clarity + clean up overlapping noodle connections\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"the-next-level-scripting-your-shaders\">\u003Cstrong>The Next Level: Scripting Your Shaders\u003C/strong>\u003C/h2>\u003Cp>When you get comfortable connecting nodes manually, you can make wood, plastic, gold, or any kind of material. But \u003Cstrong>what if you have a scene with 500 unique objects, and you need to generate a random variation\u003C/strong> of a worn metal material for each one with some tweaks?\u003C/p>\u003Cp>This is where Python scripting becomes key. 
You can use it to ensure every material in your project follows the same node structure. You can write a script that says, \"Make this material red, but vary the hue slightly by a random number for every object.\"\u003C/p>\u003Cp>Let's get our hands dirty. We are going to write a Python script that creates a new material, adds a Principled BSDF, generates a noise texture to control the color, and links it all up.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/blender-shaders?ref=blog.cg-wire.com\">https://github.com/cgwire/blog-tutorials/tree/main/blender-shaders\u003C/a>\u003C/div>\u003C/div>\u003Cp>Open the \u003Cem>Scripting\u003C/em> tab in Blender, create a new text block, and follow along.\u003C/p>\u003Cp>First, we need to import the library and tell Blender we want to create a new material.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import random\n\nimport bpy\n\ndef create_procedural_material(mat_name):\n&nbsp;&nbsp;&nbsp;&nbsp;mat = bpy.data.materials.new(name=mat_name)\n\n&nbsp;&nbsp;&nbsp;&nbsp;mat.use_nodes = True\n&nbsp;&nbsp;&nbsp;&nbsp;nodes = mat.node_tree.nodes\n&nbsp;&nbsp;&nbsp;&nbsp;links = mat.node_tree.links\n\n&nbsp;&nbsp;&nbsp;&nbsp;nodes.clear()\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Now, let's add the nodes. 
Think of this as pulling items out of the \"Add\" menu programmatically:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">    node_output = nodes.new(type='ShaderNodeOutputMaterial')\n&nbsp;&nbsp;&nbsp;&nbsp;node_output.location = (400, 0)\n\n&nbsp;&nbsp;&nbsp;&nbsp;node_principled = nodes.new(type='ShaderNodeBsdfPrincipled')\n&nbsp;&nbsp;&nbsp;&nbsp;node_principled.location = (0, 0)\n\n&nbsp;&nbsp;&nbsp;&nbsp;node_principled.inputs['Roughness'].default_value = 0.2\n&nbsp;&nbsp;&nbsp;&nbsp;node_principled.inputs['Metallic'].default_value = 1.0\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Now, let's make it interesting. We will add a Noise Texture and a ColorRamp to generate a random color pattern.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">node_noise = nodes.new(type='ShaderNodeTexNoise')\n&nbsp;&nbsp;&nbsp;&nbsp;node_noise.location = (-600, 0)\n&nbsp;&nbsp;&nbsp;&nbsp;node_noise.inputs['Scale'].default_value = 15.0\n&nbsp;&nbsp;&nbsp;&nbsp;node_noise.inputs['Detail'].default_value = 10.0\n\n&nbsp;&nbsp;&nbsp;&nbsp;node_ramp = nodes.new(type='ShaderNodeValToRGB')\n&nbsp;&nbsp;&nbsp;&nbsp;node_ramp.location = (-300, 0)\n\n&nbsp;&nbsp;&nbsp;&nbsp;node_ramp.color_ramp.elements[0].color = (0.1, 0.1, 0.1, 1)\n\n&nbsp;&nbsp;&nbsp;&nbsp;rand_r = random.random()\n&nbsp;&nbsp;&nbsp;&nbsp;rand_g = random.random()\n&nbsp;&nbsp;&nbsp;&nbsp;rand_b = random.random()\n&nbsp;&nbsp;&nbsp;&nbsp;node_ramp.color_ramp.elements[1].color = (rand_r, rand_g, rand_b, 1)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>\u003C/p>\u003Cp>Finally, we have to wire them together and apply this new shader to the current context (the default cube):\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">    links.new(node_noise.outputs['Fac'], node_ramp.inputs['Fac'])\n\n&nbsp;&nbsp;&nbsp;&nbsp;links.new(node_ramp.outputs['Color'], node_principled.inputs['Base Color'])\n\n&nbsp;&nbsp;&nbsp;&nbsp;links.new(node_principled.outputs['BSDF'], 
node_output.inputs['Surface'])\n\n&nbsp;&nbsp;&nbsp;&nbsp;return mat\n\nmy_new_mat = create_procedural_material(\"SciFi_Metal_Random\")\n\nbpy.context.object.data.materials.append(my_new_mat)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Copy that code into your text editor and press \"Run Script\" (the Play button). Look at your active object. It is now a metallic surface with a noise pattern of a random color. Run it again (change the name in the function call), and you get a different color.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-99dc12fe-068b-40f7-9f10-ef0c5e000ba0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1268\" height=\"827\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-99dc12fe-068b-40f7-9f10-ef0c5e000ba0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-99dc12fe-068b-40f7-9f10-ef0c5e000ba0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-99dc12fe-068b-40f7-9f10-ef0c5e000ba0.png 1268w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Congratulations, \u003Cstrong>you just created a procedural material generator!\u003C/strong>\u003C/p>\u003Cp>Have a look at\u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/blender-shaders?ref=blog.cg-wire.com\" rel=\"noreferrer\"> \u003Cu>our corresponding Github repository\u003C/u>\u003C/a> to play with the code!\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Shaders are more than just coloring within the lines. They are the skin of your digital world. 
\u003Cstrong>They tell the story of the object\u003C/strong>: how old it is, where it has been, and what it is made of.\u003C/p>\u003Cp>By understanding the logic of shader nodes, \u003Cstrong>you can create anything from photorealistic skin to stylized cartoon fire\u003C/strong>. And by taking that leap into Python scripting, you unlock the ability to \u003Cstrong>work faster and smarter\u003C/strong>, automating the tedious parts of the job so you can focus on the art.\u003C/p>\u003Cp>But this is just one piece of the puzzle. You can change the surface, but what about the shape? The next logical step in your journey is \u003Cem>Geometry Nodes\u003C/em>. Just as Shader Nodes control the color and light procedurally, Geometry Nodes control the mesh and structure programmatically.\u003Ca href=\"https://blog.cg-wire.com/blender-scripting-geometry-nodes-2/\"> \u003Cu>Have a look at our dedicated article\u003C/u>\u003C/a> to create entire scenes from code!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":334,"comment_id":335,"feature_image":336,"featured":105,"visibility":10,"created_at":337,"updated_at":338,"custom_excerpt":339,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":340,"primary_tag":341,"url":342,"excerpt":339,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":343},"67a0028f-66b2-4116-ac34-040c8a14d052","695b7d1dc665470001df4d80","https://images.unsplash.com/photo-1664526936810-ec0856d31b92?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDd8fHNoYWRlciUyMG5vZGVzfGVufDB8fHx8MTc2NzYwMzU4M3ww&ixlib=rb-4.1.0&q=80&w=2000","2026-01-05T09:58:05.000+01:00","2026-03-26T09:56:11.000+01:00","Learn how Blender shaders really work, from node-based materials to procedural shading and Python-driven automation. 
This guide breaks down shader concepts, node types, and scripting techniques to help artists build flexible, production-ready materials.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-shaders-explained/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@guerrillabuzz?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">GuerrillaBuzz\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-shaders-explained","2026-01-05T10:35:18.000+01:00",{"title":329},"blender-shaders-explained","posts/blender-shaders-explained",[350,351],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"L9nHGKFoNkSSxbDZv_Z2mmZLxxHmhill232zPkpfpCE",{"id":354,"title":355,"authors":356,"body":7,"description":7,"extension":8,"html":358,"meta":359,"navigation":14,"path":370,"published_at":371,"seo":372,"slug":373,"stem":374,"tags":375,"__hash__":378,"uuid":360,"comment_id":361,"feature_image":362,"featured":105,"visibility":10,"created_at":363,"updated_at":364,"custom_excerpt":365,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":366,"primary_tag":367,"url":368,"excerpt":365,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":369},"ghost/posts:blender-programmatic-rendering.json","Programmatic Video Rendering in Blender Using Python 
(2026)",[357],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧠\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn Blender into a programmable rendering engine with just a few lines of Python.\u003C/div>\u003C/div>\u003Cp>Learning Blender as a 3D artist usually means learning about its addon ecosystem. Tasks that would take hours like rigging a character can be turned into seconds with addons like Rigify. The same goes for most workflows, and we often end up asking ourselves the same recurring question: \"Can Blender do this automatically?\"\u003C/p>\u003Cp>The answer is yes. The key is the programming language Python.\u003C/p>\u003Cp>Blender includes a powerful built-in scripting engine, and with just a few lines of code, you can create objects, position cameras, and even trigger full renders.\u003C/p>\u003Cp>You won't need to pay for an addon if you know how to build one yourself. And at its core, an addon is just a script wrapped in a custom Blender user interface.\u003C/p>\u003Cp>If you've never scripted in Blender before, discovering the \u003Ccode>bpy\u003C/code> module feels like opening a secret door inside a tool you thought you already knew: suddenly, every part of the interface becomes programmable. You're not just clicking buttons anymore but giving instructions to build repeatable systems.\u003C/p>\u003Cp>One of the most important workflows you can automate is rendering. Not only to make your pipeline faster but also to help keep rendering settings consistent and predictable. In this tutorial, we'll implement a basic programmatic rendering system to automatically animate a 3D text and turn it into a full HD video. 
We'll start from zero, exploring how to run Python for Blender and how to use it to control the scene. By the end, you'll have a good overview of how to automate common animation tasks.\u003C/p>\u003Chr>\u003Ch2 id=\"use-cases\">\u003Cstrong>Use Cases\u003C/strong>\u003C/h2>\u003Cp>Programmatic rendering unlocks a wide range of powerful workflows that go far beyond traditional manual scene building:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Data-driven motion graphics\u003C/strong> — Animated charts, realtime API-driven broadcast graphics, or automatically generated social videos.\u003C/li>\u003Cli>\u003Cstrong>Generative art\u003C/strong> — Procedural patterns, noise fields, particle experiments, and algorithmic illustrations that evolve from code.\u003C/li>\u003Cli>\u003Cstrong>Batch-rendered variants\u003C/strong> — Personalized ads, product color variations, automated aspect-ratio crops, and bulk social asset generation.\u003C/li>\u003Cli>\u003Cstrong>Procedural 3D content\u003C/strong> — Terrain builders, parametric modeling, foliage/world population, and automated 3D asset variations.\u003C/li>\u003Cli>\u003Cstrong>Generative UI &amp; design systems\u003C/strong> — Dynamic SVGs, templated banners, and brand-consistent graphics rendered on demand.\u003C/li>\u003Cli>\u003Cstrong>VFX and animation scripting\u003C/strong> — Automated rig controls, crowd systems, particle population, and repeatable simulation setups.\u003C/li>\u003Cli>\u003Cstrong>Simulation visualizations\u003C/strong> — Fluid and smoke simulations, traffic and crowd dynamics, and scientific or physics-based renders.\u003C/li>\u003C/ul>\u003Cp>Many 3D modeling tasks are repetitive and time-consuming. 
By integrating them into an automated, script-driven pipeline, artists can focus more on creative worldbuilding while Python handles the tedious parts in the background.\u003C/p>\u003Cp>In any case, the development workflow is pretty much the same:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Setup\u003C/strong> - define needed input data and scene cleanup\u003C/li>\u003Cli>\u003Cstrong>Geometry generation\u003C/strong> - modeling the actual assets needed for the task\u003C/li>\u003Cli>\u003Cstrong>Animation\u003C/strong> - defining the transforms and their associated keyframes\u003C/li>\u003Cli>\u003Cstrong>Output\u003C/strong> - the desired assets (3D models, video, image sequence, etc.)\u003C/li>\u003C/ol>\u003Cp>This is exactly the path we're going to take for our 3D text video rendering example.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-programmatic-rendering?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-programmatic-rendering\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-scene-setup\">\u003Cstrong>1. Scene Setup\u003C/strong>\u003C/h2>\u003Cp>Before we dive into generating scenes, we first need a clean starting point. When you open Blender, it loads a default scene usually containing a cube, a camera, and a light. For this tutorial, we'll only need the latter two.\u003C/p>\u003Cp>The first step in using Blender programmatically is importing the \u003Ccode>bpy\u003C/code> module. 
This gives you full access to Blender's data, tools, and rendering pipeline directly from Python:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.data.objects.remove(bpy.data.objects.get(\"Cube\"), do_unlink=True)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Here, we remove the default \u003Cstrong>Cube\u003C/strong> object. The \u003Ccode>do_unlink=True\u003C/code> parameter makes sure Blender not only deletes the object but also unlinks it from any scene that might reference it.\u003C/p>\u003Chr>\u003Ch2 id=\"2-manipulating-3d-text\">\u003Cstrong>2. Manipulating 3D Text\u003C/strong>\u003C/h2>\u003Cp>Next, we add a 3D text object to the scene to serve as the core element we'll manipulate and eventually render programmatically.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.object.text_add(location=(0, 0, 0))\ntext_obj = bpy.context.object\ntext_obj.name = \"CaptionText\"\ntext_obj.data.body = \"Hello world!\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>This code snippet creates a new text object at the world origin, assigns it a readable name, and sets its displayed text to \u003Ccode>\"Hello world!\"\u003C/code>.\u003C/p>\u003Cp>To give the text more presence in the scene, we can adjust its geometry. 
Increasing the size and adding extrusion make the text fully 3D, and centering it on both axes simplifies future transformations and animations:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">text_obj.data.size = 0.6\ntext_obj.data.extrude = 0.05\ntext_obj.data.align_x = \"CENTER\"\ntext_obj.data.align_y = \"CENTER\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>With these adjustments, the text is cleanly centered, properly scaled, and ready for further processing.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-8cb519b5-e128-4bdd-9348-9aa0dfe2c36c.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"901\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-8cb519b5-e128-4bdd-9348-9aa0dfe2c36c.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-8cb519b5-e128-4bdd-9348-9aa0dfe2c36c.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-8cb519b5-e128-4bdd-9348-9aa0dfe2c36c.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-adding-keyframes\">\u003Cstrong>3. 
Adding Keyframes\u003C/strong>\u003C/h2>\u003Cp>We\u003Ca href=\"https://blog.cg-wire.com/stepped-animation/\"> \u003Cu>create a simple animation by inserting keyframes\u003C/u>\u003C/a> for the text position over time.\u003C/p>\u003Cp>First, we move our timeline cursor to frame 1, position the text at the starting location, and record that position with a keyframe:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.frame_set(1)\ntext_obj.location = (-4.0, 0.0, 1.0)\ntext_obj.keyframe_insert(data_path=\"location\", frame=1)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Next, we advance to frame 40, shift the text along the X axis, and insert another keyframe to mark its new position:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.frame_set(40)\ntext_obj.location = (0.0, 0.0, 1.0)\ntext_obj.keyframe_insert(data_path=\"location\", frame=40)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>With these two keyframes in place, Blender automatically interpolates the movement between them, creating a smooth animation as the text glides into the center of the frame.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-c33d7b37-264c-4c9f-a1ea-e8f2e2a39ff2.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"901\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-c33d7b37-264c-4c9f-a1ea-e8f2e2a39ff2.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-c33d7b37-264c-4c9f-a1ea-e8f2e2a39ff2.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-c33d7b37-264c-4c9f-a1ea-e8f2e2a39ff2.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 
id=\"4-video-rendering\">\u003Cstrong>4. Video Rendering\u003C/strong>\u003C/h2>\u003Cp>All we have left to do is\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>configure Blender's rendering settings\u003C/u>\u003C/a> and output the final video.\u003C/p>\u003Cp>The first choice is which rendering engine to use: \u003Cstrong>Eevee\u003C/strong> or \u003Cstrong>Cycles\u003C/strong>.\u003C/p>\u003Cp>Eevee is a real-time rasterization engine, making it extremely fast and ideal for previews or stylized animation. Cycles, on the other hand, is a physically based path tracer that produces more realistic lighting but requires much longer render times. For quick iteration and most automated workflows, Eevee is generally the better option:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.engine = \"BLENDER_EEVEE\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Next, we specify the output resolution:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.resolution_x = 1920\nbpy.context.scene.render.resolution_y = 1080\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Then we set the frame rate and define the animation range. Here, a 60-frame shot at 24 fps:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.fps = 24\nbpy.context.scene.frame_start = 1\nbpy.context.scene.frame_end = 60\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Blender also needs to know how to encode the final video. 
We'll export it as an MP4 using H.264 video encoding for rendering speed:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.image_settings.file_format = \"FFMPEG\"\nbpy.context.scene.render.ffmpeg.format = \"MPEG4\"\nbpy.context.scene.render.ffmpeg.codec = \"H264\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Finally, we choose where the output file will be written using the current folder for convenience:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.filepath = \"//render.mp4\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>With everything configured, we can start the render process with a single command:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.render.render(animation=True)\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"5-putting-it-all-together\">\u003Cstrong>5. Putting it all together\u003C/strong>\u003C/h2>\u003Cp>Our code is complete and we just need to put it into a Python file \u003Ccode>render.py\u003C/code>:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.data.objects.remove(bpy.data.objects.get(\"Cube\"), do_unlink=True)\n\nbpy.ops.object.text_add(location=(0, 0, 0))\ntext_obj = bpy.context.object\ntext_obj.name = \"CaptionText\"\ntext_obj.data.body = \"Hello world!\"\n\ntext_obj.data.size = 0.6\ntext_obj.data.extrude = 0.05\ntext_obj.data.align_x = \"CENTER\"\ntext_obj.data.align_y = \"CENTER\"\n\nbpy.context.scene.frame_set(1)\ntext_obj.location = (-4.0, 0.0, 1.0)\ntext_obj.keyframe_insert(data_path=\"location\", frame=1)\n\nbpy.context.scene.frame_set(40)\ntext_obj.location = (0.0, 0.0, 1.0)\ntext_obj.keyframe_insert(data_path=\"location\", frame=40)\n\nbpy.context.scene.render.engine = \"BLENDER_EEVEE\"\nbpy.context.scene.render.resolution_x = 1920\nbpy.context.scene.render.resolution_y = 1080\nbpy.context.scene.render.resolution_percentage = 100\nbpy.context.scene.render.fps = 24\nbpy.context.scene.frame_start = 
1\nbpy.context.scene.frame_end = 60\n\nbpy.context.scene.render.image_settings.file_format = \"FFMPEG\"\nbpy.context.scene.render.ffmpeg.format = \"MPEG4\"&nbsp; # container\nbpy.context.scene.render.ffmpeg.codec = \"H264\"\nbpy.context.scene.render.ffmpeg.constant_rate_factor = \"HIGH\"\nbpy.context.scene.render.ffmpeg.gopsize = 12\nbpy.context.scene.render.ffmpeg.audio_codec = \"AAC\"\nbpy.context.scene.render.filepath = \"//render.mp4\"\n\nbpy.ops.render.render(animation=True)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Now, run the script to start rendering:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 render.py\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Once the render finishes, check your working directory and your fully programmatically generated animation should now be ready to view.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-2b287259-a96b-456b-b95e-375bf116e3a1.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1088\" height=\"722\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-2b287259-a96b-456b-b95e-375bf116e3a1.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-2b287259-a96b-456b-b95e-375bf116e3a1.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-2b287259-a96b-456b-b95e-375bf116e3a1.png 1088w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">🔗\u003C/div>\u003Cdiv class=\"kg-callout-text\">You can find our code in a Github repository for easy reproducibility:\u003Ca href=\"https://github.com/cgwire/blender-programmatic-rendering?ref=blog.cg-wire.com\"> 
\u003Cu>github.com/cgwire/blender-programmatic-rendering\u003C/u>\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>In this walkthrough, you built a complete automated pipeline inside Blender: setting up a clean scene, creating and modifying 3D text, animating it with keyframes, and rendering the sequence with smooth interpolation. All of it handled through Python with no manual adjustments needed!\u003C/p>\u003Cp>Now that you've seen how much control the Blender API provides, you can take these ideas much further: automate your workflows, generate graphics from data, build internal tools that assemble scenes, render variations, or create entire animations with a single command... the list to help your animation studio become more productive never ends.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":360,"comment_id":361,"feature_image":362,"featured":105,"visibility":10,"created_at":363,"updated_at":364,"custom_excerpt":365,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":366,"primary_tag":367,"url":368,"excerpt":365,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":369},"4457d779-ae8e-4ed7-9398-91772c0996c0","6948dba20bfbc7000190a8bf","https://images.unsplash.com/photo-1622547748225-3fc4abd2cca0?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fHJlbmRlcnN8ZW58MHx8fHwxNzY2MzgyNjA1fDA&ixlib=rb-4.1.0&q=80&w=2000","2025-12-22T06:48:18.000+01:00","2026-02-20T06:04:02.000+01:00","Learn how to automate animation and video rendering in Blender using Python. 
This tutorial covers scene setup, 3D text generation, keyframe animation, and programmatic rendering to build repeatable, script-driven workflows.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-programmatic-rendering/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@sebastiansvenson?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Sebastian Svenson\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-programmatic-rendering","2025-12-29T10:00:10.000+01:00",{"title":355},"blender-programmatic-rendering","posts/blender-programmatic-rendering",[376,377],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"vOpwec7s0eruEbUu1OcdDfl9ESqnn1LglPRNKNn4kgw",{"id":380,"title":381,"authors":382,"body":7,"description":7,"extension":8,"html":384,"meta":385,"navigation":14,"path":397,"published_at":398,"seo":399,"slug":400,"stem":401,"tags":402,"__hash__":405,"uuid":386,"comment_id":387,"feature_image":388,"featured":105,"visibility":10,"created_at":389,"updated_at":390,"custom_excerpt":391,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":392,"primary_tag":393,"url":394,"excerpt":391,"reading_time":395,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":396},"ghost/posts:blender-kitsu-versioning-addon.json","Managing Blender File Revisions with a Kitsu Versioning Addon 
(2026)",[383],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧱\u003C/div>\u003Cdiv class=\"kg-callout-text\">Replace chaotic file naming with a single source of truth for Blender revisions.\u003C/div>\u003C/div>\u003Cp>Every project begins with good intentions. You start with a clean \u003Ccode>model.blend\u003C/code>, organized folders, and the promise that this time you’ll keep things tidy.\u003C/p>\u003Cp>But as deadlines tighten, the quiet entropy of production sets in. Before long, your project directory starts to resemble an archaeological dig site of panicked last-minute edits:\u003C/p>\u003Cpre>\u003Ccode>model.blend\nmodel_v2.blend\nmodel_v2b.blend\nmodel_final.blend\nmodel_final_really_final.blend\nmodel_FINAL_v3.blend\u003C/code>\u003C/pre>\u003Cp>You know how it happens: someone needs a quick change, another artist branches off a version \"just in case,\" and soon no one is entirely certain which file is \"the real one.\" Comments in chat threads contradict filenames, shots render from outdated versions, and the supervisor sighs deeply.\u003C/p>\u003Cp>In an animation studio, these micro-chaos moments add up. That’s where a proper source of truth needs to enter the story.\u003C/p>\u003Cp>For many teams, that source is Kitsu. 
And for Blender artists, the missing piece is an automated bridge that keeps files versioned, traceable, and aligned with the project’s production data.\u003C/p>\u003Cp>So you decide to take control: you’re going to make Blender talk to Kitsu and build a versioning system that makes your pipeline feel like it finally has your back.\u003C/p>\u003Cp>In this tutorial, we’ll create an addon that manages file revisions directly from Blender. You’ll be able to connect Blender to a Kitsu project, create and upload revisions of your 3D models, view all existing revisions, and pull older revisions back into Blender.\u003C/p>\u003Chr>\u003Ch2 id=\"workflow-overview\">\u003Cstrong>Workflow Overview\u003C/strong>\u003C/h2>\u003Cp>In a typical Kitsu-driven workflow, an artist opens a Blender scene, does their work, hits a milestone, and uploads a revision. Artists review, iterate, revise, and upload again. Kitsu keeps every step neatly.\u003C/p>\u003Cp>But it wouldn't hurt if you could just upload or pull revisions with a click, right?\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Start in Blender\u003C/strong> - We open our working scene—modeling, shading, rigging, whatever the task at hand demands.\u003C/li>\u003Cli>\u003Cstrong>Checkpoint the work\u003C/strong> - When we hit a milestone (\"blocking complete,\" \"ready for review\"), we create a new revision in Kitsu.\u003C/li>\u003Cli>\u003Cstrong>Review the history\u003C/strong> - Kitsu stores all revisions, giving supervisors a clear timeline and letting you compare versions without digging through files.\u003C/li>\u003Cli>\u003Cstrong>Pull new changes\u003C/strong> - When we need a different version, we can just click to pull in an asset in our current workspace.\u003C/li>\u003C/ol>\u003Cp>This is a very basic workflow, so we are bound to run into problems like how to handle conflict resolution (what if two artists work on the same shot and create a new revision each, how do we handle this?), but it's good enough to give us a 
functional addon we can improve later on to fit our animation pipeline needs.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-kitsu-versioning-addon?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-kitsu-versioning-addon\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-populating-the-kitsu-dashboard\">\u003Cstrong>1. Populating The Kitsu Dashboard\u003C/strong>\u003C/h2>\u003Cp>Kitsu’s web interface is designed so producers, coordinators, or leads can quickly set up the structure of a project. Before Blender artists can publish revisions, we need to populate our production with work-in-progress assets. 
In\u003Ca href=\"https://blog.cg-wire.com/dcc-integration-blender-kitsu/\"> \u003Cu>the Kitsu Docker instance for local development\u003C/u>\u003C/a>:\u003C/p>\u003Col>\u003Cli>Log into the \u003Cstrong>Kitsu dashboard\u003C/strong>.\u003C/li>\u003Cli>In the main navigation bar, go to \u003Cstrong>Productions\u003C/strong>.\u003C/li>\u003Cli>Click \u003Cstrong>\"Create production\"\u003C/strong> (usually top-right corner).\u003C/li>\u003Cli>Fill in the production details\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-58cb0571-2b74-4110-9b07-9e15030bbd05.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"985\" height=\"694\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-58cb0571-2b74-4110-9b07-9e15030bbd05.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-58cb0571-2b74-4110-9b07-9e15030bbd05.png 985w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>The new production will appear in the list, and you can open it to begin adding assets.\u003C/p>\u003Cp>Assets are the building blocks of your project: characters, props, environments, vehicles... 
anything that needs production tracking.\u003C/p>\u003Col>\u003Cli>Go to \u003Cstrong>Productions → Your Production Name\u003C/strong>.\u003C/li>\u003Cli>Switch to the \u003Cstrong>Assets\u003C/strong> tab within the production.\u003C/li>\u003Cli>Click \u003Cstrong>\"Create Asset\"\u003C/strong>.\u003C/li>\u003Cli>Enter an \u003Cstrong>Asset Name\u003C/strong> (e.g., \"RobotHead\") and \u003Cstrong>Asset Type\u003C/strong> (Character, Prop, Set, etc.)\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-f4336c33-57ef-4baa-9715-e0c749f7d9b4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1270\" height=\"870\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-f4336c33-57ef-4baa-9715-e0c749f7d9b4.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-f4336c33-57ef-4baa-9715-e0c749f7d9b4.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-f4336c33-57ef-4baa-9715-e0c749f7d9b4.png 1270w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Your asset now exists and has 3 tasks assigned to it.&nbsp;\u003C/p>\u003Cp>Tasks define the workflow steps (Modeling, Shading, Rigging, etc.) that artists will perform on each asset.\u003C/p>\u003Cp>We now have everything we need to test our addon.\u003C/p>\u003Chr>\u003Ch2 id=\"2-linking-the-current-blender-project-to-a-kitsu-task\">\u003Cstrong>2. 
Linking the Current Blender Project to a Kitsu Task\u003C/strong>\u003C/h2>\u003Cp>We start with a minimal addon declaration that defines the UI location, loads \u003Ccode>gazu\u003C/code>, and prepares the data we’ll expose in dropdown menus:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bl_info = {\n&nbsp;&nbsp;&nbsp;&nbsp;\"name\": \"Model Versioning (Production/Task/Asset/Revisions)\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"author\": \"cgwire\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"version\": (1, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"blender\": (2, 80, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"location\": \"View3D &gt; Sidebar &gt; ModelVersioning\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"description\": \"Browse productions, tasks, assets, and manage revisions (list/create/load)\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"category\": \"3D View\",\n}\n\nimport sys\n\nsys.path.append(\"~/.local/lib/python3.11/site-packages\")\n\nimport os\nimport tempfile\n\nimport bpy\nimport gazu\nfrom bpy.props import EnumProperty, PointerProperty\nfrom bpy.types import Operator, Panel, PropertyGroup\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Note that \u003Ccode>sys.path.append(\"~/.local/lib/python3.11/site-packages\")\u003C/code> allows us to use our local Python installation to access external packages like \u003Ccode>gazu\u003C/code>. By default, Blender runs its own Python environment, so installing packages can be cumbersome. To solve this, we just tell Blender to have a look at our local modules. Update this path accordingly to match your system configuration.\u003C/p>\u003Cp>Before we can automate versioning, Blender needs to know \u003Cem>where\u003C/em> in Kitsu the current model belongs. 
That means identifying the project, the asset, the task, and eventually the revisions associated with it.\u003C/p>\u003Cp>The first step is simple: authenticate with Kitsu, retrieve available productions, and let the artist pick the context directly from the Sidebar UI.\u003C/p>\u003Cp>Once the addon loads, we authenticate and point the addon at the Kitsu API host:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">gazu.set_host(\"&lt;http://localhost/api&gt;\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\ntemp_dir_path = tempfile.gettempdir()\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>This establishes the session we’ll use to browse productions, find tasks, and eventually create revisions.\u003C/p>\u003Cp>From here, we can begin exposing the production structure. With helper functions for project, asset, task, and revision lookup, we populate each dropdown dynamically:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">def find_project(name):\n&nbsp;&nbsp;&nbsp;&nbsp;return gazu.project.get_project_by_name(name)\n\ndef find_asset(project, name):\n&nbsp;&nbsp;&nbsp;&nbsp;return gazu.asset.get_asset_by_name(project, name)\n\ndef find_task(asset, type_id):\n&nbsp;&nbsp;&nbsp;&nbsp;return gazu.task.get_task_by_name(asset, type_id, \"main\")\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Each \u003Ccode>EnumProperty\u003C/code> callback pulls fresh data from Kitsu:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">def enum_projects(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;items = []\n&nbsp;&nbsp;&nbsp;&nbsp;projects = gazu.project.all_projects()\n&nbsp;&nbsp;&nbsp;&nbsp;for p in projects:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((p[\"name\"], p[\"name\"], \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;if not items:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((\"NONE\", \"--- no productions ---\", \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;return items\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Assets, tasks, and 
revisions follow the same pattern:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">def enum_assets(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;items = []\n&nbsp;&nbsp;&nbsp;&nbsp;if project:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;assets = gazu.asset.all_assets_for_project(project)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for t in assets:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((t[\"name\"], t[\"name\"], \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;if not items:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((\"NONE\", \"--- no tasks ---\", \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;return items\n\ndef enum_tasks(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;asset = find_asset(project, context.scene.mv_state.asset)\n&nbsp;&nbsp;&nbsp;&nbsp;items = []\n&nbsp;&nbsp;&nbsp;&nbsp;if asset:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;tasks = gazu.task.all_tasks_for_asset(asset)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for t in tasks:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((t[\"task_type_id\"], t[\"task_type_name\"], \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;if not items:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((\"NONE\", \"--- no tasks ---\", \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;return items\n\ndef enum_revisions(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;asset = find_asset(project, context.scene.mv_state.asset)\n&nbsp;&nbsp;&nbsp;&nbsp;task = find_task(asset, context.scene.mv_state.task)\n&nbsp;&nbsp;&nbsp;&nbsp;items = []\n&nbsp;&nbsp;&nbsp;&nbsp;if task:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;revisions = gazu.files.get_all_preview_files_for_task(task)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for r 
in revisions:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((str(r[\"revision\"]), str(r[\"revision\"]), \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;if not items:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((\"NONE\", \"--- no revisions ---\", \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;return items\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Finally, we store all UI selections in a single state object:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class MV_State(PropertyGroup):\n&nbsp;&nbsp;&nbsp;&nbsp;project: EnumProperty(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;name=\"Project\", description=\"Select project\", items=enum_projects\n&nbsp;&nbsp;&nbsp;&nbsp;)\n&nbsp;&nbsp;&nbsp;&nbsp;asset: EnumProperty(name=\"Asset\", description=\"Select asset\", items=enum_assets)\n&nbsp;&nbsp;&nbsp;&nbsp;task: EnumProperty(name=\"Task\", description=\"Select task\", items=enum_tasks)\n&nbsp;&nbsp;&nbsp;&nbsp;revision: EnumProperty(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;name=\"Revision\", description=\"Select revision\", items=enum_revisions\n&nbsp;&nbsp;&nbsp;&nbsp;)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>This is the foundation of our pipeline integration: Blender now knows how to browse Kitsu and bind itself to the exact task the artist is working on. From here, we can start working on the revision lifecycle.\u003C/p>\u003Chr>\u003Ch2 id=\"3-creating-a-new-revision-button\">\u003Cstrong>3. Creating a \"New Revision\" Button\u003C/strong>\u003C/h2>\u003Cp>We can start automating the part artists interact with most: creating new revisions. In a typical manual workflow, you’d export your file and upload it in Kitsu to the correct task. Our addon will streamline this into a single button press inside Blender.\u003C/p>\u003Cp>Kitsu handles new revisions through \u003Ccode>publish_preview()\u003C/code>. 
This call sends both the file and metadata:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">temp_file_path = os.path.join(temp_dir_path, \"new_version.glb\")\n\nbpy.ops.export_scene.gltf(filepath=temp_file_path, export_format=\"GLB\")\n\n(comment, preview_file) = gazu.task.publish_preview(\n&nbsp;&nbsp;&nbsp;&nbsp;task,\n&nbsp;&nbsp;&nbsp;&nbsp;task_status,\n&nbsp;&nbsp;&nbsp;&nbsp;revision=new_revision,\n&nbsp;&nbsp;&nbsp;&nbsp;comment=\"increment revision\",\n&nbsp;&nbsp;&nbsp;&nbsp;preview_file_path=temp_file_path,\n)\n\nos.remove(temp_file_path)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>In our addon, we’ll trigger this from a button in the Sidebar.\u003C/p>\u003Cp>The operator performs three main steps: grab the user’s selections from the addon's state, compute the next revision number, and upload the exported file as the new revision:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class MV_OT_create_revision(Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"mv.create_revision\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Create Revision\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def invoke(self, context, event):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;wm = context.window_manager\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return wm.invoke_props_dialog(self, width=400)\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;asset = find_asset(project, context.scene.mv_state.asset)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task = find_task(asset, context.scene.mv_state.task)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;revision = context.scene.mv_state.revision\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;new_revision = int(revision) + 1\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task_status = 
gazu.task.get_task_status_by_name(\"todo\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;temp_file_path = os.path.join(temp_dir_path, \"new_version.glb\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.export_scene.gltf(filepath=temp_file_path, export_format=\"GLB\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;(comment, preview_file) = gazu.task.publish_preview(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task_status,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;revision=new_revision,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;comment=\"increment revision\",\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;preview_file_path=temp_file_path,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;os.remove(temp_file_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({\"INFO\"}, \"Revision created\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {\"FINISHED\"}\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"4-pulling-a-revision-into-blender\">\u003Cstrong>4. Pulling a Revision into Blender\u003C/strong>\u003C/h2>\u003Cp>Versioning isn’t just about publishing your work, it's also about being able to \u003Cem>go back\u003C/em>. 
Whether you’re reviewing earlier stages, comparing topology, or recovering a detail from a previous iteration, you need a quick, reliable way to load new and older revisions into Blender.\u003C/p>\u003Cp>Once a task is selected, pulling a revision from Kitsu becomes a simple two-step operation: download the preview file associated with the selected revision, and import it into Blender.\u003C/p>\u003Cp>After fetching all preview files for the current task, we can target the revision by index and bring the asset directly into Blender:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">temp_file_path = os.path.join(temp_dir_path, \"new_version.glb\")\n\npreview_file = preview_files[int(revision) - 1]\ngazu.files.download_preview_file(preview_file, temp_file_path)\nbpy.ops.import_scene.gltf(filepath=temp_file_path)\n\nos.remove(temp_file_path)\u003C/code>\u003C/pre>\u003Cp>This gives us a consistent way to retrieve assets exactly as they were at that point in production.\u003C/p>\u003Cp>We encapsulate this workflow inside an operator that mirrors the structure of the Create Revision button:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class MV_OT_load_revision(Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"mv.load_revision\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Load Revision\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;asset = find_asset(project, context.scene.mv_state.asset)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task = find_task(asset, context.scene.mv_state.task)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;revision = context.scene.mv_state.revision\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;preview_files = gazu.files.get_all_preview_files_for_task(task)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;temp_file_path = os.path.join(temp_dir_path, 
\"new_version.glb\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;preview_file = preview_files[int(revision) - 1]\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;gazu.files.download_preview_file(preview_file, temp_file_path)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.import_scene.gltf(filepath=temp_file_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;os.remove(temp_file_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({\"INFO\"}, \"Opened Revision\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {\"FINISHED\"}\u003C/code>\u003C/pre>\u003Cp>This operator makes it trivial for artists to browse and load any version stored in Kitsu without leaving Blender.\u003C/p>\u003Chr>\u003Ch2 id=\"5-registering-the-addon\">\u003Cstrong>5. Registering The Addon\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/\">\u003Cu>The panel now ties the whole revision workflow together\u003C/u>\u003C/a>:\u003C/p>\u003Cul>\u003Cli>Select the project\u003C/li>\u003Cli>Choose the asset\u003C/li>\u003Cli>Pick the task\u003C/li>\u003Cli>Browse revisions\u003C/li>\u003Cli>Create or load versions with a single click\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">class MV_PT_panel(Panel):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Model Versioning\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"MV_PT_panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_space_type = \"VIEW_3D\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_region_type = \"UI\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_category = \"ModelVersion\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def draw(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout = self.layout\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;scene = context.scene\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;mv = scene.mv_state\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.label(text=\"Project\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(mv, \"project\", 
text=\"\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.label(text=\"Asset\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(mv, \"asset\", text=\"\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.label(text=\"Task\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(mv, \"task\", text=\"\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.label(text=\"Revision\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(mv, \"revision\", text=\"\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;row = layout.row(align=True)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;row.operator(\"mv.create_revision\", text=\"Create Revision\", icon=\"ADD\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.operator(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;\"mv.load_revision\", text=\"Load Selected Revision\", icon=\"IMPORT\"\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Finally, we register the operators, panel, and state so Blender knows how to construct the UI:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">classes = (\n&nbsp;&nbsp;&nbsp;&nbsp;MV_State,\n&nbsp;&nbsp;&nbsp;&nbsp;MV_OT_create_revision,\n&nbsp;&nbsp;&nbsp;&nbsp;MV_OT_load_revision,\n&nbsp;&nbsp;&nbsp;&nbsp;MV_PT_panel,\n)\n\ndef register():\n&nbsp;&nbsp;&nbsp;&nbsp;for c in classes:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(c)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.types.Scene.mv_state = PointerProperty(type=MV_State)\n\ndef unregister():\n&nbsp;&nbsp;&nbsp;&nbsp;for c in 
reversed(classes):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(c)\n&nbsp;&nbsp;&nbsp;&nbsp;if hasattr(bpy.types.Scene, \"mv_state\"):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;del bpy.types.Scene.mv_state\n\nif __name__ == \"__main__\":\n&nbsp;&nbsp;&nbsp;&nbsp;register()\u003C/code>\u003C/pre>\u003Cp>At this point, the model versioning workflow is fully bidirectional: you can publish new revisions from Blender and retrieve earlier ones instantly.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-00e861e7-3b2e-4bdc-80b8-1af740cab480.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"759\" height=\"488\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-00e861e7-3b2e-4bdc-80b8-1af740cab480.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-00e861e7-3b2e-4bdc-80b8-1af740cab480.png 759w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>With just a handful of Blender API operators and the convenience of the Gazu SDK, we’ve built a practical (yet basic) versioning workflow that lives directly inside Blender and stays in sync with Kitsu. Artists can link their Blender scene to a Kitsu project, asset, and task, create new revisions with a single button press, browse the full revision history for any task, and pull older versions straight into Blender whenever they need to compare or recover work.\u003C/p>\u003Cp>This workflow is only the beginning. 
From here, you could expand the addon with automated exports, thumbnail or turntable renders, support for multiple output formats, supervisor review tools, or even hooks into a render farm.\u003C/p>\u003Cp>To get you started, make sure to clone\u003Ca href=\"https://github.com/cgwire/blender-kitsu-versioning-addon?ref=blog.cg-wire.com\"> \u003Cu>our Github repository\u003C/u>\u003C/a> for this versioning addon and try it out yourself!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>",{"uuid":386,"comment_id":387,"feature_image":388,"featured":105,"visibility":10,"created_at":389,"updated_at":390,"custom_excerpt":391,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":392,"primary_tag":393,"url":394,"excerpt":391,"reading_time":395,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":396},"4ee5e3ab-dd50-4121-99cb-c59d96c2eb7d","6948ca070bfbc7000190a884","https://images.unsplash.com/photo-1617746533234-288e5cf484e2?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDMwfHxhbmltYXRpb24lMjBwaXBlbGluZXxlbnwwfHx8fDE3NjYzODE5ODZ8MA&ixlib=rb-4.1.0&q=80&w=2000","2025-12-22T05:33:11.000+01:00","2026-02-20T06:04:01.000+01:00","Learn how to build a Blender addon that connects to Kitsu to manage asset revisions. 
This tutorial covers creating, browsing, and loading file versions directly from Blender, keeping production files traceable and in sync with studio workflows.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-kitsu-versioning-addon/",12,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@jaspergarrattphotography?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Jasper Garratt\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-kitsu-versioning-addon","2025-12-22T10:00:20.000+01:00",{"title":381},"blender-kitsu-versioning-addon","posts/blender-kitsu-versioning-addon",[403,404],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"RvLHyMPCRMLBkkSF0lzBXOx7AHvfvlghiFKTD38-uwg",{"id":407,"title":408,"authors":409,"body":7,"description":7,"extension":8,"html":411,"meta":412,"navigation":14,"path":422,"published_at":423,"seo":424,"slug":425,"stem":426,"tags":427,"__hash__":430,"uuid":413,"comment_id":414,"feature_image":415,"featured":105,"visibility":10,"created_at":416,"updated_at":390,"custom_excerpt":417,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":418,"primary_tag":419,"url":420,"excerpt":417,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":421},"ghost/posts:blender-kitsu-low-res-preview.json","Automating Low-Res Animation Previews in Blender with Kitsu 
(2026)",[410],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⚡\u003C/div>\u003Cdiv class=\"kg-callout-text\">Speed up animation reviews with lightweight previews that render in seconds, not hours.\u003C/div>\u003C/div>\u003Cp>Waiting for full-resolution renders just to review a shot slows down the entire production. Artists spend time waiting and supervisors get delayed feedback. The iteration loop is inefficient.\u003C/p>\u003Cp>To address this, we can create low-resolution animation previews directly in Blender and auto-upload them to Kitsu using Python as a part of our animation pipeline. These previews are fast to render, easy to review, and can be quickly used in Kitsu for approval.\u003C/p>\u003Cp>This is a big deal because full-resolution renders can take hours, and the cloud storage and network bandwidth costs are no joke when you're dealing with thousands of shots. 
Going from 1080p to 480p can divide the size by up to 5x!\u003C/p>\u003Cp>In this tutorial, we’ll cover how to:\u003C/p>\u003Cul>\u003Cli>Adjust Blender render settings for low-resolution previews\u003C/li>\u003Cli>Automate the render process using Python\u003C/li>\u003Cli>Use \u003Ccode>ffmpeg\u003C/code> to watermark and timestamp the video for fast contextualization\u003C/li>\u003Cli>Export videos and upload them to Kitsu\u003C/li>\u003C/ul>\u003Cp>By the end, you’ll have a script that saves time on shot reviews without sacrificing feedback quality.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-kitsu-low-res-preview?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-kitsu-low-res-preview\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-simple-blender-scene-setup\">\u003Cstrong>1. Simple Blender Scene Setup\u003C/strong>\u003C/h2>\u003Cp>Before we can create an animated preview, we need a starting object in the scene. 
For this tutorial, we’ll use Blender’s default cube.\u003C/p>\u003Cp>First, we create a reference of the scene and the cube:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\ncube = bpy.data.objects[\"Cube\"]\nscene = bpy.context.scene\u003C/code>\u003C/pre>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-bf950a7a-c387-4b8d-9318-49e5bd3251bd.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"901\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-bf950a7a-c387-4b8d-9318-49e5bd3251bd.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-bf950a7a-c387-4b8d-9318-49e5bd3251bd.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-bf950a7a-c387-4b8d-9318-49e5bd3251bd.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"2-adding-keyframes-for-animation\">\u003Cstrong>2. Adding Keyframes for Animation\u003C/strong>\u003C/h2>\u003Cp>The next step is animating our cube. For quick modeling previews, short sequences are ideal. Here, we’ll create a \u003Cstrong>360° rotation\u003C/strong> over 48 frames (2 seconds at 24 FPS):\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">for frame, angle in [(1, 0), (12, 1.57), (24, 3.14), (36, 4.71), (48, 6.28)]:\n&nbsp;&nbsp;&nbsp;&nbsp;scene.frame_set(frame)\n&nbsp;&nbsp;&nbsp;&nbsp;cube.rotation_euler[2] = angle\n&nbsp;&nbsp;&nbsp;&nbsp;cube.keyframe_insert(data_path=\"rotation_euler\", index=2)\u003C/code>\u003C/pre>\u003Cp>This loop sets keyframes at regular intervals, rotating the cube smoothly around its Z-axis by increments of pi/2. 
Using a small number of frames keeps rendering fast and makes it perfect for preview purposes.\u003C/p>\u003Cp>At this point, you could scrub the timeline in Blender to verify the cube rotates as expected.\u003C/p>\u003Chr>\u003Ch2 id=\"3-low-resolution-rendering\">\u003Cstrong>3. Low-Resolution Rendering\u003C/strong>\u003C/h2>\u003Cp>With animation in place, we can configure Blender to render a \u003Cstrong>fast, low-resolution preview\u003C/strong>. The goal is speed over quality: we want something clear enough for review but quick to produce.\u003C/p>\u003Cp>Here, we use\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>the Eevee rendering engine for speed and to reduce unnecessary rendering overhead\u003C/u>\u003C/a>. It's much faster than Cycles because it's a simple rasterisation engine, and we don't need a hyper-realistic output in 90% of cases.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">scene.render.engine = \"BLENDER_EEVEE\"\n\nscene.render.resolution_x = 1920\nscene.render.resolution_y = 1080\nscene.render.resolution_percentage = 50\n\nscene.render.fps = 24\nscene.frame_start = 1\nscene.frame_end = 48&nbsp; # match your animation length\n\nscene.render.image_settings.file_format = \"FFMPEG\"\nscene.render.ffmpeg.format = \"MPEG4\"\nscene.render.ffmpeg.codec = \"H264\"\n\nscene.render.filepath = \"//preview.mp4\"\u003C/code>\u003C/pre>\u003Cp>Although we go for a classic landscape resolution, reducing \u003Ccode>resolution_percentage\u003C/code> or turning off high-quality sampling in Eevee can drastically reduce render times for previews.\u003C/p>\u003Cp>The rest of the settings are pretty standard: 24 frames per second, 48 frames total, and a mp4 output video with H264 encoding (for faster compression) written in the script's current folder.\u003C/p>\u003Cp>Depending on your use case, you can reduce the resolution, decrease the frame rate, and lower the bitrate to lower the size of your previews. 
You still need enough quality for the review process, though, so tweak the settings for an optimal balance with performance.\u003C/p>\u003Cp>Finally, we can trigger the render in one line:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.render.render(animation=True)\u003C/code>\u003C/pre>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-27b0c802-b589-4306-b52b-5f910b58320b.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1088\" height=\"722\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-27b0c802-b589-4306-b52b-5f910b58320b.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-27b0c802-b589-4306-b52b-5f910b58320b.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-27b0c802-b589-4306-b52b-5f910b58320b.png 1088w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>The preview video can be immediately used for review or further processed with tools like FFmpeg for timestamps, watermarks, or custom naming conventions before uploading to Kitsu.\u003C/p>\u003Chr>\u003Ch2 id=\"4-ffmpeg-processing-timestamp-naming-watermark\">\u003Cstrong>4. FFmpeg Processing: Timestamp, Naming, Watermark\u003C/strong>\u003C/h2>\u003Cp>Once Blender has rendered your animation to a video file, you can further process it using \u003Cstrong>FFmpeg\u003C/strong>. 
This is\u003Ca href=\"https://blog.cg-wire.com/ffmpeg-commands-for-animators/\"> \u003Cu>a common step in production pipelines\u003C/u>\u003C/a> to add timestamps, watermarks, or custom naming-making the previews ready for review.\u003C/p>\u003Cp>Run the following command in a terminal after rendering your preview:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -framerate 24 \\\\\\\\\n&nbsp;&nbsp;-i preview.mp4 \\\\\\\\\n&nbsp;&nbsp;-i watermark.png \\\\\\\\\n&nbsp;&nbsp;-filter_complex \"\\\\\\\\\n&nbsp;&nbsp;&nbsp;&nbsp;[0:v]drawtext=text='%{pts\\\\\\\\:hms}':x=10:y=10:fontsize=24:fontcolor=white:bordercolor=black:borderw=2[v1]; \\\\\\\\\n&nbsp;&nbsp;&nbsp;&nbsp;[v1][1:v]overlay=W-w-20:H-h-20\" \\\\\\\\\n&nbsp;&nbsp;-c:v libx264 -crf 22 -pix_fmt yuv420p \\\\\\\\\n&nbsp;&nbsp;preview_with_stamp.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Cstrong>\u003Ccode>drawtext\u003C/code>\u003C/strong> overlays a running timestamp in the top-left corner.\u003C/li>\u003Cli>\u003Ccode>\u003Cstrong>overlay\u003C/strong>\u003C/code> places a watermark image (\u003Ccode>watermark.png\u003C/code>) in the bottom-right corner.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>c:v libx264 -crf 22 -pix_fmt yuv420p\u003C/code>\u003C/strong> ensures good quality and broad compatibility for video playback.\u003C/li>\u003Cli>The output file, \u003Ccode>preview_with_stamp.mp4\u003C/code>, is your finalised preview ready for review.\u003C/li>\u003C/ul>\u003Cp>Of course, you can adjust the font size, position, or watermark placement as needed to standardise previews for your team or client reviews.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-aaed9f6c-1b29-4592-b629-1830a6f2aa79.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1088\" height=\"722\" 
srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-aaed9f6c-1b29-4592-b629-1830a6f2aa79.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-aaed9f6c-1b29-4592-b629-1830a6f2aa79.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-aaed9f6c-1b29-4592-b629-1830a6f2aa79.png 1088w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>This step completes the preparation of a production-ready, low-resolution animation preview. The file is now ready to be uploaded to \u003Cstrong>Kitsu\u003C/strong> for quick feedback.\u003C/p>\u003Chr>\u003Ch2 id=\"5-uploading-to-kitsu-via-gazu\">\u003Cstrong>5. Uploading to Kitsu via Gazu\u003C/strong>\u003C/h2>\u003Cp>Once your low-resolution preview is ready, you can upload it directly to \u003Cstrong>Kitsu\u003C/strong> via the dashboard or use the \u003Ccode>gazu\u003C/code> Python SDK. Kitsu is a collaborative pipeline tracker allowing artists and supervisors to access the preview immediately for review.\u003C/p>\u003Cp>The following Python script provides a simple interactive CLI that lets you choose the project and task to upload your preview to:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu\n\ndef pickProject(label, list_of_items):\n&nbsp;&nbsp;&nbsp;&nbsp;\"\"\"Helper UI to pick one item from a list.\"\"\"\n&nbsp;&nbsp;&nbsp;&nbsp;for i, item in enumerate(list_of_items):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(f\"{i + 1}. 
{item['name']}\")\n&nbsp;&nbsp;&nbsp;&nbsp;idx = int(input(f\"Choose {label} number: \")) - 1\n&nbsp;&nbsp;&nbsp;&nbsp;return list_of_items[idx]\n\ndef pickTask(label, list_of_items):\n&nbsp;&nbsp;&nbsp;&nbsp;\"\"\"Helper UI to pick one item from a list.\"\"\"\n&nbsp;&nbsp;&nbsp;&nbsp;for i, item in enumerate(list_of_items):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;asset = gazu.entity.get_entity(item[\"entity_id\"])\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;status = gazu.task.get_task_status(item[\"task_status_id\"])\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;type = gazu.task.get_task_type(item[\"task_type_id\"])\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(f\"{i + 1}. {asset['name']} {type['name']} {status['name']}\")\n&nbsp;&nbsp;&nbsp;&nbsp;idx = int(input(f\"Choose {label} number: \")) - 1\n&nbsp;&nbsp;&nbsp;&nbsp;return list_of_items[idx]\n\ngazu.set_host(\"&lt;http://localhost/api&gt;\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\nprojects = gazu.project.all_projects()\nproject = pickProject(\"project\", projects)\n\ntasks = gazu.task.all_tasks_for_project(project)\ntask = pickTask(\"task\", tasks)\n\nprint(\"Uploading preview...\")\ntask_status = gazu.task.get_task_status_by_name(\"todo\")\nresult = gazu.task.publish_preview(\n&nbsp;&nbsp;&nbsp;&nbsp;task,\n&nbsp;&nbsp;&nbsp;&nbsp;task_status,\n&nbsp;&nbsp;&nbsp;&nbsp;comment=\"Auto-generated preview\",\n&nbsp;&nbsp;&nbsp;&nbsp;preview_file_path=\"./preview.mp4\",\n)\n\nprint(\"Done:\", result)\u003C/code>\u003C/pre>\u003Cp>First, we log in to Kitsu via \u003Ccode>gazu\u003C/code> with your credentials. We use the\u003Ca href=\"https://blog.cg-wire.com/dcc-integration-blender-kitsu/\"> \u003Cu>local development environment installation via Kitsu Docker\u003C/u>\u003C/a>. 
The program lets you select the \u003Cstrong>project\u003C/strong> and \u003Cstrong>task\u003C/strong> from available options using different Kitsu API endpoints to get all your production data:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-21091709-64dd-41c6-875e-2cdce8b5b178.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1343\" height=\"816\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-21091709-64dd-41c6-875e-2cdce8b5b178.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-21091709-64dd-41c6-875e-2cdce8b5b178.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-21091709-64dd-41c6-875e-2cdce8b5b178.png 1343w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>We then upload the generated preview video from the previous steps to the selected task.\u003C/p>\u003Cp>Once complete, the preview is available in Kitsu’s review interface, making it easy for team members and supervisors to give feedback without waiting for high-resolution renders.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-78d2cd48-21e9-4599-9b2b-a5e5bef63f76.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"985\" height=\"948\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-78d2cd48-21e9-4599-9b2b-a5e5bef63f76.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-78d2cd48-21e9-4599-9b2b-a5e5bef63f76.png 985w\" sizes=\"(min-width: 720px) 
720px\">\u003C/figure>\u003Cp>The review engine is perfect to quickly annotate frames and add comments on precise shots:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-6ae9b3dd-18e9-4d85-9fa6-e5106babc87e.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1438\" height=\"809\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-6ae9b3dd-18e9-4d85-9fa6-e5106babc87e.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-6ae9b3dd-18e9-4d85-9fa6-e5106babc87e.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-6ae9b3dd-18e9-4d85-9fa6-e5106babc87e.png 1438w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"6-putting-it-all-together\">\u003Cstrong>6. Putting it all together\u003C/strong>\u003C/h2>\u003Cp>To automate the task end-to-end, let's write a quick bash command:\u003C/p>\u003Cp>\u003Cstrong>\u003Cu>preview.sh\u003C/u>\u003C/strong>\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">python3 render.py &amp;&amp; ./watermark.sh &amp;&amp; python3 upload.py\u003C/code>\u003C/pre>\u003Cp>We can then run the script every time we need to share a preview:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">./preview.sh\u003C/code>\u003C/pre>\u003Cp>Check out our\u003Ca href=\"https://github.com/cgwire/blender-kitsu-low-res-preview?ref=blog.cg-wire.com\"> \u003Cu>Github repository blender-kitsu-low-res-preview\u003C/u>\u003C/a> to try out the final result yourself.\u003C/p>\u003Chr>\u003Ch2 id=\"7-artist-friendly-addon-overview\">\u003Cstrong>7. 
Artist-Friendly Addon Overview\u003C/strong>\u003C/h2>\u003Cp>Though this is out of the scope of this article, it could be easy to wrap up our code in a Blender addon for artists to easily use.\u003C/p>\u003Cp>You would need a main panel to hold dropdown menus to pick a production, asset, and task to upload to. And a button to click to upload. The uploading logic would take care of rendering, calling ffmpeg as a subprocess for watermarking, and actually sending the temporary files to Kitsu.\u003C/p>\u003Cp>Have a look at our article on\u003Ca href=\"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/\"> \u003Cu>Blender Add-on UI Development\u003C/u>\u003C/a> for more information.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>By now, you’ve set up a full pipeline: creating a simple 3D object in Blender, animating it, generating a low-resolution preview, adding timestamps and watermarks, and uploading it to Kitsu. The benefits are immediately clear:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Faster reviews\u003C/strong> - Supervisors and team members can watch previews immediately without waiting for full-resolution renders.\u003C/li>\u003Cli>\u003Cstrong>Quicker iterations\u003C/strong> - Artists get feedback faster, which shortens the iteration loop and reduces bottlenecks.\u003C/li>\u003Cli>\u003Cstrong>Fewer blockers\u003C/strong> - Automated previews and uploads eliminate repetitive manual steps in the pipeline to keep deliverables consistent.\u003C/li>\u003C/ul>\u003Cp>What used to take an hour of manual work can now be handled with a few scripts, giving the team more time to focus on the creative side of production instead of repetitive tasks.\u003C/p>\u003Cp>You can take this workflow even further depending on your animation studio's needs: add buttons or panels in Blender to run the entire pipeline with one click, automatically batch-generate previews for multiple shots or scenes in a single script, 
etc.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":413,"comment_id":414,"feature_image":415,"featured":105,"visibility":10,"created_at":416,"updated_at":390,"custom_excerpt":417,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":418,"primary_tag":419,"url":420,"excerpt":417,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":421},"d4c6e01e-3b37-4c90-b42c-cbfeecc518c2","693549d4ee42880001e4b1dc","https://images.unsplash.com/photo-1653200256306-6dc84510dfb6?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDN8fGFuaW1hdGlvbiUyMHBpcGVsaW5lfGVufDB8fHx8MTc2NTA5ODQ2Mnww&ixlib=rb-4.1.0&q=80&w=2000","2025-12-07T10:33:08.000+01:00","Learn how to generate low-resolution animation previews in Blender and automatically upload them to Kitsu. 
This tutorial covers Blender render settings, Python automation, FFmpeg processing, and preview publishing to streamline animation reviews.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-kitsu-low-res-preview/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@allisonsaeng?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Allison Saeng\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-kitsu-low-res-preview","2025-12-15T10:00:23.000+01:00",{"title":408},"blender-kitsu-low-res-preview","posts/blender-kitsu-low-res-preview",[428,429],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"slGFk0J3LjB1nVzZocG4Vk6gTdZMox1-G7CWEnotp_I",{"id":432,"title":433,"authors":434,"body":7,"description":7,"extension":8,"html":436,"meta":437,"navigation":14,"path":449,"published_at":450,"seo":451,"slug":452,"stem":453,"tags":454,"__hash__":457,"uuid":438,"comment_id":439,"feature_image":440,"featured":105,"visibility":10,"created_at":441,"updated_at":442,"custom_excerpt":443,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":444,"primary_tag":445,"url":446,"excerpt":443,"reading_time":447,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":448},"ghost/posts:blender-kitsu-breakdown-automation.json","How to Build Blender Shots Automatically Using Python and Kitsu 
(2026)",[435],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧩\u003C/div>\u003Cdiv class=\"kg-callout-text\">Automate your shot setup and eliminate hours of manual asset placement.\u003C/div>\u003C/div>\u003Cp>Animation studios rely on \u003Cstrong>breakdown lists\u003C/strong> to track which assets must appear in each shot.\u003C/p>\u003Cp>Picture this. You’re a VFX artist staring at a blank Blender viewport for your latest production. Your manager hands you the detailed list of assets, shots, and timing cues and says, \u003Cem>\"Turn this into a Blender scene.\"\u003C/em>\u003C/p>\u003Cp>Your first thought could be to log in to your asset manager and place every object manually. But what about complex scenes with hundreds of assets?\u003C/p>\u003Cp>This is the moment where a simple automation can save the day. With Python Blender scripting, you can read Kitsu breakdown data and generate an initial scene automatically in a few minutes.\u003C/p>\u003Cp>In this article, we walk through a full example: fetching breakdowns via the \u003Cstrong>Gazu\u003C/strong> Python API, creating a fresh Blender scene, downloading the assets, and importing them into Blender. 
By the end, you’ll have a minimal pipeline that builds scenes automatically, ready for layout or animation.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-kitsu-automated-scene-composition?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-kitsu-automated-scene-composition\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-getting-the-breakdown\">\u003Cstrong>1. Getting the Breakdown\u003C/strong>\u003C/h2>\u003Cp>Every 3D shot begins as a blank canvas, but the instructions for filling that canvas already exist in Kitsu:\u003Ca href=\"https://blog.cg-wire.com/3d-animation-process/\"> \u003Cu>the \u003Cstrong>breakdown\u003C/strong> dictates exactly what needs to be on stage\u003C/u>\u003C/a> before the animator begins working.\u003C/p>\u003Cp>A typical breakdown provides the essential narrative context your script needs to assemble the scene: the stage (start and end frames, duration, and other annotations stored in the sequence information), and the cast (the actual breakdown of character models, props, and environment assets).\u003C/p>\u003Cp>Before writing code, you need to define the breakdown in the Kitsu dashboard. This is where you manually link your library of 3D assets to the specific shots where they are required. 
You aren't creating new models here, just casting existing \"actors\" (assets) to a specific shot:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Enter your production\u003C/strong> - Navigate to your project in Kitsu and open the \u003Cstrong>Shots\u003C/strong> tab.\u003C/li>\u003Cli>\u003Cstrong>Locate the casting sheet\u003C/strong> - Look for the \u003Cstrong>Breakdown\u003C/strong> tab (usually found on the right-hand panel or a dedicated tab depending on your version).\u003C/li>\u003Cli>\u003Cstrong>Select the shot\u003C/strong> - Click on the specific shot you want to populate (e.g., \u003Ccode>SH01\u003C/code>) to open the detailed casting view.\u003C/li>\u003Cli>\u003Cstrong>Assign the assets\u003C/strong> - In the right side panel, click the \u003Cstrong>+ (Plus)\u003C/strong> button or \"Add Asset.\" You can also specify the quantity of each asset you need here.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-ef6fba58-9c73-4a38-b466-0b9d92e4efc0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1466\" height=\"804\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-ef6fba58-9c73-4a38-b466-0b9d92e4efc0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-ef6fba58-9c73-4a38-b466-0b9d92e4efc0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-ef6fba58-9c73-4a38-b466-0b9d92e4efc0.png 1466w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Make sure your \u003Cstrong>Assets\u003C/strong> page is already populated with the models (Characters, Props, etc.) you intend to use.\u003C/p>\u003Cp>Once you hit save, the link is established. 
Now, when your Python script asks Gazu, \"Who is in this shot?\", Kitsu will reply with the list of assets you just assigned. Your Python script acts as the bridge, parsing this casting to automatically populate the Blender viewport.\u003C/p>\u003Cp>If you need a local development environment, have a look at\u003Ca href=\"https://blog.cg-wire.com/dcc-integration-blender-kitsu/\"> \u003Cu>how to install Kitsu from Docker in our Custom DCC Bridge guide\u003C/u>\u003C/a>.\u003C/p>\u003Cp>While Kitsu holds the data, we need a way to fetch it. Enter \u003Cstrong>Gazu\u003C/strong>, the Python SDK for Kitsu’s REST API:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu\n\ngazu.set_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\nprojects = gazu.project.all_projects()\nproject = projects[0]\n\nsequence = gazu.shot.get_sequence_by_name(project, \"SQ01\")\nshot = gazu.shot.get_shot_by_name(sequence, \"SH01\")\n\nassets = gazu.casting.get_shot_casting(shot)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>We connect to our local Kitsu instance, and then we pick our first production (you can also retrieve a production by name) and the shot we need the casting for.\u003C/p>\u003Cp>We can use this shot ID to retrieve the corresponding casting of assets, the breakdown list.\u003C/p>\u003Chr>\u003Ch2 id=\"2-getting-assets-from-a-breakdown\">\u003Cstrong>2. Getting Assets From a Breakdown\u003C/strong>\u003C/h2>\u003Cp>Now that we know \u003Cem>who\u003C/em> is in the shot, we need to find out \u003Cem>what\u003C/em> they look like.\u003C/p>\u003Cp>In Kitsu, an asset can have many preview files we can use depending on revisions. 
Our script needs to be able to navigate this data to get the last revision of each asset:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">local_paths = []\nfor asset in assets:\n&nbsp;&nbsp;&nbsp;&nbsp;tasks = gazu.task.all_tasks_for_asset(asset[\"asset_id\"])\n&nbsp;&nbsp;&nbsp;&nbsp;last_task = max(tasks, key=lambda x: x[\"updated_at\"])\n\n&nbsp;&nbsp;&nbsp;&nbsp;preview_files = gazu.files.get_all_preview_files_for_task(last_task)\n&nbsp;&nbsp;&nbsp;&nbsp;last_preview_file = max(preview_files, key=lambda x: x[\"updated_at\"])\n\n&nbsp;&nbsp;&nbsp;&nbsp;download_dir = \"./previews\"\n&nbsp;&nbsp;&nbsp;&nbsp;os.makedirs(download_dir, exist_ok=True)\n\n&nbsp;&nbsp;&nbsp;&nbsp;save_path = os.path.join(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;download_dir,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;last_preview_file[\"original_name\"] + \".\" + last_preview_file[\"extension\"],\n&nbsp;&nbsp;&nbsp;&nbsp;)\n&nbsp;&nbsp;&nbsp;&nbsp;gazu.files.download_preview_file(last_preview_file, save_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;local_paths.append(save_path)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>For each asset, we retrieve a list of all corresponding tasks of any type ('Modeling', 'Animation', etc.) or status ('done', 'todo'...). We filter this list to retrieve the last updated task.\u003C/p>\u003Cp>We can use this task ID to get the last corresponding preview file revision and download it to a local folder \u003Ccode>previews\u003C/code>. We keep these download paths in memory for the importing step.\u003C/p>\u003Cp>At the end of this loop, you have successfully turned database entries into tangible model files on your hard drive, ready for Blender to ingest.\u003C/p>\u003Chr>\u003Ch2 id=\"3-creating-a-new-blender-scene\">\u003Cstrong>3. 
Creating a New Blender Scene\u003C/strong>\u003C/h2>\u003Cp>With the asset files safely downloaded, the next task is preparing the Blender environment to receive its new cast member.\u003C/p>\u003Cp>The \u003Ccode>bpy\u003C/code> module, Blender's native Python API, acts as your command console allowing you to manipulate every element of the application.\u003C/p>\u003Cp>Before we import our Kitsu assets, we must eliminate any default objects that come with a new Blender scene. For this simple tutorial, we're targeting the default \u003Cstrong>Cube\u003C/strong>, which is often the only object present besides the default Camera and Light:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.data.objects.remove(bpy.data.objects.get(\"Cube\"), do_unlink=True)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>The \u003Ccode>do_unlink=True\u003C/code> flag tells Blender to fully delete the object's data block (like its mesh data) if it’s no longer used by any other object to leave no clutter behind.\u003C/p>\u003Cp>We are now ready for the imported assets to take their places.\u003C/p>\u003Chr>\u003Ch2 id=\"4-importing-asset-files\">\u003Cstrong>4. Importing Asset Files\u003C/strong>\u003C/h2>\u003Cp>Now for the payoff! Since the file we downloaded from Kitsu is a standardised interchange \u003Ccode>.glb\u003C/code> format, which handles both geometry and basic materials, we use Blender’s dedicated \u003Ccode>gltf\u003C/code> import operator.\u003C/p>\u003Cp>The crucial part is providing the correct \u003Cstrong>absolute file path\u003C/strong> (\u003Ccode>glb_path\u003C/code>) to the downloaded asset. 
Fortunately, we stored those in the previous code snippet:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">for path in local_paths:\n&nbsp;&nbsp;&nbsp;&nbsp;if path.lower().endswith((\".glb\")):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(f\"Importing: {path}\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.import_scene.gltf(filepath=path)\n\nprint(\"All preview GLB files imported successfully!\")\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>Once \u003Ccode>bpy.ops.import_scene.gltf()\u003C/code> executes, Blender reads the file and automatically creates the corresponding \u003Cstrong>objects\u003C/strong>, \u003Cstrong>meshes\u003C/strong>, and \u003Cstrong>materials\u003C/strong> in the current scene.\u003C/p>\u003Cp>The imported asset is now a full-fledged Blender object, placed at the world origin (0, 0, 0), ready for subsequent pipeline steps.\u003C/p>\u003Chr>\u003Ch2 id=\"5-saving-the-scene\">\u003Cstrong>5. Saving the Scene\u003C/strong>\u003C/h2>\u003Cp>The final step in this pipeline segment is to save the assembled layout into a permanent, versionable file. If you close Blender without this step, all the automated work is lost, so we use the \u003Ccode>bpy.ops.wm.save_as_mainfile\u003C/code> operator. 
This is the programmatic equivalent of clicking \u003Cstrong>File &gt; Save As\u003C/strong> in the Blender interface:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">scene_save_dir = \"./\"\nos.makedirs(scene_save_dir, exist_ok=True)\n\nblend_filename = \"SH01.blend\"\nblend_path = os.path.join(scene_save_dir, blend_filename)\n\nbpy.ops.wm.save_as_mainfile(filepath=blend_path)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>The result is a new Blender file, \u003Ccode>SH01.blend\u003C/code>, that perfectly reflects the \u003Cstrong>breakdown requirements\u003C/strong> from Kitsu, ready for the next department to pick up.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-91e5cf8e-acb1-4ac0-b5ec-d2c37a6a1ed6.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1460\" height=\"828\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-91e5cf8e-acb1-4ac0-b5ec-d2c37a6a1ed6.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-91e5cf8e-acb1-4ac0-b5ec-d2c37a6a1ed6.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-91e5cf8e-acb1-4ac0-b5ec-d2c37a6a1ed6.png 1460w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"6-user-friendly-addon\">\u003Cstrong>6. User-Friendly Addon\u003C/strong>\u003C/h2>\u003Cp>The script works as expected, but what about artists? 
Not everyone knows how to run a script.\u003C/p>\u003Cp>Let's slightly modify our code to\u003Ca href=\"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/\"> \u003Cu>turn it into a Blender addon\u003C/u>\u003C/a>:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bl_info = {\n&nbsp;&nbsp;&nbsp;&nbsp;\"name\": \"Kitsu Shot Auto-Importer\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"description\": \"Pick a project and shot and auto-import the latest preview assets\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"author\": \"cgwire\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"version\": (1, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"blender\": (3, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"location\": \"Viewport &gt; N-Panel &gt; Kitsu\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"category\": \"Import-Export\",\n}\n\nimport os\nimport sys\n\nsys.path.append(\"~/.local/lib/python3.11/site-packages\")\n\nimport bpy\nimport gazu\nfrom bpy.props import EnumProperty, StringProperty\n\ndef get_projects():\n&nbsp;&nbsp;&nbsp;&nbsp;try:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;projects = gazu.project.all_projects()\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return [(p[\"id\"], p[\"name\"], \"\") for p in projects]\n&nbsp;&nbsp;&nbsp;&nbsp;except:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n\ndef get_sequences(project_id):\n&nbsp;&nbsp;&nbsp;&nbsp;if not project_id:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n&nbsp;&nbsp;&nbsp;&nbsp;try:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;seqs = gazu.shot.all_sequences_for_project(project_id)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return [(s[\"id\"], s[\"name\"], \"\") for s in seqs]\n&nbsp;&nbsp;&nbsp;&nbsp;except:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n\ndef get_shots(sequence_id):\n&nbsp;&nbsp;&nbsp;&nbsp;if not sequence_id:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n&nbsp;&nbsp;&nbsp;&nbsp;try:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;shots = 
gazu.shot.all_shots_for_sequence(sequence_id)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return [(s[\"id\"], s[\"name\"], \"\") for s in shots]\n&nbsp;&nbsp;&nbsp;&nbsp;except:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n\nclass KITSU_Props(bpy.types.PropertyGroup):\n&nbsp;&nbsp;&nbsp;&nbsp;project: EnumProperty(name=\"Project\", items=lambda self, context: get_projects())\n\n&nbsp;&nbsp;&nbsp;&nbsp;sequence: EnumProperty(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;name=\"Sequence\", items=lambda self, context: get_sequences(self.project)\n&nbsp;&nbsp;&nbsp;&nbsp;)\n\n&nbsp;&nbsp;&nbsp;&nbsp;shot: EnumProperty(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;name=\"Shot\", items=lambda self, context: get_shots(self.sequence)\n&nbsp;&nbsp;&nbsp;&nbsp;)\n\nclass KITSU_OT_import_shot(bpy.types.Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"kitsu.import_shot_assets\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Import Shot Assets\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_description = (\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;\"Download and import latest preview GLB/GLTF files for selected shot\"\n&nbsp;&nbsp;&nbsp;&nbsp;)\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;props = context.scene.kitsu_props\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;# Fetch shot data\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;shot = gazu.shot.get_shot(props.shot)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;assets = gazu.casting.get_shot_casting(shot)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;download_dir = os.path.join(bpy.app.tempdir, \"kitsu_previews\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;os.makedirs(download_dir, exist_ok=True)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;local_paths = []\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for asset in assets:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;tasks = 
gazu.task.all_tasks_for_asset(asset[\"asset_id\"])\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;if not tasks:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;continue\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;last_task = max(tasks, key=lambda x: x[\"updated_at\"])\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;preview_files = gazu.files.get_all_preview_files_for_task(last_task)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;if not preview_files:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;continue\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;last_preview = max(preview_files, key=lambda x: x[\"updated_at\"])\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;save_path = os.path.join(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;download_dir,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;last_preview[\"original_name\"] + \".\" + last_preview[\"extension\"],\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;gazu.files.download_preview_file(last_preview, save_path)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;local_paths.append(save_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;# Clean default cube\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;obj = bpy.data.objects.get(\"Cube\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;if obj:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.data.objects.remove(obj, do_unlink=True)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;# Import GLB/GLTF 
assets\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for path in local_paths:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;if path.lower().endswith((\".glb\", \".gltf\")):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.import_scene.gltf(filepath=path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;# Auto-save blend file\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;save_dir = os.path.join(os.path.expanduser(\"~\"), \"kitsu_scenes\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;os.makedirs(save_dir, exist_ok=True)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;blend_path = os.path.join(save_dir, f\"{shot['name']}.blend\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.wm.save_as_mainfile(filepath=blend_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({\"INFO\"}, f\"Imported assets and saved: {blend_path}\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {\"FINISHED\"}\n\nclass KITSU_PT_panel(bpy.types.Panel):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Kitsu Auto-Importer\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"KITSU_PT_auto_importer\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_space_type = \"VIEW_3D\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_region_type = \"UI\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_category = \"Kitsu\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def draw(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;props = context.scene.kitsu_props\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout = self.layout\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(props, \"project\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(props, \"sequence\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(props, 
\"shot\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.operator(\"kitsu.import_shot_assets\", icon=\"IMPORT\")\n\nclasses = (\n&nbsp;&nbsp;&nbsp;&nbsp;KITSU_Props,\n&nbsp;&nbsp;&nbsp;&nbsp;KITSU_OT_import_shot,\n&nbsp;&nbsp;&nbsp;&nbsp;KITSU_PT_panel,\n)\n\ndef register():\n&nbsp;&nbsp;&nbsp;&nbsp;for c in classes:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(c)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.types.Scene.kitsu_props = bpy.props.PointerProperty(type=KITSU_Props)\n\ndef unregister():\n&nbsp;&nbsp;&nbsp;&nbsp;for c in classes:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(c)\n&nbsp;&nbsp;&nbsp;&nbsp;del bpy.types.Scene.kitsu_props\n\nif __name__ == \"__main__\":\n&nbsp;&nbsp;&nbsp;&nbsp;register()\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>We can now manually pick a production, sequence, and shot to get breakdown data from, and import the corresponding casting in the current Blender viewport:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-bf3ea18d-fd62-4db5-9977-6374b3ee1aef.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"480\" height=\"270\">\u003C/figure>\u003Cp>The logic is simple: we use the same \u003Ccode>gazu\u003C/code> code to populate dropdown menus, and we encapsulate them all in a panel in the viewport. An \u003Ccode>import\u003C/code> button downloads all the corresponding breakdown assets and imports them into the current workspace.\u003C/p>\u003Cp>Keep in mind that adding \u003Ccode>sys.path.append(\"~/.local/lib/python3.11/site-packages\")\u003C/code> lets Blender use your system’s Python installation to load external libraries like \u003Ccode>gazu\u003C/code>. Since Blender ships with its own isolated Python environment, managing package installations can be inconvenient. 
By extending the path, you simply instruct Blender to check your local modules as well. Make sure to adjust this path to match your own setup.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>By pulling breakdown lists directly from Kitsu and scripting Blender to assemble scenes, you eliminate repetitive manual steps and ensure asset consistency across all shots. This approach doesn't just save time but also reduces human error and ensures every artist starts with the correct asset version and scene setup required by the producer. This way, you can easily handle ten shots or ten thousand with equal reliability.\u003C/p>\u003Cp>But don't take our word for it,\u003Ca href=\"https://github.com/cgwire/blender-kitsu-automated-scene-composition?ref=blog.cg-wire.com\"> \u003Cu>clone the Github repository\u003C/u>\u003C/a> to try out the result!\u003C/p>\u003Cp>You can extend this workflow by generating automated previews, reports, or even updating asset information from the new revisions created during the shot animation.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":438,"comment_id":439,"feature_image":440,"featured":105,"visibility":10,"created_at":441,"updated_at":442,"custom_excerpt":443,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":444,"primary_tag":445,"url":446,"excerpt":443,"reading_time":447,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":448},"d090d72e-fa3b-4af9-806a-a44f7732a7c4","6909b6d2df0ae600014fbb54","https://images.unsplash.com/photo-1725888358557-9f70661012c4?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fGFuaW1hdGlvbiUyMHBpcGVsaW5lfGVufDB8fHx8MTc2NTA5ODQ2Mnww&ixlib=rb-4.1.0&q=80&w=2000","2025-11-04T09:18:26.000+01:00","2026-02-20T06:04:00.000+01:00","Learn how to automate Blender scene creation using Kitsu breakdown data and Python scripting. 
This guide walks through retrieving breakdowns via Gazu, downloading assets, importing GLB files, and generating a complete Blender scene ready for layout or animation.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-kitsu-breakdown-automation/",11,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@steve_j?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Steve Johnson\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-kitsu-breakdown-automation","2025-12-07T18:11:31.000+01:00",{"title":433},"blender-kitsu-breakdown-automation","posts/blender-kitsu-breakdown-automation",[455,456],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"i5Pduvllq_hTDBHuCFEVMzMxTyU5evzIUkMxND7t3YY",{"id":459,"title":460,"authors":461,"body":7,"description":7,"extension":8,"html":463,"meta":464,"navigation":14,"path":475,"published_at":476,"seo":477,"slug":478,"stem":479,"tags":480,"__hash__":483,"uuid":465,"comment_id":466,"feature_image":467,"featured":105,"visibility":10,"created_at":468,"updated_at":469,"custom_excerpt":470,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":471,"primary_tag":472,"url":473,"excerpt":470,"reading_time":214,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":474},"ghost/posts:blender-addon-ui-scripting-guide.json","A 2026 Guide to Blender Add-on UI 
Development",[462],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📄\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn your Blender scripts into real tools artists love using—here’s how to build clean, intuitive UI panels for your add-ons.\u003C/div>\u003C/div>\u003Cp>If you’ve ever \u003Ca href=\"https://blog.cg-wire.com/blender-scripting-animation/\">written a Blender script\u003C/a>, you’ve probably realized that getting the feature right is only half the battle: the other half is getting someone else to use it! A clean user interface is a must to share and sell Blender add-ons.\u003C/p>\u003Cp>In this guide, you’ll learn how to build user interfaces for your Blender add-ons using the built-in layout system. We’ll cover the most common types of UI components, where panels can appear, and walk through a minimal working example. By the end, you’ll know how to give your add-on a Blender-native graphical interface.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-ui-addon-script?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-ui-addon-script\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-common-ui-components\">\u003Cstrong>1. 
Common UI Components\u003C/strong>\u003C/h2>\u003Cp>In Blender, every element of the user interface has its equivalent in the Python library. You build UI by creating classes that inherit from one of the following types:\u003C/p>\u003Cul>\u003Cli>\u003Ccode>bpy.types.Panel\u003C/code> - for custom panels (the most common)\u003C/li>\u003Cli>\u003Ccode>bpy.types.Menu\u003C/code> - for menus and submenus\u003C/li>\u003Cli>\u003Ccode>bpy.types.Operator\u003C/code> - for actions or tools that can be run from buttons\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-daa22afa-ac20-4e3e-8543-c694588146bf.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"334\" height=\"542\">\u003C/figure>\u003Cp>Each of these classes can implement a \u003Ccode>draw(self, context)\u003C/code> method where you describe what the interface should look like using layout commands. Blender’s layout system handles the spacing, alignment, and positioning automatically: it's a declarative UI system where you just describe what should appear and in what order.\u003C/p>\u003Cp>Here are the most common layout elements you’ll use:\u003C/p>\u003Ch3 id=\"basic-display-elements\">\u003Cstrong>Basic Display Elements\u003C/strong>\u003C/h3>\u003Cul>\u003Cli>\u003Cstrong>Label\u003C/strong> - Displays plain, non-interactive text. Format: \u003Ccode>layout.label(text=\"Hello!\")\u003C/code>\u003C/li>\u003Cli>\u003Cstrong>Separator\u003C/strong> - Adds vertical space between items for readability. Format: \u003Ccode>layout.separator()\u003C/code>\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"buttons-inputs-props-and-operators\">\u003Cstrong>Buttons, Inputs, Props, and Operators\u003C/strong>\u003C/h3>\u003Cul>\u003Cli>\u003Cstrong>Operator Button\u003C/strong> - Creates a clickable button that triggers an operator (a function registered as a Blender command). 
You can use this for actions like exporting, duplicating, or running a custom script. Syntax: \u003Ccode>layout.operator(\"myaddon.some_action\", text=\"Run Action\")\u003C/code>\u003C/li>\u003C/ul>\u003Cp>The \u003Ccode>layout.prop()\u003C/code> method is used to display editable Blender properties which are either built-in data (like \u003Ccode>context.object\u003C/code>) or your own custom properties. For example, \u003Ccode>layout.prop(context.object, \"name\")\u003C/code> shows an editable text field for the object’s name. Blender automatically chooses the right widget (text box, slider, checkbox, etc.) based on the property’s type:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Checkbox (Boolean property)\u003C/strong> - Displays a toggle checkbox. Example: \u003Ccode>layout.prop(context.object, \"hide_viewport\")\u003C/code>\u003C/li>\u003Cli>\u003Cstrong>Number Field / Slider (Float or Int)\u003C/strong> - Displays a numeric input, often with a slider. Example: \u003Ccode>layout.prop(context.object, \"location\", index=0, text=\"X Location\")\u003C/code>\u003C/li>\u003Cli>\u003Cstrong>Dropdown Menu (Enum property)\u003C/strong> - Displays a dropdown list when the property is an EnumProperty. Example: \u003Ccode>layout.prop(context.object, \"type\")\u003C/code>\u003C/li>\u003Cli>\u003Cstrong>Text Input \u003C/strong>- Displays a text box for string properties. Example: \u003Ccode>layout.prop(my_settings, \"username\")\u003C/code>\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"organizing-the-layout\">\u003Cstrong>Organizing the Layout\u003C/strong>\u003C/h3>\u003Cp>To keep your UI structured and easy to understand, Blender provides layout containers like rows, columns, and boxes.\u003C/p>\u003Cp>A panel contains rows and columns. Rows and columns contain properties, operators, and labels. 
Blender automatically handles padding, alignment, and scaling to match the theme and layout rules.\u003C/p>\u003Cul>\u003Cli>A row (horizontal grouping) puts elements next to each other horizontally:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">row = layout.row()\nrow.prop(obj, \"location\")\nrow.prop(obj, \"rotation_euler\")\u003C/code>\u003C/pre>\u003Cul>\u003Cli>A column (vertical grouping) stacks elements vertically:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">col = layout.column()\ncol.prop(obj, \"scale\")\ncol.prop(obj, \"dimensions\")\u003C/code>\u003C/pre>\u003Cul>\u003Cli>box (Visual grouping) draws a bordered box that visually groups related controls, like sections:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">box = layout.box()\nbox.label(text=\"Transform Settings\")\nbox.prop(obj, \"location\")\nbox.prop(obj, \"rotation_euler\")\u003C/code>\u003C/pre>\u003Cp>For the full list of UI components, have a look at \u003Ca href=\"https://docs.blender.org/manual/en/latest/interface/index.html?ref=blog.cg-wire.com\">the User Interface page of the official Blender documentation\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"2-where-you-can-put-ui-panels\">\u003Cstrong>2. 
Where You Can Put UI Panels\u003C/strong>\u003C/h2>\u003Cp>When you create a custom panel in Blender, you can decide where in the interface it appears and what region it occupies with two key class attributes:\u003C/p>\u003Cul>\u003Cli>\u003Ccode>bl_space_type\u003C/code> - which editor or workspace your panel belongs to (for example, the 3D View, the Properties Editor, or the Node Editor).\u003C/li>\u003Cli>\u003Ccode>bl_region_type\u003C/code> - which part of that editor the panel appears in (for example, the sidebar, toolbar, or main window).\u003C/li>\u003C/ul>\u003Cp>Here is a list of the most typical areas where you might place a custom panel:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-070d3dfe-eb98-42a2-90a2-d2eabc4fc2d4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1125\" height=\"650\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-070d3dfe-eb98-42a2-90a2-d2eabc4fc2d4.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-070d3dfe-eb98-42a2-90a2-d2eabc4fc2d4.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-070d3dfe-eb98-42a2-90a2-d2eabc4fc2d4.png 1125w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cul>\u003Cli>The 3D view sidebar appears in the right-hand N-panel sidebar of the 3D Viewport. This is the most common location for modeling, rigging, or scene tools:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">bl_space_type = 'VIEW_3D'\nbl_region_type = 'UI'\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cul>\u003Cli>You can add panels inside the Properties Editor, among the Object, Material, or Scene tabs. 
Use this when your add-on deals with materials, objects, render settings, or scene properties:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">bl_space_type = 'PROPERTIES'\nbl_region_type = 'WINDOW'\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cul>\u003Cli>In the UV/Image Editor sidebar (useful for texture tools or image utilities):\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">bl_space_type = 'IMAGE_EDITOR'\nbl_region_type = 'UI'\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cul>\u003Cli>In the sidebar of the Shader, Geometry Node, or Compositor editors for tools that work with nodes, shaders, or procedural systems:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">bl_space_type = 'NODE_EDITOR'\nbl_region_type = 'UI'\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>The best panel location depends on your tool’s purpose:\u003C/p>\u003Cul>\u003Cli>Modeling / Object tools → 3D View sidebar (\u003Ccode>VIEW_3D\u003C/code> + \u003Ccode>UI\u003C/code>)\u003C/li>\u003Cli>Material or render settings → Properties editor (\u003Ccode>PROPERTIES\u003C/code> + \u003Ccode>WINDOW\u003C/code>)\u003C/li>\u003Cli>Texture utilities → Image editor sidebar (\u003Ccode>IMAGE_EDITOR\u003C/code> + \u003Ccode>UI\u003C/code>)\u003C/li>\u003Cli>Shader / Geometry tools → Node editor sidebar (\u003Ccode>NODE_EDITOR\u003C/code> + \u003Ccode>UI\u003C/code>)\u003C/li>\u003C/ul>\u003Cp>Picking the right space helps users find your add-on where they naturally expect to, keeping your UI consistent with Blender’s.\u003C/p>\u003Chr>\u003Ch2 id=\"3-minimal-example-custom-panel-in-the-3d-view-sidebar\">\u003Cstrong>3. 
Minimal Example: Custom Panel in the 3D View Sidebar\u003C/strong>\u003C/h2>\u003Cp>Let's experiment with a simple plugin: a custom panel in the 3D view sidebar that displays a \"hello world\" text alert when clicking on a button.\u003C/p>\u003Ch3 id=\"1-blinfoaddon-metadata\">\u003Cstrong>1) \u003Ccode>bl_info\u003C/code> - addon metadata\u003C/strong>\u003C/h3>\u003Cp>We start by specifying the add-on metadata to tell Blender how to present our add-on to a potential user:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bl_info = {\n&nbsp;&nbsp;&nbsp;&nbsp;\"name\": \"Simple Addon Example\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"author\": \"Your Name\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"version\": (1, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"blender\": (4, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"location\": \"View3D &gt; Sidebar &gt; Simple Tab\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"description\": \"A simple example addon that prints a message\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"category\": \"3D View\",\n}\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cul>\u003Cli>\u003Ccode>bl_info\u003C/code> is a module-level dictionary Blender uses to show addon info in Preferences → Add-ons\u003Cul>\u003Cli>\u003Ccode>name:\u003C/code> human-readable name shown in the list\u003C/li>\u003Cli>\u003Ccode>author:\u003C/code> author string\u003C/li>\u003Cli>\u003Ccode>version:\u003C/code> tuple representing addon version\u003C/li>\u003Cli>\u003Ccode>blender:\u003C/code> minimum Blender version this addon targets (tuple)\u003C/li>\u003Cli>\u003Ccode>location:\u003C/code> where the addon UI appears (helpful for users)\u003C/li>\u003Cli>\u003Ccode>description:\u003C/code> short description used in the UI\u003C/li>\u003Cli>\u003Ccode>category:\u003C/code> category grouping in the Add-ons list\u003C/li>\u003C/ul>\u003C/li>\u003C/ul>\u003Cp>It's essential to keep your \u003Ccode>bl_info\u003C/code> accurate, as Blender reads it when scanning installed add-ons.\u003C/p>\u003Cp>\u003C/p>\u003Ch3 
id=\"2-define-an-operator-class\">\u003Cstrong>2) Define an operator class\u003C/strong>\u003C/h3>\u003Cp>We then define an Operator subclass. Operators are the official way to perform actions in Blender: they can be invoked from UI, shortcuts, search menu, etc.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class SIMPLEADDON_OT_hello(bpy.types.Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"simple_addon.say_hello\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Say Hello\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_description = \"Prints a message to the console\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({'INFO'}, \"Hello from Blender Addon!\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(\"Hello from Blender Addon!\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {'FINISHED'}\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bl_idname\u003C/code> - A unique identifier string in the form \u003Ccode>\"module_name.operator_name\"\u003C/code>, all lowercase and with a dot. This is how you call the operator from code or UI (\u003Ccode>bpy.ops.simple_addon.say_hello()\u003C/code>).\u003C/li>\u003Cli>\u003Ccode>bl_label\u003C/code> - User-facing label that appears on buttons/menus.\u003C/li>\u003Cli>\u003Ccode>bl_description\u003C/code> - Tooltip/description shown in the UI.\u003C/li>\u003Cli>\u003Ccode>execute(self, context)\u003C/code> - Core method called when the operator runs (synchronous execution). \u003Ccode>context\u003C/code> gives access to Blender's current state (active object, scene, area, etc.). \u003Ccode>self.report({'INFO'}, \"…\")\u003C/code> shows a small message in Blender's info bar / status (good for user feedback). \u003Ccode>print(\"…\")\u003C/code> prints to the system/Blender console (useful for debugging). Returns a set like \u003Ccode>{'FINISHED'}\u003C/code> or \u003Ccode>{'CANCELLED'}\u003C/code>. 
Blender uses this result to know whether the operator completed successfully.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"3-panel-classui-placement\">\u003Cstrong>3) Panel class - UI placement\u003C/strong>\u003C/h3>\u003Cp>We can then get to the Panel subclass to add UI in Blender:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class SIMPLEADDON_PT_panel(bpy.types.Panel):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Simple Addon Panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"SIMPLEADDON_PT_panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_space_type = 'VIEW_3D'\n&nbsp;&nbsp;&nbsp;&nbsp;bl_region_type = 'UI'\n&nbsp;&nbsp;&nbsp;&nbsp;bl_category = 'Simple'\n\n&nbsp;&nbsp;&nbsp;&nbsp;def draw(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout = self.layout\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.operator(\"simple_addon.say_hello\")\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bl_label\u003C/code> - panel title shown in the UI.\u003C/li>\u003Cli>\u003Ccode>bl_idname\u003C/code> - unique panel identifier.\u003C/li>\u003Cli>\u003Ccode>bl_space_type = 'VIEW_3D'\u003C/code> tells Blender this panel belongs in the 3D Viewport area.\u003C/li>\u003Cli>\u003Ccode>bl_region_type = 'UI'\u003C/code> places it in the right-side region (the N-panel). Other regions exist (e.g., \u003Ccode>'TOOLS', 'WINDOW'\u003C/code>).\u003C/li>\u003Cli>\u003Ccode>bl_category = 'Simple'\u003C/code> - The tab name in the sidebar. The panel will appear under a tab labeled “Simple”.\u003C/li>\u003Cli>\u003Ccode>draw(self, context)\u003C/code> is called to draw UI layout.\u003C/li>\u003Cli>\u003Ccode>self.layout\u003C/code> is a \u003Ccode>UILayout\u003C/code> object used to place buttons, labels, properties, etc.\u003C/li>\u003Cli>\u003Ccode>layout.operator(\"simple_addon.say_hello\")\u003C/code> creates a button that, when clicked, calls the operator with bl_idname \u003Ccode>\"simple_addon.say_hello\"\u003C/code>. 
The button text is taken from the operator's \u003Ccode>bl_label\u003C/code>.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"4-register-unregister-functions\">\u003Cstrong>4) Register / unregister functions\u003C/strong>\u003C/h3>\u003Cp>Blender requires classes that define UI, operators, panels, properties, etc., to be registered so Blender knows about them:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">def register():\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(SIMPLEADDON_OT_hello)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(SIMPLEADDON_PT_panel)\n\ndef unregister():\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(SIMPLEADDON_PT_panel)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(SIMPLEADDON_OT_hello)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bpy.utils.register_class(Class)\u003C/code> registers a class; \u003Ccode>unregister_class\u003C/code> removes it.\u003C/li>\u003Cli>It's important to unregister classes in the reverse order of registration, especially when classes reference each other. This is why the panel is unregistered before the operator.\u003C/li>\u003Cli>When the addon is enabled in Preferences, Blender calls \u003Ccode>register()\u003C/code>. 
When disabled, it calls \u003Ccode>unregister()\u003C/code>.\u003C/li>\u003C/ul>\u003Cp>We put the full code in a Python file \u003Ccode>addon.py\u003C/code>:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bl_info = {\n&nbsp;&nbsp;&nbsp;&nbsp;\"name\": \"Simple Addon Example\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"author\": \"Your Name\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"version\": (1, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"blender\": (4, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"location\": \"View3D &gt; Sidebar &gt; Simple Tab\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"description\": \"A simple example addon that prints a message\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"category\": \"3D View\",\n}\n\nimport bpy\n\nclass SIMPLEADDON_OT_hello(bpy.types.Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"simple_addon.say_hello\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Say Hello\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_description = \"Prints a message to the console\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({'INFO'}, \"Hello from Blender Addon!\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(\"Hello from Blender Addon!\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {'FINISHED'}\n\nclass SIMPLEADDON_PT_panel(bpy.types.Panel):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Simple Addon Panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"SIMPLEADDON_PT_panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_space_type = 'VIEW_3D'\n&nbsp;&nbsp;&nbsp;&nbsp;bl_region_type = 'UI'\n&nbsp;&nbsp;&nbsp;&nbsp;bl_category = 'Simple'\n\n&nbsp;&nbsp;&nbsp;&nbsp;def draw(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout = self.layout\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.operator(\"simple_addon.say_hello\")\n\ndef register():\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(SIMPLEADDON_OT_hello)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(SIMPLEADDON_PT_panel)\n\ndef 
unregister():\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(SIMPLEADDON_PT_panel)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(SIMPLEADDON_OT_hello)\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"4-running-and-packaging-your-add-on\">\u003Cstrong>4. Running and Packaging Your Add-on\u003C/strong>\u003C/h2>\u003Cp>Once you’ve written your add-on script, you can load it into Blender and test it right away. No tools required.\u003C/p>\u003Col>\u003Cli>Save your script - Save your Python file with a clear name like \u003Ccode>my_addon.py\u003C/code>.\u003C/li>\u003Cli>Open Blender’s Add-ons Preferences - Go to Edit → Preferences → Add-ons. This is where Blender manages all installed extensions.\u003C/li>\u003Cli>Install the add-on - Click the Install… button at the top of the preferences window. \u003Ccode>Select your my_addon.py\u003C/code> file and click Install Add-on.\u003C/li>\u003Cli>Enable it - After installing, your add-on should appear in the list. Find it (you can search for “My Add-on”) and check the box to enable it if it's not already.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-27ff3592-fed1-4347-8930-9dd62b2d950b.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1227\" height=\"800\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-27ff3592-fed1-4347-8930-9dd62b2d950b.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-27ff3592-fed1-4347-8930-9dd62b2d950b.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-27ff3592-fed1-4347-8930-9dd62b2d950b.png 1227w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"5\">\u003Cli>Check it in the interface - Open the 3D 
Viewport, open the sidebar, and look for the tab named Simple. Your custom panel should be there, ready to use!\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-2a90e13f-b338-4235-a830-f9c8d8060562.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1227\" height=\"741\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-2a90e13f-b338-4235-a830-f9c8d8060562.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-2a90e13f-b338-4235-a830-f9c8d8060562.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-2a90e13f-b338-4235-a830-f9c8d8060562.png 1227w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>When you want to share your add-on with others, you can upload it to GitHub, Blender Artists, or Gumroad for distribution. Add a short README.md explaining what the add-on does and how to install it.\u003C/p>\u003Cp>For add-ons with multiple files (e.g. separate modules, icons, or resources), create a folder then zip the entire folder (\u003Ccode>my_addon.zip\u003C/code>) and share that. Blender can install \u003Ccode>.zip\u003C/code> archives directly via the same Install… button so no need to extract it beforehand. The main entry point must be named \u003Ccode>__init__.py\u003C/code>, since Blender treats it as a Python package.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Creating UI for Blender add-ons is intimidating at first, but it’s one of the easiest ways to share a tool you created. 
Once you understand how panels and layouts work, you can quickly add buttons, properties, and organized sections that users will find intuitive.\u003C/p>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blender-ui-addon-script?ref=blog.cg-wire.com\">Have a look at the code repository on Github\u003C/a> to try the example yourself.\u003C/p>\u003Cp>Start small by adding a simple panel, a label, and a button to create an action, and build from there!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>",{"uuid":465,"comment_id":466,"feature_image":467,"featured":105,"visibility":10,"created_at":468,"updated_at":469,"custom_excerpt":470,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":471,"primary_tag":472,"url":473,"excerpt":470,"reading_time":214,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":474},"e18120b7-5615-497e-8db8-9f03ceee9526","6922df21009fc3000190e38e","https://images.unsplash.com/photo-1760548425425-e42e77fa38f1?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fGNvZGluZyUyMGludGVyZmFjZSUyMHRvb2xzfGVufDB8fHx8MTc2Mzg5MzE4MXww&ixlib=rb-4.1.0&q=80&w=2000","2025-11-23T11:17:05.000+01:00","2026-02-20T06:03:59.000+01:00","Turn your Blender scripts into real tools artists love using—here’s how to build clean, intuitive UI panels for your add-ons.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@jakubzerdzicki?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Jakub Żerdzicki\u003C/span>\u003C/a>\u003Cspan 
style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-addon-ui-scripting-guide","2025-11-24T10:00:34.000+01:00",{"title":460},"blender-addon-ui-scripting-guide","posts/blender-addon-ui-scripting-guide",[481,482],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"3-HhmFMhJkG_7Y2WuAQl2Cmyemg5YE38Mtwl_osaN7w",{"id":485,"title":486,"authors":487,"body":7,"description":7,"extension":8,"html":489,"meta":490,"navigation":14,"path":501,"published_at":502,"seo":503,"slug":504,"stem":505,"tags":506,"__hash__":509,"uuid":491,"comment_id":492,"feature_image":493,"featured":105,"visibility":10,"created_at":494,"updated_at":495,"custom_excerpt":496,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":497,"primary_tag":498,"url":499,"excerpt":496,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":500},"ghost/posts:blender-scripting-geometry-nodes-2.json","How to Script Geometry Nodes in Blender with Python 
(2026)",[488],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🐍\u003C/div>\u003Cdiv class=\"kg-callout-text\">Procedural modeling becomes far more powerful when you generate nodes with code instead of wiring them by hand.\u003C/div>\u003C/div>\u003Cp>Geometry nodes are an incredible Blender feature, but did you know Blender's Python API also lets you script geometry nodes just like any other data block?\u003C/p>\u003Cp>You can create nodes, set their parameters, and connect them programmatically, opening the door to automated scene generation, custom tools, and rapid model prototyping with just a few lines of code instead of manually wiring dozens of nodes.\u003C/p>\u003Cp>In this tutorial, you'll learn how to create geometry node setups entirely from a Python script. We'll cover the full process from building a new node tree to assigning it to an object with clear examples you can paste directly into Blender's scripting editor.\u003C/p>\u003Cp>In case you missed it, have a look at \u003Ca href=\"https://blog.cg-wire.com/blender-scripting-animation/\">our introduction to Blender scripting\u003C/a> first.\u003C/p>\u003Chr>\u003Ch2 id=\"why-script-geometry-nodes\">\u003Cstrong>Why Script Geometry Nodes?\u003C/strong>\u003C/h2>\u003Cp>Blender's Geometry Nodes editor is an excellent visual system for building procedural tools: it's intuitive, flexible, and great for experimentation once you get the hang of it. 
But as projects grow in complexity, manually managing large node networks can become tedious and difficult to maintain, especially if you need to reuse them throughout many 3D modeling pipelines.\u003C/p>\u003Cp>Scripting allows you to generate, modify, and connect nodes automatically. Instead of manually recreating the same setups across multiple projects, you can write a script once and reuse it whenever you need it to save time or make your animations more consistent.\u003C/p>\u003Cp>A scripted node setup isn't tied to a single .blend file: it can be stored, versioned, and shared just like any other piece of code. This makes it easy to build a library of procedural tools that can be reused across different projects or shared with other artists and developers.\u003C/p>\u003Cp>Let's see how scripting works in practice with a few code snippets.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-scripting-geometry-nodes?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-scripting-geometry-nodes\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-creating-a-new-node-tree\">\u003Cstrong>1. Creating a New Node Tree\u003C/strong>\u003C/h2>\u003Cp>Every Geometry Nodes setup starts as a node tree, which stores nodes and their connections. 
You can create one from Python using Blender's data API:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nnode_tree = bpy.data.node_groups.new(\"MyGeoNodesTree\", 'GeometryNodeTree')\u003C/code>\u003C/pre>\u003Cp>You can think of this \u003Ccode>node_tree\u003C/code> as the digital canvas that will hold all your procedural logic. Once created, you can add nodes, connect them, and set their properties like in Blender's graphical user interface.\u003C/p>\u003Chr>\u003Ch2 id=\"2-add-nodes-and-connect-them\">\u003Cstrong>2. Add Nodes and Connect Them\u003C/strong>\u003C/h2>\u003Cp>Next, let's add a few basic nodes. We'll create an Input Geometry node, a Subdivision Surface node, and a Group Output node, then connect them and apply the result to our cube.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\"># ADD NODES\ngeo_input = node_tree.interface.new_socket(\n&nbsp;&nbsp;&nbsp;&nbsp;name=\"Geometry\",\n&nbsp;&nbsp;&nbsp;&nbsp;in_out='INPUT',\n&nbsp;&nbsp;&nbsp;&nbsp;socket_type='NodeSocketGeometry'\n)\ngeo_output = node_tree.interface.new_socket(\n&nbsp;&nbsp;&nbsp;&nbsp;name=\"Geometry\",\n&nbsp;&nbsp;&nbsp;&nbsp;in_out='OUTPUT',\n&nbsp;&nbsp;&nbsp;&nbsp;socket_type='NodeSocketGeometry'\n)\n\ninput_node = node_tree.nodes.new(\"NodeGroupInput\")\nsubdivide_node = node_tree.nodes.new(\"GeometryNodeSubdivideMesh\")\noutput_node = node_tree.nodes.new(\"NodeGroupOutput\")\n\ninput_node.location = (-300, 0)\nsubdivide_node.location = (0, 0)\noutput_node.location = (300, 0)\n\n# LINK NODES\nnode_tree.links.new(input_node.outputs['Geometry'], subdivide_node.inputs['Mesh'])\nnode_tree.links.new(subdivide_node.outputs['Mesh'], output_node.inputs['Geometry'])\n\n# APPLY TO CURRENT OBJECT\nobj = bpy.context.object\nmod = obj.modifiers.new(\"MyGeoNodesModifier\", \"NODES\")\nmod.node_group = node_tree\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>When you run this script, you'll have a functional (though simple) geometry node setup that subdivides any geometry 
it's applied to:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-de23dbc9-781f-4730-9a46-a6fec93c97a7.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1314\" height=\"889\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-de23dbc9-781f-4730-9a46-a6fec93c97a7.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-de23dbc9-781f-4730-9a46-a6fec93c97a7.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-de23dbc9-781f-4730-9a46-a6fec93c97a7.png 1314w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-set-parameters-and-link-geometry-to-objects\">\u003Cstrong>3. Set Parameters and Link Geometry to Objects\u003C/strong>\u003C/h2>\u003Cp>You can modify parameters directly via the node's properties. 
For example, let's increase the subdivision level and apply this node group to an object:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">subdivide_node.inputs['Level'].default_value = 3\u003C/code>\u003C/pre>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-18e48250-6a76-4eda-b14c-ce8065b78f9e.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1314\" height=\"889\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-18e48250-6a76-4eda-b14c-ce8065b78f9e.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-18e48250-6a76-4eda-b14c-ce8065b78f9e.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-18e48250-6a76-4eda-b14c-ce8065b78f9e.png 1314w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Adjusting \u003Ccode>default_value\u003C/code> for inputs is an easy way to parameterize your setup.\u003C/p>\u003Cp>For a full breakdown of the available parameters and types, refer to \u003Ca href=\"https://docs.blender.org/api/current/bpy.types.Node.html?ref=blog.cg-wire.com\">the official Blender Python API documentation\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"4-create-a-custom-%E2%80%9Ccube-crowd-generator%E2%80%9D-node-group-programmatically\">\u003Cstrong>4. Create a Custom “Cube Crowd Generator” Node Group Programmatically\u003C/strong>\u003C/h2>\u003Cp>We now know how to define geometry nodes programmatically, but what about creating reusable custom nodes?\u003C/p>\u003Cp>Let's work on a new example that builds a tiny procedural system that scatters many cubes on a surface. 
The script creates a Geometry Nodes group that takes a surface, scatters points across it, randomly offsets those points, places a cube on each point (instances), converts the instances to real geometry, and outputs the final mesh as \"Cubes\".\u003C/p>\u003Cp>\u003C/p>\u003Ch3 id=\"1-create-a-new-node-group\">\u003Cstrong>1) Create a new node group\u003C/strong>\u003C/h3>\u003Cp>First, we create a new Geometry Node group in Blender named \u003Ccode>\"CubeCrowdGenerator\"\u003C/code>.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">crowd_group = bpy.data.node_groups.new(\"CubeCrowdGenerator\", \"GeometryNodeTree\")\u003C/code>\u003C/pre>\u003Cp>Like a function, we want to be able to attach this node to any object with a Geometry Nodes modifier later on.\u003C/p>\u003Cp>\u003C/p>\u003Ch3 id=\"2-add-group-input-and-output-nodes-uientry-points\">\u003Cstrong>2) Add group input and output nodes (UI/entry points)\u003C/strong>\u003C/h3>\u003Cp>We place standard input and output groups on the canvas as usual:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">group_in = crowd_group.nodes.new(\"NodeGroupInput\")\ngroup_out = crowd_group.nodes.new(\"NodeGroupOutput\")\n\ngroup_in.location = (-600, 0)\ngroup_out.location = (600, 0)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>group_in\u003C/code> and \u003Ccode>group_out\u003C/code> are the visible sockets of the node group in the Geometry Nodes editor.\u003C/li>\u003Cli>The script also positions them so the graph is readable.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"3-define-the-group-interface-what-the-group-acceptsreturns\">\u003Cstrong>3) Define the group interface (what the group accepts/returns)\u003C/strong>\u003C/h3>\u003Cp>We need to expose an \u003Cstrong>input socket named \u003Ccode>Surface\u003C/code>\u003C/strong> where we'll plug the mesh you want to populate (e.g., a plane) and an \u003Cstrong>output socket named \u003Ccode>Cubes\u003C/code>\u003C/strong>, the resulting 
geometry.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">interface = crowd_group.interface\ninterface.new_socket(name=\"Surface\", in_out=\"INPUT\", socket_type=\"NodeSocketGeometry\")\ninterface.new_socket(name=\"Cubes\", in_out=\"OUTPUT\", socket_type=\"NodeSocketGeometry\")\u003C/code>\u003C/pre>\u003Cp>In practice, when you add this node group to an object, you will plug its surface (an object's original geometry) into \u003Ccode>Surface\u003C/code>.\u003C/p>\u003Cp>\u003C/p>\u003Ch3 id=\"4-create-the-internal-nodes-the-building-blocks\">\u003Cstrong>4) Create the internal nodes (the building blocks)\u003C/strong>\u003C/h3>\u003Cp>We can then work on the actual internal logic:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">distribute = crowd_group.nodes.new(\"GeometryNodeDistributePointsOnFaces\")\nrand_vec = crowd_group.nodes.new(\"FunctionNodeRandomValue\")\nset_pos = crowd_group.nodes.new(\"GeometryNodeSetPosition\")\ncube = crowd_group.nodes.new(\"GeometryNodeMeshCube\")\ninstance = crowd_group.nodes.new(\"GeometryNodeInstanceOnPoints\")\nrealize = crowd_group.nodes.new(\"GeometryNodeRealizeInstances\")\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Cstrong>GeometryNodeDistributePointsOnFaces\u003C/strong>: creates points across the input surface (controls how many points, distribution).\u003C/li>\u003Cli>\u003Cstrong>FunctionNodeRandomValue (Float Vector)\u003C/strong>: produces a random 3D vector per point used as an offset.\u003C/li>\u003Cli>\u003Cstrong>GeometryNodeSetPosition\u003C/strong>: moves each point by a vector (the random offset).\u003C/li>\u003Cli>\u003Cstrong>GeometryNodeMeshCube\u003C/strong>: generates a cube mesh that will be used as the instance object.\u003C/li>\u003Cli>\u003Cstrong>GeometryNodeInstanceOnPoints\u003C/strong>: places the cube on each point. 
It doesn't create real geometry, it's just a cheap instance of the original cube.\u003C/li>\u003Cli>\u003Cstrong>GeometryNodeRealizeInstances\u003C/strong>: converts instances into actual mesh geometry so they can be output as a single mesh.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"5-configure-the-random-vector-node\">\u003Cstrong>5) Configure the random vector node\u003C/strong>\u003C/h3>\u003Cp>We set the \u003Ccode>Random Value\u003C/code> node to return a \u003Cstrong>3-component vector \u003C/strong>we can use to offset the generated cubes in the 3D space:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">rand_vec.data_type = \"FLOAT_VECTOR\"\nrand_vec.inputs[\"Min\"].default_value = (-0.5, -0.5, 0.0)\nrand_vec.inputs[\"Max\"].default_value = (0.5, 0.5, 0.5)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>Min\u003C/code> and \u003Ccode>Max\u003C/code> define the range for each component. For example, X will be between \u003Ccode>-0.5\u003C/code> and \u003Ccode>0.5\u003C/code>.\u003C/li>\u003Cli>Result: each point gets a slightly different offset so cubes don't sit exactly on top of one another.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"6-node-layout-ui-only\">\u003Cstrong>6) Node layout (UI only)\u003C/strong>\u003C/h3>\u003Cp>We then position the internal nodes to make them easy to understand if we want to check our workflow in Blender:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">distribute.location = (-400, 0)\nrand_vec.location = (-200, -200)\nset_pos.location = (-100, 0)\ninstance.location = (100, 0)\ncube.location = (-400, -200)\nrealize.location = (300, 0)\u003C/code>\u003C/pre>\u003Cp>These \u003Ccode>location\u003C/code> assignments only affect how the nodes are visually arranged in the node editor. 
They don't affect what the graph does.\u003C/p>\u003Cp>\u003C/p>\u003Ch3 id=\"7-wire-the-nodes-together\">\u003Cstrong>7) Wire the nodes together\u003C/strong>\u003C/h3>\u003Cp>Finally, we define how the data flows:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">links = crowd_group.links\nlinks.new(group_in.outputs[\"Surface\"], distribute.inputs[\"Mesh\"])\nlinks.new(distribute.outputs[\"Points\"], set_pos.inputs[\"Geometry\"])\nlinks.new(rand_vec.outputs[\"Value\"], set_pos.inputs[\"Offset\"])\nlinks.new(set_pos.outputs[\"Geometry\"], instance.inputs[\"Points\"])\nlinks.new(cube.outputs[\"Mesh\"], instance.inputs[\"Instance\"])\nlinks.new(instance.outputs[\"Instances\"], realize.inputs[\"Geometry\"])\nlinks.new(realize.outputs[\"Geometry\"], group_out.inputs[\"Cubes\"])\u003C/code>\u003C/pre>\u003Col>\u003Cli>\u003Cstrong>Surface → DistributePointsOnFaces\u003C/strong>: the input surface (plane) is used to create scattered points.\u003C/li>\u003Cli>\u003Cstrong>Points → SetPosition (Geometry)\u003C/strong>: set position receives the points as geometry to be moved.\u003C/li>\u003Cli>\u003Cstrong>RandomValue → SetPosition (Offset)\u003C/strong>: each point gets a random vector offset.\u003C/li>\u003Cli>\u003Cstrong>SetPosition → InstanceOnPoints (Points)\u003C/strong>: the moved points become the anchor positions for instances.\u003C/li>\u003Cli>\u003Cstrong>Cube Mesh → InstanceOnPoints (Instance)\u003C/strong>: each point receives a cube instance.\u003C/li>\u003Cli>\u003Cstrong>InstanceOnPoints → RealizeInstances\u003C/strong>: instances are converted to mesh geometry.\u003C/li>\u003Cli>\u003Cstrong>RealizeInstances → Group Output (\"Cubes\")\u003C/strong>: final result is made available as the group's output.\u003C/li>\u003C/ol>\u003Cp>This is the full code we obtained:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\n# Create a new Geometry Node group\ncrowd_group = bpy.data.node_groups.new(\"CubeCrowdGenerator\", \"GeometryNodeTree\")\n\n# Create input/output 
nodes\ngroup_in = crowd_group.nodes.new(\"NodeGroupInput\")\ngroup_out = crowd_group.nodes.new(\"NodeGroupOutput\")\n\ngroup_in.location = (-600, 0)\ngroup_out.location = (600, 0)\n\n# Define group interface sockets\ninterface = crowd_group.interface\ninterface.new_socket(name=\"Surface\", in_out=\"INPUT\", socket_type=\"NodeSocketGeometry\")\ninterface.new_socket(name=\"Cubes\", in_out=\"OUTPUT\", socket_type=\"NodeSocketGeometry\")\n\n# Create internal nodes\ndistribute = crowd_group.nodes.new(\"GeometryNodeDistributePointsOnFaces\")\ninstance = crowd_group.nodes.new(\"GeometryNodeInstanceOnPoints\")\ncube = crowd_group.nodes.new(\"GeometryNodeMeshCube\")\nrealize = crowd_group.nodes.new(\"GeometryNodeRealizeInstances\")\nset_pos = crowd_group.nodes.new(\"GeometryNodeSetPosition\")\nrand_vec = crowd_group.nodes.new(\"FunctionNodeRandomValue\")\n\n# Configure random vector node\nrand_vec.data_type = \"FLOAT_VECTOR\"\nrand_vec.inputs[\"Min\"].default_value = (-0.5, -0.5, 0.0)  # minimum offset\nrand_vec.inputs[\"Max\"].default_value = (0.5, 0.5, 0.5)  # maximum offset\n\n# Layout nodes\ndistribute.location = (-400, 0)\nrand_vec.location = (-200, -200)\nset_pos.location = (-100, 0)\ninstance.location = (100, 0)\ncube.location = (-400, -200)\nrealize.location = (300, 0)\n\n# Create links\nlinks = crowd_group.links\nlinks.new(group_in.outputs[\"Surface\"], distribute.inputs[\"Mesh\"])\nlinks.new(distribute.outputs[\"Points\"], set_pos.inputs[\"Geometry\"])\nlinks.new(rand_vec.outputs[\"Value\"], set_pos.inputs[\"Offset\"])\nlinks.new(set_pos.outputs[\"Geometry\"], instance.inputs[\"Points\"])\nlinks.new(cube.outputs[\"Mesh\"], instance.inputs[\"Instance\"])\nlinks.new(instance.outputs[\"Instances\"], realize.inputs[\"Geometry\"])\nlinks.new(realize.outputs[\"Geometry\"], group_out.inputs[\"Cubes\"])\u003C/code>\u003C/pre>\u003Cp>Now we just copy/paste this script into the scripting workspace, run it, and we can now add our custom node from the geometry node 
workspace:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-d4ff8437-efb6-43b0-b45d-a54fce0b74b6.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1430\" height=\"920\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-d4ff8437-efb6-43b0-b45d-a54fce0b74b6.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-d4ff8437-efb6-43b0-b45d-a54fce0b74b6.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-d4ff8437-efb6-43b0-b45d-a54fce0b74b6.png 1430w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>We can open the node group to see what's inside by double-clicking on it:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-679df6c4-2877-4419-8b79-4758df98290a.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1430\" height=\"920\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-679df6c4-2877-4419-8b79-4758df98290a.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-679df6c4-2877-4419-8b79-4758df98290a.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-679df6c4-2877-4419-8b79-4758df98290a.png 1430w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>With just a few dozen lines of code, you can script Geometry Nodes setups that would take much longer to assemble manually. 
You've learned in this article how to create Geometry Node trees, add and connect nodes programmatically, control parameters and assign node trees to objects, and build a full procedural system.\u003C/p>\u003Cp>Have a look at \u003Ca href=\"https://github.com/cgwire/blender-scripting-geometry-nodes?ref=blog.cg-wire.com\">the code repository on Github\u003C/a> to try the example yourself!\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-db488d5a-7ab5-4471-a904-0926b1fa7d11.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1314\" height=\"889\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-db488d5a-7ab5-4471-a904-0926b1fa7d11.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-db488d5a-7ab5-4471-a904-0926b1fa7d11.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-db488d5a-7ab5-4471-a904-0926b1fa7d11.png 1314w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>This approach unlocks endless automation potential, from tool development to generative art. \u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":491,"comment_id":492,"feature_image":493,"featured":105,"visibility":10,"created_at":494,"updated_at":495,"custom_excerpt":496,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":497,"primary_tag":498,"url":499,"excerpt":496,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":500},"93358eb1-5534-43ed-89a8-0b0de2f00072","691ae1dba0beff00013f02eb","https://images.unsplash.com/photo-1675044794037-9262cedb6d5d?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDV8fGJsZW5kZXIlMjBnZW9tZXRyeSUyMG5vZGVzfGVufDB8fHx8MTc2MzM2OTc0N3ww&ixlib=rb-4.1.0&q=80&w=2000","2025-11-17T09:50:35.000+01:00","2026-02-20T06:04:04.000+01:00","Learn how to script Blender Geometry Nodes using Python to automate procedural setups, generate node trees programmatically, and build reusable tools for your animation pipeline.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-scripting-geometry-nodes-2/","\u003Cspan style=\"white-space: 
pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@mirzaie?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Mehdi Mirzaie\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-scripting-geometry-nodes-2","2025-11-17T10:13:21.000+01:00",{"title":486},"blender-scripting-geometry-nodes-2","posts/blender-scripting-geometry-nodes-2",[507,508],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"ABwCKyHYQd2e24_gRrEcz2gAc349u2DzqkOMZrfJtyU",{"id":511,"title":512,"authors":513,"body":7,"description":7,"extension":8,"html":515,"meta":516,"navigation":14,"path":525,"published_at":526,"seo":527,"slug":528,"stem":529,"tags":530,"__hash__":532,"uuid":517,"comment_id":518,"feature_image":519,"featured":105,"visibility":10,"created_at":520,"updated_at":209,"custom_excerpt":521,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":522,"primary_tag":523,"url":524,"excerpt":521,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_s
ubject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":266},"ghost/posts:ffmpeg-commands-for-animators.json","10 FFmpeg Commands Every Animator Should Know In 2026",[514],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📼\u003C/div>\u003Cdiv class=\"kg-callout-text\">Think video conversion tools are just for editors? Think again. FFmpeg is the secret weapon hiding inside every animation pipeline — used by studios like YouTube, Blender, and DaVinci Resolve — and it can save you \u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">hours\u003C/em>\u003C/i> of manual work once you know how to use it.\u003C/div>\u003C/div>\u003Cp>If you work in animation or video production, you have already met FFmpeg.\u003C/p>\u003Cp>Despite being open-source and used by giants like YouTube, Blender, and DaVinci Resolve, FFmpeg often stays hidden in the background and few artists are aware of its worth.\u003C/p>\u003Cp>In this guide, we’ll walk through 10 practical FFmpeg commands every animator or pipeline artist should know to save hours of manual work.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-ffmpeg\">\u003Cstrong>What's FFmpeg?\u003C/strong>\u003C/h2>\u003Cp>FFmpeg is a powerful open-source command line toolkit for working with video, audio, and image data. 
It’s not a single program, rather a suite of tools that handle nearly every kind of media processing task imaginable:\u003C/p>\u003Cul>\u003Cli>Convert between almost any video, audio, or image format.\u003C/li>\u003Cli>Assemble image sequences into movies (and vice versa).\u003C/li>\u003Cli>Compress or transcode large files for reviews or uploads.\u003C/li>\u003Cli>Filters: crop, scale, color adjust, overlay, blur, etc.\u003C/li>\u003Cli>Sync or combine multiple audio/video sources.\u003C/li>\u003Cli>Analyze media metadata (frame rate, codec, bit depth, etc.).\u003C/li>\u003Cli>Automate batch processing in pipelines via scripts.\u003C/li>\u003C/ul>\u003Cp>We can't list down all the nice features it offers, but let’s start with 10 practical FFmpeg commands with examples you can drop straight into your terminal.\u003C/p>\u003Chr>\u003Ch2 id=\"1-compile-an-image-sequence-into-a-video\">\u003Cstrong>1. Compile an Image Sequence into a Video\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\">Renderers like Blender's\u003C/a> allow outputting image sequences (e.g., thousands of EXRs or PNGs) rather than single movie files. This is safer because if a render crashes, you can resume from there. The problem is that those sequences aren’t playable or easy to review.\u003C/p>\u003Cp>FFmpeg can stitch all frames into a single video file in seconds to create a lightweight, shareable version of your shot:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -framerate 24 -i frame_%04d.png -c:v libx264 -pix_fmt yuv420p output.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>-framerate 24\u003C/code> - tells FFmpeg to read the sequence at 24 frames per second.\u003C/li>\u003Cli>\u003Ccode>-i frame_%04d.png - %04d\u003C/code> means four digits padded with zeros (e.g. \u003Ccode>0001\u003C/code>, \u003Ccode>0002\u003C/code> …). 
You'll need more digits if your sequence goes above 1000 frames.\u003C/li>\u003Cli>\u003Ccode>-c:v libx264\u003C/code> - encodes the video using the H.264 codec, a good default for reviews.\u003C/li>\u003Cli>\u003Ccode>-pix_fmt yuv420p\u003C/code> - ensures broad compatibility (especially with media players and browsers).\u003C/li>\u003Cli>\u003Ccode>output.mp4\u003C/code> - the name of the final video file.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"2-create-a-quick-low-res-review\">\u003Cstrong>2. Create a Quick Low-Res Review\u003C/strong>\u003C/h2>\u003Cp>High-res renders (4K, full-quality EXRs, or ProRes) of several Gbs are too heavy \u003Ca href=\"https://blog.cg-wire.com/how-to-give-efficient-animation-feedback/\">to send over Slack for feedback\u003C/a>: you need smaller, fast-loading versions for daily reviews.\u003C/p>\u003Cp>Just scale and compress a master video automatically to get a playable version without re-rendering:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i output.mp4 -vf scale=960:-1 -b:v 1M review.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>-i output.mp4\u003C/code> - input file (your high-quality render).\u003C/li>\u003Cli>\u003Ccode>-vf scale=960:-1\u003C/code> - rescales video width to 960 pixels and automatically adjusts height (\u003Ccode>-1\u003C/code>) to keep aspect ratio.\u003C/li>\u003Cli>\u003Ccode>-b:v 1M\u003C/code> - sets video bitrate to 1 megabit per second - a good low size/high speed compromise.\u003C/li>\u003Cli>\u003Ccode>review.mp4\u003C/code> - output file.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"3-overlay-a-logo-or-watermark\">\u003Cstrong>3. Overlay a Logo or Watermark\u003C/strong>\u003C/h2>\u003Cp>Studios and freelancers often share work-in-progress files. 
But without a watermark, previews can be redistributed, leaked, or confused for final versions.\u003C/p>\u003Cp>With a single FFmpeg command, you can overlay a studio logo, username, or “Work In Progress” tag on every frame.\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i input.mp4 -i logo.png -filter_complex \"overlay=10:10\" branded.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>-i input.mp4\u003C/code> - main video.\u003C/li>\u003Cli>\u003Ccode>-i logo.png\u003C/code> - image to overlay (must have transparency or you’ll get a solid box).\u003C/li>\u003Cli>\u003Ccode>-filter_complex \"overlay=10:10\"\u003C/code> - applies an overlay filter, positioning logo 10px from top-left corner.\u003C/li>\u003Cli>\u003Ccode>branded.mp4\u003C/code> - result with watermark applied.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-44bab5d8-5532-4d0b-9347-12812a0e1271.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"848\" height=\"527\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-44bab5d8-5532-4d0b-9347-12812a0e1271.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-44bab5d8-5532-4d0b-9347-12812a0e1271.png 848w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"4-burn-frame-numbers-or-timecode\">\u003Cstrong>4. 
Burn Frame Numbers or Timecode\u003C/strong>\u003C/h2>\u003Cp>During client or team reviews, everyone needs to reference exact frames for notes, so unlabelled footage makes it impossible to align feedback.\u003C/p>\u003Cp>FFmpeg’s drawtext filter can burn frame numbers or running timecodes into your video to provide a precise reference system, helping supervisors and animators stay synchronized during reviews.\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i input.mp4 -vf \"drawtext=text='%{n}':x=10:y=H-th-10:fontsize=24:fontcolor=white\" numbered.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>drawtext\u003C/code> filter draws text on each frame.\u003C/li>\u003Cli>\u003Ccode>text='%{n}'\u003C/code> - inserts frame number.\u003C/li>\u003Cli>\u003Ccode>x=10:y=H-th-10\u003C/code> - places text 10px from bottom-left.\u003C/li>\u003Cli>\u003Ccode>fontsize\u003C/code>, \u003Ccode>fontcolor\u003C/code> - control look.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-84b1c23e-6e65-493e-bf3c-96c254d28234.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"848\" height=\"527\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-84b1c23e-6e65-493e-bf3c-96c254d28234.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-84b1c23e-6e65-493e-bf3c-96c254d28234.png 848w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Or for timecode using the presentation timestamp (PTS) formatted as hours:minutes:seconds:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i input.mp4 -vf \"drawtext=text='%{pts\\:hms}':x=10:y=H-th-10:fontsize=24:fontcolor=white\" timecode.mp4\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"5-create-looping-clips-turntables\">\u003Cstrong>5. 
Create Looping Clips (Turntables)\u003C/strong>\u003C/h2>\u003Cp>When presenting 3D models or shots, you often need looping turntables for portfolios, internal libraries, or demo reels. Manually duplicating clips in an editor is tedious.\u003C/p>\u003Cp>FFmpeg can loop any clip a chosen number of times with -stream_loop, creating continuous playbacks instantly without re-rendering:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -stream_loop 3 -i turntable.mp4 -c copy looped.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>-stream_loop 3\u003C/code> - plays the input 3 extra times.\u003C/li>\u003Cli>\u003Ccode>-i turntable.mp4\u003C/code> - your original animation.\u003C/li>\u003Cli>\u003Ccode>-c copy\u003C/code> - copies audio/video streams without re-encoding (fast, lossless).\u003C/li>\u003Cli>\u003Ccode>looped.mp4\u003C/code> - final output.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"6-add-sound-to-a-silent-render\">\u003Cstrong>6. Add Sound to a Silent Render\u003C/strong>\u003C/h2>\u003Cp>Renders from 3D software don’t include audio, even if your animation is synced to dialogue or music, and adding sound manually in Premiere or After Effects can be time-consuming for quick previews.\u003C/p>\u003Cp>FFmpeg can merge a silent render with an audio track instantly, syncing them without a timeline-based editor.\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i render.mp4 -i music.wav -c:v copy -c:a aac -shortest final.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>-i render.mp4\u003C/code> - video input.\u003C/li>\u003Cli>\u003Ccode>-i music.wav\u003C/code> - audio input.\u003C/li>\u003Cli>\u003Ccode>-c:v copy\u003C/code> - keeps the existing video stream (no re-rendering).\u003C/li>\u003Cli>\u003Ccode>-c:a aac\u003C/code> - encodes audio to AAC (widely supported).\u003C/li>\u003Cli>\u003Ccode>-shortest\u003C/code> - stops encoding when the shorter of the two tracks ends.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 
id=\"7-extract-every-nth-frame\">\u003Cstrong>7. Extract Every Nth Frame\u003C/strong>\u003C/h2>\u003Cp>Reviewing every single frame from a long shot is slow, especially for motion analysis, flicker detection, or checking exposure shifts. Sometimes, you just want to sample frames like one every 10 or 20.\u003C/p>\u003Cp>FFmpeg’s \u003Ccode>select\u003C/code> filter allows you to extract every nth frame automatically. It’s perfect for quick motion diagnostics, creating contact sheets, or generating thumbnails:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i input.mp4 -vf \"select='not(mod(n,10))',setpts=N/FRAME_RATE/TB\" frames_%04d.png\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>select='not(mod(n,10))'\u003C/code> - processes only frames where the frame number n is divisible by 10 (every 10th).\u003C/li>\u003Cli>\u003Ccode>setpts=N/FRAME_RATE/TB\u003C/code> - corrects timestamps so output doesn’t play back too fast.\u003C/li>\u003Cli>\u003Ccode>frames_%04d.png\u003C/code> - naming pattern for extracted images.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"8-compare-two-versions-ab-diff\">\u003Cstrong>8. Compare Two Versions (A/B Diff)\u003C/strong>\u003C/h2>\u003Cp>When testing lighting tweaks, color corrections, or denoising updates, it’s hard to see small visual differences between two versions by eye.\u003C/p>\u003Cp>FFmpeg’s \u003Ccode>blend=all_mode=difference\u003C/code> filter subtracts one version from the other and shows differences as bright pixels. 
It’s a fast way to QA version changes.\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i old.mp4 -i new.mp4 -filter_complex \"blend=all_mode=difference\" diff.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>Two input files: old and new render.\u003C/li>\u003Cli>\u003Ccode>blend=all_mode=difference\u003C/code> - subtracts pixel values of one from the other, showing where they differ.\u003C/li>\u003Cli>\u003Ccode>diff.mp4\u003C/code> - bright pixels = changes, dark = no difference.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-57adc37e-d8c2-407a-9057-1739a959c61f.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"848\" height=\"527\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-57adc37e-d8c2-407a-9057-1739a959c61f.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-57adc37e-d8c2-407a-9057-1739a959c61f.png 848w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"9-combine-render-passes-side-by-side\">\u003Cstrong>9. Combine Render Passes Side-by-Side\u003C/strong>\u003C/h2>\u003Cp>Artists often need to compare two passes (e.g., old vs. new). Opening them in compositing software just to compare layout or lighting is overkill.\u003C/p>\u003Cp>The \u003Ccode>hstack\u003C/code> (or \u003Ccode>vstack\u003C/code>) filter places videos side-by-side or vertically for easy comparison. It’s perfect for review exports or before/after videos showing changes to clients or supervisors.\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i pass1.mp4 -i pass2.mp4 -filter_complex \"hstack\" side_by_side.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>Two input videos.\u003C/li>\u003Cli>\u003Ccode>hstack\u003C/code> - stacks them horizontally. 
Use \u003Ccode>vstack\u003C/code> to stack vertically instead.\u003C/li>\u003Cli>\u003Ccode>side_by_side.mp4\u003C/code> - output file.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-77024499-0432-4930-97d8-c1aa0942c2e9.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1186\" height=\"748\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-77024499-0432-4930-97d8-c1aa0942c2e9.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-77024499-0432-4930-97d8-c1aa0942c2e9.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-77024499-0432-4930-97d8-c1aa0942c2e9.png 1186w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You can also include the resulting video from the previous \u003Ccode>blend=all_mode=difference\u003C/code> command to quickly see the differences between frames:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i pass1.mp4 -i diff.mp4 -i pass2.mp4 \\\n-filter_complex \"[0:v][1:v]hstack=inputs=2[top]; [top][2:v]hstack=inputs=2\" \\\nside_by_side2.mp4\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-3179b0a2-949d-468c-ba70-153ae97f0d0c.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1186\" height=\"748\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-3179b0a2-949d-468c-ba70-153ae97f0d0c.png 600w, 
https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-3179b0a2-949d-468c-ba70-153ae97f0d0c.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-3179b0a2-949d-468c-ba70-153ae97f0d0c.png 1186w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"10-re-time-animation-slow-mo-or-speed-up\">\u003Cstrong>10. Re-Time Animation (Slow-Mo or Speed-Up)\u003C/strong>\u003C/h2>\u003Cp>Timing tweaks like previewing a slower camera move or checking a fast motion test usually require re-rendering or editing in software. That’s inefficient just to try different pacing.\u003C/p>\u003Cp>FFmpeg can alter playback speed on the fly by adjusting frame timestamps to let animators preview alternate speeds instantly.\u003C/p>\u003Cp>Slow down to half speed:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i input.mp4 -filter:v \"setpts=2.0*PTS\" slowmo.mp4\u003C/code>\u003C/pre>\u003Cp>Speed up 2×:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -i input.mp4 -filter:v \"setpts=0.5*PTS\" fast.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>The \u003Ccode>setpts\u003C/code> filter manipulates the presentation timestamps (PTS) of each frame.\u003C/li>\u003Cli>Multiplying by 2.0 doubles playback time (slower).\u003C/li>\u003Cli>Multiplying by 0.5 halves it (faster).\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>FFmpeg isn’t just a video converter. With a few lines of text, you can automate tasks that usually take minutes or hours in traditional software: batch rendering, version comparisons, review exports... You name it.\u003C/p>\u003Cp>Once you get comfortable with the syntax, FFmpeg is an extension of your creative workflow. 
Pick one command from this list, drop it into your next render pipeline, and watch how much smoother your daily production becomes!\u003C/p>\u003Cp>But that's not all. Combine the power of ffmpeg with DCC scripts (like \u003Ca href=\"https://blog.cg-wire.com/blender-scripting-animation/\">Blender scripting\u003C/a>) and you'll unlock superpowers beyond human comprehension (like automating entire scene creations). \u003Ca href=\"https://blog.cg-wire.com/\">Subscribe to our blog\u003C/a> for more!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>\u003Cp>\u003C/p>",{"uuid":517,"comment_id":518,"feature_image":519,"featured":105,"visibility":10,"created_at":520,"updated_at":209,"custom_excerpt":521,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":522,"primary_tag":523,"url":524,"excerpt":521,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":266},"93788414-98a7-4015-8e42-e5214d9567d9","6909b6f1df0ae600014fbb5a","https://images.unsplash.com/photo-1727142073871-d40f5a7c76d8?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDMwfHx2aWRlbyUyMGVuY29kaW5nJTIwdGVybWluYWx8ZW58MHx8fHwxNzYyMjQ1NjQ3fDA&ixlib=rb-4.1.0&q=80&w=2000","2025-11-04T09:18:57.000+01:00","FFmpeg is one of the most powerful media tools used in animation and video production — yet many artists barely scratch the surface of what it can do. 
Learn 10 essential FFmpeg commands for assembling renders, adding audio, overlaying logos, comparing versions, and optimizing review exports.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/ffmpeg-commands-for-animators/","/posts/ffmpeg-commands-for-animators","2025-11-04T10:09:54.000+01:00",{"title":512},"ffmpeg-commands-for-animators","posts/ffmpeg-commands-for-animators",[531],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"5xJDX4mIKsXbLF-f4fY8sM4QlPjb16pw8NEom2bEGLE",{"id":534,"title":535,"authors":536,"body":7,"description":7,"extension":8,"html":538,"meta":539,"navigation":14,"path":549,"published_at":550,"seo":551,"slug":552,"stem":553,"tags":554,"__hash__":557,"uuid":540,"comment_id":541,"feature_image":542,"featured":105,"visibility":10,"created_at":543,"updated_at":544,"custom_excerpt":545,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":546,"primary_tag":547,"url":548,"excerpt":545,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"featur
e_image_alt":7,"feature_image_caption":474},"ghost/posts:blender-scripting-animation.json","Blender Scripting for Animation Pipelines: 2026 Introduction",[537],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⚙️\u003C/div>\u003Cdiv class=\"kg-callout-text\">You can bend Blender to your will with just a few lines of code. Repetitive clicks? Gone. Complex scenes? Built in seconds. Custom tools? Yours to design. That’s the magic of scripting.\u003C/div>\u003C/div>\u003Cp>Blender’s graphical user interface is no doubt amazing, but there are always some tasks that feel like a grind: sharing previews with the team, tweaking endless settings in a new project, or doing the same steps over and over. Sometimes, you just wish there was a button that just did the thing, and scripting is how you unlock it!\u003C/p>\u003Cp>In this article, we’ll crack open Blender’s scripting feature using the Python programming language. You’ll learn how to write your first script, how to run it, and how Blender’s scripting modules are organized. By the end, you’ll have a good understanding of how to start optimizing your production pipeline.\u003C/p>\u003Chr>\u003Ch2 id=\"what-can-i-do-with-scripting\">\u003Cstrong>What Can I Do With Scripting?\u003C/strong>\u003C/h2>\u003Cp>Blender scripting isn’t just a neat trick for hobbyists: it’s a necessity for studios of every size.\u003C/p>\u003Cp>In production, speed and consistency are everything. Studios constantly face tight deadlines, large asset libraries, and the need to keep dozens of shots and scenes perfectly in sync across workstations. 
Doing that by hand is slow, error-prone, and expensive: that’s why automation is such a big deal!\u003C/p>\u003Cp>Scripting isn’t about writing code, it’s about giving yourself creative shortcuts and superpowers. With Python, you can automate the boring, repetitive tasks that eat up your time, or generate procedural geometry, materials, and even entire environments in just a few lines. You can \u003Cstrong>design your own tools and menus\u003C/strong> tailored to your workflow, and \u003Cstrong>take full control over scenes\u003C/strong>,\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>render settings\u003C/u>\u003C/a>, cameras, and lights. Scripting even lets you \u003Cstrong>connect Blender with external tools or APIs\u003C/strong>, making it a powerful part of larger pipelines.\u003C/p>\u003Chr>\u003Ch2 id=\"prerequisites\">\u003Cstrong>Prerequisites\u003C/strong>\u003C/h2>\u003Cp>Before diving in, make sure you have the following:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Blender\u003C/strong> - Download and install the latest version from\u003Ca href=\"https://www.blender.org/download/?ref=blog.cg-wire.com\"> \u003Cu>blender.org\u003C/u>\u003C/a>.\u003C/li>\u003Cli>\u003Cstrong>Python\u003C/strong> - You'll need the Python programming language to use Blender's native scripting modules and run programs from your operating system's terminal.\u003C/li>\u003C/ul>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-green\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca 
href=\"https://github.com/cgwire/intro-blender-scripting?ref=blog.cg-wire.com\">https://github.com/cgwire/intro-blender-scripting\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-create-a-new-script\">\u003Cstrong>1. Create a New Script\u003C/strong>\u003C/h2>\u003Cp>Inside Blender, open the \u003Cstrong>Scripting workspace\u003C/strong>. You’ll see a text editor panel where you can create a new script by clicking \u003Cstrong>New\u003C/strong>. This is where you can write your Python code, and it's particularly useful to see results in real-time:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-05bcd44b-e1a3-4f6a-a5c7-edb11e40b1fb.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"731\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-05bcd44b-e1a3-4f6a-a5c7-edb11e40b1fb.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-05bcd44b-e1a3-4f6a-a5c7-edb11e40b1fb.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-05bcd44b-e1a3-4f6a-a5c7-edb11e40b1fb.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>For a production pipeline, it's usually more useful to run a script from the command line interface. Fortunately, Python now ships Blender modules. 
In this tutorial, we'll run a Python program directly from the OS terminal to avoid the extra steps of navigating the graphical user interface, so the first step is to install the required Blender module:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">pip install bpy==3.6.0 --extra-index-url &lt;https://download.blender.org/pypi/&gt;\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>As a test, let's create a new empty Blender file using Python:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.ops.wm.save_as_mainfile(filepath=\"./new_empty_file.blend\")\u003C/code>\u003C/pre>\u003Cp>First, we import Blender’s \u003Cstrong>Python API module\u003C/strong> \u003Ccode>bpy\u003C/code>, which lets us control almost everything in Blender (objects, materials, rendering, etc.). Then, we save the current workspace in a new file.\u003C/p>\u003Cp>\u003C/p>\u003Cp>We can run the program in the terminal like so:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">python3 script.py\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>We can also open the newly created file with the Blender CLI:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">blender new_empty_file.blend\u003C/code>\u003C/pre>\u003Cp>Congrats! You completed your first script. Now, let's get to a more useful example: generating 3D text.\u003C/p>\u003Chr>\u003Ch2 id=\"2-hello-world-text-example\">\u003Cstrong>2. Hello World Text Example\u003C/strong>\u003C/h2>\u003Cp>Imagine you want to create a Star Wars intro animation. 
You know, the one with text slowly scrolling up at an angle:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-02ff3b4e-8e6f-4f1a-b6d0-e4fb9e0622eb.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"681\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-02ff3b4e-8e6f-4f1a-b6d0-e4fb9e0622eb.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-02ff3b4e-8e6f-4f1a-b6d0-e4fb9e0622eb.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-02ff3b4e-8e6f-4f1a-b6d0-e4fb9e0622eb.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>How would you do this efficiently to make it easy to edit? By using a script, of course! So let's try a simple example and generate some 3D text.\u003C/p>\u003Cp>We create a new file and delete all objects in the scene to start clean:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.ops.object.select_all(action='SELECT')\nbpy.ops.object.delete(use_global=False)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bpy.ops.object.select_all(action='SELECT')\u003C/code>: Selects all objects currently in the scene.\u003C/li>\u003Cli>\u003Ccode>bpy.ops.object.delete(use_global=False)\u003C/code>: Deletes all selected objects.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Cp>Just two instructions are needed to add a new text object to the scene:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.object.text_add(enter_editmode=False, location=(0, 0, 0))\ntext_obj = bpy.context.object\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bpy.ops.object.text_add(...)\u003C/code>: Adds a new \u003Cstrong>Text object\u003C/strong> at the location 
\u003Ccode>(0, 0, 0)\u003C/code> in the 3D world (XYZ coordinates).\u003C/li>\u003Cli>\u003Ccode>text_obj = bpy.context.object\u003C/code>: Stores a reference to the newly created text object in the variable \u003Ccode>text_obj\u003C/code>. Whenever you add something new, Blender makes it the active object, which you can access via \u003Ccode>bpy.context.object\u003C/code>.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Cp>Let's change the text string to \"Hello World\":\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">text_obj.data.body = \"Hello World\"\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>text_obj.data\u003C/code> refers to the \u003Cstrong>Text DataBlock\u003C/strong>, the actual content or settings of the text object.\u003C/li>\u003Cli>\u003Ccode>.body = \"Hello World\"\u003C/code> sets the displayed string to “Hello World”.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Cp>We can then adjust some text settings to give the text a little thickness and center it on the x and y axes:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">text_obj.data.extrude = 0.05\ntext_obj.data.align_x = 'CENTER'\ntext_obj.data.align_y = 'CENTER'\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>extrude = 0.05\u003C/code>: Gives the text depth, turning it from flat 2D text into slightly extruded 3D text.\u003C/li>\u003Cli>\u003Ccode>align_x = 'CENTER'\u003C/code>: Horizontally centers the text.\u003C/li>\u003Cli>\u003Ccode>align_y = 'CENTER'\u003C/code>: Vertically centers the text.\u003C/li>\u003C/ul>\u003Cp>You can find more options by reading\u003Ca href=\"https://docs.blender.org/manual/en/latest/modeling/texts/properties.html?ref=blog.cg-wire.com\"> \u003Cu>the documentation on Blender’s text object properties\u003C/u>\u003C/a>.\u003C/p>\u003Cp>\u003C/p>\u003Cp>Last but not least, we can rotate the text so it faces the camera instead of lying flat on the ground, since Blender text defaults to lying flat on the XY plane:\u003C/p>\u003Cpre>\u003Ccode 
class=\"language-python\">text_obj.rotation_euler[0] = 1.5708 &nbsp; # 90 degrees in radians\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>rotation_euler[0]\u003C/code>: Refers to the \u003Cstrong>rotation around the X-axis\u003C/strong>.\u003C/li>\u003Cli>\u003Ccode>1.5708\u003C/code> radians ≈ \u003Cstrong>90 degrees\u003C/strong>.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Cp>We can save the result using the previously mentioned instruction:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.wm.save_as_mainfile(filepath=\"./text.blend\")\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>\u003C/p>\u003Cp>To sum up, this is what our final code looks like:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.ops.object.select_all(action='SELECT')\nbpy.ops.object.delete(use_global=False)\n\nbpy.ops.object.text_add(enter_editmode=False, location=(0, 0, 0))\ntext_obj = bpy.context.object\n\ntext_obj.data.body = \"Hello World\"\n\ntext_obj.data.extrude = 0.05\ntext_obj.data.align_x = 'CENTER'\ntext_obj.data.align_y = 'CENTER'\n\ntext_obj.rotation_euler[0] = 1.5708\n\nbpy.ops.wm.save_as_mainfile(filepath=\"./text.blend\")\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"3-how-to-run-a-script-script-loading\">\u003Cstrong>3. How to Run a Script (Script Loading)\u003C/strong>\u003C/h2>\u003Cp>As previously mentioned, the syntax to run a script in headless mode is simply like any Python program:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">python3 text.py\u003C/code>\u003C/pre>\u003Cp>And that's it! You’ve just run your first \u003Cem>useful\u003C/em> Blender script. 
It's super useful for automation, pipelines, or batch processing.\u003C/p>\u003Cp>Just open the \u003Ccode>text.blend\u003C/code> file and see the result:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-eab235c1-3513-4b9d-9f89-8a4d7c1cd122.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"731\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-eab235c1-3513-4b9d-9f89-8a4d7c1cd122.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-eab235c1-3513-4b9d-9f89-8a4d7c1cd122.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-eab235c1-3513-4b9d-9f89-8a4d7c1cd122.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You can also open a specific \u003Ccode>.blend\u003C/code> file and run the script inside that context:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.wm.open_mainfile(filepath='my_scene.blend')\u003C/code>\u003C/pre>\u003Cp>This loads \u003Ccode>my_scene.blend\u003C/code> first, then runs the rest of the script on it.\u003C/p>\u003Cp>\u003C/p>\u003Cp>Sometimes, you want to send custom arguments:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 args.py – --text \"CLI Hello\"\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>\u003C/p>\u003Cp>Inside \u003Ccode>args.py\u003C/code>, you can access these arguments like this:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import sys\n\nargv = sys.argv\nargv = argv[argv.index(\"--\") + 1:]&nbsp; # get args after --\n\nprint(\"Custom args:\", argv)\u003C/code>\u003C/pre>\u003Cp>That's it for the basics, but you still have a lot to discover.\u003C/p>\u003Chr>\u003Ch2 
id=\"4-scripting-modules-explained\">\u003Cstrong>4. Scripting Modules Explained\u003C/strong>\u003C/h2>\u003Cp>Blender exposes its scripting features through different modules. Understanding what each module does helps you define what you can script and how to search the documentation to code it.\u003C/p>\u003Cp>First, you have the core \u003Ccode>bpy\u003C/code> modules:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>\u003Ccode>bpy.context\u003C/code> (Context Access)\u003C/strong> - Provides information about Blender’s current state (active object, scene, mode, selected objects, etc.), e.g., \u003Ccode>bpy.context.object\u003C/code> gets the active object.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.data\u003C/code> (Data Access)\u003C/strong> - Gives direct access to Blender’s datablocks such as meshes, objects, materials, and cameras. Example: \u003Ccode>bpy.data.objects[\"Cube\"]\u003C/code> gets the Cube object.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.msgbus\u003C/code> (Message Bus)\u003C/strong> - A pub/sub system for listening to changes in Blender’s data and triggering callbacks like subscribing to frame-change events.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.ops\u003C/code> (Operators)\u003C/strong> - Exposes functions that mimic UI actions like adding objects, deleting, or rendering. 
Example: \u003Ccode>bpy.ops.mesh.primitive_cube_add()\u003C/code> adds a cube.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.types\u003C/code> (Types)\u003C/strong> - Defines the core classes of Blender’s data (e.g., \u003Ccode>Object\u003C/code>, \u003Ccode>Mesh\u003C/code>, \u003Ccode>Material\u003C/code>) for extension and customization, to create custom panels or operators.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.utils\u003C/code> (Utilities)\u003C/strong> - Provides helper functions for class registration, add-on handling, and system path access, e.g., \u003Ccode>bpy.utils.register_class(MyOperator)\u003C/code>.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.path\u003C/code> (Path Utilities)\u003C/strong> - Tools for handling file paths, including resolving relative paths and creating absolute paths, e.g., \u003Ccode>bpy.path.abspath(\"//textures/wood.png\")\u003C/code>.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.app\u003C/code> (Application Data)\u003C/strong> - Provides information about Blender itself like version, build details, and runtime mode. Example: \u003Ccode>bpy.app.version\u003C/code> returns \u003Ccode>(3, 6, 2)\u003C/code>.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.props\u003C/code> (Property Definitions)\u003C/strong> - Used to define custom properties like numbers, strings, and enums for operators, panels, or addons, e.g., \u003Ccode>my_prop: bpy.props.IntProperty(name=\"My Number\")\u003C/code>.\u003C/li>\u003C/ul>\u003Cp>Then, you can find more specialized libraries:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>\u003Ccode>aud\u003C/code> (Audio System)\u003C/strong> - Blender’s audio library for playing sounds, loading files, and mixing audio. Example: play a .wav file directly in Blender with Python.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bgl\u003C/code> (OpenGL Wrapper)\u003C/strong> - Low-level OpenGL wrapper for custom 3D viewport drawing (being replaced by \u003Ccode>gpu\u003C/code>). 
To draw custom overlays, for example.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bl_math\u003C/code> (Additional Math Functions)\u003C/strong> - Extra math helpers for interpolation, distance calculations, and geometry operations, like computing distances between points.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>blf\u003C/code> (Font Drawing)\u003C/strong> - Blender’s font drawing module for rendering text in viewport overlays or panels, e.g., \u003Ccode>blf.draw(font_id, \"Hello World\")\u003C/code>.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bmesh\u003C/code> (BMesh Module)\u003C/strong> - Provides direct low-level access to Blender’s mesh editing system for procedural modeling and topology operations. Example: creating or modifying vertices and faces in edit mode.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy_extras\u003C/code> (Extra Utilities)\u003C/strong> - Contains helper functions like import/export support, math conversions, and view3d utilities, e.g., simplifying coordinate conversions.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>freestyle\u003C/code> (Freestyle Module)\u003C/strong> - Controls Blender’s Freestyle line rendering for non-photorealistic edge rendering. Example: adjusting line styles or visibility rules.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>gpu\u003C/code> (GPU Module)\u003C/strong> - Modern GPU drawing API that allows custom shaders and viewport overlays (successor to \u003Ccode>bgl\u003C/code>). Example: rendering with custom GLSL shaders.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>gpu_extras\u003C/code> (GPU Utilities)\u003C/strong> - Helper functions for GPU drawing, simplifying shape rendering without full GLSL code, e.g., drawing a simple rectangle.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>idprop.types\u003C/code> (ID Property Access)\u003C/strong> - Provides structured access to Blender’s custom ID properties in dictionary/array form. 
For example, to manipulate custom metadata on objects.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>imbuf\u003C/code> (Image Buffer)\u003C/strong> - Handles image buffers, enabling loading, saving, and pixel-level manipulation, e.g., procedural image generation.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>mathutils\u003C/code> (Math Types &amp; Utilities)\u003C/strong> - Blender’s math library offering \u003Ccode>Vector\u003C/code>, \u003Ccode>Matrix\u003C/code>, \u003Ccode>Quaternion\u003C/code>, and geometric utilities, e.g., \u003Ccode>Vector((1,0,0)).cross(Vector((0,1,0))) → (0,0,1)\u003C/code>.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Blender scripting with Python is one of the most powerful ways to extend and personalize your workflow.\u003C/p>\u003Cp>In this article, we explored how to create and run scripts, print your very first \"Hello World\" in the 3D world, and use the bpy module to make Blender do exactly what you want.\u003C/p>\u003Cp>At first glance, scripting might feel intimidating, but as you’ve seen, even a handful of lines can open doors to entirely new possibilities!\u003C/p>\u003Cp>Now, it’s your turn. Automate the boring stuff or craft tools from scratch for your studio pipeline. You can do it!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>\u003Cp>\u003C/p>",{"uuid":540,"comment_id":541,"feature_image":542,"featured":105,"visibility":10,"created_at":543,"updated_at":544,"custom_excerpt":545,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":546,"primary_tag":547,"url":548,"excerpt":545,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":474},"a68ec682-3536-4c62-ab40-f59e63eae8b1","68ec43d4ded61600017fff7b","https://images.unsplash.com/photo-1760548425425-e42e77fa38f1?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDd8fCUyMHNjcmlwdGluZ3xlbnwwfHx8fDE3NjA2MTMxODl8MA&ixlib=rb-4.1.0&q=80&w=2000","2025-10-13T02:12:04.000+02:00","2026-02-20T06:04:03.000+01:00","Learn how to automate Blender with Python! 
Discover how scripting can speed up production, eliminate repetitive work, and let you build custom tools tailored to your animation pipeline.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-scripting-animation/","/posts/blender-scripting-animation","2025-10-21T10:00:42.000+02:00",{"title":535},"blender-scripting-animation","posts/blender-scripting-animation",[555,556],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"bdKf7MIhtakMGVSZgWhSLqHvKXdi7Me_aKU6pQKUlbI",{"id":559,"title":560,"authors":561,"body":7,"description":7,"extension":8,"html":563,"meta":564,"navigation":14,"path":575,"published_at":576,"seo":577,"slug":578,"stem":579,"tags":580,"__hash__":583,"uuid":565,"comment_id":566,"feature_image":567,"featured":105,"visibility":10,"created_at":568,"updated_at":569,"custom_excerpt":570,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,
"canonical_url":7,"primary_author":571,"primary_tag":572,"url":573,"excerpt":570,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":574},"ghost/posts:dcc-integration-blender-kitsu.json","From Blender to Kitsu: How to Create a Custom DCC Bridge (2026)",[562],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⚙️\u003C/div>\u003Cdiv class=\"kg-callout-text\">Ever wished your creative tools could talk to your production tracker? With a custom DCC integration, they finally can — no more manual uploads, mismatched versions, or lost time between Blender and Kitsu.\u003C/div>\u003C/div>\u003Cp>Artists rely on Digital Content Creation (DCC) tools like \u003Cstrong>Blender\u003C/strong>, \u003Cstrong>Maya\u003C/strong>, or \u003Cstrong>Houdini\u003C/strong> to bring stories to life.\u003C/p>\u003Cp>But while the creative work happens inside these tools, production tracking happens elsewhere. This disconnect can lead to version mismatches, time lost in repetitive manual uploads, and eventually less time spent creating. 
Without a smooth connection between the DCC software and your production tracker, your pipeline suffers.\u003C/p>\u003Cp>That’s where custom integrations come in.\u003C/p>\u003Cp>In this article, we walk through the basics of creating a Blender integration in Kitsu similar to Kitsu Publisher to publish 3D model previews from Blender to Kitsu.\u003C/p>\u003Chr>\u003Ch2 id=\"what%E2%80%99s-a-dcc-integration\">\u003Cstrong>What’s a DCC Integration?\u003C/strong>\u003C/h2>\u003Cp>A DCC integration is \u003Cstrong>a bridge between a creative software and another software tool\u003C/strong>, like a production tracker.\u003C/p>\u003Cp>For example, instead of exporting files, navigating to a web browser, and manually uploading versions, an integration could\u003Ca href=\"https://blog.cg-wire.com/working-with-multiple-digital-content-creation-tools/\"> \u003Cu>allow artists to publish directly from their tool of choice\u003C/u>\u003C/a>.\u003C/p>\u003Cp>Integrations can handle tasks like\u003Ca href=\"https://blog.cg-wire.com/rendering-explained/\"> \u003Cu>managing complex rendering pipelines\u003C/u>\u003C/a>,\u003Ca href=\"https://blog.cg-wire.com/animation-asset-storage/\"> \u003Cu>managing asset storage and versioning\u003C/u>\u003C/a>, or generating preview images: they automate the boring parts of production so artists can focus on telling stories.\u003C/p>\u003Chr>\u003Ch2 id=\"why-dcc-integration\">\u003Cstrong>Why DCC Integration?\u003C/strong>\u003C/h2>\u003Cp>Every studio eventually hits the same bottleneck: as projects grow, manual processes break down.\u003C/p>\u003Cp>\u003Cstrong>Integrations save time\u003C/strong> because they remove context switching between software.\u003C/p>\u003Cp>They also \u003Cstrong>reduce errors by standardizing repetitive tasks\u003C/strong> like delivering outputs by enforcing naming conventions, formats, and metadata consistency.\u003C/p>\u003Cp>Last but not least, they \u003Cstrong>improve project management and 
communication\u003C/strong> by giving supervisors and producers real-time updates.\u003C/p>\u003Cp>All professional animation studios rely on a pipeline, and DCC integrations are essential.\u003C/p>\u003Cp>To give you a concrete example, let's try building a script integration that uploads a preview from Blender to Kitsu to easily review work with your team.\u003C/p>\u003Chr>\u003Ch2 id=\"1-getting-started\">\u003Cstrong>1. Getting Started\u003C/strong>\u003C/h2>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-green\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example Blender–Kitsu integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-kitsu-dcc-integration-example?ref=blog.cg-wire.com\">github.com/cgwire/blender-kitsu-dcc-integration-example\u003C/a>\u003C/div>\u003C/div>\u003Cp>Before we dive into scripting, let’s set up a local Kitsu instance where we can safely test our integration.\u003C/p>\u003Cp>The easiest way to run Kitsu locally is by using the kitsu-docker repository. Clone the repository to your machine and follow the instructions:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">git clone &lt;https://github.com/cgwire/kitsu-docker.git&gt;\ncd kitsu-docker\ndocker build -t cgwire/cgwire .\ndocker run --init -ti --rm -p 80:80 -p 1080:1080 --name cgwire cgwire/cgwire\u003C/code>\u003C/pre>\u003Cp>This will start all necessary services: Kitsu, the postgres database, and supporting components.\u003C/p>\u003Cp>Once the containers are running, open \u003Ccode>http://localhost:80\u003C/code> in your browser. 
Use the default credentials:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Email\u003C/strong>: admin@example.com\u003C/li>\u003Cli>\u003Cstrong>Password:\u003C/strong> mysecretpassword\u003C/li>\u003C/ul>\u003Cp>You’ll be taken to the Kitsu dashboard.\u003C/p>\u003Cp>Before we can upload previews, we need something to upload them to. In Kitsu:\u003C/p>\u003Col>\u003Cli>Create a new production (e.g., Blender Test Project) by going to the \"\u003Cstrong>Productions\u003C/strong>\" page from the sidebar.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/CleanShot-2025-10-13-at-9---.26.46-1.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"206\" height=\"479\">\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-0e43401b-afb6-4345-b773-db3d9b03bed3.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"946\" height=\"914\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-0e43401b-afb6-4345-b773-db3d9b03bed3.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-0e43401b-afb6-4345-b773-db3d9b03bed3.png 946w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"2\">\u003Cli>Inside the production, create an asset.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-83cce3b0-70a0-486d-87e7-4914a5304262.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"946\" height=\"914\" 
srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-83cce3b0-70a0-486d-87e7-4914a5304262.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-83cce3b0-70a0-486d-87e7-4914a5304262.png 946w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"3\">\u003Cli>Creating an asset automatically adds new tasks for all the selected task categories during the production creation. We can use those to upload previews.\u003C/li>\u003C/ol>\u003Cp>To interact with Kitsu programmatically,\u003Ca href=\"https://github.com/cgwire/gazu?ref=blog.cg-wire.com\"> \u003Cu>we use gazu, the official Python client for the Kitsu API\u003C/u>\u003C/a>. It allows us to authenticate, create entities, and upload previews directly from scripts.\u003C/p>\u003Cp>Install it with:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">pip install gazu\u003C/code>\u003C/pre>\u003Cp>Next, authenticate with your Kitsu instance using your username and password:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu\n\ngazu.set_host(\"&lt;http://localhost/api&gt;\")\n\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\nprint(\"Logged in as:\", user['user']['full_name'])\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>Once logged in, we can\u003Ca href=\"https://gazu.cg-wire.com/?ref=blog.cg-wire.com\"> \u003Cu>use gazu to fetch productions, assets, and tasks, then attach media files to them\u003C/u>\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"2-creating-a-preview-from-blender\">\u003Cstrong>2. Creating a preview from Blender\u003C/strong>\u003C/h2>\u003Cp>Producing a preview render is a common use case for animators. 
You need to get regular feedback throughout the production phase, and a preview is easier to reason with than importing an entire project.\u003C/p>\u003Cp>You can automate this with Blender’s Python API by setting up a viewport capture to render a single frame, saving the output to a temporary folder, and applying studio-wide render settings (resolution, format, watermarking):\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.ops.wm.open_mainfile(filepath=\"./project.blend\")\n\nbpy.context.scene.render.resolution_x = 256\nbpy.context.scene.render.resolution_y = 256\nbpy.context.scene.render.resolution_percentage = 100\n\nbpy.context.scene.render.image_settings.file_format = 'PNG'\nbpy.context.scene.render.filepath = \"./preview.png\"\n\nbpy.ops.render.render(write_still=True)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>import bpy\u003C/code>: Import Blender’s Python API\u003C/li>\u003Cli>b\u003Ccode>py.ops.wm.open_mainfile(filepath=\"./project.blend\")\u003C/code>: Opens an existing Blender project file called \u003Ccode>project.blend\u003C/code>\u003C/li>\u003Cli>\u003Ccode>bpy.context.scene.render.resolution_x = 256 [...]\u003C/code>We configure the render resolution to 256 pixels by 256 pixels with no downscale.\u003C/li>\u003Cli>\u003Ccode>bpy.context.scene.render.image_settings.file_format = 'PNG'\u003C/code>: Set the output format to PNG and define the output path to  \u003Ccode>preview.png\u003C/code> before executing a still render of the scene.\u003C/li>\u003C/ul>\u003Cp>This script gives you a lightweight preview file that’s easy to store in Kitsu and quick for supervisors to review.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-e936efc9-2c3b-43ea-86f7-8845bdc6c50f.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"946\" height=\"914\" 
srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-e936efc9-2c3b-43ea-86f7-8845bdc6c50f.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-e936efc9-2c3b-43ea-86f7-8845bdc6c50f.png 946w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>To run it, just install the bpy package and launch the program like you would for any other python script:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 preview.py\u003C/code>\u003C/pre>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-8fc4a1a4-01c7-4fcb-a8a6-b5d50588d6b8.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"687\" height=\"768\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-8fc4a1a4-01c7-4fcb-a8a6-b5d50588d6b8.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-8fc4a1a4-01c7-4fcb-a8a6-b5d50588d6b8.png 687w\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-uploading-a-preview-to-kitsu\">\u003Cstrong>3. 
Uploading a preview to Kitsu\u003C/strong>\u003C/h2>\u003Cp>With the preview file ready, the final step is pushing the data into Kitsu with gazu.\u003C/p>\u003Cp>First, we retrieve the task we previously created:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">projects = gazu.project.all_projects()\n\nassets = gazu.asset.all_assets_for_project(projects[0])\n\ntasks = gazu.task.all_tasks_for_asset(assets[0])\ntask_status = gazu.task.get_task_status_by_short_name(\"todo\")\u003C/code>\u003C/pre>\u003Cp>To do so, we get a list of all available projects, then the assets of our newly created project, and finally the tasks assigned to this asset.\u003C/p>\u003Cp>We publish a comment for the task while linking the preview file to it:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">(comment, preview_file) = gazu.task.publish_preview(\n&nbsp;tasks[0],\n&nbsp;task_status,\n&nbsp;&nbsp;&nbsp;&nbsp;comment=\"upload preview\",\n&nbsp;&nbsp;&nbsp;&nbsp;preview_file_path=\"./preview.png\"\n)\u003C/code>\u003C/pre>\u003Cp>And run the script:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 upload.py\u003C/code>\u003C/pre>\u003Cp>Once uploaded, the file becomes instantly available in Kitsu’s web interface. 
Supervisors can review it, leave feedback, and mark statuses—all without any manual file juggling from the artist.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-e9710dd1-d727-4e9f-85f8-9db075a159f4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"955\" height=\"931\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-e9710dd1-d727-4e9f-85f8-9db075a159f4.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-e9710dd1-d727-4e9f-85f8-9db075a159f4.png 955w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"4-distribution\">\u003Cstrong>4. Distribution\u003C/strong>\u003C/h2>\u003Cp>Once your script is working, you have a few options for how to use or share it:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Run it directly in Blender\u003C/strong> - Open the \u003Cem>Scripting\u003C/em> workspace and execute the script from there.\u003C/li>\u003Cli>\u003Cstrong>Run it from the command line\u003C/strong> - Just like we did earlier, you can run your script from the terminal like you would for any Python program.\u003C/li>\u003Cli>\u003Cstrong>Package it as an add-on\u003C/strong> - This allows you to enable it from Blender’s preferences and even design a custom user interface for easier access.\u003C/li>\u003C/ul>\u003Cp>Creating a full add-on with its own UI is a must for sharing integrations with artists, but it's a much bigger topic we won’t cover here. If you’d like to dive deeper, check out the\u003Ca href=\"https://docs.blender.org/manual/en/latest/advanced/scripting/addon_tutorial.html?ref=blog.cg-wire.com\"> \u003Cu>official Blender add-on tutorial\u003C/u>\u003C/a>. 
And stay tuned, we’ll be covering this in more detail in a future post!\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>DCC pipeline integrations are foundational for efficient animation studios: by connecting tools like Blender directly with Kitsu, you reduce friction, improve communication, and make life easier for both artists and production managers.\u003C/p>\u003Cp>You don’t need a massive pipeline team to see the benefits of integrations. Even a small studio can start simple, automate a few pain points, and scale up over time as needed.\u003C/p>\u003Cp>\u003Ca href=\"https://github.com/cgwire/kitsu-publisher-next?ref=blog.cg-wire.com#readme\">\u003Cu>Check out the Kitsu Publisher documentation\u003C/u>\u003C/a> for a production-ready DCC integration solution for Blender, Toon Boom Harmony, and Unreal Engine!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":565,"comment_id":566,"feature_image":567,"featured":105,"visibility":10,"created_at":568,"updated_at":569,"custom_excerpt":570,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":571,"primary_tag":572,"url":573,"excerpt":570,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":574},"1618a7a1-ff36-4259-910d-2902ca5adbbf","68ec43d0ded61600017fff75","https://images.unsplash.com/photo-1580894894513-541e068a3e2b?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDV8fFNvZnR3YXJlJTIwaW50ZWdyYXRpb258ZW58MHx8fHwxNzYwMzE0NjM1fDA&ixlib=rb-4.1.0&q=80&w=2000","2025-10-13T02:12:00.000+02:00","2026-02-20T06:04:22.000+01:00","Learn how to build a custom Blender integration for Kitsu using Python. 
This guide walks you through setting up a local environment, generating previews in Blender, and uploading them to Kitsu automatically—streamlining your DCC pipeline for faster, more reliable production.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},"https://blog.cg-wire.com/dcc-integration-blender-kitsu/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@thisisengineering?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">ThisisEngineering\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/dcc-integration-blender-kitsu","2025-10-14T11:23:34.000+02:00",{"title":560},"dcc-integration-blender-kitsu","posts/dcc-integration-blender-kitsu",[581,582],{"id":300,"name":22,"slug":29,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":26},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"Y6D4qXodYv1lXvjekjP26GNUDi8I9hI336Agp1r2n8s",{"id":585,"title":586,"authors":587,"body":7,"description":7,"extension":8,"html":597,"meta":598,"navigation":14,"path":609,"published_at":610,"seo":611,"slug":612,"stem":613,"tags":614,"__hash__":616,"uuid":599,"comment_id":600,"feature_image":601,"featured":105,"visibility":10,"created_at":602,"updated_at":603,"custom_excerpt":604,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":605,"primary_tag":606,"url":607,"excerpt":604,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":608},"ghost/posts:animation-file-formats-guide.json","Animation File Extensions: What Every Animator Should Know In 
2026",[588],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"5fe9b27094f20f00398a1673","Gwénaëlle Dupré","gwen","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2020/12/profile_pics.png","Product Manager at CGWire","Paris, France","@gelnior","https://blog.cg-wire.com/author/gwen/","\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📁\u003C/div>\u003Cdiv class=\"kg-callout-text\">File formats are the backbone of every animation pipeline, and choosing the right one can save hours of rework. Here’s how to pick the perfect format for each stage of production.\u003C/div>\u003C/div>\u003Cp>\"What format should I export this in?\"\u003C/p>\u003Cp>You’re suddenly staring at an alphabet soup of acronyms: GIF, MP4, MOV, WebM… and that’s just the start.\u003C/p>\u003Cp>Each format plays by its own rules: some are perfect for looping animated memes, others for high-resolution video, and others to share digital content creation projects quickly. But choosing the wrong one can wreck your hard work with bad topology, bloat, or platforms that refuse to cooperate.\u003C/p>\u003Cp>That’s why we wrote this article for you to learn what makes each animation file format unique, where they excel, and where they fall short. Let's get started right away!\u003C/p>\u003Chr>\u003Ch2 id=\"1-nativesoftware-specific-formats\">\u003Cstrong>1. Native/Software-Specific Formats\u003C/strong>\u003C/h2>\u003Cp>Every major 3D/2D animation software has its own native file format designed to capture everything about a project: from geometry and textures to rigging, scene composition, animation, lighting, and even rendering settings. 
These formats are invaluable when you’re working within the same software ecosystem:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>.BLEND\u003C/strong> – The BLEND format is Blender’s native project file working in an open-source ecosystem.\u003C/li>\u003Cli>\u003Cstrong>.MB / .MA\u003C/strong> – Maya Binary (.MB) and Maya ASCII (.MA) are the standard formats for Autodesk Maya. Both store full scene data, including models, rigs, and animations, but they serve slightly different purposes. Binary files are compact and efficient, while ASCII files are written in plain text, making them easier to debug or manage in version control systems. Maya’s formats have long been an industry standard in film, VFX, and AAA games, but they come with the downside of being proprietary and tied to costly software licenses.\u003C/li>\u003Cli>\u003Cstrong>.MAX\u003C/strong> – The .MAX format is native to Autodesk 3ds Max and is particularly strong in architectural visualization and motion graphics. While not as dominant as Maya in film or VFX, .MAX files remain a cornerstone in industries like architectural rendering and game asset creation.\u003C/li>\u003Cli>\u003Cstrong>.C4D\u003C/strong> – For motion designers, the .C4D format is the backbone of Cinema 4D projects. It captures every detail of a scene, including advanced setups like MoGraph effects that make Cinema 4D especially powerful for motion graphics. 
The format’s strengths lie in its seamless integration with other Adobe tools like Adobe After Effects, which has made it a favorite in the advertising and design industries.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-775fa889-ad23-49d9-81a3-6e32161aa863.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1105\" height=\"674\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-775fa889-ad23-49d9-81a3-6e32161aa863.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-775fa889-ad23-49d9-81a3-6e32161aa863.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-775fa889-ad23-49d9-81a3-6e32161aa863.png 1105w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>The downside of each file format is limited portability. Files created in one software are often difficult or impossible to open in another without conversion or data loss, making them less ideal for cross-platform collaboration. To solve this, animators use exchange formats.\u003C/p>\u003Chr>\u003Ch2 id=\"2-exchangeinterchange-formats\">\u003Cstrong>2. Exchange/Interchange Formats\u003C/strong>\u003C/h2>\u003Cp>Exchange formats allow\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-export-in-blender/\"> \u003Cu>transferring models and animations across different 3D tools\u003C/u>\u003C/a>:\u003C/p>\u003Cul>\u003Cli>For modern workflows, \u003Cstrong>.GLTF\u003C/strong> and \u003Cstrong>.GLB\u003C/strong> have emerged as the “JPEG of 3D.” Developed by the Khronos Group, these formats were built with the web and real-time rendering in mind. 
They support geometry, PBR (physically based rendering) materials, animations, and even scene hierarchies. The .GLB binary variant keeps everything in a compact package, making it especially efficient for game engines and AR/VR applications.\u003C/li>\u003Cli>In contrast, the \u003Cstrong>.OBJ\u003C/strong> format is one of the simplest and oldest interchange standards. It focuses solely on geometry, storing vertices, edges, and faces, with optional support for textures and materials. While it does not support animation or rigging, its strength lies in reliability and universality: almost every 3D program can import and export OBJ files. This makes it a staple for static models and asset sharing.\u003C/li>\u003Cli>Among the most widely recognized interchange formats is also \u003Cstrong>.FBX\u003C/strong> (Filmbox), which has become a backbone for transferring animation and rigged assets between software. Originally developed by Kaydara and later acquired by Autodesk, it supports meshes, bones, skinning, keyframes, cameras, and lights, making it highly versatile. Game engines like Unity and Unreal rely heavily on FBX, and it has become a standard for animation pipelines. It’s proprietary, which sometimes leads to version inconsistencies and conversion quirks across different tools.\u003C/li>\u003Cli>The \u003Cstrong>.DAE\u003C/strong> (Collada) format was designed as an open standard to encourage interoperability. Short for “COLLAborative Design Activity,” it can store both geometry and animation data. While Collada saw significant adoption in its early years, its popularity has waned as other formats like FBX and glTF gained traction. Still, it remains useful in pipelines that prioritize open standards over proprietary solutions.\u003C/li>\u003Cli>Finally, \u003Cstrong>.USD\u003C/strong> (Universal Scene Description), along with its variants .USDA (ASCII) and .USDC (binary), is Pixar’s ambitious solution for modern pipelines. 
USD goes beyond storing animation or simulations: it’s a full framework for managing 3D scenes, assets, and workflows at scale. With features like non-destructive editing, layering, and support for complex hierarchies, it has quickly gained traction in studios and is increasingly integrated into tools like Maya, Houdini, and Blender. While still relatively new, USD is rapidly positioning itself as the future standard for large-scale production, particularly in VFX and collaborative pipelines.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-fef19519-ed43-4e90-99bd-7377f6866787.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1105\" height=\"674\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-fef19519-ed43-4e90-99bd-7377f6866787.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-fef19519-ed43-4e90-99bd-7377f6866787.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-fef19519-ed43-4e90-99bd-7377f6866787.png 1105w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-simulations-motion-capture\">\u003Cstrong>3. Simulations &amp; Motion Capture\u003C/strong>\u003C/h2>\u003Cp>For procedural tools that generate a lot of data, like physics simulations, more efficient interchange file formats are used:\u003C/p>\u003Cul>\u003Cli>The \u003Cstrong>.BVH\u003C/strong> (Biovision Hierarchy) format has long been a staple in the motion capture world. It was developed to store skeleton-based animation data, including bone hierarchies and motion curves, making it an accessible way to transfer mocap files into 3D software. 
While BVH is somewhat dated and limited in terms of advanced features, it remains popular because of its simplicity and wide support across animation tools, particularly for character motion capture workflows.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-b81d8932-aa6e-4061-96b7-03a938ca1ba6.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1200\" height=\"709\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-b81d8932-aa6e-4061-96b7-03a938ca1ba6.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-b81d8932-aa6e-4061-96b7-03a938ca1ba6.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-b81d8932-aa6e-4061-96b7-03a938ca1ba6.png 1200w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: 3DArt.it\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cul>\u003Cli>The \u003Cstrong>.MDD\u003C/strong> format, short for Mesh Deformation Data, serves a different purpose. Instead of focusing on skeletons or rigs, it records how a mesh’s vertices move over time and stores the baked deformations like morph targets or soft-body dynamics. However, MDD files can be quite large, and since they lock animations to specific meshes, they lack the flexibility of rigged animation formats. 
Despite these limitations, they remain useful for passing along baked simulations where precise deformation data needs to be preserved (for example, exporting cloth simulations from Houdini to be rendered in LightWave or Maya).\u003C/li>\u003Cli>For more complex needs, the \u003Cstrong>.ABC\u003C/strong> (Alembic) format is a powerhouse developed by Sony Pictures Imageworks and Lucasfilm to handle heavy production workflows. It excels at baking complex simulations like particles, cloth, fluids, and destructible environments into efficient cache files that can be read across multiple applications. While it does not support rigging or procedural animation in the same way as FBX or BVH, Alembic has become a trusted standard in VFX and film production because of its reliability with massive datasets.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"4-renderingoutput-formats\">\u003Cstrong>4. Rendering/Output Formats\u003C/strong>\u003C/h2>\u003Cp>When it comes to\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>rendering and delivering final animations\u003C/u>\u003C/a>, the choice of format depends heavily on how and where the content will be viewed.\u003C/p>\u003Cp>Standard video formats are the most common choices for playback:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>.MP4\u003C/strong> — The most common option thanks to its balance of quality and compression. MP4 is widely compatible, making it ideal for everything from social media to professional presentations.\u003C/li>\u003Cli>\u003Cstrong>.MOV\u003C/strong> — Closely associated with Apple’s QuickTime, MOV supports higher-quality codecs and is popular in professional production environments.\u003C/li>\u003Cli>\u003Cstrong>.AVI\u003C/strong> — An older Microsoft format that is less common today, but still used in some workflows.\u003C/li>\u003Cli>\u003Cstrong>Apple ProRes\u003C/strong> — A high-quality codec frequently used in editing and finishing. 
ProRes balances efficiency and image fidelity, making it a standard in post-production pipelines.\u003C/li>\u003Cli>\u003Cstrong>Avid DNxHR\u003C/strong> — Similar to ProRes, DNxHR is designed for high-quality, multigenerational editing and is widely used in broadcast and film.\u003C/li>\u003Cli>Animators also need to consider the \u003Cstrong>individual codecs\u003C/strong> used inside these containers like H.264 or H.265 for video and AAC or PCM for audio, since they directly affect compatibility, compression quality, and playback performance depending on the rendering hardware used.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-87b55b08-c8f1-45ed-9774-852aed69006c.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"509\" height=\"512\">\u003C/figure>\u003Cp>These formats bundle frames into a compressed video file that can be easily shared, uploaded, or embedded across platforms. The trade-off is that they are final outputs: once rendered, they cannot be easily adjusted frame by frame without re-exporting.\u003C/p>\u003Cp>For professional pipelines that require more flexibility, studios often turn to image sequences. Instead of compressing everything into a single video file, each frame is rendered as an individual image. 
This approach has several advantages: it allows precise frame-level editing,\u003Ca href=\"https://blog.cg-wire.com/rendering-explained/\"> \u003Cu>makes recovery easier if a long render crashes\u003C/u>\u003C/a>, and provides higher-quality data for compositing and post-production.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>.EXR (OpenEXR)\u003C/strong> — An industry standard in VFX for its support of high dynamic range (HDR) and multiple render passes within a single file.\u003C/li>\u003Cli>\u003Cstrong>.PNG\u003C/strong> — A popular choice for lossless compression and transparency, often used in projects requiring clean edges or alpha channels.\u003C/li>\u003Cli>\u003Cstrong>.TIFF\u003C/strong> — Valued for robustness and color depth, making it a strong option for compositing workflows.\u003C/li>\u003C/ul>\u003Cp>The main inconvenience is storage: image sequences can take up massive amounts of space compared to compressed video files.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Animation is all about creativity and telling stories, but as we’ve seen in this article, the technical foundation of file formats is what ensures that creativity actually reaches an audience.\u003C/p>\u003Cp>From native project files that safeguard every detail of a scene to interchange formats that make collaboration possible and rendering formats that deliver results, each type of file plays a distinct role in the animation pipeline.\u003C/p>\u003Cp>No single format is \"the best.\" The right choice depends on your objectives: whether you’re preserving a work-in-progress, moving assets across platforms, baking a simulation, or exporting a final product for distribution. 
Understanding these differences saves time and helps design an efficient production pipeline to juggle all these file formats.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":599,"comment_id":600,"feature_image":601,"featured":105,"visibility":10,"created_at":602,"updated_at":603,"custom_excerpt":604,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":605,"primary_tag":606,"url":607,"excerpt":604,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":608},"84f2fcf0-13d3-4e64-9bad-4a714bed533e","68e3137bce96dd00012008a7","https://images.unsplash.com/photo-1637423604589-3eab4f288ddc?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fGFuaW1hdGlvbiUyMGZpbGUlMjBmb3JtYXRzfGVufDB8fHx8MTc1OTcxMjE4Nnww&ixlib=rb-4.1.0&q=80&w=2000","2025-10-06T02:55:23.000+02:00","2026-02-20T06:03:48.000+01:00","From BLEND and FBX to MP4 and EXR, each animation file format serves a distinct role in production. 
This guide breaks down native, exchange, simulation, and rendering formats—helping you choose the right one for collaboration, rendering, and delivery.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-file-formats-guide/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@brett_jordan?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Brett Jordan\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-file-formats-guide","2025-10-06T10:00:13.000+02:00",{"title":586},"animation-file-formats-guide","posts/animation-file-formats-guide",[615],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"I8sTHk7Zgl2wlJN7dxIftAjyJAS_ZeY1wKXVAMwgLO0",{"id":618,"title":619,"authors":620,"body":7,"description":7,"extension":8,"html":622,"meta":623,"navigation":14,"path":633,"published_at":634,"seo":635,"slug":636,"stem":637,"tags":638,"__hash__":640,"uuid":624,"comment_id":625,"feature_image":626,"featured":105,"visibility":10,"created_at":627,"updated_at":469,"custom_excerpt":628,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":629,"primary_tag":630,"url":631,"excerpt":628,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":632},"ghost/posts:background-characters-animation.json","Why Background Characters Matter More Than You Think (2026)",[621],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">👥\u003C/div>\u003Cdiv class=\"kg-callout-text\">A lively scene isn’t just about the main cast—background characters breathe life into worlds and make them feel 
real.\u003C/div>\u003C/div>\u003Cp>Ever notice how an animated world can look gorgeous… yet somehow feel empty? The protagonists might be front and center, but without the buzz of life around them, the scene falls flat. That’s where background characters step in to make the world believable.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-e015949a-4c62-4d1c-9dcd-828f8001a1c4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"512\" height=\"606\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: SpongeBob SquarePants\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Here’s the tricky part: animators can’t spend endless hours designing and animating every extra in a crowd. So, how do they manage to create a bustling street, a roaring stadium, or a festival brimming with energy without blowing the budget?\u003C/p>\u003Cp>Read on to find out!\u003C/p>\u003Chr>\u003Ch2 id=\"why-background-characters-matter-in-animation\">\u003Cstrong>Why Background Characters Matter in Animation\u003C/strong>\u003C/h2>\u003Cp>The spotlight often shines on main protagonists who drive the story forward. Yet, some of the most powerful storytelling elements come not from the main characters, but from the ones quietly filling the margins: background characters.\u003C/p>\u003Cp>First, \u003Cstrong>they transform a setting from a static backdrop into a living environment\u003C/strong>. Whether it’s a bustling city street, a lively marketplace, or a quiet village, the presence of extras walking, chatting, or going about their daily routines adds authenticity. 
Their subtle actions give the illusion of a world that exists beyond the main narrative, making the story feel more expansive and believable.\u003C/p>\u003Cp>\u003Cstrong>The mood of a scene is shaped by its context\u003C/strong>, including the collective energy of the surrounding crowd. Background characters can subtly convey emotions that complement or contrast the main plot: laughing in a joyful scene, murmuring in a tense moment, or standing still in silence during a dramatic pause. These small, carefully choreographed movements establish tone and emotional depth to guide the audience’s response to the story.\u003C/p>\u003Cp>While background characters don’t carry the story’s central plot, \u003Cstrong>they still serve a storytelling purpose\u003C/strong>: a passerby reacting to a key event, a shopkeeper offering a quiet nod, or a child playing in the distance provides context, foreshadowing, or cultural texture. Their presence subtly reinforces themes, world-building details, and character dynamics.\u003C/p>\u003Chr>\u003Ch2 id=\"1-grey-silhouettes-subtle-background-animation\">\u003Cstrong>1. Grey Silhouettes &amp; Subtle Background Animation\u003C/strong>\u003C/h2>\u003Cp>In most animation productions, background characters serve as visual punctuation\u003Cstrong>,\u003C/strong> enhancing the world without drawing attention away from the central narrative. One of the most effective techniques for achieving this is the use of \u003Cstrong>low-resolution characters\u003C/strong>, which are designed with minimal detail, muted colour palettes, or simplified linework.\u003C/p>\u003Cp>This approach is especially prevalent in anime, where a stylistic economy of detail is often embraced. 
Sometimes, animators don't even bother and go as far as using literal grey silhouettes in place of background characters:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-91a98f90-be07-42ca-b9d9-07b535951482.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"661\" height=\"349\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/09/data-src-image-91a98f90-be07-42ca-b9d9-07b535951482.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-91a98f90-be07-42ca-b9d9-07b535951482.png 661w\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: JoJo's Bizarre Adventure Stone Ocean\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Background extras may lack facial features entirely, like eyes or mouths, unless they are directly involved in a scene with the protagonists. Their forms are often rendered in monochromatic or low-contrast tones, making them appear more subtle within the environment. It not only streamlines production but also reinforces the hierarchy of visual importance: the foreground characters command attention, while the background hums with quiet life.\u003C/p>\u003Cp>Sometimes, less is more. \u003Ca href=\"about:blank\">\u003Cu>Another simple way to avoid over-animating\u003C/u>\u003C/a> is to reuse assets.\u003C/p>\u003Chr>\u003Ch2 id=\"2-asset-reuse-efficiently-populating-scenes\">\u003Cstrong>2. 
Asset Reuse: Efficiently Populating Scenes\u003C/strong>\u003C/h2>\u003Cp>Rather than creating entirely new characters from scratch for every scene, \u003Cstrong>animators frequently repurpose existing character rigs, models, or designs, applying only minor variations\u003C/strong> like different clothing, color schemes, or hairstyles.\u003C/p>\u003Cp>This approach not only saves significant time and resources but also helps keep visual consistency across a production, which is especially important in large-scale projects:\u003C/p>\u003Cul>\u003Cli>In 2D animation, asset reuse often involves re-coloring or re-drawing existing character cels. For example, a basic character design can be quickly adapted by changing the shirt color, adding a hat, or altering hair style. Tasks that require minimal effort compared to \u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\">full character creation\u003C/a>. These small adjustments allow animators to populate bustling city streets, crowded markets, or busy office scenes with diverse background characters.\u003C/li>\u003Cli>In \u003Ca href=\"https://blog.cg-wire.com/3d-modeling-animation/\">3D animation\u003C/a>, reuse is achieved through modular design and texture swapping. Animators can take a base character rig and swap out textures for different outfits, skin tones, or accessories. In some cases, entire body parts like arms or heads can be interchanged to create variety. 
Advanced rigs may even support dynamic changes in hairstyle or facial features via simple parameter adjustments.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-29ae003f-3f85-42af-baa3-35affc5bd437.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1493\" height=\"514\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/09/data-src-image-29ae003f-3f85-42af-baa3-35affc5bd437.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/09/data-src-image-29ae003f-3f85-42af-baa3-35affc5bd437.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-29ae003f-3f85-42af-baa3-35affc5bd437.png 1493w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Sword of the Demon Hunter\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"3-looping-for-natural-movement\">\u003Cstrong>3. Looping for Natural Movement\u003C/strong>\u003C/h2>\u003Cp>Background characters are typically \u003Cstrong>animated using simple, repeating cycles\u003C/strong> like walking loops, idle breathing motions, or subtle talking gestures to give the illusion of life without the need for complex, frame-by-frame animation.\u003C/p>\u003Cp>These minimal loops are designed to be efficient, allowing animators to maintain visual interest across large groups or extended scenes without overextending rendering resources.\u003C/p>\u003Cp>A key technique in making these loops feel organic is \u003Cstrong>timing variation\u003C/strong>. 
\u003Ca href=\"https://blog.cg-wire.com/timing-animation-principle/\">\u003Cu>By offsetting the start and duration of individual loops\u003C/u>\u003C/a> so that one character begins walking slightly before another, animators can avoid the mechanical, repetitive feel that often comes with identical cycles.\u003C/p>\u003Chr>\u003Ch2 id=\"4-layering-for-depth-and-realism\">\u003Cstrong>4. Layering for Depth and Realism\u003C/strong>\u003C/h2>\u003Cp>Layering is another technique that creates a sense of depth by varying the level of detail and motion based on a background character’s perceived distance from the camera.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Simplify with distance\u003C/strong> - Characters farther from the camera should have reduced detail in both design and motion. Broad, slower movements read better at a distance.\u003C/li>\u003Cli>\u003Cstrong>Blur and depth cues\u003C/strong> - Apply motion blur to deeper layers in compositing. This not only mimics \u003Ca href=\"https://blog.cg-wire.com/compositing-in-animation-definition-process-challenges/\">real camera depth of field but also naturally separates focal planes\u003C/a>.\u003C/li>\u003Cli>\u003Cstrong>Light and shadow separation\u003C/strong> - Adjust lighting so that background characters subtly fall into shadow or softer highlights compared to the main action.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-a8b442ea-c6ef-4f84-a4c7-9068902a695f.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1037\" height=\"536\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/09/data-src-image-a8b442ea-c6ef-4f84-a4c7-9068902a695f.png 600w, 
https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/09/data-src-image-a8b442ea-c6ef-4f84-a4c7-9068902a695f.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-a8b442ea-c6ef-4f84-a4c7-9068902a695f.png 1037w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Captain Tsubasa\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"5-procedural-generation\">\u003Cstrong>5. Procedural Generation\u003C/strong>\u003C/h2>\u003Cp>Procedural generation is a modern approach \u003Cstrong>using algorithms for the creation of dynamic crowds\u003C/strong> without manually animating each individual. This approach is especially effective in large-scale scenes like bustling city streets, epic battlefields, and other massive crowds where the sheer number of characters would make traditional animation impractical.\u003C/p>\u003Cp>A prime example of this is the use of VFX simulation systems like Massive, originally developed for The Lord of the Rings trilogy. It enables background agents to exhibit autonomous behaviours, making decisions based on environmental cues, social dynamics, and scripted rules. These agents can walk, react to nearby characters, respond to threats, or even improvise actions during a scene, adding a layer of unpredictability that feels organic. 
(See: \u003Ca href=\"https://blog.cg-wire.com/vfx-artist-definition-process-challenges/\">\u003Cu>VFX artist role and challenges\u003C/u>\u003C/a>)\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-e7970f24-3765-4066-80d3-0036a3dbc190.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"874\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/09/data-src-image-e7970f24-3765-4066-80d3-0036a3dbc190.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/09/data-src-image-e7970f24-3765-4066-80d3-0036a3dbc190.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/09/data-src-image-e7970f24-3765-4066-80d3-0036a3dbc190.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: The Lord of the Rings\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Through procedural generation, background characters aren't just passive fillers: their movements are varied and responsive. This technology not only saves time and resources but also enhances storytelling by making the environment feel truly alive.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Animation isn’t just about the heroes, the villains, or the pivotal moments: it’s about the world they inhabit. And that world only feels real when it breathes and moves beyond the frame.\u003C/p>\u003Cp>When background characters are animated with intention, they make scenes much more impactful.\u003C/p>\u003Cp>Ask yourself: Is this world really alive? Don’t just show characters, use them to tell a story. They need purpose, rhythm, and presence. 
Whether through a subtle loop, a strategic silhouette, or a dynamic procedural system, make every extra matter!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":624,"comment_id":625,"feature_image":626,"featured":105,"visibility":10,"created_at":627,"updated_at":469,"custom_excerpt":628,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":629,"primary_tag":630,"url":631,"excerpt":628,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":632},"ed19a217-eabc-45d8-b763-13a050eb6fc6","68c7c605a4afcc0001232bde","https://images.unsplash.com/photo-1736180613586-f275eace0731?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDEwfHxhbmltYXRlZCUyMGNyb3dkfGVufDB8fHx8MTc1NzkyMjkwMHww&ixlib=rb-4.1.0&q=80&w=2000","2025-09-15T09:53:41.000+02:00","Background characters make animated worlds feel alive, adding realism and depth to every scene. 
Learn how animators use techniques like asset reuse, looping cycles, layering, and procedural generation to create believable crowds without overspending time or budget.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/background-characters-animation/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@artchicago?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Art Institute of Chicago\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/background-characters-animation","2025-09-15T10:02:09.000+02:00",{"title":619},"background-characters-animation","posts/background-characters-animation",[639],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"gEHiLSfUpV7L64DtZG7xclJ0PGA_i5qIPGfc-eHU2wk",{"id":642,"title":643,"authors":644,"body":7,"description":7,"extension":8,"html":646,"meta":647,"navigation":14,"path":658,"published_at":659,"seo":660,"slug":661,"stem":662,"tags":663,"__hash__":665,"uuid":648,"comment_id":649,"feature_image":650,"featured":105,"visibility":10,"created_at":651,"updated_at":652,"custom_excerpt":653,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":654,"primary_tag":655,"url":656,"excerpt":653,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":657},"ghost/posts:animation-moodboard.json","Building Moodboards for Animation (2026): From Vibe to Vision",[645],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🖼️\u003C/div>\u003Cdiv class=\"kg-callout-text\">A moodboard isn’t decoration—it’s your animation’s creative compass.\u003C/div>\u003C/div>\u003Cp>Before the first frame moves, there’s one thing 
every great animation needs: \u003Cem>a unique mood\u003C/em>.\u003C/p>\u003Cp>And no, it doesn’t magically appear in the storyboard.\u003C/p>\u003Cp>You don't want to jump straight into production only to realize halfway through that the tone feels off.\u003C/p>\u003Cp>That’s where moodboards come in.\u003C/p>\u003Cp>In this guide, we’ll show you how to build animation moodboards that do more than just look pretty. Let’s dive in.\u003C/p>\u003Chr>\u003Ch2 id=\"what-is-a-moodboard\">\u003Cstrong>What Is A Moodboard\u003C/strong>\u003C/h2>\u003Cp>In animation, \u003Cstrong>a moodboard is a curated collage of visual references\u003C/strong> that captures the overall look and tone of your project before you start animating.\u003C/p>\u003Cp>It combines elements such as colour palettes, lighting styles, character designs, environments, typography, and even texture or motion references to convey the animation’s intended vibe.\u003C/p>\u003Chr>\u003Ch2 id=\"why-you-need-a-moodboard-for-animation\">\u003Cstrong>Why You Need A Moodboard For Animation\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>A moodboard makes the look and feel of your animation tangible \u003C/strong>before production begins.\u003C/p>\u003Cp>Instead of relying on abstract descriptions like “dreamlike but grounded” or “retro-futuristic,” you can show your team exactly what you mean. Animation is rarely a solo act. Whether you’re working with a big studio or a small crew, \u003Cstrong>a moodboard keeps everyone on the same page\u003C/strong>. It’s a shared reference that guides decision-making in character design, environment building, lighting, and even movement. With a clear visual foundation, you reduce revisions, avoid miscommunication, and speed up the creative process.\u003C/p>\u003Cp>Before you animate a single frame,\u003Ca href=\"https://blog.cg-wire.com/animation-pitch/\"> \u003Cu>a moodboard gives clients, producers, or stakeholders something tangible to react to\u003C/u>\u003C/a>. 
\u003Cstrong>It helps sell your idea\u003C/strong> by making the vision \u003Cem>visible\u003C/em>—not just conceptual.\u003C/p>\u003Chr>\u003Ch2 id=\"1-define-the-creative-direction\">\u003Cstrong>1. Define the Creative Direction\u003C/strong>\u003C/h2>\u003Cp>Before building a moodboard, take a step back and define the broader creative vision: the tone, theme, and emotional intent behind the project.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Explore the story’s core message and genre\u003C/strong> - Ask key questions like: What is the animation trying to say? What emotions should it evoke? Is the tone light and playful, or dark and moody? This initial clarity acts as a compass for the rest of your process. Dig into the heart of the narrative and genre to identify the emotional arc you want the audience to experience. The more specific and intentional your answers are, the easier it will be to collect cohesive and relevant visual references later on.\u003C/li>\u003Cli>\u003Cstrong>Consider your audience\u003C/strong> - Think about who the animation is for. Define your audience by demographics like age range—children, teens, or adults—as well as cultural or regional context. Understanding what your viewers are drawn to visually will help you make aesthetic choices that resonate more deeply.\u003C/li>\u003Cli>\u003Cstrong>Set clear visual constraints\u003C/strong> - Before you begin collecting images, establish boundaries to focus your visual exploration. Write concise guiding statements like, “The animation should feel like a blend of \\[X] and \\[Y],” or “Avoid neon palettes; stick to natural tones.” These constraints help eliminate guesswork.\u003C/li>\u003C/ul>\u003Cp>Condense your creative direction into one powerful sentence that captures the project. 
For example: “A sun-bleached, lo-fi sci-fi world with surreal animation and a melancholic tone.” This sentence should guide your entire moodboard and serve as a benchmark when selecting visual references.\u003C/p>\u003Cp>Share this creative vision with key stakeholders to ensure alignment early and avoid costly revisions later.\u003C/p>\u003Chr>\u003Ch2 id=\"2-gather-references\">\u003Cstrong>2. Gather References\u003C/strong>\u003C/h2>\u003Cp>Once your creative direction is defined, it's time to start\u003Ca href=\"https://blog.cg-wire.com/concept-in-animation-definition-process-and-challenges/\"> \u003Cu>collecting references that visually support your vision\u003C/u>\u003C/a>.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Source from a wide range of media\u003C/strong> - Pull inspiration from films, animations, illustrations, photography, fashion, architecture, music videos, and motion graphics reels. Each medium can bring unique visual cues, helping you craft a richer and more layered moodboard.\u003C/li>\u003Cli>\u003Cstrong>Choose emotionally and stylistically aligned visuals\u003C/strong> - Don’t just collect impressive or trendy visuals. Prioritize references that reflect your project’s unique identity, emotional tone, and narrative voice. The right reference should feel like it belongs in the world you're building.\u003C/li>\u003Cli>\u003Cstrong>Use dynamic references for motion and energy\u003C/strong> - Inspiration doesn’t have to be static. Music videos, live-action commercials, and motion graphics reels can spark ideas for pacing, editing rhythm, transitions, and animated visual storytelling techniques that elevate your work.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"3-curate-don%E2%80%99t-dump\">\u003Cstrong>3. Curate, Don’t Dump\u003C/strong>\u003C/h2>\u003Cp>After gathering a wide range of references, it’s time to shift from collecting to editing.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Be selective and stay cohesive\u003C/strong> - Don’t aim for quantity. 
Aim for unity. Return to your core creative foundation—your tone, theme, and visual constraints. For every image, ask: \u003Cem>Does this support the direction we defined?\u003C/em> If not, it doesn’t belong. Throwing in everything you like can result in clashing visuals and mixed messages, which weakens the impact of your board and confuses your audience.\u003C/li>\u003Cli>\u003Cstrong>Eliminate redundancy without losing meaning\u003C/strong> - When multiple images say the same thing, keep only the strongest one. Repetition is useful only when it deepens a visual idea, not when it creates clutter. Prioritize clarity over volume to make each reference count.\u003C/li>\u003Cli>\u003Cstrong>Use a “Maybe” folder to stay focused\u003C/strong> - If you're unsure about certain images, move them to a separate “maybe” folder. This allows you to keep your main board clean and cohesive while preserving options to revisit later. It’s easier to judge the core direction when distractions are removed.\u003C/li>\u003Cli>\u003Cstrong>Limit the total number of images\u003C/strong> - Aim for 10–25 high-impact visuals. That’s enough to convey depth and dimension without overwhelming your viewer. A well-edited board should speak for itself in under 30 seconds and communicate mood, setting, and tone at a glance.\u003C/li>\u003C/ul>\u003Cp>Curating takes time, so don't hesitate to take a break. Sleep on it if possible. Come back with a fresh perspective—you’ll likely spot a few more images that don’t belong. The tighter and more focused your board becomes, the stronger and more persuasive your visual direction will be.\u003C/p>\u003Chr>\u003Ch2 id=\"4-organize-the-layout\">\u003Cstrong>4. 
Organize the Layout\u003C/strong>\u003C/h2>\u003Cp>A well-curated moodboard isn’t just about the content—it’s also about how that content is arranged.\u003C/p>\u003Cp>Good layout brings clarity, helps communicate your creative vision more effectively, and makes the board easier to read at a glance.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Group visuals by category or theme\u003C/strong> - Organize images into clear sections such as character designs, environments, color palettes, lighting references, typography, or motion cues like GIFs or frame stills. This structure makes your board easier to navigate and understand. Step back and look at the overall board. Are you seeing repeated colors, lighting styles, shapes, or emotional tones? These patterns are valuable—they reveal the visual DNA of your project. Highlight and prioritize the references that reinforce these patterns to build cohesion.\u003C/li>\u003Cli>\u003Cstrong>Create a logical flow\u003C/strong> - Design the layout to guide the viewer’s attention naturally from one section to the next. Use visual hierarchy by placing the most important references in prominent positions, and maintain readability through thoughtful spacing and composition.\u003C/li>\u003Cli>\u003Cstrong>Balance consistency with flexibility\u003C/strong> - Maintain rhythm through consistent sizing and alignment, but don’t be afraid to break the grid when emphasis is needed. Use negative space to balance dense visuals, and avoid clutter by keeping groups distinct yet visually connected.\u003C/li>\u003C/ul>\u003Cp>Once your references are in place, refine them as needed.\u003C/p>\u003Chr>\u003Ch2 id=\"5-add-context\">\u003Cstrong>5. Add Context\u003C/strong>\u003C/h2>\u003Cp>A single image can evoke wildly different interpretations depending on the viewer’s perspective or background.\u003C/p>\u003Cp>For example, a neon-lit alley might suggest a gritty cyberpunk noir to one person and a stylish rom-com backdrop to another. 
Adding brief captions or notes grounds the imagery and helps ensure everyone understands your intent. This doesn’t mean you need to write essays for each image. Just a few purposeful words can do the job: “gritty texture for dystopian vibe” or “soft rim lighting for emotional climax scene.”\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Label and structure your moodboard clearly\u003C/strong> - If your board is organized by category—characters, environments, lighting, typography—make that structure obvious. Use clear section headers to guide the viewer’s eye and reinforce the logic behind your layout, making the content more skimmable and presentation-ready.\u003C/li>\u003Cli>\u003Cstrong>Highlight specific details with annotations\u003C/strong> - Sometimes, only a part of an image is relevant. Use arrows, lines, or highlight shapes to draw attention to those key areas, paired with a short note like, “Note the high-contrast shadows—great for dramatic tension.” This makes your references more precise and helps avoid misreading.\u003C/li>\u003C/ul>\u003Chr>\u003Cp>Consider adding a short paragraph at the top or side of your board summarizing your overall creative vision. This can expand on your earlier one-sentence style summary by including emotional tone, visual influences, or design philosophy. For example: “This board explores a grounded, emotional sci-fi world with washed-out colors, warm lighting, and hand-drawn imperfections. Inspired by lo-fi indie films and 90s anime, it aims to feel both nostalgic and strange.”\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>A great animation doesn’t start in the timeline—it starts with vision. \u003C/p>\u003Cp>And your moldboard? That’s your first real shot at making that vision tangible.\u003C/p>\u003Cp>It’s where scattered ideas become direction. Where tone, style, and storytelling begin to lock into place. 
It’s not busywork—it’s your blueprint.\u003C/p>\u003Cp>First, you define your creative direction. Then you gather references with intention. You curate like a ruthless art director, lay it all out like a designer, and add context like a storyteller.\u003C/p>\u003Cp>Done right, your moodboard is a decision-making machine. It sells ideas. Aligns teams. Saves hours (if not days) of confused revisions.\u003C/p>\u003Cp>Don’t wait. Build it now. Before the animatics, before the keyframes—lock in your visual language.\u003C/p>\u003Cp>Grab your references, sketch your boundaries, and make your board say, “This is what we’re making, and here’s exactly how it should feel.” Because if your moodboard is solid, everything else flows.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":648,"comment_id":649,"feature_image":650,"featured":105,"visibility":10,"created_at":651,"updated_at":652,"custom_excerpt":653,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":654,"primary_tag":655,"url":656,"excerpt":653,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":657},"c5c59ec6-260f-4dfa-bf8f-ce5e8488ce0e","6891b6e2ce2b0a0001420405","https://images.unsplash.com/photo-1521249664898-864e6c1b6d5c?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDZ8fGFuaW1hdGlvbiUyMG1vb2QlMjBib2FyZHxlbnwwfHx8fDE3NTQ4OTc3Nzl8MA&ixlib=rb-4.1.0&q=80&w=2000","2025-08-05T09:46:42.000+02:00","2026-03-26T10:22:24.000+01:00","Learn how to create animation moodboards that define your project’s tone, style, and direction. 
From gathering references to curating layouts, this guide shows how to turn abstract ideas into a clear visual blueprint.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-moodboard/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@chuttersnap?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">CHUTTERSNAP\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-moodboard","2025-08-11T10:00:44.000+02:00",{"title":643},"animation-moodboard","posts/animation-moodboard",[664],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"wC0g0V9PGdSoIWroNFjtTNlxLtSEIApICZSH2uGXPng",{"id":667,"title":668,"authors":669,"body":7,"description":7,"extension":8,"html":671,"meta":672,"navigation":14,"path":683,"published_at":684,"seo":685,"slug":686,"stem":687,"tags":688,"__hash__":690,"uuid":673,"comment_id":674,"feature_image":675,"featured":105,"visibility":10,"created_at":676,"updated_at":677,"custom_excerpt":678,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":679,"primary_tag":680,"url":681,"excerpt":678,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":682},"ghost/posts:secondary-action-principle.json","The Secondary Action Principle (2026): Animation’s Unsung Hero",[670],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎭\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">It’s the little things—a glance, a twitch, a rustling leaf—that bring animated worlds to 
life.\u003C/strong>\u003C/b>\u003C/div>\u003C/div>\u003Cp>Animating a character's movements isn't enough.\u003C/p>\u003Cp>If you have two men talking at a bar, you can't just lip sync them and call it a day: you need to take into account background animations, the subtle glass cleaning movements of the bartender, or even the vibrations of the jukebox in the corner.\u003C/p>\u003Cp>Sometimes, it's the less obvious secondary actions that make the difference between good and great animation.\u003C/p>\u003Cp>Let's have a look at how animators do it in this article, and how you can too!\u003C/p>\u003Chr>\u003Ch2 id=\"what-is-the-secondary-action-principle\">\u003Cstrong>What is the Secondary Action Principle?\u003C/strong>\u003C/h2>\u003Cp>The secondary action principle refers to \u003Cstrong>the additional movements that support the primary actions\u003C/strong> within a scene.\u003C/p>\u003Cp>If a character is waving, the primary action is the arm motion, but the head nodding or hair swaying in response introduces secondary actions that enrich the scene.\u003C/p>\u003Chr>\u003Ch2 id=\"why-is-the-secondary-action-principle-important\">\u003Cstrong>Why is the Secondary Action Principle Important?\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Secondary actions add depth to characters and their environments\u003C/strong> without overshadowing the main action. While a character angrily closes a book (primary action), animators can have them tap their foot impatiently (secondary action) to highlight their frustration.\u003C/p>\u003Cp>Adding secondary actions results in \u003Cstrong>movements that feel more realistic\u003C/strong>. The body is a complex system with interconnected parts, and so is our environment or our relational network. When one part moves, another is bound to follow.\u003C/p>\u003Cp>It's not just entertaining but also \u003Cstrong>appealing to viewers\u003C/strong>: secondary actions give rhythm to scenes and prevent them from feeling dull. 
A character might take a slight pause to scratch their head after delivering a line to give the audience a moment to absorb the dialogue.\u003C/p>\u003Chr>\u003Ch2 id=\"1-start-with-the-primary-action\">\u003Cstrong>1. Start With the Primary Action\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Focus on the main action first.\u003C/strong> It should be clear and purposeful. This is the foundation for any secondary actions. If the primary action is not solid, secondary actions weaken the animation.\u003C/p>\u003Cp>Consider this example: if a character is jumping over a puddle, the path of the jump from take-off to landing is your primary action. The jump has to feel natural, so you pay attention to how the character bends their knees, pushes off the ground, and lands.\u003C/p>\u003Cp>Once the primary action is smooth and believable, you add secondary actions―the flutter of a cloth or a facial expression of triumph―\u003Cstrong>but it's not a must-have\u003C/strong>. Lots of low-budget animations forgo secondary actions in most scenes, and it's not the end of the world. Wojack animations don’t even lip sync, but are still watched by millions:\u003C/p>\u003Cp>But even a minimal secondary action like a slight stagger can elevate a basic move like walking. It shows that thought was put into the character's weight and momentum, and invites the viewer to feel the impact physically: you don’t need many layers to create a richer experience!\u003C/p>\u003Chr>\u003Ch2 id=\"2-avoid-over-animating\">\u003Cstrong>2. Avoid Over-Animating\u003C/strong>\u003C/h2>\u003Cp>Secondary actions add a lot to animations, but \u003Cstrong>it's crucial to strike the right balance with the main action\u003C/strong>.\u003C/p>\u003Cp>\u003Cstrong>Too many movements overwhelm your scene\u003C/strong> and distract the viewers.\u003C/p>\u003Cp>If your character is picking a book off a shelf, a simple hand grasp should be your focus. 
A head tilt or a shift in weight are subtle yet effective choices for secondary actions because they do not steal attention, but they enrich the moment.\u003C/p>\u003Cp>On the contrary, you should avoid unnecessary movements like tapping fingers or exaggerated blinking to avoid cluttering your scene.\u003C/p>\u003Cp>To plan these actions effectively,\u003Ca href=\"https://blog.cg-wire.com/storyboard-animation/\"> \u003Cu>use storyboards\u003C/u>\u003C/a>: sketch out your scene to visualize where secondary actions fit naturally. Planning allows your animation to stay consistent without needing never-ending revisions.\u003C/p>\u003Cp>Observe people doing everyday things: you’ll find most people rarely stand perfectly still. A foot adjusts. A hand brushes against a sleeve. These are natural micro-actions, and mimicking them selectively keeps your animation grounded without going overboard.\u003C/p>\u003Chr>\u003Ch2 id=\"3-experiment-with-timing\">\u003Cstrong>3. Experiment with Timing\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Adjusting the speed or delay of secondary actions shapes the mood\u003C/strong> and highlights the main action. By experimenting with timing, you can add nuance.\u003C/p>\u003Cp>One practical tool to consider is\u003Ca href=\"https://blog.cg-wire.com/onion-skinning/\"> \u003Cu>ghost frames to visualize and tweak the timing\u003C/u>\u003C/a> of secondary motions. They show how actions unfold over time.\u003C/p>\u003Cp>In Blender, onion skinning allows you to see both past and future frames to get a feel of the timing of your animation:\u003C/p>\u003Cp>But motion paths are more effective in isolating different body parts:\u003C/p>\u003Cp>If a character claps their hands, a delayed head nod or subtle grin amplifies the gesture. \u003Cstrong>This delay allows the secondary action to emphasize the impact\u003C/strong> of the clap. 
By carefully timing these elements, you enrich the character’s expression.\u003C/p>\u003Chr>\u003Ch2 id=\"4-dont-forget-the-context-of-the-scene\">\u003Cstrong>4. Don't Forget The Context of The Scene\u003C/strong>\u003C/h2>\u003Cp>When using secondary action in your animations, \u003Cstrong>always consider the scene’s context\u003C/strong>.\u003C/p>\u003Cp>If we go back to the example of a character waving, you need to imagine where this action takes place and how it affects or is affected by other characters and the environment.\u003C/p>\u003Cp>\u003Cstrong>Consider interactions with the environment or props as rich sources for secondary actions\u003C/strong>. If your character is drinking coffee, they might fidget with the cup’s handle. This small gesture adds depth to their personality. The steam wafting upwards from the cup introduces a subtle secondary action that adds realism.\u003C/p>\u003Cp>Background animations are also crucial. They make the entire environment feel alive. Leaves rustling in the wind or a cat stretching lazily on a warm sidewalk support the story and create a believable world.\u003C/p>\u003Cp>By paying attention to the context, you not only improve your primary actions but also enrich the scene with layers of meaning that make for a more memorable experience.\u003C/p>\u003Chr>\u003Ch2 id=\"5-work-these-rigs\">\u003Cstrong>5. Work These Rigs\u003C/strong>\u003C/h2>\u003Cp>A rig is \u003Cstrong>the digital skeleton of a character in 3D modeling\u003C/strong>. It controls how the character moves.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/rigging-in-animation/\">\u003Cu>A good rig\u003C/u>\u003C/a> is crucial for secondary action. When a character nods in agreement, their hair also needs to sway slightly. Your rig needs to support these extra motions.\u003C/p>\u003Cp>Rigging artists usually start with Inverse Kinematics (IK) to manipulate limbs and Forward Kinematics (FK) for individual joints. 
These rig constraints allow animators to make poses.\u003C/p>\u003Cp>Only then do they add controls for secondary elements like hair, tails, or clothing.\u003C/p>\u003Cp>Sometimes, \u003Cstrong>these secondary elements have their own physics engine to render them\u003C/strong> more realistically. Typically, photo-realistic hair simulations are extremely resource-intensive:\u003C/p>\u003Chr>\u003Ch2 id=\"6-mastering-layers\">\u003Cstrong>6. Mastering Layers\u003C/strong>\u003C/h2>\u003Cp>When you're working with DCC tools like Maya, Blender, or Toon Boom Harmony, \u003Cstrong>managing layers efficiently becomes crucial\u003C/strong> to maintain clarity while working with other teammates on a scene.\u003C/p>\u003Cp>Main and secondary actions are usually on separate layers. For example, the major body movement should be one layer, while hair, clothing, or a waving hand can be placed on a secondary layer. Animators use layer groups or nesting features to organize related elements and streamline visibility toggling.\u003C/p>\u003Cp>Constraint stacks allow animators to create complex hierarchies of animation layers where one action can influence another. 
It's an advanced technique in tools like Blender, where you can link objects and bones, allowing a base action to be intently nuanced by a secondary one.\u003C/p>\u003Cp>Make sure to add metadata to your layers―whether it's through naming conventions, color coding, or notes―to make your models more readable.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Secondary actions are precious storytelling tools to make your animation more immersive.\u003C/p>\u003Cp>Getting the primary action right is key, and you should beware of over-animating, but well-timed secondary actions can truly make movements shine or reveal more about the world the characters evolve in.\u003C/p>\u003Cp>Secondary actions are\u003Ca href=\"https://blog.cg-wire.com/follow-through-overlapping-action/\"> \u003Cu>not to be confused with overlapping actions\u003C/u>\u003C/a>, and just like primary actions, you need to plan for follow-through actions. Have a look at our detailed guide on this topic here!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":673,"comment_id":674,"feature_image":675,"featured":105,"visibility":10,"created_at":676,"updated_at":677,"custom_excerpt":678,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":679,"primary_tag":680,"url":681,"excerpt":678,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":682},"3d37b682-e6e0-4cdb-ab99-8624a57064fb","686c11e083fca9000148f4aa","https://images.unsplash.com/photo-1655532391070-ef6c6e922e39?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDI2fHxhbmltYXRpb24lMjBiYWNrZ3JvdW5kfGVufDB8fHx8MTc1MTkxMzc3OXww&ixlib=rb-4.1.0&q=80&w=2000","2025-07-07T20:28:48.000+02:00","2026-03-26T10:40:29.000+01:00","Learn how to master the secondary action principle in animation—subtle, supporting movements that add realism and emotional depth to your scenes without stealing focus from the main action.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/secondary-action-principle/","\u003Cspan style=\"white-space: 
pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@pato_abyss?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Pato González\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/secondary-action-principle","2025-07-28T10:00:56.000+02:00",{"title":668},"secondary-action-principle","posts/secondary-action-principle",[689],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"fYA2jFwt-pc_ji7_SCw-42r6-77nm5vSTcaYsBEh7gw",{"id":692,"title":693,"authors":694,"body":7,"description":7,"extension":8,"html":696,"meta":697,"navigation":14,"path":708,"published_at":709,"seo":710,"slug":711,"stem":712,"tags":713,"__hash__":715,"uuid":698,"comment_id":699,"feature_image":700,"featured":105,"visibility":10,"created_at":701,"updated_at":702,"custom_excerpt":703,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":704,"primary_tag":705,"url":706,"excerpt":703,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":707},"ghost/posts:color-correction-animation.json","Color Correction in Animation (2026): The Secret to Polished 
Scenes",[695],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎨\u003C/div>\u003Cdiv class=\"kg-callout-text\">Color correction can take your animation from decent to dazzling—all without reanimating a single frame.\u003C/div>\u003C/div>\u003Cp>Sometimes you finish an animation and find the colors dull. Or you just want to change the tones to match an aesthetic.\u003C/p>\u003Cp>Instead of editing each frame one by one, you can just correct colors during post-production in one go.\u003C/p>\u003Cp>Read on for tips on getting started with color correction!\u003C/p>\u003Chr>\u003Ch2 id=\"what-is-color-correction\">\u003Cstrong>What is Color Correction?\u003C/strong>\u003C/h2>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Color correction alters the color values of your animation to achieve a visually appealing result.\u003C/strong>\u003C/b>\u003C/div>\u003C/div>\u003Cp>That's how you go from a rough-looking 3D scene to an appealing final product:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-14.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1920\" height=\"1080\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/03/image-14.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/03/image-14.png 1000w, 
https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1600/2026/03/image-14.png 1600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-14.png 1920w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>See the difference? It's immediately noticeable.\u003C/p>\u003Chr>\u003Ch2 id=\"why-is-color-correction-important\">\u003Cstrong>Why Is Color Correction Important?\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Different scenes are animated at different times or by different artists.\u003C/strong> Mistakes can result in slight variations in color tones. Color correction fixes that and ensures all scenes maintain a consistent color palette.\u003C/p>\u003Cp>\u003Cstrong>Colors significantly influence the emotional tone of a scene.\u003C/strong> Through color correction, animators can also emphasize particular feelings like cooler tones to evoke sadness or warmer hues for comfort and happiness. Specific colors are often used to communicate certain themes or motifs within a story.\u003C/p>\u003Cp>Depending on the production's style, \u003Cstrong>color correction can make an animation appear more lifelike or create an aesthetic\u003C/strong>. For instance, a realistic animation might require precise color adjustments to match natural lighting conditions, while a stylized piece might benefit from exaggerated color schemes.\u003C/p>\u003Chr>\u003Ch2 id=\"the-elements-of-color\">\u003Cstrong>The Elements of Color\u003C/strong>\u003C/h2>\u003Cp>To understand how color correction works, you have to be familiar with the fundamental elements of color: hue, saturation, brightness, and the processes of tinting and shading.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Hue\u003C/strong> is the color family, or the base color of your animation. The pure color without any tint or shade. 
Tools like color wheels help you visualize and select harmonious hues for your compositions.\u003C/li>\u003Cli>\u003Cstrong>Saturation\u003C/strong> defines the intensity or purity of a color. High saturation means vivid colors, while low saturation leads to muted colors. High saturation signals excitement and activity, while desaturated colors suggest nostalgia or solemnity.\u003C/li>\u003Cli>\u003Cstrong>Brightness\u003C/strong> dictates how light or dark a color appears. It significantly impacts the mood and depth of a scene. You can use brightness shifts to create contrast and build visual interest: a bright character against a dark background creates a focal point.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Tinting\u003C/strong> involves adding white to a color to lighten it, while \u003Cstrong>shading\u003C/strong> adds black to darken it. These techniques create lighting effects.\u003C/p>\u003Cp>To stay consistent throughout production, animators use a color script—a strategic outline of the color scheme for the entire project.\u003C/p>\u003Chr>\u003Ch2 id=\"1-white-balance-adjustment\">\u003Cstrong>1. White Balance Adjustment\u003C/strong>\u003C/h2>\u003Cp>White balance adjustment removes unrealistic color casts in your animation to make white appear as pure white and all other colors look natural. This process involves tweaking the colors in your scenes to align them with how they would appear under neutral lighting conditions, \u003Cstrong>mimicking the way our eyes perceive color in different lighting environments\u003C/strong>.\u003C/p>\u003Cp>Most animation and editing software includes a white balance tool to automatically adjust your scene based on a selected neutral point (white or gray areas).\u003C/p>\u003Cp>In Blender, for example, you can find the white balance feature in the color management panel of the render properties.\u003C/p>\u003Chr>\u003Ch2 id=\"2-exposure-correction\">\u003Cstrong>2. 
Exposure Correction\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Exposure correction changes brightness levels to make sure that the details in the darkest and brightest areas are visible and correctly balanced\u003C/strong>: it tweaks the amount of light in your frames so that your audience can see the intended details and colors clearly, without any parts being underexposed (too dark) or overexposed (too bright).\u003C/p>\u003Cp>Histogram tools help gauge correct exposure levels by showing the tonal values in your image: the distribution of shadows, midtones, and highlights. You can use scopes to make sure each scene has the same level of exposure unless you animate flashback or dream sequences.&nbsp;\u003C/p>\u003Cp>Blender proposes the Exposure node to increase the brightness of an area. For example, a window in a room.\u003C/p>\u003Chr>\u003Ch2 id=\"3-contrast-enhancement\">\u003Cstrong>3. Contrast Enhancement\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Contrast enhancement alters the difference between the lightest and darkest parts of an image\u003C/strong>. By amplifying these differences, animators can make visuals more engaging.\u003C/p>\u003Cp>This is achieved by changing the brightness levels to allow each element within a frame to stand out correctly. A high-contrast area naturally draws the eye.\u003C/p>\u003Chr>\u003Ch2 id=\"4-color-balance-adjustment\">\u003Cstrong>4. Color Balance Adjustment\u003C/strong>\u003C/h2>\u003Cp>Color balance adjustment is the process of \u003Cstrong>changing the intensity of the colors in a scene to achieve a desired visual tone\u003C/strong>: tuning the colors so that they complement each other and create a harmonious look.\u003C/p>\u003Cp>If you have an animated scene set at sunrise, you want to achieve a balance that reflects the gentle warmth of the early morning light. 
You start by identifying the dominant color of your sunrise scene on the color wheel, which might be a soft yellow-orange, and to enhance this warmth you can slightly adjust the balance towards red, to give the impression of a gentle morning.\u003C/p>\u003Cp>To see color balance in action, you can look at the interaction between colors on the color wheel: if your scene has too much yellow, which sits next to the greens on the wheel, it might inadvertently pull in a cool green hue, contradicting your intention. By carefully adjusting the balance, you can have the yellow remain soft and inviting without tipping into the spectrum's cooler side.\u003C/p>\u003Chr>\u003Ch2 id=\"5-saturation-control\">\u003Cstrong>5. Saturation Control\u003C/strong>\u003C/h2>\u003Cp>It's \u003Cstrong>the adjustment of intensity or purity of colors in your frames\u003C/strong>.\u003C/p>\u003Cp>Say you're animating a serene forest scene at dawn: opting for muted, desaturated colors illustrates a calm and peaceful atmosphere. Inversely, boosting saturation would make everything look exaggerated and distract from the narrative. Oversaturation can also cause loss of detail.\u003C/p>\u003Cp>If the stylistic choice is to make colors pop, then a more saturated palette can be both intentional and effective.\u003C/p>\u003Cp>A simple rule of thumb is to start with a neutral baseline and incrementally adjust.\u003C/p>\u003Chr>\u003Ch2 id=\"6-color-grading\">\u003Cstrong>6. Color Grading\u003C/strong>\u003C/h2>\u003Cp>Color grading is \u003Cstrong>changing the colors in your animation to create a specific look or mood.\u003C/strong>\u003C/p>\u003Cp>Consider a scene where your character is on a sunny beach. 
By applying color grading techniques, you can enhance the brightness of the sky, saturate the colors of the ocean to create a more vibrant and inviting atmosphere and adjust the skin tones of characters to look more natural under sunlight.\u003C/p>\u003Cp>One way to achieve consistent and repeatable color grading is by using Color Lookup Tables (LUTs): predefined color settings that can be applied to your animation to quickly achieve a professional look.\u003C/p>\u003Cp>Video scopes / histograms again allow you to precisely evaluate color balance, exposure, and brightness levels and avoid colors that are too bright or too dark.\u003C/p>\u003Chr>\u003Ch2 id=\"7-skin-tone-correction\">\u003Cstrong>7. Skin Tone Correction\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Skin tone correction refines the color of character skin colors to make them look natural and consistent\u003C/strong> across various lighting conditions and scenes.\u003C/p>\u003Cp>Strong sunlight may wash out the colors and make skin tones appear overly pale or unrecognizable.\u003C/p>\u003Cp>To correct this, animators use software tools to adjust the color balance. For example, by increasing the saturation slightly in warmer tones (reds and yellows), skins can retain their vividness under harsh light.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Color correction is an important step in post-production to polish the final result. Different techniques bring different benefits, and it's important to understand how they work to get the most out of them.\u003C/p>\u003Cp>Depending on the DCC tool you use, your workflow will change, but the principles are roughly the same. 
Have a look at\u003Ca href=\"https://docs.blender.org/manual/en/latest/render/color_management?ref=blog.cg-wire.com\" rel=\"noreferrer\"> \u003Cu>Blender's color management section\u003C/u>\u003C/a> to learn more about color correction for rendering.\u003C/p>\u003Cp>If your animation has multiple shots in the same setting, animators usually pick one \"hero frame\" with the correct balance and use it as a reference for other scenes to speed up the matching process while helping with visual consistency.\u003C/p>\u003Cp>Color correction is not a substitute for good color design though: you need to proactively pick color palettes that suit your story from the conception stage.\u003Ca href=\"https://blog.cg-wire.com/character-color-palettes/\"> \u003Cu>Have a look at this guide on picking a color palette for character design\u003C/u>\u003C/a> for more details.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":698,"comment_id":699,"feature_image":700,"featured":105,"visibility":10,"created_at":701,"updated_at":702,"custom_excerpt":703,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":704,"primary_tag":705,"url":706,"excerpt":703,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":707},"f68ec398-4a3f-4010-87a0-464550babae3","6867f6222ff010000105b71a","https://images.unsplash.com/photo-1627873828998-50b7aeec7ffe?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDMwfHxjb2xvciUyMHBhbGV0dGV8ZW58MHx8fHwxNzUxOTEzMTY3fDA&ixlib=rb-4.1.0&q=80&w=2000","2025-07-04T17:41:22.000+02:00","2026-03-26T10:31:44.000+01:00","Color correction is a crucial step in animation post-production, used to fix inconsistencies, unify tones, and enhance visual storytelling. 
Learn the essential techniques like white balance, saturation control, and color grading to bring polish and consistency to your animated work.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/color-correction-animation/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@lucasgwendt?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Lucas George Wendt\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/color-correction-animation","2025-07-21T10:00:36.000+02:00",{"title":693},"color-correction-animation","posts/color-correction-animation",[714],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"DqoWcB0044_VTKL5p4tRi_vxxIPMVPn12dI4m2GlrGw",{"id":717,"title":718,"authors":719,"body":7,"description":7,"extension":8,"html":721,"meta":722,"navigation":14,"path":733,"published_at":734,"seo":735,"slug":736,"stem":737,"tags":738,"__hash__":740,"uuid":723,"comment_id":724,"feature_image":725,"featured":105,"visibility":10,"created_at":726,"updated_at":727,"custom_excerpt":728,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":729,"primary_tag":730,"url":731,"excerpt":728,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":732},"ghost/posts:avoid-over-animating-scenes.json","Why Over-Animation Hurts Your Storytelling In 2026",[720],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⏳\u003C/div>\u003Cdiv class=\"kg-callout-text\">Don’t animate every grain of sand—animate the desert 😮\u003C/div>\u003C/div>\u003Cp>\u003Cstrong>\u003Cem>\"What's even going on 
here?\"\u003C/em>\u003C/strong>\u003C/p>\u003Cp>If you find yourself wondering that while watching a scene, chances are you are a victim of over-animation.\u003C/p>\u003Cp>Over-animation is when a scene is saturated with excessive movement or detail indicating animators are gripping too hard onto the need for realism at the expense of the story.\u003C/p>\u003Cp>Recently, an episode of One Piece provoked controversy over the animation style being considered over-animated by some critics:\u003C/p>\u003Cp>Without dwelling on this debate, it's interesting to reflect on why over-animation can be a bad thing for studios, and how to design your workflow to prevent that.\u003C/p>\u003Chr>\u003Ch2 id=\"why-is-over-animating-counter-productive\">\u003Cstrong>Why Is Over-Animating Counter-Productive?\u003C/strong>\u003C/h2>\u003Cp>Understanding how the brain processes motion is a crucial piece of the puzzle. Our eyes are drawn to contrast and change—not just movement but also brightness or color. The human eye thrives on balance and focal points.\u003C/p>\u003Cp>If everything is moving, nothing stands out. Studies in visual perception show that the human brain automatically prioritizes motion in peripheral vision, which means that irrelevant animated background clutter can actually distract from the main character. When every element on the screen is animated with equal intensity, \u003Cstrong>viewers struggle to know where to focus\u003C/strong>. Key moments or emotions easily go unnoticed amid the chaos if you leave the audience exhausted.&nbsp;\u003C/p>\u003Cp>\u003Cstrong>Storytelling should always take center stage\u003C/strong>. While adding intricate details seems like a good idea at first to capture realism or creativity, it can divert attention from the main narrative. 
You need to give the audience room to breathe and take things in.\u003C/p>\u003Cp>Anyone who has worked in animation knows that \u003Cstrong>over-animation is both time-consuming and costly\u003C/strong>. It stretches the resources needed to complete a project, which can lead to inconsistencies in the animation quality if not managed carefully. Instead of aiming for maximum movement in every scene, animators need to prioritize keyframes and scenes that drive the story forward first and foremost.\u003C/p>\u003Chr>\u003Ch2 id=\"1-use-live-footage\">\u003Cstrong>1. Use Live Footage\u003C/strong>\u003C/h2>\u003Cp>One of the best ways to avoid over-animating is to \u003Cstrong>use live footage for reference\u003C/strong>.\u003C/p>\u003Cp>Have you ever noticed how a small sigh speaks volumes of emotion? We often communicate with subtle body language. Capturing that essence in animation helps create relatable characters without overdoing it.\u003C/p>\u003Cp>In DCC software you can usually overlay your reference video with your background:\u003C/p>\u003Cp>You can then use this reference to design your keyposes, but more generally to plan your animation.\u003C/p>\u003Chr>\u003Ch2 id=\"2-you-need-good-planning\">\u003Cstrong>2. You Need Good Planning\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Planning gives you a clear vision\u003C/strong> of where you're headed.\u003C/p>\u003Cp>Before you even think about sketching your first frame, start by outlining your scenes with great care.\u003C/p>\u003Cp>Identify the key moments that warrant extra emphasis. Whether it's a character’s moment of revelation or a dramatic action sequence, knowing these pivotal points ahead allows you to allocate your time and resources wisely.\u003C/p>\u003Cp>Storyboards and animatics are key in this pre-production phase. 
Use them to map out the rhythm and flow of your story.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/storyboard-animation/\">\u003Cu>Storyboards make it easy to plan your shots\u003C/u>\u003C/a> and estimate budgets.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/how-animatics-bring-stories-to-life/\">\u003Cu>Animatics showcase how your scenes play out\u003C/u>\u003C/a> over time. They help you improve the pacing and eliminate superfluous movements that could cloud the story.\u003C/p>\u003Cp>Build estimates of how long each part of the animation will take and compare these to your actual budget. If you start overanimating the first few episodes of your series but end up with subpar quality in the latter parts because you ran out of budget, you'll piss your audience off.\u003C/p>\u003Chr>\u003Ch2 id=\"3-simplify\">\u003Cstrong>3. Simplify\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Focus on the purpose behind each scene. \u003C/strong>Over-animation often originates from animators jumping directly into motion without fully considering the why behind each scene.\u003C/p>\u003Cp>Is it a moment of tension between characters? A comedic relief?\u003C/p>\u003Cp>As you animate, continually evaluate what each element contributes to that purpose. Ask yourself: Does this movement advance the storyline, enhance the mood, or develop a character?\u003C/p>\u003Cp>If the answer is no, then perhaps it doesn't belong.\u003C/p>\u003Cp>Simplifying doesn’t mean stripping your work down to bare bones. It means focusing on what serves the story best.\u003C/p>\u003Cp>Think of it as decluttering your canvas. By cleaning up unnecessary animations, you allow key moments to shine brighter.\u003C/p>\u003Cp>Sometimes, a single rotated frame speaks louder than a full-blown 3D-rendered hyper-realistic animation:\u003C/p>\u003Chr>\u003Ch2 id=\"4-leverage-exaggeration-properly\">\u003Cstrong>4. 
Leverage Exaggeration Properly\u003C/strong>\u003C/h2>\u003Cp>Imagine a character about to burst with excitement. By selectively exaggerating their movements with a leap that defies gravity or a smile that stretches from ear to ear, you convey their overwhelming joy in a way that's both clear and memorable.\u003C/p>\u003Cp>This is\u003Ca href=\"https://blog.cg-wire.com/exaggeration-animation-principle/\"> \u003Cu>the magic of exaggeration\u003C/u>\u003C/a>: it draws in the audience and hooks you into following along with the story.\u003C/p>\u003Cp>This approach doesn't just tell the audience how the character feels: it makes them feel it too!\u003C/p>\u003Cp>Exaggeration makes sure your audience knows exactly where to look and what to feel.\u003C/p>\u003Cp>\u003Cstrong>It's about showing more with less.\u003C/strong>\u003C/p>\u003Cp>But the line between effective exaggeration and over-animating is thin. Overloading a scene with unwarranted exaggeration leads to characters overacting. And nobody likes inauthentic characters.\u003C/p>\u003Chr>\u003Ch2 id=\"5-focus-on-keyframes\">\u003Cstrong>5. Focus On Keyframes\u003C/strong>\u003C/h2>\u003Cp>Keyframes dictate both the starting and ending points of motion.\u003C/p>\u003Cp>It's a common pitfall, especially among new animators, to overcrowd their sequences with unnecessary frames. 
When an animation feels off, the instinct is to add more frames: more in-betweens, more movement, more secondary actions.\u003C/p>\u003Cp>But this overflow creates noise, muddling the story rather than enhancing it.\u003C/p>\u003Cp>\u003Cstrong>By focusing on perfecting your keyframes, you can convey more in fewer frames.\u003C/strong>\u003C/p>\u003Cp>This is what made\u003Ca href=\"https://blog.cg-wire.com/smear-frames/\"> \u003Cu>smear frames\u003C/u>\u003C/a> so effective back in the day: you didn't need 60 frames per second to make your audience feel something.\u003C/p>\u003Cp>Begin with a clear roadmap―your storyboard―and place your key poses methodically using\u003Ca href=\"https://blog.cg-wire.com/straight-ahead-action-pose-to-pose-animation/\"> \u003Cu>a pose-to-pose approach\u003C/u>\u003C/a>. Only then should you strategically use in-betweens to connect those keyframes.\u003C/p>\u003Chr>\u003Ch2 id=\"6-stay-consistent\">\u003Cstrong>6. Stay Consistent\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Consistency in animation isn’t just a nice-to-have\u003C/strong>: you need it to create a believable world.\u003C/p>\u003Cp>As we previously mentioned, over-animation gets in the way. It's not sustainable, so the quality varies from one scene to the next.\u003C/p>\u003Cp>Imagine you're watching a scene where the protagonist is moving with exquisitely detailed expressions, their hair and clothes reacting to every subtle shift in movement.\u003C/p>\u003Cp>But then, in the same scene, the background is starkly simple, with crowd members who barely move or express emotion. It's like watching a character in high definition set against an out-of-focus landscape: it just looks out of place and breaks the suspension of disbelief. 
Some background crowds in Jojo's Stone Ocean anime look particularly funny:\u003C/p>\u003Cp>These inconsistencies often originate from the realities of production: \u003Cstrong>teams change, budgets evolve, and timelines shift\u003C/strong>.\u003C/p>\u003Cp>As animators pour their efforts into perfecting main characters, background elements can become afterthoughts, not getting the attention or time they deserve.\u003C/p>\u003Cp>You need to find a balance early on.\u003Ca href=\"https://blog.cg-wire.com/lod-levels-of-detail/\"> \u003Cu>The level of detail\u003C/u>\u003C/a> across all elements of an animation has to match the story's tone.\u003C/p>\u003Cp>We're not saying the background needs as much detail as the main character, but it shouldn't detract from the narrative:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Establish clear visual standards\u003C/strong>. Define the level of detail appropriate for different elements in a scene.\u003C/li>\u003Cli>\u003Cstrong>Regularly review scenes\u003C/strong> as a whole rather than in isolation.\u003C/li>\u003Cli>\u003Cstrong>Focus on detailed animation efforts where they serve the story best\u003C/strong> while making sure supporting elements maintain a consistent yet less detailed style.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>In summary, while it’s tempting to add as much detail and motion as possible, over-animating dilutes your story and exhausts your audience.\u003C/p>\u003Cp>By using live footage, planning effectively, simplifying movements, using exaggeration, and focusing on keyframes, you can avoid the common pitfalls of over-animating.\u003C/p>\u003Cp>Don’t animate every grain of sand in a desert. Animate the desert.\u003C/p>\u003Cp>Over-animation is subjective, though if you have the budget of Toei to animate One Piece, it can be okay to have every frame become a money shot. 
If you're a smaller studio, less so.&nbsp;\u003C/p>\u003Cp>There are exceptions where high-impact, stylized scenes benefit from visual overload, like a chaotic battle sequence.\u003C/p>\u003Cp>In one sentence, over-animation is when you can't sustain the same level of detail throughout production.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":723,"comment_id":724,"feature_image":725,"featured":105,"visibility":10,"created_at":726,"updated_at":727,"custom_excerpt":728,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":729,"primary_tag":730,"url":731,"excerpt":728,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":732},"52d0f4ef-c74d-47e8-8318-1f12d20621ee","6867f6252ff010000105b720","https://images.unsplash.com/photo-1737134385541-dd2af8ae1113?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fGV4YWdnZXJhdGV8ZW58MHx8fHwxNzUxOTEyNDk3fDA&ixlib=rb-4.1.0&q=80&w=2000","2025-07-04T17:41:25.000+02:00","2026-03-26T10:26:55.000+01:00","Over-animating can distract from your story, inflate production time, and overwhelm your 
audience. Learn how to simplify your animation workflow, focus on what matters, and bring clarity to your scenes with practical, studio-tested advice.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/avoid-over-animating-scenes/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@infernisvox?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Jens Riesenberg\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/avoid-over-animating-scenes","2025-07-14T10:00:07.000+02:00",{"title":718},"avoid-over-animating-scenes","posts/avoid-over-animating-scenes",[739],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"pF2TnP9TbcKQelr7POKL_fvDarTvBr0s4BWwnu-lfnQ",{"id":742,"title":743,"authors":744,"body":7,"description":7,"extension":8,"html":746,"meta":747,"navigation":14,"path":758,"published_at":759,"seo":760,"slug":761,"stem":762,"tags":763,"__hash__":765,"uuid":748,"comment_id":749,"feature_image":750,"featured":105,"visibility":10,"created_at":751,"updated_at":752,"custom_excerpt":753,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":754,"primary_tag":755,"url":756,"excerpt":753,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":757},"ghost/posts:arc-animation-principle.json","Mastering the Arc Principle (2026): Bring Flow and Realism to Your Animation",[745],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">💫\u003C/div>\u003Cdiv class=\"kg-callout-text\">Add grace, weight, and story to every motion—your animations deserve more than straight lines.\u003C/div>\u003C/div>\u003Cp>A great way to 
improve the quality of your animations is to make your movements less linear and more curvy.\u003C/p>\u003Cp>This is what Disney animators call the \u003Cstrong>arc principle\u003C/strong>, and it's one of the 12 principles they used in the 1930s to propel their studios to global success.\u003C/p>\u003Cp>In this article, you'll discover why arcs are essential and the best ways to use them in your animations for maximum effect.\u003C/p>\u003Cp>Read on for practical tips!\u003C/p>\u003Chr>\u003Ch2 id=\"whats-the-arc-principle\">\u003Cstrong>What's the Arc Principle?\u003C/strong>\u003C/h2>\u003Cp>The arc principle refers to \u003Cstrong>the visual path taken by objects or characters as they move through space\u003C/strong>. This path is often an arc rather than a straight line to mirror how things move in the real world.\u003C/p>\u003Cp>For example, a swinging arm doesn't travel straight from one point to another but rather follows a parabolic trajectory. Note how One Piece animators play with Luffy's arm trajectory to make scenes more appealing:\u003C/p>\u003Chr>\u003Ch2 id=\"why-is-the-arc-principle-important\">\u003Cstrong>Why Is the Arc Principle Important?\u003C/strong>\u003C/h2>\u003Cp>As previously mentioned, \u003Cstrong>arcs make animated sequences more realistic\u003C/strong>. In fact, most movements follow a curve: the swing of walking legs, the path of a bouncing ball, or to convey momentum for a follow-through.\u003C/p>\u003Cp>But they also \u003Cstrong>make scenes more appealing\u003C/strong>: our brains are hardwired to appreciate these natural curves. 
When an animation lacks them, it appears stiff.\u003C/p>\u003Cp>Movement paths can also act as \u003Cstrong>storytelling tools\u003C/strong> to showcase the personality traits of characters or their emotional states: a character who moves in sharp, angular paths seems tense, while another who moves in smooth arcs appears more relaxed.\u003C/p>\u003Chr>\u003Ch2 id=\"1-plan-with-thumbnails-storyboards-animatics\">\u003Cstrong>1. Plan with Thumbnails, Storyboards &amp; Animatics\u003C/strong>\u003C/h2>\u003Cp>Good planning makes sure that the arcs remain consistent while saving time and effort: when you know the trajectory the motion should follow, you avoid unnecessary revisions.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Align arcs with character intentions\u003C/strong> - Think about the motivations and emotions that underlie your character’s movement. Aligning the arcs with these intentions adds not just to the physical movement but also to the storytelling.\u003C/li>\u003Cli>\u003Cstrong>Sketch thumbnails or storyboards to map out key poses and arcs\u003C/strong> - Before you animate anything, take the time\u003Ca href=\"https://blog.cg-wire.com/storyboard-animation/\"> \u003Cu>to sketch out storyboards\u003C/u>\u003C/a>. These rough drawings give you a bird’s-eye view of your animation to help you lay down key poses and the arcs they’ll follow.\u003C/li>\u003Cli>\u003Cstrong>Use animatics to test flow\u003C/strong> -\u003Ca href=\"https://blog.cg-wire.com/how-animatics-bring-stories-to-life/\"> \u003Cu>Convert your storyboards into animatics\u003C/u>\u003C/a>—moving storyboards that include timing, motion, and transitions. They'll allow you to visualize how well your arcs and poses flow over time to polish them before committing to the full animation process.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"2-arc-visualization-with-motion-paths-and-onion-skinning\">\u003Cstrong>2. 
Arc Visualization With Motion Paths And Onion Skinning\u003C/strong>\u003C/h2>\u003Cp>Visualizing arcs is key to understanding the natural progression of your movement. You can use two DCC tool features for this use case:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Motion paths\u003C/strong> - Motion paths allow you to see the trajectory of a movement across a series of frames. By observing these paths, you can adjust the motion to follow smooth, circular arcs rather than linear ones.\u003C/li>\u003C/ul>\u003Cp>In Blender for example, the motion paths are shown in red for past frames and green for future frames:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Onion skinning\u003C/strong> - Onion skinning shows you multiple frames at once to see their progression over time. This way, you can make sure your arched animations will render well.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"3-easing-for-realism\">\u003Cstrong>3. Easing For Realism\u003C/strong>\u003C/h2>\u003Cp>Without proper timing and spacing, arcs would look off. While\u003Ca href=\"https://blog.cg-wire.com/timing-animation-principle/\"> \u003Cu>timing is the rhythm of your animation\u003C/u>\u003C/a>, spacing is the distance your object travels between each frame.\u003C/p>\u003Cp>Think of a car coming to a stop or a sprinter pushing off the blocks. The secret to animating these motions lies in easing, also known in animation as\u003Ca href=\"https://blog.cg-wire.com/slow-in-out/\"> \u003Cu>the slow in/out principle\u003C/u>\u003C/a>.\u003C/p>\u003Cp>By adjusting the spacing of your keyframes, you create the illusion of acceleration and deceleration to make movements look smoother. Start by having more frames closer together at the beginnings and ends of motion.\u003C/p>\u003Cp>In Blender, \u003Cstrong>the graph editor\u003C/strong> is your best friend when it comes to easing. By manipulating ease curves, you can fine-tune how animation properties change over time. 
Smooth, S-shaped curves tend to produce more natural motion, while sharper curves can create more snappy, dynamic movements.\u003C/p>\u003Chr>\u003Ch2 id=\"4-exaggeration-for-storytelling\">\u003Cstrong>4. Exaggeration For Storytelling\u003C/strong>\u003C/h2>\u003Cp>Animation isn't just about replicating reality: you need to\u003Ca href=\"https://blog.cg-wire.com/exaggeration-animation-principle/\"> \u003Cu>push boundaries for the sake of storytelling\u003C/u>\u003C/a>.\u003C/p>\u003Cp>By amplifying your arcs, you inject energy into scenes.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Push the boundaries\u003C/strong> - Begin by stretching the arcs of a character's movement slightly further than you initially planned. Experiment with the degree of exaggeration. For example, when a character runs, consider a more pronounced curve in their pose. Naruto's ninja run is iconic precisely because of the exaggerated arched body:\u003C/li>\u003Cli>\u003Cstrong>Facial expressions\u003C/strong> - But arcs aren't confined to limbs and bodies: you can use them in facial animations as well. When transitioning from a frown to a smile, watch for the arcs formed by the eyebrows and the corners of the mouth. Emotional states also influence the arc of your lip-syncing. Look at all the arcs used in the body language of Homer Simpson, showing how he goes from discomfort to shock:\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"5-dont-forget-follow-through-overlapping-action\">\u003Cstrong>5. Don't Forget Follow-Through &amp; Overlapping Action\u003C/strong>\u003C/h2>\u003Cp>A fluid arc should include other animation principles like\u003Ca href=\"https://blog.cg-wire.com/follow-through-overlapping-action/\"> \u003Cu>follow-through and overlapping actions\u003C/u>\u003C/a>.\u003C/p>\u003Cp>Follow-through action is the continuation of movement beyond the primary action. A jumping character will have her hair, loose-fitting clothes, and limbs trail behind the main jumping action. 
If arcs are involved, they'll also influence the follow-through. Same with overlapping actions, where different parts of a character move at different rates.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Use reference footage\u003C/strong> - Import reference footage in your DCC tool and notice the delays in movement, like how a hand swings after the arm stops swinging.\u003C/li>\u003Cli>\u003Cstrong>Break down the motion\u003C/strong> - When planning your animation, divide the actions into primary and secondary movements. Think about which parts should lead and which should follow.\u003C/li>\u003Cli>\u003Cstrong>Animate in layers\u003C/strong> - Focus first on the core motion, adding follow-through elements afterwards. This method helps keep track of the sequencing and the impact each element has on the overall movement.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"6-anti-principle-linear-animations\">\u003Cstrong>6. Anti-Principle: Linear Animations\u003C/strong>\u003C/h2>\u003Cp>Rules are great, but they are meant to be broken!\u003C/p>\u003Cp>There are scenarios where deviating from arcing motion with \u003Cstrong>linear movements is not just effective, but necessary to convey a specific mood\u003C/strong> or idea.\u003C/p>\u003Cp>Linear motions are perfect for depicting mechanical entities like robots or machines because they come across as precise, calculated, and unnatural. In Cyberpunk Edgerunners, Adam Smasher's animated movements are just translated frames:\u003C/p>\u003Cp>They can also be used to create a feeling of tension or unease. 
Sudden, direct movements break the fluidity of a scene and introduce abrupt moments for horror scenes or plot twists.\u003C/p>\u003Cp>Inversely, an abrupt, straight-line exit of a character from a scene, ignoring all physical laws, emphasizes the absurdity or urgency of the action to create laughter.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>The arc principle contributes significantly to the visual rhythm, realism, and appeal of movements. You don't need much to improve your animations:\u003C/p>\u003Col>\u003Cli>Start by sketching key poses. Visualize and draw the arc path that connects these poses.\u003C/li>\u003Cli>Use guidelines or onion-skinning techniques in your animation software to see the path of your motion.\u003C/li>\u003Cli>Arcs are not just about positioning but also timing. The speed of motion along the arc affects the animation's fluidity.\u003C/li>\u003Cli>By pushing poses and extending the path beyond what might be realistic, you can amplify the expressiveness of your animation.\u003C/li>\u003Cli>Once your character's primary action concludes, elements like clothing, hair, or limbs continue to follow the arc's path.\u003C/li>\u003C/ol>\u003Cp>Make sure to have a look at\u003Ca href=\"https://blog.cg-wire.com/12-principles-animation/\"> \u003Cu>the 11 other animation principles\u003C/u>\u003C/a> to get a better understanding of the arc principle and how they all relate to each other, but don't forget to sometimes break the rules and use linear motions for interesting results!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! 
We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":748,"comment_id":749,"feature_image":750,"featured":105,"visibility":10,"created_at":751,"updated_at":752,"custom_excerpt":753,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":754,"primary_tag":755,"url":756,"excerpt":753,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":757},"b13a0bca-d443-4df7-a196-0486e3c0deb8","6867f6262ff010000105b726","https://images.unsplash.com/photo-1457365050282-c53d772ef8b2?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fGFyY3xlbnwwfHx8fDE3NTE2NDU2MTJ8MA&ixlib=rb-4.1.0&q=80&w=2000","2025-07-04T17:41:26.000+02:00","2026-03-26T10:25:52.000+01:00","Discover how to apply the arc principle in animation to create more realistic, appealing, and emotionally resonant movement. 
Learn techniques for planning, visualizing, and exaggerating arcs, and when to break the rules for storytelling impact.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/arc-animation-principle/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@spacex?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">SpaceX\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/arc-animation-principle","2025-07-07T10:00:53.000+02:00",{"title":743},"arc-animation-principle","posts/arc-animation-principle",[764],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"5vGyszemHQIfgfivf_sXaAu3v47ZZkGYeVn1dNt-Lxg",{"id":767,"title":768,"authors":769,"body":7,"description":7,"extension":8,"html":771,"meta":772,"navigation":14,"path":783,"published_at":784,"seo":785,"slug":786,"stem":787,"tags":788,"__hash__":790,"uuid":773,"comment_id":774,"feature_image":775,"featured":105,"visibility":10,"created_at":776,"updated_at":777,"custom_excerpt":778,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":779,"primary_tag":780,"url":781,"excerpt":778,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":782},"ghost/posts:animation-asset-storage.json","How to Organize and Manage Animation Assets at Scale In 2026",[770],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">💿\u003C/div>\u003Cdiv class=\"kg-callout-text\">Animation files can balloon into terabytes of data. 
Learn how to keep your production organized, collaborative, and efficient with better storage workflows.\u003C/div>\u003C/div>\u003Cp>A single animation project can generate a vast amount of digital assets, from complex 3D models and preliminary previews to fully rendered videos.\u003C/p>\u003Cp>Each of these assets takes up significant storage space―sometimes terabytes of data!―presenting a unique set of challenges for studios.\u003C/p>\u003Cp>Effectively managing storage across various digital content creation (DCC) tools while coordinating distributed processes and facilitating remote team collaboration is a major pain point in animation production. We know this from our first-hand experience\u003Ca href=\"https://www.cg-wire.com/kitsu?ref=blog.cg-wire.com\"> \u003Cu>building Kitsu\u003C/u>\u003C/a>.\u003C/p>\u003Cp>Read on for best practices to master storage management!\u003C/p>\u003Chr>\u003Ch2 id=\"why-storage-management\">\u003Cstrong>Why Storage Management\u003C/strong>\u003C/h2>\u003Cp>Animation projects, especially those involving 3D elements and high-resolution outputs, generate massive amounts of data. Efficient storage management makes sure that \u003Cstrong>all assets, from raw footage to rendered files, are organized and accessible\u003C/strong> without overwhelming the infrastructure. As teams use a variety of specialized software, storing, organizing, and sharing these assets is a complex puzzle.\u003C/p>\u003Cp>Animation projects often involve teams distributed across different locations, sometimes different studios. Effective \u003Cstrong>storage solutions facilitate collaboration\u003C/strong> by allowing team members to access, share, and update files efficiently, regardless of where they are located. 
With cyber threats increasingly targeting creative industries, secure storage management is key to protecting intellectual property from unauthorized access.\u003C/p>\u003Cp>Efficient storage systems also reduce load times for large assets, which increases the performance of software tools and \u003Cstrong>improves the overall productivity of artists\u003C/strong> and technical staff. Proper storage management \u003Cstrong>help reduce costs\u003C/strong> by optimizing existing storage resources, eliminating the need for unnecessary hardware purchases and minimizing cloud storage fees.\u003C/p>\u003Chr>\u003Ch2 id=\"1-resilient-centralized-storage\">\u003Cstrong>1. Resilient, Centralized Storage\u003C/strong>\u003C/h2>\u003Cp>It's important to have a robust storage solution that centralizes all the project assets into a single, secure location.\u003C/p>\u003Cp>It should combine \u003Cstrong>centralized data repositories with redundancy and backup systems\u003C/strong> to ensure that all vital assets are not only easily accessible but also protected against loss or corruption. This system allows animators, designers, and other team members to work together without conflict, knowing that the material they need is shielded against hardware failures, cyber threats, and accidental deletions.\u003C/p>\u003Cp>Time is a critical factor in productions. Decentralized or fragmented storage systems can lead to lost hours or even days as teams search for assets or face repetitive losses that delay production timelines. An elevated risk of losing critical work could result in considerable setbacks and financial losses.\u003C/p>\u003Col>\u003Cli>Start by \u003Cstrong>selecting reliable storage solutions\u003C/strong> that fit your project needs. 
It is crucial to conduct thorough research and possibly consult with IT experts to identify the right combination of storage technologies, high-capacity servers that can handle intensive data loads and advanced backup technologies to duplicate data regularly to secure locations.\u003C/li>\u003Cli>Invest in \u003Cstrong>strong cybersecurity measures\u003C/strong> to protect against unauthorized access and data breaches. Regular updates to security protocols and continuous monitoring should be standard procedures.\u003C/li>\u003Cli>\u003Cstrong>Foster a collaborative workflow\u003C/strong> by creating a centralized asset management platform where team members can easily access, share, and update assets in real time. This method not only boosts productivity but also ensures consistency throughout production.\u003C/li>\u003C/ol>\u003Cp>\u003Ca href=\"https://www.cg-wire.com/studio-database?ref=blog.cg-wire.com\">\u003Cu>The Kitsu API provides centralized storage\u003C/u>\u003C/a> for assets, shots, casting, tasks, and everything a production pipeline needs. The casting management feature makes it easy to build breakdown tables to list casted assets for each shot in a meaningful way:\u003C/p>\u003Chr>\u003Ch2 id=\"2-dcc-tool-integrations\">\u003Cstrong>2. DCC Tool Integrations\u003C/strong>\u003C/h2>\u003Cp>Modern storage solutions often integrate with other tools in the production pipeline to streamline workflows and automate repetitive tasks for higher productivity.\u003C/p>\u003Cp>\u003Cstrong>DCC tool integrations facilitate the transfer and synchronization of assets\u003C/strong>, including 3D models, textures, animations, and other digital elements, across different software.\u003C/p>\u003Cp>In a typical pipeline, artists rely on a variety of specialized tools for tasks like modelling, rigging, animation, texturing, lighting, rendering, etc. 
Without effective integrations, transitioning assets between these tools can quickly become a cumbersome and error-prone process leading to inconsistencies. DCC integrations help eliminate redundant processes, reduce the likelihood of errors, and enable teams to focus on creative tasks rather than on technical challenges related to asset compatibility and data transfer.\u003C/p>\u003Col>\u003Cli>It's crucial to \u003Cstrong>ensure compatibility with industry standards and interoperability protocols\u003C/strong>, like Alembic, USD, and FBX, to facilitate smooth data exchange between different systems.\u003C/li>\u003Cli>\u003Cstrong>Maintaining clear documentation of the integration process\u003C/strong> allows technical artists to understand workflows and resolve issues swiftly.\u003C/li>\u003Cli>Regularly \u003Cstrong>updating and testing integrations\u003C/strong> to align with software updates and new features ensures continued performance.\u003C/li>\u003C/ol>\u003Cp>For example,\u003Ca href=\"https://www.cg-wire.com/software-integrations?ref=blog.cg-wire.com\"> \u003Cu>Kitsu allows artists to publish asset previews directly from their DCC tools\u003C/u>\u003C/a>, so that it's easy to keep track of the work you do with your team, without overwhelming your storage space with full-blown 3D models or HD videos:\u003C/p>\u003Cp>Duplicating assets between databases can easily blow up the storage space you need, so using appropriate levels of detail is important.\u003C/p>\u003Chr>\u003Ch2 id=\"3-traceability\">\u003Cstrong>3. 
Traceability\u003C/strong>\u003C/h2>\u003Cp>Traceability is the ability to track and manage the history, location, and usage of animation assets throughout the project's lifecycle.\u003C/p>\u003Cp>It includes \u003Cstrong>maintaining detailed records of asset changes\u003C/strong>, including who made what modifications and when, to allow all team members to have access to up-to-date information.\u003C/p>\u003Cp>With numerous assets being developed and iterated on simultaneously, having a robust traceability system in place helps maintain order. It enforces accountability, as team members can easily identify the source of any asset-related issue, and facilitates rapid problem resolution. It also helps project management by providing clear insights into the progress and status of assets for teams to meet deadlines and budgets more effectively.\u003C/p>\u003Col>\u003Cli>A centralized asset management system serves as \u003Cstrong>a single source of truth\u003C/strong> for all asset-related information.\u003C/li>\u003Cli>\u003Cstrong>Consistent naming conventions and metadata\u003C/strong> help organize and retrieve assets efficiently.\u003C/li>\u003Cli>\u003Cstrong>Regular audits and updates to the asset database\u003C/strong> ensure its accuracy and reliability.\u003C/li>\u003Cli>Fostering a collaborative culture where \u003Cstrong>team members diligently document changes\u003C/strong> and communicate effectively significantly improves traceability and overall production efficiency.\u003C/li>\u003C/ol>\u003Cp>\u003Ca href=\"https://www.cg-wire.com/review-engine?ref=blog.cg-wire.com\">\u003Cu>A review engine like Kitsu's\u003C/u>\u003C/a> allows you to quickly add annotations with your team to work on the next batch of edits, and\u003Ca href=\"https://www.cg-wire.com/casting-management?ref=blog.cg-wire.com\"> \u003Cu>the breakdown tables\u003C/u>\u003C/a> make it easy to access assets related to each shot in a single place.\u003C/p>\u003Cp>Last but not least, this 
system should support version control.\u003C/p>\u003Chr>\u003Ch2 id=\"4-versioning\">\u003Cstrong>4. Versioning\u003C/strong>\u003C/h2>\u003Cp>Animation involves multiple iterations, making it essential to track versions of assets and scenes.\u003C/p>\u003Cp>Multiple artists and teams often work concurrently on various elements. \u003Cstrong>Versioning ensures that everyone is working with the most up-to-date and correct files\u003C/strong>, minimizing the risk of errors (e.g. overwriting files) or inconsistencies in the final product. It also allows teams to roll back to previous versions if necessary and maintain a clear history of the asset's development.\u003C/p>\u003Cp>After reviews, Kitsu previews are automatically versioned for easy comparison.\u003C/p>\u003Cp>All new changes, like comments or preview,s appear in the newsfeed to keep everyone informed about available versions.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>An effective asset storage solution for animation productions must include key features like resilience, centralization, integration with digital content creation tools, traceability, and versioning to make sure assets are not only safeguarded against potential data loss but also efficiently organized and easily accessible to the entire production team.\u003C/p>\u003Cp>Additionally, the integration of asset storage solutions with project management tools improves the workflow significantly to coordinate teams across different departments or even studios. Using a pipeline tracker like Kitsu further streamlines this process by providing a comprehensive oversight of project assets, status updates, and team interactions.\u003C/p>\u003Cp>It's important to consider the design of your asset storage solution so that it fits your specific needs as an animation studio: it's not enough to drop everything in a Google Drive when you handle terabytes of data. 
Kitsu neatly organizes everything in a logical way by shots and previews, and you can use the API to match the storage logic with your studio's workflow.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":773,"comment_id":774,"feature_image":775,"featured":105,"visibility":10,"created_at":776,"updated_at":777,"custom_excerpt":778,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":779,"primary_tag":780,"url":781,"excerpt":778,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":782},"8ad6d628-2316-4ff4-9b32-50f0e8d116c1","683441e02c2dc700019388a3","https://images.unsplash.com/photo-1611153730462-e84a16b8c6e1?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDMyfHxkYXRhJTIwc3RvcmFnZXxlbnwwfHx8fDE3NDgyNzY0Mzl8MA&ixlib=rb-4.1.0&q=80&w=2000","2025-05-26T12:26:40.000+02:00","2026-03-26T10:09:13.000+01:00","Efficient asset storage is critical in animation production. 
This guide explores best practices for organizing, securing, and versioning digital assets across large-scale projects and distributed teams.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-asset-storage/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@pondjup?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Pond Juprasong\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-asset-storage","2025-06-23T10:00:57.000+02:00",{"title":768},"animation-asset-storage","posts/animation-asset-storage",[789],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"EhRsBQQWWvv2jEKNEb7fQPjonvy2mYF_UL_EWmL3PU8",{"id":792,"title":793,"authors":794,"body":7,"description":7,"extension":8,"html":796,"meta":797,"navigation":14,"path":808,"published_at":809,"seo":810,"slug":811,"stem":812,"tags":813,"__hash__":815,"uuid":798,"comment_id":799,"feature_image":800,"featured":105,"visibility":10,"created_at":801,"updated_at":802,"custom_excerpt":803,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":804,"primary_tag":805,"url":806,"excerpt":803,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":807},"ghost/posts:straight-ahead-action-pose-to-pose-animation.json","(2026) When to Use Straight-Ahead or Pose-to-Pose Animation",[795],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🤔\u003C/div>\u003Cdiv class=\"kg-callout-text\">Every animator faces the same question: should I follow the action frame by frame or block out the key poses 
first?\u003C/div>\u003C/div>\u003Cp>Should an animator start by drawing the first frame and continue improvising, or should they focus on creating keyframes first? This dilemma points to a fundamental choice in animation techniques, whether to adopt a \"straight-ahead\" or a \"pose-to-pose\" approach.\u003C/p>\u003Cp>Animation has many styles and levels of complexity, each requiring different levels of planning and execution. Not all animations are created equal: some demand meticulous preparation to capture intricate details. The straight-ahead action and pose-to-pose principle was developed by Disney animators to address these varying needs, offering guidance on how to approach this creative process effectively.\u003C/p>\u003Cp>Read on to discover how this principle can be harnessed to meet contemporary animation challenges!\u003C/p>\u003Chr>\u003Ch2 id=\"whats-straight-ahead-action\">\u003Cstrong>What's Straight-Ahead Action\u003C/strong>\u003C/h2>\u003Cp>Straight-ahead action focuses on creating movement frame by frame from the beginning to the end.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXeRF645aEZpyWtRn1uWptYW9FTxISne5v4PrNMGeBnqfKnE7RH-YW1coDZTcK0_0rJSU6gCBM1wBctY805OWNkRCRm62OVpd-EThpFTI5ctnpgCapJjAk7I0lOJ0evmTOTxTdf9HQ?key=CHOiUiba8JrsBzDfvMO3Zw\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"339\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Animost Studio\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>An example of straight-ahead action is animating a character performing a fast, uncontrolled dance. 
The animator starts with the initial pose and progress frame by frame to allow each movement to flow unpredictably into the next.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-pose-to-pose\">\u003Cstrong>What's Pose-to-Pose\u003C/strong>\u003C/h2>\u003Cp>Pose-to-pose involves planning and creating specific key frames or poses first, and then filling in the in-between frames to smooth the motion.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXeohtT1gRj7AO0hosHESzpyUVT0wAp8s86dgIB3RD8xMF8OzofE0xvlpAUR6N10-bfs9CJVOsDkOB2-Jbb4pjpbyx9_hJLJQjgkjyXD-M1Ph-Lw8gkZC88_j9KLh8aBOM7hF3Fkfg?key=CHOiUiba8JrsBzDfvMO3Zw\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"351\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Creativity School\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>If you're animating a character jumping, you would start by drawing the initial crouch before the jump, the peak of the jump when the character is in the air, and the landing pose. With these keyframes set, you then draw the in-between frames to transition smoothly from one key pose to the next.\u003C/p>\u003Chr>\u003Ch2 id=\"why-is-this-principle-important\">\u003Cstrong>Why Is This Principle Important\u003C/strong>\u003C/h2>\u003Cp>Understanding the straight-ahead and pose-to-pose action principle is important because each method offers distinct advantages and challenges. 
Animators have to pick the right technique to improve the quality of their animation process.\u003C/p>\u003Cp>The right method prevents costly edits, particularly when dealing with a high number of frames: if an animator uses the straight-ahead method in a situation where pose-to-pose would be more appropriate, they may find themselves having to redo substantial sections of the animation to align with the intended timing or structure, which could strain both the budget and the schedule.\u003C/p>\u003Cp>Choosing the right approach also enables animators to keep as much creative control as possible over the movement being created. They can effectively convey the intended mood, emotion, and storytelling elements without compromising on quality or creative vision.\u003C/p>\u003Chr>\u003Ch2 id=\"straight-ahead-vs-pose-to-pose-pros-cons\">\u003Cstrong>Straight-Ahead vs Pose-to-pose pros &amp; cons\u003C/strong>\u003C/h2>\u003Cp>Straight-Ahead Action is a technique favored by animators when the goal is \u003Cstrong>to capture dynamic, detailed, or unpredictable movements\u003C/strong>. Animating one frame after another in a continuous flow allows for a high degree of creativity and spontaneity in how the action evolves. The result is often a lively and organic quality that can be more challenging to achieve with structured methods.\u003C/p>\u003Cp>But this method can also lead to \u003Cstrong>variations or inaccuracies in timing and proportions\u003C/strong>, as the animator is working without specific predetermined keyframes. Straight-ahead action requires a clear vision, since any mistakes or adjustments would require substantial rework, which can be both time-consuming and costly.\u003C/p>\u003Cp>On the other hand, the Pose-to-Pose approach offers animators \u003Cstrong>greater control over timing and precision\u003C/strong> by starting with key frames and then filling in the in-betweens. 
This technique ensures that the animated sequence hits specific poses or expressions precisely as intended. It's particularly useful for complex scenes that demand consistency and accuracy by allowing animators to plan the pacing of a sequence meticulously.\u003C/p>\u003Cp>If not executed with care, pose-to-pose animation can result in \u003Cstrong>movements that feel more mechanical\u003C/strong> so animators may need to apply additional tweaks to achieve a more natural sequence flow.\u003C/p>\u003Cp>In the modern animation industry, \u003Cstrong>there's a noticeable trend towards relying more on pose to pose\u003C/strong> rather than straight-ahead action. Animation studios operate under tight schedules and budgets:\u003C/p>\u003Cul>\u003Cli>Pose-to-pose action supports a more efficient workflow because keyframes can be planned and approved before moving into the more time-consuming in-between frames.\u003C/li>\u003Cli>With digital animation tools, the process of setting keyframes and automating in-betweens is streamlined. Pose to pose is the default workflow because of how they handle rigging and interpolation.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"1-step-spline-mode\">\u003Cstrong>1. 
Step &amp; Spline Mode\u003C/strong>\u003C/h2>\u003Cp>In digital content creation tools like Blender, animations are often created using keyframes, and two common interpolation modes that relate directly to the pose to pose animation principle are \"step\" mode and \"spline\" mode.\u003C/p>\u003Cp>\u003Cstrong>In step mode, the animation holds one keyframe until it reaches the next one\u003C/strong>, with no interpolation in between: the animated object or character will \"jump\" from one pose to the next without transitioning smoothly between them.\u003Ca href=\"https://blog.cg-wire.com/stepped-animation/\"> \u003Cu>Step mode is crucial\u003C/u>\u003C/a> during the early stages of pose-to-pose animation because it allows animators to focus on creating strong, clear poses by eliminating distractions that may arise from premature smoothing transitions.\u003C/p>\u003Cp>\u003Cstrong>Spline mode introduces interpolation between keyframes\u003C/strong> to create smooth transitions. The software calculates the in-between frames using curves (splines), creating fluid movement from pose to pose. Once the key poses and timing are locked down in step mode, animators switch to spline mode to refine the animation. The transition to spline mode allows them to work on the arcs, easing in and out of movements, and other nuanced performance details that bring the animation to completion. 
Spline interpolation lets animators adjust these curves to control how fast or slow an object moves between poses.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXdo1TYT4fW66k26X2otVlHjlyvkTBTjYkFs_NRZ71EKE4eguU0AV8Zm33vJMyCCy8g0LbdVYPssGWdsPBDEyTILOnpIA7k5uCkw5vcP-KgPfFqJjEXzP14yGBCa1IjlaqZFmQ6y?key=CHOiUiba8JrsBzDfvMO3Zw\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"351\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Wobbe Koning on YouTube\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"2-onion-skinning\">\u003Cstrong>2. Onion Skinning\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Onion skinning (also known as ghosting) is another feature used to visualize multiple frames at once in a sequence\u003C/strong>, which allows animators to refine and adjust their work with greater precision. This tool is integral in digital content creation (DCC) software and is particularly useful when applying the straight-ahead and pose-to-pose action principle in animation.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXfaBrvcnRnpgagOoWmL9-Kk5utl-LiiCGGd-SDUGngBLF5twVtjbtRLJXwe9utyj8GogxXS3t1jWEsmwiNehHT75iekWRMs7G4dvIkq4_QhsKso0jgaLVmULmmIkdY-LkY7TrnLLQ?key=CHOiUiba8JrsBzDfvMO3Zw\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"316\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Manual\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Onion skinning helps with the straight-ahead approach by allowing animators to see several frames before and after the current frame they are working on. This visibility ensures that the animator maintains consistency in movement, timing, and spacing between frames. 
By seeing the adjacent frames simultaneously, animators can make more informed decisions about how to progress each frame.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/onion-skinning/\">\u003Cu>Onion skinning is also key for pose-to-pose\u003C/u>\u003C/a> as it shows keyframes and their in-between progressions simultaneously. It allows animators to compare and adjust the in-between frames effectively.\u003C/p>\u003Chr>\u003Ch2 id=\"3-motion-trails\">\u003Cstrong>3. Motion trails\u003C/strong>\u003C/h2>\u003Cp>Similar to onion skinning and often used in 3D animation, \u003Cstrong>motion trails are particularly useful to visualize the flow of a moving object\u003C/strong>. They provide a line or curve that showcases the path through which an object travels to convey information about its velocity and trajectory.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXfnBVONywBweE62vvKBrQpEQzf3he6HI9FhT39sePsdNKmmY0K5lC4sCQdoC_fOzWJ-Lvnv2UansKRfS86ypdxnsZ0N7AyVZGsJ7hDPfCQA8Tk0BBcW2k3vmp5isEvgFG3XV68SWw?key=CHOiUiba8JrsBzDfvMO3Zw\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"316\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Manual\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>By observing the distribution of points along the trail, animators can adjust the speed and timing of the movement. 
When animating interactions between characters or objects, it also helps predict and plan movement trajectories to avoid unrealistic overlaps or collisions.\u003C/p>\u003Cp>To animate a sword swing animation using pose-to-pose, an animator would use motion trails to visualize the arc through which the sword travels to adjust the movement, and ghosting to see the sword's position at various intervals.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Both the straight-ahead and pose-to-pose techniques offer distinct advantages and challenges, but pose-to-pose is the predominant method in modern animation studios. While straight-ahead animation allows for more spontaneous and fluid movements, pose-to-pose provides a structured approach that is ideal for achieving precise timing while leaving room for easy edits.\u003C/p>\u003Cp>The pose-to-pose method's prevalence in modern workflows can be attributed to its ability to facilitate collaboration and streamline the animation process, allowing artists to plan out key movements and ensure consistency across complex scenes. This method is further augmented by the mainstream usage of DCC tool features like step/spline mode, onion skinning, and motion trails.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":798,"comment_id":799,"feature_image":800,"featured":105,"visibility":10,"created_at":801,"updated_at":802,"custom_excerpt":803,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":804,"primary_tag":805,"url":806,"excerpt":803,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":807},"377a9e89-3a34-4d0f-9757-37bb64f0fa31","683441e32c2dc700019388af","https://images.unsplash.com/photo-1529229504105-4ea795dcbf59?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDZ8fG1vdmVtZW50fGVufDB8fHx8MTc0ODI2MTMxOXww&ixlib=rb-4.1.0&q=80&w=2000","2025-05-26T12:26:43.000+02:00","2026-02-20T06:05:00.000+01:00","Learn the difference between straight-ahead and pose-to-pose animation, and how to choose the best method for your scene. 
This guide covers pros, cons, and digital techniques to help animators plan with confidence.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/straight-ahead-action-pose-to-pose-animation/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@aoddeh?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Ahmad Odeh\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/straight-ahead-action-pose-to-pose-animation","2025-06-09T10:00:37.000+02:00",{"title":793},"straight-ahead-action-pose-to-pose-animation","posts/straight-ahead-action-pose-to-pose-animation",[814],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"54oJp8dGh-St4ZQu2f06F8pPe2lgVE9nnBe750bIolU",{"id":817,"title":818,"authors":819,"body":7,"description":7,"extension":8,"html":821,"meta":822,"navigation":14,"path":833,"published_at":834,"seo":835,"slug":836,"stem":837,"tags":838,"__hash__":840,"uuid":823,"comment_id":824,"feature_image":825,"featured":105,"visibility":10,"created_at":826,"updated_at":827,"custom_excerpt":828,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":829,"primary_tag":830,"url":831,"excerpt":828,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":832},"ghost/posts:from-script-to-screen-voice-acting-in-animated-storytelling.json","Voice Acting in Animated Storytelling (2026): From Script to Screen",[820],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🗣️\u003C/div>\u003Cdiv class=\"kg-callout-text\">Voice acting is more than just reading lines—it’s a blend of 
performance, training, and emotional intelligence. Whether it’s narration, dubbing, or full-on character work, voice actors are essential to making animated worlds feel real and memorable.\u003C/div>\u003C/div>\u003Ch2 id=\"introduction\">\u003Cstrong>Introduction\u003C/strong>\u003C/h2>\u003Cp>In Japan, voice actors possess a star power akin to that of pop idols and movie celebrities, with their own fan clubs and reality shows. These 'seiyuu' showcase the critical role voice acting plays in storytelling.\u003C/p>\u003Cp>But in the rest of the world, voice acting is still an underrated profession. In today's article, we wanted to shed some light on the fascinating profession of voice actors and how they help create memorable characters.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-voice-acting\">\u003Cstrong>What's Voice Acting\u003C/strong>\u003C/h2>\u003Cp>Voice acting is the art of \u003Cstrong>providing voices to animated characters\u003C/strong>.\u003C/p>\u003Cp>An example of voice acting in animation is Tom Hanks as Woody in the \"Toy Story\" films.\u003C/p>\u003Chr>\u003Ch2 id=\"why-animation-needs-voice-actors\">\u003Cstrong>Why Animation Needs Voice Actors\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Voice actors bring animated characters to life\u003C/strong> by infusing them with personality and emotions. Robin Williams's performance as the Genie in Disney's \"Aladdin\" gave the character a memorable personality that contributed significantly to the film's success. Williams’s improvisational skills added layers to the Genie to make him endearing and humorous.\u003C/p>\u003Cp>Through their vocal performances, \u003Cstrong>voice actors convey narrative nuances\u003C/strong> that visuals alone cannot fully capture. 
James Earl Jones's deep and resonant voice as Mufasa in \"The Lion King\" helped convey authority, wisdom, and warmth.\u003C/p>\u003Cp>Beyond storytelling, \u003Cstrong>good voice acting has a significant impact on the appeal and memorability of a character\u003C/strong>, especially if the voice actor already has a following. When you think of your favorite animated character, their voice is one of the first elements that comes to mind.\u003C/p>\u003Chr>\u003Ch2 id=\"types-of-voice-acting-in-animation\">\u003Cstrong>Types Of Voice Acting In Animation\u003C/strong>\u003C/h2>\u003Cp>Voice acting can be categorized into four main types: character, narration, commercial, and translation.\u003C/p>\u003Cp>Each category requires a unique set of skills and serves distinct purposes in the storytelling process:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Character voice acting\u003C/strong> - Character voice acting is the most recognized form of voice work in animation. Actors bring animated personas to life through vocal performances that reflect the characters' personalities, emotions, and arcs. This type of voice acting demands versatility, as actors often need to embody a wide range of characters, from heroes and villains to sidekicks and background figures, sometimes even within the same project. Character voice actors must be adept at altering their vocal tone, pitch, and style to match the on-screen characters.\u003C/li>\u003Cli>\u003Cstrong>Narration\u003C/strong> - Narration serves as a guiding voice that provides context, exposition, or insight into the storyline. A narrator's role is to maintain a balance between being informative and engaging, often setting the mood and tone for the entire piece. This form of voice acting is pivotal in storytelling techniques where visual cues alone may not suffice. 
Narrators often act as an omniscient presence, helping to bridge scenes, introduce characters, or provide backstory.\u003C/li>\u003Cli>\u003Cstrong>Commercial voice acting\u003C/strong> - Commercial voice acting in animation is used in animated advertisements and promotional content. This type of voice work focuses on delivering messages clearly and persuasively while respecting the brand tone. Voice actors adjust their vocal delivery to suit the target audience to evoke specific emotions or drive consumer actions. The challenge lies in conveying enthusiasm, trustworthiness, and clarity, often within a short time frame, to effectively engage and persuade viewers.\u003C/li>\u003Cli>\u003Cstrong>Translation voice acting\u003C/strong> - Translation voice acting, also known as dubbing, is crucial in adapting animated content for international audiences. It replaces the original dialogue with a translated script while maintaining the integrity of the original performance. Like regular voice acting, dubbing requires actors to match the lip movements and emotional expressions of the animated characters. Actors often have to navigate cultural nuances and idiomatic expressions to ensure the translated dialogue resonates with local audiences while preserving the essence of the original content.\u003C/li>\u003C/ul>\u003Cp>In any case, voice actors follow a similar 4-phase process.\u003C/p>\u003Chr>\u003Ch2 id=\"1-voice-training\">\u003Cstrong>1. Voice Training\u003C/strong>\u003C/h2>\u003Cp>Voice training creates \u003Cstrong>the foundation for delivering compelling performances\u003C/strong>.\u003C/p>\u003Cp>One of the primary elements of voice training is \u003Cstrong>versatility\u003C/strong>―developing a wide vocal range that allows actors to bring different characters to life, each with a unique voice. 
Whether it's portraying a young child, a gruff villain, or a whimsical creature, the ability to switch between various pitches, tones, and styles is essential for creating distinctive and memorable characters.\u003C/p>\u003Cp>\u003Cstrong>Endurance\u003C/strong> is another crucial aspect of voice training. Voice actors often undergo long recording sessions that can be vocally demanding, so developing vocal control and stamina is necessary to maintain consistent quality throughout these extended periods. Proper breathing techniques, posture, and vocal exercises are integral to enhancing endurance.\u003C/p>\u003Chr>\u003Ch2 id=\"2-character-analysis\">\u003Cstrong>2. Character Analysis\u003C/strong>\u003C/h2>\u003Cp>A thorough understanding of the characters not only shapes how they are portrayed but also how they resonate with audiences. This analysis is intrinsically linked with both scriptwriting and character design to form a cohesive framework for voice actors to deliver performances that bring animated characters to life.\u003C/p>\u003Cp>Narration and commercial voice actors aren't exempted―far from it―since they still play a character and relate to an audience.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/animation-scripts/\">\u003Cu>Scriptwriting lays the foundation for character development\u003C/u>\u003C/a>. It is through the script that characters gain their personalities, motivations, and distinctive voices. \u003Cstrong>A well-crafted script provides voice actors with detailed insights into a character’s background, emotional arc, and behavioral nuances.\u003C/strong> This information is important since it guides actors in making informed decisions on how to approach a role. 
By interpreting the dialogue and understanding the context, voice actors can add layers of depth to their performances and make sure each character is authentically represented according to the vision of the writers and directors.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\">\u003Cu>Character design is another key component of the character analysis process\u003C/u>\u003C/a>. \u003Cstrong>It involves visual storytelling elements like the character's appearance, movement, and expressiveness, which influence how a character is perceived by both the actors and the audience.\u003C/strong> For example, the physical traits depicted in the design, like the size, shape, and facial features, inform the voice actor on the possible vocal qualities to adopt, whether that involves adjusting their pitch, pace, or tone.\u003C/p>\u003Chr>\u003Ch2 id=\"3-acting\">\u003Cstrong>3. Acting\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>At the heart of voice acting is the performance itself.\u003C/strong> While each actor brings their unique style to a role, they must respect and embody the character's nature. This balance between personal flair and fidelity to the character is what distinguishes exceptional voice acting.\u003C/p>\u003Cp>The foundation of voice acting lies in the actor's ability to \u003Cstrong>read and interpret scripts accurately\u003C/strong>: a skilled voice actor not only reads the lines but also grasps the underlying subtext to bring depth to their performance while matching the vision of the directors.\u003C/p>\u003Cp>Delivering dialogue also requires more than just a clear voice: \u003Cstrong>it demands timing, rhythm, and interaction with other characters\u003C/strong>, even when the actor might be performing alone in the studio. The challenge is to match the energy and intention of the scene while maintaining clarity and fluidity in speech.\u003C/p>\u003Chr>\u003Ch2 id=\"4-sound-design\">\u003Cstrong>4. 
Sound Design\u003C/strong>\u003C/h2>\u003Cp>Last but not least, voice actors need to take into account sound design.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/sound-design-in-animation-definition-process-challenges/\">\u003Cu>Sound design goes far beyond merely adding background noise or effects\u003C/u>\u003C/a>. It begins with the initial stages of production, often working in tandem with the voice acting team to ensure that the overall auditory atmosphere aligns with the tone of the animation. This synergy is crucial, as voice acting provides the primary emotional connection between the characters and the audience: \u003Cstrong>sound design supports and amplifies this connection\u003C/strong> by creating an immersive soundscape that captures the unique essence of each scene.\u003C/p>\u003Cp>In the early stages, sound designers might participate in table reads or recording sessions alongside the voice actors to grasp the nuances of the character's voices and the timing of their lines. \u003Cstrong>Understanding these elements helps in designing sounds that match the energy, rhythm, and pacing of the dialogue\u003C/strong>.\u003C/p>\u003Cp>In some cases, \u003Cstrong>voice actors can be responsible for voice foley\u003C/strong>, including sounds like grunts, growls, roars, humming, etc.\u003C/p>\u003Cp>During post-production, \u003Cstrong>sound designers meticulously synchronize sound effects with the animated visuals and voice tracks\u003C/strong>. They layer ambient sounds, Foley effects, and atmospheric audio to flesh out the world in which the characters exist.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Voice acting is a multifaceted art form that brings animated characters to life through a dynamic blend of creativity and technical skill. 
This article explored the various types of voice acting, emphasizing the importance of specialized voice training and in-depth character analysis in delivering memorable performances. Voice actors not only need to master their vocal abilities but also possess the insight to see into their characters' personalities, motivations, and emotions.\u003C/p>\u003Cp>Voice actors are not just performers; they are storytellers connecting audiences to animated worlds.\u003C/p>\u003Cp>Back in Japan, the seiyuu phenomenon underscores not only the cultural significance but also the artistic depth that expert voice actors contribute to animation. It's clear that the future of animation will continue to be profoundly shaped by these powerful voices, so consider voice acting as a viable career path!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":823,"comment_id":824,"feature_image":825,"featured":105,"visibility":10,"created_at":826,"updated_at":827,"custom_excerpt":828,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":829,"primary_tag":830,"url":831,"excerpt":828,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":832},"f4f19d8f-0c22-4f5b-b409-412c96c4ec0f","6818549287083b0001edea26","https://images.unsplash.com/photo-1516485392461-3961cc21f1e7?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fHZvaWNlJTIwYWN0b3J8ZW58MHx8fHwxNzQ2NjAyNzEyfDA&ixlib=rb-4.1.0&q=80&w=2000","2025-05-05T08:02:58.000+02:00","2026-03-26T10:34:47.000+01:00","From Japan’s seiyuu stars to iconic Disney performances, voice actors are the lifeblood of animated storytelling. 
Discover how they shape characters, elevate emotion, and bring entire worlds to life through sound.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/from-script-to-screen-voice-acting-in-animated-storytelling/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@dylu?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Jacek Dylag\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/from-script-to-screen-voice-acting-in-animated-storytelling","2025-05-26T10:00:24.000+02:00",{"title":818},"from-script-to-screen-voice-acting-in-animated-storytelling","posts/from-script-to-screen-voice-acting-in-animated-storytelling",[839],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"_o6HGUU3eZlPTc_TisdEszM1GYeSXEYD7rEr1qaxorA",{"id":842,"title":843,"authors":844,"body":7,"description":7,"extension":8,"html":846,"meta":847,"navigation":14,"path":858,"published_at":859,"seo":860,"slug":861,"stem":862,"tags":863,"__hash__":865,"uuid":848,"comment_id":849,"feature_image":850,"featured":105,"visibility":10,"created_at":851,"updated_at":852,"custom_excerpt":853,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":854,"primary_tag":855,"url":856,"excerpt":853,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":857},"ghost/posts:animation-art-style.json","How to Find Your Animation Art Style in a World of AI (2026)",[845],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎨\u003C/div>\u003Cdiv class=\"kg-callout-text\">Finding your animation style isn’t just about visuals—it’s about 
storytelling, exploration, and making space for your unique creative voice. This guide shows how to stand out in a sea of sameness with authenticity and intent.\u003C/div>\u003C/div>\u003Ch2 id=\"introduction\">\u003Cstrong>Introduction\u003C/strong>\u003C/h2>\u003Cp>In a world where technology eats everything (\u003Cem>cough\u003C/em> AI \u003Cem>cough\u003C/em>), it's easy to wonder if the art of animation is losing its unique touch.\u003C/p>\u003Cp>Hayao Miyazaki once famously critiqued the use of artificial intelligence in animation, suggesting that the soul of artistry could never be replicated by machines.\u003C/p>\u003Cp>But here we are in 2025 with everyone posting generated \"Ghibli art\".\u003C/p>\u003Cp>Animation is not just about bringing characters to life, fortunately. It can also be about leaving your personal imprint on every frame and scene.\u003C/p>\u003Cp>Miyazaki’s work is iconic not because it follows a formula, but because it is steeped in his unique vision, storytelling prowess, and deep emotional resonance. Just as Miyazaki carved a niche for himself by staying true to his passion and principles, you, too, have the potential to forge your path.\u003C/p>\u003Cp>In a world of \"AI art\", your distinct style can be what sets you apart.\u003C/p>\u003Cp>Read this article for some tips on where to start.\u003C/p>\u003Chr>\u003Ch2 id=\"why-you-need-an-art-style-as-an-animator\">\u003Cstrong>Why You Need An Art Style As An Animator\u003C/strong>\u003C/h2>\u003Cp>In a competitive industry like animation, \u003Cstrong>having a recognizable style sets you apart from other animators\u003C/strong>. It becomes a part of your personal brand.\u003C/p>\u003Cp>For freelance animators or those looking to establish their own studios, \u003Cstrong>a strong, consistent style can attract clients who resonate with your vision\u003C/strong>. 
Audiences are also drawn to animations that offer a fresh, unique perspective.\u003C/p>\u003Cp>You might wonder what the point of developing your own style is when an AI can blatantly steal it without any repercussions.\u003C/p>\u003Cp>It's important to note that Ghibli's situation is an extreme case. For Internet communities, it's a meme fueled by Ghibli's decades of influence on the collective unconscious. It reflects neither the vision nor the sense of storytelling of the studio―an empty shell.\u003C/p>\u003Cp>Your art style is \u003Cstrong>a reflection of your personality, experiences, and influences\u003C/strong>. It allows you to \u003Cstrong>express your individuality\u003C/strong>, thoughts, feelings, and perspectives through your work. This personal touch makes your animations more relatable.\u003C/p>\u003Cp>Developing a style that feels authentically yours brings \u003Cstrong>a sense of satisfaction\u003C/strong>. It allows for greater creative freedom, as you are not confined by the expectations or norms set by others.\u003C/p>\u003Cp>AI cannot steal any of those, so go ahead and have fun anyway!\u003C/p>\u003Chr>\u003Ch2 id=\"1-finding-your-voice\">\u003Cstrong>1. Finding Your Voice\u003C/strong>\u003C/h2>\u003Cp>Animation is about stories, so \u003Cstrong>your style should be a reflection of the stories you wish to tell\u003C/strong>.\u003C/p>\u003Cp>You'll naturally uncover your distinctive voice as an animator by focusing on storytelling.\u003C/p>\u003Cp>Central to this process is your vision—an individual perspective that influences how you interpret and create stories.\u003C/p>\u003Cp>\u003Cstrong>Your artistic vision guides the choices you make\u003C/strong>, from character design to color palettes, and shapes the narratives you wish to explore. Tim Burton is a perfect example: his distinct style, characterised by gothic whimsy and quirky characters, is a direct reflection of his unique vision and creative mind. 
His animations are immediately recognizable and have become synonymous with his name.\u003C/p>\u003Cp>Keep in mind \u003Cstrong>you don't need to become overly fixated on pinning down a personal style\u003C/strong> immediately. Your style should feel natural, a genuine form of self-expression that emerges gradually as you continue to grow and experience. Your style evolves with you.\u003C/p>\u003Cp>Embracing this organic development is key, \u003Cstrong>just as stories change over time, so too will your approach to animation\u003C/strong>. By nurturing your vision while remaining open to growth, you'll find that your unique style will reveal itself.\u003C/p>\u003Chr>\u003Ch2 id=\"2-the-importance-of-consumption\">\u003Cstrong>2. The Importance Of Consumption\u003C/strong>\u003C/h2>\u003Cp>The journey to discovering our unique style often begins with what we consume: the media, art, and animation that we experience all play a crucial role in shaping our creative outlook. \u003Cstrong>It's through this consumption that we gather inspiration.\u003C/strong>\u003C/p>\u003Cp>One popular idea in the creative community is the concept of \"stealing like an artist.\" It doesn't mean copying someone else's work outright, but rather \u003Cstrong>absorbing a wide variety of influences and reimagining them\u003C/strong> in a personal and innovative way.\u003C/p>\u003Cp>By observing how other artists solve creative problems or tell compelling stories, we can develop a more refined understanding of what resonates with us and incorporate those elements into our style. 
That's how humanity evolved for thousands of years.\u003C/p>\u003Cp>This practice encourages us to borrow aspects we admire, mix them with our perspective, and ultimately create something uniquely ours.\u003C/p>\u003Cp>Hayao Miyazaki himself stood on the shoulders of giants, like his mentor Yasuo Otsuka, or French animator Paul Grimault, with The King and The Mockingbird:\u003C/p>\u003Cp>\"\u003Cem>We were formed by the films and filmmakers of the 1950s. At that time\u003C/em>,\u003Cem> I started watching a lot of films. One filmmaker who really influenced me was the French animator Paul Grimault. [...] It was through watching Le Roi et l'Oiseau by Paul Grimault that I understood how it was necessary to use space in a vertical manner.\u003C/em>\"\u003C/p>\u003Cp>But \u003Cstrong>continuous consumption without reflection or intention can dilute our originality\u003C/strong>.\u003C/p>\u003Cp>It's important to periodically step away from the influx of external influences and brain rot and engage in what might be called a creative detox―an intentional pause that allows us to process what we've absorbed, letting our natural preferences emerge without being overshadowed by current trends or the styles of others. We need to create space for our inner voice to speak.\u003C/p>\u003Chr>\u003Ch2 id=\"3-exploration\">\u003Cstrong>3. Exploration\u003C/strong>\u003C/h2>\u003Cp>Consumption is one thing, but \u003Cstrong>you also need intentional practice\u003C/strong>: exploring different artistic mediums and techniques is a great way to work on a unique style.\u003C/p>\u003Cp>One way to begin is by \u003Cstrong>engaging in traditional forms of art\u003C/strong> like drawing and painting.\u003C/p>\u003Cp>Drawing, whether a quick sketch or a detailed study, sharpens your understanding of form, line, and texture. 
It encourages you to observe the world around you with a keen eye and translate those observations into visual stories.\u003C/p>\u003Cp>Similarly, painting opens up a world of color and composition, allowing you to experiment with mood and lighting in ways that can enrich your animations.\u003C/p>\u003Cp>Sculpting is another avenue for exploration, particularly when it involves creating models for claymation. Working with your hands to mould characters and scenes brings a tactile dimension to your creative process. It challenges you to think in three dimensions and consider the physical space your characters inhabit, which can enhance your ability to convey depth and realism in your animations.\u003C/p>\u003Cp>More broadly, you can find different benefits in exploring all kinds of art forms.\u003C/p>\u003Cp>\u003Cstrong>Trying new tools is another way to push your creative boundaries\u003C/strong>: whether it's getting comfortable with a new type of software, using a tablet for digital drawing, or using animation techniques like stop-motion or 3D modeling, each tool offers unique possibilities and inspires innovative approaches.\u003C/p>\u003Chr>\u003Ch2 id=\"4-change-your-process\">\u003Cstrong>4. Change Your Process\u003C/strong>\u003C/h2>\u003Cp>Beyond tools and art techniques, finding your unique style as an animator \u003Cstrong>requires shaking up your creative routine\u003C/strong>.\u003C/p>\u003Cp>It can be incredibly beneficial to \u003Cstrong>first learn the best practices of the industry\u003C/strong>. 
Familiarizing yourself with these established methods provides a foundation on which to build, and paradoxically, they can also serve as guideposts for understanding how to break free from the norm when the time is right.\u003C/p>\u003Cp>One effective strategy for evolving your style is to \u003Cstrong>experiment with different environments, workflows, and topics\u003C/strong>.\u003C/p>\u003Cp>As previously mentioned,\u003Ca href=\"https://www.cg-wire.com/kitsu?ref=blog.cg-wire.com\"> \u003Cu>trying out new animation software or tools\u003C/u>\u003C/a> can introduce you to different ways of thinking about your art, but you can also \u003Cstrong>consider altering your usual working space\u003C/strong>.\u003C/p>\u003Cp>\u003Cstrong>Exploring unfamiliar themes or narratives\u003C/strong> in your animations challenges your usual assumptions and pushes the boundaries of your comfort zone.\u003C/p>\u003Chr>\u003Ch2 id=\"5-create-your-studio\">\u003Cstrong>5. Create Your Studio\u003C/strong>\u003C/h2>\u003Cp>Embracing your unique animation style can mean stepping outside the confines of someone else's studio.\u003C/p>\u003Cp>While working within an established studio offers invaluable experience and insight, it limits the full expression of your individual style. \u003Cstrong>To truly let your creativity flourish, you need your own space\u003C/strong>—whether it's a physical studio or a metaphorical artistic environment.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/how-to-start-an-animation-studio-as-a-freelancer/\">\u003Cu>Creating your own studio\u003C/u>\u003C/a> allows you to define the parameters of your creative process so that your artistic voice can be heard without interference. 
This freedom enables you to experiment, take risks, and explore new techniques that might not align with the established norms of other studios.\u003C/p>\u003Cp>While independence is empowering, \u003Cstrong>seeking validation and feedback from external sources remains crucial\u003C/strong>. Release your work. Engage with a community of fellow animators, mentors, and audiences to gather diverse perspectives on your work. Constructive criticism provides the motivation and direction needed to refine your style further.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>So, what does it truly mean to find your style as an animator? It's a journey of discovery—blending your influences, experimenting with different techniques, and ultimately, embracing your individuality. You can draw inspiration from other studios, but your signature as an artist will come from the authenticity you bring to your work.\u003C/p>\u003Cp>You're making more than just art, you're defining your legacy in the world of animation, while getting a chance to express who you are. Embrace the challenge and let your style speak for yourself: the magic of animation lies not in the tools we use, but in the soul we pour into our craft.\u003C/p>\u003Cp>For this reason, and leaving the ethical aspect aside, worrying about AI stealing your style is an overreaction. Nobody can tell stories like you do, even if the looks are similar, so go ahead and do it anyway!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":848,"comment_id":849,"feature_image":850,"featured":105,"visibility":10,"created_at":851,"updated_at":852,"custom_excerpt":853,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":854,"primary_tag":855,"url":856,"excerpt":853,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":857},"d1133fc6-a7e7-413b-a006-5070e556063a","6818549587083b0001edea2c","https://images.unsplash.com/photo-1630207831419-3532bcb828d7?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDE0fHxhbmltYXRpb258ZW58MHx8fHwxNzQ2NjAyMzI5fDA&ixlib=rb-4.1.0&q=80&w=2000","2025-05-05T08:03:01.000+02:00","2026-03-26T10:06:15.000+01:00","In a world filled with AI-generated art, developing a personal animation style is more important than ever. 
Discover how to explore your influences, evolve your process, and build a style that’s uniquely yours.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-art-style/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@javaistan?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Afif Ramdhasuma\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-art-style","2025-05-19T10:00:50.000+02:00",{"title":843},"animation-art-style","posts/animation-art-style",[864],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"7qyXgptKWH9q9rKz-cXBTG776saC22aUUAnqvnIzQ5A",{"id":867,"title":868,"authors":869,"body":7,"description":7,"extension":8,"html":871,"meta":872,"navigation":14,"path":883,"published_at":884,"seo":885,"slug":886,"stem":887,"tags":888,"__hash__":890,"uuid":873,"comment_id":874,"feature_image":875,"featured":105,"visibility":10,"created_at":876,"updated_at":877,"custom_excerpt":878,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":879,"primary_tag":880,"url":881,"excerpt":878,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":882},"ghost/posts:character-sheet-animation.json","Character Sheets (2026): The Blueprint for Consistent Animation",[870],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📄\u003C/div>\u003Cdiv class=\"kg-callout-text\">Consistency is key in animation—and character sheets are how you keep it. 
Learn how to build expressive, functional character sheets that support your team from concept to final frame.\u003C/div>\u003C/div>\u003Ch2 id=\"introduction\">\u003Cstrong>Introduction\u003C/strong>\u003C/h2>\u003Cp>Creating memorable characters is an art form, but animating them throughout the whole production in a consistent way is just as challenging.\u003C/p>\u003Cp>To make this work more effectively, animators rely on a simple tool: the character sheet.\u003C/p>\u003Cp>This article explores the process of creating a character sheet that not only captures the essence of your character but also serves as a comprehensive guide throughout the whole pipeline, from defining distinct visual styles to ensuring consistency across every animated movement.\u003C/p>\u003Cp>Read on to discover how to make better character sheets!\u003C/p>\u003Chr>\u003Ch2 id=\"whats-a-character-sheet\">\u003Cstrong>What's A Character Sheet\u003C/strong>\u003C/h2>\u003Cp>A character sheet is \u003Cstrong>a reference document that provides detailed information about a character's design, movements, and often personality traits\u003C/strong>.\u003C/p>\u003Cp>It typically includes a turnaround, expressions, poses, and details of specific features or clothing. It can also include notes on color schemes, proportions, and any distinctive characteristics that need to be consistent throughout production.\u003C/p>\u003Cp>In Avatar: The Last Airbender, a character sheet for the character Aang could include his bald head with the signature blue arrow tattoo, and his clothing details reflecting the Air Nomad aesthetic. 
This sheet ensures that every animator working on the series can accurately depict Aang in any given scene:\u003C/p>\u003Chr>\u003Ch2 id=\"why-a-character-sheet-is-important\">\u003Cstrong>Why A Character Sheet Is Important\u003C/strong>\u003C/h2>\u003Cp>Character sheets make sure \u003Cstrong>all animators and artists working on a project maintain consistency\u003C/strong> in the appearance and design of a character to avoid visual discrepancies that could distract the audience. This uniformity is crucial, especially in long-form animations or series, where multiple scenes might be produced by different teams or at different times.\u003C/p>\u003Cp>Character sheets typically show how they move and emote, not just what they look like. \u003Cstrong>It helps animators understand the character's personality\u003C/strong> to improve the quality of the animation.\u003C/p>\u003Cp>Character sheets are \u003Cstrong>crucial communication tools\u003C/strong> among different departments in animation production, like storyboard artists, animators, character designers, and directors: by providing a visual reference that everyone can follow, character sheets help streamline the workflow, reducing misunderstandings and saving time by minimizing the need for corrections. \u003Cstrong>It's essential for studios to save costs.\u003C/strong>\u003C/p>\u003Chr>\u003Ch2 id=\"1-character-turnaround\">\u003Cstrong>1. Character Turnaround\u003C/strong>\u003C/h2>\u003Cp>A character turnaround provides a 360-degree view of a character through a series of images:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Front view\u003C/strong>—Animators begin with a detailed front-view drawing. This is often the most straightforward angle, laying the foundation for the other views. 
The front view shows the character facing directly forward, helping the viewer understand the character’s symmetry and primary features.\u003C/li>\u003Cli>\u003Cstrong>Profile/side view\u003C/strong> - We then use horizontal guidelines to ensure features align correctly from the front. The profile view depicts the character from the side, usually the right side. It highlights the depth and silhouette of the character, showing elements like the nose, ears, and arms in profile.\u003C/li>\u003Cli>\u003Cstrong>Back view\u003C/strong> - The front view is used to mirror elements in the back view, adjusting for any asymmetrical details. The back view is a look at the character from the rear. Important for understanding back-specific features such as hairstyles, clothing details, and body posture.\u003C/li>\u003Cli>\u003Cstrong>Three-quarter views\u003C/strong> - These views are typically midway between the front and side views (both front and back). They provide a more dynamic perspective, revealing depth and how elements wrap around the character’s form. These are often the most challenging since they involve foreshortening and perspective. Use the front and side views as references.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"2-expression-sheet\">\u003Cstrong>2. Expression Sheet\u003C/strong>\u003C/h2>\u003Cp>An expression sheet is a type of model sheet that showcases various facial expressions a character can make to reflect their emotions, attitude, and personality:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Character's emotional range\u003C/strong> - The sheet displays a variety of emotions like happiness, sadness, anger, surprise, and fear. 
Each expression is carefully crafted to ensure it feels authentic to the character's personality.\u003C/li>\u003Cli>\u003Cstrong>Head angles\u003C/strong> - Often, expressions are shown from various angles (front, profile, three-quarter views) to guide animators on how a character's face changes with different perspectives.\u003C/li>\u003Cli>\u003Cstrong>Eye and mouth shapes\u003C/strong> - Detailed illustrations of how the eyes and mouth alter with each expression to convey emotion accurately.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"3-pose-sheet\">\u003Cstrong>3. Pose Sheet\u003C/strong>\u003C/h2>\u003Cp>A pose sheet explains how a character moves and behaves in different situations.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Basic poses\u003C/strong> - Standard positions that define the character's physical attributes, personality, and behaviour. It often includes the T-pose, which shows the character standing upright with arms stretched out to the sides to allow animators to see the character’s proportions and details clearly.\u003C/li>\u003Cli>\u003Cstrong>Key poses\u003C/strong> - Significant positions that reflect key actions or emotional states of the character. Examples might include running, jumping, laughing, or any other action that is characteristic of the character.\u003C/li>\u003Cli>\u003Cstrong>Silhouette views\u003C/strong> - Simple outlines of the character in various poses to check if the character’s form is readable without internal details.\u003C/li>\u003Cli>\u003Cstrong>Interaction poses\u003C/strong> - Poses that show the character interacting with objects or other characters can sometimes be included, depending on the complexity of the animation.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"4-props\">\u003Cstrong>4. Props\u003C/strong>\u003C/h2>\u003Cp>Props can be extensions of a character’s personality. 
For example, a character’s unique weapon or quirky gadget can become an iconic aspect of their identity.\u003C/p>\u003Cp>Props are often integral to the plot (e.g., magic wands in a fantasy setting, high-tech gadgets in a sci-fi story): they can drive the narrative forward, and their design should reflect this significance.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Prop contextualization\u003C/strong> - Make sure props are depicted in relation to the character. Show how a gun fits in a holster or how a hat sits on a character's head to give animators a clear understanding of scale and proportion relative to the character.\u003C/li>\u003Cli>\u003Cstrong>Multiple angles\u003C/strong> - Illustrate props from various angles and perspectives.\u003C/li>\u003Cli>\u003Cstrong>Functional breakdown\u003C/strong> - Include a breakdown of movable parts or components if the prop has functional elements (e.g., a folding umbrella or a transforming gadget). Animators need to know how these elements work mechanically to animate them believably.\u003C/li>\u003Cli>\u003Cstrong>Material and texture notes\u003C/strong> - Add notes regarding the material or texture of the prop to help during texturing and rendering. This could include glossiness, fabric types, or the reflection of light.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"5-color-palette\">\u003Cstrong>5. Color Palette\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/character-color-palettes/\">\u003Cu>Color palettes convey the character's personality\u003C/u>\u003C/a> and the overall tone of the animation.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Highlight key colors\u003C/strong> - Artists identify and list the main colors used in a character’s design: base colors for skin, hair, and clothing, along with details like eyes or accessories. 
Present these colors as swatches, typically in a row or grid format near the character illustration.\u003C/li>\u003Cli>\u003Cstrong>Shade variations\u003C/strong> - A series of shade variations are included for each key color, covering light, mid-tone, and dark shades to demonstrate how the character looks in different lighting conditions.\u003C/li>\u003Cli>\u003Cstrong>Labelling and notation\u003C/strong> - We clearly label each swatch with names or codes (such as RGB, HEX, or Pantone) to ensure consistency across different platforms and media. Artists often provide notes on the use of each color, if necessary, such as when certain colors should be used (e.g., bright light vs. shadow).\u003C/li>\u003C/ol>\u003Cp>You can also include small illustrations in the character sheet to show the character in different lighting scenarios, using the provided color palette for reference.\u003C/p>\u003Chr>\u003Ch2 id=\"6-dont-underestimate-annotations\">\u003Cstrong>6. Don't Underestimate Annotations\u003C/strong>\u003C/h2>\u003Cp>In animation, the adage \"show, don't tell\" should be a priority.\u003C/p>\u003Cp>But sometimes \u003Cstrong>you need to write things down to avoid guesswork\u003C/strong>: including annotations in character sheets is essential for clarifying important details.\u003C/p>\u003Cp>You can provide information about visual features (head, body structure, clothing, accessories, color palette, etc.), but also on turnarounds and expression sheets to help with nuances, proportions, and contextual cues.\u003C/p>\u003Cp>Annotations are also a great way to mention personality traits that might affect animation, like a character being jittery, confident, or clumsy. You can describe typical mannerisms or habits that should be reflected in movement. 
You can even note down any particular speech patterns or vocal quirks for lip-syncing.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>In conclusion, character sheets are an invaluable tool for animators to capture every detail necessary for (pre-)production.\u003C/p>\u003Cp>From the careful selection of a color palette to the turnaround, every element is meticulously analyzed for consistency. Annotations offer additional insight into a character's unique attributes, and expression sheets and pose sheets capture the emotional and physical range of the character. Props provide additional contextual storytelling.\u003C/p>\u003Cp>But creating character sheets is pointless if not grounded in\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\"> \u003Cu>strong character designs\u003C/u>\u003C/a>, so make sure to spend ample time on concept development.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":873,"comment_id":874,"feature_image":875,"featured":105,"visibility":10,"created_at":876,"updated_at":877,"custom_excerpt":878,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":879,"primary_tag":880,"url":881,"excerpt":878,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":882},"648b2437-c639-4f60-862c-7c488c9bd222","6818549787083b0001edea32","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/05/character-design-sheet-for-2d-animation-1024x545.jpg","2025-05-05T08:03:03.000+02:00","2026-03-26T10:29:53.000+01:00","Character sheets are the unsung heroes of animation production. 
From turnarounds to pose sheets, learn how these vital tools help keep characters consistent, expressive, and production-ready from start to finish.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/character-sheet-animation/","\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: 21 Draw\u003C/em>\u003C/i>","/posts/character-sheet-animation","2025-05-12T10:00:47.000+02:00",{"title":868},"character-sheet-animation","posts/character-sheet-animation",[889],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"UHVqoCao9E_c9ZUns9WUOZoM5_9nQW5goLAwxCjyzYQ",{"id":892,"title":893,"authors":894,"body":7,"description":7,"extension":8,"html":896,"meta":897,"navigation":14,"path":908,"published_at":909,"seo":910,"slug":911,"stem":912,"tags":913,"__hash__":915,"uuid":898,"comment_id":899,"feature_image":900,"featured":105,"visibility":10,"created_at":901,"updated_at":902,"custom_excerpt":903,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":904,"primary_tag":905,"url":906,"excerpt":903,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":
7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":907},"ghost/posts:appeal-animation-principle.json","The Appeal Principle (2026): The Heart of Every Great Animation",[895],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">😍\u003C/div>\u003Cdiv class=\"kg-callout-text\">Ever wonder what makes a character like Totoro or Mickey Mouse so instantly lovable? It’s all about appeal.\u003C/div>\u003C/div>\u003Cp>Why do we like animation so much when we can turn anything into live-action?\u003C/p>\u003Cp>It's all about appeal. Animation is a distinct medium for expression with its own unique appeal, and you can't translate this appeal easily to other media.\u003C/p>\u003Cp>The question becomes, how can animators bring out this unique appeal from their work?\u003C/p>\u003Cp>Think of Disney, and the iconic figure of Mickey Mouse comes to mind. 
With his distinctive round ears and cheerful expression, Mickey embodies the principle of appeal.\u003C/p>\u003Cp>Read on for common techniques and best practices to increase the appeal of your animation to Mickey Mouse levels of charisma!\u003C/p>\u003Chr>\u003Ch2 id=\"whats-appeal\">\u003Cstrong>What's Appeal\u003C/strong>\u003C/h2>\u003Cp>The appeal principle is \u003Cstrong>the concept of creating characters and visuals that are engaging to the audience\u003C/strong>.\u003C/p>\u003Cp>It's about making the characters, expressions, and movements interesting and captivating to make them worth watching.\u003C/p>\u003Cp>It doesn't necessarily mean that the character has to be conventionally beautiful or cute―even villains or unconventional characters can be appealing if they're designed with unique and memorable traits. Scar from The Lion King includes distinctive attributes like his sharp features, expressive eyes, and memorable voice that make him interesting and engaging to watch. His movements and expressions convey his cunning and charisma.\u003C/p>\u003Ch2 id=\"why-appeal\">\u003Cstrong>Why Appeal\u003C/strong>\u003C/h2>\u003Cp>In Up, the design of Carl Fredricksen with his boxy frame, square glasses, and unexpressive face conveys his stubborn yet lovable personality and the emotional depth of his character arc. \u003Cstrong>The appeal of Carl's design plays a significant role in storytelling\u003C/strong> by visually expressing his transformation from a grumpy, isolated widower to a caring, adventurous companion.\u003C/p>\u003Cp>The character Totoro is designed with a simple yet striking silhouette, large eyes, and a soft, huggable appearance. 
\u003Cstrong>His appeal makes him so memorable that\u003C/strong> \u003Cstrong>it became an iconic symbol\u003C/strong> not just for the film but also for the studio itself, cementing the character in popular culture.\u003C/p>\u003Cp>In Frozen, a secondary character like Olaf the Snowman is designed with rounded shapes, expressive eyes, and a friendly demeanour to make him instantly likeable. These appealing traits help audiences emotionally connect with him, supporting his role as a comic relief and a beloved companion to Elsa and Anna. \u003Cstrong>His appeal is also crucial in making viewers care about his character despite his secondary role\u003C/strong> in the story.\u003C/p>\u003Chr>\u003Ch2 id=\"1-character-design\">\u003Cstrong>1. Character Design\u003C/strong>\u003C/h2>\u003Cp>Character design is the process of creating the appearance, personality, and traits of a character for an animation.\u003C/p>\u003Cp>It often starts with concept drawings and evolves into more detailed 2D or 3D models that animators use as a reference.\u003C/p>\u003Cp>Character design is key for the appeal principle because \u003Cstrong>it directly influences how a character looks\u003C/strong>, both how visually attractive a character is and how effectively they communicate emotion and narrative:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Shape language\u003C/strong> -\u003Ca href=\"https://blog.cg-wire.com/character-shape-language/\"> \u003Cu>Animators make use of basic shapes\u003C/u>\u003C/a> (circles, squares, triangles) to convey different traits and personalities.\u003C/li>\u003Cli>\u003Cstrong>Solid drawing\u003C/strong> -\u003Ca href=\"https://blog.cg-wire.com/solid-drawing/\"> \u003Cu>A sense of depth and weight contributes to the believability\u003C/u>\u003C/a> and appeal of the character. 
Artists make sure that each character has a distinct silhouette that can be recognized even in shadow: a clear silhouette makes it easier for the audience to quickly understand the character’s shape.\u003C/li>\u003Cli>\u003Cstrong>Color theory\u003C/strong> -\u003Ca href=\"https://blog.cg-wire.com/character-color-palettes/\"> \u003Cu>Colors are used strategically\u003C/u>\u003C/a> to differentiate characters and reflect their personalities. Warm colors can suggest energy or aggression, while cool colors often suggest calmness or mystery. Contrasting colors can make them more striking and memorable.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"2-exaggeration\">\u003Cstrong>2. Exaggeration\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/exaggeration-animation-principle/\">\u003Cu>Exaggeration is another one of the 12 principles\u003C/u>\u003C/a> of animation that involves amplifying actions, expressions, or features to enhance the impact of a character or scene.\u003C/p>\u003Cp>Exaggeration is important for the appeal principle as well because \u003Cstrong>it makes the character's personality and actions more memorable\u003C/strong>: a character that moves, reacts, and exaggeratedly expresses emotions is way more appealing and can communicate ideas and feelings more effectively.\u003C/p>\u003Cul>\u003Cli>Animators \u003Cstrong>exaggerate facial features and expressions\u003C/strong> to more clearly depict emotions. A character's eyes pop or eyebrows arch dramatically when surprised.\u003C/li>\u003Cli>Closely related to exaggeration,\u003Ca href=\"https://blog.cg-wire.com/squash-stretch-principle/\"> \u003Cu>Squash and stretch\u003C/u>\u003C/a> is another animation principle where objects or characters are \u003Cstrong>exaggerated in their shape to convey weight and flexibility\u003C/strong>. A bouncing ball squashes on impact and stretches as it leaves the ground. 
This not only makes the motion more dynamic but also communicates the material properties of the object. The same principle can be applied to depict interacting forces, for example, during a chase or a fight scene, to make them look more interesting to watch.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"3-poses-overlapping-actions\">\u003Cstrong>3. Poses &amp; Overlapping Actions\u003C/strong>\u003C/h2>\u003Cp>Poses and facial expressions influence how we perceive actions and emotions.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Introducing a diverse range of poses\u003C/strong> throughout a scene maintains the viewer's interest. If a character is sad, instead of only showing slumped shoulders, you would include other poses like head in hands or a deep sigh with a turned-away face.\u003C/li>\u003Cli>Animators design \u003Cstrong>strong, readable poses to clearly convey the character’s emotions and intentions\u003C/strong>, even in silhouette, using lines of action. When a character experiences surprise, their body language (wide eyes, open arms) emphasizes this emotion clearly in one frame.\u003C/li>\u003Cli>\u003Cstrong>Overlapping actions\u003C/strong> add a sense of fluidity and realism: actions should not start and end simultaneously to reflect inertia and weight. When a character turns their head, animators let the hair or loose garments follow through the motion with a slight delay. Animate different parts of the body slightly out of phase to achieve a more natural result.\u003C/li>\u003Cli>Similarly, \u003Cstrong>secondary actions\u003C/strong> support the main action while adding depth to the scene. If a character is walking sadly, we include subtle actions like a sad hat bobbing with the step to enrich the primary action.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"4-timing\">\u003Cstrong>4. 
Timing\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/timing-animation-principle/\">\u003Cu>Timing determines how quickly or slowly actions appear on the screen\u003C/u>\u003C/a> and shapes the flow of the animation.\u003C/p>\u003Cp>Poor timing has a drastic effect on the appeal of an animation. You can use it to make a viewer feel the weight of an object or character. Heavier objects require slower acceleration and deceleration, while lighter ones move more quickly. It can be applied, for instance, in a scene where a character lifts or drops an object, but also for a jump, a walk, a run, etc.\u003C/p>\u003Cul>\u003Cli>By adjusting the timing before a major action, \u003Cstrong>you can build anticipation\u003C/strong>. A character pulling back before a punch or a jump can have a slower timing than the punch or jump itself to create tension and make the subsequent action more impactful.\u003C/li>\u003Cli>\u003Cstrong>Varying the timing of character movements can also communicate different emotional states\u003C/strong>. Quick, sharp movements suggest excitement, anger, or surprise, whereas slow, deliberate movements show sadness, tiredness, or contemplation. In comedic sequences, the precise timing of delayed reactions, quick movements, and sudden pauses is crucial.\u003C/li>\u003Cli>\u003Cstrong>The overall pace of an animation sets the tone\u003C/strong>: a fast-paced sequence suggests action or chaos, while a slow, deliberate pace creates a serene or suspenseful atmosphere.\u003C/li>\u003C/ul>\u003Cp>A lack of timing results in a lifeless scene that's boring to watch and easily forgotten.\u003C/p>\u003Chr>\u003Ch2 id=\"5-camera\">\u003Cstrong>5. 
Camera\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/camera-work-in-animation/\">\u003Cu>Camera work is not only a means to guide the viewers through the story but also a storytelling tool\u003C/u>\u003C/a> to make the animation more appealing.\u003C/p>\u003Cul>\u003Cli>Using \u003Cstrong>dynamic camera angles adds drama and scale\u003C/strong>. Perspectives like bird’s eye or worm’s eye views immerse the audience further into the animated world. Dutch angles–where the camera is tilted slightly–introduce a sense of unease or tension.\u003C/li>\u003Cli>\u003Cstrong>Smooth camera movements maintain audience engagement\u003C/strong>. Tracking shots that follow the action draw viewers into the unfolding scene. Panning and tilting reveal important scene elements sequentially, naturally guiding the viewer’s focus.\u003C/li>\u003Cli>\u003Cstrong>Zoom techniques elevate tension and emphasize significant details\u003C/strong>. The dolly zoom, or Vertigo effect, creates a disorienting visual impression by simultaneously zooming out while moving the camera closer (or vice versa), adding an intense effect for pivotal moments.\u003C/li>\u003Cli>\u003Cstrong>Composition is also critical in creating visually appealing shots\u003C/strong>. The rule of thirds helps position main elements along grid lines to achieve a balance that pleases the eye. Using leading lines like roads or buildings guides viewer attention directly to the central subject, subtly reinforcing narrative intentions.\u003C/li>\u003C/ul>\u003Cp>Over-the-shoulder shots for intimate conversations, point-of-view shots for perspective shifts, depth of field manipulation, transitions for pacing... the list goes on and on.\u003C/p>\u003Chr>\u003Ch2 id=\"6-audio\">\u003Cstrong>6. 
Audio\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/sound-design-in-animation-definition-process-challenges/\">\u003Cu>Audio creates a richer, more immersive experience\u003C/u>\u003C/a> for the audience by complementing the visual elements of animation.\u003C/p>\u003Cul>\u003Cli>First, selecting voices that align with the personalities and physical traits of the characters is key. \u003Cstrong>The right voice enriches a character's uniqueness and relatability to make them more memorable and engaging\u003C/strong>. An expressive vocal performance conveys emotions and character growth. By choosing distinctive voice actors, characters are given life beyond their visual representation.\u003C/li>\u003Cli>\u003Cstrong>Background sounds breathe life into scenes\u003C/strong>, whether it's the hustle and bustle of a city, the serene ambiance of a forest, or the calming crash of ocean waves. These subtle layers provide texture, enriching the setting and adding another dimension to the viewer's experience without drawing attention away from the narrative focus.\u003C/li>\u003Cli>\u003Cstrong>Timing sound effects precisely with on-screen actions reinforces the physicality of movements\u003C/strong> and establishes a clear cause-and-effect relationship. This synchronization helps ground the animation in a reality that is believable, even when the visuals stretch beyond the plausible.\u003C/li>\u003Cli>Developing \u003Cstrong>unique sound cues for recurring character actions\u003C/strong> or traits, such as distinctive footsteps or memorable themes, can reflect their personality or mood. These auditory signatures become a part of the character's identity.\u003C/li>\u003C/ul>\u003Cp>Equally important, \u003Cstrong>introducing moments of silence effectively builds tension or highlights visual elements\u003C/strong>. 
Strategically placed quiet moments allow emotions to linger and resonate more deeply with the audience.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>In summary, appeal is a cornerstone of animation that determines a project's success with audiences. We've explored how various elements like character design, audio, camera work, timing, exaggeration, and poses can significantly add to the appeal of a scene, but there are many more elements to take into account to make animations more appealing!\u003C/p>\u003Cp>All animation elements and tools can be used to increase the appeal of the end result, whether it's a unique art style or the creative use of colors. All you have to keep in mind as an animator is how well you can tell a story to an audience, and the rest will unfold naturally.\u003C/p>\u003Cp>Make sure to\u003Ca href=\"https://blog.cg-wire.com/12-principles-animation/\"> \u003Cstrong>\u003Cu>read about the other 11 foundational animation principles\u003C/u>\u003C/strong>\u003C/a> and how they integrate with the appeal principle!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":898,"comment_id":899,"feature_image":900,"featured":105,"visibility":10,"created_at":901,"updated_at":902,"custom_excerpt":903,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":904,"primary_tag":905,"url":906,"excerpt":903,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":907},"279f6c5e-cde7-4b63-a28e-93f5f273cd83","6818548f87083b0001edea20","https://images.unsplash.com/photo-1615493749953-742903db7e9d?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDEwfHxoYXBweSUyMGNoYXJhY3RlcnxlbnwwfHx8fDE3NDY0MjUzMzV8MA&ixlib=rb-4.0.3&q=80&w=2000","2025-05-05T08:02:55.000+02:00","2026-03-26T10:25:19.000+01:00","From iconic silhouettes to strong poses and perfect timing—appeal is what makes animated characters stick. 
Learn how to master this vital animation principle and build more engaging, memorable work.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/appeal-animation-principle/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@prince_perry?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Perry Merrity II\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/appeal-animation-principle","2025-05-05T10:00:50.000+02:00",{"title":893},"appeal-animation-principle","posts/appeal-animation-principle",[914],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"58ZU5HfHn-zzk7d2G79akwlPayW7-v_gYJE0R8O3RX0",{"id":917,"title":918,"authors":919,"body":7,"description":7,"extension":8,"html":921,"meta":922,"navigation":14,"path":933,"published_at":934,"seo":935,"slug":936,"stem":937,"tags":938,"__hash__":940,"uuid":923,"comment_id":924,"feature_image":925,"featured":105,"visibility":10,"created_at":926,"updated_at":927,"custom_excerpt":928,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":929,"primary_tag":930,"url":931,"excerpt":928,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":932},"ghost/posts:timing-animation-principle.json","Timing Animation Principle (2026): The Hidden Key to Better Storytelling",[920],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">😀\u003C/div>\u003Cdiv class=\"kg-callout-text\">Timing is everything in animation. 
Get it right, and your scenes come to life—get it wrong, and you lose the magic.\u003C/div>\u003C/div>\u003Ch2 id=\"introduction\">\u003Cstrong>Introduction\u003C/strong>\u003C/h2>\u003Cp>We can all remember a scene where a character is interrupted mid-action with an awkward pause—just enough to induce a chuckle: this comedic timing is a perfect example of how important timing is in animation for storytelling.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXchOK682m_JCJiaFIBNdRMXIkMsaosbSyETGvAUC5XmfXpEP57EofiqtZ2keBglvVi30ChS1rypx2c61c6im0kYmWFr5_ck6Ad8ydM9CiM9xt4RPESkiokWlafBQy9y5rOdEwfO?key=efl3qnIZjmsN6y8rnyUBQP9A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"464\" height=\"279\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Hitpig!\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Timing isn't just about getting a sequence right: it's a tool to create more engaging animations, and all animators need to master it.\u003C/p>\u003Cp>In this article, we explore what's timing―one of\u003Ca href=\"https://blog.cg-wire.com/12-principles-animation/\"> \u003Cu>the 12 principles of animation\u003C/u>\u003C/a>―and how it's refined during the production process to make more impactful animations.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-timing\">\u003Cstrong>What's Timing\u003C/strong>\u003C/h2>\u003Cp>Timing is the number of frames or the amount of time between two key poses or actions: it determines both the speed and the fluidity of motion in an animation.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXd-cWFAm02Dr5QM1SXBUff2_Ki9mq8Tc4x9E8vKDfvtEDzs5SZAz0ueUEn9Id9r32MoSugHLNqYlJ6pRCvNxXLQMl2oJBEP8k4kRTIZ9f_Z-M0jdFDvwdUvGTn7LjKFNYYq9bngcg?key=efl3qnIZjmsN6y8rnyUBQP9A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"459\" 
height=\"248\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: fullfrontal.moe\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Timing is not to be confused with frame rate―the number of frames per second―or spacing―the distance between two frames or poses.\u003C/p>\u003Cp>A simple change in timing has a noticeable effect on the animation.\u003C/p>\u003Chr>\u003Ch2 id=\"why-timing-is-important\">\u003Cstrong>Why Timing Is Important\u003C/strong>\u003C/h2>\u003Cp>First, timing is key for realism: in the real world, different objects and characters move at different speeds because of their size, weight, and the forces acting upon them. A heavy object takes longer to start or stop moving. Animators create more believable animations by replicating these factors through precise timing.\u003C/p>\u003Cp>As mentioned in the introduction, timing is also a powerful tool for storytelling: by manipulating the speed of movements and the duration of pauses, animators emphasize certain moments and bring depth to character actions and reactions. Slowing down a scene underscores a dramatic moment to allow the audience to absorb the significance of what's happening. Speeding up a sequence injects excitement and urgency to propel the story forward with energy.\u003C/p>\u003Cp>Good timing is a prerequisite of good pacing, which is crucial for keeping the audience invested. The rhythm of an animation, dictated by how quickly or slowly objects move, keeps viewers attentive. For example, an action sequence with rapid timing captures the audience's attention and conveys a thrilling sense of speed. But in a contemplative scene, a slower timing invites viewers to reflect and connect emotionally with the characters.\u003C/p>\u003Cp>Animators have several tools available to improve the timing of a scene.\u003C/p>\u003Chr>\u003Ch2 id=\"1-use-reference-footage\">\u003Cstrong>1. 
Use Reference Footage\u003C/strong>\u003C/h2>\u003Cp>Timing uses reference footage as a practical guide for animators.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXcaxyzLY8fR20-kD0sNm2sgM6YnZEbV8l8PzmKVsiA_cpYhUVoWP8MozgFaJwHkyv5jNk2wBq0eHrxXz0pF41S1qm5U3Iyii4NoWExQL2vohk1MwU0bZrrQj0lHEuXA5ubGGoAT6A?key=efl3qnIZjmsN6y8rnyUBQP9A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"217\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Alice In Wonderland, Disney\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Analyzing reference footage offers animators a sense of keyframes—the main poses that define the action of a sequence―and transitions.\u003C/p>\u003Cp>You can also use reference footage to precisely measure the timing between keyframes. These time numbers are foundational during the animation process.\u003C/p>\u003Cp>Discrepancies or awkward transitions between poses can disrupt sequences, but reference footage helps identify potential posing problems before production.\u003C/p>\u003Chr>\u003Ch2 id=\"2-time-charts\">\u003Cstrong>2. 
Time Charts\u003C/strong>\u003C/h2>\u003Cp>A time chart is a diagram or a series of annotations that indicate how frames are distributed over time: a visual representation of the timing for a particular scene.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXcbu08TtKdGx6BLvx75QgDK8RKfOVynsqQQl_HaZO5nBTOlIuS-nQ0mrLYpID9X_BXYQXwBoFpoOUZCHWHCJDhsCFG0FoAZKnM9f7UPTUZab0OcnTltHpZE5z-sb6hbGx5LGZJKjw?key=efl3qnIZjmsN6y8rnyUBQP9A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"273\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Animost Studio\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Time charts are often found in the margins of exposure sheets (X-sheets) or as part of a storyboard.\u003C/p>\u003Cp>A time chart helps maintain consistent timing across a sequence and allows animators to plan the number of frames needed for a particular action. It's a communication tool to help multiple animators understand how their individual segments fit into the overall sequence.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Plotting keyframes\u003C/strong> - Animators begin by determining the keyframes, which are then plotted on the time chart at specific intervals.\u003C/li>\u003Cli>\u003Cstrong>In-betweening\u003C/strong> - The time chart shows how many in-betweens are needed to transition from one keyframe to the next. These in-betweens determine how smooth or fast the movement appears.\u003C/li>\u003Cli>\u003Cstrong>Adjusting timing\u003C/strong> - By adjusting the spacing between frames on the time chart, animators can fine-tune the timing of the action. 
Increasing the spacing speeds up an action.\u003C/li>\u003Cli>\u003Cstrong>Ease in and ease out\u003C/strong> - Time charts can also show easing animations, where actions start slowly (slow in) or conclude slowly (slow out), adding a more natural movement to the animation.\u003C/li>\u003Cli>\u003Cstrong>Review and refinement\u003C/strong> - Animators frequently refer back to the time chart during reviews to make sure the timing remains consistent throughout the iterative production process.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"3-exposure-sheets\">\u003Cstrong>3. Exposure Sheets\u003C/strong>\u003C/h2>\u003Cp>An exposure sheet (X-sheet) is a vertical document divided into rows and columns where each row represents a single frame and columns represent different aspects of the animation like dialogue, music, sound effects, descriptions, and various annotations.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXfDVcojqBbpJXh-rulb14fcG3LZhsNSDjCRr6T_9zBj_W6Of_ISzVR0gpHyd_zDLd-8Zh4cG6dZXuluMGBXcwR6XvdV8QFlUtiV42H78frljNEgP5-RLHvpOO9843Xr_dA_ImarwA?key=efl3qnIZjmsN6y8rnyUBQP9A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"623\" height=\"841\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Cartoon Buzz\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Animators use notations and symbols to represent the timing of different actions, including keyframes, in-betweens, holds, and other timing cues.\u003C/p>\u003Cp>Like time charts, an exposure sheet provides a clear reference for every animator working on a project to maintain consistency while planning complex scenes.\u003C/p>\u003Chr>\u003Ch2 id=\"4-onion-skinning\">\u003Cstrong>4. 
Onion Skinning\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/p/6d2adb16-3154-4330-bd22-8617b2e7d658/\" rel=\"noreferrer\">Onion skinning\u003C/a> is a feature in animation software that allows animators to see multiple frames simultaneously in a semi-transparent manner, like layers of an onion, hence the name.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXcCMVGzievXovg4KOVpAn9L3m3RReACxSTOuR8gRBJZqUQrjDXsatQuxn3_VDULndGf11yVAztixUQDZkczx3JIm3kmkeOjN5HcpD1Sew3xfbxxIFNiFbJGtxhDfnmdSg3ieo6T?key=efl3qnIZjmsN6y8rnyUBQP9A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"316\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Documentation\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Onion skinning is essential for getting a clear understanding of the flow of motion and timing: animators can visually gauge the spacing between frames to edit the timing of movements and transitions. It also helps easily spot discrepancies in motion paths or unintended jumps in sequences for early correction.\u003C/p>\u003Cp>For complex sequences with multiple moving parts, onion skinning provides a clear view of how different elements interact over time.\u003C/p>\u003Chr>\u003Ch2 id=\"5-motion-trails\">\u003Cstrong>5. 
Motion Trails\u003C/strong>\u003C/h2>\u003Cp>Motion trails represent the path of an object through space over time, often depicted as a sequence of overlapping lines that show the object's previous and future positions:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXepw9_aXHjxtLMHCtETa10yM0tSM-6ZyvIUM8y-oc41PAXuk9_6fd-0E5U63I-gwGX6Txzrnsg1b-BIe7ROGes0kIHO7hB2N_hwJDCaFoMYpK7-J5m5lGF4Wdnf9RkK2F3jrKoVuA?key=efl3qnIZjmsN6y8rnyUBQP9A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"316\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Manual\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Motion trails make it sometimes easier than onion skinning to see the distribution of keyframes and in-betweens: animators can assess whether the spacing between frames achieves the desired timing, whether they want quick, snappy motion (with frames closer together) or slow, languid movement (with frames spaced further apart).\u003C/p>\u003Cp>They also show the trajectory of the movement. Arcs are a fundamental principle of animation that contributes to fluidity.\u003C/p>\u003Chr>\u003Ch2 id=\"usage-with-other-animation-principles\">\u003Cstrong>Usage With Other Animation Principles\u003C/strong>\u003C/h2>\u003Cp>Because timing is such an important part of animation, it's often indistinguishable from other animation principles:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Slow in/out\u003C/strong> - Timing determines\u003Ca href=\"https://blog.cg-wire.com/slow-in-out/\"> \u003Cu>how an object accelerates and decelerates\u003C/u>\u003C/a>, giving weight and realism. 
In the case of a bouncing ball, incorrect timing lacks the organic feel of gravity’s effect.\u003C/li>\u003Cli>\u003Cstrong>Follow through\u003C/strong> - If a character comes to a sudden halt after running,\u003Ca href=\"https://blog.cg-wire.com/follow-through-overlapping-action/\"> \u003Cu>their clothes and hair would continue to move slightly forward for a few frames\u003C/u>\u003C/a> due to inertia before settling: if timed too fast, it appears snappy and unrealistic, while if too slow, it loses the sense of energy and momentum.\u003C/li>\u003Cli>\u003Cstrong>Overlapping action\u003C/strong> - Timing helps distinguish between the overlapping motions of different parts of a character or object. If your character waves a hand while walking, you'll need different timing between the arm and the legs to make the sequence more lifelike.\u003C/li>\u003Cli>\u003Cstrong>Anticipation\u003C/strong> - Timing is crucial to build up anticipation. If a character is about to jump, anticipation is having the character bend their knees and pull their arms back. If the timing is too short, the jump feels sudden and unprepared, but if too long, it can break the flow and lose the viewer.\u003C/li>\u003C/ul>\u003Cp>If animation is a series of poses, the timing between these poses is always something to consider.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Timing is a fundamental principle for animators to master. 
To do that, they can rely on tools like reference footage, time charts, exposure sheets, and onion skinning.\u003C/p>\u003Cp>Good timing is also a pillar of other animation principles like slow in and slow out, follow through, overlapping action, and anticipation: you need to understand each principle individually but also take into account how they bounce off each other.\u003C/p>\u003Cp>Make sure not to underestimate its impact in your next work!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>",{"uuid":923,"comment_id":924,"feature_image":925,"featured":105,"visibility":10,"created_at":926,"updated_at":927,"custom_excerpt":928,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":929,"primary_tag":930,"url":931,"excerpt":928,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":932},"6f4cfe8b-87ec-40ad-83ca-6b1cdd757be9","67fcb49d0097450001312566","https://images.unsplash.com/photo-1450897918656-527057db59d3?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fHJ1bm5pbmclMjBsYXRlfGVufDB8fHx8MTc0NDYxNjA4OXww&ixlib=rb-4.0.3&q=80&w=2000","2025-04-14T09:09:17.000+02:00","2026-02-20T06:05:04.000+01:00","Timing controls the pace, rhythm, and emotional weight of your animations. 
Learn how to refine your timing using tools like time charts, onion skinning, and motion trails to bring your stories to life.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/timing-animation-principle/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@andybeales?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Andy Beales\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/timing-animation-principle","2025-04-28T10:00:38.000+02:00",{"title":918},"timing-animation-principle","posts/timing-animation-principle",[939],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"fwuc5AOvxuYLKb_jHZ6pKULa_sudf3Qf898-oDQtl24",{"id":942,"title":943,"authors":944,"body":7,"description":7,"extension":8,"html":946,"meta":947,"navigation":14,"path":958,"published_at":959,"seo":960,"slug":961,"stem":962,"tags":963,"__hash__":965,"uuid":948,"comment_id":949,"feature_image":950,"featured":105,"visibility":10,"created_at":951,"updated_at":952,"custom_excerpt":953,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":954,"primary_tag":955,"url":956,"excerpt":953,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":957},"ghost/posts:onion-skinning.json","Why Onion Skinning Is Every Animator’s Secret Weapon (2026)",[945],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧅\u003C/div>\u003Cdiv class=\"kg-callout-text\">Want to animate like a time traveler? 
Onion skinning shows you past, present, and future frames all at once.\u003C/div>\u003C/div>\u003Cp>Animators are time travellers: they need to visualize past, present, and future frames to create convincing animations.\u003C/p>\u003Cp>Modern tools help quickly visualize the flow of a character’s motion to identify what needs editing before the entire sequence plays out―onion skinning is one of them.\u003C/p>\u003Cp>Read on to learn how to see multiple points in time simultaneously to make better animations, faster.\u003C/p>\u003Chr>\u003Ch2 id=\"what-is-onion-skinning\">\u003Cstrong>What is Onion Skinning?\u003C/strong>\u003C/h2>\u003Cp>Onion skinning is a tool used in animation to see multiple frames of an action at once.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXeBk98p02_GhsWLkwo6aUR9ebw01nmlIu2m6AYhgLNMo8pmgRu0WhlXWc-TEcMlsHUItN4mMNGkES_rVlISep5340asjKrFIXBVV7poCHr_JhaZIsuoOMWsABUHTlA8rTPgkJLWEw?key=g0FOzCquyy3OxeMvCCnu-SEL\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"480\" height=\"279\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: SideFX\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>The term \"onion skinning\" originates from the metaphor of peeling back layers of an onion: by displaying frames before and after the current one in a translucent manner with a faint overlay, onion skinning shows the progression from one frame to the next―the past, present, and potential future frames merge together on screen.\u003C/p>\u003Chr>\u003Ch2 id=\"why-onion-skinning\">\u003Cstrong>Why Onion Skinning\u003C/strong>\u003C/h2>\u003Cp>Onion skinning used to be done manually by layering translucent sheets of paper (called \"onionskin\"), with each sheet containing a different frame.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg 
src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXd7Nm9zOSosyqlv3eS7huSMKZrXEHOFjJPopzBRt4yZzL5rfWJ7VXqYxKFmSXnJHI9EvKmvrQkosooeenmi9ZX7y2octRNUjGezpoMUwn2SfAZ9siX9CIKad8SCMafgFH1KlaBI?key=g0FOzCquyy3OxeMvCCnu-SEL\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"351\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: The Art of Aaro, Youtube\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>This method provided animators with a tactile, intuitive understanding of motion, but it was time-consuming and displayed a limited capacity for revisions: changes often required redrawing entire sequences!\u003C/p>\u003Cp>Modern digital onion skinning revolutionizes the process.\u003C/p>\u003Cp>First, there is the obvious productivity benefit: by allowing animators to see multiple frames at once, onion skinning provides a visual reference that enables smoother and more consistent motion throughout the animation sequence. It saves time and effort that would otherwise be spent constantly toggling back and forth between frames. You can make corrections more easily, and you also get advanced user interface features like adjustable transparency and color-coded frames.\u003C/p>\u003Cp>Onion skinning also enables a whole new level of precision in an animator's work: you can more accurately determine the position and timing of elements and ensure a smoother progression of movement. 
It's especially important in complex scenes with fast motion.\u003C/p>\u003Cp>Since animators can instantly preview how their edits affect the overall sequence, they can identify and correct mistakes early on, minimizing the need for extensive revisions later and thus reducing costs.\u003C/p>\u003Chr>\u003Ch2 id=\"onion-skinning-in-blender\">\u003Cstrong>Onion Skinning In Blender\u003C/strong>\u003C/h2>\u003Cp>Onion skinning comes with potential challenges: animators can encounter software limitations or face visual clutter due to multiple overlays when using this technique, and animators need to master onion skinning features to overcome these issues.\u003C/p>\u003Cp>In Blender, onion skinning provides visual cues or \"ghosts\" of frames in an animation sequence, both before and after the current frame.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXe7yRDX6X3S8lrsNarD9Bz8WwfH4q8WDgGOMPzMY0ZEi6HTT5Nx6HzzKlVYi8mZhDKMFkqi9Wt7l-GQipa-fqsGMJeyFyDqEjaXqARdcr8UptxZIvvlzyNxGuoiOLqtgTv6fKCVEg?key=g0FOzCquyy3OxeMvCCnu-SEL\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"316\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Manual\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>You can toggle the main onion skinning visibility through the Viewport Overlays. For a more customized experience, particularly when using Grease Pencil, onion skinning can also be activated on a per-layer basis from the layer list.\u003C/p>\u003Cp>You can, of course, customize the onion skinning depending on your workflow:\u003C/p>\u003Cul>\u003Cli>The \"Mode\" selection determines how the frames to be ghosted are picked. By choosing \"Keyframes,\" Blender will show keyframes in the range specified by your \"Before\" and \"After\" settings. If you select \"Frames,\" it will show the frames based on the same range settings. 
The \"Selected\" mode will display only those keyframes that you've manually selected in the Dope Sheet, offering precise control over which frames are ghosted.\u003C/li>\u003Cli>Opacity lets you control how prominently the ghost frames appear. This can be crucial for maintaining clarity and helping you focus on either the primary animation or its surrounding context. The \"Filter by Type\" option refines what kinds of frames are included in the onion skinning display, allowing for more specific frames to be visualized.\u003C/li>\u003Cli>To help distinguish between past and future frames, Blender provides the option to color-code them: previous frames are shown in one color (often red) and future frames in another (often green or blue). This color distinction makes it easier to differentiate between frames quickly and helps in planning the movement of objects and characters.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXdTTQQbWjdZZvBuduhIUlhQv0L-4iJAiDGgZ-kFQhhjeKrdDKrcTeQdI6L2z-dDrlcZwDW6FCZZ6ndXkjAbKgaLRTHzFBTO3Bor1FLiMUK1k0v4EnTAW9PEzWLjUbjhJabFuk3wFw?key=g0FOzCquyy3OxeMvCCnu-SEL\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"292\" height=\"325\">\u003C/figure>\u003Cul>\u003Cli>For display tweaks, the \"Fade\" setting gradually decreases the opacity of ghost frames the further they are from the current frame to focus your attention appropriately.\u003C/li>\u003Cli>The \"Show Start Frame\" feature is particularly beneficial for loop animations: it allows the animator to visualize the first keyframe or frame as a ghost when working on the last frame of an animation, essentially enabling a seamless loop cycle.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"use-cases\">\u003Cstrong>Use Cases\u003C/strong>\u003C/h2>\u003Cp>Onion skinning plays a key role in\u003Ca href=\"https://blog.cg-wire.com/12-principles-animation/\"> \u003Cu>integrating several animation 
principles\u003C/u>\u003C/a> during production. Depending on the task at hand, you'll probably need to tweak the onion skinning settings to focus on relevant frames:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Timing\u003C/strong> - Onion skinning allows animators to visualize the spacing of frames, which is key to work on the timing of the animation more effectively. By observing the sequence of frames laid on top of each other, animators can discern if the motion is too fast or too slow and make precise edits.\u003C/li>\u003Cli>\u003Cstrong>Anticipation\u003C/strong> - Anticipation is about preparing the audience for an action. Using onion skinning, animators can see preceding and following frames to ensure that anticipation actions (like a character crouching before jumping) are effectively depicted.\u003C/li>\u003Cli>\u003Cstrong>Follow-through and overlapping actions\u003C/strong> - Follow-through are\u003Ca href=\"https://blog.cg-wire.com/follow-through-overlapping-action/\"> \u003Cu>secondary actions that continue after the primary action\u003C/u>\u003C/a> has been completed. Onion skinning allows animators to keep track of these secondary motions by observing how they evolve frame by frame. Filtering frames helps animators fine-tune overlapping actions like hair, clothing, or appendages that move at a slightly delayed timing relative to the main action.\u003C/li>\u003Cli>\u003Cstrong>Slow in/out\u003C/strong> -\u003Ca href=\"https://blog.cg-wire.com/slow-in-out/\"> \u003Cu>Slow in and slow out\u003C/u>\u003C/a> relate to the easing of animations where actions start slowly, pick up speed, and then slow down again toward the end. Through onion skinning, animators make sure more frames are used at the beginning and end of an action for smooth deceleration or acceleration. 
It's a visual representation of how densely frames are packed together.\u003C/li>\u003Cli>\u003Cstrong>Pose-to-pose\u003C/strong> - Pose-to-pose animation involves drawing key poses and then filling in the in-between actions. With onion skinning, animators can efficiently create these in-betweens manually or automatically through software by observing how their key poses transition into one another.\u003C/li>\u003C/ul>\u003Cp>Last but not least, onion skinning can be used to create an illusion of motion blur. By examining the ghosted images, animators strategically animate intermediate frames that simulate streaks or blurred edges to create an illusion of speed:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXedEkgrMir5bdXJxrcPFFWmvMRQvoLiMFo83qvipS559rqFg_VkbxfI2SgnWDvAOhthQx7vGtLFzgfyUK7uq-wTEdHfRywldDgXjSBqGa0P_ooGNJWY1X696Mx1r2PPbhsVLiMH?key=g0FOzCquyy3OxeMvCCnu-SEL\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"307\" height=\"230\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Wikipedia\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Onion skinning remains an essential technique that bridges both the rich history of traditional animation and the innovation of modern digital methods. It provides animators with the ability to view multiple frames simultaneously to create smooth movements and transitions.\u003C/p>\u003Cp>While it might seem like just another tool, its impact is far-reaching across various workflows. Make sure to leverage it! 
For example, you could create custom keyboard shortcuts in your DCC tool to quickly navigate between frames or toggle settings while using onion skinning.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":948,"comment_id":949,"feature_image":950,"featured":105,"visibility":10,"created_at":951,"updated_at":952,"custom_excerpt":953,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":954,"primary_tag":955,"url":956,"excerpt":953,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":957},"6d2adb16-3154-4330-bd22-8617b2e7d658","67fcb49f009745000131256c","https://images.unsplash.com/photo-1668295037469-8b0e8d11cd2a?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fG9uaW9uJTIwc2tpbm5pbmd8ZW58MHx8fHwxNzQ0NjE1MzU3fDA&ixlib=rb-4.0.3&q=80&w=2000","2025-04-14T09:09:19.000+02:00","2026-02-20T06:04:47.000+01:00","Onion skinning helps animators see past and future frames at a glance, making smoother motion, better timing, and fewer mistakes possible. 
Learn how this powerful tool works and why it’s still a must-have in modern animation workflows.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/onion-skinning/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@eprouzet?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Eric Prouzet\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/onion-skinning","2025-04-21T10:00:22.000+02:00",{"title":943},"onion-skinning","posts/onion-skinning",[964],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"Hr_0lUH6Ygv5vYaK8SRQxzHOS9XRBk5ERARt5gkkwCM",{"id":967,"title":968,"authors":969,"body":7,"description":7,"extension":8,"html":971,"meta":972,"navigation":14,"path":983,"published_at":984,"seo":985,"slug":986,"stem":987,"tags":988,"__hash__":990,"uuid":973,"comment_id":974,"feature_image":975,"featured":105,"visibility":10,"created_at":976,"updated_at":977,"custom_excerpt":978,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":979,"primary_tag":980,"url":981,"excerpt":978,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":982},"ghost/posts:hard-surface-modeling.json","Hard Surface Modeling (2026): The Backbone of 3D Animation",[970],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">😀\u003C/div>\u003Cdiv class=\"kg-callout-text\">From spaceships to coffee makers—if it’s made by humans, it’s modeled with hard surfaces.\u003C/div>\u003C/div>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/3d-modeling-animation/\">\u003Cu>3D 
modeling\u003C/u>\u003C/a> is seriously hard.\u003C/p>\u003Cp>Just try to imagine your favorite coffee shop as an example: list down all the different assets that you'd need to model to animate a scene in it, and then remember you need to do this for every new scene. Even with reused assets, an animated production takes tremendous time and effort to see the day!\u003C/p>\u003Cp>The people taking care of these environments and props are called hard surface modelers, and this article explores the process and techniques involved in professional hard surface modeling.\u003C/p>\u003Cp>Read on to get an overview of how things work behind the scenes:\u003C/p>\u003Chr>\u003Ch2 id=\"whats-hard-surface-modeling\">\u003Cstrong>What's Hard Surface Modeling\u003C/strong>\u003C/h2>\u003Cp>In 3D modeling, a hard surface is anything that has been manufactured or is machine-made, unlike organic modeling that focuses on \"natural\" objects like humans, animals, plants, etc.\u003C/p>\u003Cp>Rigid objects that don’t deform are typically done with hard-surface modeling.\u003C/p>\u003Cp>Though they are made of rubber and do deform, modeling a car’s wheels would also be regarded as hard-surface modeling because they still have a smooth surface.\u003C/p>\u003Cp>A model is said to be organic if it has smooth curves where the shape seamlessly transitions to another model.\u003C/p>\u003Chr>\u003Ch2 id=\"why-hard-surface-modeling-is-important\">\u003Cstrong>Why Hard Surface Modeling Is Important\u003C/strong>\u003C/h2>\u003Cp>Hard surface modeling is key to create believable animated worlds. Objects like vehicles, buildings, machines, and various props that populate scenes use hard surface modeling techniques. 
All these elements contribute to the storytelling by providing a relatable context within which the characters evolve.\u003C/p>\u003Cp>In the production pipeline, hard surface models lay the groundwork for animation sequences: rigging these objects demands that they be modeled with animation in mind, ensuring pivots and movement ranges are accurate for the scenes.\u003C/p>\u003Cp>Understanding hard surface modeling is equally important because it requires a specialized set of skills and best practices that contrasts with character modeling. It involves understanding materials, reflection properties, and the way different surfaces interact in terms of physics and function: a car chase requires detailed hard surface models that can realistically simulate motion, crashes, and other dynamic interactions. Organic modeling has a similar but different approach, with different processes and techniques involved.\u003C/p>\u003Chr>\u003Ch2 id=\"the-elements-of-a-hard-surface\">\u003Cstrong>The Elements Of A Hard Surface\u003C/strong>\u003C/h2>\u003Cp>One of the key takeaways of this article is how hard surface modeling uses a different topology―the arrangement, flow, and structure of vertices, edges, and faces that make up a 3D model's surface―than organic modeling.\u003C/p>\u003Cp>Good topology not only supports efficient animation and rendering but also ensures that the model can be easily rigged and textured.\u003C/p>\u003Cp>Here are some key terms used to talk about hard surfaces:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Vertices\u003C/strong> - Points in the 3D space.\u003C/li>\u003Cli>\u003Cstrong>Edges\u003C/strong> - Edges are the lines connecting two vertices. Clean edge placement is crucial for maintaining high-quality surfaces that respond well to lighting and shading.\u003C/li>\u003Cli>\u003Cstrong>Faces/surfaces\u003C/strong> - Ideally, 3D models predominantly use quadrilateral (quad) polygons. 
Quads deform predictably and are favored in animation for better subdivision and rigging. N-gons (polygons with more than four sides) and triangles can cause issues in deformation and should thus be minimized or used strategically.\u003C/li>\u003Cli>\u003Cstrong>Mesh\u003C/strong> - A collection of vertices, edges, and faces that define the shape of a 3D model.\u003C/li>\u003Cli>\u003Cstrong>Bevel - \u003C/strong>Beveling is the technique of creating rounded or chipped edges instead of sharp, unrealistic lines.\u003C/li>\u003Cli>\u003Cstrong>Edge loops\u003C/strong> - Continuous loops of edges that follow the contours of a model. Properly placed edge loops are important for maintaining smooth deformations, especially around joints for rigging.\u003C/li>\u003Cli>\u003Cstrong>Loose parts\u003C/strong> - Loose parts are separate components of a model that may not be physically connected to the main mesh but are nonetheless part of the overall structure, like screws and bolts.\u003C/li>\u003Cli>\u003Cstrong>Trims\u003C/strong> - Decorative or functional edge modifications applied to give more detail to an object, like grooves, engravings, or other surface details used to break monotony.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"a-typical-hard-surface-modeling-workflow\">\u003Cstrong>A Typical Hard Surface Modeling Workflow\u003C/strong>\u003C/h2>\u003Cp>Hard surface modelers typically follow the same steps, sometimes with extra iterations to get the 3D model right:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Gather references\u003C/strong>—Gathering references involves collecting images, blueprints, and any available materials of the object you intend to model to understand its proportions, details, and features. 
You look for different angles, close-up details, and technical drawings that can provide insights into how each part of the object connects and functions.\u003C/li>\u003Cli>\u003Cstrong>Separate the parts\u003C/strong> - Once you have your references, the next step is to analyze the object and break it down into simpler components/parts. Being able to understand how the object is constructed greatly helps. You can then tackle each part individually to make the modeling process more manageable, but it's also important to consider how each part connects or interacts with others.\u003C/li>\u003Cli>\u003Cstrong>Block-out model\u003C/strong> - The block-out is where you create a basic, low-detail version of your model using simple shapes to define the overall form and proportions and make sure everything is sized correctly and positioned accurately before adding more detail. It’s an iterative process that requires adjustments to achieve the desired proportions and relationships between parts.\u003C/li>\u003Cli>\u003Cstrong>Detailing\u003C/strong> - With the block-out complete, you can refine the model and add more intricate features that define a hard surface object, like bevels, creases, edge loops, and other trims to add realism.\u003C/li>\u003Cli>\u003Cstrong>Texturing\u003C/strong> - Texturing is\u003Ca href=\"https://blog.cg-wire.com/texturing-shading-animation/\"> \u003Cu>the process of applying images\u003C/u>\u003C/a> (textures) to your model to create surface patterns and details like color, roughness, and metallic properties.\u003C/li>\u003Cli>\u003Cstrong>Rigging\u003C/strong> - In some cases, the model is intended for animation. Rigging is\u003Ca href=\"https://blog.cg-wire.com/rigging-in-animation/\"> \u003Cu>creating an animation skeleton\u003C/u>\u003C/a> to allow the model to be posed or animated―setting up joints, controls, and any necessary constraints to have the model function correctly. 
For example, a car should have spinning wheels.\u003C/li>\u003C/ol>\u003Cp>That's pretty much it, but 3D models can also require more edits during animation.\u003C/p>\u003Chr>\u003Ch2 id=\"modeling-techniques-and-tools-for-hard-surfaces\">\u003Cstrong>Modeling Techniques And Tools For Hard Surfaces\u003C/strong>\u003C/h2>\u003Ch3 id=\"polygonal-modeling\">\u003Cstrong>Polygonal Modeling\u003C/strong>\u003C/h3>\u003Cp>Polygonal modeling is a standard technique for creating 3D models out of polygons, primarily triangles and quadrilaterals. These polygons form the surface mesh of the 3D model. Artists control the model by manipulating vertices, edges, and faces.\u003C/p>\u003Ch3 id=\"subdivision-modeling\">\u003Cstrong>Subdivision Modeling\u003C/strong>\u003C/h3>\u003Cp>Subdivision modeling creates smooth, high-resolution meshes from a low-resolution base model.\u003C/p>\u003Cp>This technique is important because it simplifies workflows by allowing artists to work with simple meshes while being able to produce high-quality outputs during rendering.\u003C/p>\u003Cp>Subdivision modeling starts with a low-polygon mesh and then processes it through a series of iterations where each polygon is divided into more polygons. 
These subdivisions lead to a higher resolution and smoother appearance, all while maintaining original proportions and details as controlled by edge loops and crease weights.\u003C/p>\u003Ch3 id=\"box-modeling\">\u003Cstrong>Box Modeling\u003C/strong>\u003C/h3>\u003Cp>Box modeling is a simple form of polygonal modeling used extensively for creating both organic and hard surface models.\u003C/p>\u003Cp>Starting with a basic primitive shape like cubes (or \"boxes\"), the modeler extrudes, scales, and subdivides faces, edges, and vertices to gradually transform the shape into the desired model.\u003C/p>\u003Ch3 id=\"kitbashing\">\u003Cstrong>Kitbashing\u003C/strong>\u003C/h3>\u003Cp>Kitbashing is using pre-made assets to create complex structures or enhance detail on models. Artists can select, modify, and combine these \"kits\" to build complex models. Typically, these kits include detailed components like pipes, panels, and engines, which can be reused across different projects.\u003C/p>\u003Cp>It's crucial for saving time and effort, especially in productions with tight deadlines. It also encourages creativity by allowing artists to quickly experiment with various combinations of existing model components.\u003C/p>\u003Ch3 id=\"nurbs\">\u003Cstrong>NURBS\u003C/strong>\u003C/h3>\u003Cp>NURBS (Non-Uniform Rational B-Splines) create mathematically accurate curves and surfaces. This technique is common in industries that require high levels of precision, like in automotive and industrial design.\u003C/p>\u003Cp>NURBS defines surfaces with control points, weights, and knot vectors. 
The surfaces are generated through the manipulation of these control points, which influence the curvature and shape of the model dynamically.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>In summary, hard surface modeling is a vital skill for 3D animation that allows the creation of detailed, realistic objects, from vehicles to architectural structures. The article explored the main techniques and tools to understand how hard surface modeling works.\u003C/p>\u003Cp>AI is predicted to revolutionize hard surface modeling by automating repetitive and time-consuming tasks to allow artists to focus on more creative aspects of their craft. But the expertise of skilled modelers remains indispensable: animators will always need clean topology and smart rigging to make their work easier during production. Keep creating!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":973,"comment_id":974,"feature_image":975,"featured":105,"visibility":10,"created_at":976,"updated_at":977,"custom_excerpt":978,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":979,"primary_tag":980,"url":981,"excerpt":978,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":982},"9ab53252-aeeb-41f8-a807-b5a8e06c5b2f","67fcb49a0097450001312560","https://images.unsplash.com/photo-1518732714860-b62714ce0c59?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fDNEJTIwbW9kZWx8ZW58MHx8fHwxNzQ0NjE1MDEwfDA&ixlib=rb-4.0.3&q=80&w=2000","2025-04-14T09:09:14.000+02:00","2026-03-26T10:35:37.000+01:00","Hard surface modeling is essential to 3D animation—from coffee cups to spacecraft. 
Learn how artists build detailed environments with the right topology, clean geometry, and time-saving techniques like kitbashing and subdivision modeling.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/hard-surface-modeling/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@neonbrand?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Kenny Eliason\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/hard-surface-modeling","2025-04-14T10:00:49.000+02:00",{"title":968},"hard-surface-modeling","posts/hard-surface-modeling",[989],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"z09oC9OHDliLBt4nIAeWptxHAQV0qSGXOrVW320fRSI",{"id":992,"title":993,"authors":994,"body":7,"description":7,"extension":8,"html":996,"meta":997,"navigation":14,"path":1008,"published_at":1009,"seo":1010,"slug":1011,"stem":1012,"tags":1013,"__hash__":1015,"uuid":998,"comment_id":999,"feature_image":1000,"featured":105,"visibility":10,"created_at":1001,"updated_at":1002,"custom_excerpt":1003,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1004,"primary_tag":1005,"url":1006,"excerpt":1003,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1007},"ghost/posts:exaggeration-animation-principle.json","Exaggeration Principle (2026): Why Bigger Emotions Make Better Animation",[995],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🔉\u003C/div>\u003Cdiv class=\"kg-callout-text\">Animation is all about amplifying reality—and exaggeration is your tool to do just that.\u003C/div>\u003C/div>\u003Cp>In the early days of 
animation, Disney animators often found themselves confused by what seemed to be conflicting instructions from Walt Disney himself: \"When Walt asked for realism, he wanted a caricature of realism. [...] If a character was to be sad, make him sadder;\"\u003C/p>\u003Cp>The realist approach gets in the way of the magic that animation has the power to harness: what Walt wanted to convey was something that resonated deeply with the child within, yet differed noticeably from the mundane reality viewers experienced every day.\u003C/p>\u003Cp>This balance between reality and exaggeration is hard to get: from character designs to poses and facial expressions, exaggeration encompasses the whole production process.\u003C/p>\u003Cp>In this article, we explore the basics of exaggeration―what it is and why it's important―as well as simple elements to take into account to implement this principle in your animation.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-exaggeration\">\u003Cstrong>What's Exaggeration\u003C/strong>\u003C/h2>\u003Cp>Exaggeration is about making elements of animation like size, emotions, actions, or expressions larger, better, worse, or more important than they are in reality.\u003C/p>\u003Cp>The goal is not to distort reality entirely but to amplify the essence of an idea or action to make it more vivid to the audience. When a character is excited, their movements should display more energy. When a significant moment in the storytelling occurs, a camera can show dramatic angles. Audio cues can highlight important actions.\u003C/p>\u003Cp>Exaggeration plays an important role, even in more realistic animations.\u003C/p>\u003Chr>\u003Ch2 id=\"why-this-principle-is-important\">\u003Cstrong>Why This Principle Is Important\u003C/strong>\u003C/h2>\u003Cp>Exaggeration makes actions clearer for audiences to immediately understand character movements and emotions, even in complex or fast-paced sequences. 
A character's joy might be shown through a leap that defies gravity, or their anger could be expressed through exaggerated facial expressions that go beyond normal human capabilities.\u003C/p>\u003Cp>Exaggeration also plays a role in emphasizing story points: overly dramatic camera zooms or the use of slow motion and exaggerated smirks are for example often used in plot twists.\u003C/p>\u003Cp>All these little details help engage viewers emotionally to draw them deeper into the narrative, but it's still important to balance exaggerated and non-exaggerated elements to maintain a believable world. Not just to avoid overwhelming the audience, but also to let those exaggerated moments stand out effectively.\u003C/p>\u003Cp>There are many ways to integrate the exaggeration principle in animation, but in this article we focus on four basics.\u003C/p>\u003Chr>\u003Ch2 id=\"1-character-design\">\u003Cstrong>1. Character Design\u003C/strong>\u003C/h2>\u003Cp>One way to use exaggeration is through character design to help viewers instantly recognize and understand characters.\u003C/p>\u003Cp>Through\u003Ca href=\"https://blog.cg-wire.com/character-shape-language/\"> \u003Cu>shape language\u003C/u>\u003C/a>, animators emphasize character attributes using simple forms like rounded shapes to convey a friendly and approachable character or sharp, angular forms to suggest a villainous or aggressive personality. It visually communicates the essence of a character at a glance.\u003C/p>\u003Cp>Playing with\u003Ca href=\"https://blog.cg-wire.com/character-color-palettes/\"> \u003Cu>the color palette\u003C/u>\u003C/a> is also a great way to exaggerate personalities. 
For example, by using bold, contrasting colors to intensify the character's presence and mood: bright, vibrant colors might be used for a lively character, while dark, muted shades could underscore a more mysterious presence.\u003C/p>\u003Cp>Props and their relations to the character design are another way to exaggerate personalities. Zoro from One Piece is a hard worker who aims to be \"the greatest swordsman\", so Eiichiro Oda gave him 3 swords to wield―it's a form of exaggeration that makes the character iconic:\u003C/p>\u003Chr>\u003Ch2 id=\"2-poses-facial-expressions\">\u003Cstrong>2. Poses &amp; Facial Expressions\u003C/strong>\u003C/h2>\u003Cp>Another obvious way to integrate the exaggeration principle is to work on your poses.\u003C/p>\u003Cp>Animators can heighten the visual impact of a scene by deliberately slightly pushing poses beyond their natural extremes. Take this comparative picture for example:\u003C/p>\u003Cp>Exaggerated poses also emphasize key moments, clarify intent, and communicate strong emotional states. Goku going super saiyan is a classic example, but Dragon Ball features many more iconic poses:\u003C/p>\u003Cp>Similarly, facial expressions are exaggerated to display emotions more vividly. It's especially effective for comedic effects:\u003C/p>\u003Cp>Applying\u003Ca href=\"https://blog.cg-wire.com/solid-drawing/\"> \u003Cu>the solid drawing principle\u003C/u>\u003C/a> with for example a strong line of action or by relying on asymmetric poses is a great way to obtain convincing poses with realistic exaggeration. Look at how quickly you can grasp the following poses thanks to the line of action:\u003C/p>\u003Chr>\u003Ch2 id=\"3-timing\">\u003Cstrong>3. 
Timing\u003C/strong>\u003C/h2>\u003Cp>Timing highlights the key actions that need exaggeration: holding a pose for a longer duration can give it more weight and make it more noticeable, for example.\u003C/p>\u003Cp>Quick timing can be used to exaggerate speed and intensity while stretching out timing can make a movement feel sluggish to exaggerate a character's fatigue or laziness. Longer hang times during leaps or fast impacts on landing accentuate the heaviness or lightness of a character. Quick changes in expression intensify the feeling of surprise or shock, while prolonged expressions enhance the sensation of sadness or contemplation.\u003C/p>\u003Cp>Timing differences create contrast, which is a core element of exaggeration. For example, you can use it to build up anticipation before an exaggerated action: by slowing down or pausing just before a major action, animators build up the suspense to make the exaggerated moment more impactful.\u003C/p>\u003Chr>\u003Ch2 id=\"4-sound-effects\">\u003Cstrong>4. Sound Effects\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/sound-design-in-animation-definition-process-challenges/\">\u003Cu>Sound effects are underrated elements\u003C/u>\u003C/a> of exaggeration: a simple movement like a character jumping can be exaggerated with the sound of a powerful whoosh or a comical boing. Sound creates a hyper-realistic world that goes beyond what is possible in reality: whether it's exaggerated footsteps, overly loud slurping noises, or unrealistically booming explosions, they all contribute to \"enhancing\" reality.\u003C/p>\u003Cp>Animes are well known for this. In Fire Force, energy blasts use bass-boosted sound cues to increase their impact. 
In Hajime no Ippo, punching sounds turn into jet noise.\u003C/p>\u003Cp>We can all remember a sad scene underscored with an exaggerated, melodramatic violin sound, or a joyful scene accompanied by over-the-top, whimsical musical cues or cheerful jingles.\u003C/p>\u003Cp>Unique sound effects, sometimes entire themes, are often assigned to specific characters to exaggerate their traits or behaviors. Rengoku's theme from Demon Slayer reinforces his fiery personality traits and fire-themed character design:\u003C/p>\u003Chr>\u003Ch2 id=\"5-camera-work\">\u003Cstrong>5. Camera Work\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/camera-work-in-animation/\">\u003Cu>The way animators use the camera\u003C/u>\u003C/a> is also a great way to exaggerate story points.\u003C/p>\u003Cp>Dramatic zooms quickly move the camera closer to or further from a subject to emphasize an emotion or action.\u003C/p>\u003Cp>Quick cuts are rapid transitions between different camera angles or scenes, while jump cuts are abrupt edits within a continuous shot. These techniques show chaos, urgency, or heightened emotions by quickly shifting the viewer's focus and exaggerating the energy of a scene.\u003C/p>\u003Cp>A Dutch angle tilts the camera to one side to create a diagonal horizon line that distorts perspective. It can exaggerate disorientation or confusion in a scene by making the viewer feel off-balance alongside the characters.\u003C/p>\u003Cp>You can also use follow shots to add momentum to a character's movements, wide shots to show the character's loneliness, etc.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>The exaggeration principle is a fundamental tool to create engaging animations. Poses and facial expressions are one way to apply this principle, but you can also use character design, timing, sound effects, or camera techniques. 
Your creativity is the only limit!\u003C/p>\u003Cp>Exaggeration also plays a huge role in\u003Ca href=\"https://blog.cg-wire.com/12-principles-animation/\"> \u003Cu>other animation principles\u003C/u>\u003C/a> like timing, anticipation, or squash and stretch, so you not only need to master each principle individually but also understand how they relate to and complement each other.\u003C/p>\u003Cp>Experiment with different degrees of exaggeration and observe how it transforms your scenes to find the right balance between realism and storytelling and ultimately find your own style!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":998,"comment_id":999,"feature_image":1000,"featured":105,"visibility":10,"created_at":1001,"updated_at":1002,"custom_excerpt":1003,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1004,"primary_tag":1005,"url":1006,"excerpt":1003,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1007},"f248972d-6ef7-4bbd-bfad-fbf4c8169bcd","67f3e4ad5f2fdd0001789c24","https://images.unsplash.com/photo-1628260412297-a3377e45006f?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDV8fGNhcnRvb24lMjBleHByZXNzaW9ufGVufDB8fHx8MTc0NDAzNzI3M3ww&ixlib=rb-4.0.3&q=80&w=2000","2025-04-07T16:43:57.000+02:00","2026-03-26T10:33:47.000+01:00","Exaggeration is a key animation principle that pushes emotion, timing, and poses beyond realism to tell more impactful stories. 
Learn how animators use it across design, sound, and camera work to create iconic scenes.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/exaggeration-animation-principle/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@theshubhamdhage?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Shubham Dhage\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/exaggeration-animation-principle","2025-04-08T10:00:34.000+02:00",{"title":993},"exaggeration-animation-principle","posts/exaggeration-animation-principle",[1014],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"JBuJ1tck4ECIFiAAwGUTu0iAJeihjPD1lbvNyNoZSUs",{"id":1017,"title":1018,"authors":1019,"body":7,"description":7,"extension":8,"html":1021,"meta":1022,"navigation":14,"path":1033,"published_at":1034,"seo":1035,"slug":1036,"stem":1037,"tags":1038,"__hash__":1040,"uuid":1023,"comment_id":1024,"feature_image":1025,"featured":105,"visibility":10,"created_at":1026,"updated_at":1027,"custom_excerpt":1028,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1029,"primary_tag":1030,"url":1031,"excerpt":1028,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1032},"ghost/posts:lod-levels-of-detail.json","How LOD (Levels of Detail) Saves Time in 3D Animation (2026)",[1020],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🖌️\u003C/div>\u003Cdiv class=\"kg-callout-text\">Not every 3D model needs the highest level of detail! 
Learn how LOD (Levels of Detail) helps animators optimize rendering without compromising quality. 🎨\u003C/div>\u003C/div>\u003Cp>Animators don’t always work with the highest level of detail.\u003C/p>\u003Cp>It’s similar to watching YouTube videos—sometimes, you prioritize loading speed and lower the resolution to the minimum watchable quality. Other times, you want the full cinematic experience and opt for 4K resolution.\u003C/p>\u003Cp>Likewise, animators adjust levels of detail (LOD) to balance visual fidelity with performance efficiency. When refining a scene’s timing, high-detail models aren’t necessary. But in post-production, ensuring the final render meets quality standards is essential.\u003C/p>\u003Cp>This article explores the importance of LOD, how it optimizes production without compromising quality, and key techniques like bump mapping, retopology, texture baking, and displacement map baking.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-lod\">\u003Cstrong>What's LOD?\u003C/strong>\u003C/h2>\u003Cp>In animation, the level of detail (LOD) defines the varying degrees of complexity applied to models or scenes, especially when objects are viewed at different distances by a viewer or camera.\u003C/p>\u003Cp>When an object is far from the camera, it's unnecessary to render all its fine details in high geometric detail so lower LOD models with fewer polygons are used: a tree seen in the foreground might have detailed leaves and branches, while a distant tree might be a simple shape with textures to conserve processing power.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXdd1XLupXEQ0BjsLyQ98mUTgu2DLdonnRTJc1_KEOp1KUFEhjEQfjiT4JlUerYtotKclSgvNbDZbTf9rxNkw2JjlvZbklU2vjG31gzjWvyFbkR3ujE6W1t4VSyZMKUJXJecWLCq3A?key=S5xdGiACLTcKa2a_84MnLfXD\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"351\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: 
pre-wrap;\">Source: ArtStation\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"why-levels-of-details-matter\">\u003Cstrong>Why Levels Of Details Matter\u003C/strong>\u003C/h2>\u003Cp>LOD allows animators and\u003Ca href=\"https://blog.cg-wire.com/rendering-explained/\"> \u003Cu>rendering artists\u003C/u>\u003C/a> to optimize computational resources: objects far away can be rendered with less detail without noticeable loss in visual quality, reducing the load on the processor. If your animated film is one hour long, you need to render 108,000 frames at 30 frames per second, so these processing optimizations quickly stack up.\u003C/p>\u003Cp>Different levels of detail also mean only the necessary amount of polygons or textures are processed, saving up storage memory.\u003C/p>\u003Cp>This is particularly crucial for real-time rendering, where you need to maintain high frame rates without bloating the animator's PC, but it's also important for the whole production pipeline to reduce rendering costs.\u003C/p>\u003Chr>\u003Ch2 id=\"1-bump-mapping\">\u003Cstrong>1. 
Bump Mapping\u003C/strong>\u003C/h2>\u003Cp>Bump mapping is a computer graphics technique that simulates bumps and wrinkles on an object's surface to create the illusion of intricate textures without increasing the model's geometric complexity with additional polygons.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXc6bHhb0m4SyOIQMsKI7T2vdZtGN5_rMgymT4vLb_5OZpLH1CJfVBm8L8v_fW3JfI-3pPOVYAdHvHcUvRURZfbAnwuX1SC638aK06a5spApwS1BM1j1KKP8xN4WGnIO_giaRVyMjQ?key=S5xdGiACLTcKa2a_84MnLfXD\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"209\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Wikipedia\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>It works by changing the appearance of the surface texture by manipulating the lighting calculations during rendering rather than changing the actual geometry. This is done by using a texture known as a bump map, which is typically a grayscale image where the intensity of the color represents the height of the surface perturbation.\u003C/p>\u003Cp>Traditionally, modeling each brick in a brick wall with its unique surface details would require a dense mesh, with each bump and groove captured by additional polygons, which can be computationally expensive.\u003C/p>\u003Cp>Instead, you can just create a bump map where the lighter areas indicate raised sections of the brick texture, while darker areas correspond to deeper sections like the mortar lines. When applied to a simple flat plane with minimal polygons, the rendering engine uses this bump map during shading calculations to perturb the surface normals. The interplay of light and shadow on these altered normals gives the viewer the illusion that the flat plane has all the intricate geometry of an actual brick wall.\u003C/p>\u003Chr>\u003Ch2 id=\"2-retopology\">\u003Cstrong>2. 
Retopology\u003C/strong>\u003C/h2>\u003Cp>Retopology is a process in 3D modeling where the topology, or the structure of the mesh surface, is redefined to achieve better geometry flow and reduce polygon count while preserving the original shape and detail of the model.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXfjaHAvyccmdo6np7QPaMO5CyCKvtmE-frtI7-V5wObEkiiTNZ_IDBVZ12ME_ZFSgIPTu-BhDcWjyCdcWFRQjhZX5HvDLo4lIjfJNamn8-rAGbCJfmfkZ9WPEUX0lUKBmwP70UIsQ?key=S5xdGiACLTcKa2a_84MnLfXD\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"307\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: people.wku.edu\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Imagine you start with a highly detailed character model created using sculpting software like ZBrush. This model might have millions of polygons capturing every intricate detail. The retopology process would go like this:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Simplification\u003C/strong> - The software creates a new mesh on top of the high-poly model using retopology techniques (automatic like ZBrush's ZRemesher or manual for more granular edits) to define a more manageable polygon structure while capturing the character's essential forms.\u003C/li>\u003Cli>\u003Cstrong>Flow utilization\u003C/strong> - The tool considers edge loops around significant anatomical features like the eyes, mouth, and joints to help animation deformation (bending and stretching).\u003C/li>\u003Cli>\u003Cstrong>Polygon reduction\u003C/strong> - The new topology should have significantly fewer polygons.\u003C/li>\u003C/ol>\u003Cp>Retopology is important to create clean, efficient mesh structures. 
It's especially important to create multiple versions of the character model corresponding to different levels of detail:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>LOD0\u003C/strong> - The most detailed version is used when the character is close to the camera.\u003C/li>\u003Cli>\u003Cstrong>LOD1\u003C/strong> - A less detailed version for mid-range shots.\u003C/li>\u003Cli>\u003Cstrong>LOD2 and beyond\u003C/strong> - Even simpler versions for distance shots.\u003C/li>\u003Cli>\u003Cstrong>Vertex Weighting\u003C/strong> - The lower detail versions maintain essential shape and silhouette but use minimal geometry to optimize rendering performance.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"3-texture-baking\">\u003Cstrong>3. Texture Baking\u003C/strong>\u003C/h2>\u003Cp>Texture baking pre-calculates and transfers various surface details and lighting information into a texture map. These baked textures include shadows, reflections, global illumination, ambient occlusion, or complex material properties that are computationally expensive to calculate in real time. Once baked, this data is applied to simpler versions of the 3D models to optimize performance without sacrificing quality.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXetwGb0Q9Xf_Ggr4xp_tpwmI5Wtic2BSkLdobK9A2F3Bg45Ol9Yjkdr_fYMQbG6UTqqz3F4kND05SZAcoi2skNfEvFVazF6JEt0PbbQyKgUc85hnJQgZB-56ZcmfpmyYdtQiiCRlQ?key=S5xdGiACLTcKa2a_84MnLfXD\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"550\" height=\"502\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Developers Blog\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>By baking textures, the artist ensures visual consistency across different levels of detail: features like shadow placement and surface details can be preserved even with reduced geometric complexity. 
Lower LODs require less processing power to render, allowing more straightforward shading techniques to be used.\u003C/p>\u003Cp>An intricate 3D model of a building that will be part of a large city scene in a real-time application has highly detailed geometry and complex materials with bump maps and reflective surfaces, which can be rendered beautifully in high-end renders but are too resource-demanding for real-time rendering during production:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>High-Resolution Model Preparation\u003C/strong> - We start with a high-resolution version of the building model, where lighting and material effects are meticulously applied.\u003C/li>\u003Cli>\u003Cstrong>Baking Process\u003C/strong> - Using 3D DCC software, we bake the model's lighting information into a texture map―shadows, highlights, and any ambient occlusion effects into a 2D texture. You'll typically bake several maps, like diffuse, normal, and specular maps.\u003C/li>\u003Cli>\u003Cstrong>Create LOD Models\u003C/strong> - We generate several lower-resolution versions of the building model with fewer vertices and simplified geometry.\u003C/li>\u003Cli>\u003Cstrong>Apply Baked Textures\u003C/strong> - Artists apply the baked textures to these LOD models. Even though they have a reduced vertex count, the baked textures convey intricate details and lighting effects, keeping the visual quality high.\u003C/li>\u003Cli>\u003Cstrong>Implementation\u003C/strong> - In the real-time environment, the LOD system selects which version of the model to render based on the camera distance or screen space size. The pre-baked textures ensure that even the simplest model retains much of the visual complexity of the high-resolution asset.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"4-displacement-map-baking\">\u003Cstrong>4. Displacement Map Baking\u003C/strong>\u003C/h2>\u003Cp>Displacement map baking transfers the high-resolution details of a 3D model onto a lower-resolution version. 
This is also achieved by generating a texture, the displacement map, that stores the height differences from the low-resolution model's surface to the high-resolution model's surface. This texture can then be used to recreate the appearance of the high-resolution model during rendering without having to load the full complexity all the time.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXfxmHx2XuYjmoNGQ9vy5MrprEB58pDW0OVLWaf2dZFgF4cb5tx9C0FVcSQCe8wu-VXlS1c-qOVSxgjB-Qpp7FdXLOqv8YNMFPCecqHpgMCLaAK0KPg2FZFX82G4icQk19hEgLHRAA?key=S5xdGiACLTcKa2a_84MnLfXD\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"351\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: nutelZ on Youtube\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Col>\u003Cli>\u003Cstrong>Create High and Low-Resolution Models\u003C/strong> - We start with a high-resolution 3D model that has all the detailed features like bumps, creases, and other surface details. Then, we create a simplified version of this model with a reduced polygon count.\u003C/li>\u003Cli>\u003Cstrong>Bake the Displacement Map\u003C/strong> - Using software like ZBrush, Blender, or Maya, we calculate the difference between the two models' surfaces. The result is the displacement map, a grayscale texture where the intensity of the color indicates how much to displace the surface of the low-res model to match the high-res model's details.\u003C/li>\u003Cli>\u003Cstrong>Apply the Map to Low-Resolution Model\u003C/strong> - Once the map is baked, it's applied to the low-resolution model. 
The rendering engine will use the map information to simulate the high-detail surface during real-time rendering or animation playback.\u003C/li>\u003Cli>\u003Cstrong>LOD Implementation\u003C/strong> - We add the low-res model with the displacement map into a LOD system, which swaps out different levels of detail depending on the camera distance. When the model is far enough away, the low-res model appears as detailed as the high-res one thanks to the displacement map, saving processing power.\u003C/li>\u003C/ol>\u003Cp>Displacement maps allow different LOD versions of a scene to share high-resolution details without the computational overhead, saving memory and simplifying asset management.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>A level of detail is an important concept in animation to increase your animation pipeline's efficiency by adjusting the complexity of your 3D models based on parameters like distance from the camera or production environments.\u003C/p>\u003Cp>Several techniques like bump mapping, retopology, texture baking, and displacement map baking allow animators to adapt their model's LOD, but there are more, and our article only gives a glimpse of how they work: you'll have to figure out by yourself how to make the best of them in your DCC tool. Consider playing with these techniques in your software and see how they impact the polygons and visual quality of your 3D models!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1023,"comment_id":1024,"feature_image":1025,"featured":105,"visibility":10,"created_at":1026,"updated_at":1027,"custom_excerpt":1028,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1029,"primary_tag":1030,"url":1031,"excerpt":1028,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1032},"5f04f99e-8f8a-4556-8177-f07bebb3f668","67c929ecc288b6000147a838","https://images.unsplash.com/photo-1516382799247-87df95d790b7?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fG1hZ25pZnlpbmclMjBnbGFzc3xlbnwwfHx8fDE3NDEyNDAwNDF8MA&ixlib=rb-4.0.3&q=80&w=2000","2025-03-06T05:51:56.000+01:00","2026-02-20T06:04:45.000+01:00","Levels of detail (LOD) help animators optimize performance by adjusting rendering complexity based on a scene’s needs. 
From bump mapping to texture baking, learn how LOD keeps 3D animation efficient without sacrificing quality.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/lod-levels-of-detail/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@olloweb?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Agence Olloweb\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/lod-levels-of-detail","2025-03-31T10:00:14.000+02:00",{"title":1018},"lod-levels-of-detail","posts/lod-levels-of-detail",[1039],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"2dzn-lw4Jf2uw_vYMUitQ8oAi6i1gl3sSiwth6Q8DBo",{"id":1042,"title":1043,"authors":1044,"body":7,"description":7,"extension":8,"html":1046,"meta":1047,"navigation":14,"path":1057,"published_at":1058,"seo":1059,"slug":1060,"stem":1061,"tags":1062,"__hash__":1064,"uuid":1048,"comment_id":1049,"feature_image":1050,"featured":105,"visibility":10,"created_at":1051,"updated_at":261,"custom_excerpt":1052,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1053,"primary_tag":1054,"url":1055,"excerpt":1052,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1056},"ghost/posts:slow-in-out.json","Slow In and Out (2026): Why Easing Makes Animation More Engaging",[1045],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎈\u003C/div>\u003Cdiv class=\"kg-callout-text\">Objects in the real world don’t start or stop instantly—they ease in and out of motion. 
Great animation follows the same rule!\u003C/div>\u003C/div>\u003Cp>In the real world, objects never start or stop moving instantaneously―a car will gradually pick up speed and then slow down before coming to a halt.\u003C/p>\u003Cp>The\u003Ca href=\"https://blog.cg-wire.com/12-principles-animation/\"> \u003Cu>Slow In and Out principle\u003C/u>\u003C/a> replicates this natural acceleration and deceleration to match basic physics and create more engaging animations.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXfereWkVt083bHGS-wn-N2Ko38h-TI5EgqExVM29yjfP2M0CKwHirNqEMKWmfFQucpGKb-6lZ1og9zorIb1sKtc3Mn95cHmbGYxPq4OxGPKiiSqrWhKtOdKZfkt7VsIGqdz3gTHnw?key=M91ppFxZ4Gl8NmaA242FhRO_\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"183\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Suresh V. Selvaraj on Medium\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>But how does an animator use this principle in practice? In this article, we give you five \u003Cstrong>actionable tips\u003C/strong> to better implement smooth transitions. You'll discover how to manipulate ease curves, optimize secondary actions, and use audio cues to amplify your storytelling, motion trails, ghosting, and precise keyframe placement.\u003C/p>\u003Chr>\u003Ch2 id=\"why-slow-in-and-slow-out\">\u003Cstrong>Why Slow In And Slow Out?\u003C/strong>\u003C/h2>\u003Cp>Transitions look smoother when the frames are gradually spaced closer together at the movement's start (slow in) and end (slow out).\u003C/p>\u003Cp>The pacing of your transition is a great storytelling tool: a character who slowly raises their eyebrows before they widen quickly in shock uses this principle to underscore the surprise element.\u003C/p>\u003Chr>\u003Ch2 id=\"1-master-ease-curves-in-animation-software\">\u003Cstrong>1. 
Master Ease Curves in Animation Software\u003C/strong>\u003C/h2>\u003Cp>Ease curves, also known as motion graphs, represent how the speed of an animation changes over time.\u003C/p>\u003Cp>By understanding and manipulating these curves, animators gain precise control over the acceleration and deceleration of their animated objects to make them look more natural.\u003C/p>\u003Cp>This is particularly useful in creating motions that mirror real-world physics because objects rarely move at a constant speed: they accelerate and decelerate gradually, influenced by forces like gravity and inertia.\u003C/p>\u003Cp>If you were to animate a bouncing ball without adjusting ease curves, the ball might move up and down at a constant rate, losing its sense of weight. But you can adjust bezier handles on the animation curve by using the graph editor within your animation software to create a gradual slowdown as the ball reaches its peak height, simulating the natural deceleration due to gravity. Similarly, by accelerating the curve as the ball descends, you can mimic the ball picking up speed as it falls back to the ground. These details make all the difference.\u003C/p>\u003Cp>Different types of ease curves produce varying effects on animation. There are three main categories:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Ease-in\u003C/strong> curves create a slow start and a faster finish, which is perfect for objects coming to life or launching from a standstill.\u003C/li>\u003Cli>\u003Cstrong>Ease-out\u003C/strong> curves facilitate a fast start with a gradual end for simulating objects coming to a rest.\u003C/li>\u003Cli>\u003Cstrong>Ease-in-out\u003C/strong> curves combine both principles for smooth starts and finishes.\u003C/li>\u003C/ul>\u003Cp>You can then have slight variations of each category like sinusoidal, cubic, or quadratic curves for various effects. 
And of course you can use your DCC tool to create a custom curve manually:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXforb2ePxMY_klk-HjNn4vWWMJzuhL-qEXTjmXWuXedbwVZ6KqvRZoHvDzkfDDwD0X4KTL9_7yiGvNSCrcNIJEr6Ahtprnr8gsDDMbZ_kY_Za4H6QKT-V91ftSjOeBr_JHiuq3CjQ?key=M91ppFxZ4Gl8NmaA242FhRO_\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"329\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Stack Exchange\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"2-focus-on-secondary-action\">\u003Cstrong>2. Focus on Secondary Action\u003C/strong>\u003C/h2>\u003Cp>Secondary actions are smaller, complementary motions that add depth and nuance to the primary actions in a scene.\u003C/p>\u003Cp>When applied to secondary actions, the slow-in-out principle ensures that these motions blend smoothly with the primary actions.\u003C/p>\u003Cp>Let's take the example of a character waving. The primary action might be the arm moving back and forth, driven by the character's intention to greet someone. The secondary action could involve the subtle swaying of the character's ponytail, a gentle rise and fall of the shoulders, or the slight shift of weight from one foot to the other to signal excitement. These secondary actions should ideally begin with a subtle, gradual build-up (slow in), reaching a peak motion in harmony with the primary action, and then gently retreating (slow out).\u003C/p>\u003Cp>Slight variations are sometimes necessary for stylistic or narrative purposes: if the animatic calls for a comedic effect, the secondary actions could exaggerate the ease or lack thereof, creating a more playful or exaggerated motion to fit the tone.\u003C/p>\u003Chr>\u003Ch2 id=\"3-synchronize-with-audio-cues\">\u003Cstrong>3. 
Synchronize with Audio Cues\u003C/strong>\u003C/h2>\u003Cp>Another effective way to improve the impact of your ease in and out transitions is by aligning them with audio cues.\u003C/p>\u003Cp>These audio cues can underscore emotional beats, build anticipation, or provide comedic relief.\u003C/p>\u003Cp>And when animations are synchronized with these audio cues, they can highlight and accentuate important movements and transitions within the animation, to make sure viewers feel the moment.\u003C/p>\u003Cp>In a scene where a character is leaping off a cliff, you create a sense of propulsion by applying slow-out as the character pushes away from the edge. As the character soars through the air, the slow-in, when they begin to slow toward the apex of their jump, can be accentuated by a musical crescendo.\u003C/p>\u003Chr>\u003Ch2 id=\"4-use-motion-trails-and-ghosting\">\u003Cstrong>4. Use Motion Trails and Ghosting\u003C/strong>\u003C/h2>\u003Cp>Motion trails and ghosting offer animators a visual representation of the path of action to iteratively refine the easing in their animations.\u003C/p>\u003Cp>Motion trails are particularly useful to visualize the flow of a moving object. 
They provide a line or curve that showcases the path through which an object travels to convey information about its velocity and trajectory.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXdcbCEOGgSQK9owdF2kfAt7te2L3k4Yo_UdaFshQfdVkp1ZcplPGvEr7_pDYaI6j-B0SCADR1Aaaqtedz3DARzZyF9NqKfvx6KqX7TN-B5n-luUqCtFECyVwiWovMB5vk4mqQSD4w?key=M91ppFxZ4Gl8NmaA242FhRO_\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"316\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Ghosting, on the other hand, creates a series of semi-transparent frames that represent an object's past or future positions so that animators can see multiple stages of motion simultaneously and get a comprehensive view of the action as it unfolds.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXcGfFgnIrLKM5XACQa3vMVIBsNRlEziGm7oGwZFJc8MCc66TX6hbYKRJRW7Fc_a3Hbx9issIIDBxIh5Um9WrSnYVptAd6qK6_mlbQfcW7jqxMZI2hPRJIjup0f-gc9bLwzks1S56g?key=M91ppFxZ4Gl8NmaA242FhRO_\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"364\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Cascadeur\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Animators can then evaluate and adjust the timing and spacing of an animation with great precision.\u003C/p>\u003Cp>Consider the example of a sword swing animation. With motion trails, an animator can visualize the arc through which the sword travels to adjust the movement. Ghosting further enhances this process by allowing the animator to see the sword's position at various intervals.\u003C/p>\u003Chr>\u003Ch2 id=\"5-optimize-keyframe-placement\">\u003Cstrong>5. 
Optimize Keyframe Placement\u003C/strong>\u003C/h2>\u003Cp>By carefully \u003Ca href=\"https://blog.cg-wire.com/stepped-animation/\">\u003Cu>determining where to place keyframes\u003C/u>\u003C/a>, animators can maximize their control over the movements' start, middle, and end to improve the generated ease in and ease out transitions by in-betweening.\u003C/p>\u003Cp>In a walk cycle, keyframes must be placed with precision to capture critical moments like the heel strike and toe-off phases of the step. The heel strike is the point where the heel meets the ground, and the toe-off is when the foot pushes away. By accurately defining these points with appropriately placed keyframes, animators can achieve precise control over easing the foot’s movement.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXe2dqY5TmOnkmfl62PG0TPNLdA74_0ie9ANo9AFRhUjNtm9FGAvOq5uPC9m9BtYWkD42gq2Dsn3dHATc7glIGZDtAoM4tOKR3zrFd5t9BZkv8OE0xjfNZ_iHME10F2IY0eAQLc4-g?key=M91ppFxZ4Gl8NmaA242FhRO_\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"580\" height=\"363\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: AngryAnimator.com\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Strategic keyframe positions also allow animators to avoid unnecessary clutter and create smoother transitions.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>The slow in and out principle is a given nowadays: every quality animation relies on smooth transitions, and easing curves are key in this aspect.\u003C/p>\u003Cp>But mastering this principle is not easy. You need a good sense of timing and storytelling to match audio cues with secondary actions and keyframes. 
You also need to be comfortable with tools like motion trails, ghosting, and motion graphs.\u003C/p>\u003Cp>When you work with a limited budget, smooth animations aren't often a priority, and you must do with what you have. A good animator can work around those constraints with efficient use of keyframes, tweening, and reusable assets, including animation cycles.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1048,"comment_id":1049,"feature_image":1050,"featured":105,"visibility":10,"created_at":1051,"updated_at":261,"custom_excerpt":1052,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1053,"primary_tag":1054,"url":1055,"excerpt":1052,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1056},"3b32c7a8-71d9-47a5-9588-b1221dd02ceb","67c929f0c288b6000147a83e","https://images.unsplash.com/photo-1723145886817-1a2ee70a251b?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDEwfHxzbG93JTIwaW4lMjBhbmQlMjBvdXR8ZW58MHx8fHwxNzQxMjM5MjEyfDA&ixlib=rb-4.0.3&q=80&w=2000","2025-03-06T05:52:00.000+01:00","The Slow In and Out principle 
makes animations feel more natural by replicating real-world acceleration and deceleration. Learn how to use easing curves, secondary actions, motion trails, and keyframe placement to create smoother transitions in animation.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/slow-in-out/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@westhephotographer?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Wesley Armstrong\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/slow-in-out","2025-03-24T10:00:19.000+01:00",{"title":1043},"slow-in-out","posts/slow-in-out",[1063],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"NyrdLz8BQ0-MMckck4U7-eyAN9Yze2auq0THnWfyAdQ",{"id":1066,"title":1067,"authors":1068,"body":7,"description":7,"extension":8,"html":1070,"meta":1071,"navigation":14,"path":1082,"published_at":1083,"seo":1084,"slug":1085,"stem":1086,"tags":1087,"__hash__":1089,"uuid":1072,"comment_id":1073,"feature_image":1074,"featured":105,"visibility":10,"created_at":1075,"updated_at":1076,"custom_excerpt":1077,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1078,"primary_tag":1079,"url":1080,"excerpt":1077,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1081},"ghost/posts:previs-artist.json","Previs in Animation (2026): Mapping Out the Perfect Shot",[1069],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎨\u003C/div>\u003Cdiv class=\"kg-callout-text\">Great animation starts with great planning. 
\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Previsualization\u003C/strong>\u003C/b> (\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">previs\u003C/strong>\u003C/b>) is where ideas take shape before production begins!\u003C/div>\u003C/div>\u003Cp>Animation is incredibly complex, like writing a novel. You don't just write a first draft; you usually start with an outline to get an idea of how the novel will evolve without going into too much detail.\u003C/p>\u003Cp>Similarly, productions don't just happen in a vacuum: you start with previsualization to explore an idea and plan the rest of the work.\u003C/p>\u003Cp>This article describes the vital role of previs artists and how they transform ideas into guides for animators. We'll cover how these artists meticulously plan complex scenes and how they ensure every camera angle and movement fits.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-previsualization\">\u003Cstrong>What's Previsualization\u003C/strong>\u003C/h2>\u003Cp>Previsualization, or previs, is the process in which animators create a preliminary visualization of sequences using 3D animation tools.\u003C/p>\u003Cp>A previs artist is responsible for crafting these rough animated versions to help visualize how a scene unfolds.\u003C/p>\u003Cp>For example, in a car chase scene, a previs artist would use 3D software to simulate the camera angles, car movements, and timing, allowing the supervisors and animators to see how the action will play out before going further in the production process.\u003C/p>\u003Chr>\u003Ch2 id=\"why-previs\">\u003Cstrong>Why Previs\u003C/strong>\u003C/h2>\u003Cp>Previs allows animators to \u003Cstrong>visualize complex scenes before production\u003C/strong> to identify potential challenges.\u003C/p>\u003Cp>By identifying issues and perfecting scenes early, previs can significantly reduce production costs by minimizing the need for reshoots or corrections in later stages.\u003C/p>\u003Cp>Previsualization also helps with technical 
planning by mapping out camera angles, movement, lighting, and effects, ensuring all technical aspects are covered.\u003C/p>\u003Cp>The deliverables are a common visual language that facilitates better communication among team members, from directors to animators to VFX artists.\u003C/p>\u003Chr>\u003Ch2 id=\"the-previs-items\">\u003Cstrong>The Previs Items\u003C/strong>\u003C/h2>\u003Cul>\u003Cli>\u003Cstrong>Concept art\u003C/strong> -\u003Ca href=\"https://blog.cg-wire.com/concept-in-animation-definition-process-and-challenges/\"> \u003Cu>Concept art\u003C/u>\u003C/a> is a detailed visual representation that illustrates the characters' style, color, design, visuals, environments, props, and overall project look. It serves as a visual reference and guide for the production team to set the aesthetic direction of the project.\u003C/li>\u003Cli>\u003Cstrong>Storyboard\u003C/strong> - A storyboard is\u003Ca href=\"https://blog.cg-wire.com/storyboard-animation/\"> \u003Cu>a sequence of quick sketches\u003C/u>\u003C/a> representing the shots intended for an animation. Each panel depicts a specific moment or action and accompanying notes on dialogue, camera angles, and movements. Storyboards help visualize the story in a clear narrative flow.\u003C/li>\u003Cli>\u003Cstrong>Animatic\u003C/strong> - An animatic is a more advanced version of a storyboard. It's\u003Ca href=\"https://blog.cg-wire.com/how-animatics-bring-stories-to-life/\"> \u003Cu>a video version of the storyboard\u003C/u>\u003C/a> edited with sound to create a rough animation version. Animatics may include temporary music tracks, dialogue, and sound effects. They give a clearer sense of timing and pacing and how sound and visuals will work together, but most importantly, they can prevent costly mistakes by uncovering potential issues in story flow and transition before expensive animation work begins.\u003C/li>\u003C/ul>\u003Cp>Storyboards focus on the story and shot composition. 
Animatics emphasize timing, pacing, and integrating sound with visuals. Concept art centers on the design and aesthetic aspects.\u003C/p>\u003Chr>\u003Ch2 id=\"1-mastering-camera-movement\">\u003Cstrong>1. Mastering Camera Movement\u003C/strong>\u003C/h2>\u003Cp>It's important to focus on developing a robust understanding of\u003Ca href=\"https://blog.cg-wire.com/camera-work-in-animation/\"> \u003Cu>camera movement techniques\u003C/u>\u003C/a> to create more engaging sequences. Camera work directs the audience's attention but also can make or break the emotional impact of a scene.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Plan your shots with storyboards\u003C/strong> - Plan your camera shots with detailed storyboards to make sure each camera move enhances the storytelling rather than distracts from it.\u003C/li>\u003Cli>\u003Cstrong>Use dynamic angles\u003C/strong> - Incorporate a mix of camera angles and heights to add variety and maintain viewer interest. For example, high-angle shots give a sense of vulnerability, while low-angle shots make characters appear more powerful.\u003C/li>\u003Cli>\u003Cstrong>Embrace smooth camera transitions\u003C/strong> - Pay attention to how your camera transitions between shots. Avoid jarring cuts by using smooth dolly moves or whip pans to keep the motion fluid.\u003C/li>\u003C/ul>\u003Cp>For example, in an action scene, experimenting with quick pans or zooms might be a good way to amplify a sense of speed and urgency and pull the viewer deeper into the action.\u003C/p>\u003Chr>\u003Ch2 id=\"2-proficiency-in-layout-design\">\u003Cstrong>2. 
Proficiency in Layout Design\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/animation-layout/\">\u003Cu>Layout skills\u003C/u>\u003C/a> ensure each frame is visually balanced to guide the viewer's attention naturally toward key elements.\u003C/p>\u003Cp>In a crowded market scene, arranging characters and props strategically prevents visual clutter and maintains the focus on the main character.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Rule of thirds\u003C/strong> - Divide the frame into a 3x3 grid and place key elements at the intersections or along the lines to direct attention.\u003C/li>\u003Cli>\u003Cstrong>Depth and layering\u003C/strong> - Use foreground, midground, and background layers to create depth in the scene and allow the audience's eye to travel naturally through the composition.\u003C/li>\u003Cli>\u003Cstrong>Leading lines and eye paths\u003C/strong> - add lines, whether they are structural elements or shapes, that guide the viewer’s eyes toward the focal point. This can be achieved through angled props, character gazes, or pathways within the scene.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"3-effective-communication\">\u003Cstrong>3. Effective Communication\u003C/strong>\u003C/h2>\u003Cp>Conveying your ideas and feedback clearly across departments is paramount to make sure everyone is on the same page. Previsualization deliverables are going to define the bulk of the work after all, so the whole team needs to get them right to prevent misunderstandings.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Use visual aids\u003C/strong> - Storyboards, animatics, and early visual models are meant to communicate ideas more effectively by bridging language gaps.\u003C/li>\u003Cli>\u003Cstrong>Establish a feedback loop\u003C/strong> - Create\u003Ca href=\"https://blog.cg-wire.com/how-to-give-efficient-animation-feedback/\"> \u003Cu>a structured feedback process\u003C/u>\u003C/a> where constructive criticism can be shared freely and frequently. 
Set regular review meetings or check-ins where team members from different departments can provide input.\u003C/li>\u003Cli>\u003Cstrong>Document everything\u003C/strong> - Keep thorough documentation of all creative decisions, changes, and feedback. This includes maintaining a version history of previs assets and clearly marking the reasons for revisions. This documentation ensures that everyone involved can trace the evolution of the project to reduce the risk of repeating past mistakes or overlooking important feedback.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"4-animation-timing\">\u003Cstrong>4. Animation Timing\u003C/strong>\u003C/h2>\u003Cp>Timing is extremely important for previs to make sequences more engaging. It sets the pace of the sequence and helps convey the intent of each action to the audience. For example, it sets the tone for dramatic tension or comedic effects.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Use of beats\u003C/strong> - Break down scenes into distinct beats to highlight key actions or transitions. It not only helps in maintaining audience focus but also ensures that important narrative elements are given the appropriate screen time.\u003C/li>\u003Cli>\u003Cstrong>Variable pacing\u003C/strong> - Experiment with different rates of action to create varying emotional impacts: fast pacing can build excitement or tension, while slower actions can enrich dramatic moments or reinforce character development.\u003C/li>\u003Cli>\u003Cstrong>Playback speed adjustment\u003C/strong> - Regularly review sequences at different playback speeds. Slowing down the sequence can help detect timing misalignments or subtle animation errors, while speeding it up can test the flow and energy of the scene.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"5-developing-a-cinematic-eye\">\u003Cstrong>5. 
Developing a Cinematic Eye\u003C/strong>\u003C/h2>\u003Cp>A good previs artist can perceive scenes through a cinematic lens: they pay attention to aspects like camera and layout/composition but also lighting and the overall mood.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Study film and photography\u003C/strong> - Pay close attention to the directors' use of camera angles, movements, and shot compositions. Analyze photographs to understand lighting, framing, and the rule of thirds.\u003C/li>\u003Cli>\u003Cstrong>Practice sketching\u003C/strong> - Regularly sketching scenes allows you to experiment with different visual ideas without investing too much time in 3D software. It helps train your eye to visualize how scenes will translate into film, emphasizing the importance of perspective, scale, and visual flow.\u003C/li>\u003Cli>\u003Cstrong>Experiment with different lighting and mood setups\u003C/strong> - Lighting isn't just about illuminating a scene—it's about setting the tone. Experiment with various lighting setups to understand how they affect mood and storytelling. Try replicating lighting scenarios from famous movies to see first-hand how light can transform a scene.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Previsualization is so important, it can make or break a production.\u003C/p>\u003Cp>It's not just a technical challenge where you need to master the camera, layout, and timing, but also a collaborative challenge to align the work of the whole animation studio in a single vision. The resulting concept art, storyboards, and animatics lay the foundations of everything else.\u003C/p>\u003Cp>Once the previs artist's work is completed, the production stage begins. 
But that's not where the job ends: previs artists help other animators understand the deliverables and translate them into end-products while maintaining the creative vision.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1072,"comment_id":1073,"feature_image":1074,"featured":105,"visibility":10,"created_at":1075,"updated_at":1076,"custom_excerpt":1077,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1078,"primary_tag":1079,"url":1080,"excerpt":1077,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1081},"7da9bb9e-9545-47b9-b1ef-c9f9bfc2cc2b","67c929f4c288b6000147a844","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/03/PIX-2-Halon_FordvFerrari1.jpg","2025-03-06T05:52:04.000+01:00","2026-03-26T10:39:39.000+01:00","Previsualization (previs) is the key to planning complex animation scenes before production. From storyboards to animatics, previs artists shape the blueprint for smooth, cost-effective workflows. 
Learn how previs enhances storytelling, camera work, and timing!",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/previs-artist/","\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: VFX Voice\u003C/em>\u003C/i>","/posts/previs-artist","2025-03-17T10:00:36.000+01:00",{"title":1067},"previs-artist","posts/previs-artist",[1088],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"qDedGluNpKSc4v89EFdG-dRGBwoJ1M0lDy_MfrvPRH0",{"id":1091,"title":1092,"authors":1093,"body":7,"description":7,"extension":8,"html":1095,"meta":1096,"navigation":14,"path":1107,"published_at":1108,"seo":1109,"slug":1110,"stem":1111,"tags":1112,"__hash__":1114,"uuid":1097,"comment_id":1098,"feature_image":1099,"featured":105,"visibility":10,"created_at":1100,"updated_at":1101,"custom_excerpt":1102,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1103,"primary_tag":1104,"url":1105,"excerpt":1102,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_i
mage_alt":7,"feature_image_caption":1106},"ghost/posts:follow-through-overlapping-action.json","Follow-Through & Overlapping Action (2026): The Key to Fluid Motion",[1094],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🏃\u003C/div>\u003Cdiv class=\"kg-callout-text\">Ever noticed how a character’s hair keeps moving even after they stop running? That’s follow-through &amp; overlapping action in motion!\u003C/div>\u003C/div>\u003Cp>Understanding\u003Ca href=\"https://blog.cg-wire.com/12-principles-animation/\"> \u003Cu>the 12 principles of animation\u003C/u>\u003C/a> is one of the first steps for every professional animator. Among them, the follow-through and overlapping action principle is one of the most recognizable: you can see it in any animated project, no matter how barebones it is.\u003C/p>\u003Cp>This article explores best practices for this animation principle: from understanding the lingering motions after a character lands from a jump to mastering the choreography of secondary movements through layered animation techniques, we cover all the basics.\u003C/p>\u003Cp>By the end, you should have a good understanding of what this principle is about and have good actionable ideas to use in your own animation projects.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-follow-through\">\u003Cstrong>What's Follow-Through\u003C/strong>\u003C/h2>\u003Cp>The follow-through principle refers to the continuation of motion in different elements of a character or object after the main action has stopped.\u003C/p>\u003Cp>For example, when an animated character stops running suddenly, their long hair might move forward briefly before settling.\u003C/p>\u003Chr>\u003Ch2 
id=\"whats-overlapping-action\">\u003Cstrong>What's Overlapping Action\u003C/strong>\u003C/h2>\u003Cp>When a character runs, their hair or clothing moves as well. The slight delay or variation in the timing of different parts of a character or object as they move is called overlapping action.\u003C/p>\u003Chr>\u003Ch2 id=\"why-this-principle-is-important\">\u003Cstrong>Why This Principle Is Important\u003C/strong>\u003C/h2>\u003Cp>Follow-through and overlapping action communicate the weight and mass of objects or characters, giving the animation a sense of gravity and inertia.\u003C/p>\u003Cp>These principles introduce natural delays and fluidity to movements, making animations less mechanical.\u003C/p>\u003Cp>How a character moves reveals volumes about their emotional state, and you can also use overlap to build suspense or highlight key moments within a scene.\u003C/p>\u003Chr>\u003Ch2 id=\"1-prioritize-weight-and-momentum\">\u003Cstrong>1. Prioritize Weight And Momentum\u003C/strong>\u003C/h2>\u003Cp>To accurately portray weight and momentum, the animator must first understand how different objects and characters react under gravity, inertia, and other forces.\u003C/p>\u003Cp>When a character jumps, the action doesn't simply conclude when the character lands. Instead, their body parts, hair, or loose clothing will continue to move, propelled by the momentum generated during the leap. This lingering motion is a perfect opportunity for animators to emphasize the downward force exerted during the landing. It can be observed in how a character's hair bounces or how loose garments flutter and settle gradually after the movement has appeared to conclude.\u003C/p>\u003Chr>\u003Ch2 id=\"2-use-layered-animation-techniques\">\u003Cstrong>2. 
Use Layered Animation Techniques\u003C/strong>\u003C/h2>\u003Cp>Layered animation involves separating different aspects of motion into distinct layers that can be individually manipulated, like primary and secondary motion, to make it easier to adjust overlaps.\u003C/p>\u003Cp>Animators should start with major body movements, like those of the torso: the torso serves as the central axis of a character's body. It is often the source of primary motions, so establishing the motion of the torso first creates a solid foundation upon which all other actions can be realistically based. Once the primary motion is in place, animators can add secondary movements, including overlapping actions.\u003C/p>\u003Cp>Once the main motion has been animated, attention can then shift to additional follow-through animations. By addressing these components separately, animators can ensure that each movement flows naturally and transitions smoothly from one phase to the next.\u003C/p>\u003Cp>You can also add accessories like hats, necklaces, and other props to provide the animation an extra layer of depth.\u003C/p>\u003Chr>\u003Ch2 id=\"3-use-reference-footage\">\u003Cstrong>3. Use Reference Footage\u003C/strong>\u003C/h2>\u003Cp>One effective strategy for mastering this principle is using reference footage: analyzing and breaking down live-action reference footage allows animators to grasp nuanced motion details that might be challenging to visualize through imagination alone.\u003C/p>\u003Cp>Reference footage provides fine details that may be overlooked without visual aids yet significantly convey a sense of realism. For example, how a dancer’s body continues to move fluidly after a jump or how an actor’s hair follows their head’s motion provides invaluable insights into creating lifelike follow-through in animation. 
By observing how these movements occur in reality, animators can replicate or even exaggerate them to increase their impact.\u003C/p>\u003Cp>When animating sports scenes, studying slow-motion videos of athletes in action is particularly beneficial. By dissecting these sequences frame by frame, animators can better understand how the body’s various parts contribute to a cohesive motion.\u003C/p>\u003Cp>The same goes for complex facial expressions. The face has 43 muscles controlling expressions, each with overlapping actions and sometimes follow-through animations (e.g., shaking eyes aftershock).\u003C/p>\u003Chr>\u003Ch2 id=\"4-integrate-secondary-animation-early\">\u003Cstrong>4. Integrate Secondary Animation Early\u003C/strong>\u003C/h2>\u003Cp>Plan and integrate secondary actions during the initial stages of animation to maintain consistency.\u003C/p>\u003Cp>By doing so, animators can prevent potential disconnects between primary and secondary actions to avoid mismatched motions that often require extensive corrections, thus reducing time spent making revisions.\u003C/p>\u003Cp>For example, animators can incorporate likely follow-through and overlapping actions when proposing initial sketches or storyboards.\u003C/p>\u003Cp>Consider a scene involving a group of birds taking flight. In the initial planning stages, it's vital to visualize how each bird's wings, tails, and bodies will move independently yet harmoniously as they soar into the air. Sketches might include the subtle sway of feathers lagging behind the wing movements or the natural bobbing of their bodies. Presenting these considerations early ensures the team can build on a solid foundation.\u003C/p>\u003Chr>\u003Ch2 id=\"5-optimize-timing-for-narrative-impact\">\u003Cstrong>5. Optimize Timing for Narrative Impact\u003C/strong>\u003C/h2>\u003Cp>Effective timing controls the rhythm and conveys the appropriate energy or emotion in a scene. 
Similarly, you must adjust timing within follow-through and overlaps to enhance storytelling.\u003C/p>\u003Cp>In a suspenseful scene where a character stealthily enters a dimly lit room, the timing of follow-through in actions such as a swinging door becomes crucial. By extending the animation of the door's swing, animators can build anticipation and create an atmosphere of tension. As the door slowly comes to a halt with a prolonged creak, the viewer senses the character's cautious entry, heightening the suspense of the follow-through of the door opening.\u003C/p>\u003Chr>\u003Ch2 id=\"6-implement-advanced-rigging-solutions\">\u003Cstrong>6. Implement Advanced Rigging Solutions\u003C/strong>\u003C/h2>\u003Cp>A rig is\u003Ca href=\"https://blog.cg-wire.com/rigging-in-animation/\"> \u003Cu>the skeletal structure or control system within a digital character model\u003C/u>\u003C/a> that animators use to create movement. It acts as an intermediary between the animator and the 3D model, allowing for the manipulation of the model with greater ease and precision.\u003C/p>\u003Cp>A robust rig can significantly reduce the workload and complexity of animating expressive follow-through and overlapping actions, so you should design and use rigs accordingly.\u003C/p>\u003Cp>Without a well-constructed rig, animators may find themselves bogged down in the minutiae of manually adjusting every component of a character's movement, leading to inefficient use of time and potential inconsistencies in animation.\u003C/p>\u003Cp>For example, create a custom rig with dynamic Inverse Kinematics (IK) and Forward Kinematics (FK) switch controls to handle the fluid motion of a character's tail without constant manual adjustments:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Define the Structure\u003C/strong> - Begin by identifying the major parts of the character's tail that will require movement and flexibility—often breaking them down into several segments or \"bones\" within your 3D software. 
Each segment should be able to move independently yet remain connected to form a cohesive whole.\u003C/li>\u003Cli>\u003Cstrong>Set Up IK Controls\u003C/strong> - Implement Inverse Kinematics (IK) for the tail to allow animators to move the end of the tail, and have the rest of the segments follow accordingly. This is useful for quickly positioning the tail, especially when it interacts with other objects or needs to maintain contact with a surface.\u003C/li>\u003Cli>\u003Cstrong>Configure FK Controls\u003C/strong> - Alongside IK, establish Forward Kinematics (FK) controls that give animators the ability to rotate each segment of the tail independently. This is essential for fine-tuning arcs and adding natural, flowing movements.\u003C/li>\u003Cli>\u003Cstrong>Create a Switching System\u003C/strong> - Provide a seamless transition between IK and FK controls by incorporating an IK/FK switch within the rig. This switch allows animators to toggle between the two systems depending on what is needed for a particular action, combining the strengths of both techniques for optimal tail animation.\u003C/li>\u003Cli>\u003Cstrong>Add Dynamic Features\u003C/strong> - Enhance the rig with dynamics or secondary motion control systems that can simulate natural physics and add an extra layer of realism to the tail movement, like auto-sway or bounce features that react to the character's primary movements.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Adding follow-through and overlapping actions can be complex, but the result is worth it! 
Make sure to follow best practices to ease your work:\u003C/p>\u003Cul>\u003Cli>Be aware of weight and momentum\u003C/li>\u003Cli>Use layers\u003C/li>\u003Cli>Plan secondary actions\u003C/li>\u003Cli>Use reference footage\u003C/li>\u003Cli>Leverage timing for emotional impact\u003C/li>\u003Cli>Advanced rigs\u003C/li>\u003C/ul>\u003Cp>Animation is not just about the story, it's also about how you tell it with subtle details: that's the huge difference between boring and great animation. Follow-through and overlapping actions are key in this regard, so don't neglect them!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1097,"comment_id":1098,"feature_image":1099,"featured":105,"visibility":10,"created_at":1100,"updated_at":1101,"custom_excerpt":1102,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1103,"primary_tag":1104,"url":1105,"excerpt":1102,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1106},"597abdcb-2891-4fef-a2cb-926e7805ff9b","67c92446c288b6000147a817","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/03/Follow-Through-and-Overlapping-Action-3.jpg","2025-03-06T05:27:50.000+01:00","2026-03-26T10:34:19.000+01:00","Follow-through & overlapping action bring realism and fluidity to animation. From hair swaying after a stop to clothing trailing behind movement, these principles add weight and believability. 
Learn how to apply them effectively in our latest article!",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/follow-through-overlapping-action/","\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Animost Studio\u003C/em>\u003C/i>","/posts/follow-through-overlapping-action","2025-03-10T10:00:12.000+01:00",{"title":1092},"follow-through-overlapping-action","posts/follow-through-overlapping-action",[1113],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"xYsd6kHNHZx6HvEwrhJb3iNVqvXzBdVoPHR1mvkjiU8",{"id":1116,"title":1117,"authors":1118,"body":7,"description":7,"extension":8,"html":1120,"meta":1121,"navigation":14,"path":1133,"published_at":1134,"seo":1135,"slug":1136,"stem":1137,"tags":1138,"__hash__":1140,"uuid":1122,"comment_id":1123,"feature_image":1124,"featured":105,"visibility":10,"created_at":1125,"updated_at":1126,"custom_excerpt":1127,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1128,"primary_tag":1129,"url":1130,"excerpt":1127,"reading_time":1131,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,
"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1132},"ghost/posts:squash-stretch-principle.json","(2026) Mastering the Squash & Stretch Principle in Animation",[1119],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🐶\u003C/div>\u003Cdiv class=\"kg-callout-text\">Squash &amp; Stretch isn’t just for cartoons—it’s the secret to making animations feel natural, expressive, and full of life!\u003C/div>\u003C/div>\u003Cp>When people say an animation looks \"\u003Cstrong>cartoony\u003C/strong>,\" they often mean that animators use exaggerated squash and stretch effects throughout.\u003C/p>\u003Cp>Animators call squash and stretch the most important of the 12 principles because it's such an iconic characteristic of Western animation, though it's pretty simple in practice―just imagine the object or character has the physical properties of a rubber ball! Every action or force applied to a character should slightly deform it before it regains its original shape.\u003C/p>\u003Cp>I can list at least three reasons why squash and stretch is such an important principle to master, even with more realistic animations.\u003C/p>\u003Chr>\u003Ch2 id=\"why-squash-stretch\">\u003Cstrong>Why Squash &amp; Stretch\u003C/strong>\u003C/h2>\u003Cp>The squash and stretch principle helps convey the mass and weight of objects and characters to make movements look more grounded in physics: as a ball hits the ground, it should squash to show the impact force and compression under its weight. As the ball rebounds, it should stretch to convey the speed and energy of the bounce. 
This tells the audience a lot about the object's properties—whether it is heavy or light, rigid or flexible.\u003C/p>\u003Cp>Animators also use squash and stretch to animate emotions. When a character is surprised, their eyes widen, and their mouth stretches open. But when a character is sad, their face squashes inward with drooping eyelids and a downturned mouth. These exaggerated facial features communicate the character’s emotional state to the audience without needing to explain anything.\u003C/p>\u003Cp>Even in more realistic animations, squash and stretch contribute to the quality of an animation by preventing it from appearing too stiff or mechanical. When a person runs, their muscles and skin will naturally squash and stretch. The head will also bob from side to side like a bouncing ball.\u003C/p>\u003Chr>\u003Ch2 id=\"preserving-volume\">\u003Cstrong>Preserving Volume\u003C/strong>\u003C/h2>\u003Cp>When an object or character moves, its volume should stay consistent. But its shape can change to reflect forces like gravity and momentum.\u003C/p>\u003Cp>As a rubber ball hits the ground and squashes, it should get wider, not just flatter, to maintain the same overall mass. When it stretches as it rebounds, it should become taller without losing mass. To keep this in check, you need to constantly visualize the object's volume as a fixed quantity that reshapes but doesn’t vanish.\u003C/p>\u003Cp>Guides can be incredibly helpful to ensure that your character or object retains its volume during transformations.\u003C/p>\u003Cp>For example,\u003Ca href=\"https://blog.cg-wire.com/character-shape-language/\"> \u003Cu>basic volume shapes like cubes, spheres, or cylinders can be used to serve as anchors\u003C/u>\u003C/a> while animating. These guides can be adjusted along the animation path to assist you in maintaining proper proportions.\u003C/p>\u003Cp>A classic exercise is to animate a bag of flour or water balloon that is being bounced around and distorted. 
Animators focus on how the mass inside the object flows and reshapes.\u003C/p>\u003Chr>\u003Ch2 id=\"timing-and-spacing\">\u003Cstrong>Timing and Spacing\u003C/strong>\u003C/h2>\u003Cp>Timing is how long an action takes, while spacing is where the object is placed from frame to frame.\u003C/p>\u003Cp>When working with squash and stretch, timing and spacing are adjusted to reflect the object's characteristics like speed and weight: a quick, sharp stretch suggests speed, while a slower, smaller squash indicates a heavy object.\u003C/p>\u003Cp>As a rule of thumb, heavier objects require less squash and stretch and move with quick timing, while lighter objects need more exaggerated distortions and slower movements.\u003C/p>\u003Cp>The amount of squash and stretch should also reflect the object's material properties. Even if they have the same mass, a rubber ball would show significant squash and stretch, while a bowling ball would barely change shape.\u003C/p>\u003Cp>Squash occurs when there's a force or contact with another surface. Stretch typically occurs before and after the contact during anticipation and follow-through, respectively. The transitions need to be smooth to pull off the effect effectively. Example for a character jump:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Anticipation\u003C/strong> - Add a few frames where your character crouches to load energy before your character jumps. 
The squashing of the leg can serve as a visual cue to the viewer that a big action is about to happen.\u003C/li>\u003Cli>\u003Cstrong>Squash and stretch\u003C/strong> - As they leap, use the stretch in the upward motion to emphasize the speed and direction of the movement.\u003C/li>\u003Cli>\u003Cstrong>Follow-through\u003C/strong> - When they land, use squash as they bend their knees to cushion the impact and naturally lead into a stand.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"a-tool-for-expression\">\u003Cstrong>A Tool For Expression\u003C/strong>\u003C/h2>\u003Cp>As previously mentioned, the squash and stretch principle is great at\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\"> \u003Cu>expressing emotions and personalities\u003C/u>\u003C/a> effectively by exaggerating movements: different levels of compression and elasticity reflect different emotional and mental states.\u003C/p>\u003Cp>A character that is feeling excited or joyful could be depicted almost as light as a feather, with more stretch in the animation.\u003C/p>\u003Cp>A heavy, grumpy character might use squash and stretch more in the squash side when they move, with minimal stretch to indicate their heavy, sluggish personality by keeping most of their body condensed and low to the ground.\u003C/p>\u003Cp>Anime aesthetics heavily leverage temporary facial exaggeration to emphasize sudden emotional changes to draw the viewer's attention, most notably by squashing or stretching the eyes and the mouth.\u003C/p>\u003Chr>\u003Ch2 id=\"usage-in-animation-cycles\">\u003Cstrong>Usage in Animation Cycles\u003C/strong>\u003C/h2>\u003Cp>Movements rarely progress linearly, so animators experiment with different easing curves in their animation software to see what best matches the timing of the movement they're aiming for. 
For example, you have to display more energy at the beginning of the sprint when a character runs, so the animator would add more squash and stretch within a shorter time.\u003C/p>\u003Cp>Talking about running, balance is crucial in loop animations (cycles): too much repetition leads to monotony so you need to introduce variation.\u003C/p>\u003Cp>Minor tweaks in the squash and stretch timing or scale for different cycles add a lot to the quality of the animation. It's as simple as changing the speed of a step, a posture, or the amplitude of squash and stretch. For example:\u003C/p>\u003Cfigure class=\"kg-card kg-embed-card\">\u003Ciframe width=\"200\" height=\"113\" src=\"https://www.youtube.com/embed/neiPpkXD7F8?feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" allowfullscreen=\"\" title=\"Animating a Heavy Run With ProRigs - Head Squash And Stretch\">\u003C/iframe>\u003C/figure>\u003Cp>Pay particular attention to how the body compresses and elongates naturally. The body's weight shifts with each step in a walk cycle. As the character's foot touches the ground, introduce a slight squash to convey impact. As the character pushes off the ground, you can slightly stretch the leading leg to show propulsion.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Mastering the squash and stretch principle is key for animators, whether you're aiming for a cartoony or realistic aesthetic.\u003C/p>\u003Cp>It's a cornerstone of animation because it allows animators to convey not only the physical properties of objects like weight and speed but also the emotional states of characters through visual cues. 
You just have to be mindful of volume, timing, and applied forces to bridge the gap between art and physics.\u003C/p>\u003Cp>Combined with the other 11 principles of animation, squash, and stretch is a great way to level up your animation!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1122,"comment_id":1123,"feature_image":1124,"featured":105,"visibility":10,"created_at":1125,"updated_at":1126,"custom_excerpt":1127,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1128,"primary_tag":1129,"url":1130,"excerpt":1127,"reading_time":1131,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1132},"ea190022-05c6-4191-ac6f-618a6f23f954","67aad70be95d410001686289","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/squash-and-stretch.webp","2025-02-11T05:50:19.000+01:00","2026-03-26T10:42:37.000+01:00","Squash & Stretch isn’t just for cartoons—it’s key to making animation feel natural and dynamic. 
Learn how this principle adds weight, flexibility, and believability to motion in our latest article.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/squash-stretch-principle/",4,"\u003Ci>\u003Cb>\u003Cstrong class=\"italic\" style=\"white-space: pre-wrap;\">Source: Tom's Game Design Blog\u003C/strong>\u003C/b>\u003C/i>","/posts/squash-stretch-principle","2025-03-03T10:00:27.000+01:00",{"title":1117},"squash-stretch-principle","posts/squash-stretch-principle",[1139],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"LIy8IGrUz-hYlPOMf0erQAHITfSJ82IiQwWkCkIcjug",{"id":1142,"title":1143,"authors":1144,"body":7,"description":7,"extension":8,"html":1146,"meta":1147,"navigation":14,"path":1158,"published_at":1159,"seo":1160,"slug":1161,"stem":1162,"tags":1163,"__hash__":1165,"uuid":1148,"comment_id":1149,"feature_image":1150,"featured":105,"visibility":10,"created_at":1151,"updated_at":1152,"custom_excerpt":1153,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1154,"primary_tag":1155,"url":1156,"excerpt":1153,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"
twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1157},"ghost/posts:anticipation-principle.json","Anticipation in Animation (2026): The Key to Impactful Motion",[1145],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🏃\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Great animation isn’t just about movement—it’s about setting the stage for it. Anticipation is the key to making actions feel natural, impactful, and engaging!\u003C/strong>\u003C/b>\u003C/div>\u003C/div>\u003Cp>If you animate a character punching another, you don't just go straight to punching: you let the character time to frown, send their arm back, and then let them hit their target.\u003C/p>\u003Cp>That's building up \u003Cstrong>anticipation\u003C/strong>, which makes the punching so satisfying!\u003C/p>\u003Cp>Animators have a term for it: the \u003Cstrong>\u003Cu>anticipation principle\u003C/u>\u003C/strong>. They use it to prepare the audience for an action and maximize its impact.\u003C/p>\u003Cp>Whether it's jumping, initiating dialogue, or fending foes, your animation can benefit from it.\u003C/p>\u003Cp>This article explores why anticipation is important and how to make the most of it to elevate your animation skills in an actionable way. Enjoy!\u003C/p>\u003Chr>\u003Ch2 id=\"why-this-principle-is-important\">\u003Cstrong>Why This Principle Is Important\u003C/strong>\u003C/h2>\u003Cp>Anticipation prepares the audience for what is about to happen to make an action more understandable. 
Hinting at the upcoming movement guides the viewer's attention to reduce confusion. A character preparing to jump would crouch down slightly before leaping into the air, for example. This crouching motion signals to the audience that the character is about to jump, setting up the expectation.\u003C/p>\u003Cp>In real life, the\u003Cstrong> \u003C/strong>most significant actions involve preparatory movements that help in building the necessary momentum and power. For this reason, anticipation echoes the natural physics and mechanics of real-world actions, which is key for realism.\u003C/p>\u003Cp>Different styles of anticipation can give\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\"> \u003Cu>insights into a character's mood or personality\u003C/u>\u003C/a>. A mischievous character ready to sneak behind another character could look side to side, grin slyly, and slowly bend their knees. Anticipation provides animators with an opportunity to express subtle cues that add depth to the character while making any animation interesting to look at.\u003C/p>\u003Chr>\u003Ch2 id=\"the-basics\">\u003Cstrong>The Basics\u003C/strong>\u003C/h2>\u003Cp>Anticipation acts as a setup for the main action in animation, which can be broken down into three elements:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>The setup\u003C/strong> - Introducing subtle movements that signal a forthcoming major action.\u003C/li>\u003Cli>\u003Cstrong>The build-up\u003C/strong> - Increasing tension and directing viewer focus.\u003C/li>\u003Cli>\u003Cstrong>The action\u003C/strong> - Delivering the anticipated movement or event.\u003C/li>\u003C/ul>\u003Cp>This is especially important for key scenes.\u003C/p>\u003Cp>In Vinland Saga, the twist when Askeladd meets the king is beautifully anticipated \u003Cstrong>(spoiler alert 🚨)\u003C/strong>.\u003C/p>\u003Cp>The scene begins with the setup where the king thanks his benefactors, including Askeladd.\u003C/p>\u003Cp>Then, a series of actions 
builds up to a climax. First, the king asks Askeladd to kill Canute, then Askeladd reverses the power balance, going from being submissive to threatening, and then the king orders his men to kill Askeladd.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXfwhjdsWZ9sCb-axj7hUtTOGVfM88sx1WDy3PR2SCliNHCQPQ68IVbcMJq5-0N5ysGGTKkCLmwJawJYp1hE-5bG_4T5NFZoYrBBwJys0P89T3EgSUI0cwHy2wYxeoHEaj0TgogQ7g.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"782\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/02/AD_4nXfwhjdsWZ9sCb-axj7hUtTOGVfM88sx1WDy3PR2SCliNHCQPQ68IVbcMJq5-0N5ysGGTKkCLmwJawJYp1hE-5bG_4T5NFZoYrBBwJys0P89T3EgSUI0cwHy2wYxeoHEaj0TgogQ7g.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/02/AD_4nXfwhjdsWZ9sCb-axj7hUtTOGVfM88sx1WDy3PR2SCliNHCQPQ68IVbcMJq5-0N5ysGGTKkCLmwJawJYp1hE-5bG_4T5NFZoYrBBwJys0P89T3EgSUI0cwHy2wYxeoHEaj0TgogQ7g.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXfwhjdsWZ9sCb-axj7hUtTOGVfM88sx1WDy3PR2SCliNHCQPQ68IVbcMJq5-0N5ysGGTKkCLmwJawJYp1hE-5bG_4T5NFZoYrBBwJys0P89T3EgSUI0cwHy2wYxeoHEaj0TgogQ7g.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Finally, we arrive at the action of Askeladd beheading the king, effectively altering the fate of all main characters in the show.\u003C/p>\u003Cp>How you do your anticipation should match the tone and style of your animation. 
A cartoonish character might have exaggerated and dramatically long anticipations, while in a thriller, you leverage more sounds, camera movements, and subtle character cues like a sweating face or facial expressions.\u003C/p>\u003Cp>As we'll see in other sections, anticipation also leverages animation principles like exaggeration, timing, and staging to maximize its impact.\u003C/p>\u003Chr>\u003Ch2 id=\"physical-and-emotional-anticipation\">\u003Cstrong>Physical and Emotional Anticipation\u003C/strong>\u003C/h2>\u003Cp>Anticipation isn't solely used for physical movement.\u003C/p>\u003Cp>Emotional anticipation operates similarly but relies on more subtle cues. For example, a character on the verge of tears or laughter showcases a gradual build-up of emotion on their face—like a quivering lip or a glimmer in their eye—preparing the audience for the emotional release.\u003C/p>\u003Cp>In this context, anticipation involves a character's facial expressions gradually changing or their body language shifting to convey feelings.\u003C/p>\u003Cp>Animators can also leverage the scene's environment to create anticipation. In the opening scene of Ninja Kamui, animators use bird-view camera angles, the train rails animation, and fog effects to signify the impending arrival of a group of ninjas in pursuit of the featured character.\u003C/p>\u003Cp>The strength of anticipation lies in these subtle changes. 
As animators, we observe these real-life cues and bring them to the screen to create compelling characters.\u003C/p>\u003Chr>\u003Ch2 id=\"using-exaggeration\">\u003Cstrong>Using Exaggeration\u003C/strong>\u003C/h2>\u003Cp>Exaggeration is a powerful tool in an animator's toolbox, especially when applied to anticipation.\u003C/p>\u003Cp>Pushing the boundaries of a preparatory action by exaggerating its amplitude, like the depth of a character's crouch before a jump, significantly enhances the drama of the resulting action.\u003C/p>\u003Cp>A right balance between subtlety and excess is key, though: too much exaggeration can transform a nuanced build-up into a comedic gesture (unless comedy is the desired effect). The key is to maintain a hint of realism within the exaggeration.\u003C/p>\u003Cp>In Naruto, the infamous Naruto vs Pain fight has great examples of exaggeration going overboard, including some anticipatory actions of Pain before he throws a punch:\u003C/p>\u003Cp>The resulting animation can be considered good because it adds speed to the fight, but the facial expression doesn't fit the stoic nature of the character.\u003C/p>\u003Chr>\u003Ch2 id=\"the-importance-of-timing\">\u003Cstrong>The Importance of Timing\u003C/strong>\u003C/h2>\u003Cp>Timing affects the speed of movement and shapes a scene's emotional impact.\u003C/p>\u003Cp>Similarly, the way anticipation is timed has dramatic effects: a slow, lingering build-up creates tension or suspense, while a quick build-up conveys urgency or surprise.\u003C/p>\u003Cp>By adjusting the number of frames allocated to anticipation versus the actual action, animators can experiment with different pacing. 
Dedicating more frames to anticipation creates a sense of looming inevitability while using fewer frames results in an explosive, dynamic quality.\u003C/p>\u003Cp>Varying the spacing between frames also creates an illusion of acceleration or deceleration, adding intensity and weight to the movement.\u003C/p>\u003Chr>\u003Ch2 id=\"staging\">\u003Cstrong>Staging\u003C/strong>\u003C/h2>\u003Cp>Staging is another incredibly important animation principle that builds anticipation. You need to\u003Ca href=\"https://blog.cg-wire.com/animation-layout/\"> \u003Cu>set up the scene\u003C/u>\u003C/a> so that the audience's attention is directed where it needs to be, and you can use it to highlight anticipatory actions and guide viewers to observe important details.\u003C/p>\u003Cp>Camera angles and character positioning emphasize expected movements. A close-up shot can draw attention to a subtle yet significant facial expression, while a wide shot can showcase the full action taking place.\u003C/p>\u003Cp>The background and setting also play a role: simplifying the elements around an action can minimize distractions and keep the viewer focused on the primary movement.\u003C/p>\u003Cp>The way you stage each scene can enhance or weaken the sense of anticipation.\u003C/p>\u003Chr>\u003Ch2 id=\"reverse-anticipation\">\u003Cstrong>Reverse-anticipation\u003C/strong>\u003C/h2>\u003Cp>There are also cases of reverse anticipation where the setup and build-up steps occur after the action has taken place, and you animate the reveal of its consequences.\u003C/p>\u003Cp>For example, a character is stabbed by surprise, you see his face contort in pain, but you're not sure what happened, and then you see blood dripping on the floor before the reveal.\u003C/p>\u003Cp>Reverse anticipation is a powerful tool to create a sense of surprise or shock by first depicting the consequences, prompting the audience to mentally fill in or anticipate the cause of the event.\u003C/p>\u003Cp>The scene where 
Ace dies protecting Luffy in One Piece is a great example. We only processed what happened after a few anticipatory frames:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXfs0EB_ObDT5XIdjHlxp-vKE1Kpcjwrnsj-6vcLELKN_2r1YpCJeGG8M7dQCo197k7Q_xcStolDnCn-EoDgDNo3TGQlhQ9qc0DIKBnFWfztvzfVcAVLUD1-shODpjw624eMFlRy-w-1.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"729\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/02/AD_4nXfs0EB_ObDT5XIdjHlxp-vKE1Kpcjwrnsj-6vcLELKN_2r1YpCJeGG8M7dQCo197k7Q_xcStolDnCn-EoDgDNo3TGQlhQ9qc0DIKBnFWfztvzfVcAVLUD1-shODpjw624eMFlRy-w-1.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/02/AD_4nXfs0EB_ObDT5XIdjHlxp-vKE1Kpcjwrnsj-6vcLELKN_2r1YpCJeGG8M7dQCo197k7Q_xcStolDnCn-EoDgDNo3TGQlhQ9qc0DIKBnFWfztvzfVcAVLUD1-shODpjw624eMFlRy-w-1.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXfs0EB_ObDT5XIdjHlxp-vKE1Kpcjwrnsj-6vcLELKN_2r1YpCJeGG8M7dQCo197k7Q_xcStolDnCn-EoDgDNo3TGQlhQ9qc0DIKBnFWfztvzfVcAVLUD1-shODpjw624eMFlRy-w-1.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXeDjaS-PfbAyYlIKiSianVX_cBEr0ZerfGXxPrSRs056GPTCY1hdFEuxQgU6iLiYCfpO2lxk5L_JUsgTzNCIp72zaeOfz-PYKsI6kw7CEx9jntCXAhyuqIrIq5cpoqZsJ0Eac8VPw.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"756\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/02/AD_4nXeDjaS-PfbAyYlIKiSianVX_cBEr0ZerfGXxPrSRs056GPTCY1hdFEuxQgU6iLiYCfpO2lxk5L_JUsgTzNCIp72zaeOfz-PYKsI6kw7CEx9jntCXAhyuqIrIq5cpoqZsJ0Eac8VPw.png 600w, 
https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/02/AD_4nXeDjaS-PfbAyYlIKiSianVX_cBEr0ZerfGXxPrSRs056GPTCY1hdFEuxQgU6iLiYCfpO2lxk5L_JUsgTzNCIp72zaeOfz-PYKsI6kw7CEx9jntCXAhyuqIrIq5cpoqZsJ0Eac8VPw.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXeDjaS-PfbAyYlIKiSianVX_cBEr0ZerfGXxPrSRs056GPTCY1hdFEuxQgU6iLiYCfpO2lxk5L_JUsgTzNCIp72zaeOfz-PYKsI6kw7CEx9jntCXAhyuqIrIq5cpoqZsJ0Eac8VPw.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXcp1of88VBEhwiQKElXR_UcnHlpfZX2ugVKIgDNGud3Jf6bilAicsiXarI3EjO5DRNIdIcxsjI7nzqJAjlh02mb3W-c5kF_g0u2jUbXMAVUeZAZ3qBUVcku9yFoKgiWrDTWDPtg8w.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"743\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/02/AD_4nXcp1of88VBEhwiQKElXR_UcnHlpfZX2ugVKIgDNGud3Jf6bilAicsiXarI3EjO5DRNIdIcxsjI7nzqJAjlh02mb3W-c5kF_g0u2jUbXMAVUeZAZ3qBUVcku9yFoKgiWrDTWDPtg8w.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/02/AD_4nXcp1of88VBEhwiQKElXR_UcnHlpfZX2ugVKIgDNGud3Jf6bilAicsiXarI3EjO5DRNIdIcxsjI7nzqJAjlh02mb3W-c5kF_g0u2jUbXMAVUeZAZ3qBUVcku9yFoKgiWrDTWDPtg8w.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXcp1of88VBEhwiQKElXR_UcnHlpfZX2ugVKIgDNGud3Jf6bilAicsiXarI3EjO5DRNIdIcxsjI7nzqJAjlh02mb3W-c5kF_g0u2jUbXMAVUeZAZ3qBUVcku9yFoKgiWrDTWDPtg8w.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg 
src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXeGwliFS4EcFdoMPPJnVcC_P04QV1QsqedCnWyCc-LSBImuphwsDo0wxL6Sj56x7cLCGB9vYXTMJfnSFWdVT1x9EXqFT33knb94jP4r-pfFcYfxtG5r-rA8o48Yk4yDu2IWkPnbcw.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"747\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/02/AD_4nXeGwliFS4EcFdoMPPJnVcC_P04QV1QsqedCnWyCc-LSBImuphwsDo0wxL6Sj56x7cLCGB9vYXTMJfnSFWdVT1x9EXqFT33knb94jP4r-pfFcYfxtG5r-rA8o48Yk4yDu2IWkPnbcw.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/02/AD_4nXeGwliFS4EcFdoMPPJnVcC_P04QV1QsqedCnWyCc-LSBImuphwsDo0wxL6Sj56x7cLCGB9vYXTMJfnSFWdVT1x9EXqFT33knb94jP4r-pfFcYfxtG5r-rA8o48Yk4yDu2IWkPnbcw.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/AD_4nXeGwliFS4EcFdoMPPJnVcC_P04QV1QsqedCnWyCc-LSBImuphwsDo0wxL6Sj56x7cLCGB9vYXTMJfnSFWdVT1x9EXqFT33knb94jP4r-pfFcYfxtG5r-rA8o48Yk4yDu2IWkPnbcw.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Anticipation is more than just a precursor to movement―it's' a powerful storytelling tool that adds depth to your animation.\u003C/p>\u003Cp>By combining physical, emotional, and environmental cues and using other animation principles like exaggeration, timing, and staging, animators create incredible scenes that pull the viewers right into their world.\u003C/p>\u003Cp>Make sure to read about the other animation principles in our blog to get a better understanding of how they all fit together.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca 
href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1148,"comment_id":1149,"feature_image":1150,"featured":105,"visibility":10,"created_at":1151,"updated_at":1152,"custom_excerpt":1153,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1154,"primary_tag":1155,"url":1156,"excerpt":1153,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1157},"81725c74-2bf2-4d78-a59a-c685e468e310","67aa9db4e95d410001686220","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/c3b6n-hareket-anticipation.jpg","2025-02-11T01:45:40.000+01:00","2026-03-27T11:02:02.000+01:00","Anticipation is key to making animation feel natural and impactful. 
Learn how this principle guides movement, builds tension, and enhances storytelling.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/anticipation-principle/","\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Pinterest\u003C/em>\u003C/i>","/posts/anticipation-principle","2025-02-17T10:00:57.000+01:00",{"title":1143},"anticipation-principle","posts/anticipation-principle",[1164],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"LITQgFqMKQLOtxKSJ4muiAB6GPbGQtDpOIZM-ZKzgw8",{"id":1167,"title":1168,"authors":1169,"body":7,"description":7,"extension":8,"html":1171,"meta":1172,"navigation":14,"path":1183,"published_at":1184,"seo":1185,"slug":1186,"stem":1187,"tags":1188,"__hash__":1190,"uuid":1173,"comment_id":1174,"feature_image":1175,"featured":105,"visibility":10,"created_at":1176,"updated_at":1177,"custom_excerpt":1178,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1179,"primary_tag":1180,"url":1181,"excerpt":1178,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_des
cription":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1182},"ghost/posts:12-principles-animation.json","The 12 Principles of Animation (2026): A Timeless Guide for Animators",[1170],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🖌️\u003C/div>\u003Cdiv class=\"kg-callout-text\">Mastering animation starts with understanding the \u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">12 core principles\u003C/strong>\u003C/b>\u003C/div>\u003C/div>\u003Cp>In 1981, during the Golden Age of American animation, Disney animators \u003Cstrong>Frank Thomas\u003C/strong> and \u003Cstrong>Ollie Johnston\u003C/strong> published \u003Cem>\"Disney Animation: The Illusion of Life.\"\u003C/em> This book describes the behind-the-scenes of a Disney production, including a quick overview of the character animation process.\u003C/p>\u003Cp>In retrospect, The Illusion of Life was among the first resources to present a framework for animators, which we would later know as the 12 principles of animation.\u003C/p>\u003Cp>Walt Disney Co. is now worth $193 billion, and it all started by creating compelling animated characters!\u003C/p>\u003Cp>In this article, we give you a quick overview of the 12 principles, but you can find\u003Ca href=\"https://blog.cg-wire.com/\"> \u003Cu>more detailed articles on our blog\u003C/u>\u003C/a> about animation principles with additional actionable tips and illustrated examples.\u003C/p>\u003Chr>\u003Ch2 id=\"1-squash-and-stretch\">\u003Cstrong>1. 
Squash and Stretch\u003C/strong>\u003C/h2>\u003Cp>A bouncing rubber ball squashes when it hits the ground, then stretches as it rises back into the air.\u003C/p>\u003Cp>Similarly, squash and stretch is used to exaggerate the actions and emotions of a character, giving it cartoon-like aesthetics.\u003C/p>\u003Cp>Even in more realistic animations, squash and stretch subtly enhance the sense of weight and timing: when a person runs, their limbs, head, and skin will naturally squash and stretch.\u003C/p>\u003Chr>\u003Ch2 id=\"2-anticipation\">\u003Cstrong>2. Anticipation\u003C/strong>\u003C/h2>\u003Cp>Anticipation is the preparation for a significant action. It sets the audience up for what will happen, making the movement more realistic.\u003C/p>\u003Cp>In real life, actions have preparatory movements: a person crouches down before jumping, and it could feel weird if you don't include this anticipation movement.\u003C/p>\u003Cp>Anticipation also guides the audience's attention to where the main action will occur. This is particularly important in scenes with multiple characters or complex backgrounds.\u003C/p>\u003Cp>You can also use anticipation to add drama and increase impact. In a suspenseful moment where a character is about to open a mysterious box, a slow build-up with hesitant movements and a tense pause add suspense.\u003C/p>\u003Chr>\u003Ch2 id=\"3-staging\">\u003Cstrong>3. Staging\u003C/strong>\u003C/h2>\u003Cp>Staging is about placing characters, props, cameras, and lighting in environments for a scene to convey a story. 
It guides the audience's eye to the most important aspects of a scene: you can control what the viewers see and when they see it.\u003C/p>\u003Cp>But staging isn't just about where you place things.\u003C/p>\u003Cp>By arranging characters and their actions purposefully, you make sure the audience understands the context without confusion: if a character is supposed to feel isolated in a crowd, staging them at the edge of a large gathering, looking towards the center, will visually communicate their loneliness. Simple but effective.\u003C/p>\u003Cp>Staging elements like lighting, camera angles, and composition significantly influence the mood of a scene. In a suspenseful scene, for example, a character in a low-lit room with looming shadows creates tension.\u003C/p>\u003Chr>\u003Ch2 id=\"4-straight-ahead-action-and-pose-to-pose\">\u003Cstrong>4. Straight-Ahead Action and Pose-to-Pose\u003C/strong>\u003C/h2>\u003Cp>Straight-ahead action means drawing each frame in sequence, one at a time, from start to finish. This method emphasizes fluid, organic movements for maximum creative exploration―a bit like improvising in music or dance. It's perfect for elements like fire, smoke, or character animations where spontaneity is key.\u003C/p>\u003Cp>On the opposite side, pose-to-pose is all about planning and control. You\u003Ca href=\"https://blog.cg-wire.com/stepped-animation/\"> \u003Cu>start with key poses\u003C/u>\u003C/a> to define major movements or expressions, then fill in with in-betweens. This approach is key for clarity and timing, especially when animating complex scenes that require specific keyframes, like a character performing a dance move. This way, each step or pose is correctly timed and executed to maintain the beat of the dance.\u003C/p>\u003Cp>Most professional animations use a blend of both. Creating key poses first with Pose-to-Pose ensures that the primary story points and actions are clear and effectively conveyed. 
Then, using straight-ahead action between these keys can add fluidity and life to certain movements.\u003C/p>\u003Chr>\u003Ch2 id=\"5-follow-through-and-overlapping-action\">\u003Cstrong>5. Follow Through and Overlapping Action\u003C/strong>\u003C/h2>\u003Cp>Follow-through and overlapping action describe how parts of a character or object continue moving even after a primary action has been completed.\u003C/p>\u003Cp>When characters or objects move, they don't stop all at once. If a character with long hair comes to a sudden stop, the hair will continue moving forward for a bit before settling. This principle mimics the laws of physics and makes the animation feel more alive.\u003C/p>\u003Cp>Overlapping action makes sure that different parts of a character move at different rates to smooth the whole movement. A character throwing a ball would first have their arm accelerate before the hand follows through to release the ball. Overlapping these actions makes the throw more dynamic by breaking the motion into parts.\u003C/p>\u003Cp>How a character moves reveals volumes about their personality or emotional state: a confident character might have a sharp, controlled follow-through, while an indecisive one could have a more jittery, prolonged movement.\u003C/p>\u003Chr>\u003Ch2 id=\"6-slow-in-and-slow-out\">\u003Cstrong>6. Slow In and Slow Out\u003C/strong>\u003C/h2>\u003Cp>As previously mentioned, objects never start or stop moving instantaneously in the real world: a car will gradually pick up speed and then slow down before coming to a halt.\u003C/p>\u003Cp>Slow In and Out replicate this natural acceleration and deceleration to make animations more realistic. The transition looks smoother when the frames are gradually spaced closer together at the start (slow in) and end (slow out) of a movement.\u003C/p>\u003Cp>The pacing of your transition is a great storytelling tool. 
A character who slowly raises their eyebrows before they widen quickly in shock uses this principle to underscore the surprise element.\u003C/p>\u003Chr>\u003Ch2 id=\"7-arc\">\u003Cstrong>7. Arc\u003C/strong>\u003C/h2>\u003Cp>Because of gravity, movements are rarely linear and instead follow curved, arched paths.\u003C/p>\u003Cp>Following arcs in your animations mimic this natural motion. For example, a person's arm swings when they walk following a gentle arc rather than moving up and down in a straight line.\u003C/p>\u003Cp>Arcs are also way more expressive than linear motions. Exaggerated arcs can also serve storytelling purposes,\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\"> \u003Cu>adding to character designs\u003C/u>\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"8-secondary-action\">\u003Cstrong>8. Secondary Action\u003C/strong>\u003C/h2>\u003Cp>Animated movements can be broken down into functional primary actions and aesthetic secondary actions that support the former.\u003C/p>\u003Cp>A character walking could have their hair bouncing or their arms swinging slightly as secondary actions. This adds depth to the animation while making the primary action (walking) appear more natural. In the real world, movements are rarely isolated. Multiple things often happen at once. This variety prevents animations from looking too mechanical.\u003C/p>\u003Cp>Secondary actions can also give additional insight into a character's emotional state. You could have, for example, a character nervously tap their foot while talking. This additional movement provides a clue to the viewer about their anxious state without disrupting the main dialogue. These little touches can really level up your animation.\u003C/p>\u003Chr>\u003Ch2 id=\"9-timing\">\u003Cstrong>9. 
Timing\u003C/strong>\u003C/h2>\u003Cp>Timing determines the speed of an animation, or a part of it, to control its narrative rhythm.\u003C/p>\u003Cp>Timing greatly impacts how an audience perceives a scene. For example, a quick movement conveys that a character is excited, energetic, or perhaps even nervous, while slow movements can suggest a character is relaxed, tired, or feeling depressed.\u003C/p>\u003Cp>Timing also contributes to the realism of motion by obeying physical laws like gravity, momentum, and inertia: the timing of a bouncing ball communicates its weight and material. Like a bowling ball, a heavy ball will have slower bounces with more hang time between each contact with the ground, while a light, bouncy ball, like a beach ball, will have quicker, more frequent bounces.\u003C/p>\u003Cp>Lastly, timing sets the rhythm of a scene to create moments of tension or comedic relief. In a suspenseful scene where a character slowly reaches for a door handle, the extended time before touching the handle builds anticipation and suspense. Comedic timing could involve a series of rapid actions followed by a pause to let the audience absorb the humor.\u003C/p>\u003Chr>\u003Ch2 id=\"10-exaggeration\">\u003Cstrong>10. Exaggeration\u003C/strong>\u003C/h2>\u003Cp>Exaggeration is the principle of stretching reality to add energy and dramatics to an animation.\u003C/p>\u003Cp>In animation, clarity often relies on larger-than-life actions to convey the intended message quickly and effectively. When animating a fast action like a punch, exaggeration depicts powerful movements. By extending the arc, making the first move slightly faster, and adding an unrealistic but impactful follow-through, you can highlight the speed and force of the punch. 
This makes the action more readable for the viewer to understand, even in a fraction of a second.\u003C/p>\u003Cp>Too much realism can remove the fun of an animation, while strategic exaggeration emphasizes emotions and actions―a character experiencing joy should have a broad smile and gleaming eyes! It's an important tool for animators to express themselves.\u003C/p>\u003Chr>\u003Ch2 id=\"11-solid-drawing\">\u003Cstrong>11. Solid Drawing\u003C/strong>\u003C/h2>\u003Cp>Solid drawing emphasizes creating characters and objects that feel three-dimensional.\u003C/p>\u003Cp>For example,\u003Ca href=\"https://blog.cg-wire.com/character-shape-language/\"> \u003Cu>thinking about basic shapes\u003C/u>\u003C/a> like spheres, cubes, and cylinders help form a clear visual structure to maintain consistency in shape and proportion when viewed from different angles.\u003C/p>\u003Cp>This principle involves understanding the basics of drawing, like anatomy, composition, balance, and perspective, to create consistent scenes.\u003C/p>\u003Cp>Solid drawing shines when animators need to create dynamic poses effectively.\u003C/p>\u003Chr>\u003Ch2 id=\"12-appeal\">\u003Cstrong>12. Appeal\u003C/strong>\u003C/h2>\u003Cp>Appeal is about creating animations that grab the audience's attention: regardless of their role as heroes or villains, characters should be engaging.\u003C/p>\u003Cp>Just like a charismatic actor can hold an audience's attention, characters developed with a unique design and personality facilitate the viewer's connection to the story.\u003C/p>\u003Cp>This doesn't necessarily mean that the character has to be cute or pretty―unique quirks or exaggerated features can make a character memorable. 
Shrek is an ogre, not traditionally beautiful, but his personality and design have undeniable appeal.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>The 12 animation principles are foundational best practices―you can find them in any animation project!\u003C/p>\u003Cp>As you acquire more experience, you'll notice that each principle overlaps with the others. For example, it's not possible to master slow in and out without a good sense of timing, or you can't do anticipation without a bit of exaggeration, and so on.\u003C/p>\u003Cp>But the twelve principles are not the end of the journey. They were developed when traditional hand-drawn animation was the dominant form, and we have since developed many new techniques to create art. Eastern animation is a prime example: you can easily guess which is which from glancing at a single frame. Many techniques also originate from cinema, like the extensive use of virtual cameras.\u003C/p>\u003Cp>In any case, it's important for you as an animator not to stick rigidly to these principles and instead find your own workflows and techniques.\u003C/p>\u003Cp>Animation is also not limited to character animation―environments and props are just as important!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. 
We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1173,"comment_id":1174,"feature_image":1175,"featured":105,"visibility":10,"created_at":1176,"updated_at":1177,"custom_excerpt":1178,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1179,"primary_tag":1180,"url":1181,"excerpt":1178,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1182},"5392990b-4ac9-4cd1-bd5a-525d6aa96d29","67a96b1438d33400019cb33f","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/12-principles-of-animation.jpg","2025-02-10T03:57:24.000+01:00","2026-03-26T10:02:20.000+01:00","The 12 principles of animation are the foundation of every great animation. Developed by Disney animators in the 1980s, these timeless techniques bring characters to life with weight, movement, and emotion. 
Learn how squash and stretch, anticipation, and staging can elevate your work!",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/12-principles-animation/","\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Animatron\u003C/em>\u003C/i>","/posts/12-principles-animation","2025-02-11T10:00:07.000+01:00",{"title":1168},"12-principles-animation","posts/12-principles-animation",[1189],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"agY_-cZYYRRa5SP01I8DNm21Yd1lpwFCPhKgUGx6W-Y",{"id":1192,"title":1193,"authors":1194,"body":7,"description":7,"extension":8,"html":1196,"meta":1197,"navigation":14,"path":1208,"published_at":1209,"seo":1210,"slug":1211,"stem":1212,"tags":1213,"__hash__":1215,"uuid":1198,"comment_id":1199,"feature_image":1200,"featured":105,"visibility":10,"created_at":1201,"updated_at":1202,"custom_excerpt":1203,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1204,"primary_tag":1205,"url":1206,"excerpt":1203,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_descr
iption":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1207},"ghost/posts:solid-drawing.json","Mastering Solid Drawing (2026): A Core Animation Principle",[1195],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">✏️\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Solid drawing is the ability to make forms appear three-dimensional.\u003C/strong>\u003C/b>\u003C/div>\u003C/div>\u003Cp>If you're animating a bouncing ball, it's tempting to draw a circle in the first frame and continue with that shape in each frame as it rises and falls.\u003C/p>\u003Cp>To apply the solid drawing principle, you would think about how the ball changes shape as it moves: at the point of impact, the ball squashes slightly to show weight and impact before stretching as it rebounds. As the ball moves towards or away from the viewer, the animator adjusts its shape slightly to show distance, with the edges tapering inwards when it's further away.\u003C/p>\u003Cp>But bouncing balls aren't all there is to animation: you need a few guiding principles to apply the same thinking to other types of animation. 
That's what we provide in this article.\u003C/p>\u003Chr>\u003Ch2 id=\"why-solid-drawing\">\u003Cstrong>Why Solid Drawing?\u003C/strong>\u003C/h2>\u003Cp>Animation is all about optical illusions, and at its core are the 12 principles of animation—a set of guidelines that animators have used for decades to create believable animation.\u003C/p>\u003Cp>Among these principles, solid drawing creates a well-defined sense of three-dimensionality within two-dimensional frames to engage viewers.\u003C/p>\u003Cp>A crucial aspect of solid drawing is achieving a well-defined silhouette for characters and objects.\u003C/p>\u003Cp>In animation, silhouettes play a significant role in making a scene visually readable. They help distinguish different characters and objects from one another at a glance. Without a strong silhouette, characters can appear flat, or hard to distinguish from their backgrounds.\u003C/p>\u003Cp>Solid drawing also helps animators draw characters consistently as they move through different poses and perspectives without losing what makes their design unique.\u003C/p>\u003Chr>\u003Ch2 id=\"1-the-basics\">\u003Cstrong>1. The Basics\u003C/strong>\u003C/h2>\u003Cp>Solid drawing comes down to several foundational concepts. The first one is to understand the basic terms that define a drawing:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Form\u003C/strong> - For an animator, bringing a character or object to life starts with turning the basic shapes into forms with depth. Form refers to the three-dimensional nature of an object: by visualizing characters as three-dimensional forms like spheres, cubes, and cylinders, they can maintain consistency across different poses.\u003C/li>\u003Cli>\u003Cstrong>Volume\u003C/strong> - Volume is about how much space an object occupies. 
It prevents characters from appearing flat and inconsistent when moving in the animated world―an object must retain its volume as it moves through space, bending or twisting without awkward stretching or shrinking.\u003C/li>\u003Cli>\u003Cstrong>Proportions\u003C/strong> - Proportion is about relationships—the relative sizes between different parts of an object or character. Getting proportions right is key to producing balanced, natural, and relatable animations.\u003C/li>\u003Cli>\u003Cstrong>Perspective\u003C/strong> - Perspective creates an illusion of depth and space, giving the audience a sense of where objects and characters are positioned. It allows animators to shift viewpoints, guide the audience's focal point, and enhance the storytelling of the scene.\u003C/li>\u003Cli>\u003Cstrong>Shape\u003C/strong> - Shapes are the 2D version of forms. Ellipses, rectangles, triangles, and lines convey emotions, define characters, and establish visual themes. Look at our dedicated article on shape language to learn more about their complex usage.\u003C/li>\u003C/ul>\u003Cp>After practicing these basics, an animator will move on to more complex concepts to make their drawings more solid.\u003C/p>\u003Chr>\u003Ch2 id=\"2-line-of-action\">\u003Cstrong>2. Line of Action\u003C/strong>\u003C/h2>\u003Cp>The line of action is the invisible line that guides a pose's primary axis and flow.\u003C/p>\u003Cp>Every hero leaping through the air or villain slinking through shadows has one thing in common: a powerful line of action. This line dictates their movement's energy and direction, like the pose's backbone.\u003C/p>\u003Cp>The line of action isn't arbitrary―its direction and shape add meaning to a scene while guiding the viewer's attention.\u003C/p>\u003Cp>It's a tool for clarity in complex scenes and a way to add dramatic effects. 
The line of action also facilitates smoother transitions between poses by suggesting key frames.\u003C/p>\u003Chr>\u003Ch2 id=\"3-balance\">\u003Cstrong>3. Balance\u003C/strong>\u003C/h2>\u003Cp>One of the core concepts in achieving balanced drawings is avoiding symmetry, often called twinning―the unintentional mirroring of elements that would look stiff and unnatural in the real world.\u003C/p>\u003Cp>A character standing with arms down their sides perfectly symmetrically would look strange.\u003C/p>\u003Cp>Instead, animators add a touch of asymmetry, using strong silhouettes, twists, and counterposes. They would, for example, vary the angles of the arms or shift the weight to one leg.\u003C/p>\u003Cp>A strong silhouette communicates the character's actions clearly and quickly. When designing poses, animators reduce their characters to a simple, recognizable shape. The pose has a strong silhouette if the action is easily interpreted, even in its most simplified form devoid of details.\u003C/p>\u003Cp>Incorporating twists and counterposes in your drawings introduces dynamic motion and a sense of energy.\u003C/p>\u003Cp>A twist is when different body parts face different directions, like the torso turning one way while the hips face another, to reflect better how the human body naturally moves.\u003C/p>\u003Cp>Similarly, counterposes involve positioning the hips and shoulders to face opposite directions.\u003C/p>\u003Chr>\u003Ch2 id=\"4-turnarounds\">\u003Cstrong>4. 
Turnarounds\u003C/strong>\u003C/h2>\u003Cp>A turnaround is a drawing of a character as it rotates 360 degrees to provide a comprehensive view from multiple angles.\u003C/p>\u003Cp>Typically, a turnaround includes front, side, back, and three-quarter views to help animators maintain consistency throughout production.\u003C/p>\u003Cp>Design discrepancies can detract from the viewer's experience, so keeping width, height, and design details similar is important.\u003C/p>\u003Cp>Creating a turnaround also forces the artist to understand every facet of the character, which implies grappling with how various body parts interact in different poses and angles.\u003C/p>\u003Cp>In an animation studio, a well-made turnaround is a reliable reference for the entire animation team. It streamlines the production process by allowing different animators to work on the same character without needing constant guidance or corrections. Retakes and redesigns are costly, so it's best to avoid those.\u003C/p>\u003Cp>An often-quoted example is\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\"> \u003Cu>the character design sheets\u003C/u>\u003C/a> used in major animation studios, where each character undergoes extensive turnaround drawings to solidify their design.\u003C/p>\u003Chr>\u003Ch2 id=\"5-posing-life-drawing\">\u003Cstrong>5. Posing / Life Drawing\u003C/strong>\u003C/h2>\u003Cp>Another way to get better at solid drawing is to practice life drawing.\u003C/p>\u003Cp>Life drawing is sketching live models to capture the human form quickly and accurately. 
It typically involves short, timed poses known as gesture drawing, where the artist rapidly captures the model's form.\u003C/p>\u003Cp>The emphasis here is on speed and fluidity rather than precision, so you're forced to apply solid drawing concepts.\u003C/p>\u003Cp>It's an excellent way to train an animator's eye and hand to capture the essence of form, motion, and weight distribution: you have to distill the complex human form into its fundamental elements while avoiding unnecessary details that would weigh down your characters.\u003C/p>\u003Cp>Consider attending local life drawing classes or joining online sessions if available. You could also commit a few minutes daily to sketch quick poses using online resources or apps designed for gesture drawing, or you could simply sketch a person sitting in your favorite café or library.\u003C/p>\u003Chr>\u003Ch2 id=\"6-lighting\">\u003Cstrong>6. Lighting\u003C/strong>\u003C/h2>\u003Cp>Without any shading, a simple sphere looks flat. But when you introduce a light source, the highlight, mid-tones, and shadows all contribute to a realistic drawing.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/how-light-shapes-emotion-in-animation/\">\u003Cu>Lighting affects how we perceive forms.\u003C/u>\u003C/a> It determines how effectively an object's form and volume are portrayed.\u003C/p>\u003Cp>As previously mentioned, perspective focuses on creating a believable representation of a three-dimensional world, and lighting is your ally to emphasize the spatial relationship between objects: where you position a light source influences how an object casts shadows and catches highlights, reinforcing its placement within the environment. 
It clarifies which areas are closer to the viewer or receding into the distance, guiding the eyes across the canvas.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Solid drawing connects together form, movement, and dimension to create more engaging animations. Mastering its principles allows you to turn even the most mundane drawings into captivating stories.\u003C/p>\u003Cp>Solid drawing is not the only best practice to take into account in your animation, though: there are 12 principles of animation to master, and you still need the discipline to keep practising each one.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. 
We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1198,"comment_id":1199,"feature_image":1200,"featured":105,"visibility":10,"created_at":1201,"updated_at":1202,"custom_excerpt":1203,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1204,"primary_tag":1205,"url":1206,"excerpt":1203,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1207},"cac0b9d1-e69d-44fe-b821-0bdc7afb3956","6774dddc0262320001308ed1","https://images.unsplash.com/photo-1582134534988-f8bcfc928273?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDd8fHNvbGlkJTIwZHJhd2luZ3xlbnwwfHx8fDE3MzU3MTI0MzB8MA&ixlib=rb-4.0.3&q=80&w=2000","2025-01-01T07:17:00.000+01:00","2026-03-26T10:42:04.000+01:00","Solid drawing is a cornerstone of the 12 principles of animation, giving characters and objects depth, volume, and movement. 
Learn how to bring your sketches to life with techniques like form, perspective, balance, and lighting in our latest blog post.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/solid-drawing/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@kellysikkema?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Kelly Sikkema\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/solid-drawing","2025-02-03T09:55:57.000+01:00",{"title":1193},"solid-drawing","posts/solid-drawing",[1214],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"_2L-UjjjPM6wcOrQKBvSRokkkJRijW4ZatiE8F5DylI",{"id":1217,"title":1218,"authors":1219,"body":7,"description":7,"extension":8,"html":1221,"meta":1222,"navigation":14,"path":1233,"published_at":1234,"seo":1235,"slug":1236,"stem":1237,"tags":1238,"__hash__":1240,"uuid":1223,"comment_id":1224,"feature_image":1225,"featured":105,"visibility":10,"created_at":1226,"updated_at":1227,"custom_excerpt":1228,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1229,"primary_tag":1230,"url":1231,"excerpt":1228,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1232},"ghost/posts:animation-scripts.json","Writing Scripts for Animation: A Step-by-Step Guide (2026)",[1220],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📖\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Animation is about storytelling\u003C/strong>\u003C/b>\u003C/div>\u003C/div>\u003Cp>Just like a novel engages readers with a 
narrative, an animation captivates viewers with dialogue and visual storytelling. And just like a novel needs a manuscript, an animation needs a script.\u003C/p>\u003Cp>Whether it's a short animated series or a feature film, most animations start with a script.\u003C/p>\u003Cp>It can be a surprise if you're just beginning to learn more about animation, but you need to write if you're going to work with other animators, producers, or consultants.\u003C/p>\u003Cp>And because it's not always easy to know where to start, we walk you through the basics of writing a script for animation in this article.\u003C/p>\u003Chr>\u003Ch2 id=\"why-a-script\">\u003Cstrong>Why a Script?\u003C/strong>\u003C/h2>\u003Cp>A script lays down ideas in a structured way. It's essential to convey your vision to potential investors, producers, and collaborators because pitching projects relies on the clarity and originality of your script to show its potential.\u003C/p>\u003Cp>A script also acts as a single source of truth for animation projects: it guides the storyline and defines visual elements, character dynamics, and narrative flow.\u003C/p>\u003Cp>From pre-production to the final cut, every team member uses the script with\u003Ca href=\"https://blog.cg-wire.com/storyboard-animation/\"> \u003Cu>storyboards\u003C/u>\u003C/a>,\u003Ca href=\"https://blog.cg-wire.com/how-animatics-bring-stories-to-life/\"> \u003Cu>animatics\u003C/u>\u003C/a>, and other design packages to work together.\u003C/p>\u003Cp>Directors and supervisors use scripts to streamline workflows and coordinate team efforts. 
Concise actions and dialogue lines help animators, voice artists, directors, and editors understand their roles and do their job.\u003C/p>\u003Chr>\u003Ch2 id=\"formatting\">\u003Cstrong>Formatting\u003C/strong>\u003C/h2>\u003Cp>Before diving into storytelling, let's tackle an essential aspect of scriptwriting: formatting.\u003C/p>\u003Cp>Proper script formatting creates an organized template that answers the crucial \"5Ws\"—each scene's who, what, where, when, and why.\u003C/p>\u003Cp>This is typically done through four elements:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Scene heading\u003C/strong> - Also known as a slugline, it indicates the location and time of day. For example, \"EXT. FOREST - DAY\" sets the stage immediately.\u003C/li>\u003Cli>\u003Cstrong>Scene descriptions\u003C/strong> - The descriptions depict the scene's environment and notable actions. Brief but descriptive, they lay down visual and audible expectations.\u003C/li>\u003Cli>\u003Cstrong>Character names for dialogue\u003C/strong> - Scriptwriters name all the characters who will speak in a scene. Their unique vocabulary and tone help distinguish characters.\u003C/li>\u003Cli>\u003Cstrong>Dialogue\u003C/strong> - Dialogue is key for conveying emotions and driving the narrative. Well-crafted dialogue is concise yet loaded with meaning.\u003C/li>\u003C/ul>\u003Cp>While these basic elements provide structure, how scriptwriters use them varies greatly: there is no unique way to write a screenplay, so you can add your own style to the script.\u003C/p>\u003Cp>Hayao Miyazaki from Studio Ghibli doesn't write a script, per see. He just starts with storyboards and annotates dialogues in the margins for voice actors (available scripts are just transcripts).\u003C/p>\u003Chr>\u003Ch2 id=\"1-narrative-arcs\">\u003Cstrong>1. 
Narrative Arcs\u003C/strong>\u003C/h2>\u003Cp>A story needs a cohesive structure that viewers can easily follow, so writers come up with narrative arcs.\u003C/p>\u003Cp>Traditional storytelling follows five arcs: exposition, rising action, climax, falling action, and resolution. Novels are broken down into parts and chapters to help readers discern when an arc begins or ends.\u003C/p>\u003Cp>In animation, shorter scripts are divided into acts: Act I ends after a problem occurs, Act II continues up until a climax, and Act III describes the resolution.\u003C/p>\u003Cp>The way you write your script is up to you, of course, but keep in mind the following elements while structuring your script:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Goal, audience, themes\u003C/strong> - At this stage, the writer thinks of the ultimate goal of the narrative, taking into account the target audience and weaving in underlying themes. You have things you want to tell, and you need to think about how you want to bring them forth for maximum emotional impact.\u003C/li>\u003Cli>\u003Cstrong>Storytelling archetypes\u003C/strong> - Broadly, narratives follow one of the traditional archetypes—tragic, comedic, hero's journey, rags-to-riches, voyage and return, etc. Understanding these archetypes helps determine how you can structure your story.\u003C/li>\u003Cli>\u003Cstrong>Breaking the rules\u003C/strong> - Understanding narrative techniques provides a good foundation for storytelling, but you also want to subvert your audience's expectations by avoiding clichés. For example, animators experiment with nonlinear storytelling (White Fox's Steins Gate includes complex time loops) or unique perspectives (Inside Out by Pixar personifies emotions).\u003C/li>\u003C/ul>\u003Cp>In any case, the simplest way to go about constructing a story is to start with an outline highlighting the key narrative elements you want to include.\u003C/p>\u003Chr>\u003Ch2 id=\"2-character-description\">\u003Cstrong>2. 
Character Description\u003C/strong>\u003C/h2>\u003Cp>Character descriptions are especially important in scriptwriting to drive the narrative,\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\"> \u003Cu>inspire character design\u003C/u>\u003C/a>, and plan out their development.\u003C/p>\u003Cp>A character's personality is conveyed via dialogue and description, which in turn dictates their design: a villain has a mischievous smile, a hero exudes sympathy, etc. Every physical trait reveals something deeper.\u003C/p>\u003Cp>Characters evolve, influenced by circumstances and challenges. Their personal growth—or lack thereof—guides plot progression and engage audiences. Descriptions are used to indicate changes.\u003C/p>\u003Cp>Sometimes, it's better for scriptwriters to use character sheets instead of adding lengthy descriptions. They include detailed notes on every aspect of a character, from physical appearance and personality traits to backstory and personal goals, to help writers create consistent yet multi-dimensional and believable characters.\u003C/p>\u003Cp>Character descriptions also help animators and voice actors bring characters to life, though much creativity is needed to get all the details right.\u003C/p>\u003Chr>\u003Ch2 id=\"3-scene-description\">\u003Cstrong>3. Scene Description\u003C/strong>\u003C/h2>\u003Cp>Like character descriptions, scene descriptions provide a visual framework from which animators can draw inspiration. They include three elements: layout, environment, and props.\u003C/p>\u003Cp>When describing the layout of a scene, you must imagine how the space is occupied and perceived by viewers. Think of it as a camera angle that directs the audience's eye. Defining the layout helps animators decide how characters move and interact within the space.\u003C/p>\u003Cp>The environment and background set the tone of a scene. It's not just about where the scene takes place but the emotions it triggers. 
Is the environment warm and inviting, with soft colors and gentle lighting, or dark and cold, with stark contrasts and deep shadows?\u003Ca href=\"https://blog.cg-wire.com/character-color-palettes/\"> \u003Cu>Describe textures, colors, and lighting\u003C/u>\u003C/a> to tell a story.\u003C/p>\u003Cp>Props are also powerful storytelling tools to reveal character traits, drive the plot, and improve your world-building. A seemingly mundane object, when used creatively, can take on significant meaning: the magic carpet in Disney’s Aladdin is not merely a mode of transport―it's a character. In your script, detail not only the appearance of props but also their relationship with the characters and their influence on the story.\u003C/p>\u003Cp>By combining each element, you give precious indications to animators.\u003C/p>\u003Chr>\u003Ch2 id=\"4-dialogues\">\u003Cstrong>4. Dialogues\u003C/strong>\u003C/h2>\u003Cp>Dialogues play a pivotal role in character design.\u003C/p>\u003Cp>It's important to note that in animation, not every message needs to be communicated through dialogue. \"Show, don't tell.\" Use your animation's visual power to tell the story whenever possible, using dialogue as merely a tool for pacing.\u003C/p>\u003Cp>Think of scenes like WALL-E's silent moments, where expressions and actions convey depth without a single spoken word.\u003C/p>\u003Cp>Like in any great writing, what's unsaid is often more powerful, so encourage viewers to read between the lines by writing dialogue rich with subtext, where characters say one thing but imply another, for example.\u003C/p>\u003Cp>But dialogue and sound remain crucial components of animation to elevate the story, build the world, and bring out the distinct characteristics of each character.\u003C/p>\u003Cp>Each line reflects a character's unique personality, background, and motivations, so write dialogues that feel authentic to their voice. Good dialogue needs to sound natural. 
A simple tip is to read your lines aloud or have others perform them. This exercise helps catch awkward phrasings or unintentional rhymes that could distract from the narrative. It's also the voice actor's role to work with and interpret the source material.\u003C/p>\u003Cp>Animation thrives on rhythm, so animators avoid long-winded speeches that slow the action. Instead, they use snappy, impactful lines to keep the story moving.\u003C/p>\u003Chr>\u003Ch2 id=\"get-inspired\">\u003Cstrong>Get Inspired\u003C/strong>\u003C/h2>\u003Cp>Every great writer stands on the shoulders of scripts that came before.\u003C/p>\u003Cp>Online databases, script archives, and film schools are great starting points for accessing animated scripts of varying genres and complexities:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/01/CleanShot-2025-01-01-at-3-.00.49@2x.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"2000\" height=\"1127\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/01/CleanShot-2025-01-01-at-3-.00.49@2x.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/01/CleanShot-2025-01-01-at-3-.00.49@2x.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1600/2025/01/CleanShot-2025-01-01-at-3-.00.49@2x.png 1600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w2400/2025/01/CleanShot-2025-01-01-at-3-.00.49@2x.png 2400w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Developing the habit of reading and analyzing scripts is a way to get better at scriptwriting. 
You can, for example, annotate scripts and summarize key plot points, character arcs, and critical strains in dialogue that add depth to storytelling.\u003C/p>\u003Cp>You also don't have to start writing scripts from scratch. Adapting existing works into animations is a great way to learn the ropes of scriptwriting. Adaptations require understanding but also deviating from original texts while staying true to core elements to respect pacing, budget, and feasible run time. Scripts provide a framework to navigate these challenges.\u003C/p>\u003Cp>You don't need to worry about copyrights either: there is plenty of free material out there! For example, Gutenberg has tens of thousands of classic books in the public domain.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Scripting is not just a side note in animation: it's the very first step to transforming ideas into animations. A well-crafted script doesn't just tell a story—it builds worlds, designs characters, and weaves complex narratives.\u003C/p>\u003Cp>Scriptwriters usually start with an outline to structure the main narrative acts, then process with scene headings, descriptions, and dialogues. Character designs are suggested via descriptions, character sheets, and accompanying storyboards or concept art.\u003C/p>\u003Cp>If you're inspired and eager to create your own animation productions someday, scriptwriting is a valuable skill to add to your arsenal, so don't underestimate it! Behind-the-scenes documentaries, scriptwriting databases, and books like \"Animation Writing And Development\" are amazing starting points.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! 
We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1223,"comment_id":1224,"feature_image":1225,"featured":105,"visibility":10,"created_at":1226,"updated_at":1227,"custom_excerpt":1228,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1229,"primary_tag":1230,"url":1231,"excerpt":1228,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1232},"18737000-2d13-49fa-b524-2fea68b2cb50","6774d9270262320001308eae","https://images.unsplash.com/photo-1571232151946-f7f00c61ade7?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDIxfHxmaWxtJTIwc2NyaXB0fGVufDB8fHx8MTczNTcxMTQyM3ww&ixlib=rb-4.0.3&q=80&w=2000","2025-01-01T06:56:55.000+01:00","2026-03-26T10:23:39.000+01:00","Every great animation begins with a strong script. Learn the essentials of animation scriptwriting in our latest blog post, from formatting and narrative arcs to crafting compelling character dialogues. 
Discover how to transform your ideas into powerful visual stories.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-scripts/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@waldemarbrandt67w?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Waldemar\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-scripts","2025-01-27T09:55:40.000+01:00",{"title":1218},"animation-scripts","posts/animation-scripts",[1239],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"vXJ-zH6b4x1N7EsozHrYDdgj8kw2uJGotb7i_OSXrdE",{"id":1242,"title":1243,"authors":1244,"body":7,"description":7,"extension":8,"html":1246,"meta":1247,"navigation":14,"path":1258,"published_at":1259,"seo":1260,"slug":1261,"stem":1262,"tags":1263,"__hash__":1265,"uuid":1248,"comment_id":1249,"feature_image":1250,"featured":105,"visibility":10,"created_at":1251,"updated_at":1252,"custom_excerpt":1253,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1254,"primary_tag":1255,"url":1256,"excerpt":1253,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1257},"ghost/posts:animation-inspiration.json","How To Break Free from Creative Blocks: Animation Inspiration For 2026",[1245],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🏋️\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Consistent practice is key for animators\u003C/strong>\u003C/b>\u003C/div>\u003C/div>\u003Cp>But 
what if you find yourself out of ideas? Sitting at your desk, sketchbook in hand, ready to work on your portfolio but faced with total creative block.\u003C/p>\u003Cp>Having a list of animation prompts can initially seem like a solution, but it's not a sustainable method―generating ideas is a muscle one needs to train!\u003C/p>\u003Cp>Whether you know it or not, your life is an endless source of inspiration for animation. You just need a bit of know-how and some practice.\u003C/p>\u003Cp>In this article, we break free from the constraints of static lists to provide you with dynamic strategies to cultivate a constant flow of animation ideas. We hope these creative techniques will help you in the long run!\u003C/p>\u003Chr>\u003Ch2 id=\"its-all-about-storytelling\">\u003Cstrong>It's All About Storytelling\u003C/strong>\u003C/h2>\u003Cp>A great animation comes from a great story.\u003C/p>\u003Cp>Animation, after all, is a medium of storytelling, whether a short clip or a full-length feature.\u003C/p>\u003Cp>Before putting pencil to paper (or stylus or tablet), think about the stories you want to tell. What inspires you? What stories have stuck with you through the years? What themes are you drawn to?\u003C/p>\u003Cp>Your story ideas will be as unique as your perspective, but the key is to let them reveal themselves naturally through thoughtful contemplation.\u003C/p>\u003Cp>You can't always force ideas to come to you, but you can always put all the good luck on your side with discipline.\u003C/p>\u003Cp>Here are a few ways to go about it:\u003C/p>\u003Chr>\u003Ch2 id=\"1-fan-fiction\">\u003Cstrong>1. Fan Fiction\u003C/strong>\u003C/h2>\u003Cp>Think about when children draw their favorite characters from TV shows or movies. 
They add their own imaginative touches―new outfits, sidekicks, or even missions to accomplish.\u003C/p>\u003Cp>This childlike creativity is a powerful tool.\u003C/p>\u003Cp>With fan fiction, you can experiment with storytelling and animation techniques, all while connecting with other fans in the community.\u003C/p>\u003Cp>Consider starting with a character you resonate with and ask yourself, \"\u003Cstrong>What if?\u003C/strong>\" What if your favorite hero had to face their greatest fear? What if two characters from different universes met? The possibilities are endless.\u003C/p>\u003Cp>A great example is \"\u003Cstrong>Legend, a Dragon Ball Tal\u003C/strong>e\" produced in 2022 by animator Agent Mystery Meat (The New Challenger Productions). It features the classic Dragon Ball characters, with the animator's unique take on the art style and an original plot.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXcwhjCMYFfL2VbIndksIJkO2ZM0g6TkXUczVX7ItocoO50m4NA6jPm0oSa-7criZ2rSXDj3VAgHGEyzTBr7x0M7oARbUcXIPpkx168nPi0xG_6ihTrvukVkSuQz4IEv6vf3aRTyrg?key=_vb9Zsdr4pgEXJ2WtnaL9Txn\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"408\">\u003C/figure>\u003Cp>Anime edits and anime music videos are also popular fan-made creations that are great for learning video and audio editing while developing an audience on social media.\u003C/p>\u003Chr>\u003Ch2 id=\"2-get-the-basics-right\">\u003Cstrong>2. 
Get The Basics Right\u003C/strong>\u003C/h2>\u003Cp>Another way to get animation ideas is to go back to the foundations of the craft.\u003C/p>\u003Cp>It's not about achieving technical perfection with the basics, like animating a ball bounce, but deeply understanding and integrating the language and processes that define the world of animation.\u003C/p>\u003Cp>For example, the 12 principles of animation teach you how to infuse your characters with emotion and personality.\u003C/p>\u003Cp>You could start by practising each principle individually in small projects. For example, animate a simple character using exaggeration to emphasize their feelings and see how a slight change can impact the animation. As you experiment and practice, your understanding of these principles will naturally expand, and they will begin to inform and inspire new ideas.\u003C/p>\u003Cp>As previously mentioned, creativity is a muscle, and muscles need fuel to function and grow. Consuming art and seeking new experiences is essential. Watching different animation and art styles opens your eyes to the myriad ways of storytelling.\u003C/p>\u003Cp>Passive consumption is not the way, though. You need to engage with what you consume, whether it's through analysis, reproduction, or creation. Ask yourself why a particular animation style is effective. How does a specific animation technique convey emotion or motion?\u003C/p>\u003Cp>Sampling animations also develops your own taste. What do you like, don't like, or wish to see more of.\u003C/p>\u003Cp>Start a creative journal where you jot down specific scenes from animations that inspire you. Write about why they made an impact—was it the way the character moved, or perhaps the interplay of color and shadow? 
By documenting your thoughts, you sharpen your analytical skills and create a repository of ideas from which to draw.\u003C/p>\u003Cp>Once you get the principles, you want to look into\u003Ca href=\"https://blog.cg-wire.com/3d-animation-process/\"> \u003Cu>how animators work, the technical terms they use, and their processes\u003C/u>\u003C/a>.\u003C/p>\u003Cp>You can\u003Ca href=\"https://blog.cg-wire.com/\"> \u003Cu>look at our blog\u003C/u>\u003C/a> to learn more about the entire animation process. You can also participate in animation communities or forums or watch behind-the-scenes documentaries of your favorite animations to see how those concepts are translated into action.\u003C/p>\u003Cp>Understanding the '\u003Cstrong>why\u003C/strong>' and '\u003Cstrong>how\u003C/strong>' behind a process unlocks your ability to innovate and reapply those ideas in your work.\u003C/p>\u003Chr>\u003Ch2 id=\"3-the-introspective-way\">\u003Cstrong>3. The Introspective Way\u003C/strong>\u003C/h2>\u003Cp>Some stories are found within: explore what you want to contribute to the world!\u003C/p>\u003Cp>You could, for example, solve a problem through animation. If you are passionate about environmental issues, create an animated short that illustrates the impact of plastic pollution on modern society. 
Humans are visual animals: what you make visible raises awareness and inspires change more engagingly than any amount of debating.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXcKit56y8xbHsi70qz1VzRyfU2lnRa7DKX3z6kEHnirWaDrkXAwEesfmDqIJi8k2k5MjN5tYJpUYCE_0h0U9E4nMtMDVQP--jHRZ4_0qvwSjQMhWv7nhjZfzNrlJaq49wBCTkn9Vw?key=_vb9Zsdr4pgEXJ2WtnaL9Txn\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"531\" height=\"347\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Waltz with Bashir, Ari Folman\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Adapting a beloved manga, comics, or novel is also\u003Ca href=\"https://blog.cg-wire.com/how-to-build-an-animation-portfolio/\"> \u003Cu>a great way to showcase your animation skills\u003C/u>\u003C/a>. You can pick a less popular but intriguing and copyright-free story, like a classic fable or myth, and reinterpret it with a modern twist or set it within a different cultural context.\u003C/p>\u003Cp>Incorporating autobiographical elements into your work is another way to get the ball rolling. If you had a profound personal experience, why not animate it? There is always an audience for personal stories.\u003C/p>\u003Chr>\u003Ch2 id=\"4-document-dont-create\">\u003Cstrong>4. Document, Don't Create\u003C/strong>\u003C/h2>\u003Cp>Many powerful animations aren't created from scratch and simply reflect daily life, its joys, and its struggles.\u003C/p>\u003Cp>If you're learning to animate facial expressions―instead of starting with a blank slate and trying to design a character from scratch―you could document your process by filming everyday expressions around you. Capture yourself or friends showing various emotions and use those as direct references to animate.\u003C/p>\u003Cp>Share your progress as you follow tutorials or tackle new courses and hobbies. 
It doesn't have to be about animation: you can simply combine interests to learn more about both.\u003C/p>\u003Cp>Say you're learning history, mathematics, or playing badminton―anything can become an interesting animation subject.\u003Ca href=\"https://blog.cg-wire.com/youtube-animation-channel/\"> \u003Cu>Look up explainer videos on YouTube\u003C/u>\u003C/a> and see how animations are used to convey complex ideas and skills.\u003C/p>\u003Cp>This approach helps chart your growth and provides a learning experience others can benefit from. Think of it as a visual diary chronicling your journey.\u003C/p>\u003Cp>Becoming a teacher of what you learn is a sure way to solidify new concepts in your mind while forging a tangible connection with viewers who appreciate your transparency.\u003C/p>\u003Chr>\u003Ch2 id=\"5-start-with-the-audience\">\u003Cstrong>5. Start With The Audience\u003C/strong>\u003C/h2>\u003Cp>For a more business-oriented approach, start with your prospective audience in mind: discover what people want to see and animate content that entertains or educates them.\u003C/p>\u003Cp>This approach can be particularly complementary for animators interested in the management side of an animation studio.\u003C/p>\u003Cp>Understanding your audience is crucial because the content you create has to be both relevant and impactful to increase the probability of success.\u003C/p>\u003Cp>You can for example engage with communities or track trending topics across social media platforms.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXdg-3vZ6q-X11KqgX5A394lWC8Ynad8JyiSeXHhXsgmX0P8Db6L346h60l6-PD1SjoYmnGx1UanH-aNo2pl9Jgbs-pXvTC0BPdOy9JowS-C0oWY3iOHGWG-5uAwOW-sx2_OdqOFXA?key=_vb9Zsdr4pgEXJ2WtnaL9Txn\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"418\" height=\"344\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Google Trends on the 
trend “anime”\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Keep in mind however that the most successful animation studios manage to align their creative vision with their audience's interests. You don't have to please your audience at all costs to be successful. Again, you create your own audience.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>There are many ways to find inspiration for your next animation: from simple practice exercises to deeply personal experiences, all you need is an exciting story you want to tell! Start from there, take baby steps, and eventually, you'll learn all you need.\u003C/p>\u003Cp>Your journey doesn't end with an idea, though: animations require action to turn an abstract thought into a full-fledged production. Begin by conceptualizing your story and imagining the world you want to bring to life. From there, write a script, break this script down into manageable scenes, and create a storyboard. Get funding, assemble a team, and share your story with the world.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. 
We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1248,"comment_id":1249,"feature_image":1250,"featured":105,"visibility":10,"created_at":1251,"updated_at":1252,"custom_excerpt":1253,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1254,"primary_tag":1255,"url":1256,"excerpt":1253,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1257},"36a3a9f2-fe7d-47af-adaa-f4d33e1627f0","6774d4140262320001308e90","https://images.unsplash.com/photo-1512314889357-e157c22f938d?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDZ8fGlkZWFzfGVufDB8fHx8MTczNTU5OTY0MHww&ixlib=rb-4.0.3&q=80&w=2000","2025-01-01T06:35:16.000+01:00","2026-02-20T06:03:49.000+01:00","Discover how to break free from creative blocks and find inspiration for your animations. 
From storytelling techniques to documenting everyday life, this guide is packed with actionable strategies to keep your ideas flowing and elevate your craft.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-inspiration/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@alterego_swiss?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">AbsolutVision\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-inspiration","2025-01-20T09:55:19.000+01:00",{"title":1243},"animation-inspiration","posts/animation-inspiration",[1264],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"OXkggZwR91hIs9_9D5i00Z61Ma_AM0NgMCzgLLMKlco",{"id":1267,"title":1268,"authors":1269,"body":7,"description":7,"extension":8,"html":1271,"meta":1272,"navigation":14,"path":1283,"published_at":1284,"seo":1285,"slug":1286,"stem":1287,"tags":1288,"__hash__":1290,"uuid":1273,"comment_id":1274,"feature_image":1275,"featured":105,"visibility":10,"created_at":1276,"updated_at":1277,"custom_excerpt":1278,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1279,"primary_tag":1280,"url":1281,"excerpt":1278,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1282},"ghost/posts:rendering-explained.json","Rendering Explained (2026): The Key to Stunning 3D Animation",[1270],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">ℹ️\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Animation rendering is the process of generating the final 2D visual output from a 3D 
scene\u003C/strong>\u003C/b>\u003C/div>\u003C/div>\u003Cp>The rendering engine in your digital content creation tool takes all the elements you've created—3D models, textures, lighting, and special effects—and translates them into a series of images or videos you can watch or share.\u003C/p>\u003Cp>This guide explores why rendering is a cornerstone of modern animation, how it's done through various techniques, and the challenges that make it a critical stage in production.\u003C/p>\u003Chr>\u003Ch2 id=\"why-rendering\">\u003Cstrong>Why Rendering?\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/3d-animation-process/\">\u003Cu>All 3D productions include rendering\u003C/u>\u003C/a> as part of their animation pipeline. Not just before delivery but also throughout the entire process in an iterative cycle to gather feedback and perform edits: a well-honed rendering workflow is key to avoiding delays in big production.\u003C/p>\u003Cp>However, rendering is not limited to converting 3D models into images. It also includes 2D computer-generated images and visual effects (VFX). For example, animators use rendering to create water simulations or for 2D interpolation.\u003C/p>\u003Cp>With new technologies, rendering allows animators to create hyper-realistic graphics―a feat that was once only possible with expensive, time-consuming hardware. 
It has now become more accessible for independent filmmakers and small animation studios to achieve cinematic-quality results with free, open-source tools like Blender and a $2,000 laptop.\u003C/p>\u003Cp>For all these reasons, animators need a basic understanding of what rendering entails to make the most of the creative features enabled by modern digital content creation tools.\u003C/p>\u003Chr>\u003Ch2 id=\"render-passes\">\u003Cstrong>Render Passes\u003C/strong>\u003C/h2>\u003Cp>Digital content creation tools break down models and scenes into separate layers for greater control and readability.\u003C/p>\u003Cp>During rendering, each of these layers corresponds to a single pass, and manipulating these passes allows animators to create various effects by combining different rendering techniques.\u003C/p>\u003Cp>For example, the background could be composed of a layer with a gradient color and another with a generated cloud texture. The character in the foreground would have different layers for each body part, texture, shaders, etc.\u003C/p>\u003Cp>By isolating these layers, animators can focus on separate details without disturbing the work of others, and adjustments can be added at later stages.\u003C/p>\u003Cp>You'll then have shadow passes to add depth by defining contours and suggesting space for the whole scene, as well as highlights to illuminate surfaces and key areas.\u003C/p>\u003Chr>\u003Ch2 id=\"rendering-techniques\">\u003Cstrong>Rendering Techniques\u003C/strong>\u003C/h2>\u003Cp>As we just mentioned in the last section, rendering is not a single technique. 
Different techniques bring different pros, cons, and artistic values―rendering shaders will be vastly different from rendering character hair in real-time.\u003C/p>\u003Cp>You'll, however, need to understand the following basic techniques:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Raytracing\u003C/strong> - Raytracing simulates the path of light as it interacts with objects to create accurate reflections and shadows. Its complexity comes at a high computational cost and requires high processing power.\u003C/li>\u003Cli>\u003Cstrong>Scanline\u003C/strong> - Scanline rendering is a fast algorithm to identify what surfaces can be seen from a particular angle by processing images row-by-row or line-by-line instead of polygon-by-polygon or pixel-by-pixel. It's a preferred method for real-time applications where speed is crucial over photorealistic accuracy.\u003C/li>\u003Cli>\u003Cstrong>Radiosity\u003C/strong> - Radiosity focuses on accurately simulating diffused lighting between surfaces by capturing subtle inter-surface light interactions. It's ideal for scenarios needing soft, ambient lighting effects.\u003C/li>\u003Cli>\u003Cstrong>Rasterization\u003C/strong> transforms 3D models represented as polygons into pixels or dots for display. While faster, it may lack the sophisticated lighting effects achievable with raytracing.\u003C/li>\u003C/ul>\u003Cp>This list is non-exhaustive, but it will give you an idea of how rendering works. An animator knows each layer has its own rendering techniques to achieve specific results.\u003C/p>\u003Chr>\u003Ch2 id=\"rendering-resolution\">\u003Cstrong>Rendering Resolution\u003C/strong>\u003C/h2>\u003Cp>Resolution is the number of pixels in an image.\u003C/p>\u003Cp>Higher-resolution images offer a clearer and more detailed quality but require more processing power and storage space.\u003C/p>\u003Cp>The choice of resolution dramatically affects the viewer's experience, so productions aim for the highest resolution possible. 
For example, animators would choose an 8K resolution to ensure every minute detail is visible on a big screen. But if your animation is viewed on smartphones, a 1080p resolution is more efficient.\u003C/p>\u003Cp>This decision impacts the rendering process tremendously since you need to render 8,294,400 pixels for 4K against 993,600 pixels for 1080p―more than eight times more pixels to render!\u003C/p>\u003Cp>For this reason, it's common to work with lower resolutions during production to quickly get feedback on your animation―over long periods, each minute saved not rendering scenes rapidly adds up.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/01/image.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1348\" height=\"705\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/01/image.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/01/image.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/01/image.png 1348w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Cspan style=\"white-space: pre-wrap;\">Source :&nbsp;\u003C/span>\u003Ca href=\"https://notrianglestudio.com/?ref=blog.cg-wire.com\" target=\"_blank\">\u003Cspan style=\"white-space: pre-wrap;\">NoTriangle Studio\u003C/span>\u003C/a>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"rendering-engines\">\u003Cstrong>Rendering Engines\u003C/strong>\u003C/h2>\u003Cp>Choosing a rendering engine depends on project-specific requirements. 
Animation studios and freelancers rely on various rendering engines: some are optimized for hyper-realistic environments, while others handle non-photorealistic styles or real-time rendering.\u003C/p>\u003Cp>The choice of rendering engines is sometimes limited by your DCC tool.\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>Take Blender, for instance.\u003C/u>\u003C/a> The open-source 3D editor has several rendering options: the Cycles engine for raytracing, FreeStyle for non-photorealistic rendering, and EEVEE for real-time. Unreal Engine, on the other hand, has a completely different (but similar) set of rendering settings. Depending on your team of animators, the studio will have to pick the rendering engine they are most familiar with.\u003C/p>\u003Cp>A crucial consideration in the rendering process is understanding the roles of CPUs (Central Processing Units) and GPUs (Graphics Processing Units). Blender's Cycles engine allows animators to use one, the other, or both.\u003C/p>\u003Cp>The CPU is a general-purpose processor that can handle a wide range of computational tasks. The GPU is designed to perform rapid matrix computation, which is ideal for the kind of parallel processing tasks required in rendering. GPUs can significantly speed up rendering times, so GPU-based engines are often used in workflows that demand quick turnarounds. 
They are also key for rendering photorealistic graphics.\u003C/p>\u003Cp>Choosing between a CPU or GPU rendering engine often depends on the nature of your project, the hardware at your disposal, and the quality versus speed compromise you're willing to make.\u003C/p>\u003Chr>\u003Ch2 id=\"real-time-vs-non-real-time-rendering\">\u003Cstrong>Real-Time vs Non-Real-Time Rendering\u003C/strong>\u003C/h2>\u003Cp>Another thing to understand is the difference between real-time and non-real-time rendering.\u003C/p>\u003Cp>Real-time rendering is the near-instantaneous creation of images as events happen within a virtual environment. Think of it as the technology that powers video games or virtual reality experiences, where objects and characters appear and respond seamlessly as the user interacts. This form of rendering is especially useful during development when you need immediate feedback on a change in your 3D model.\u003C/p>\u003Cp>Non-real-time rendering pre-calculates the images or frames before they are viewed. This method is used during the delivery phase for high-end animations where the quality of visuals is a priority. The process can take anywhere from minutes to days, depending on the level of detail and photorealism aimed for in the final output.\u003C/p>\u003Chr>\u003Ch2 id=\"render-farms\">\u003Cstrong>Render Farms\u003C/strong>\u003C/h2>\u003Cp>Now that you know how rendering works, you might have an idea of the amount of computer power you need to create an animated movie―far beyond the capability of a single computer or even a few high-end workstations.\u003C/p>\u003Cp>This is where render farms come into play.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/partnership-with-ranch-computing/\">\u003Cu>A render farm is a high-performance computer cluster\u003C/u>\u003C/a> designed specifically to tackle the demanding computational workload of rendering 3D scenes. 
These farms consist of numerous interconnected computers, often called nodes, working collaboratively to convert 3D models, textures, lighting, and animations into final high-resolution images or frames that make up the movie.\u003C/p>\u003Cp>Animation teams use render farms because they significantly reduce the time to render these frames. Why invest tens of thousands of dollars into high-end machines for a one-off project when you can just lease it for a hundredth of the price?\u003C/p>\u003Cp>Once an animation is ready to be rendered, the scenes are divided into smaller tasks. These tasks are then distributed among the available nodes in the render farm. This distribution is managed by specialized software that oversees the load balancing across the farm. Each node receives its assigned task and independently processes the frames of the animation. This decentralization allows multiple frames to be rendered simultaneously. As nodes complete their tasks, the rendered frames are collected and assembled back into the sequence or shot that forms part of the final animation. The rendered frames are then subjected to rigorous quality checks to ensure they meet the desired visual standards.\u003C/p>\u003Cp>Render farms are the backbone of large-scale animation productions and an essential tool that democratizes the ability to create high-end visual effects, making ambitious projects feasible even for smaller studios.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Rendering is a necessary step to turn ideas into shareable animations. It's a highly technical job that greatly impacts the output quality.\u003C/p>\u003Cp>As an animator, mastering rendering is key to pushing the boundaries of what you can create. In larger studios, pipeline management, and rendering optimization are especially important to scale a team's efforts. 
Even at an individual scale, making the best of rendering previews changes your productivity.\u003C/p>\u003Cp>Make sure to experiment with your DCC tool's rendering settings, or try out a render farm for a few dollars.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1273,"comment_id":1274,"feature_image":1275,"featured":105,"visibility":10,"created_at":1276,"updated_at":1277,"custom_excerpt":1278,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1279,"primary_tag":1280,"url":1281,"excerpt":1278,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1282},"5ab51201-f8ac-4518-ac1c-a2baef21422f","6774cc7d0262320001308e65","https://images.unsplash.com/photo-1685222325356-c9ef9bc2fb7b?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDM1fHxyZW5kZXJpbmd8ZW58MHx8fHwxNzM1NzA4NjA4fDA&ixlib=rb-4.0.3&q=80&w=2000","2025-01-01T06:02:53.000+01:00","2026-03-26T10:40:03.000+01:00","Rendering transforms 3D models into stunning 2D visuals, bringing animated worlds to life. 
Dive into the key techniques, tools, and processes that make this essential step in animation production possible.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/rendering-explained/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@leo_gogh_22?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Leonardo Martínez\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/rendering-explained","2025-01-13T09:55:46.000+01:00",{"title":1268},"rendering-explained","posts/rendering-explained",[1289],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"RxMJ7l45Pn8zJacdejsV18exXhzVadh2Uyo5OKRpVEc",{"id":1292,"title":1293,"authors":1294,"body":7,"description":7,"extension":8,"html":1296,"meta":1297,"navigation":14,"path":1308,"published_at":1309,"seo":1310,"slug":1311,"stem":1312,"tags":1313,"__hash__":1315,"uuid":1298,"comment_id":1299,"feature_image":1300,"featured":105,"visibility":10,"created_at":1301,"updated_at":1302,"custom_excerpt":1303,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1304,"primary_tag":1305,"url":1306,"excerpt":1303,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1307},"ghost/posts:animation-for-beginners.json","Animation for Beginners: Your Ultimate Getting-Started Guide For 2026",[1295],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🤔\u003C/div>\u003Cdiv class=\"kg-callout-text\">Have you ever dreamt about working in animation?\u003C/div>\u003C/div>\u003Cp>Maybe you’ve grown up watching cartoons and anime, and now you have 
your own ideas to bring to life. Or perhaps you want to create ads for your business. Maybe you’re a teacher looking to liven up your lectures, or an artist eager to experiment with a new art form.\u003C/p>\u003Cp>No matter where you come from, animation is for everyone—\u003Cstrong>\u003Cem>you just need to get started!\u003C/em>\u003C/strong>\u003C/p>\u003Cp>If you’re unsure where to begin, you’re in the right place. This article covers the essentials, from understanding different animation types to mastering core principles. By the end, you’ll have a roadmap to guide you as you set out to animate your stories.\u003C/p>\u003Chr>\u003Ch2 id=\"understanding-different-animation-types\">\u003Cstrong>Understand\u003C/strong>ing\u003Cstrong> Different Animation Types\u003C/strong>\u003C/h2>\u003Cp>One common misconception is that you must be an accomplished artist to create animations. While drawing skills are key to getting your wildest ideas out there, they are not prerequisites―animation is more about storytelling than photorealism.\u003C/p>\u003Cp>When people think of traditional animation, they often imagine \u003Ca href=\"https://blog.cg-wire.com/what-is-2d-animation/\">\u003Cu>2D animation\u003C/u>\u003C/a>―creating hand-drawn images sketched on paper or digitally rendered with software. Motion graphics also fit into this category, frequently used for explainer videos, infographics, and even some mobile applications. 2D animation can involve characters or abstract visual effects created on a two-dimensional plane with something as simple as Microsoft Paint or pen and paper.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/3d-animation-process/\">\u003Cu>3D animation\u003C/u>\u003C/a> uses digital modelling to allow creators to build three-dimensional models in a virtual space. Although some familiarity with modelling tools is beneficial, many assets are ready to use, allowing even beginners to dive into this medium. 
13-year-old teens leverage popular game engines like Grand Theft Auto (GTA) to create animated shorts and even full-length films!\u003C/p>\u003Cp>Claymation uses sculptures out of materials like clay or plasticine and then photographing them frame by frame to create motion. This style gained popularity with films like \"\u003Cstrong>Wallace &amp; Gromit\u003C/strong>\" and \"\u003Cstrong>The Nightmare Before Christmas\u003C/strong>\". Stop-motion animation can also employ other materials such as paper cutouts or even household objects. For instance, many creators use LEGO bricks to create engaging and imaginative films, a technique often referred to as \"brickfilm.\"\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/the-main-types-of-animation/\">\u003Cu>Understanding these types of animation\u003C/u>\u003C/a> lets you decide which path to start, and you need very little when you have a big imagination. Animation can be created using virtually any medium!\u003C/p>\u003Chr>\u003Ch2 id=\"start-with-the-12-principles-of-animation\">\u003Cstrong>Start With The 12 Principles Of Animation\u003C/strong>\u003C/h2>\u003Cp>Once you've picked a medium and started playing around with it, you'll probably want to level up the quality of your projects. The 12 principles of animation is a popular framework used in art schools to create more convincing animations using simple (yet difficult to master) techniques:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Squash and Stretch\u003C/strong> - To give the illusion of weight and volume as objects move. 
Think of a bouncing ball: it squashes when it hits the ground and stretches as it lifts off.\u003C/li>\u003Cli>\u003Cstrong>Anticipation\u003C/strong> - Prepare the audience for an action to improve its impact—for example, a character drawing back their leg before kicking a ball.\u003C/li>\u003Cli>\u003Cstrong>Staging\u003C/strong> - The presentation of a scene so it's unmistakably clear using camera angles and composition as you would in a film to highlight what's important.\u003C/li>\u003Cli>\u003Cstrong>Straight-Ahead Action and Pose-to-Pose\u003C/strong> - Straight-ahead action uses drawing frame-by-frame from start to finish, while pose-to-pose uses keyframes. Combining both offers fluid and controlled movement.\u003C/li>\u003Cli>\u003Cstrong>Follow Through and Overlapping Action\u003C/strong> - This principle makes movement more natural. When a character stops, body parts can continue to move—a scarf in the wind, for instance.\u003C/li>\u003Cli>\u003Cstrong>Slow In and Slow Out\u003C/strong> - Refers to the acceleration and deceleration of movement, like a car gaining speed or coming to a halt smoothly.\u003C/li>\u003Cli>\u003Cstrong>Arc\u003C/strong> - Natural actions follow an arched trajectory. Whether it's a pendulum swing or a character's head turn.\u003C/li>\u003Cli>\u003Cstrong>Secondary Action\u003C/strong> - Adds depth by supporting the main action. For example, a character walking might be swinging its arms or whistling a tune.\u003C/li>\u003Cli>\u003Cstrong>Timing\u003C/strong> - Proper timing makes the physics of your animation believable: fast motions for quick actions like a punch and slower timing for gentler motions.\u003C/li>\u003Cli>\u003Cstrong>Exaggeration\u003C/strong> - Emphasizing actions or emotions. For example, a character's surprise with enlarged eyes and a dramatically open mouth for comedic effect.\u003C/li>\u003Cli>\u003Cstrong>Solid Drawing\u003C/strong> - Refers to the form and weight of the characters. 
Even in 3D, you must ensure your figures have depth and dimension.\u003C/li>\u003Cli>\u003Cstrong>Appeal\u003C/strong> - Lastly, characters must be engaging. Appeal doesn't mean cute or pretty but interesting and well-designed.\u003C/li>\u003C/ol>\u003Cp>A great way to work on applying the 12 principles to your animation can be to start with a reference video: identify key poses, draw in-betweens for smooth transitions, and add details to improve quality.\u003C/p>\u003Chr>\u003Ch2 id=\"animation-is-an-art\">\u003Cstrong>Animation Is An Art\u003C/strong>\u003C/h2>\u003Cp>Once you learn these fundamentals, you'll find it's only the tip of the iceberg. Animation is an art form that rewards continuous learning and development. For example:\u003C/p>\u003Cul>\u003Cli>Consider learning to draw. You just need a sketchbook to start. Then, a graphic tablet can be a valuable tool for colorizing, editing, and creating animations.\u003C/li>\u003Cli>Experiment with different software: programs like Adobe After Effects, Blender, and Toon Boom offer various 2D and 3D animation features depending on what you aim for.\u003C/li>\u003Cli>Learn more about the animation process, from scripting and storyboarding to rendering and production management.\u003C/li>\u003C/ul>\u003Cp>Building your skill set not only refines your technique but also opens pathways to a professional career in animation.\u003C/p>\u003Cp>Like exercise, the key is to progressively take on harder challenges.\u003C/p>\u003Cp>In 2024, there are plenty of online communities to share your progress while keeping yourself committed, and you can find all sorts of tutorials on YouTube and other social media platforms.\u003C/p>\u003Chr>\u003Ch2 id=\"you-dont-need-money-to-start\">\u003Cstrong>You Don't Need Money To Start\u003C/strong>\u003C/h2>\u003Cp>The beauty of animation lies in its accessibility: you don't need to invest heavily in gear like graphics tablets or expensive software when free tools like 
\u003Cstrong>Krita\u003C/strong> for 2D animation or \u003Cstrong>Blender\u003C/strong> for 3D offer robust starting points.\u003C/p>\u003Cp>It's important to avoid falling into the procrastination trap by endlessly studying or buying costly courses. While education is vital, practical experience is irreplaceable. Walt Disney honed his skills through evening classes and correspondence courses—far from today's digital learning resources―but quickly shared his work.\u003C/p>\u003Cp>Whether working solo or collaborating with a team, managing a project from idea to delivery is crucial to learning how to overcome creative challenges. It doesn't mean you need to complete a 1-hour feature film to consider yourself an animator: just take baby steps and don't wait to share your work!\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Animation is for everyone, regardless of background or experience. Start where you are with the tools available and grow from there, one creative challenge at a time. The journey should feel as rewarding as the destination, but you must still walk the talk.\u003C/p>\u003Cp>Financially speaking, the animation industry is constantly evolving and offering plenty of opportunities for skilled animators. If you're a student, don't hesitate to join an animation program because there are many job prospects available that make this field not just a passion but also a viable career path. You also don't need to work for a studio to make a living, it’s never been easier to just build your own and use distribution channels like Instagram or Tiktok to acquire work.\u003C/p>\u003Cp>For those who are considering a career switch later in life, there's an abundance of resources aimed at helping you make the transition smoothly: online courses, workshops, and boot camps can offer you the skills needed. 
Even if animation is a hobby you wish to pursue more casually, there's a wealth of free and affordable resources to guide your learning journey further.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1298,"comment_id":1299,"feature_image":1300,"featured":105,"visibility":10,"created_at":1301,"updated_at":1302,"custom_excerpt":1303,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1304,"primary_tag":1305,"url":1306,"excerpt":1303,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1307},"65268101-1e74-405f-a155-acb6d83b430a","6746bf97a8ea760001536e79","https://images.unsplash.com/flagged/photo-1572609239482-d3a83f976aa0?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDIwfHxjb21wdXRlciUyMGFuaW1hdGlvbnxlbnwwfHx8fDE3MzI2OTAxMTZ8MA&ixlib=rb-4.0.3&q=80&w=2000","2024-11-27T07:43:35.000+01:00","2026-03-26T10:14:04.000+01:00","Animation is for everyone, from storytellers and marketers to teachers and hobbyists. 
Our beginner’s guide covers animation types, essential tools, and the 12 principles of animation, providing a roadmap to help you bring your ideas to life.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-for-beginners/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@moniz437?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Chauhan Moniz\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-for-beginners","2025-01-06T09:55:04.000+01:00",{"title":1293},"animation-for-beginners","posts/animation-for-beginners",[1314],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"sDyNA45VNmLxNeryG_iUFA2ceb3Rsa2-AqciSShfD4o",{"id":1317,"title":1318,"authors":1319,"body":7,"description":7,"extension":8,"html":1329,"meta":1330,"navigation":14,"path":1341,"published_at":1342,"seo":1343,"slug":1344,"stem":1345,"tags":1346,"__hash__":1348,"uuid":1331,"comment_id":1332,"feature_image":1333,"featured":105,"visibility":10,"created_at":1334,"updated_at":1335,"custom_excerpt":1336,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1337,"primary_tag":1338,"url":1339,"excerpt":1336,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1340},"ghost/posts:animation-layout.json","Animation Layout (2026): Crafting the Foundation of Immersive Scenes",[1320],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"68d2f1e036b5be000835a0db","Frank 
Rousseau","frankrousseau","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2021/01/photo_identite.png","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/05/Annecy-Booth_Linework_004-2-1.jpg","CEO &Founder of CGWire","https://addictedtointer.net","https://blog.cg-wire.com/author/frankrousseau/","\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎭\u003C/div>\u003Cdiv class=\"kg-callout-text\">Animation isn't just about characters—it's about the worlds they inhabit and how we, as viewers, experience them.&nbsp;\u003C/div>\u003C/div>\u003Cp>Just like directors of photography, layout artists orchestrate everything from the angle of a shot to the flow of action within a scene. Animators then use layouts to plan the visual skeleton of animated worlds.\u003C/p>\u003Cp>This article explores the work of layout artists.&nbsp;\u003C/p>\u003Chr>\u003Ch2 id=\"what-is-layout-in-animation\">\u003Cstrong>What Is Layout In Animation\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/storyboard-animation/\">\u003Cu>Storyboards\u003C/u>\u003C/a> resemble a comic strip that outlines the storyline. They offer a glimpse into the plot and highlight where key moments occur within a scene. 
\u003Ca href=\"https://blog.cg-wire.com/how-animatics-bring-stories-to-life/\">\u003Cu>Animatics\u003C/u>\u003C/a>, on the other hand, turn these static frames into animation previews.\u003C/p>\u003Cp>The layout is another intermediate stage that expands on storyboards and animatics by meticulously planning the logistics of each frame – camera angles, staging, and the introduction of backgrounds.\u003C/p>\u003Cp>Imagine animating a zoom on a forest: the layout artist is the one who decides the angle from which you view the towering trees, the path that wanders through the foliage, and how light filters through the leaves.\u003C/p>\u003Chr>\u003Ch2 id=\"why-layout\">\u003Cstrong>Why Layout?\u003C/strong>\u003C/h2>\u003Cp>The layout guides the viewer's eyes to focus on elements that drive the story forward, creating a visual hierarchy. For example, during an action scene, the layout artist manipulates elements to lead your gaze from a hero racing through crowded streets to the villain lurking in the shadows, sharpening the narrative's tension and dynamism.\u003C/p>\u003Cp>A well-crafted layout enhances the story's emotional weight. For a lonely character in a vast desert landscape, a layout could emphasize the space around the character to amplify feelings of isolation and vulnerability. Far from a simple visual choice, it's a story-driven decision.\u003C/p>\u003Cp>Consistency throughout scenes is also crucial for maintaining narrative flow. If a character picks up an object with their right hand in one scene, you expect them to hold it in the subsequent scenes. The layout ensures details like this remain consistent.\u003C/p>\u003Chr>\u003Ch2 id=\"1-camera\">\u003Cstrong>1. Camera\u003C/strong>\u003C/h2>\u003Cp>Camera work within animation layout focuses on the frame's perspective, which is as critical as in live-action filmmaking. 
By emulating real camera behaviours like motion blur, depth of field, and focal length adjusted by 3D software, layout artists can create authentic cinematographic experiences.\u003C/p>\u003Cp>A thoughtful camera layout serves multiple purposes: it establishes mood, suggests narrative depth, and conveys subtle emotional cues, like the creeping push-in from a wide shot to a close-up of a character's face.\u003C/p>\u003Cul>\u003Cli>Layout artists spend time experimenting with their DCC tool to understand the features of virtual cameras. They try common techniques like panning, tilting, and tracking to see how they affect the storytelling.\u003C/li>\u003Cli>They practice setting up scenes with different focal lengths. A wide-angle lens can give a scene a broader sense of space and context, whereas a telephoto lens can isolate elements and focus the viewer's attention on specific details.\u003C/li>\u003Cli>The depth of field in scenes guides the viewer's attention. You can keep the background blurry while focusing on a character to emphasize emotional reactions or reverse it to showcase the environment's significance.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"2-composition\">\u003Cstrong>2. Composition\u003C/strong>\u003C/h2>\u003Cp>Layout artists break down storyboards and animatics, turning initial sketches and sequences into layouts.\u003C/p>\u003Cp>Composition in animation layout refers to the strategic arrangement of visual elements within a frame. It's the art of balancing subjects in a way that guides viewers' attention, maintains visual interest, and supports narrative intent.\u003C/p>\u003Cp>Effective composition can turn a chaotic scene into a coherent visual story: in a battle scene where chaos reigns, good composition could direct the viewer's eye to critical characters amidst the frenzy.\u003C/p>\u003Cul>\u003Cli>Layout artists start with basic shapes like circles, squares, and triangles to represent characters and major elements. 
This approach helps them pre-visualize spatial relationships and action flows, making sure each element occupies the right space relative to others. For example, if you're setting up a conversation between two characters, simple shapes can help you quickly decide their placement and interactions in the frame.\u003C/li>\u003Cli>They can also practice sketching small, quick thumbnails to test multiple composition ideas rapidly.\u003C/li>\u003Cli>The Rule of Thirds is a foundational tool for composition: you divide your frame into a grid with two vertical and two horizontal lines. Placing key elements at the intersections or along these lines can create more balanced and engaging compositions. For example, positioning a character's face at an intersection can naturally draw the viewer's attention.\u003C/li>\u003Cli>Layouts also use leading lines to guide the viewer's eye through the scene. They can be natural elements like roads, rivers, or even how characters are posed. In a chase scene, you might use a leading line created by a winding path to draw viewers' attention toward the character being pursued.\u003C/li>\u003Cli>It's important to maintain a clear focal point by reducing clutter around it. You can achieve this by using color contrast, blurring background details, or adjusting the lighting. In a scene in a crowded marketplace where the protagonist needs to stand out, a layout artist could use brighter colors or more defined lines for the protagonist.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"3-staging\">\u003Cstrong>3. Staging\u003C/strong>\u003C/h2>\u003Cp>Staging refers to characters and objects' position, scale, and angle within a scene.\u003C/p>\u003Cp>It's the difference between an audience passively viewing a scene and becoming emotionally engaged. 
The subtle tilt of a character's head or the looming shadow cast by an ominous object can reveal critical story plots.\u003C/p>\u003Cul>\u003Cli>Slight changes in scaling and perspective can dramatically alter a scene's emotional tone: a high-angle shot makes a character appear vulnerable compared to a low-angle shot to convey power.\u003C/li>\u003Cli>Layout artists change the position of characters, adjust the lighting, or modify the camera angle and observe how each variation impacts the scene. For example, repositioning a character from the background to the foreground to see how it shifts the focus.\u003C/li>\u003Cli>Another technique is to look at a scene in silhouette to check if the emotion and action read clearly without facial features or details.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"4-setting\">\u003Cstrong>4. Setting\u003C/strong>\u003C/h2>\u003Cp>The setting establishes the scene's time, place, and mood through lighting, set dressing density, prop variation, and architectural uniqueness.\u003C/p>\u003Cp>The setting dictates how a scene feels: a dimly lit alley cluttered with debris sets a vastly different tone than a sunlit meadow.\u003C/p>\u003Cul>\u003Cli>A base grayscale shader helps strip away the distraction of color, allowing the artists to focus on how lights and shadows affect the mood of a scene.\u003C/li>\u003Cli>Layout can also have a varying density of set dressing and props to shape the scene's atmosphere: a cluttered, dense set can create a sense of chaos or intimacy, while a sparse setup suggests loneliness or cleanliness.\u003C/li>\u003Cli>Unique architectural features define the setting's personality: exaggerated lines, distinct shapes, or unusual materials make the setting memorable.\u003C/li>\u003Cli>Adjusting lighting to frame essential areas guides the viewer's eye and evokes specific emotions. Same with different light sources―e.g. 
a soft diffused light for tranquillity or harsh directional light for tension.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"5-consistency-clarity\">\u003Cstrong>5. Consistency &amp; Clarity\u003C/strong>\u003C/h2>\u003Cp>Consistency and clarity in animation layout involve maintaining visual continuity across shots to make each transition natural and logical.\u003C/p>\u003Cp>Consider a scene transition where a character walks from one room to another. Without consistent lighting, camera angles, and character scale, the sequence would feel jarring, disrupting the viewer's immersion.\u003C/p>\u003Cul>\u003Cli>It's commonplace to document specific guidelines for each scene to act as a reference, including preferred camera angles, lighting conditions, and character positioning.\u003C/li>\u003Cli>After choosing camera angles that best fit the tone and action of a scene, layout artists maintain these angles when appropriate to avoid disorienting the audience.\u003C/li>\u003Cli>The light source and intensity also have to remain consistent within a scene unless a significant reason demands variation, like a change in time of day or dramatic impact.\u003C/li>\u003Cli>Layout artists pay attention to character sizes in relation to their environment. Clear character turnaround sheets help with consistency.\u003C/li>\u003Cli>They frequently revisit sequences in animatic form to spot and address inconsistencies.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>The layout of a scene is key to creating engaging stories. From camera work that mirrors real cinematography to carefully crafting settings that anchor audiences in imaginary worlds, layout sets the stage for every animated masterpiece.\u003C/p>\u003Cp>But it's not the final production step—far from it! 
Once the team completes, it moves on to secondary animations, adding details, textures, secondary props, and so forth, in an iterative loop before post-production and delivery. You can \u003Ca href=\"https://blog.cg-wire.com/\">\u003Cu>read more about it on our blog\u003C/u>\u003C/a>!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1331,"comment_id":1332,"feature_image":1333,"featured":105,"visibility":10,"created_at":1334,"updated_at":1335,"custom_excerpt":1336,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1337,"primary_tag":1338,"url":1339,"excerpt":1336,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1340},"dc87fbd9-99f4-4766-b066-9a4e86926056","6746bc7fa8ea760001536e52","https://images.unsplash.com/photo-1503095396549-807759245b35?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fHN0YWdlfGVufDB8fHx8MTczMjY4OTI0M3ww&ixlib=rb-4.0.3&q=80&w=2000","2024-11-27T07:30:23.000+01:00","2026-03-26T10:22:00.000+01:00","Animation layout is the foundation of immersive storytelling, guiding camera angles, staging, and 
composition to create visually engaging scenes. Discover how layout artists craft the worlds of animation and bring stories to life with precision and creativity.",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-layout/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@kyleunderscorehead?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Kyle Head\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-layout","2024-12-30T15:34:23.000+01:00",{"title":1318},"animation-layout","posts/animation-layout",[1347],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"56-p-dwHXVbXaAg2xjSLPsnBGigmH7LBOcICLwzq4yw",{"id":1350,"title":1351,"authors":1352,"body":7,"description":7,"extension":8,"html":1354,"meta":1355,"navigation":14,"path":1366,"published_at":1367,"seo":1368,"slug":1369,"stem":1370,"tags":1371,"__hash__":1373,"uuid":1356,"comment_id":1357,"feature_image":1358,"featured":105,"visibility":10,"created_at":1359,"updated_at":1360,"custom_excerpt":1361,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1362,"primary_tag":1363,"url":1364,"excerpt":1361,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1365},"ghost/posts:character-shape-language.json","Character Shape Language (2026): Designing Personalities Through Geometry",[1353],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">😴\u003C/div>\u003Cdiv class=\"kg-callout-text\">Close your eyes and picture a character as a simple black silhouette—completely stripped of color and detail.\u003C/div>\u003C/div>\u003Cp>Can you 
still identify who it is?\u003C/p>\u003Cp>This magic lies in the power of shape. The most iconic characters all originate from mere outlines and are instantly recognizable even in their simplest form.\u003C/p>\u003Cp>This concept, known as shape language, is not just an artistic choice but a tool to weave stories.\u003C/p>\u003Cp>In this article, we study different basic shapes and how to use them for storytelling, using examples from famous animations. By its end, you'll have a general idea of what to look for in your next \u003Ca href=\"https://blog.cg-wire.com/character-color-palettes/\">\u003Cu>character design\u003C/u>\u003C/a> and perhaps useful references to learn more about shape language!\u003C/p>\u003Chr>\u003Ch2 id=\"whats-shape-language\">\u003Cstrong>What's Shape Language?\u003C/strong>\u003C/h2>\u003Cp>Shape language uses psychological and emotional associations we have with different shapes. It is the art of using familiar geometric forms to convey a character's essence, personality, and role within a narrative.&nbsp;\u003C/p>\u003Cp>Take, for example, the work of Genndy Tartakovsky, the creative genius behind \u003Cem>Dexter's Laboratory\u003C/em> and \u003Cem>Samurai Jack\u003C/em>. Tartakovsky expertly uses basic geometries to telegraph character traits: Samurai Jack's sleek, angular design speaks to his righteousness and determination, whereas Dexter's boxy form highlights his squareness, orderliness, and methodical nature. This brilliant use of shape language makes his characters both instantly recognizable and deeply memorable.\u003C/p>\u003Chr>\u003Ch2 id=\"why-shape-language\">\u003Cstrong>Why Shape Language?\u003C/strong>\u003C/h2>\u003Cp>Shape language is as crucial to character design as color.\u003C/p>\u003Cp>First, it tells a story. Just as words form sentences, shapes build narratives that audiences can follow visually. Po from Kung Fu Panda has round shapes to tell us he is warm and friendly.\u003C/p>\u003Cp>It reveals personality. 
Designers can suggest subtleties of character traits. You can instinctively tell who is a bad guy and who is a hero. In The Lion King, Scar is designed with angular, sharp edges and narrow features to communicate his villainous nature, while Mufasa's large, round, and robust shapes emphasize his strength and benevolence.\u003C/p>\u003Cp>As previously mentioned, it contributes to good character design with visually cohesive and expressive characters. In Pixar's \"Incredibles,\" each family member's shape reflects their superpower, like the blocky, solid build of Mr. Incredible suggests his strength, and the slim, elongated design of Elastigirl showcases flexibility and agility.\u003C/p>\u003Cp>Let's have a look at common shapes to learn how animators use them.\u003C/p>\u003Chr>\u003Ch2 id=\"line\">\u003Cstrong>Line\u003C/strong>\u003C/h2>\u003Cp>A filiform, or thread-like, character shape often denotes weakness, elegance, or comedic value. Take Luffy from \u003Cem>One Piece\u003C/em> for example. His elongated form suggests a carefree and flexible nature, aligning perfectly with his adventurous spirit.\u003C/p>\u003Cp>Lines in character shapes are also key to conveying emotional tones and physical traits.\u003C/p>\u003Cp>In terms of line direction, a horizontal line suggests calmness and stability to depict tranquil and grounded characters or settings. Totoro from \u003Cem>My Neighbor Totoro\u003C/em> has a round, horizontal orientation to give a sense of calm and stability while symbolizing the peaceful nature of the forest spirit.\u003C/p>\u003Cp>Vertical lines, in contrast, express power, growth, or ambition, suitable for characters that embody strength or upward mobility. The tall, vertical stature of Superman, along with his iconic upright flying pose, conveys his strength and morality.\u003C/p>\u003Cp>Diagonal lines, charged with dynamism and unrest, indicate change and development, adding tension and excitement to a scene. 
Think of Spider-Man swinging through the cityscape. The diagonal lines formed by his body as he moves through the air echo how he constantly evolves to adapt to the challenges he faces.\u003C/p>\u003Chr>\u003Ch2 id=\"circle\">\u003Cstrong>Circle\u003C/strong>\u003C/h2>\u003Cp>The circle is frequently used to imbue characters with a friendly and approachable vibe. The round edges imply safety, softness, and welcoming nature, so characters with circular features often come across as amiable and open-hearted.\u003C/p>\u003Cp>Circles also symbolize unity and wholeness and are changeable, allowing for a range of emotions from joy to surprise.\u003C/p>\u003Cp>Baymax from \u003Cem>Big Hero 6\u003C/em> has a circular design that immediately communicates his role as a nurturing and protective healthcare robot. His round form emphasizes his harmlessness, and his soft, squishy quality makes him infinitely huggable and endearing to audiences. When Baymax is weaponized to fight crime, the contrast with the shape adds depth to the character and subverts the audience's expectations for drama.\u003C/p>\u003Chr>\u003Ch2 id=\"square\">\u003Cstrong>Square\u003C/strong>\u003C/h2>\u003Cp>Squares and rectangles are synonymous with stability, strength, and reliability. These shapes portray characters with solid, dependable, and supportive characteristics, while they can also hint at stubbornness or heavy-handedness.\u003C/p>\u003Cp>Consider Sulley from \u003Cem>Monsters, Inc.\u003C/em> His broad, rectangular stature underscores his robustness and dependability as Monstropolis' top scarer. Yet, despite his formidable frame, his character arc reveals a softness, balancing his physical presence with emotional depth.\u003C/p>\u003Chr>\u003Ch2 id=\"triangle\">\u003Cstrong>Triangle\u003C/strong>\u003C/h2>\u003Cp>Triangles introduce a sense of dynamism to character design: with their sharp angles and directional points, triangles can signify danger, unpredictability, and movement. 
The sharpness also suggests an edge or cunning, making them ideal for characters that evoke caution or intrigue.\u003C/p>\u003Cp>In The Lion King, Scar's triangular design contrasts Mufasa's squared design. His angular features underscore his menacing and calculating nature, reinforcing his role as a villain in a visual language that speaks volumes before he even utters a word.\u003C/p>\u003Chr>\u003Ch2 id=\"spirals\">\u003Cstrong>Spirals\u003C/strong>\u003C/h2>\u003Cp>Spirals are fascinating shapes often used to symbolize cycles of life, creativity, and growth. They provide a dynamic flow, mimicking natural patterns found in everything from galaxies to seashells and storm systems.\u003C/p>\u003Cp>The spiral is artistically explored in works like Junji Ito's \u003Cem>Uzumaki,\u003C/em> where it symbolizes chaos and an uncontrollable force of nature. The Uzumaki family symbol in \u003Cem>Naruto\u003C/em> represents the ongoing cycles of life and personal growth.\u003C/p>\u003Cp>Spirals are especially common for eye designs. In One Piece, Sanji's eyebrows are spiral-shaped.\u003C/p>\u003Cp>Japanese animation often relies on spiral eye patterns to symbolize strong wills or commanding presences, while Western cartoons only use them to imply unconsciousness or confusion.\u003C/p>\u003Chr>\u003Ch2 id=\"shape-synergy\">\u003Cstrong>Shape Synergy\u003C/strong>\u003C/h2>\u003Cp>While individual shapes carry distinct meanings, blending shapes allows for intricate character designs that harmoniously reflect multifaceted personalities. Characters are rarely just squares or circles. The genie in Disney's Aladdin has a swirling, smoke-like lower body to underscore his magical and fluid nature, while his muscular upper body conveys strength and confidence.\u003C/p>\u003Cp>Different characters can also have similar or contrasting shapes to denote their relationships. 
In a character team like the Powerpuff Girls, each character embodies unique shapes that correlate with their personalities, but they look like a cohesive unit together.\u003C/p>\u003Cp>Same with synergy between character shapes and background designs. Legendary animator Paul Grimault uses vertical elements to introduce awe and height, as seen in the towering structures of \u003Cem>The King and the Mockingbird,\u003C/em> to contrast with the characters and obtain a surrealist feel.\u003C/p>\u003Chr>\u003Ch2 id=\"break-the-rules\">\u003Cstrong>Break the Rules\u003C/strong>\u003C/h2>\u003Cp>While traditional symbolism holds significant power, there's an undeniable allure in breaking the rules to surprise and intrigue viewers. By subverting expectations, animators add layers of complexity to make characters more memorable.\u003C/p>\u003Cp>Spongebob Squarepants' square body humorously hints at rigidity and steadfastness, contrasting with his carefree and joyous personality.\u003C/p>\u003Cp>But before you can effectively break the rules, you need to thoroughly understand traditional symbolism and design principles. Knowing the norms allows for a mindful rupture that feels deliberate and impactful rather than accidental. It's a powerful tool to encourage viewers to change their assumptions.\u003C/p>\u003Cp>While subverting expectations can add intrigue, maintaining harmony in your composition is key. You can, for example, break conventional form but still use the golden ratio to ensure visual appeal.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Shape language is an essential element of character design, so make sure to think with shapes during your initial sketches! 
We covered basic shapes, but there are many more out there you can use for different effects.\u003C/p>\u003Cp>It's a powerful visual tool that, when combined with \u003Ca href=\"https://blog.cg-wire.com/character-color-palettes/\">\u003Cu>thoughtful color design\u003C/u>\u003C/a> and consideration of character personality and the story they inhabit, contributes to creating unforgettable characters. Just as shapes guide the structure, colors add depth and emotion, resulting in a harmonious.\u003C/p>\u003Cp>If you want a final reference, Les Shadoks is a great example of pushing shape language to its limits. The series uses simple, geometric shapes to craft its unique character and world designs, predominantly featuring ovals, triangles, and rectangles. This minimalistic approach demands that each form communicates beyond its basic structure, making every character instantly recognizable and emotionally resonant through exaggerated and abstracted silhouettes. The stark, often absurd lines and shapes capture the essence of the characters' whimsical and nonsensical world, proving that complexity, including colors, is not necessary to achieve rich storytelling.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. 
We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1356,"comment_id":1357,"feature_image":1358,"featured":105,"visibility":10,"created_at":1359,"updated_at":1360,"custom_excerpt":1361,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1362,"primary_tag":1363,"url":1364,"excerpt":1361,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1365},"7353a738-e6bb-497f-b577-ae75cfa1d20d","6746b4b3a8ea760001536e37","https://images.unsplash.com/photo-1508700193932-2293b4385ab9?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fHNpbG91ZXR8ZW58MHx8fHwxNzMyNjg3NjM1fDA&ixlib=rb-4.0.3&q=80&w=2000","2024-11-27T06:57:07.000+01:00","2026-03-26T10:29:28.000+01:00","Shape language uses geometric forms like circles, squares, and triangles to convey personality, emotion, and narrative roles in character design. 
\n\nLearn how animators use this powerful tool to craft iconic characters and elevate your own designs.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/character-shape-language/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@nseylubangi?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Nsey Benajah\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/character-shape-language","2024-12-23T15:34:02.000+01:00",{"title":1351},"character-shape-language","posts/character-shape-language",[1372],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"1_ATBPFAdgLEHtYuNNYDfvwVSdHqMTgVEy7vxlygZ2Y",{"id":1375,"title":1376,"authors":1377,"body":7,"description":7,"extension":8,"html":1379,"meta":1380,"navigation":14,"path":1391,"published_at":1392,"seo":1393,"slug":1394,"stem":1395,"tags":1396,"__hash__":1398,"uuid":1381,"comment_id":1382,"feature_image":1383,"featured":105,"visibility":10,"created_at":1384,"updated_at":1385,"custom_excerpt":1386,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1387,"primary_tag":1388,"url":1389,"excerpt":1386,"reading_time":1131,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1390},"ghost/posts:smear-frames.json","(2026) How Smear Frames Enhance Animation",[1378],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">ℹ️\u003C/div>\u003Cdiv class=\"kg-callout-text\">A smear frame is a single frame in an animation sequence that depicts motion through the exaggeration or distortion of an object or 
character.\u003C/div>\u003C/div>\u003Cp>Let's look at an example from \u003Cstrong>The Simpsons\u003C/strong>:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-15.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"852\" height=\"480\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/03/image-15.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/03/image-15.png 852w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Unlike standard frames that might show a clear picture of a character at a specific point in time, a smear frame blurs these distinctions, presenting a visual interpretation of motion that guides the viewer's eye across the screen.\u003C/p>\u003Cp>Another example, if you recall the fast-paced antics of characters like Road Runner or Bugs Bunny in classic \u003Cstrong>Looney Tunes\u003C/strong>, those swift movements are often animated with the help of smear frames to smoothly transition from one point to another within a split second.\u003C/p>\u003Chr>\u003Ch2 id=\"why-animators-use-smear-frames\">\u003Cstrong>Why Animators Use Smear Frames\u003C/strong>\u003C/h2>\u003Cp>The most common use of smear frames is to simulate motion blur. This technique mimics the effect created by a camera when capturing fast movement, and it adds a realistic touch to animations. For instance, when an arrow is shot from a bow, a smear frame might depict its flight with a stretched appearance, hinting at its rapid journey across the screen.\u003C/p>\u003Cp>While smear frames originated in traditional 2D animation, their impact isn't confined solely to this medium; they are prevalent in 3D animation as well. 
For example, animators in animated films like \"\u003Cstrong>Spider-Man: Into the Spider-Verse\u003C/strong>\" use smear frames to maintain a comic book feel.\u003C/p>\u003Cp>Smear frames emphasize an object's motion path, seamlessly blending transitions between poses or actions. Characters performing acrobatics, like somersaults or spinning kicks, often use smear frames to highlight fluidity and grace, capturing the audience's imagination.\u003C/p>\u003Cp>Sometimes, multiple frames can be combined into one smear frame to condense action. This is particularly useful in fast sequences to capture critical elements without overwhelming the viewer with too many individual frames.\u003C/p>\u003Chr>\u003Ch2 id=\"1-when-to-use-a-smear-frame\">\u003Cstrong>1. When To Use A Smear Frame\u003C/strong>\u003C/h2>\u003Cp>While they inject energy into scenes, their effectiveness relies on strategic placement.\u003C/p>\u003Cp>Smear frames are best during rapid movement or transition moments, like a character lunging forward, an item being thrown, or exaggerated comedic actions.\u003C/p>\u003Cp>In fighting animations, each swing of the fist or blade is accentuated with a smear frame to portray speed and aggression.\u003C/p>\u003Cp>But it's important to maintain consistency—the technique should remain fluid throughout the scene.\u003C/p>\u003Cp>Animators also avoid overuse, as excessive smears can clutter visuals and confuse the viewer.\u003C/p>\u003Chr>\u003Ch2 id=\"2-two-smear-frame-techniques\">\u003Cstrong>2. Two Smear Frame Techniques\u003C/strong>\u003C/h2>\u003Cp>Animators work with a sequence of frames shown in rapid succession. 
Motion blur is achieved by simulating what happens when the camera's shutter stays open for a duration while capturing multiple positions of a moving object within that single frame.\u003C/p>\u003Cp>There are two types of smear frames:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Elongated in-between\u003C/strong> - This technique elongates the subject to bridge the gap between key poses over one or two frames. Some animations in Naruto Shippuden are famous for their exaggerated smear frames:\u003C/li>\u003Cli>\u003Cstrong>Multiples\u003C/strong> - Here, animators duplicate parts or the entirety of the subject along its motion path while adding a slight blur. Here is one from Looney Tunes:\u003C/li>\u003C/ul>\u003Cp>In 2D animation, smear frames rely on stretching, warping, or multiplying elements within a scene. Balance is key to make sure the smears enhance rather than overwhelm the action.&nbsp;\u003C/p>\u003Cp>Modern VFX and photo editing software often include motion blur settings to digitally simulate the smear frame effect via algorithms, giving animators new tools to achieve the desired effect with greater control and efficiency.\u003C/p>\u003Cp>Using smear frames in 3D animation involves manipulating the 3D models themselves or through technical effects such as adjusting the mesh. Characters and objects can be stretched or distorted to match the speed and style of 2D smear frames. Animators can also multiply meshes or integrate motion blur to achieve the desired illusion.\u003C/p>\u003Chr>\u003Ch2 id=\"3-movement-breakdown\">\u003Cstrong>3. Movement Breakdown\u003C/strong>\u003C/h2>\u003Cp>While smear frames and in-betweening occur during movement breakdown, their roles differ significantly. In-betweening focuses on creating smooth transitions through evenly spaced, accurate frames. Smear frames, on the other hand, are more about exaggerating motion to convey speed and dynamism at key points. 
Both techniques complement each other: while in-betweening ensures consistency, smear frames add randomness.\u003C/p>\u003Cp>During breakdown, the creation of smear frames usually goes as follows:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Identify the motion\u003C/strong> - First, identify the keyframes between which the fast motion occurs. These could be anything from a character throwing a punch to a ball being kicked. Smear frames are particularly useful in actions that need to convey speed.\u003C/li>\u003Cli>\u003Cstrong>Plan the timing\u003C/strong> - Determine where the smear should occur in the movement. Smears typically fall between two extremes or key poses in an action. For instance, if a character is quickly turning their head, the smear would occur between the start and end of the head turn.\u003C/li>\u003Cli>\u003Cstrong>Sketch the smear frame\u003C/strong> - Begin by sketching the object or character in the distorted state using the techniques mentioned earlier (duplication, stretching, or motion lines).\u003C/li>\u003Cli>\u003Cstrong>Use exaggeration\u003C/strong> - Don't be afraid to go over the top with exaggeration. Smear frames are not about realism―but about conveying energy and speed. For example, when animating a character's punch, extend the arm to appear longer than it actually is.\u003C/li>\u003Cli>\u003Cstrong>In-between frame placement\u003C/strong> - Place your smear frame between two keyframes. It can be a single frame in a 24 fps animation, just enough to provide a brief glimpse to enhance the motion.\u003C/li>\u003Cli>\u003Cstrong>Test with timing\u003C/strong> - Test the animation to see how it flows once your smear frame is in place. Adjust the timing if necessary.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Animators are illusionists, and smear frames are a prime example of this. 
Smear frames add an illusion of speed by playing a simple trick on our eyes, and the result is memorable.\u003C/p>\u003Cp>Unfortunately, this technique is becoming less common with software features to mimic motion blur, but it’s an integral part of what makes vintage, old-school animation great. New animation productions rely on smear frames to give a retro look, or simply to add a sense of surrealism.\u003C/p>\u003Cp>If you want more examples of surprising smear frames, check out the \u003Ca href=\"https://www.reddit.com/r/Smearframes/?ref=blog.cg-wire.com\">\u003Cu>r/smearframes subreddit\u003C/u>\u003C/a>. Note that you can also use video players like VLC Media Player to view videos frame by frame to catch smear frames in the wild.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. 
We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1381,"comment_id":1382,"feature_image":1383,"featured":105,"visibility":10,"created_at":1384,"updated_at":1385,"custom_excerpt":1386,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1387,"primary_tag":1388,"url":1389,"excerpt":1386,"reading_time":1131,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1390},"1b5efe42-2b94-4031-9cb2-5ccce8f607fe","6746b0bca8ea760001536e18","https://images.unsplash.com/photo-1429857950654-539591eef320?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fG1vdGlvbiUyMGJsdXJ8ZW58MHx8fHwxNzMyNjg1OTAxfDA&ixlib=rb-4.0.3&q=80&w=2000","2024-11-27T06:40:12.000+01:00","2026-03-26T10:41:40.000+01:00","Smear frames are the secret to creating fast, dynamic motion in animation. 
This article explores how animators use them in 2D and 3D work, with tips to bring energy and speed to your scenes.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/smear-frames/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@danist07?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Danist Soh\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/smear-frames","2024-12-16T14:54:30.000+01:00",{"title":1376},"smear-frames","posts/smear-frames",[1397],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"b8PHfHof6RDF8J1LphLd2IHuu5FkIiYok-rJfgIgKZ4",{"id":1400,"title":1401,"authors":1402,"body":7,"description":7,"extension":8,"html":1404,"meta":1405,"navigation":14,"path":1416,"published_at":1417,"seo":1418,"slug":1419,"stem":1420,"tags":1421,"__hash__":1423,"uuid":1406,"comment_id":1407,"feature_image":1408,"featured":105,"visibility":10,"created_at":1409,"updated_at":1410,"custom_excerpt":1411,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1412,"primary_tag":1413,"url":1414,"excerpt":1411,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1415},"ghost/posts:stepped-animation.json","(2026) The Art of Stepped Animation: Bringing Key Poses to Life",[1403],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎥\u003C/div>\u003Cdiv class=\"kg-callout-text\">Stepped animation refers to \"step mode\" on animation software.\u003C/div>\u003C/div>\u003Cp>An animation is a series of frames. 
In step mode, animators focus on \"key poses\" or keyframes, capturing important moments of positions. In this mode, animations do not transition smoothly from one frame to another. Instead, they jump from one key position to the next. The in-between frames are left unrefined or omitted entirely.\u003C/p>\u003Cp>This approach allows animators to clearly define the most crucial moments of movement or expression without the distraction of constant fluidity.\u003C/p>\u003Cp>For example, consider a character preparing to jump. In step mode, the key poses would include a crouch, a jump, and a landing.\u003C/p>\u003Cp>In contrast to stepped animation, spline mode involves smooth transitions between poses, with in-betweens generated automatically via interpolation. Spline animation is production-ready and used for final animations.\u003C/p>\u003Ch2 id=\"why-stepped-animation\">\u003Cstrong>Why Stepped Animation\u003C/strong>\u003C/h2>\u003Cp>Step mode allows animators to overview an action and its pacing before committing to more detailed work.\u003C/p>\u003Cp>Key poses are the most important elements of a movement. By concentrating on these keyframes, animators can simplify their workflow to focus on the most impactful actions. There is no transition between keyframes, so when you hit play, the software displays each pose sequentially as if flipping through a series of drawings before diving into more complex inbetweening to smoothen the animation.\u003C/p>\u003Cp>Stepped animation also helps develop a basic sense of timing: with clear keyframes, you can easily test how long each pose is held and the rhythm of the overall animation. This method allows for quick iterations: if a pose feels too slow or fast, you can adjust the timing parameters without worrying about how it affects the in-betweens.\u003C/p>\u003Chr>\u003Ch2 id=\"1-key-frames\">\u003Cstrong>1. 
Key Frames\u003C/strong>\u003C/h2>\u003Cp>Animators begin by deciding the action's starting point and endpoint―the initial and final keyframes. For example, if you were animating a character that waves, you might start with the arm down and end with the arm fully raised.\u003C/p>\u003Cp>We would then use your animation software to create keyframes at these points. Most programs allow the precise specification of frame numbers to provide clarity in the editing timeline.\u003C/p>\u003Cp>It's important to control the timing between keyframes to achieve the desired movement. The time between keyframes is crucial: if a character snaps their fingers, you can space the frames close to give a sense of a sudden movement or further apart to emphasize it.\u003C/p>\u003Cp>After placing keyframes, we usually play back the animation until it feels right, looking for any unintended motions or inefficiencies in timing.\u003C/p>\u003Cp>While the focus of step mode is on the keyframes that define distinct movements, subtle adjustments in the poses can add depth. 
You can, for example, consider incorporating slight anticipations or follow-through animations at certain keyframes for better results.\u003C/p>\u003Cp>If you take a bouncing ball as an example, you could end up with something like this:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Key Frame 1\u003C/strong>: The ball is at its highest point.\u003C/li>\u003Cli>\u003Cstrong>Key Frame 2\u003C/strong>: The ball is halfway down to the ground.\u003C/li>\u003Cli>\u003Cstrong>Key Frame 3\u003C/strong>: The ball touches the ground.\u003C/li>\u003Cli>\u003Cstrong>Key Frame 4\u003C/strong>: The ball is at its lowest point (compressed).\u003C/li>\u003Cli>\u003Cstrong>Key Frame 5\u003C/strong>: The ball returns to the initial height.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXdW02zi9telPjDOADrB3Au-oq_zeJv47yCVag7iRp4we_CHzIVcorW1EU_affKBTAY7nR6DnlqAaAK5SQQLJuNHX5HocqSDc14U8BqIoS4kq7oNRlmRfOETZa3Lh2l1-VfURdNMOUiURFnlzjhPN3wlZgkR?key=FWj1XlzybXOodEra6azNbA\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"580\" height=\"363\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: AngryAnimator.com\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"2-storyboarding-animatic\">\u003Cstrong>2. Storyboarding &amp; Animatic\u003C/strong>\u003C/h2>\u003Cp>The selected keyframes are used during pre-production for storyboards and animatics.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/storyboard-animation/\">\u003Cu>Storyboarding is creating a visual representation of a sequence of actions\u003C/u>\u003C/a> or events in the form of a series of images arranged in the order they will appear. 
It's a blueprint to plan scenes, transitions, and key actions before moving into the animation phase.\u003C/p>\u003Cp>A storyboard clarifies the sequence of keyframes for stepped animation.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXfzl-IyWRwQVFIj5jd-TbqtRHlO2JFmjpfhjZ_ASxbUHHQoJZ9JpLHvsuPd6jfRqi0QNvtVI6OBHRFTckuWahjTIX-TLBnQF4Za_tBeCyMdvRVQVw9q3ouWC8PhXSTQNRyei5dP8LCaHvWch65XsupyJMc?key=FWj1XlzybXOodEra6azNbA\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"616\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Wallace And Gromit\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>An animatic is a preliminary version of an animation combining static images from storyboards with a timeline, sound, and sometimes voiceover to offer a clearer vision of how the story unfolds.\u003C/p>\u003Cp>Both help avoid costly revisions during later stages of production, providing a structure that simplifies the decision-making process for animators. This is particularly important in stepped animation―where there are no transitions between poses―to understand how these keyframes relate to one another.\u003C/p>\u003Chr>\u003Ch2 id=\"3-switching-to-spline-mode\">\u003Cstrong>3. Switching To Spline Mode\u003C/strong>\u003C/h2>\u003Cp>As animators begin production, spline mode will slowly replace step mode.\u003C/p>\u003Cp>The main challenge when working with stepped animation is getting a feel of how timing will translate into the final rendered movement.\u003C/p>\u003Cp>When you switch from stepped to spline, the interpolation creates a smoother movement that may not have the desired energy. While your brain can fill in the gaps during the stepped stage, the computer's interpolation can create a smoother but less impactful motion. 
If a character jumps, the snappy ascent created in stepped mode can become a lagging glide with interpolation.\u003C/p>\u003Cp>For this reason, it's important to frequently switch between spline and step mode during production to obtain the desired result.\u003C/p>\u003Cp>In digital content creation software, there's often a simple command or option to convert your stepped keys to spline. This action will change the interpolation type, allowing the software to generate intermediate frames.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXd0ObRVuOnlLQ5WiOtCsPNfmX18K0twN7UDRxeHWqTmm-9JEiuZKS4wxGbeZQ8Nc9WbxZVyxzvYSCYm5ASe2IlYtOxf8Jh65Ut8k3YspGxpe0FbzSmNOS43woZBy6MsHzT9PUt-aoE-t54d-r-bD9LGmrKs?key=FWj1XlzybXOodEra6azNbA\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"351\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Wobbe Koning on YouTube\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Animators then play back the animation frequently after making adjustments in iterative steps.\u003C/p>\u003Cp>Another key element to take into account between step and split mode is the interpolation curve and algorithm.\u003C/p>\u003Chr>\u003Ch2 id=\"4-interpolation\">\u003Cstrong>4. Interpolation\u003C/strong>\u003C/h2>\u003Cp>Interpolation is a mathematical process determining intermediate values between two values depending on an evolution curve. 
In interpolation mode, the computer generates in-between frames by mathematically calculating the motion along the trajectory established by the keyframes.\u003C/p>\u003Cp>Rather than manual drawing or image creation for each frame, interpolation can automate the creation of in-between frames based on an animation sequence's defined start and endpoints.\u003C/p>\u003Cp>Different interpolation curves/algorithms bring different results.\u003C/p>\u003Cp>Sometimes, you need a linear evolution. The frames change at a constant speed. For example, a car on a highway would have its wheels turn at the same speed.\u003C/p>\u003Cp>In other situations, you might want the interpolation to get faster toward the end to make a punch more impactful or the beginning of the movement if you animate a sprinter.\u003C/p>\u003Cp>Animators inspect the motion curves in their DCC software's graph editor. This step is critical because, despite the automatic interpolation, you will likely need to refine the curves to reclaim that sense of snappiness that might have been lost going from step to spline mode.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXf31yL4--hQ-qLESuun7nwSEpyEK_YpvGqNGqty6aH_eTY1hdW9qJgIp18Cg3jkFtZ6U8TJGOWKZ2U8UQ6MMXE27_kmt62FQRlelhYtPBwYjAScqEld4yIaPSqdvhRkVfufGFdVNdu8q840qNOpbi19bd_V?key=FWj1XlzybXOodEra6azNbA\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"351\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: 3D Blender Tutorials by ianscott888 on Youtube\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>By concentrating on keyframes and leaving in-betweens until later, animators can refine the flow of their animations. 
This technique forms the groundwork for more intricate details using spline animation, where interpolation creates smooth transitions. As the animation progresses from the structured jumps of stepped mode to the fluidity of spline, the careful balance of timing and motion is preserved.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1406,"comment_id":1407,"feature_image":1408,"featured":105,"visibility":10,"created_at":1409,"updated_at":1410,"custom_excerpt":1411,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1412,"primary_tag":1413,"url":1414,"excerpt":1411,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1415},"6e42db03-1461-4812-a323-83ba63fe422a","672aef7a6d9acc0001dddaa5","https://images.unsplash.com/photo-1549816198-3c2704fdf06f?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDQyfHxjaGFyYWN0ZXJ8ZW58MHx8fHwxNzMzOTI0NTQ1fDA&ixlib=rb-4.0.3&q=80&w=2000","2024-11-06T05:24:26.000+01:00","2026-02-20T06:04:58.000+01:00","Stepped animation allows animators to highlight essential movements by focusing on keyframes first, 
saving the finer details for spline animation later. Discover how this approach, using step mode in digital content creation tools, streamlines workflow and perfects timing for impactful animation.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/stepped-animation/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@feymarin?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Fey Marin\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/stepped-animation","2024-12-11T14:49:59.000+01:00",{"title":1401},"stepped-animation","posts/stepped-animation",[1422],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"hMK8PsmpTsLOBRZsIXjEKSzHHo8kD2MYaKaCa3jTu9s",{"id":1425,"title":1426,"authors":1427,"body":7,"description":7,"extension":8,"html":1429,"meta":1430,"navigation":14,"path":1441,"published_at":1442,"seo":1443,"slug":1444,"stem":1445,"tags":1446,"__hash__":1448,"uuid":1431,"comment_id":1432,"feature_image":1433,"featured":105,"visibility":10,"created_at":1434,"updated_at":1435,"custom_excerpt":1436,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1437,"primary_tag":1438,"url":1439,"excerpt":1436,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1440},"ghost/posts:animation-laptops.json","Choosing the Best Laptop for Animation: A Complete Guide (2026)",[1428],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">💻\u003C/div>\u003Cdiv class=\"kg-callout-text\">Most animation work happens on a computer, from digital content creation tools to planning.\u003C/div>\u003C/div>\u003Cp>Choosing one can feel overwhelming 
considering all the available options: laptop, tablet, PC build, pre-built tower, refurbished computer... the list of terminals just never ends.\u003C/p>\u003Cp>Though most professional animators use a dedicated PC build for the processing power, laptops can be mandatory during studies, while you're on the road, or while doing presentations.\u003C/p>\u003Cp>But even, how does one go about picking a laptop? What to look for? Where?\u003C/p>\u003Cp>In this article, we guide you through technical and usage specifications to take into account. We won't give you specific laptops to buy since the market changes daily, and you'll probably find better deals researching yourself, but we'll tell you what to look for and prioritize your choices.\u003C/p>\u003Cp>First, let's look at the main criteria for choosing a laptop.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-6.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"613\" height=\"398\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2024/11/image-6.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-6.png 613w\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: 4WinKey\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"criteria\">\u003Cstrong>Criteria\u003C/strong>\u003C/h2>\u003Cp>There are three main criteria you need to consider when choosing a laptop.\u003C/p>\u003Cp>First, your chosen operating system (OS) significantly impacts your animation tools and workflows. Different animation software has specific OS requirements. Windows is widely used in the animation industry thanks to its compatibility with a broad range of software. 
MacOS is favored among many creative professionals, especially those using software like Final Cut Pro and Motion. While less common for laptops, Linux systems can be a viable option for animators who are comfortable with open-source software like Blender.\u003C/p>\u003Cp>The type of animation you intend to work on also dictates the specifications you should prioritize in a laptop. For 2D animation and motion graphics, you can often get away with less robust hardware. Tablet-like laptops with a stylus can also be used for 2D animation. If you're focused on 3D animation, the requirements become more demanding.\u003C/p>\u003Cp>Lastly, your budget is often the determining factor. Setting a clear budget lets you target machines that provide the best value for your desired features. As you'll read in this guide, however, there are solutions to get by with cheap laptops while you save up.\u003C/p>\u003Chr>\u003Ch2 id=\"understanding-laptop-specifications\">\u003Cstrong>Understanding Laptop Specifications\u003C/strong>\u003C/h2>\u003Cp>If you're not a computer nerd, it can feel difficult to understand what you need to look for in the endless laptop options available to you.\u003C/p>\u003Cp>For an animator, you only need to focus on four specs:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>CPU &amp; RAM\u003C/strong> - The Central Processing Unit (CPU) is often referred to as the brain of your laptop. For animation tasks, a powerful CPU can handle multiple tasks simultaneously when rendering complex animations. Aim for at least an Intel Core i5 or AMD Ryzen 5. Intel i7, i9, or AMD Ryzen 7 and 9 mean more processing cores and threads, significantly speeding up rendering times. A minimum of 8Go of Rapid Access Memory (RAM) is advised to handle more in-memory operations.\u003C/li>\u003Cli>\u003Cstrong>SSD\u003C/strong> - Solid State Drives (SSDs) are essential for a smooth animation experience because they handle all the file storage. 
Unlike traditional Hard Disk Drives (HDDs), SSDs offer significantly faster read and write speeds, which means quicker load times for your animation software and faster file access. Look for laptops with at least 512GB SSDs; however, 1TB or more is advisable for handling large animation files and applications.\u003C/li>\u003Cli>\u003Cstrong>GPU\u003C/strong> - Graphics Processing Units (GPUs) are about 50 to 100 times faster than CPUs at rendering animation, but they are also the most expensive part of a laptop. Nvidia GPUs are usually the go-to GPUs for 3D rendering, but you can also have AMD or Intel GPUs.\u003C/li>\u003Cli>\u003Cstrong>Screen resolution\u003C/strong> - Screen resolution is another important spec that can greatly impact your animation workflow: a higher resolution means better clarity and color accuracy, allowing you to see finer details in your animations. Full HD (1920 x 1080) display is standard. 4K (3840 x 2160) display is better for more accurate color grading but also impacts battery life.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXct0x3uzBiob4txe5ek_Mkyb_nJPYcy7pd3wC1mgAt2dqE-fxphCDjWS-FxyTodFBIyTmmp4pLOTBRnLUHOEOlzRnA2ep1f-17SckSJGmgBXnXinOLz_ktScGEytuJUkqcia3ODUrocY3ypoHP_b8e014k?key=bDBc3RwqJv352PgTyU9KOw\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"391\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: iD Tech\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"graphic-tablet-support\">\u003Cstrong>Graphic Tablet Support\u003C/strong>\u003C/h2>\u003Cp>Sometimes, you can stumble upon laptops offering touch screens, like this one:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg 
src=\"https://lh7-rt.googleusercontent.com/docsz/AD_4nXe7yS94W2DggSNarDK3y1KWUVwnm8do6rsfNA9FabQgoUjPLkya5DMRh3yOPTmPO4mzFgKrZIOHRgZjM13IhSOojPHMvEzb8AFu-tC6NCcXn-0RQiEH6ts5UDqVYAzpgaxi8LaB9aFuFkboaP4Z3Yln85E?key=bDBc3RwqJv352PgTyU9KOw\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"624\" height=\"351\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Forbes\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>However, they often result in a significant price hike without providing an ideal drawing experience.\u003C/p>\u003Cp>Instead, it’s wise to invest in a quality graphic tablet, which offers superior precision and control. Graphic tablets are essential for animators, so they’re well worth using.  Just make sure the laptop you choose has the necessary ports (USB-C, USB-A, etc.) to support your graphic tablet.\u003C/p>\u003Chr>\u003Ch2 id=\"solutions-for-low-budgets\">\u003Cstrong>Solutions for Low Budgets\u003C/strong>\u003C/h2>\u003Cp>If your budget is tight―less than $500―don't despair! There are solutions.\u003C/p>\u003Cp>If you mainly use Adobe products, you can use their web-based platform. Thanks to cloud rendering, even a $300 laptop can smoothly run Adobe Photoshop, Adobe Premiere, or even Adobe After Effects—no need to purchase expensive machines. As a student, chances are you'll have access to cheap Adobe licenses, too!\u003C/p>\u003Cp>For 3D animations, you can\u003Ca href=\"https://blog.cg-wire.com/partnership-with-ranch-computing/\"> \u003Cu>use render farms\u003C/u>\u003C/a> to handle intense rendering tasks without needing high-end GPUs. A render farm is simply a managed service where you can upload 3D assets to render, and the result is delivered to you via the web interface. 
The pricing depends on your usage (e.g., 1 hour of rendering for $0.008), but it can be quite cheap: Ranch Computing offers 50% discounts for students on non-commercial projects, for example.\u003C/p>\u003Cp>If you have a powerful PC built at home, you can also consider remote desktop solutions to use it remotely from your laptop.\u003C/p>\u003Cp>Using these solutions, even $200 Chromebooks can be a supplementary device for lighter animation work and administration tasks.\u003C/p>\u003Chr>\u003Ch2 id=\"what-about-macos\">\u003Cstrong>What About MacOS?\u003C/strong>\u003C/h2>\u003Cp>MacBooks and Mac Minis remain popular among animators despite their higher price points. With macOS, you gain access to a robust ecosystem of creative tools optimized for artistic tasks, offering seamless hardware-software integration.\u003C/p>\u003Cp>MacBook Pros deliver exceptional performance and battery life, making them an attractive option for animators on the go, while Mac Minis provide desktop-level performance in a compact form factor.\u003C/p>\u003Cp>However, owning a Mac is far from mandatory—even if you aspire to work at Pixar (they’ll provide a laptop, no worries).\u003C/p>\u003Chr>\u003Ch2 id=\"consider-refurbished-laptops\">\u003Cstrong>Consider Refurbished Laptops\u003C/strong>\u003C/h2>\u003Cp>Opting for refurbished laptops can provide cost-effective alternatives without compromising on quality.\u003C/p>\u003Cp>Not only do refurbished laptops help reduce electronic waste, but they also offer substantial savings and reliable options with warranties and certified checks.\u003Cbr>\u003Cbr>At 50-80% discounts, it could be a no-brainer! 
Don't forget to ask your school/university if they also have any laptop lending programs.\u003C/p>\u003Chr>\u003Ch2 id=\"picking-a-laptop\">\u003Cstrong>Picking A Laptop\u003C/strong>\u003C/h2>\u003Cp>Now you know everything about picking a laptop, it's time to shop:\u003C/p>\u003Col>\u003Cli>Create a spreadsheet and add eight columns: title, OS, CPU, GPU, SSD, screen resolution, asking price, and shopping URL\u003C/li>\u003Cli>Go through refurbished laptop websites like Dell Refurbished, Backmarket, or Amazon Renewed\u003C/li>\u003Cli>Go through more traditional e-commerce websites or IT shops\u003C/li>\u003Cli>Rank all the options listed in your spreadsheet by price\u003C/li>\u003Cli>Look for responsible financing options (pay in 3 times, low-interested credit card loan, etc.) (disclaimer: no financial advice, be responsible)\u003C/li>\u003C/ol>\u003Cp>And voilà! \u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Selecting a laptop is an important step in your journey as an animator, but there’s no need to overthink it or break the bank. Start by understanding operating system compatibilities, assessing the requirements of your animation projects, and working within your budget constraints.\u003C/p>\u003Cp>By focusing on key specifications like CPU performance, storage capacity, GPU power, and screen resolution, you can narrow down the ideal machine for your needs. Even on a tight budget, options like cloud-based rendering and refurbished devices offer cost-effective alternatives without compromising quality.\u003C/p>\u003Cp>If possible, consider building a PC instead of purchasing a more expensive laptop, as this often provides better performance for your money. 
Plus, a PC is easy to upgrade over time, allowing it to serve you well for years to come.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1431,"comment_id":1432,"feature_image":1433,"featured":105,"visibility":10,"created_at":1434,"updated_at":1435,"custom_excerpt":1436,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1437,"primary_tag":1438,"url":1439,"excerpt":1436,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1440},"e6411267-b657-4574-a0c3-dee97f1320fe","672b11a46d9acc0001dddaca","https://images.unsplash.com/photo-1515378960530-7c0da6231fb1?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDExfHxsYXB0b3B8ZW58MHx8fHwxNzMwODE3NTU4fDA&ixlib=rb-4.0.3&q=80&w=2000","2024-11-06T07:50:12.000+01:00","2026-02-20T06:03:50.000+01:00","Choosing the right laptop for animation can be daunting with so many options and specs to consider. 
This guide breaks down the essentials—from CPU power to screen resolution—to help you find the best laptop for your animation needs, even on a budget.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-laptops/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@christinhumephoto?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Christin Hume\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-laptops","2024-11-21T10:00:50.000+01:00",{"title":1426},"animation-laptops","posts/animation-laptops",[1447],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"eIN4iDG-4RUZQvpJmZAX4dII7rRvVesE6QYSfTv2Nrs",{"id":1450,"title":1451,"authors":1452,"body":7,"description":7,"extension":8,"html":1454,"meta":1455,"navigation":14,"path":1466,"published_at":1467,"seo":1468,"slug":1469,"stem":1470,"tags":1471,"__hash__":1473,"uuid":1456,"comment_id":1457,"feature_image":1458,"featured":105,"visibility":10,"created_at":1459,"updated_at":1460,"custom_excerpt":1461,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1462,"primary_tag":1463,"url":1464,"excerpt":1461,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1465},"ghost/posts:character-color-palettes.json","Creating Character Color Palettes (2026): Tips and Techniques for Animators",[1453],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎨\u003C/div>\u003Cdiv class=\"kg-callout-text\">Color is a powerful communicator, especially when it's used in character design.\u003C/div>\u003C/div>\u003Cp>In fact, great character 
design is almost inseparable from its color palette: take your favorite character, change the colors, and it’s as if you have an entirely different character.\u003C/p>\u003Cp>How do animators come up with great color palettes, then? There are a few principles to follow, and the rest is creative genius. Only experience can bring the latter, but we can learn about the former today: in this article, we explore color theory and best practices to help you easily develop character color palettes.\u003C/p>\u003Cp>First, let’s understand what color theory is.\u003C/p>\u003Chr>\u003Ch2 id=\"what-is-color-theory\">\u003Cstrong>What is Color Theory?\u003C/strong>\u003C/h2>\u003Cp>Color theory studies how colors interact, how they can be combined, and how they affect emotions and perceptions.\u003C/p>\u003Cp>At its core, color theory encompasses concepts such as the color wheel, color harmony, and the psychological effects of color. The color wheel organizes hues to demonstrate their relationships, including primary, secondary, and tertiary colors. This foundational tool allows animators to select colors that complement each other or create contrast, guiding the viewer's emotional response.\u003C/p>\u003Cp>Let's say we pick a color palette for a heroic knight. Our primary colors could be blue and gold: blue for trust, loyalty, and calmness, and gold for wealth, success, and optimism. This combination suggests strength and reliability, positioning the knight as a noble character.\u003C/p>\u003Chr>\u003Ch2 id=\"why-characters-need-a-color-palette\">\u003Cstrong>Why Characters Need A Color Palette\u003C/strong>\u003C/h2>\u003Cp>Color plays a significant role in conveying emotions and themes: it sets the tone for a character's personality and helps the audience quickly identify their emotional state. 
It's\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\"> \u003Cu>a fundamental aspect of character design\u003C/u>\u003C/a>.\u003C/p>\u003Cp>A character designed with cool, muted tones might suggest melancholy or introspection.\u003C/p>\u003Cp>A character experiencing growth could start with darker colors and gradually transition to lighter, more vibrant hues to represent their development.\u003C/p>\u003Cp>A distinctive color palette makes characters more recognizable and memorable to the audience. Iconic palettes, like Naruto's orange, yellow, and blue, create strong associations.\u003C/p>\u003Cp>In animation, color palettes also serve as a tool for world-building: they reflect the tone and style of the world in which the characters exist. For example, in \u003Cem>Avatar: The Last Airbender\u003C/em>, the colors associated with each Nation (Earth, Water, Fire, Air) not only represent the elements but also reflect their cultural identities.\u003C/p>\u003Chr>\u003Ch2 id=\"1-copy-what-works\">\u003Cstrong>1. Copy What Works\u003C/strong>\u003C/h2>\u003Cp>One of the easiest ways to develop character color palettes is to study successful characters from existing media—be it animations, video games, or films—to understand how their color choices enhance their personality, role, and story. Many established characters already apply principles of color theory like harmony, contrast, and complementary colors. 
Learning from these examples helps you apply these concepts effectively in your own palette.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Define your character\u003C/strong> - Write a detailed list of bullet points that describe your character: role/archetype (hero, mentor, sidekick, antagonist), occupation (mage, warrior, scientist, merchant), age (child, young adult, middle-aged, elder), skills (powers, combat abilities, intelligence, charm) and core attributes (heroic or villainous, joyful or melancholic).\u003C/li>\u003Cli>\u003Cstrong>Research reference material\u003C/strong> - Use platforms like Pinterest or art databases to find character designs that align with your list. Search specifically for color palettes used in similar roles or archetypes (old wise man, queen, warrior, joker, etc.): James Bond is typically clothed in neutral and dark colors to signal sophistication and authority, while characters like Mario use bright primary colors to convey energy and friendliness.\u003C/li>\u003Cli>\u003Cstrong>Analyze and adapt\u003C/strong> - Consider what color schemes are most prevalent among characters that fill similar roles or share traits with your character. Choose hues that resonate with your character's traits while modifying them to create something original. For example, if you notice wise mentors have earthy tones, but your character has an adventurous edge, you could pair deep greens with vibrant golds to create a more dynamic palette.\u003C/li>\u003Cli>\u003Cstrong>Create a cohesive palette\u003C/strong> - Outline a color palette that reflects your character's description while ensuring visual harmony with primary, secondary, and highlight colors for details. A guardian character could have, for example, a palette of deep blue (trust), silver (wisdom), and bright gold (valor).\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"2-use-color-symbolism\">\u003Cstrong>2. 
Use Color Symbolism\u003C/strong>\u003C/h2>\u003Cp>Color symbolism is using colors to represent certain ideas, emotions, or values within the context of storytelling and character design.\u003C/p>\u003Cp>Each color can trigger specific feelings or imply particular traits and roles. Red is often associated with passion, danger, or power. Blue usually signifies calmness, trustworthiness, or sadness.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Character roles and relationships\u003C/strong> - Consider your characters' status and relevance within the story. They often symbolize specific archetypes, so their color palettes should reflect their roles. A princess might wear soft pastels or rich jewel tones to indicate elegance and status. This differentiation in the palette is not arbitrary―it supports the narrative by cueing the audience about character dynamics. If you wish to create a more nuanced character, like a general with a surprisingly gentle side, you could play with lighter shades or softer tones in their palette to highlight these hidden traits.\u003C/li>\u003Cli>\u003Cstrong>Socioeconomic indicators\u003C/strong> - Color also codifies socioeconomic status. In many stories, characters from privileged backgrounds wear brighter, high-contrast colors to signify wealth, while those from less affluent backgrounds wear more monochromatic hues to reflect their struggles. This visual disparity immediately communicates their respective social standings.\u003C/li>\u003Cli>\u003Cstrong>Cultural and religious associations\u003C/strong> - Colors often carry specific, sometimes different, meanings across different cultures and religions, and these associations provide depth to your character design. In Western cultures, white symbolizes purity, often linked to weddings, while in some Eastern cultures, it may represent mourning. Incorporating these cultural nuances enriches a character's background and ties in beautifully with your world-building. 
In a fantasy world, you can create your own fictitious meanings. For example, suppose your animation features a culture that reveres nature. In that case, green tones might symbolize nobility, while earth tones could indicate a character who is more grounded or of lesser status.\u003C/li>\u003Cli>\u003Cstrong>Break the rules\u003C/strong> - Don't be afraid to experiment with color combinations. Start with a basic palette and then adjust based on the evolving narrative. You could have a villain disguised as a pure white knight (e.g., Griffith from the Berserk anime). Queen Elza from Frozen has a cold, icy color palette despite being portrayed as deeply caring and self-sacrificing.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"3-leverage-color-palette-generators\">\u003Cstrong>3. Leverage Color Palette Generators\u003C/strong>\u003C/h2>\u003Cp>A color palette generator helps users create harmonious color combinations. With appropriate layers in your digital content creation software, generators allow you to explore various color schemes quickly.\u003C/p>\u003Cp>You can also use generators to keep track of your chosen colors and keep your designs consistent:\u003C/p>\u003Cp>Popular tools like Adobe Color, Coolors, and Paletton work similarly:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Start with a base color\u003C/strong> - Use your character's primary trait to determine a base color. Input this color into the palette generator to create a foundational palette.\u003C/li>\u003Cli>\u003Cstrong>Explore different color schemes\u003C/strong> - Use the various options provided by the generator to explore different color schemes: experiment with complementary colors that sit opposite each other on the color wheel to create visual tension or analogous colors that sit beside each other for a more harmonious look.\u003C/li>\u003Cli>\u003Cstrong>Test the palette in context\u003C/strong> - Once you've generated a few palettes, apply them to test sketches of your character. 
This contextual application reveals how the colors interact with one another in practice.\u003C/li>\u003Cli>\u003Cstrong>Document your palette\u003C/strong>: Document your chosen palette with HEX, RGB, HSL, or CMYK values for easy reference during the animation process.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"4-find-inspiration-from-around-you\">\u003Cstrong>4. Find Inspiration From Around You\u003C/strong>\u003C/h2>\u003Cp>Finding inspiration from the environment around you means observing nature, architecture, fashion, and everyday life to extract colors that resonate with your characters.\u003C/p>\u003Cp>It adds a sense of realism and relatability to your designs, grounding them in a world that audiences connect with: the real world is rich with color variations to create unique and nuanced palettes rather than relying solely on common tropes.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Photo to color palette\u003C/strong> - Try finding or taking photos of striking color combinations that catch your eye―anything from a vibrant street mural to the subtle shades of a sunset. Make a library of your favorite photographs and organize them based on themes or emotions to serve as a quick reference when you need inspiration.\u003C/li>\u003Cli>\u003Cstrong>Study nature and surroundings\u003C/strong> - Spend time outdoors and observe how colors interact in various environments. Notice how greens in a forest differ in shade and hue depending on the time of day or weather conditions. Recording these observations helps you understand how to mix colors effectively. You can also create mood boards by selecting images from magazines, online sources, or your own photography.\u003C/li>\u003Cli>\u003Cstrong>Pay attention to fashion and interior design\u003C/strong> - Examine current fashion trends or interior design palettes. Designers often draw color choices based on cultural influences, seasonal changes, or emotional themes. 
Analyze why certain combinations work and how they convey feelings.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"5-dont-forget-lighting\">\u003Cstrong>5. Don't Forget Lighting\u003C/strong>\u003C/h2>\u003Cp>In animation,\u003Ca href=\"https://blog.cg-wire.com/how-light-shapes-emotion-in-animation/\"> \u003Cu>lighting is a fundamental element\u003C/u>\u003C/a> that dictates visibility, mood, atmosphere, and visual storytelling. It includes dealing with natural light, artificial sources, and their interplay with color and shadows.\u003C/p>\u003Cp>Colors can appear different based on lighting conditions. A character's vibrant hue may look muted in a darkened room under bright daylight. The type of lighting can evoke various emotional responses, and selecting your character's color palette with this emotional context in mind helps reinforce the intended mood of a scene. Strategic lighting enhances or diminishes aspects of a character's design, allowing you to guide the viewer's eye toward crucial features or away from unnecessary details.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Experiment with color temperature\u003C/strong> - Start by determining the color temperature that reflects your character's mood. Create a few sketches using warm, cool, and neutral light palettes. Observe how these choices influence the perception of your character's colors. 
For example, use soft, golden lighting for a romantic or nostalgic character and bright, icy blue for a more aloof or mysterious persona.\u003C/li>\u003Cli>\u003Cstrong>Use color grading tools\u003C/strong> - Digital animation software often includes color grading tools that allow you to adjust lighting effects virtually to see how colors react to different lighting scenarios.\u003C/li>\u003Cli>\u003Cstrong>Test your palette in diverse lighting scenarios\u003C/strong> - Create a set of character designs using your preliminary color palette, and then render these designs with different lighting setups to identify how your color choices hold up in varied environments. You might find vibrant hues in one light setting muted or clash in another, prompting necessary revisions.\u003C/li>\u003Cli>\u003Cstrong>Incorporate shadows and highlights\u003C/strong> - Light and shadow create depth in your character designs, so choose colors that can be lightened or darkened effectively.\u003C/li>\u003C/ol>\u003Chr>\u003Ch2 id=\"6-account-for-the-environment\">\u003Cstrong>6. Account For The Environment\u003C/strong>\u003C/h2>\u003Cp>Consider the environment in which your characters will exist: the colors you choose for your characters must work with the backgrounds and other elements of your animation.\u003C/p>\u003Cp>The environment encompasses everything from background scenery to other characters. Each element has its own color palette, which can complement or clash with your character designs.\u003C/p>\u003Cp>If you select colors in isolation, you risk creating a disjointed visual experience that detracts from your storytelling. By designing your character colors with their environment in mind, you can better establish the visual hierarchy within your scenes: main characters should stand out. 
In contrast, secondary characters or background elements can be subtler, creating a dynamic yet balanced composition.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Design in context\u003C/strong> - Always incorporate your character designs within their intended environments and avoid designing characters in isolation. Draft color schemes while clearly viewing the backdrop and other elements that will appear alongside your characters.\u003C/li>\u003Cli>\u003Cstrong>Establish character roles\u003C/strong> - Consider the role of each character. For example, the main player character should have the most eye-catching colors, using bold shades and contrasts that naturally draw attention. Secondary characters would have slightly muted tones, while background characters wear even more subdued colors.\u003C/li>\u003Cli>\u003Cstrong>Use visual references\u003C/strong> - Create a montage of your character variations in your preferred graphics editor. Take a background illustration and overlay the different character designs on top to provide a visual reference.\u003C/li>\u003Cli>\u003Cstrong>Test contrast and readability\u003C/strong> - Make sure your character designs are easily distinguishable from the background. An effective way to do this is to test your designs against monochrome background variations. If a character blends into the scenery, adjust the palette until it stands out sufficiently while maintaining the desired aesthetic harmony.\u003C/li>\u003C/ol>\u003Cp>It's not uncommon to change character clothes and accessories to match a new environment or a different group of characters in the same scene:\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>In animation, the thoughtful selection and application of color transcend artistic preference; it’s a key narrative pillar. 
\u003C/p>\u003Cp>By understanding and leveraging color theory, tapping into established palettes, using color symbolism, and drawing inspiration from the world around us, animators can build characters that resonate deeply with audiences. Integrating tools like palette generators alongside considerations for lighting and environment ensures that a character’s colors not only captivate the viewer but also enhance the impact of their story.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>",{"uuid":1456,"comment_id":1457,"feature_image":1458,"featured":105,"visibility":10,"created_at":1459,"updated_at":1460,"custom_excerpt":1461,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1462,"primary_tag":1463,"url":1464,"excerpt":1461,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1465},"f5a1699f-9158-4679-9ff1-5585c37ff285","6731a758284220000111e573","https://images.unsplash.com/photo-1495996278086-d589e29619ea?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDQwfHxjb2xvciUyMGNoYXJhY3RlcnxlbnwwfHx8fDE3MzEzNzQ2MzF8MA&ixlib=rb-4.0.3&q=80&w=2000","2024-11-11T07:42:32.000+01:00","2026-03-26T10:28:17.000+01:00","Color is a powerful tool in character design, setting the tone for personality and making characters instantly recognizable. 
This article dives into color theory and essential principles animators use to create impactful color palettes, from symbolic choices to practical tips for harmonious design.",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/character-color-palettes/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@mettyunuabona?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Ehimetalor Akhere Unuabona\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/character-color-palettes","2024-11-14T09:36:36.000+01:00",{"title":1451},"character-color-palettes","posts/character-color-palettes",[1472],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"phlaxlcSkenGCSGY-ptS3XSmP5nN1_RakHT10sfN8WE",{"id":1475,"title":1476,"authors":1477,"body":7,"description":7,"extension":8,"html":1479,"meta":1480,"navigation":14,"path":1492,"published_at":1493,"seo":1494,"slug":1495,"stem":1496,"tags":1497,"__hash__":1499,"uuid":1481,"comment_id":1482,"feature_image":1483,"featured":105,"visibility":10,"created_at":1484,"updated_at":1485,"custom_excerpt":1486,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1487,"primary_tag":1488,"url":1489,"excerpt":1486,"reading_time":214,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":1490,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1491},"ghost/posts:what-is-2d-animation.json","(2026) What is 2D Animation?",[1478],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🖌️\u003C/div>\u003Cdiv class=\"kg-callout-text\">2D animation is the illusion of movement in a two-dimensional space.\u003C/div>\u003C/div>\u003Cp>2D animation involves the manipulation of flat images or drawings along a 
horizontal and vertical plane to create an illusion of depth, whereas 3D animation has an additional axis for depth and volume where characters can be rotated, light can be applied from any angle, and environments can be rendered with realistic textures.\u003C/p>\u003Cp>While both forms of animation share the common goal of storytelling through movement, the skill sets and the creative approaches differ significantly. Even in 2D animation alone, it's important for new animators to try different styles to understand which ones resonate with them:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Computer Animation\u003C/strong>: Animators use digital tools to create characters, props, and environments. For example, using software like Adobe Animate allows for the creation of frame-by-frame animations and the manipulation of 2D assets in a timeline.\u003C/li>\u003Cli>\u003Cstrong>Stop Motion\u003C/strong> - In this technique, the animator photographs physical objects and characters in incremental movements to create a seamless motion when played in sequence. Classic examples are the beloved characters Wallace and Gromit or films like Coraline. The tactile nature of stop motion provides a unique aesthetic.\u003C/li>\u003Cli>\u003Cstrong>Motion Graphics\u003C/strong> - Motion graphics blend graphic design and animation in commercial settings for advertisements, title sequences, or explainer videos. They include animated text, logos, and icons to emphasize brand identity.\u003C/li>\u003Cli>\u003Cstrong>Whiteboard Animation\u003C/strong> - This style mimics the effect of a marker drawing on a whiteboard, usually paired with a voiceover for explanatory purposes. It's often used for educational content because it keeps the viewer engaged through visual storytelling.\u003C/li>\u003Cli>\u003Cstrong>Rotoscope\u003C/strong> - Animators trace live-action footage frame by frame to create realistic animations. 
It has historical relevance, with early examples found in films by Max Fleischer and more contemporary applications in productions like \"A Scanner Darkly.\"\u003C/li>\u003C/ul>\u003Cp>Though all these 2D animation styles are different, they go through a similar creative process. Each phase requires specialized expertise, and thus a new animator will strive to understand the differences in picking a vocation.\u003C/p>\u003Chr>\u003Ch2 id=\"1-concept-development\">\u003Cstrong>1. Concept Development\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/concept-in-animation-definition-process-and-challenges/\">\u003Cu>Concept development\u003C/u>\u003C/a> is about brainstorming and refining ideas to establish a clear vision for an animation production―the themes, character designs, and overall visual style. It requires a deep understanding of storytelling and visual communication.\u003C/p>\u003Cp>Understanding the target audience during concept development allows animators to craft narratives that resonate, and a solid concept helps streamline the production process by reducing reworks later in the pipeline.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Brainstorming Sessions\u003C/strong> - The director gathers the team for collaborative sessions to encourage free thinking and idea generation using techniques like mind mapping or word association to explore potential themes and characters.\u003C/li>\u003Cli>\u003Cstrong>Research\u003C/strong> - A deep dive into relevant topics to identify trends, gather inspiration, and understand audience preferences. Producers might, for example, analyze successful animations for insights into storytelling and character dynamics, while illustrators might work on mood boards for concept designs.\u003C/li>\u003Cli>\u003Cstrong>Scriptwriting\u003C/strong> - Writers prepare a preliminary script to convey the story's dialogue, action, and important milestones. 
This script is a reference point for animators and voice actors alike, so it usually takes a long time to get right.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"2-character-design\">\u003Cstrong>2. Character Design\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/character-design-animation/\">\u003Cu>Character design\u003C/u>\u003C/a> is the process of creating the visual appearance, personality, and unique traits of a character for animation―translating concepts and narrative intentions into a distinctive visual form, including everything from the character's shape, color palette, and clothing to the details of their facial expressions and body language.\u003C/p>\u003Cp>Characters drive the narrative, so effective design helps communicate the story's themes, motivations, and emotional arcs. A character with exaggerated features may represent innocence or comedy, while a more refined design could convey sophistication or seriousness.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Research and Concept Development\u003C/strong> - Character designers begin by understanding the context and purpose of their character: researching the character's background, role in the story, and the project's themes. They also consider the character's age, environment, and relationships to contextualize the design.\u003C/li>\u003Cli>\u003Cstrong>Sketching\u003C/strong> - They then start with rough sketches to explore different possibilities, experimenting with shapes, sizes, and different poses through multiple iterations to refine ideas. The key is to explore and be creative.\u003C/li>\u003Cli>\u003Cstrong>Character Profiles\u003C/strong> - A character profile outlines personality traits, likes and dislikes, fears, and aspirations. This document guides design choices and ensures consistency in portrayal.\u003C/li>\u003Cli>\u003Cstrong>Color and Style Exploration\u003C/strong> - A color palette reflects the character's personality and the overall tone of the animation. 
Warm colors may evoke friendliness, for example.\u003C/li>\u003Cli>\u003Cstrong>Refinement and Finalization\u003C/strong> - Once a design is selected, we refine the character by paying attention to small details that can enhance the design, like textures in clothing or unique features like scars or accessories. The designer creates a final turn-around sheet to guide animators on how the character looks from different angles.\u003C/li>\u003C/ul>\u003Cp>Pencils, sketchbooks, and markers are timeless tools for concept artists because they allow freehand experimentation. But it's now common to use software tools like Adobe Photoshop, Corel Painter, or Krita to manipulate colors, shapes, and layers easily.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"500\" height=\"447\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Nilah Ate The Blog\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"3-storyboarding\">\u003Cstrong>3. Storyboarding\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/storyboard-animation/\">\u003Cu>Storyboarding creates a series of images\u003C/u>\u003C/a> displayed in sequence to previsualize a scene. Each frame in a storyboard represents a specific moment in the narrative and serves as a blueprint for the animation to define essential elements like composition, movement, and timing.\u003C/p>\u003Cp>A storyboard allows animators to see how their story unfolds visually before investing time in the animation process. 
It provides a clear picture of the narrative flow and helps identify potential issues early.\u003C/p>\u003Cp>It's a vital communication tool among team members to make sure everyone has a unified understanding of the project.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Script Analysis\u003C/strong> - Storyboard artists analyze the script to identify key scenes, actions, and dialogues. They consider the emotional tone and target audience to ensure the storyboard aligns with the intended message.\u003C/li>\u003Cli>\u003Cstrong>Thumbnail Sketches\u003C/strong> - Before diving into detailed drawings, they create small thumbnail sketches that outline each major scene to focus on composition, camera angles, and the arrangement of characters and backgrounds. Sketches are kept simple to make quick changes.\u003C/li>\u003Cli>\u003Cstrong>Scene Breakdown\u003C/strong> - Artists determine the sequence of events within each scene: transitions, character movements, and key actions that drive the narrative.\u003C/li>\u003Cli>\u003Cstrong>Adding Dialogue and Annotations\u003C/strong> - The team often includes dialogue text, sound cues, and any essential notes regarding the action in each frame for the animators to understand character intentions and emotional delivery.\u003C/li>\u003Cli>\u003Cstrong>Revisions and Feedback\u003C/strong> - Storyboarding is an iterative process, like most animation phases, so it's important to share the storyboard with team members to gather feedback and make revisions to enhance the clarity of the storyboard.\u003C/li>\u003C/ul>\u003Cp>Many animators still prefer using pen and paper for storyboarding.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-1.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"700\" height=\"687\" 
srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2024/11/image-1.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-1.png 700w\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: James Novy\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"4-animatic\">\u003Cstrong>4. Animatic\u003C/strong>\u003C/h2>\u003Cp>An animatic is a preliminary version of an animation that combines artwork and audio to create a moving storyboard. It's a draft that showcases how the animation unfolds visually with rough animations.\u003C/p>\u003Cp>An animatic helps identify issues with composition and staging before more resources are committed to full animation. It provides a clear visual layout that can highlight potential problems in framing scenes. By previewing actions and transitions between scenes, creators can also tweak dialogue and visual gags for maximum impact.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Storyboarding integration\u003C/strong> - The artists start with traditional storyboards that outline each scene in the animation and import them into video editing software for animation, like Toon Boom Harmony.\u003C/li>\u003Cli>\u003Cstrong>Audio integration\u003C/strong> - Animators record basic voiceovers, dialogues, and sound effects to add life to the storyboard and help set the pacing for visual sequences.\u003C/li>\u003Cli>\u003Cstrong>Timing setup\u003C/strong> - The animation team establishes the timing for each shot by assigning duration to each storyboard panel, syncing the visuals with the audio.\u003C/li>\u003Cli>\u003Cstrong>Movement and transitions\u003C/strong> - Motion is key in animatics—even if it's just simple pan and zoom effects on still images. 
Visualizing how scenes will transition and create a dynamic viewing experience helps animators during the production phase.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-2.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"672\" height=\"189\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2024/11/image-2.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-2.png 672w\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Dream Farm Studios\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"5-animation-production\">\u003Cstrong>5. Animation Production\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/how-to-prepare-your-production-part-1-the-contracts/\">\u003Cu>Animation production\u003C/u>\u003C/a> is the core process of turning concepts into animated content.\u003C/p>\u003Cp>This is where the bulk of the work occurs. Animators use traditional hand-drawn or 2D digital animation methods to create the actual frames that will animate the characters and scenes.\u003C/p>\u003Cp>Toon Boom Harmony is one of the most widely used software programs for 2D animation.\u003C/p>\u003Cp>It allows you to import your storyboard images or create animatics. It supports various formats, enabling you to work seamlessly with hand-drawn or digital artwork from graphic tablets.\u003C/p>\u003Cp>Animators use different layers to separate elements like characters, backgrounds, and UI. This organization makes it easier to manipulate, colorize, and animate each component later in production.\u003C/p>\u003Cp>You can set durations for each scene, adjust frame rates, and create smooth transitions that align with the audio tracks. 
You can change the audio timing directly on the timeline, like in any video editing software.\u003C/p>\u003Cp>Alternatives include OpenToonz, Adobe Animate, and Moho, among others.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-3.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1121\" height=\"630\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2024/11/image-3.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2024/11/image-3.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-3.png 1121w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Cspan style=\"white-space: pre-wrap;\">Source: Toon Boom Harmony\u003C/span>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"6-vfx\">\u003Cstrong>6. 
VFX\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/vfx-artist-definition-process-challenges/\">\u003Cu>Visual Effects (VFX) encompass a wide range of techniques\u003C/u>\u003C/a> to create or enhance imagery that's less convenient to draw: motion blur, lighting, texture, fire, etc.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Layering and compositing\u003C/strong> - Animators manipulate multiple layers of animation to create depth: animated characters with backgrounds but also VFX elements like smoke, fire, or magical sparks to increase detail levels.\u003C/li>\u003Cli>\u003Cstrong>Creating particle effects\u003C/strong> - A common task is designing and animating particle effects like rain, snow, or explosions using physics engines.\u003C/li>\u003Cli>\u003Cstrong>Animating special effects\u003C/strong> - Effects like glow, distortion, and transitions can also enhance the quality of movement animations.\u003C/li>\u003Cli>\u003Cstrong>Rendering\u003C/strong> - Like 3D animation, VFX animations require a rendering step to adjust output settings for quality and delivery format. For resource-intensive rendering like crowd animation, animators can rely on rendering farms. It's a highly technical step that sometimes requires specialized expertise.\u003C/li>\u003C/ul>\u003Cp>One of the most widely used tools for VFX in 2D animation is Adobe After Effects. You can use it to adjust opacity, blend modes, and layer styles to create compositions and take advantage of built-in effects like particles, lights, and camera motion. 
Using presets streamlines the animator's workflow.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-5.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1004\" height=\"530\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2024/11/image-5.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2024/11/image-5.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/11/image-5.png 1004w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: 3D Art\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"7-sound-design\">\u003Cstrong>7. Sound Design\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/sound-design-in-animation-definition-process-challenges/\">\u003Cu>Sound design\u003C/u>\u003C/a> is about creating, recording, editing, and generating audio elements. It encompasses everything from dialogue and sound effects to ambient sounds and musical scores.\u003C/p>\u003Cp>The right music or sound effects can evoke laughter, fear, sadness, or excitement to impact how viewers connect with the animation directly.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Foley\u003C/strong> - Foley artists find or create sound effects (SFX) and ambient sounds. They complement musicians creating musical scores to set the tone of a scene and make the animated universe come alive.\u003C/li>\u003Cli>\u003Cstrong>Recording\u003C/strong> - When predetermined sounds aren't available, animators often record their audio, capturing voiceovers or unique sound effects. 
Voice artists play a key role there.\u003C/li>\u003Cli>\u003Cstrong>Sound editing\u003C/strong> - When sounds are sourced or recorded, they must be edited for clarity, volume, and alignment with the animation―cutting, layering, and adjusting sound elements to ensure they sync perfectly with the visuals.\u003C/li>\u003Cli>\u003Cstrong>Mixing\u003C/strong> - Mixing balances different audio tracks — dialogue, sound effects, and music to create a cohesive and engaging auditory experience. Proper mixing ensures that no single element overpowers another.\u003C/li>\u003Cli>\u003Cstrong>Sound design implementation\u003C/strong> - The edited and mixed sounds are integrated into the animation software by adjusting levels, adding spatial effects, and fine-tuning the audio.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"8-supervision-management\">\u003Cstrong>8. Supervision &amp; Management\u003C/strong>\u003C/h2>\u003Cp>2D animation is a team effort. Supervision and management refer to the organizational and leadership processes that guide an animation project from inception to completion―overseeing various teams, ensuring that creative and production goals align, and maintaining a cohesive workflow.\u003C/p>\u003Cp>Supervisors are responsible for various activities, from creative direction to administrative oversight, to ensure production meets deadlines, budgets, and quality standards. Because animation projects involve multiple specialists, they also help with cross-functional collaboration. A systematic approach to resource distribution provides each team member with what they need to succeed without unnecessary waste. 
Animation production often operates on tight schedules, so a supervisor keeps track of progress and facilitates adjustments to keep the project on track.\u003C/p>\u003Cp>It's a full-time job:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Project planning\u003C/strong> - Establishing a project timeline and setting milestones to divide tasks among various teams and determine how those tasks will fit together.\u003C/li>\u003Cli>\u003Cstrong>Team management\u003C/strong> - Engaging with team members to motivate and inspire creative output with regular check-ins and feedback sessions to maintain team morale and create alignment.\u003C/li>\u003Cli>\u003Cstrong>Creative oversight\u003C/strong> - Supervisors often review and approve storyboards, character designs, and animations.\u003C/li>\u003Cli>\u003Cstrong>Problem-solving\u003C/strong> - As projects unfold, challenges arise. Supervisors must be adept at identifying issues early and implementing solutions, whether they are related to workload, creative discrepancies, or technical difficulties.\u003C/li>\u003Cli>\u003Cstrong>Budget management\u003C/strong> - Keeping track of financial expenditures can make or break a production, so the project must remain within budget constraints while achieving high-quality outputs.\u003C/li>\u003C/ul>\u003Cp>\u003Ca href=\"https://www.cg-wire.com/kitsu?ref=blog.cg-wire.com\">\u003Cu>Pipeline trackers like Kitsu\u003C/u>\u003C/a>, which also help with asset management and reviews, keep the animation team agile while respecting project constraints.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>There are countless opportunities in 2D animation, with various techniques, roles, and a vast array of unique projects to explore. \u003C/p>\u003Cp>That’s why gaining experience across different areas can be incredibly valuable before choosing a specific path. 
With the rise of mainstream anime and the growing use of motion graphics in marketing, 2D animation is as relevant as ever—so dive in and explore!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about creating 2D animations, \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1481,"comment_id":1482,"feature_image":1483,"featured":105,"visibility":10,"created_at":1484,"updated_at":1485,"custom_excerpt":1486,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1487,"primary_tag":1488,"url":1489,"excerpt":1486,"reading_time":214,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":1490,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1491},"356ccf80-0dea-48b6-9665-65cbb217c439","6724955c348d5600018648ae","https://images.unsplash.com/photo-1620928572438-075c466c48da?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDEwfHxjYXJ0b29ufGVufDB8fHx8MTczMDQ1NDA1MXww&ixlib=rb-4.0.3&q=80&w=2000","2024-11-01T09:46:20.000+01:00","2026-03-26T10:45:32.000+01:00","Explore the art of 2D animation, where movement and storytelling come to life through techniques like computer animation, stop motion, and motion graphics. 
From concept to final output, this post breaks down the key stages that make each project unique and engaging.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/what-is-2d-animation/","Explore the art of 2D animation, where movement and storytelling come to life.","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@venczakjanos?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">János Venczák\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/what-is-2d-animation","2024-11-04T00:20:09.000+01:00",{"title":1476},"what-is-2d-animation","posts/what-is-2d-animation",[1498],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"jvEvNLiy2sgNOen_gmx0ZaOv8zIdwCMPUTFy1rtCxYA",{"id":1501,"title":1502,"authors":1503,"body":7,"description":7,"extension":8,"html":1505,"meta":1506,"navigation":14,"path":1519,"published_at":1520,"seo":1521,"slug":1522,"stem":1523,"tags":1524,"__hash__":1526,"uuid":1507,"comment_id":1508,"feature_image":1509,"featured":105,"visibility":10,"created_at":1510,"updated_at":1511,"custom_excerpt":1512,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1513,"primary_tag":1514,"url":1515,"excerpt":1512,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":1516,"meta_title":7,"meta_description":1517,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1518},"ghost/posts:3d-animation-process.json","A Deep Dive into the 3D Animation Process (2026)",[1504],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🤔\u003C/div>\u003Cdiv class=\"kg-callout-text\">Have you ever wondered how characters like Elsa from Disney's Frozen or Joy from Pixar's Inside Out 2 seem so life-like on 
screen?\u003C/div>\u003C/div>\u003Cp>\u003Cstrong>3D animation \u003C/strong>is the process of \u003Cstrong>creating moving images in a digital, three-dimensional environment\u003C/strong>, and it's quite complex! It's common for animation studios to have dozens of employees, each 3D artist specializing in one part of the animation pipeline or another.&nbsp;\u003C/p>\u003Cp>Unlike traditional 2D animation, 3D animation uses computer-generated models designed with height, width, and depth to simulate realistic motion and physics. Artists then manipulate these models using keyframes or motion capture to bring them to life.\u003C/p>\u003Cp>But that's not all there is to it! Here is a breakdown of how 3D animations come to life.\u003C/p>\u003Chr>\u003Ch2 id=\"1-concept-storyboarding\">\u003Cstrong>1. Concept &amp; Storyboarding\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Concept development\u003C/strong> is the very first phase of the process, where concept artists, writers, and directors conceive the core idea, story, and characters.&nbsp;\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/concept-in-animation-definition-process-and-challenges/\" rel=\"noreferrer\">\u003Cu>They brainstorm and sketch rough ideas\u003C/u>\u003C/a>, exploring visual styles, themes, and character designs to ensure the concept aligns with the story’s vision.\u003C/p>\u003Cp>The team visualizes the story through a script and then \u003Cstrong>storyboards a series of sequential drawings\u003C/strong>, much like a comic strip, that map out the flow of the story scene by scene.&nbsp;\u003C/p>\u003Cp>Storyboarding helps the production team visualize the film’s structure, camera angles, character positions, and timing of key moments. It’s the blueprint for the production to plan shots, scenes, and transitions.\u003C/p>\u003Cp>Production―the execution phase where animators create 3D assets―starts from there.\u003C/p>\u003Chr>\u003Ch2 id=\"2-modeling-texturing\">\u003Cstrong>2. 
Modeling &amp; Texturing\u003C/strong>\u003C/h2>\u003Cp>In 3D modeling, \u003Cstrong>artists define the characters, objects, and environments in a virtual 3D space\u003C/strong>.&nbsp;\u003C/p>\u003Cp>3D modelers and texture artists work closely with concept artists to ensure they faithfully translate the designs created in the pre-production phase.\u003C/p>\u003Cp>They use \u003Ca href=\"https://blog.cg-wire.com/3d-modeling-animation/\">\u003Cu>specialized software like Maya, Blender, or 3ds Max to build 3D models\u003C/u>\u003C/a> from polygons―the building blocks of 3D models used to form the shapes of objects in the animation.&nbsp;\u003C/p>\u003Cp>Models can range from simple, low-poly designs to complex, highly-detailed creations, depending on the animation’s needs.\u003C/p>\u003Cp>The next step is to apply textures to give them their final look.\u003C/p>\u003Cp>Texturing is about \u003Cstrong>applying 2D images (textures) onto the 3D model\u003C/strong> and adjusting how light interacts with the surface to create realism or stylization.&nbsp;\u003C/p>\u003Cp>Texture artists take plain 3D models and add color, patterns, and surface details like skin, fabric, metal, or wood, depending on the object's representation.&nbsp;\u003C/p>\u003Chr>\u003Ch2 id=\"3-rigging\">\u003Cstrong>3. 
Rigging\u003C/strong>\u003C/h2>\u003Cp>Rigging is \u003Cstrong>creating a digital skeleton or framework within the 3D model\u003C/strong>.&nbsp;\u003C/p>\u003Cp>Just as a human skeleton allows the body to move, a rig gives the 3D model the necessary structure to animate.&nbsp;\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/rigging-in-animation/\">\u003Cu>Rigging artists build this underlying system\u003C/u>\u003C/a> using joints, bones, and controls, allowing animators to manipulate and pose the model in various ways.&nbsp;\u003C/p>\u003Cp>This skeleton determines how different parts of the model move―bend, stretch, and perform actions realistically or as stylized as required.\u003C/p>\u003Cp>The 3D model is then attached to the rig. This step, known as skinning,\u003Cstrong> ensures that the surface of the model (the \"skin\") follows the rig's movements correctly\u003C/strong>.&nbsp;\u003C/p>\u003Cp>The model will deform naturally when the rig moves, meaning bending limbs, facial expressions, or other animations will look smooth and believable—poor skinning results in unnatural or distorted movements.\u003C/p>\u003Chr>\u003Ch2 id=\"4-animation\">\u003Cstrong>4. 
Animation\u003C/strong>\u003C/h2>\u003Cp>3D animators bring the previously rigged models to life by \u003Cstrong>manipulating them to create motion, expressions, and gestures according to scripts and storyboards\u003C/strong>.&nbsp;\u003C/p>\u003Cp>They can use keyframe animation, where major poses are set at specific frames, and the software automatically fills in the motion between them, or more rarely with frame-by-frame animation, where each frame is manually adjusted.&nbsp;\u003C/p>\u003Cp>Animators focus on bringing out the characters' personalities and making their movements feel natural, dynamic, and in sync with the scene.\u003C/p>\u003Cp>Motion capture (or \"mocap\") is used in productions aiming for realistic human movement: real actors perform actions while wearing suits with sensors that capture their movements. These movements are then transferred to the 3D model.\u003C/p>\u003Ch2 id=\"5-lighting\">\u003Cstrong>5. Lighting\u003C/strong>\u003C/h2>\u003Cp>Lighting artists specialize in creating the ideal lighting setup for each scene.\u003C/p>\u003Cp>\u003Cstrong>They use virtual lights within the 3D environment to mimic real-world lighting effects like sunlight or indoor lighting\u003C/strong>―not only to make the characters and objects visible but also to enhance the scene's mood, depth, and texture. Whether it's a bright, cheerful day or a dark, ominous night, the lighting choices significantly influence how the audience experiences the animation.\u003C/p>\u003Cp>Lighting artists typically position light sources, adjust brightness, and fine-tune shadows and reflections to influence a scene.&nbsp;\u003C/p>\u003Chr>\u003Ch2 id=\"6-camera-work\">\u003Cstrong>6. Camera Work\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/camera-work-in-animation/\">\u003Cu>Virtual cameras frame and capture the action\u003C/u>\u003C/a>, much like in live-action filmmaking. 
Camera or layout artists typically perform this role to make the visual storytelling more engaging and cinematic, but also to guide the audience’s focus.\u003C/p>\u003Cp>\u003Cstrong>Camera artists place and adjust virtual cameras within the 3D environment to capture the scenes\u003C/strong> while ensuring the framing, composition, and movement effectively tell the story.&nbsp;\u003C/p>\u003Cp>They pay attention to camera angles, determine the distance from characters or objects, and decide whether the camera will be static or move dynamically within the scene.&nbsp;\u003C/p>\u003Chr>\u003Ch2 id=\"7-rendering\">\u003Cstrong>7. Rendering\u003C/strong>\u003C/h2>\u003Cp>In the rendering phase, rendering artists or technical directors generate the final images or frames of the 3D animation by combining all previous elements—lighting, textures, camera angles, and models.&nbsp;\u003C/p>\u003Cp>\u003Cstrong>Rendering converts the 3D scene into 2D images\u003C/strong> that the audience will ultimately see.&nbsp;\u003C/p>\u003Cp>Depending on the scene's complexity—number of characters, level of detail, lighting effects, etc.—\u003Ca href=\"https://blog.cg-wire.com/partnership-with-ranch-computing/\">\u003Cu>rendering can be highly time-consuming\u003C/u>\u003C/a>, often requiring powerful computers and render farms (clusters of powerful computers used to process animation) to process the frames efficiently.&nbsp;\u003C/p>\u003Cp>Each frame must be rendered individually, with animations typically running 24 to 30 frames per second.\u003C/p>\u003Chr>\u003Ch2 id=\"8-post-production\">\u003Cstrong>8. 
Post-Production\u003C/strong>\u003C/h2>\u003Cp>The rendered animation is polished in post-production by editing, adding visual effects (VFX), compositing, sound design, music, and final color grading.&nbsp;\u003C/p>\u003Cp>The animation must be visually cohesive and aligned with the original vision.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/compositing-in-animation-definition-process-challenges/\">\u003Cu>Compositing artists\u003C/u>\u003C/a>\u003Cstrong> are responsible for merging the rendered frames with other assets\u003C/strong> like background effects, visual effects (VFX), and, if necessary, live-action footage. Compositors also adjust layers, color correct, and add effects like depth of field or motion blur.\u003C/p>\u003Cp>Once all the frames and effects are ready, \u003Cstrong>the editing team assembles the shots into a coherent sequence\u003C/strong>. Editors focus on timing, pacing, and smooth transitions between scenes to enhance the flow of the story.\u003C/p>\u003Cp>In parallel with the visual work, \u003Cstrong>sound designers and audio engineers add sound effects, music, and dialogue to the animation\u003C/strong>. \u003Ca href=\"https://blog.cg-wire.com/sound-design-in-animation-definition-process-challenges/\">\u003Cu>Sound design brings another dimension\u003C/u>\u003C/a> to the final product, enhancing emotional beats, adding depth to the environment, and immersing the audience fully into the world, whether it's a subtle background noise, a powerful musical score, or the sync of character voices.\u003C/p>\u003Chr>\u003Ch2 id=\"9-final-output\">\u003Cstrong>9. 
Final Output\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>The completed animation must be in a file format suitable for its intended distribution platform\u003C/strong>.\u003C/p>\u003Cp>This task involves adjusting the resolution, frame rate, bit rate, etc., to optimize visual quality.&nbsp;\u003C/p>\u003Cp>The resulting exported file undergoes a review to catch any mismatches in colors, audio sync, or other critical aspects before it is deemed ready for delivery.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>From initial concept to final output, each stage of the 3D animation pipeline contributes to crafting immersive, lifelike experiences. Whether the detailed modeling or the final touches in post-production, the process is highly technical and creative.\u003C/p>\u003Cp>This overview offers insight into the key steps involved in 3D animation productions. Still, there is usually a lot more behind the scenes: from pipeline managers taking care of all aspects of the production pipeline to supervisors going back and forth between multiple studios, 3D animation productions are complex.\u003C/p>\u003Cp>The 3D animation process is also not linear either: it's highly iterative, with multiple rounds of feedback and re-modeling needed to achieve the director's desired results.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about creating 3D animations, \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. 
We’d be happy to welcome you!\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":1507,"comment_id":1508,"feature_image":1509,"featured":105,"visibility":10,"created_at":1510,"updated_at":1511,"custom_excerpt":1512,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1513,"primary_tag":1514,"url":1515,"excerpt":1512,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":1516,"meta_title":7,"meta_description":1517,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1518},"3b27853a-1b1a-4f7f-a4f3-33fccccb68fe","66fcc7a620627d00018c5131","https://images.unsplash.com/photo-1547194936-28214bd75193?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDE1fHxjb21wdXRlciUyMGFuaW1hdGlvbnxlbnwwfHx8fDE3Mjc4NDMwODF8MA&ixlib=rb-4.0.3&q=80&w=2000","2024-10-02T06:10:14.000+02:00","2026-03-26T10:04:11.000+01:00","Have you ever wondered how characters like Elsa from Disney's Frozen or Joy from Pixar's Inside Out 2 seem so life-like on screen? 
Here is a breakdown of how 3D animations come to life.",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/3d-animation-process/","Have you ever wondered how characters like Elsa from Disney's Frozen seem so life-like on screen?","Have you ever wondered how characters like Elsa from Disney's Frozen or Joy from Pixar's Inside Out 2 seem so life-like on screen?","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@ion66574?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Ion (Ivan) Sipilov\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/3d-animation-process","2024-10-02T17:05:37.000+02:00",{"title":1502},"3d-animation-process","posts/3d-animation-process",[1525],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"9e1zLqQd3m3qOCKJGg2teaI89TowYYrM63IM755JaUg",{"id":1528,"title":1529,"authors":1530,"body":7,"description":7,"extension":8,"html":1532,"meta":1533,"navigation":14,"path":1543,"published_at":1544,"seo":1545,"slug":1546,"stem":1547,"tags":1548,"__hash__":1550,"uuid":1534,"comment_id":1535,"feature_image":1536,"featured":105,"visibility":10,"created_at":1537,"updated_at":1538,"custom_excerpt":1539,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1540,"primary_tag":1541,"url":1542,"excerpt":1539,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1007},"ghost/posts:working-with-multiple-digital-content-creation-tools.json","(2026) Working With Multiple Digital Content Creation Tools",[1531],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>From character design to rendering, animators use many software tools in their everyday work. 
But with so many options, choosing the right one can feel like navigating a labyrinth.\u003C/p>\u003Cp>Truth is, there's no single perfect tool: each program has its own strengths and weaknesses. Wouldn't it be better to be able to leverage any tools depending on their advantages or animators' personal preferences?\u003C/p>\u003Cp>With the right production pipeline, this is a possibility. In this article, we explore four key points to take into account when designing your pipeline for multiple DCC tools: creating a single source of truth, using a review engine, rendering, and versioning.\u003C/p>\u003Ch2 id=\"why-use-multiple-tools\">\u003Cstrong>Why use multiple tools\u003C/strong>\u003C/h2>\u003Cp>Studios rely on a vast arsenal of digital tools, but it's common for project teams to agree on using a single digital content creation tool―Maya, Blender, Unity, etc. However, allowing the use of multiple DCC tools brings several benefits.\u003C/p>\u003Cp>First, it fosters agility. Each animator can use whatever tool they prefer and leave the integration phase to the production pipeline using standard file formats. They can be productive from day one and keep the same pace throughout the production.\u003C/p>\u003Cp>Not only is it good for productivity, but having a range of accepted tools expands the potential talent pool you can hire from. Studios can attract artists with specific skill sets honed in different software, allowing them to assemble a dream team perfectly equipped to tackle the project's unique demands.\u003C/p>\u003Cp>This approach isn't without challenges, of course. Compatibility issues can arise, forcing artists to jump through hoops to import or export files between programs. Maintaining a consistent visual style across various tools can also be tricky, requiring meticulous planning and communication.\u003C/p>\u003Cp>Despite this, when you have the right production pipeline to help you streamline workflows, the pros can outweigh the cons. 
Let's see how.\u003C/p>\u003Ch2 id=\"1-creating-a-single-source-of-truth\">\u003Cstrong>1. Creating a single source of truth\u003C/strong>\u003C/h2>\u003Cp>Let's say each animator uses their favorite 3D graphics tool. How do we combine each asset together to create scenes? How do we preserve rigging information? What about editing? We need to maintain consistency and avoid information silos.\u003C/p>\u003Cp>To make things simpler, the pipeline should include a single source of truth (SSOT): a central repository that stores all your project's critical information, acting as the definitive reference point for all teams to eliminate the pitfalls of scattered data―redundant files, outdated versions, and wasted time spent chasing down the latest iteration. While your animation software is dedicated to creating and editing assets, a dedicated SSOT acts as a central hub for storing asset versions.\u003C/p>\u003Cp>To set this SSOT,  you can use asset managers like \u003Ca href=\"https://ayon.ynput.io/?ref=blog.cg-wire.com\" rel=\"noreferrer\">Ayon\u003C/a> or \u003Ca href=\"https://prism-pipeline.com/?ref=blog.cg-wire.com\" rel=\"noreferrer\">Prism Pipeline\u003C/a>. They will allow you to manage file locations and versioning via any software. If you want to go further and build a file asset library for future reuse, we recommend you to look at \u003Ca href=\"https://das-element.com/?ref=blog.cg-wire.com\">dasElement\u003C/a>, a full system to browse and organize all your files. \u003C/p>\u003Cp>Last but not least, with a production tracker like \u003Ca href=\"https://cg-wire.com/kitsu?ref=blog.cg-wire.com\" rel=\"noreferrer\">Kitsu\u003C/a>, you can import asset information from any content creation tool to build the project's creative asset library and keep a history of all artistic decisions and deliveries.\u003C/p>\u003Ch2 id=\"2-review-engine\">\u003Cstrong>2. 
Review engine\u003C/strong>\u003C/h2>\u003Cp>In animation, the creative process is rarely linear: ideas evolve, revisions are made, and constant back-and-forth between creation and review is essential for crafting a polished final product. This iterative nature demands a robust system for seamlessly transitioning between editing software and feedback sessions.\u003C/p>\u003Cp>Traditionally, this might involve exporting files, sending them to reviewers, and then waiting for feedback before returning to the editing software. This disrupts workflow and creates bottlenecks, especially when everyone uses different DCC tools―different formats, conventions, etc.\u003C/p>\u003Cp>That's where a production tracker is required. It acts as a central hub for delivery previews and validations. It allows reviewers to provide detailed feedback directly on top of the animation itself. This feedback can then be easily integrated back into the editing software via asset managers.\u003C/p>\u003Cp>Production trackers are software agnostic, so they can be integrated with any software. Which allows for the review of any deliveries, whatever the source software is.\u003C/p>\u003Ch2 id=\"3-versioning\">\u003Cstrong>3. Versioning\u003C/strong>\u003C/h2>\u003Cp>As your assets get refined, keeping track of different versions becomes crucial to track changes or revert to previous versions if necessary.\u003C/p>\u003Cp>The challenge will be to export results in formats supported by your target DCC.  Maintaining a consistent file format is also a good option for cross-integration between tools. Popular formats like FBX or USD, facilitate smooth data exchange between most animation software. By using a standard format, you avoid compatibility issues and ensure assets can be readily imported and manipulated in different applications. \u003C/p>\u003Cp>With asset managers, you can manage different versions of your working files, whether they are Blender models, Maya animations, or Unity scenes. 
Platforms keep track of each iteration, allowing you to see the history of changes and easily revert to a previous version if needed.\u003C/p>\u003Cp>Once your working files are exported to the right place, you will be able to build your scenes with the right version of any elements involved in a shot context. You will also be able to change elements easily every time a new version is published.\u003C/p>\u003Ch2 id=\"4-rendering\">\u003Cstrong>4. Rendering\u003C/strong>\u003C/h2>\u003Cp>Coordinating rendering tasks across multiple DCC tools can also be a headache. Each software has its own rendering engine, settings, and output formats, making it is challenging to maintain a consistent look across scenes.\u003C/p>\u003Cp>This is why most studios use a scene assembler like Mercenaries Guerilla for instance or SideFX Solaris. If you set proper versioning of your files, because most render farms can support different setups and rendering software you should be able to render any scene initially built with various software. \u003C/p>\u003Cp>From your pipeline, you can easily set an efficient iteration loop: export deliveries, assemble scenes, render them, push the result to the production tracker, and send feedback from the review engine. Then, your team will create new versions and run the process again.\u003C/p>\u003Ch2 id=\"5-software-setup\">\u003Cstrong>5. Software setup\u003C/strong>\u003C/h2>\u003Cp>Another challenge you will face is managing the different software installed on artist machines. It is very important to keep consistent software versions used by the artists, or you will end up with a pipeline nightmare. 
\u003C/p>\u003Cp>The most common solution is to rely on:\u003C/p>\u003Cul>\u003Cli>Ghost images of your installations to set new machines in a glimpse\u003C/li>\u003Cli>Package managers like \u003Ca href=\"https://rez.readthedocs.io/en/stable/?ref=blog.cg-wire.com\" rel=\"noreferrer\">Rez\u003C/a> to organize all your software and libraries\u003C/li>\u003Cli>A firm policy about the available versions\u003C/li>\u003C/ul>\u003Cp>It will contribute to the artist's experience. They will enjoy you making things clear from the beginning.\u003C/p>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Using multiple digital content creation tools can contribute significantly to your success. You unlock a wider creative toolbox and enhance your team's ability to bring your vision to life.\u003C/p>\u003Cp>However, it comes with a cost: it requires setting up a stronger pipeline. It will require setting up an asset manager and a production tracker as the glue that holds everything together. They will be needed to link every building step together and track and review all the work done.\u003C/p>\u003Cp>In the end, using multiple software to get the most out of them will require particular discipline. You will need to document and prepare your pipeline: to explain the rationale behind the choice and select the right file formats to ensure proper compatibility between the different software you use. Once you build your stack, we recommend modifying your software list as little as possible to avoid extra work and unexpected situations during production.\u003C/p>\u003Cp>If you do your homework, using many different software will be seamless, and your team's creativity will be unleashed. 
Setting clean and innovative pipelines is always a challenge, but it will make teams happy, which is worth the effort!\u003C/p>\u003Cp>\u003Cem>Come say hi in\u003C/em>\u003Ca href=\"https://discord.com/invite/VbCxtKN?ref=blog.cg-wire.com\">\u003Cem> \u003Cu>our Kitsu Community Discord of 1500+ \u003C/u>\u003C/em>\u003C/a>\u003Cem>\u003Cu>animation/VFX professionals\u003C/u> and share your tips!\u003C/em>\u003C/p>\u003Cp>\u003C/p>",{"uuid":1534,"comment_id":1535,"feature_image":1536,"featured":105,"visibility":10,"created_at":1537,"updated_at":1538,"custom_excerpt":1539,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1540,"primary_tag":1541,"url":1542,"excerpt":1539,"reading_time":115,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1007},"dbbfb141-bdcf-49df-9f19-506063db70d7","66a7cb4713c4750001ef394d","https://images.unsplash.com/photo-1639815188508-13f7370f664a?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDI4fHxjb25uZWN0fGVufDB8fHx8MTcyMjI3MjU5NHww&ixlib=rb-4.0.3&q=80&w=2000","2024-07-29T19:03:03.000+02:00","2026-02-20T06:05:08.000+01:00","From character design to rendering, animators use many software tools in their everyday work. 
But with so many options, choosing the right one can feel like navigating a labyrinth.",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/working-with-multiple-digital-content-creation-tools/","/posts/working-with-multiple-digital-content-creation-tools","2024-08-17T20:40:14.000+02:00",{"title":1529},"working-with-multiple-digital-content-creation-tools","posts/working-with-multiple-digital-content-creation-tools",[1549],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"k-3PLpScWbxPCLSqRFUYI3cjsXNOTwontkWJJSEr3xA",{"id":1552,"title":1553,"authors":1554,"body":7,"description":7,"extension":8,"html":1556,"meta":1557,"navigation":14,"path":1567,"published_at":1568,"seo":1569,"slug":1570,"stem":1571,"tags":1572,"__hash__":1576,"uuid":1558,"comment_id":1559,"feature_image":1560,"featured":105,"visibility":10,"created_at":1561,"updated_at":952,"custom_excerpt":1562,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1563,"primary_tag":1564,"url":1565,"excerpt":1562,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description"
:7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1566},"ghost/posts:physics-and-mechanics-principles-in-animation.json","Must-know Physics And Mechanics Principles Used In Animation (2026)",[1555],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cp>Animation is a reflection of the real world: it has to be just realistic enough to be believable, but also engaging and entertaining. Spider-Man’s movements are inspired by real-life parkour and breakdancing to make his web-slinging feel grounded and dynamic.&nbsp;\u003C/p>\u003Cp>To achieve this, animators use basic physics and mechanics principles they can bend for a more stylized look. In this article, we go through a few examples of physics and mechanics principles to better put words on animation techniques to create more engaging stories. \u003C/p>\u003Ch2 id=\"1-body-mechanics\">\u003Cstrong>1. Body Mechanics\u003C/strong>\u003C/h2>\u003Cp>Bringing your characters to life requires understanding how real bodies move.\u003C/p>\u003Cp>Body mechanics are the foundations of animating realistic actions like walking, running, and climbing stairs. For example, idle animations show characters standing and breathing subtly. You can choose any pose, neutral or action-oriented, and design it to loop seamlessly for a continuous effect. Other cycles and loops where the first and last keyframes are identical create the illusion of continuous movement.\u003C/p>\u003Cp>Other body parts like hair, clothes, or even inanimate objects might require dynamic simulations using physics principles. 
For example, to simulate flowing hair or billowing fabric, enhance the believability of secondary movements (a cape fluttering in the wind), or breathe life into props (fall, bounce, collide, etc.).\u003C/p>\u003Cp>For speech animation, a phoneme represents a specific mouth shape associated with a particular vowel or consonant sound. By mastering phonemes, you can create realistic lip movements that perfectly match your character's dialogue.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/docsz/AD_4nXfmh7szja65dXpTNw1I3NqJjqWlqkWOEre2kR_PUATu_zw5G9V0w3h3Kt9nHKAgBPz16APcum-OeBwcr13Bz7Q-0fBwjViTZazr2JFNiNAJgu1mHXzJx14Ptjeqf9dVYa9_7u-L6_hPktMHZ2remH2LsFKm?key=wGN05F489POzdh7HFSn1cg\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1440\" height=\"638\">\u003C/figure>\u003Cp>\u003Cem>Source: Blender Artists\u003C/em>\u003C/p>\u003Ch2 id=\"2-timing-and-spacing\">\u003Cstrong>2. Timing and Spacing\u003C/strong>\u003C/h2>\u003Cp>Animation is not merely the art of drawing cool poses: timing and spacing are equally important to create believable movement, whether it's a powerful jump or a slow walk.\u003C/p>\u003Cp>Think of timing as the number of frames dedicated to each action. It controls the speed of your animation. Fewer frames create a faster motion, while more frames slow things down. Imagine a bouncing ball: the higher the bounce, the more frames you'll need to show its peak and descent smoothly.\u003C/p>\u003Cp>Frame rate (FPS) refers to the number of images displayed per second. It's the foundation on which timing is built.\u003C/p>\u003Cp>Spacing refers to the distance an object moves between frames. It works hand-in-hand with timing to create the illusion of weight and speed. 
Wider spacing between frames suggests faster movement, while smaller spacing creates a slower, more deliberate action.\u003C/p>\u003Cp>You can also combine timing and spacing with motion blur to simulate the blurring of objects during rapid movement. Imagine a fast-moving car – our eyes perceive a blur, not a series of crystal clear frames.\u003C/p>\u003Ch2 id=\"3-weight-and-mass\">\u003Cstrong>3. Weight and Mass\u003C/strong>\u003C/h2>\u003Cp>By understanding the difference between weight and mass, and how 3D software translates these concepts into animation tools, you can make informed decisions about how your objects move. While weight and mass are often used interchangeably, they hold distinct meanings.\u003C/p>\u003Cp>Mass is the amount of matter an object contains. It's a constant value regardless of gravity. Imagine a massive boulder on Earth versus the moon – it has the same mass, but its weight differs due to varying gravitational forces.\u003C/p>\u003Cp>Weight is the force exerted on an object due to gravity. A heavier object experiences a greater gravitational pull, making it require more force to move and stop.\u003C/p>\u003Cp>3D animation software doesn't directly store weight or mass data for objects. However, it offers tools to mimic their effects on movement:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Animation Curves\u003C/strong> - By adjusting animation curves, you control how an object accelerates, reaches peak velocity, and decelerates. Slower curves for heavier objects create a sluggish, delayed response, while sharper curves for lighter objects depict quick changes in direction.\u003C/li>\u003Cli>\u003Cstrong>Force Fields\u003C/strong> - Some software allows applying forces like gravity or wind to objects. 
These forces influence the object's movement, mimicking how weight and external forces affect real-world objects.\u003C/li>\u003Cli>\u003Cstrong>Physics Simulations\u003C/strong> - Advanced software offers physics simulations that take into account factors like mass, gravity, and friction. These simulations can create highly realistic movements, but require careful setup and can be computationally expensive.\u003C/li>\u003C/ul>\u003Cp>Pay close attention to how objects of different weights behave. While grounding your animation in a sense of physical reality is important, you'll often benefit from slight exaggerations.\u003C/p>\u003Ch2 id=\"4-inertia-momentum-and-force\">\u003Cstrong>4. Inertia, Momentum, and Force\u003C/strong>\u003C/h2>\u003Cp>Inertia, momentum, and force form the foundation of how objects move in the real world.\u003C/p>\u003Cp>Imagine a bowling ball rolling down a lane. It keeps moving even after it leaves the bowler's hand. This is inertia in action. Inertia is an object's tendency to resist changes in its state of motion. An object at rest stays at rest, and an object in motion stays in motion (at a constant speed and in a straight line) unless acted upon by an unbalanced force. Inertia helps us depict weight and gradual stops. A character running won't come to a halt instantly. They'll skid a bit, their body continuing to move forward even as their feet come to a stop.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/docsz/AD_4nXfUzHbz1NDC8yZNwvtoFPfEzr5rGEcOP9vXKTUtLyR873-Nrgpr_iqAEyTQDGiPt6xiRDadHhVZ_V2-qSPVa_7KwxoAWWkHgDvFOE9SXNAcKIJm9jC43ZjobIvZrI2wa-aykFiXSAG6xizii_4ZMjpH_qan?key=wGN05F489POzdh7HFSn1cg\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"768\" height=\"774\">\u003C/figure>\u003Cp>\u003Cem>Source: Dreamstime.com\u003C/em>\u003C/p>\u003Cp>Momentum is the mass of an object times its velocity. Simply put, it's a measure of how much \"oomph\" a moving object carries. 
A heavier object or one moving faster will have greater momentum. A character throwing a punch transfers momentum from their body to their fist, resulting in a powerful impact. Likewise, a car with high momentum will take longer to stop than a bicycle.\u003C/p>\u003Cp>Force is anything that causes an object to accelerate. It's the push or pull that disrupts an object's inertia and changes its state of motion. For example, friction between a shoe and the ground is a force that slows down a running character. A strong force applied to a character will result in a faster or more dramatic change in movement, while a weak force might lead to a subtle sway or wobble.\u003C/p>\u003Cp>For every action, there's an equal and opposite reaction. This is Newton's Third Law of Motion. When a character throws a punch, their arm exerts a force forward (action). In reaction, the character's body experiences an opposite force pushing them back slightly. A character swinging a sword should experience a slight recoil as the blade connects with an object.\u003C/p>\u003Ch2 id=\"5-center-of-gravity\">\u003Cstrong>5. Center of Gravity\u003C/strong>\u003C/h2>\u003Cp>The force that keeps our characters grounded (and makes them fall when they lose balance) is gravity. The Center of Gravity (CoG) is that special point where all the object's weight is perfectly balanced. As animators, we constantly consider the CoG to ensure our characters move realistically and maintain proper balance.\u003C/p>\u003Cp>The position of the CoG significantly impacts how your animated character interacts with gravity.\u003C/p>\u003Cp>When the CoG falls directly within the base of support (think of the area where your character's feet touch the ground), they'll stand firmly balanced. We call this stable equilibrium. This is the typical pose for a character at rest.\u003C/p>\u003Cp>If the CoG shifts outside the base of support, your character becomes unbalanced and starts to tip. 
This can be used for dynamic actions like leaning into a turn or falling over. By understanding how the CoG moves with different poses, you can create natural-looking weight shifts and falls.\u003C/p>\u003Ch2 id=\"6-squash-and-stretch\">\u003Cstrong>6. Squash and Stretch\u003C/strong>\u003C/h2>\u003Cp>Squash and stretch involves exaggerating the way objects deform during movement, creating a sense of weight, flexibility, and humor.\u003C/p>\u003Cp>When a bouncy ball hits the ground, it squashes downwards momentarily before stretching back up. This squash and stretch is what we amplify in animation. We might squash a character down as they jump, then stretch them out in mid-air, all while maintaining their overall volume. This exaggeration is what breathes life into the movement.\u003C/p>\u003Ch2 id=\"7-green-screen-chroma-key\">\u003Cstrong>7. Green Screen / Chroma Key\u003C/strong>\u003C/h2>\u003Cp>Let's say you animate a character swimming through a coral reef. Instead of building a miniature reef set, a green screen allows you to film your character in front of a green backdrop. During editing, this green background is replaced with the vibrant underwater scene you envisioned.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/docsz/AD_4nXdp_3vZIuY5XbYM97M1JQDVswcx3G7pivCPDnLpfqkd1hC1bDVWyiUcYDQbI8q-xAreAvQBVU6DRpG9CJo__fIxWNZQqy_1gOeG99wJG10POKAHgRwGWZ7H4gZphhQFi94CS7TbQS4XYZDRGGRsnn33Wq_v?key=wGN05F489POzdh7HFSn1cg\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"664\" height=\"840\">\u003C/figure>\u003Cp>\u003Cem>Image by Wikipedia\u003C/em>\u003C/p>\u003Cp>But why green? The magic lies in color differentiation. Green typically doesn't clash with the color range used for characters or objects. 
This allows digital creation software to easily isolate the filmed element (your character) from the green background: you can just load the animation sequence, adjust the chroma key to identify the green background color, and replace the green background with something else.\u003C/p>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Understanding physics and mechanics principles is essential for creating believable and engaging animation. They provide the foundation for realistic movement, weight distribution, and interaction with the environment. By mastering these concepts, animators can breathe life into their characters and tell compelling stories.\u003C/p>\u003Cp>But animation isn't just about following cold, hard physics principles: it's an art form that thrives on creativity and expression. Animators often employ artistic license by subtly (or not so subtly) exaggerating movements or squashing and stretching objects to enhance character traits, evoke emotions, or inject humor. 
This artistic twist, grounded in a solid understanding of physics, is what separates a robotic imitation from a truly captivating animated performance.\u003C/p>\u003Cp>\u003Cem>Make sure to come say hi in\u003C/em>\u003Ca href=\"https://discord.com/invite/VbCxtKN?ref=blog.cg-wire.com\">\u003Cem> \u003Cu>our Discord community of 1000+ \u003C/u>\u003C/em>\u003C/a>\u003Cem>\u003Cu>animation professionals\u003C/u> and share your tips!\u003C/em>\u003C/p>",{"uuid":1558,"comment_id":1559,"feature_image":1560,"featured":105,"visibility":10,"created_at":1561,"updated_at":952,"custom_excerpt":1562,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1563,"primary_tag":1564,"url":1565,"excerpt":1562,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1566},"32582b9d-7bdb-40b6-9dcb-46b0b8ff8ee3","6671fb82ab09a1000107da2e","https://images.unsplash.com/photo-1718536669027-4ebd2b932ece?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8YWxsfDR8fHx8fHwyfHwxNzE4NzQ2MDE1fA&ixlib=rb-4.0.3&q=80&w=2000","2024-06-18T23:26:26.000+02:00","Animators use basic physics and mechanics principles they can bend for a more stylized look. 
In this article, we go through a few examples of physics and mechanics principles to better put words on animation techniques to create more engaging stories.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/physics-and-mechanics-principles-in-animation/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@parisbilal?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Paris Bilal\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/physics-and-mechanics-principles-in-animation","2024-06-19T10:39:57.000+02:00",{"title":1553},"physics-and-mechanics-principles-in-animation","posts/physics-and-mechanics-principles-in-animation",[1573,1574],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":1575,"name":60,"slug":63,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":59},"64875aac7093d20001dbafe6","JE0ojmjUgAN5SqJP-QTeYU07Ps0vkU1taeR_dQxnOZw",{"id":1578,"title":1579,"authors":1580,"body":7,"description":7,"extension":8,"html":1582,"meta":1583,"navigation":14,"path":1595,"published_at":1596,"seo":1597,"slug":1598,"stem":1599,"tags":1600,"__hash__":1602,"uuid":1584,"comment_id":1585,"feature_image":1586,"featured":105,"visibility":10,"created_at":1587,"updated_at":1588,"custom_excerpt":1589,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1590,"primary_tag":1591,"url":1592,"excerpt":1589,"reading_time":1593,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1594},"ghost/posts:engaging-animation.json","Techniques To Create More Engaging Animations In 
2026",[1581],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Ch2 id=\"introduction\">\u003Cstrong>Introduction\u003C/strong>\u003C/h2>\u003Cp>You pour your heart and soul into crafting an animation, only to find the final result feels... well, a bit bland. It moves, it looks alright, but the spark is missing. The sequence needs some more work, maybe some suggestions from colleagues or a supervisor.\u003C/p>\u003Cp>Inversely, look at your favorite animated show, and try to notice what makes it so attractive you don’t feel time passing. Is it the plot, the voice acting, the music, the depicted emotions?\u003C/p>\u003Cp>In this article, we explore 16 animation techniques to make your work more engaging. It usually doesn’t take much to get it right, but the devil is in the details, and a single edit can make your animation memorable.\u003C/p>\u003Cp>This list isn’t exhaustive, so don’t hesitate to share more with us in \u003Ca href=\"https://discord.com/invite/VbCxtKN?ref=blog.cg-wire.com\">\u003Cu>our Discord community of 1000+ animators\u003C/u>\u003C/a>. Without further ado, let’s dig in!\u003C/p>\u003Ch2 id=\"1-overlapping-action\">\u003Cstrong>1. Overlapping Action\u003C/strong>\u003C/h2>\u003Cp>Take a character jumping. In real life, the legs push off first, then the torso lifts, and finally, the head follows. Overlapping action replicates this by animating different body parts at slightly offset timings. 
Some parts initiate the movement (leading the action), while others react with a delay (following the action).\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/docsz/AD_4nXe3SW25VtrtomdJ7k3OzdThKU0CtQ5CEgEyMc_b7RqPxQVeUjH_pdZX0aaClYrczjgwYUtsT4YJP4a94qYdot1zQfENLQOE-vV83VJ4poF-r_IO6gGrDkOYwZ5-qQWJk4g3KcSAeq7MAItCXXlZvP5beK1t?key=uzxSnDJSAdmuIOsXchZl3A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"800\" height=\"450\">\u003C/figure>\u003Cp>\u003Cem>Source: animost.com\u003C/em>\u003C/p>\u003Cp>Overlapping action mimics the way our bodies move. By animating body parts with independent timing, you create a sense of weight, momentum, and fluidity that static, perfectly synchronized movements lack. It can be used to emphasize a character's emotions and intentions. A character with a lagging arm after throwing a punch conveys exhaustion, while exaggerated hair flicks during a run showcase excitement.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Identify the Leading and Following Parts\u003C/strong> - Break down the action into its core components. Which body part initiates the movement? Which parts react with a delay due to inertia or weight?\u003C/li>\u003Cli>\u003Cstrong>Stagger the Timing\u003C/strong> - Animate the leading part first, followed by the parts that react in sequence. For example, in a jump, animate the legs pushing off the ground before the torso rises.\u003C/li>\u003Cli>\u003Cstrong>Consider Weight and Momentum\u003C/strong> - Heavier body parts will naturally lag behind lighter ones. Use this principle to create a sense of physicality.\u003C/li>\u003C/ol>\u003Ch2 id=\"2-snappy-animation\">\u003Cstrong>2. Snappy Animation\u003C/strong>\u003C/h2>\u003Cp>Snappy animation is a technique that injects dynamism into your characters by creating a strong contrast between slow and fast movements. 
Imagine your character striking a powerful pose, holding it for a beat, and then transitioning quickly and decisively into the next action. This \"snap\" between poses grabs the viewer's attention and emphasizes the impact of the animation.\u003C/p>\u003Cp>The sharp contrast between slow and fast movements keeps the audience hooked. They're drawn in by the anticipation building during the held pose and then surprised and delighted by the snappy transition. Snappy animation allows for a more exaggerated portrayal of emotions. The held poses become moments for the character to express their feelings clearly, while the snappy transitions emphasize the intensity of those emotions. By emphasizing key poses, snappy animation makes the action easier to follow for the viewer. The audience can clearly see what's happening on screen without getting lost in a blur of movement.\u003C/p>\u003Cp>Snappy animation can be found in a variety of styles, from classic cartoons like Looney Tunes to modern anime. Look for moments where characters hold a powerful pose after a punch or strike a dramatic silhouette during a jump. These quick transitions between poses are hallmarks of snappy animation.\u003C/p>\u003Ch2 id=\"3-breaking-joints\">\u003Cstrong>3. Breaking Joints\u003C/strong>\u003C/h2>\u003Cp>Have you ever wondered why cartoon fight scenes can feel so much more dynamic than their live-action counterparts? It's because of a technique called \"breaking joints\"―exaggerating the bend and movement of limbs to create a more visually appealing animation.\u003C/p>\u003Cp>Breaking joints essentially means pushing the natural range of motion for a limb or joint during animation. This can involve extreme bends, stretches, and twists that wouldn't be possible in real life. 
Instead of a standard arm extension when a character throws a punch, the animator might exaggerate the elbow bend and forearm twist to create a powerful and exaggerated arc.\u003C/p>\u003Ch2 id=\"4-fish-eyes\">\u003Cstrong>4. Fish Eyes\u003C/strong>\u003C/h2>\u003Cp>Another technique to consider is the fish-eye pose, where the eyes are positioned further apart than usual.\u003C/p>\u003Cp>The fish-eye pose is a powerful tool for comedic effect, surprise, or even fear. It disrupts the natural eye placement, instantly grabbing the viewer's attention.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/docsz/AD_4nXckZnsOWuv4cOji19npXsxcInkLORzu8Ao4B0aAJwoPL3jPdzTmypzY4tE8plra23u3rTA1X3wdOrSDqwFoS2PXnQ2S98VnLMNC87O2ar4ejGte2PZQiRv_k495UzlFyjIadvK6q2T3l4o7KtCgUt0MRNLm?key=uzxSnDJSAdmuIOsXchZl3A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"1194\">\u003C/figure>\u003Cp>\u003Cem>Source: Naruto\u003C/em>\u003C/p>\u003Ch2 id=\"5-settle\">\u003Cstrong>5. Settle\u003C/strong>\u003C/h2>\u003Cp>The final moments of an animation can be just as impactful as the grand opening: a well-executed settle, the brief animation frames used to ease an action into a rest pose, adds a touch of realism.\u003C/p>\u003Cp>A bouncing ball wouldn't come to a complete stop instantly. There'd be a slight wobble and a final compression before it settles still. This subtle movement is what a settle captures in animation.\u003C/p>\u003Cp>Abrupt stops look unnatural. Settles bridge the gap between movement and stillness, mimicking the inertia of real-world objects. A clear rest pose signifies the end of an action. Without a settle, viewers might be left confused about whether the movement has truly finished. This can be particularly important for fast-paced animations where clarity is key.\u003C/p>\u003Ch2 id=\"6-overshoot\">\u003Cstrong>6. 
Overshoot\u003C/strong>\u003C/h2>\u003Cp>Take the example of a bouncing ball again: as it hits the ground, it compresses slightly before launching back up. This momentary overshoot adds a sense of weight and responsiveness to the animation. Overshoot is the act of an animation element temporarily exceeding its final resting position before returning smoothly.\u003C/p>\u003Cp>As long as you keep it subtle, overshoot adds a touch of pep and energy to your animations. It conveys a sense of momentum and weight, making even simple movements feel livelier and more engaging.\u003C/p>\u003Ch2 id=\"7-animation-layer\">\u003Cstrong>7. Animation Layer\u003C/strong>\u003C/h2>\u003Cp>Think of animation layers like transparencies on an overhead projector. Your base layer lays the foundation, typically containing the main movement of your animation. Secondary layers are then added on top, introducing subtle details and flourishes.\u003C/p>\u003Cp>Imagine animating a dog fetching a frisbee. The base layer would show the dog running (primary movement). A secondary layer could be used for the dog's tail wagging (subtle, independent action). With layers, you can easily adjust the intensity of the wag or even swap it out for a panting animation – all without affecting the core running motion.\u003C/p>\u003Cp>Considering all the possible dimensions of an apparently simple movement adds depth to your animation.\u003C/p>\u003Ch2 id=\"8-moving-hold\">\u003Cstrong>8. Moving Hold\u003C/strong>\u003C/h2>\u003Cp>Engaging animation doesn't just rely on flashy movement; it also needs moments of stillness that feel alive. This is where the concept of a moving hold comes in.\u003C/p>\u003Cp>A moving hold is a technique where a character appears to hold a pose, but with subtle, almost imperceptible movements. 
This slight animation keeps the character from looking frozen or lifeless.\u003C/p>\u003Cp>For example, when a character clenches their fist, you could add a slight tremor in the hand to suggest simmering anger. The movement should be barely noticeable like slight breathing motions, weight shifts, or involuntary twitches, yet effective in conveying purpose.\u003C/p>\u003Ch2 id=\"9-rhythm\">\u003Cstrong>9. Rhythm\u003C/strong>\u003C/h2>\u003Cp>Rhythm is the result of timing, spacing, and intensity:\u003C/p>\u003Cul>\u003Cli>Timing is the speed at which objects or characters move within a scene. Fast movements create a sense of urgency or excitement, while slower movements can build tension or emphasize a particular detail.\u003C/li>\u003Cli>Spacing involves the distance between objects or characters throughout their animation. Wide spacing can create a sense of isolation or emphasize the scale of an environment, while closer spacing can build a feeling of urgency or connection.\u003C/li>\u003Cli>Intensity refers to the power or force behind an action or gesture. A forceful punch will have a different rhythm than a gentle wave, and this difference can be used to convey a range of emotions and ideas.\u003C/li>\u003C/ul>\u003Cp>Consider a scene of a character running towards a cliff edge. The animation might start with slow, deliberate movements as the character builds up speed, gradually increasing the timing and intensity to create a sense of urgency. Just before reaching the edge, there could be a dramatic pause, followed by a quick jump or desperate scramble for safety. Each element of this scene, from the initial slow build-up to the final burst of movement, contributes to the rhythm.\u003C/p>\u003Cp>Effective rhythm helps direct the viewer's attention to specific parts of the scene, ensuring they don't miss important details. Carefully timed pauses or slow build-ups can create suspense and anticipation for the next action. 
The rhythm of an animation can directly influence the emotions it evokes. A fast-paced, jerky movement might suggest fear or panic, while a smooth, flowing movement could portray calmness or grace.\u003C/p>\u003Ch2 id=\"10-smear-frames\">\u003Cstrong>10. Smear Frames\u003C/strong>\u003C/h2>\u003Cp>A smear frame is a duplicated or heavily stretched version of the previous or next frame, strategically inserted for just a single frame. This creates a blurred effect that emphasizes the swiftness of the action.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/docsz/AD_4nXcdl7ai-dL-4UrL7YtkGLEwZYszBfCuzPCgg70SQUATDx0lIdnPikFXFQtCDXbkxD7ywU6t6lEX3aVR9f5A2QJItXhvwX4ReyuIo08j0WVh8m-WpxmG20mv2vQcyHvcN46X__65uGIsaNTNST308uEd34Wx?key=uzxSnDJSAdmuIOsXchZl3A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"720\" height=\"540\">\u003C/figure>\u003Cp>\u003Cem>Source: The Simpsons\u003C/em>\u003C/p>\u003Cp>Real-world movement isn't perfectly crisp. Our eyes perceive a blur during rapid motion. Smear frames mimic this natural blur. This technique truly shines when you want to showcase extreme speed or force. Imagine a superhero streaking across the screen or a powerful punch landing – smear frames can amplify the impact and make the movement feel more impactful.\u003C/p>\u003Ch2 id=\"11-accents\">\u003Cstrong>11. Accents\u003C/strong>\u003C/h2>\u003Cp>Just like actors emphasize key moments in a performance, animators can use accents to make specific parts of their animation truly shine. 
Accents are deliberate deviations from the standard animation style or pacing:\u003C/p>\u003Cul>\u003Cli>Exaggerated movements or expressions - Think wide-eyed surprise, a dramatic leap of joy, or a character's jaw dropping in disbelief.\u003C/li>\u003Cli>Shifts in color or lighting - A sudden burst of brightness to highlight a key moment, or a character bathed in a different colored light to create a distinct mood.\u003C/li>\u003Cli>Changes in animation style - A brief switch to a more cartoony style for a humorous moment, or a shift to a more detailed, realistic portrayal for dramatic emphasis.\u003C/li>\u003C/ul>\u003Cp>They act like a visual spotlight, ensuring the audience focuses on the accented moment while effectively communicating joy, fear, anger, or any other emotion you want viewers to feel. A well-placed accent can prevent the animation from becoming predictable and keep viewers engaged.\u003C/p>\u003Ch2 id=\"12-attitude\">\u003Cstrong>12. Attitude\u003C/strong>\u003C/h2>\u003Cp>Character attitude is the underlying emotional state and personality that guides a character's actions and reactions. It encompasses their confidence level, temperament, and overall approach to the situation.\u003C/p>\u003Cp>When viewers understand a character's attitude, they can empathize, anticipate, and connect with them on a deeper level.\u003C/p>\u003Cp>Attitude drives a character's choices and reactions, propelling the narrative forward in a natural, engaging way.\u003C/p>\u003Cp>When you animate, you should always have a clear picture in mind of the type of attitude the character has to better perform the scene!\u003C/p>\u003Ch2 id=\"13-silhouette\">\u003Cstrong>13. 
Silhouette\u003C/strong>\u003C/h2>\u003Cp>While it might seem like a basic black-and-white image, a silhouette is used to check the clarity of a pose: by reducing the character to its essential form, you can easily identify if an action is readable or if the body language is conveying the intended emotion.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/docsz/AD_4nXde56gox9eV6DnhuFGDvSodj4dBwPI1uJh6dank8z4-TmTT47uQBWwzqm3iBdmGvrzbv0-_hqsevc1wbaOvEUTImehDKpCyasejpb_zCed79ZORYwuOaM8HreEuWIvmjT9dKgM8wJtQoc2ti0DUze44NpxT?key=uzxSnDJSAdmuIOsXchZl3A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"800\" height=\"600\">\u003C/figure>\u003Cp>\u003Cem>Source: Animator Island\u003C/em>\u003C/p>\u003Ch2 id=\"14-reference\">\u003Cstrong>14. Reference\u003C/strong>\u003C/h2>\u003Cp>A reference is the use of live-action video footage shot specifically to inspire your animation: animators act out the scene themselves, film it, and then use that footage as a guide for their animation.\u003C/p>\u003Cp>Live-action footage provides a wealth of information about human body mechanics, weight distribution, and how clothing interacts with movement. Studying references helps you create animations that feel natural and believable. They also allow you to capture key poses.\u003C/p>\u003Ch2 id=\"15-balance\">\u003Cstrong>15. Balance\u003C/strong>\u003C/h2>\u003Cp>Balance refers to the visual distribution of weight in your animation. Make sure it's believable.\u003C/p>\u003Cp>If you draw a line through the center of your character, a balanced pose distributes the character's mass evenly on either side of that line to create a sense of stability and prevent your characters from looking like they're about to topple over.\u003C/p>\u003Cp>A well-balanced animation feels more natural and believable to the viewer. 
Even when characters are jumping or performing acrobatics, a sense of weight distribution helps us understand the forces acting on them. If a character is leaning too far in one direction, it can be unclear what they're about to do next.\u003C/p>\u003Ch2 id=\"16-use-verticality\">\u003Cstrong>16. Use Verticality\u003C/strong>\u003C/h2>\u003Cp>Our natural world is full of towering trees, majestic mountains, and sprawling landscapes. But in animation, relying solely on flat horizons can leave your scenes feeling, well, flat. By incorporating vertical elements, you create a sense of dimension and draw the viewer's eye into the frame.\u003C/p>\u003Cp>Studio Ghibli, renowned for its breathtaking animation, is a master of using verticality: take a look at any Hayao Miyazaki film, and you'll see towering trees reaching towards the sky, characters traversing layered landscapes, and buildings that stretch upwards. This wasn't by accident. Miyazaki himself was heavily influenced by the work of French animator Paul Grimault, whose film \"Le Roi et l'Oiseau\" (The King and the Bird) is a prime example of how verticality can be used to create stunning depth in animation:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/docsz/AD_4nXc_ttHfpcrXipiI7s8hZzc5c-SiJoVkBs0c87wlGoCvPvMTc1C6tAmLt-5nTiLah9Mt0ATBh5BZFFG6yLG1DF27a2Vip0nEw2uTtrxYDQAyZ4EwuTtGHjKdselST1mdmXoSVBWaNo4fGwFri2ht-BYLhmKM?key=uzxSnDJSAdmuIOsXchZl3A\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1280\" height=\"720\">\u003C/figure>\u003Cp>Don't be afraid to fill the frame with elements like towering trees or buildings in the foreground or background. This creates a sense of layering and draws the viewer's eye deeper into the scene. Instead of a single-level landscape, create environments with different heights. Think rolling hills, bridges connecting elevated areas, or characters climbing structures. This adds complexity and visual interest. 
Use camera movements that pan upwards or downwards, showcasing the scale of your environment. Play with perspective to emphasize verticality.\u003C/p>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>And that's a wrap. Hope you learned something!\u003C/p>\u003Cp>Capturing the audience's attention is key, and it's often the subtle details that elevate an animation from ordinary to extraordinary.\u003C/p>\u003Cp>Great animation takes inspiration from the real world. Whether it's the way a bird takes flight or the subtle shift in a person's posture that conveys nervousness, studying real-life movement helps imbue your animations with authenticity. This authenticity, in turn, fosters a connection with viewers.\u003C/p>\u003Cp>Don't be afraid to experiment with different techniques, try new software, and push your creative boundaries. The more you practice, the more comfortable you'll become!\u003C/p>\u003Cp>\u003Cem>Make sure to come say hi in\u003C/em>\u003Ca href=\"https://discord.com/invite/VbCxtKN?ref=blog.cg-wire.com\">\u003Cem> \u003Cu>our Discord community of 1000+ animators\u003C/u>\u003C/em>\u003C/a>\u003Cem> and share your 
tips!\u003C/em>\u003C/p>",{"uuid":1584,"comment_id":1585,"feature_image":1586,"featured":105,"visibility":10,"created_at":1587,"updated_at":1588,"custom_excerpt":1589,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1590,"primary_tag":1591,"url":1592,"excerpt":1589,"reading_time":1593,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1594},"9fd71756-631f-4f51-97f6-7f06352b8f07","66644033adc9120001420e10","https://images.unsplash.com/photo-1558368315-d44d7462073e?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDEwfHxwdXBwZXR8ZW58MHx8fHwxNzE3ODQ2MjM2fDA&ixlib=rb-4.0.3&q=80&w=2000","2024-06-08T13:27:47.000+02:00","2026-02-20T06:04:23.000+01:00","In this article, we explore 16 animation techniques to make your work more engaging. 
It usually doesn’t take much to get it right, but the devil is in the details and a single edit can make your animation memorable.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/engaging-animation/",10,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@eric_masur?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Eric Masur\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/engaging-animation","2024-06-08T13:33:07.000+02:00",{"title":1579},"engaging-animation","posts/engaging-animation",[1601],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"jB4iiHDzwHrD1eqaymPxe7hZ_RqwvvexkzxjhfeZMwQ",{"id":1604,"title":1605,"authors":1606,"body":7,"description":7,"extension":8,"html":1608,"meta":1609,"navigation":14,"path":1620,"published_at":1621,"seo":1622,"slug":1623,"stem":1624,"tags":1625,"__hash__":1627,"uuid":1610,"comment_id":1611,"feature_image":1612,"featured":105,"visibility":10,"created_at":1613,"updated_at":1614,"custom_excerpt":1615,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1616,"primary_tag":1617,"url":1618,"excerpt":1615,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1619},"ghost/posts:camera-work-in-animation.json","Camera Work In Animation (2026): 10 Basic Techniques To Master",[1607],{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},"\u003Cp>\u003C/p>\u003Cp>As our invisible eyes, the camera in animation has an active role. 
By meticulously controlling its movement, animators achieve a variety of effects to engage the viewers―far beyond simply showing us what's happening on screen, it's a powerful storytelling tool often unseen.\u003C/p>\u003Cp>Camera animation guides our attention, drawing our focus to specific details or characters. It helps establish the vastness of a sprawling cityscape or the intimacy of a whispered conversation. Most importantly, camera animation plays a crucial role in setting the mood and tone of the story. A sweeping shot across a majestic landscape evokes a sense of awe, while a tightly framed close-up builds suspense or reveals a character's inner turmoil.\u003C/p>\u003Cp>In the following sections, we explore the various camera techniques employed in animation and how they contribute to creating engaging stories.\u003C/p>\u003Ch2 id=\"camera-properties\">\u003Cstrong>Camera properties\u003C/strong>\u003C/h2>\u003Cp>Just like a real camera, a virtual one has several key properties that animators manipulate to create specific effects and guide the viewer's eye:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Position\u003C/strong> - the camera's location in the 3D space of the animation. By moving the camera closer or farther away from the subject, animators control the size of objects in the frame.\u003C/li>\u003Cli>\u003Cstrong>Rotation\u003C/strong> - rotating a camera allows animators to pan across a scene, reveal hidden details, or create a sense of dynamism by following a moving character.\u003C/li>\u003Cli>\u003Cstrong>Field of view\u003C/strong> - the width of the scene captured by the camera lens. A wide field of view encompasses a larger area, useful for establishing shots, while a narrow field of view zooms in, focusing attention on specific elements.\u003C/li>\u003Cli>\u003Cstrong>Focal length\u003C/strong> - Focal length essentially controls the perspective and distortion in the shot. 
By adjusting it, animators make objects appear larger or smaller within the frame, even without changing the camera's position. Different focal lengths also affect how background elements appear – a shorter focal length creates a more dramatic sense of depth, while a longer focal length compresses the background, making it seem flatter. By manipulating focal length, animators create a sense of focus, emphasize specific elements, or even subtly distort reality for a more stylized look.\u003C/li>\u003C/ul>\u003Cp>Understanding and mastering these camera properties is how animators come up with the techniques in the next sections.\u003C/p>\u003Ch2 id=\"1-camera-shake\">\u003Cstrong>1. Camera shake\u003C/strong>\u003C/h2>\u003Cp>A camera shake simulates the jittery movement of a handheld camera, ranging from subtle tremors to full-on wobble.\u003C/p>\u003Cp>Animators achieve camera shake by introducing small, rapid movements to the camera's position and rotation. These movements aren't random, but carefully crafted to create a sense of realism or stylization.\u003C/p>\u003Cp>Imagine a scene where a character is running away from danger. The camera might shake slightly with each footfall, conveying the character's urgency and fear. As the danger gets closer, the shaking intensifies, mimicking the character's rising panic.\u003C/p>\u003Cp>A shaky camera during an action sequence throws the audience right into the heart of the battle, making them feel the chaos and intensity of the fight. In horror or suspenseful moments, subtle camera shakes build unease and anticipation, making viewers feel on edge. They can also be used for comedic effect, emphasizing a character's clumsiness or a lighthearted situation.\u003C/p>\u003Ch2 id=\"2-zoom\">\u003Cstrong>2. 
Zoom\u003C/strong>\u003C/h2>\u003Cp>One of the most fundamental camera techniques is the zoom: by adjusting the focal length, a zoom alters the visual depth of field, creating a dynamic relationship between the subject and its surroundings.\u003C/p>\u003Cp>Zooming in allows for a tighter focus on a character's expression, amplifying emotions like surprise, fear, or determination. It also draws the audience's attention to a specific detail within the scene. Inversely, zooming out broadens the perspective, to establish the setting or create a sense of awe by revealing a grander scale.\u003C/p>\u003Cp>A classic example of the zoom's emotional impact can be found in the close-up zoom on Simba's face in \"The Lion King\" as he realizes the truth about his father's death. The zoom emphasizes Simba's grief and disbelief, drawing the audience into his emotional turmoil.\u003C/p>\u003Ch2 id=\"3-pan\">\u003Cstrong>3. Pan\u003C/strong>\u003C/h2>\u003Cp>The pan is a horizontal swivel, revealing more of the environment from left to right or vice versa. The camera itself stays locked in one spot, but its head smoothly turns.\u003C/p>\u003Cp>Pans are incredibly versatile tools for animators. They can be used to establish a scene, showcasing its vastness or cluttered details. A slow pan across a breathtaking landscape evokes awe, while a frantic pan across a chaotic marketplace builds tension. Pans can also be used to follow a character's movement or track an object of interest, keeping the audience engaged in the action.\u003C/p>\u003Cp>A classic example of a pan might be at the beginning of a scene. The camera pans across a sleeping character's room, slowly revealing the alarm clock that jolts them awake. This simple technique not only sets the location but also establishes the emotional tone of the scene.\u003C/p>\u003Ch2 id=\"4-tilt-shot\">\u003Cstrong>4. 
Tilt shot\u003C/strong>\u003C/h2>\u003Cp>A tilt shot is a camera movement where the viewpoint pivots up or down vertically, revealing more of the scene above or below the frame. It actively influences how the audience perceives the scene.\u003C/p>\u003Cp>The direction of the tilt unlocks a range of emotional responses. Tilting upwards creates a sense of awe and wonder, emphasizing towering structures or a character looking skyward. Tilting downwards suggests vulnerability, dominance, or even chaos. For example, a scene tilting down from a powerful character looking down on a protagonist, or a tilt following a falling object to heighten the tension.\u003C/p>\u003Cp>They can also be used for dramatic reveals or transitions. Tilting up might unveil a hidden threat lurking in the shadows while tilting down could introduce a new character entering the scene. Consider a scene where a lone character stands before a giant, menacing castle. A slow tilt upwards starting from the character's feet and ending at the castle spires effectively conveys the overwhelming scale and power of the obstacle.\u003C/p>\u003Ch2 id=\"5-dolly-zoom\">\u003Cstrong>5. Dolly zoom\u003C/strong>\u003C/h2>\u003Cp>A dolly zoom, also known as the vertigo effect, is a technique that combines camera movement with focal length adjustments to create a visually striking effect.\u003C/p>\u003Cp>Imagine a camera mounted on a track. As the camera physically moves toward the subject (dolly-in) the lens simultaneously zooms out. Conversely, the camera can dolly away (move backward) while zooming in. This creates a sense of distortion where the background seems to stretch or compress, while the subject remains relatively the same size in the frame.\u003C/p>\u003Cp>The dolly zoom is a powerful tool for manipulating the viewer's perception. 
A dolly-in with a zoom-out creates feelings of isolation or disorientation, perfect for suspenseful scenes, while a dolly away with a zoom-in makes the background feel overwhelming or claustrophobic.\u003C/p>\u003Cp>This technique is often used to highlight a character's emotional state or draw focus to a critical moment in the story. A classic example of the dolly zoom can be seen in Alfred Hitchcock's film Vertigo. As the character Scottie descends a staircase, the camera dollies in while zooming out, warping the background and amplifying his fear of heights.\u003C/p>\u003Cp>While traditionally used in live-action films, the dolly zoom can be effectively recreated in animation using 3D software.\u003C/p>\u003Ch2 id=\"6-truck-shot\">\u003Cstrong>6. Truck shot\u003C/strong>\u003C/h2>\u003Cp>A truck shot involves moving the camera laterally, left or right, along a virtual track. Unlike a pan which swivels the camera without changing its position, a truck shot physically repositions the camera's viewpoint.\u003C/p>\u003Cp>A truck shot is used to slowly unveil a scene, building anticipation or suspense. Imagine for example a truck shot to the right, starting on a closed door and gradually revealing a character standing triumphantly on the other side. Trucking alongside a moving character or object creates a feeling of speed and dynamism. It puts the viewer right in the action, like being in the passenger seat of a car speeding down a highway. Trucking across a vast landscape or a towering building effectively communicates its size and grandeur.\u003C/p>\u003Ch2 id=\"7-pedestal-shot\">\u003Cstrong>7. Pedestal shot\u003C/strong>\u003C/h2>\u003Cp>The pedestal camera technique raises or lowers the camera vertically on a fixed axis. 
Unlike a tilt, which changes the camera angle while remaining horizontal, the pedestal shot keeps the camera perfectly level as it moves.\u003C/p>\u003Cp>A slow \"pedestal up\" shot dramatically reveals a towering skyscraper or a majestic mountain range, or introduces a character with authority. Inversely, a \"pedestal down\" shot on a character makes them feel small and powerless.\u003C/p>\u003Cp>For example, in a scene where a lone astronaut stands on the surface of the moon, a slow pedestal up could reveal the vast emptiness of space, highlighting the astronaut's isolation.\u003C/p>\u003Ch2 id=\"8-arc-shot\">\u003Cstrong>8. Arc shot\u003C/strong>\u003C/h2>\u003Cp>The arc shot, also known as a 360-degree shot or 360 tracking shot, is a camera technique where the viewpoint revolves around a subject in a curved path. Imagine the camera smoothly gliding on a circular track, capturing the scene from ever-changing angles.\u003C/p>\u003Cp>By circling a character, the arc shot keeps them in focus while revealing their surroundings and establishing them as the center of attention. The gradual reveal of an environment creates a sense of mystery, leaving the audience wondering what lies beyond the character's immediate frame. A slow arc conveys a sense of awe and wonder, while a faster, more erratic one might build tension or excitement.\u003C/p>\u003Cp>For a classic example, look no further than the Matrix scene where Neo effortlessly dodges bullets while the camera elegantly circles him.\u003C/p>\u003Ch2 id=\"9-follow-shot\">\u003Cstrong>9. Follow shot\u003C/strong>\u003C/h2>\u003Cp>The follow technique keeps a character or object in the frame as the camera moves alongside it through panning, tilting, or even a combination of both movements.\u003C/p>\u003Cp>A well-executed follow shot puts the audience right in the heart of the action, making them feel the rush of adrenaline alongside the character. For example, seeing a character racing through a forest. 
Following a character as they walk with their head down communicates feelings of sadness or defeat, following an enthusiastic character skipping down the street portrays joy and excitement.\u003C/p>\u003Ch2 id=\"10-fly-through-shot\">\u003Cstrong>10. Fly-through shot\u003C/strong>\u003C/h2>\u003Cp>A fly-through shot takes viewers on a journey through a virtual space. The camera movement mimics the feeling of flying, swiftly moving forward while panning and tilting to reveal the environment.\u003C/p>\u003Cp>Soaring through a grand landscape, a bustling cityscape, or even a fantastical world is incredibly immersive. They efficiently introduce viewers to a new environment, giving them a quick overview of the space, but also build up excitement and tension, especially if it leads towards a specific destination or reveals a hidden element.\u003C/p>\u003Cp>Pixar's A Bug's Life opening scene is a great example of a fly-through shot. The camera swoops through the grass, revealing the bustling ant colony.\u003C/p>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>It's the subtle art of camera work that breathes life into these creations: more than just framing a shot, camera techniques are powerful tools to elevate a scene or rest the viewer's eyes.\u003C/p>\u003Cp>This article explored the various camera angles, movements, and shot types that animators use. By understanding how these elements influence the viewer's perception, you can use them to evoke emotions, establish character dominance, and guide the audience's focus.\u003C/p>\u003Cp>While the final product might appear effortless, effective camera work is a complex topic. 
Remember to incorporate camera techniques into your storyboarding process to plan out how each scene will unfold visually.\u003C/p>\u003Cp>Take the time to learn the fundamental camera movements and angles, then experiment with innovative approaches!\u003C/p>\u003Cp>\u003Cem>If you're looking to start a collaborative project, don't hesitate to reach out on our \u003C/em>\u003Ca href=\"https://discord.com/invite/VbCxtKN?ref=blog.cg-wire.com\" rel=\"noreferrer\">\u003Cem>Discord server\u003C/em>\u003C/a>\u003Cem> to get advice from other studios who have already gone through the process!\u003C/em>\u003C/p>",{"uuid":1610,"comment_id":1611,"feature_image":1612,"featured":105,"visibility":10,"created_at":1613,"updated_at":1614,"custom_excerpt":1615,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1616,"primary_tag":1617,"url":1618,"excerpt":1615,"reading_time":140,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1619},"82cd2f66-38a4-42cf-9bbd-9dd43d25797e","664dbf4c70d4320001284b2c","https://images.unsplash.com/photo-1520904541532-f47ac41fec59?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fGNhbWVyYSUyMG1vdmllfGVufDB8fHx8MTcxNjM3NTUzNXww&ixlib=rb-4.0.3&q=80&w=2000","2024-05-22T11:47:56.000+02:00","2026-03-26T10:27:50.000+01:00","As our invisible eyes, the camera in animation has an active role. 
By meticulously controlling its movement, animators achieve a variety of effects to engage the viewers―far beyond simply showing us what's happening on screen, it's a powerful storytelling tool often unseen.",{"id":589,"name":590,"slug":591,"profile_image":592,"cover_image":7,"bio":593,"website":7,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":596},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/camera-work-in-animation/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@sharegrid?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">ShareGrid\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/camera-work-in-animation","2024-05-22T12:59:53.000+02:00",{"title":1605},"camera-work-in-animation","posts/camera-work-in-animation",[1626],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"7vCk1wbyTh76GvRypaz8WuZheto778xOlU_pXCJfMxA",{"id":1629,"title":1630,"authors":1631,"body":7,"description":7,"extension":8,"html":1633,"meta":1634,"navigation":14,"path":1645,"published_at":1646,"seo":1647,"slug":1648,"stem":1649,"tags":1650,"__hash__":1654,"uuid":1635,"comment_id":1636,"feature_image":1637,"featured":105,"visibility":10,"created_at":1638,"updated_at":1639,"custom_excerpt":1640,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1641,"primary_tag":1642,"url":1643,"excerpt":1640,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1644},"ghost/posts:animation-asset-security.json","Protecting Your Animation Studio’s Assets (2026): Security and Compliance",[1632],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Ch2 id=\"introduction\">\u003Cstrong>Introduction\u003C/strong>\u003C/h2>\u003Cp>Animation feels like magic. 
But behind the scenes, a complex network of people, processes, and software known as the animation pipeline fuels this magic by handling a treasure trove of intellectual property (IP), including original character designs, storyboards, and the intricate animation sequences that form the heart of the final product.\u003C/p>\u003Cp>The very nature of the animation pipeline, with its collaborative workflows and reliance on digital assets, creates a unique set of security challenges: protecting these valuable IP assets from unauthorized access, leaks, or malicious attacks is crucial for studios of all sizes. This article dives into the security landscape of the animation industry―vulnerabilities, best practices, and emerging technologies that animation studios can leverage to secure their work.\u003C/p>\u003Ch2 id=\"why-security\">\u003Cstrong>Why Security\u003C/strong>\u003C/h2>\u003Cp>Clients entrust studios with their creative vision and confidential information. But original animation productions also generate a lot of sensitive data: character designs, storyboards, 3D models, animation sequences, and even sometimes proprietary software. \u003Cstrong>Protecting these assets isn't just about protecting the studio's creative efforts―it's a critical business imperative.\u003C/strong>\u003C/p>\u003Cp>Without robust security measures, animation studios become vulnerable to data breaches: malicious actors can exploit weaknesses in security systems to gain unauthorized access to sensitive information, leading to the theft of intellectual property, financial data, and even personal information of employees and clients. \u003Cstrong>The consequences can be devastating in terms of financial losses, reputational damage, and even legal repercussions.\u003C/strong>\u003C/p>\u003Cp>Fortunately, the technology is already there to drastically reduce risks.\u003C/p>\u003Ch2 id=\"1-access-control\">\u003Cstrong>1. 
Access Control\u003C/strong>\u003C/h2>\u003Cp>Access control acts as the gatekeeper, regulating who can access what and at what level.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Authentication\u003C/strong> - Just as a physical key unlocks specific doors, user authentication ensures that only authorized individuals can access a system or information. This typically involves verifying a user's identity through credentials like usernames, passwords, or multi-factor authentication.\u003C/li>\u003Cli>\u003Cstrong>Authorization\u003C/strong> - Once a user is authenticated, authorization determines the level of access they have within the system―read access, write access, admin, etc. This is where role-based access control (RBAC) comes into play.\u003C/li>\u003Cli>\u003Cstrong>Role-based access control\u003C/strong> - RBAC assigns different permission levels to individual users based on their roles within the studio. For instance, an animator might have access to their specific animation files and storyboard revisions, while a project manager might have broader access to manage project timelines and resources. This granular control ensures that users only have access to the information and functionalities necessary to perform their duties, effectively minimizing the risk of unauthorized access and data breaches.\u003C/li>\u003C/ul>\u003Ch2 id=\"2-encryption\">\u003Cstrong>2. Encryption\u003C/strong>\u003C/h2>\u003Cp>Data, even when protected by access controls, is still vulnerable during transmission and storage. This is where encryption steps in by transforming sensitive information into an unreadable format. Encryption adds an extra layer of protection, making it significantly more difficult for unauthorized individuals to gain access to sensitive information, even if they manage to bypass other security measures.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Encryption in transit\u003C/strong> - Animation studios often collaborate with remote teams or outsource specific tasks. 
This necessitates the secure transfer of sensitive data across networks. Encryption in transit scrambles data as it travels between systems, ensuring that even if intercepted by unauthorized individuals, it remains unreadable and unusable. For instance, Kitsu, our production tracker, uses HTTPS to transmit data securely over the internet.\u003C/li>\u003Cli>\u003Cstrong>Encryption at rest\u003C/strong> - Even when data is not actively being transferred, it needs robust protection in the event of a leak. Encryption at rest encrypts data while it's stored on servers, hard drives, or any other storage medium. This ensures that even if an attacker gains access to the storage device, the data itself remains inaccessible without the appropriate decryption key. \u003C/li>\u003C/ul>\u003Cp>The effectiveness of encryption hinges on the strength of the encryption algorithms employed. Studios should opt for industry-standard algorithms that utilize complex mathematical formulas to make data virtually impossible to decipher without the decryption key. Additionally, regular rotation of encryption keys further enhances security by mitigating the risk of compromise even if a key is somehow compromised. \u003C/p>\u003Cp>You can rely on third-party products as we propose at CGWire and ask for media encryption if you don't want to manage that part. Kitsu can encrypt assets using AES-256, a widely recognized encryption standard.\u003C/p>\u003Ch2 id=\"3-audit-trail\">\u003Cstrong>3. Audit Trail\u003C/strong>\u003C/h2>\u003Cp>Access control and encryption form the backbone of a secure environment while audit trails act as the vigilant eyes―monitoring and recording user activities in the form of logs. These detailed logs play a crucial role in enforcing security and fostering accountability.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Logging of user activities\u003C/strong> - Audit trails meticulously record and document all user activities within the production tracker. 
This includes actions like accessing specific files, modifying project details, or making changes to user permissions. By capturing a comprehensive timeline of user activity, audit trails provide valuable insights into who did what, when, and how. This is useful for post-mortem analysis, compliance, and security investigations, but also for alerting in case of unauthorized access.\u003C/li>\u003Cli>\u003Cstrong>Traceability and accountability\u003C/strong> - In the unfortunate event of a security breach or suspicious activity, audit trails offer a clear trail for investigation. By analyzing the logs, security personnel can identify the source of the activity, trace the sequence of events, and determine the individuals involved. This facilitates accountability by enabling studios to identify and address any unauthorized or malicious actions.\u003C/li>\u003C/ul>\u003Ch2 id=\"4-secure-integration\">\u003Cstrong>4. Secure Integration\u003C/strong>\u003C/h2>\u003Cp>The digital landscape rarely operates in isolation: animation studios often integrate their digital creation tools with various other software applications, such as rendering engines, asset management systems, and collaboration tools. These integrations allow for a seamless flow of data between different systems at the price of introducing new security considerations:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>API security measures\u003C/strong> - APIs (Application Programming Interfaces) act as the bridges that enable communication and data exchange between different software applications. To ensure the security of these connections, API key management is crucial: secure key generation, storage, rotation, authorization, etc.\u003C/li>\u003Cli>\u003Cstrong>Secure data exchange protocols\u003C/strong> - Beyond API security measures, studios should also ensure that the protocols used for data exchange are secure and reliable. 
For example, HTTPS encrypts communication between applications and SFTP (Secure File Transfer Protocol) for secure file sharing.\u003C/li>\u003C/ul>\u003Ch2 id=\"5-alerting\">\u003Cstrong>5. Alerting\u003C/strong>\u003C/h2>\u003Cp>As we saw in the audit trail section, real-time monitoring of assets, systems, and user activity allow animation studios to proactively identify potential security breaches, technical faults, or unauthorized access attempts. But monitoring alone isn't enough: it's crucial to have a robust alerting system that notifies security teams of any suspicious activities or anomalies in real-time.\u003C/p>\u003Cp>Automated alerts triggered by suspicious events or anomalies give security teams the opportunity to take swift corrective actions, minimizing the impact of a security incident:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Establish Clear Thresholds\u003C/strong> - Define critical thresholds for different performance metrics and security indicators like unusual network traffic, unauthorized login attempts, or changes to essential assets. Alerts should be triggered when these thresholds are crossed.\u003C/li>\u003Cli>\u003Cstrong>Focus on Actionable Alerts\u003C/strong> - Alerting systems should be designed to generate clear, actionable alerts that help security teams prioritize and respond effectively. Avoid excessive alerts that could lead to \"alert fatigue\" by false positives. Prioritize alerts based on severity using a system that distinguishes between critical, high, medium, and low-risk events.\u003C/li>\u003Cli>\u003Cstrong>Test and Refine Regularly\u003C/strong> - Regularly test and refine your alerting system to ensure its effectiveness. 
Adapt thresholds and configurations as needed to minimize false positives and ensure meaningful alerts.\u003C/li>\u003Cli>\u003Cstrong>Centralize Monitoring and Alerting\u003C/strong> - Instead of scattering your tools everywhere, consider a centralized platform to aggregate data from various systems and provides a consolidated view of alerts and security events.\u003C/li>\u003Cli>\u003Cstrong>Have a Clear Response Plan\u003C/strong> - Develop and document detailed procedures for investigating and responding to various types of alerts, including roles, responsibilities, and escalation processes for effective incident resolution.\u003C/li>\u003C/ul>\u003Ch2 id=\"6-disaster-recovery-and-backup\">\u003Cstrong>6. Disaster Recovery and Backup\u003C/strong>\u003C/h2>\u003Cp>While robust security measures go a long way in safeguarding assets, even the most well-prepared studios need a safety net in the face of unforeseen circumstances. This is where disaster recovery and backup plans become crucial.\u003C/p>\u003Cp>Despite stringent security measures, accidents, technical glitches, or even natural disasters can lead to data loss. \u003Cstrong>Regular backups\u003C/strong> create redundant copies of essential data, including animation assets, project files, and production tracker information. These backups serve as a digital lifeline, allowing studios to recover lost data and resume operations swiftly. At CGWire, all data is backed up daily and stored in multiple locations to ensure its availability in case of a disaster.\u003C/p>\u003Cp>\u003Cstrong>Disaster recovery plans\u003C/strong> encompass a comprehensive strategy outlining the steps to be taken in the event of a major disruption: What are the most crucial data and systems that need immediate recovery in case of a disaster? What steps are involved in restoring data and systems from backups? Which communication channels to use for all stakeholders involved in the recovery process? 
How to minimize downtime?\u003C/p>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Behind the vibrant animations lies a crucial foundation – data security. Animation studios handle a wealth of data that needs protection.\u003C/p>\u003Cp>Compliance with industry standards is not just a regulatory requirement; it is a commitment to fostering trust with clients and employees.\u003C/p>\u003Cp>Production trackers equipped with robust security features play a critical role: from access control and encryption to monitoring, alerting, and disaster recovery, they empower studios to create a secure environment where innovation can flourish.\u003C/p>\u003Cp>The animation industry is constantly evolving, and so too should its approach to security: AI poses new security challenges, studios are growing bigger and global, tools are increasingly used in cloud environments, etc. The call to action is clear: prioritize security in your animation studio. Invest in reliable production trackers with built-in security features, implement comprehensive security protocols, and embrace a culture of security awareness within your team! 
It's your studio's future at stake.\u003C/p>\u003Cp>\u003Cem>If you're looking for security best practices and advice on how to implement them, reach out to our Discord server to get advice from other studios that have already gone through the process!\u003C/em>\u003C/p>",{"uuid":1635,"comment_id":1636,"feature_image":1637,"featured":105,"visibility":10,"created_at":1638,"updated_at":1639,"custom_excerpt":1640,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1641,"primary_tag":1642,"url":1643,"excerpt":1640,"reading_time":165,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1644},"63161fc4-9689-4946-802a-bc13ea77071d","661e99afce5ed70001ab41bd","https://images.unsplash.com/photo-1584433144859-1fc3ab64a957?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDE4fHxzZWN1cml0eXxlbnwwfHx8fDE3MTMyODE1MDZ8MA&ixlib=rb-4.0.3&q=80&w=2000","2024-04-16T17:30:55.000+02:00","2026-02-20T06:03:45.000+01:00","The very nature of the animation pipeline, with its collaborative workflows and reliance on digital assets, creates a unique set of security challenges: protecting these valuable IP assets from unauthorized access, leaks, or malicious 
attacks",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/animation-asset-security/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@danny144?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Dan Nelson\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/animation-asset-security","2024-04-16T17:38:25.000+02:00",{"title":1630},"animation-asset-security","posts/animation-asset-security",[1651,1652],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":1653,"name":71,"slug":78,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":75},"5fff0e4b653a0c003924f7f0","NBJF8ZlDkKVOXw7Q-smgeRQkZJUcqrZUX7C8Nfxg_y0",{"id":1656,"title":1657,"authors":1658,"body":7,"description":7,"extension":8,"html":1660,"meta":1661,"navigation":14,"path":1672,"published_at":1673,"seo":1674,"slug":1675,"stem":1676,"tags":1677,"__hash__":1679,"uuid":1662,"comment_id":1663,"feature_image":1664,"featured":105,"visibility":10,"created_at":1665,"updated_at":1666,"custom_excerpt":1667,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1668,"primary_tag":1669,"url":1670,"excerpt":1667,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1671},"ghost/posts:artificial-intelligence-in-animation-state-of-the-art-february-2024.json","Artificial Intelligence in Animation: state of the art February 
2024",[1659],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>Artificial Intelligence (AI) was on everyone’s lips in 2023, but we all have yet to know what’s in store for 2024 and what it entails for animation studios.\u003C/p>\u003Cp>Few topics are as divisive as AI. On one hand, you find outraged artists whose artworks are being illegally ingested by algorithms. On the other, a new wave of creators leveraging AI for self-expression or monetary gains.\u003C/p>\u003Cp>Whatever your opinion is, we found it essential to give the animation industry an overview of available tools, as well as their practical use cases and how they might affect your job as an animator: not only will this article help you find ways to differentiate yourself from generic AI art, but also how to incorporate it as another tool in your toolset when it’s relevant.\u003C/p>\u003Cp>The following list is non-exhaustive but tries to cover all the steps of the production process, from concept art to rendering. Feel free to send us your recommendations!\u003C/p>\u003Ch2 id=\"1-text-generation\">\u003Cstrong>1. Text generation\u003C/strong>\u003C/h2>\u003Cp>The first and most mediatic use of AI is text generation: using artificial intelligence to automatically write text based on an initial prompt―you give it a few words, and it will generate a full text based on what it has learned from a large corpus of data. 
Large language models can complete a variety of general and specialized tasks for animation studios:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Scriptwriting ideation\u003C/strong> - To provide suggestions for scene or character descriptions, generate dialogue, or even propose ideas for plot twists.\u003C/li>\u003Cli>\u003Cstrong>Scene descriptions\u003C/strong> - Generate detailed scene descriptions to help animators visualize scenes, determine camera angles, and establish the overall mood of an animation.\u003C/li>\u003Cli>\u003Cstrong>Character backstories\u003C/strong> - By specifying key traits, animators can play with different character nuances to create more well-rounded and compelling personas.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/wvTd8njx-ETQhnsMwFkjCoPNIfwgf_cpk-iVm3YM_GxGLidzybLsS4z9zl0rV6T_7InVmlaK2LRcwz3omv4xp8lX8s5b7-TPZosTr9_PN3FS_6a__cLWh5vMNHASFuYsQPIIPk6luVlwI-B9opfvqRU\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1520\" height=\"906\">\u003C/figure>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://www.perplexity.ai/?ref=blog.cg-wire.com\">\u003Cu>Perplexity AI\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://openai.com/blog/chatgpt?ref=blog.cg-wire.com\">\u003Cu>ChatGPT\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://blog.google/technology/ai/google-gemini-ai/?ref=blog.cg-wire.com\">\u003Cu>Gemini\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"2-image-generation\">\u003Cstrong>2. Image generation\u003C/strong>\u003C/h2>\u003Cp>Text isn’t the only format AI can play with. 
Perhaps the most controversial technology of 2023, image generation models like DALL·E and MidJourney use advanced neural networks to generate images from textual prompts or from another image:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Concept art\u003C/strong> - An animation studio can quickly produce a variety of concept art\u003Cstrong>,\u003C/strong> exploring different design possibilities for a new project from just a script―or at least a textual description.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/79QQNAPxFSazsU67TTKE47nuLbFucOXqrW4XSIT7jhZlTTK1-Jl89CM2q67V_iym-T3rEPpPCw0jeOj-ncdGEU1ETTasluJ95nMllcF73Uh4o9SQ2_TLXmpiKfZbQLRGqqzPBvhSUiT2OUnaGttXZn4\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"834\" height=\"599\">\u003C/figure>\u003Cul>\u003Cli>\u003Cstrong>Turn shapes into complete illustrations\u003C/strong> - Midjourney can understand a rough sketch and turn it into a complete illustration.\u003C/li>\u003Cli>\u003Cstrong>Character and environment design\u003C/strong> - AI-generated images can be used as a starting point for character design or to explore different ideas for environments and layouts, providing a visual reference for animators to build upon.\u003C/li>\u003Cli>\u003Cstrong>Texture generation\u003C/strong> - There are already specialized models like Dream Textures (Blender plugin) for generating textures, which can be used for characters, objects, or environments.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/wFDBKTIWq6Dtc8b6tFY_WzuRdxbBMiGpR2_x-7WM4N09dKHKv0q0Xf6pXmahaeJamG2OarKQkVDNdqMtJCXEXt4yFJgjp_iweilKu0LhaU_8QybCLurkuL6mo0QeSgSiQlzmZokMAnw-uAFRin8ViAU\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"127\">\u003C/figure>\u003Cp>\u003Cem>Example of texture generation using Dream Textures (Blender)\u003C/em>\u003C/p>\u003Cp>Combined with text generation, it’s 
possible to generate entire concept books with little effort. This is obviously huge for small studios or indie animators wanting to pitch concepts to producers at little to no cost.\u003C/p>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://artisticrender.com/dream-textures-ai-texture-generator-for-blender/?ref=blog.cg-wire.com\">\u003Cu>Dream Textures\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://legacy.midjourney.com/showcase/recent/?ref=blog.cg-wire.com\">\u003Cu>Midjourney\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://openai.com/dall-e-3?ref=blog.cg-wire.com\">\u003Cu>Dalle3\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"3-upscaler\">\u003Cstrong>3. Upscaler\u003C/strong>\u003C/h2>\u003Cp>AI upscalers enhance the resolution and quality of images or videos without manual intervention.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Faster and cheaper rendering\u003C/strong> - Studios need to meet tight deadlines and deliver content fast, but rendering is often the main bottleneck in the feedback loop: AI upscalers can take low-quality renders and output high-quality previews comparable to regular renders in a fraction of the time.\u003Ca href=\"https://github.com/jarrellmark/ai_upscaler_for_blender?ref=blog.cg-wire.com#ai-upscaler-for-blender\"> \u003Cu>Blender upscaler\u003C/u>\u003C/a>, for example, can render a similar quality image down from 37 minutes to 5 minutes (86.5% faster):\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/oDluygZ66Z75GHT3qib8vb8LM2DEX1buNWZmiEEnLrMrW336K_zobc34kQUSFwYi1ONuS7jKibJQCIxC65FK6gwdxVyBwPQ52CwRwt560fUD0f0xWMGsTk-LzXSnh0SIkW7dNWvH0xtOiqjCBDiBjcc\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"889\">\u003C/figure>\u003Cul>\u003Cli>\u003Cstrong>Add realism\u003C/strong> - Upscalers like Photoshop Upscaler or Magnific AI can quickly add details to any render to 
make it look more detailed and/or realistic. This is especially useful when you need to quickly add details to a scene or to create photo-realistic characters from low-resolution images.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/vAVOY3pwRLS65yWgpREfSroemFHJONFksPpCSoqvrrlisuWe9ozfT-x7B0LuMKFq6G2oa-4_f9rfMZh7hmMLcvrSWdQUCQJ0-oHQYMqRaTOS48dOMfCz1zk-Cshs-6QG0X3e9Ip_gq2DeZIUxS9vVsY\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"944\" height=\"806\">\u003C/figure>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://github.com/jarrellmark/ai_upscaler_for_blender?ref=blog.cg-wire.com#ai-upscaler-for-blender\">\u003Cu>Blender upscaler\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://www.adobe.com/uk/creativecloud/photography/discover/image-upscale.html?ref=blog.cg-wire.com\">\u003Cu>Photoshop upscaler\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://magnific.ai/?ref=blog.cg-wire.com\">\u003Cu>Magnific AI\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"4-model-generation\">\u003Cstrong>4. Model generation\u003C/strong>\u003C/h2>\u003Cp>The technology is moving so fast we are going a step beyond images: there are already proofs of concept to turn a picture from your phone into production-ready 3D assets.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Automated asset creation\u003C/strong> - A production needs a lot of assets, and creating them is often a tedious and time-consuming process. 
AI can generate 3D models from images, enabling animators to focus more on adding details and polishing the final result.\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/GG-Ta1ifFtO7NN-WD0imA3i-Q53KUAApZz2emM9w7UfK4QOCkX6_bSC1iRMmstjGYAijCMRzcSdizUPF4CBDHL2-kN_YpwaOYtsEQvgnTIYfHiJT3mO4zxA32PQMXxe31bUi1rpHRNIkIZf2bOdwI-I\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1000\" height=\"483\">\u003C/figure>\u003Cul>\u003Cli>\u003Cstrong>Character customization and variation\u003C/strong> - AI-driven 3D model generation facilitates character customization by automatically generating variations in appearance, clothing, and accessories.\u003C/li>\u003Cli>\u003Cstrong>Procedural animation\u003C/strong> - For complex environments or large crowds, AI can generate diverse 3D models and animations procedurally at scale much more efficiently:\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-embed-card\">\u003Ciframe width=\"200\" height=\"113\" src=\"https://www.youtube.com/embed/Hqqq6LIhRb8?feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen=\"\" title=\"Create a Crowd Simulation in Blender Using AI Generated Models - Blender Tutorial\">\u003C/iframe>\u003C/figure>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://techcrunch.com/2023/11/02/stability-ais-latest-tool-uses-ai-to-generate-3d-models/?ref=blog.cg-wire.com\">\u003Cu>Stable 3D\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://www.adobe.com/creativecloud/3d-ar/campaign/pricing.html?sdid=JVLHVY6X&mv=search&mv2=paidsearch&gad_source=1&gclid=CjwKCAiAqY6tBhAtEiwAHeRopa4H7iHlU_3HjA3OuFW5qx4ZSGKxdrBtvbrylZdCnAOsVSXMyM6YvhoC2LsQAvD_BwE&ref=blog.cg-wire.com\">\u003Cu>Adobe Substance 3D\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca 
href=\"https://www.youtube.com/watch?v=Hqqq6LIhRb8&ref=blog.cg-wire.com\">\u003Cu>Create a Crowd Simulation in Blender Using AI Generated Models\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"5-video-generation\">\u003Cstrong>5. Video generation\u003C/strong>\u003C/h2>\u003Cp>Enter a prompt and an optional image, and the AI will generate a video for you!\u003C/p>\u003Cp>If you can generate images, you can also generate videos. But the main technical difficulty at the moment is to generate consistent frames at scale. The technology is still in its infancy, but it’s already possible to generate short videos with a few seconds of footage that can be used for \u003Cstrong>storyboarding\u003C/strong>.\u003C/p>\u003Cp>For example, you’ve perhaps seen viral clips of the Carrot Saga on Tiktok or YouTube, making millions of views:\u003C/p>\u003Cfigure class=\"kg-card kg-embed-card\">\u003Ciframe width=\"200\" height=\"113\" src=\"https://www.youtube.com/embed/UROWs9HTsbk?feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen=\"\" title=\"The Carrot Saga | Part 1 - The Fall of the Broccoli Empire\">\u003C/iframe>\u003C/figure>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://www.adobe.com/products/firefly.html?ref=blog.cg-wire.com\">\u003Cu>Adobe Firefly\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://3dvf.com/la-recherche-sur-l-ia-generative-chez-adobe-video-radi-raf-2023/?ref=blog.cg-wire.com\">\u003Cu>Generative AI at Adobe (French)\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://stability.ai/news/stable-video-diffusion-open-ai-video-model?ref=blog.cg-wire.com\">\u003Cu>Stable Video Diffusion\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://pika.art/?ref=blog.cg-wire.com\">\u003Cu>Pika Art\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca 
href=\"https://www.youtube.com/watch?v=UROWs9HTsbk&ref=blog.cg-wire.com\">\u003Cu>The Carrot Saga (AI animation)\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Cstrong>\u003Cu>Sora\u003C/u>\u003C/strong>\u003C/li>\u003C/ul>\u003Ch2 id=\"6-real-time-rendering\">\u003Cstrong>6. Real-time rendering\u003C/strong>\u003C/h2>\u003Cp>Real-time rendering is the process of generating animation frames in milliseconds for direct display. Rendering is traditionally a computationally-expensive task, but AI-powered rendering can provide near-immediate results for a variety of tasks:\u003C/p>\u003Cfigure class=\"kg-card kg-embed-card\">\u003Ciframe width=\"200\" height=\"113\" src=\"https://www.youtube.com/embed/-IWPDt4_jjU?feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen=\"\" title=\"Real time Rendering Performance Breakthrough Boosted by AI   DLSS 3 Supported in D5 Render\">\u003C/iframe>\u003C/figure>\u003Cul>\u003Cli>\u003Cstrong>Pre-visualization\u003C/strong> - Real-time rendering provides animators with immediate feedback on movement, expressions, and interactions to create more engaging characters and environments.\u003C/li>\u003Cli>\u003Cstrong>Interactive storytelling\u003C/strong> - With real-time rendering, animation studios can create interactive narratives where user choices dynamically influence the storyline. AI algorithms contribute to rendering alternate scenes, characters, and outcomes, providing a more immersive experience for audiences.\u003C/li>\u003Cli>\u003Cstrong>Collaborative prototyping\u003C/strong> - Real-time rendering is invaluable in the prototyping phase, enabling animators to quickly test different visual styles, lighting setups, and camera angles. 
Artists working on different aspects of a project can see immediate updates, fostering a more efficient collaborative workflow.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://www.youtube.com/watch?v=pNyIp73zva8&ref=blog.cg-wire.com\">\u003Cu>Real-Time AI Rendering with ComfyUI and 3ds Max\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://www.d5render.com/?ref=blog.cg-wire.com\">\u003Cu>D5 Render\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://www.youtube.com/watch?v=tQWzzACUbw0&ref=blog.cg-wire.com\">\u003Cu>Real-time ray tracing by Nvidia\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://kesen.realtimerendering.com/sig2023.html?ref=blog.cg-wire.com\">\u003Cu>Published papers on real-time rendering\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"7-keyframe-animation\">\u003Cstrong>7. Keyframe animation\u003C/strong>\u003C/h2>\u003Cp>Keyframe animation is a technique that involves creating a sequence of frames to define the start and end points of a movement. An AI tool like Cascadeur can save animators countless hours:\u003C/p>\u003Cfigure class=\"kg-card kg-embed-card\">\u003Ciframe width=\"200\" height=\"113\" src=\"https://www.youtube.com/embed/R3pJ2HHFaTo?feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen=\"\" title=\"Cascadeur - AI-Assisted Keyframe Animation Software\">\u003C/iframe>\u003C/figure>\u003Cul>\u003Cli>\u003Cstrong>Automated interpolation\u003C/strong> - AI-assisted interpolation is another method to generate the frames between keyframes. 
From a few poses, Cascadeur can generate realistic motion animations, including keyframes and secondary motion.\u003C/li>\u003Cli>\u003Cstrong>Rig generation\u003C/strong> - Cascadeur can also auto-generate rigs for complex 3D models.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://cascadeur.com/?ref=blog.cg-wire.com\">\u003Cu>Cascadeur\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"8-rotoscopic-animation\">\u003Cstrong>8. Rotoscopic animation\u003C/strong>\u003C/h2>\u003Cp>Rotoscopic animation is a technique that involves tracing over live-action footage to create realistic animations. AI can assist animators in the rotoscoping process while providing a variety of benefits:\u003C/p>\u003Cfigure class=\"kg-card kg-embed-card\">\u003Ciframe width=\"200\" height=\"113\" src=\"https://www.youtube.com/embed/e818LgnJ9rI?feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen=\"\" title=\"Revealing How CodeMiko Is Made\">\u003C/iframe>\u003C/figure>\u003Cul>\u003Cli>\u003Cstrong>Vtuber\u003C/strong> - Combined with real-time rendering, AI-assisted rotoscope animation can be used to create virtual avatars for live streaming or other video content.\u003C/li>\u003Cli>\u003Cstrong>Automatic frame detection\u003C/strong> - AI algorithms can automatically detect key frames in live-action footage, streamlining the initial phase of the rotoscoping process. 
This reduces the manual effort required for frame-by-frame tracing.\u003C/li>\u003Cli>\u003Cstrong>Tracing assistance\u003C/strong> - AI can assist animators by automating certain tracing tasks like outlining characters or objects.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://live3d.io/vtuber_maker?ref=blog.cg-wire.com\">\u003Cu>VTuber Maker\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"9-image-recognition\">\u003Cstrong>9. Image recognition\u003C/strong>\u003C/h2>\u003Cp>Image recognition is the process of identifying and classifying objects within images.&nbsp;\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh7-us.googleusercontent.com/cKQc3uKYI5x81TPlKMGcCWH9Zx2zhCycqnYKw2J0RjZliCiIY4mXiX2Fa8IGkkSSg1HIix1trB5QACARX3GP-EJ32SPmToOQqThaIf_jYb97uteWKjhsf0yzG2Dhw-roD6x67kVuC_UShQZ4wFsLLCc\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"607\" height=\"456\">\u003C/figure>\u003Cul>\u003Cli>\u003Cstrong>Scene breakdown and analysis\u003C/strong> - AI algorithms can analyze complex scenes, automatically identifying and categorizing elements like characters, objects, backgrounds, and lighting conditions. This feature simplifies the scene breakdown process, providing a detailed analysis of each frame and facilitating a more efficient understanding of the visual components within a scene for faster reviews.\u003C/li>\u003Cli>\u003Cstrong>Annotations\u003C/strong> - Combined with text generation tools, AI can automatically annotate storyboards with descriptions or notes to simplify the communication between different teams involved in the animation process, ensuring that everyone has a clear understanding of the intended visual and narrative elements in each preview frame.\u003C/li>\u003Cli>\u003Cstrong>Facial recognition and expression analysis\u003C/strong> - Animators can leverage motion tracking for realistic animations. 
This is how Vtuber avatars implement lip-syncing or hand-syncing.\u003C/li>\u003Cli>\u003Cstrong>Quality control and error detection\u003C/strong> - AI can be employed for quality control―automatically detecting anomalies, errors, or inconsistencies within images to ensure a higher level of accuracy in the animation process and help studios identify and rectify issues early in the production pipeline.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://openai.com/gpt-4?ref=blog.cg-wire.com\">\u003Cu>GPT4 With Vision\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://roboflow.com/?ref=blog.cg-wire.com\">\u003Cu>Roboflow\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"10-voice-acting\">\u003Cstrong>10. Voice acting\u003C/strong>\u003C/h2>\u003Cp>AI-assisted voice acting involves generating or enhancing voice performances for animated characters or other audio content.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Text-to-speech synthesis\u003C/strong> - AI can convert written text into spoken words with natural-sounding intonation and expression. Animation studios can use TTS for quick prototyping, generating placeholder voiceovers, or experimenting with dialogue variations before engaging human voice actors.\u003C/li>\u003Cli>\u003Cstrong>Voice cloning and replication\u003C/strong> - AI can analyze and replicate a specific voice actor's style, tone, and nuances―effectively cloning voices. 
This feature is useful for maintaining consistency across projects or creating additional lines of dialogue without requiring the original voice actor's availability.\u003C/li>\u003Cli>\u003Cstrong>Multilingual voice generation\u003C/strong> - AI-powered voice generation can produce speech in multiple languages, offering flexibility for global audiences: animation studios can easily localize content, ensuring that characters speak authentically in different languages without the need for extensive manual voice recording.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Links and references\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://elevenlabs.io/?ref=blog.cg-wire.com\">\u003Cu>ElevenLabs\u003C/u>\u003C/a>\u003C/li>\u003Cli>\u003Ca href=\"https://platform.openai.com/docs/models/tts?ref=blog.cg-wire.com\">\u003Cu>OpenAI Text-to-Speech\u003C/u>\u003C/a>\u003C/li>\u003C/ul>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>AI is already transforming the animation industry, and it will continue to do so in the coming years. While it’s still in its early days, we can already see the potential of AI for animation studios, from concept art to rendering and distribution.\u003C/p>\u003Cp>For animation artists, AI is a powerful tool to streamline the production process and allow for more creativity regardless of your initial skills. \u003C/p>\u003Cp>It’s important to remember that AI is not a replacement for human creativity but another tool in the animator’s toolkit, providing new ways to express ideas and bring them to life: we can expect in the near future a new wave of one-person animation studios, but also more partnerships between studios, and of course more projects thanks to the decrease in labor costs. \u003C/p>\u003Cp>Last but not least AI platforms will have to deal with author rights and find the right fit to be widely spread among productions. The art generation cannot thrive without the acknowledgment of artists. 
Once these aspects are cleared, creativity will benefit from this new technology for the pleasure of our eyes!\u003C/p>\u003Cp>\u003Cem>Make sure to \u003C/em>\u003Ca href=\"https://discord.com/invite/VbCxtKN?ref=blog.cg-wire.com\">\u003Cem>join us on Discord\u003C/em>\u003C/a>\u003Cem> if you want to discuss the future of creative pipelines or just want to hang out with 1000+ animation experts from all over the world!\u003C/em>\u003C/p>",{"uuid":1662,"comment_id":1663,"feature_image":1664,"featured":105,"visibility":10,"created_at":1665,"updated_at":1666,"custom_excerpt":1667,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1668,"primary_tag":1669,"url":1670,"excerpt":1667,"reading_time":240,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":1671},"2d40e247-f494-478f-aea2-1b1a5bbd9d2f","65d4a90d9836da000132f531","https://images.unsplash.com/photo-1678390210450-22bd7664dc05?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDMyfHxBcnRpZmljaWFsJTIwaW50ZWxsaWd8ZW58MHx8fHwxNzA4NDM4NTU1fDA&ixlib=rb-4.0.3&q=80&w=2000","2024-02-20T14:28:45.000+01:00","2026-03-26T10:26:23.000+01:00","Few topics are as divisive as AI. On one hand, you find outraged artists whose artworks are being illegally ingested by algorithms. 
On the other, a new wave of creators leveraging AI for self-expression or monetary gains.",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/artificial-intelligence-in-animation-state-of-the-art-february-2024/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@gabimedia?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Gabriel Vasiliu\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/artificial-intelligence-in-animation-state-of-the-art-february-2024","2024-02-21T15:39:57.000+01:00",{"title":1657},"artificial-intelligence-in-animation-state-of-the-art-february-2024","posts/artificial-intelligence-in-animation-state-of-the-art-february-2024",[1678],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"Y-7ePycciSbc9UGIPUFlKxhNxxZa6tFJO9fPqLaYQFg",{"id":1681,"title":1682,"authors":1683,"body":7,"description":7,"extension":8,"html":1685,"meta":1686,"navigation":14,"path":1696,"published_at":1697,"seo":1698,"slug":1699,"stem":1700,"tags":1701,"__hash__":1703,"uuid":1687,"comment_id":1688,"feature_image":1689,"featured":105,"visibility":10,"created_at":1690,"updated_at":1691,"custom_excerpt":1692,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1693,"primary_tag":1694,"url":1695,"excerpt":1692,"reading_time":1131,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:prism-and-kitsu-integration-a-full-stack-for-your-production-workflow.json","Prism and Kitsu Integration: A  Full Stack For Your Production Workflow",[1684],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>The seamless integration of various tools and platforms has become necessary in the 
ever-evolving domain of animation and VFX productions. For efficient communication, every studio needs a production tracker and an asset manager. The former allows managing tasks and delivery validation, while the other takes care of files and creative tools.\u003C/p>\u003Cp>The integration between Prism Pipeline, a popular asset management system, and Kitsu, our collaboration platform, plugs together the two main parts of a production pipeline. This way, it ensures a complete suite to manage your studio. \u003C/p>\u003Cp>Now, let's see how this new stack can bring you new benefits and features!\u003C/p>\u003Ch2 id=\"introduction-to-prism-pipeline-and-kitsu\">Introduction to Prism Pipeline and Kitsu\u003C/h2>\u003Cp>Before we explore the integration, I would like to give you a few words about Prism and Kitsu.\u003C/p>\u003Ch3 id=\"prism-pipeline\">Prism Pipeline\u003C/h3>\u003Cp>Prism Pipeline is an open-source asset management system that has gained popularity for its versatility and user-friendliness. Designed to streamline the workflow of CG artists, it allows them to manage files and assets right from the DCC. \u003Cbr>\u003Ca href=\"https://prism-pipeline.com/?ref=blog.cg-wire.com\">https://prism-pipeline.com/\u003C/a>\u003C/p>\u003Ch3 id=\"kitsu\">Kitsu\u003C/h3>\u003Cp>Kitsu, our collaboration platform, allows teams to track the progress of their projects in real-time, offering tools for assignments, data sharing, and reviews.\u003Cbr>\u003Ca href=\"cg-wire.com/kitsu\">https://cg-wire.com/kitsu\u003C/a>\u003C/p>\u003Ch2 id=\"the-benefits-of-the-integration\">The benefits of the integration\u003C/h2>\u003Cp>To make it short, integrating Prism Pipeline and Kitsu will put your studio under steroïds. We will list below the main advantages it will bring to your studio.\u003C/p>\u003Cp>NB: As a side note, the Prism team made the Kitsu and Prism integration a paying plugin. 
So it will require extra bucks from your side to make it work.\u003C/p>\u003Ch3 id=\"seamless-communication\">Seamless Communication\u003C/h3>\u003Cp>The communication between different teams is streamlined: artists and managers can exchange information and feedback directly. Information can be sent right from DCCs or via the web platform. People can share data and information from anywhere. This means fewer errors and misunderstandings.\u003C/p>\u003Ch3 id=\"managed-asset-management-lifecycle\">Managed Asset Management Lifecycle\u003C/h3>\u003Cp>Prism offers you the framework to build and store your assets. From the Kitsu data, it will be able to organize all your files properly. When a scene is ready, Prism can push the playblasts generated from the scene into Kitsu. The Director will be able to perform reviews into Kitsu (or RV) and send the feedback right to the artist. Your asset lifecycle is under control.\u003C/p>\u003Ch3 id=\"real-time-progress-tracking\">Real-Time Progress Tracking\u003C/h3>\u003Cp>The integration allows for real-time progress tracking, with team members being able to monitor the status of various tasks and milestones directly within Kitsu. This facilitates better planning and resource allocation, ensuring that projects stay on track and within the stipulated timelines.\u003C/p>\u003Ch3 id=\"enhanced-feedback-loop\">Enhanced Feedback Loop\u003C/h3>\u003Cp>Supervisors and team lead can quickly provide feedback and approve assets directly within Kitsu or RV. They speed up the review process and ensure that feedback is implemented promptly and accurately. It allows us to perform more iterations.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"how-the-integration-work\">How the integration work\u003Cbr>\u003C/h2>\u003Ch3 id=\"kitsu-project-synchronization\">\u003Cstrong>Kitsu Project Synchronization\u003C/strong>\u003C/h3>\u003Cp>The first step is to create your production into Kitsu by listing all your assets, shots, and tasks. 
Then, you have to set up Prism on all artist's workstations. Once done, you can install the Kitsu plugin into Prism. It will sync your project with Prism installations. All additions done to your Kitsu project will be reflected in Prism.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/kitsu_prism_1.webp\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1501\" height=\"844\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2023/11/kitsu_prism_1.webp 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2023/11/kitsu_prism_1.webp 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/kitsu_prism_1.webp 1501w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Ch3 id=\"file-management\">\u003Cstrong>File Management\u003C/strong>\u003C/h3>\u003Cp>Prism will handle the file organization. With the Kitsu plugin activated, your file hierarchy will be created from the elements set into Kitsu. Your artists are ready to work by opening files matching Kitsu tasks. Prism is capable of using any CGI software to deal with your files. Each task can handle several files and multiple versions for each. You can use multiple tools to work on a task. 
It will cover all your needs to build any kind of scene.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/image.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1436\" height=\"960\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2023/11/image.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2023/11/image.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/image.png 1436w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/image-1.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1433\" height=\"956\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2023/11/image-1.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2023/11/image-1.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/image-1.png 1433w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Ch3 id=\"publishing\">\u003Cstrong>Publishing\u003C/strong>\u003C/h3>\u003Cp>Once done, with a few clicks, you can generate a playblast for your review session. 
With the integration, Prism can create comments and publish the result into Kitsu by creating a new review revision.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/image-2.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1246\" height=\"1194\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2023/11/image-2.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2023/11/image-2.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/image-2.png 1246w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Ch3 id=\"get-feedback\">Get feedback\u003C/h3>\u003Cp>Via Kitsu, Supervisors and Directors can provide feedback. This feedback can be pulled into Prism. That way, artists can stay in their tools while being aware of what is expected from their work or if it has been validated.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/kitsu_prism_4.webp\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1501\" height=\"844\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2023/11/kitsu_prism_4.webp 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2023/11/kitsu_prism_4.webp 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/kitsu_prism_4.webp 1501w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Ch3 id=\"rv-integration\">RV Integration\u003C/h3>\u003Cp>Last but not least, Directors can plug RV into the Kitsu playlists. Once the playlist is prepared into Kitsu, it can be downloaded to RV via Prism. 
All comments written in RV are sent to Kitsu and, that way, pushed to artists' Prism installations.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/kitsu_prism_7.webp\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1501\" height=\"844\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2023/11/kitsu_prism_7.webp 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2023/11/kitsu_prism_7.webp 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/kitsu_prism_7.webp 1501w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Ch2 id=\"full-illustration\">Full illustration\u003C/h2>\u003Cp>Gaurav Mathur, Solutions Architect at Epic Games and former CG Supervisor at TheMill, presented a complete integration of both technologies. You will see how he set up an entire pipeline based on both technologies.\u003C/p>\u003Cfigure class=\"kg-card kg-embed-card\">\u003Ciframe width=\"200\" height=\"113\" src=\"https://www.youtube.com/embed/MtXxnvgQYko?start=18452&amp;feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen=\"\" title=\"Unreal Fest Day 1 | Livestream 3\">\u003C/iframe>\u003C/figure>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>The integration between Prism Pipeline and Kitsu constitutes a full project management set. By amalgamating asset management and collaboration functionalities, it brings all the features needed to create seamless communication into your productions.\u003C/p>\u003Cp>These new capabilities are crucial when you work with several studios and with remote artists. Working with bigger distributed teams is now possible. 
With the Prism / Kitsu integration, scaling your studio has never been easier!\u003C/p>\u003Cp>\u003Cem>Make sure to \u003C/em>\u003Ca href=\"https://discord.com/invite/VbCxtKN?ref=blog.cg-wire.com\">\u003Cem>join us on Discord\u003C/em>\u003C/a>\u003Cem> if you need additional help with creative project collaboration or just want to hang out with 1000+ animation experts from all over the world!\u003C/em>\u003C/p>",{"uuid":1687,"comment_id":1688,"feature_image":1689,"featured":105,"visibility":10,"created_at":1690,"updated_at":1691,"custom_excerpt":1692,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1693,"primary_tag":1694,"url":1695,"excerpt":1692,"reading_time":1131,"access":14,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"1fa8e325-29b1-4863-b928-71f8e29a65a2","650b33d788d73200015d6d26","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/11/kitsu-prism.png","2023-09-20T20:03:03.000+02:00","2026-03-26T10:54:26.000+01:00","The seamless integration of various tools and platforms has become necessary in the ever-evolving domain of animation and VFX productions. For efficient communication, every studio needs a production tracker and an asset manager. 
",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/prism-and-kitsu-integration-a-full-stack-for-your-production-workflow/","/posts/prism-and-kitsu-integration-a-full-stack-for-your-production-workflow","2023-11-29T12:03:58.000+01:00",{"title":1682},"prism-and-kitsu-integration-a-full-stack-for-your-production-workflow","posts/prism-and-kitsu-integration-a-full-stack-for-your-production-workflow",[1702],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"iF-A5z32TAby0uN3Y_t5bFUTFCCxI9H8c1rzt4ElGTY",{"id":1705,"title":1706,"authors":1707,"body":7,"description":7,"extension":8,"html":1709,"meta":1710,"navigation":14,"path":1724,"published_at":1714,"seo":1725,"slug":1726,"stem":1727,"tags":1728,"__hash__":1730,"uuid":1711,"comment_id":1712,"feature_image":1713,"featured":105,"visibility":10,"created_at":1714,"updated_at":1715,"custom_excerpt":1716,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1717,"primary_tag":1718,"url":1719,"excerpt":1716,"reading_time":1720,"access":14,"comments":105,"og_image":1721,"og_title":1722,"og_description":1716,"twitter_image":1721,"twitter_title":1722,"twitter_description":
1716,"meta_title":7,"meta_description":1723,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:list-of-open-source-technologies-for-your-cg-pipeline.json","List of Open Source Technologies for Your CG Pipeline (2026)",[1708],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>When we started CGWire we were looking for open-source technologies to go faster to build CG pipelines. We found numerous of them but it took us a lot of time and we didn’t have a central repository to share them.\u003C/p>\u003Cp>At the same time we noticed that in the world of stoftware engineering, it’s a common practice to build “\u003Ca href=\"https://github.com/sindresorhus/awesome?ref=blog.cg-wire.com\" rel=\"noopener\">awesome\u003C/a>” lists of technologies and resources related to a field. Those lists are parcitpative, everyone can contribute by bringing its additions. It makes things easier for everyone: users can find resources easily and authors can reference their work.\u003C/p>\u003Cp>It was obvious to us that it was a missing part of the CG world. So, we decided to create an awesome list of free and open source technologies related to CG pipeline. It already contains more than 70 technologies and several people contributed to it. 
If you are interested in it, we invite you to visit it and add your own links to it by clicking on the link below:\u003C/p>\u003Cfigure class=\"kg-card kg-bookmark-card\">\u003Ca class=\"kg-bookmark-container\" href=\"https://github.com/cgwire/awesome-cg-pipeline?ref=blog.cg-wire.com\">\u003Cdiv class=\"kg-bookmark-content\">\u003Cdiv class=\"kg-bookmark-title\">cgwire/awesome-cg-pipeline\u003C/div>\u003Cdiv class=\"kg-bookmark-description\">awesome-cg-pipeline - List of open-source technologies that help in the process of building a pipeline for CG…\u003C/div>\u003Cdiv class=\"kg-bookmark-metadata\">\u003Cspan class=\"kg-bookmark-author\">github.com\u003C/span>\u003C/div>\u003C/div>\u003C/a>\u003C/figure>\u003Cp>\u003Cem>This blog is dedicated to CG pipeline and production management. If you are interested in software strategy, you will probably enjoy our articles. We have a Discord channel too where you can discuss about your own problems / solutions and learn from others.\u003C/em>\u003C/p>",{"uuid":1711,"comment_id":1712,"feature_image":1713,"featured":105,"visibility":10,"created_at":1714,"updated_at":1715,"custom_excerpt":1716,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1717,"primary_tag":1718,"url":1719,"excerpt":1716,"reading_time":1720,"access":14,"comments":105,"og_image":1721,"og_title":1722,"og_description":1716,"twitter_image":1721,"twitter_title":1722,"twitter_description":1716,"meta_title":7,"meta_description":1723,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ac5807a2-2296-4940-b82c-dd755ef95af6","767ef36a7d44","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-YO1Kkcm9-zq28_f6PeUmzA.png","2018-05-14T11:11:12.000+02:00","2026-03-27T10:50:03.000+01:00","When we started CGWire we were looking for open-source technologies to go faster to build CG pipelines. 
We found numerous of them but it…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/list-of-open-source-technologies-for-your-cg-pipeline/",1,"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1014/1-YO1Kkcm9-zq28_f6PeUmzA.png","List of Open Source Technologies for Your CG Pipeline","When we started CGWire we were looking for open-source technologies to go faster to build CG pipelines. We found numerous of them but it took us a lot of time and we didn’t have a central 
repository…","/posts/list-of-open-source-technologies-for-your-cg-pipeline",{"title":1706},"list-of-open-source-technologies-for-your-cg-pipeline","posts/list-of-open-source-technologies-for-your-cg-pipeline",[1729],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"JKASlyau6u-pvTEkHu5z8TpUhDQvm77frkvyIHbL3e8",{"id":1732,"title":1733,"authors":1734,"body":7,"description":7,"extension":8,"html":1736,"meta":1737,"navigation":14,"path":1750,"published_at":1741,"seo":1751,"slug":1752,"stem":1753,"tags":1754,"__hash__":1756,"uuid":1738,"comment_id":1739,"feature_image":1740,"featured":105,"visibility":10,"created_at":1741,"updated_at":1742,"custom_excerpt":1743,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1744,"primary_tag":1745,"url":1746,"excerpt":1743,"reading_time":1131,"access":14,"comments":105,"og_image":1747,"og_title":1748,"og_description":1743,"twitter_image":1747,"twitter_title":1748,"twitter_description":1743,"meta_title":7,"meta_description":1749,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:agile-cg-pipeline.json","On Agile CG Pipelines (2026)",[1735],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>Agile methodologies are well known among the web industry. They are designed to handle unpredictability of building software, especially in fast-changing environment. They could fit very well with the development of a CG studio pipeline. 
Nevertheless few actually apply them and instead, prefer to respond to the continuous flow of unexpected needs on a day-to-day basis. This occurs because the stress is very high and what is built during a production is not easily reusable.\u003C/p>\u003Cp>On the opposite side, sometimes studios want to build big projects from scratch that cover all aspects of a CG pipeline. It takes years of development, and most of the time leads to many frustration to finally never ship.\u003C/p>\u003Cp>\u003Cstrong>Essence of Agile methodologies\u003C/strong>\u003C/p>\u003Cp>So why use agile methodologies while developing your pipeline? Agile is well fitted to make you more comfortable with unpredictable environments and it manages the chaos well.\u003C/p>\u003Cp>The first thing to take in consideration is that to ensure a project success, the most crucial part is communication. Every one needs to always be on the same page. That’s what agile does. The main idea behind it, is to keep everyone on track by ensuring that all shareholders communicate properly through regular rituals.\u003C/p>\u003Cp>Agile is aimed at being adapted for your team. What works for a studio or company may not work for you. No matter what you start with, it’s better to begin from the two most widely used frameworks: SCRUM and Kanban. Then we encourage you to remove, modify or add rituals to make it fit better with your culture.\u003C/p>\u003Cp>\u003Cstrong>Kanban\u003C/strong>\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-iSHSRId5FAmZz3lnMqebMA.png\" class=\"kg-image\" alt loading=\"lazy\">\u003C/figure>\u003Cp>Kanban is well suited for maintenance and it can be utilized to deal with your daily CG studios need. It is simple, everyone follows the same board made of post-it columns. The left column is continuously alimented by demands. 
Each column represent a step of the build process (code, test, validation for instance). You show progress on a task by moving the related post-it in the column describing the task status. And it comes with one simple rule: you are not allowed to have more than x cards at the same step. So, you stop everything when there is bottleneck in a column until it is removed.\u003C/p>\u003Cp>From time to time meetings are organized to discuss the current state of development and to see what could be improved.\u003C/p>\u003Cp>Overall, the goal is to make sure that everyone knows what’s happening and nothing get stuck waiting for something. It is great for usage on a daily basis but it does make it harder to envision the long term.\u003C/p>\u003Cp>\u003Cstrong>SCRUM\u003C/strong>\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-e8poplNLosYKCYVKMmlLKA.png\" class=\"kg-image\" alt loading=\"lazy\">\u003C/figure>\u003Cp>\u003Ca href=\"https://www.scrum.org/resources/what-is-scrum/?ref=blog.cg-wire.com\" rel=\"noopener\">SCRUM\u003C/a> is better for mid-long term developments. It makes sure that everyone agree on the priority and includes regular demonstration of the progress. Every thing follows the rythm of the sprints. What will be done is decided at the beginning of a sprint and doesn’t change until the next sprint.\u003C/p>\u003Cp>Precise roles are given to each member of the team: product owner (handles priorities and functional requirements), developer (build the product) and scrum masters (make sure that the agile methodology is well understood and properly applied).\u003C/p>\u003Cp>You can roughly determine what will be achieved during upcoming sprints. That way you can build a roadmap that can be communicated to all departments and to the top management.\u003C/p>\u003Cp>SCRUM is great for to keep people focused and motivated. 
It’s good to report to your hierarchy too but it’s not really designed to deal with emergency.\u003C/p>\u003Cp>\u003Cstrong>Conclusion\u003C/strong>\u003C/p>\u003Cp>Agile means adaptive. So it’s no surprise it could be used in a production environment too. At CGWire, we highly recommend you to apply them to build your pipeline and production tools. It will make every department interaction much more fluid.\u003C/p>\u003Cp>Once agile is accepted by everyone, the biggest challenge you will face will be to apply rigorously the rituals. You will notice that it’s very easy to dismiss them (non-tech people tend to always find a good reason to not be there). But they are the backbone of the methodology, once your meeting are not planned every time on the same schedule, things will fall apart and you will be back to your day-to-day organization. So be tough with rituals.\u003C/p>\u003Cp>As a take away, this is our advice to you: start from Kanban method for the running needs and the SCRUM method for more reusable development. Adapt them to your culture and your schedule. Once you are comfortable \u003Cbr>with it, be very strict and be disciplined. Making a movie is hard, building software is hard, if you want to be above the competition, you can’t afford approximation.\u003C/p>\u003Cp>\u003Cem>This blog is dedicated to CG pipeline and production management. Interested in software strategy and animation movies? You will probably enjoy \u003C/em>\u003Ca href=\"https://medium.com/@cgwire/?ref=blog.cg-wire.com\">\u003Cem>all our articles\u003C/em>\u003C/a>\u003Cem>. 
We have a \u003C/em>\u003Ca href=\"http://forum.cg-wire.com/?ref=blog.cg-wire.com\" rel=\"nofollow noopener noopener noopener\">\u003Cem>forum\u003C/em>\u003C/a>\u003Cem> too where you can discuss about your own problems/solutions and learn from others.\u003C/em>\u003C/p>",{"uuid":1738,"comment_id":1739,"feature_image":1740,"featured":105,"visibility":10,"created_at":1741,"updated_at":1742,"custom_excerpt":1743,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1744,"primary_tag":1745,"url":1746,"excerpt":1743,"reading_time":1131,"access":14,"comments":105,"og_image":1747,"og_title":1748,"og_description":1743,"twitter_image":1747,"twitter_title":1748,"twitter_description":1743,"meta_title":7,"meta_description":1749,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"cec8ee6b-403f-4087-92cb-23e89c7e63e3","3005f5f7f7f1","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/2560/1-nt3EFutd4ODGvnzJ_yK_cw.jpeg","2018-03-20T01:40:41.000+01:00","2026-02-20T06:03:44.000+01:00","Agile methodologies are well known among the web industry. 
They are designed to handle unpredictability of building software, especially in…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/agile-cg-pipeline/","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-nt3EFutd4ODGvnzJ_yK_cw.jpeg","Agile CG Pipeline","Agile methodologies are well known among the web industry. They are designed to handle unpredictability of building software, especially in fast-changing environment. 
They could fit very well with…","/posts/agile-cg-pipeline",{"title":1733},"agile-cg-pipeline","posts/agile-cg-pipeline",[1755],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"Hy5P9q7ZcSUVso_iPpOG__AM56uulDnevTcKLm2feLA",{"id":1758,"title":1759,"authors":1760,"body":7,"description":7,"extension":8,"html":1762,"meta":1763,"navigation":14,"path":1775,"published_at":1767,"seo":1776,"slug":1777,"stem":1778,"tags":1779,"__hash__":1781,"uuid":1764,"comment_id":1765,"feature_image":1766,"featured":105,"visibility":10,"created_at":1767,"updated_at":1768,"custom_excerpt":1769,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1770,"primary_tag":1771,"url":1772,"excerpt":1769,"reading_time":48,"access":14,"comments":105,"og_image":1766,"og_title":1773,"og_description":1769,"twitter_image":1766,"twitter_title":1773,"twitter_description":1769,"meta_title":7,"meta_description":1774,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-shot-casting.json","Shot Casting In A CG Pipeline (2026)",[1761],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>Every shot in a CG production is made of assets built mainly by Modeling and Setup departments. These assets can be of any kind: character, props, environment, and the list goes on. The asset list can be roughly determined in the storyboard and become more precise at the layout step. 
But it is something that can change until the very end, even after rendering.\u003C/p>\u003Cp>Every fabrication step requires a clear list of what is present in the scene to allow efficient among the departments. Which is why in the following, we will discuss the levels of precision you can have and in which case it can be useful to make transition smoother.\u003C/p>\u003Ch4 id=\"list-of-assets-present-in-the-shot\">List of assets present in the shot\u003C/h4>\u003Cp>The most basic thing you can have is the list of assets that will appear in a shot. This list will be used mainly by the production managers. It will allow them to know which shots are impacted by a change on an asset. It is also a good way to estimate the importance of an asset all along the movie.\u003C/p>\u003Cp>The casting is great for Pipeline TDs too. From that information, they can make a simple scene builder that will import automatically all the assets present in the scene. TDs can also help Production Managers to build the list with some tools. Because building this listing can take a lot of time.\u003C/p>\u003Cp>\u003Cem>Exemple : Shot 01 is made of Agent327, SuperEvil, Gun, Cars, Street\u003C/em>\u003C/p>\u003Ch4 id=\"list-and-number-of-assets-present-in-a-shot\">List and number of assets present in a shot\u003C/h4>\u003Cp>The next information you can add is the number of assets present in the shot. It’s not very important for production management but it will make scene builders more accurate. The artist won’t need to duplicate an imported model and will be able to guess how complex the scene will be.\u003C/p>\u003Cp>\u003Cem>Exemple : Shot 01 is made of Agent327 (1), SuperEvil (1), Gun (1), Cars (3) Street (1)\u003C/em>\u003C/p>\u003Ch4 id=\"list-of-instance-of-assets-present-in-a-shot\">List of instance of assets present in a shot\u003C/h4>\u003Cp>The most accurate solution is to store an entry for each asset instance present in a shot. 
It can be cumbersome, so we recommend to use this solution only if you have enough time to do it properly.\u003C/p>\u003Cp>With the instance list, you will have two new possibilities :\u003C/p>\u003Cul>\u003Cli>Track the work done and the state of a single instance. It’s common to have to change a model or a setup only for a given scene. Production managers can know which asset caused extra work on a shot.\u003C/li>\u003Cli>Generate and import files per instance. It will allow you to not recompute too much things in case you change a single element of your scene.\u003C/li>\u003C/ul>\u003Cp>Listing all instances will bring a lot to your automation and communication. But it can make things more complex and will require a significant extra amount of work. So be careful when including this kind of data in your production.\u003C/p>\u003Cp>\u003Cem>Exemple : Shot 01 is made of Agent327–1-wounded, SuperEvil-1, Gun-1, Car-1-blue, Car-2-red, Car-3-broken, Street-1\u003C/em>\u003C/p>\u003Ch4 id=\"final-words\">Final words\u003C/h4>\u003Cp>In this article we covered the casting of a single shot. But sometimes you need to see the casting at a higher level : at the sequence level or at the episode level (especially for TV shows). Of course, you can apply the same principle as well, but the basic listing should be enough.\u003C/p>\u003Cp>That’s it about casting! It may sound simple but it is a tedious thing to manage and it’s one of the most important information to share on a production. That’s why we decided to cover it. But we are certain that you probably have your own point of view about it. So feel free to share your opinion in the comments!\u003C/p>\u003Cp>\u003Cem>Did you like this article? 
We invite you to read our blog post about \u003C/em>\u003Ca href=\"https://medium.com/@cgwire/cg-pipeline-asset-management-and-dependencies-634b28a1a49a?ref=blog.cg-wire.com\">\u003Cem>asset management and dependencies\u003C/em>\u003C/a>\u003Cem>, it is related and could be helpful to understand how a good pipeline could save your production!\u003C/em>\u003C/p>",{"uuid":1764,"comment_id":1765,"feature_image":1766,"featured":105,"visibility":10,"created_at":1767,"updated_at":1768,"custom_excerpt":1769,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1770,"primary_tag":1771,"url":1772,"excerpt":1769,"reading_time":48,"access":14,"comments":105,"og_image":1766,"og_title":1773,"og_description":1769,"twitter_image":1766,"twitter_title":1773,"twitter_description":1769,"meta_title":7,"meta_description":1774,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"3440a99d-3aef-43e4-9605-599226a40374","6410cb090b12","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-PjYKKKZV0g3la2qV43qKtw.jpeg","2017-12-12T02:25:14.000+01:00","2026-02-20T06:04:12.000+01:00","Every shot in a CG production is made of assets built mainly by Modeling and Setup departments. 
These assets can be of any kind: character…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/cg-pipeline-shot-casting/","CG Pipeline: Shot Casting","Every shot in a CG production is made of assets built mainly by Modeling and Setup departments. These assets can be of any kind: character, props, environment, and the list goes on. The asset list…","/posts/cg-pipeline-shot-casting",{"title":1759},"cg-pipeline-shot-casting","posts/cg-pipeline-shot-casting",[1780],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"vNncf9lI8miSF1_Nw9P-mfr1hpOvhqOVvQIMvunTgs4",{"id":1783,"title":1784,"authors":1785,"body":7,"description":7,"extension":8,"html":1787,"meta":1788,"navigation":14,"path":1801,"published_at":1792,"seo":1802,"slug":1803,"stem":1804,"tags":1805,"__hash__":1807,"uuid":1789,"comment_id":1790,"feature_image":1791,"featured":105,"visibility":10,"created_at":1792,"updated_at":1793,"custom_excerpt":1794,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":1795,"canonical_url":7,"primary_author":1796,"primary_tag":1797,"url":1798,"excerpt":1794,"reading_time":1131,"access":14,"comments":105,"og_image":1791,"og_title":1799,"og_description":1794,"t
witter_image":1791,"twitter_title":1799,"twitter_description":1794,"meta_title":7,"meta_description":1800,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-in-house-development-vs-commercial-solution.json","In-House Development vs Commercial Solution For A CG Pipeline (2026)",[1786],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>We all have our own way of working. Depending on our context, our team or our culture we can approach the exact same problem with very different styles. This explains why such diversity exists in the way productions are built. Which is why Pipeline TDs tend to make tools that match the specificities of their studio; it makes their pipeline more accurate and powerful.\u003C/p>\u003Cp>\u003Cem>Disclaimer: CGWire proposes off-the-shelf solutions but in this article, we’ll do our best to stay objective!\u003C/em>\u003C/p>\u003Cp>However, it’s undeniable that sometimes, buying off-the-shelf solutions is much faster, plus it can come with extra features that could not have been developed in a pinch of time. It may sound better and easier but not without a downside: the fact that you will have to adapt to the software. So what do you do? Well, in this article, we are going to explore the pros and cons of both ways in order to help you taking the decision that is right for you.\u003C/p>\u003Ch4 id=\"in-house-development\">In-house development\u003C/h4>\u003Cp>First off, what we mean by In-house development: it’s when you build a tool from scratch and use it. Only your team knows the internals. 
It could hardly be shipped in another studio.\u003C/p>\u003Cp>\u003Cem>Pros\u003C/em>\u003C/p>\u003Cul>\u003Cli>In-house tools match accurately the needs of a given production or a given process\u003C/li>\u003Cli>You notice the benefits quickly\u003C/li>\u003Cli>They are easy to learn: since you made it, it’s easier to explain how it works\u003C/li>\u003Cli>It can be a competitive advantage for your studio\u003C/li>\u003Cli>Through iterations, it can lead to great tools\u003C/li>\u003C/ul>\u003Cp>\u003Cem>Cons\u003C/em>\u003C/p>\u003Cul>\u003Cli>Lesser quality: most of the time in-house software are not as battle-tested as commercial ones and they don’t follow a strict QA process that a software vendor can have\u003C/li>\u003Cli>It may become costly over time : managing the maintenance can be very time consuming\u003C/li>\u003Cli>Once you make a tool for a production, it may be useless for the next ones. So many in-house development will be trashed. It’s not really an issue but you still have to keep it in mind.\u003C/li>\u003C/ul>\u003Ch4 id=\"commercial-solution\">Commercial solution\u003C/h4>\u003Cp>A commercial solution is a software that you can buy through a website or a vendor e.g.: Shotgun, Ftrack or Arnold.\u003C/p>\u003Cp>\u003Cem>Pros\u003C/em>\u003C/p>\u003Cul>\u003Cli>You add a lot of features to your pipeline in a second\u003C/li>\u003Cli>You take advantage of years of development and expertise\u003C/li>\u003Cli>A dedicated support is available\u003C/li>\u003Cli>Your artists may be familiar with it already because they saw it in another studios\u003C/li>\u003Cli>Great when handling standard duties that any studio face\u003C/li>\u003C/ul>\u003Cp>\u003Cem>Cons\u003C/em>\u003C/p>\u003Cul>\u003Cli>You are dependent on the shop behind the software (it can close or change the pricing for instance)\u003C/li>\u003Cli>There are many hidden costs (support, extensions, configuration…)\u003C/li>\u003Cli>There is no silver bullet: no commercial software will fit 
perfectly to your needs. It means you will have to adapt your process to it\u003C/li>\u003C/ul>\u003Ch4 id=\"decision-factors\">Decision factors\u003C/h4>\u003Cp>Additionally, we consider that three points should be taken in consideration when doing your choice: strategy, culture and budget. Some studios consider that R&amp;D is their key advantage, others consider it as their network. Some studios needs to reach the expectation of a given client they need to improve their tooling, others will have to ship productions in a short time frame, etc. Studio goals will have a big influence on your decision.\u003C/p>\u003Cp>The budget is obviously an important parameter. Having a team of engineer is expensive and it doesn’t bring immediate revenue. It’s a long term investment. If you’re tight on budget, it’s probably better to look for commercial solutions than in-house development. Last but not least the culture of the studio has a deep impact on the workings of things.\u003C/p>\u003Cp>For instance, in France we have \u003Cem>Buf\u003C/em> studio who was famous for having only in-house software. It made them stand out of the crowd for decades. \u003Cem>Illumination\u003C/em> made big efforts on their render tools and asset manager while they rely heavily on Shotgun for their production management. \u003Cem>Cube Creative\u003C/em> is known for its automation and its capability to ship a lot of shots. \u003Cem>Unit Image\u003C/em> is known for the quality of this picture. The style of your studio will have a deep influence on deciding whether to write quick and dirty developments with fast results, to build quality tools that gives you key advantage or simply relying on rock solid tools with proven track record.\u003C/p>\u003Ch4 id=\"to-conclude\">To conclude\u003C/h4>\u003Cp>When choosing between building a software and buying it, there is no black and white answer. It depends on several factors such as budget, team size, studio culture or strategy of the studio. 
Most likely, in the end, you will probably have a combination of both options. It boils down to finding a balance between the two options.\u003C/p>\u003Cp>As Douglas from Blur Studio suggested on our Slack channel, the most important thing is to keep your architecture modular. This way, you can change components easily when your context evolve or if you consider that you made the wrong choice on a given part. So be prepared to replace components of your pipeline on a regular basis.\u003C/p>\u003Cp>\u003Cem>NB: If you want to dig further in modular architecture, we recommend you \u003C/em>\u003Ca href=\"https://8thlight.com/blog/uncle-bob/2012/08/13/the-clean-architecture.html?ref=blog.cg-wire.com\" rel=\"noopener\">\u003Cem>the clean architecture article\u003C/em>\u003C/a>\u003Cem> of Uncle Bob.\u003C/em>\u003C/p>\u003Cp>In short: keep your pipeline agile and flexible by making it modular. Choose commercial solutions for big and common tasks, build your own tools to develop your competitive advantage and do that in respect of the nature of your studio!\u003C/p>\u003Cp>\u003Cem>This blog is dedicated to CG pipeline and production management. If you are interested in software strategy, you will probably enjoy our articles. 
We have a \u003C/em>\u003Ca href=\"http://forum.cg-wire.com/?ref=blog.cg-wire.com\" rel=\"noopener\">\u003Cem>forum\u003C/em>\u003C/a>\u003Cem> too where you can discuss about your own problems and solutions and learn from others.\u003C/em>\u003C/p>",{"uuid":1789,"comment_id":1790,"feature_image":1791,"featured":105,"visibility":10,"created_at":1792,"updated_at":1793,"custom_excerpt":1794,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":1795,"canonical_url":7,"primary_author":1796,"primary_tag":1797,"url":1798,"excerpt":1794,"reading_time":1131,"access":14,"comments":105,"og_image":1791,"og_title":1799,"og_description":1794,"twitter_image":1791,"twitter_title":1799,"twitter_description":1794,"meta_title":7,"meta_description":1800,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"192457d3-dd4f-4f90-ad67-a576d480fdf5","29666fc635d9","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-oZXmY8cSXYGvImmJe4x2Dg.jpeg","2017-10-24T19:58:46.000+02:00","2026-02-20T06:04:10.000+01:00","We all have our own way of working. 
Depending on our context, our team or our culture we can approach the exact same problem with very…","custom-table-of-contents",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/cg-pipeline-in-house-development-vs-commercial-solution/","CG Pipeline: In-House Development vs Commercial Solution","We all have our own way of working. Depending on our context, our team or our culture we can approach the exact same problem with very different styles. 
This explains why such diversity exists in the…","/posts/cg-pipeline-in-house-development-vs-commercial-solution",{"title":1784},"cg-pipeline-in-house-development-vs-commercial-solution","posts/cg-pipeline-in-house-development-vs-commercial-solution",[1806],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"0qTw2emqhCPs4eEJKuHcG_5_4yLujuOR2TzoaBSiTfY",{"id":1809,"title":1810,"authors":1811,"body":7,"description":7,"extension":8,"html":1813,"meta":1814,"navigation":14,"path":1826,"published_at":1818,"seo":1827,"slug":1828,"stem":1829,"tags":1830,"__hash__":1833,"uuid":1815,"comment_id":1816,"feature_image":1817,"featured":105,"visibility":10,"created_at":1818,"updated_at":1819,"custom_excerpt":1820,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1821,"primary_tag":1822,"url":1823,"excerpt":1820,"reading_time":1720,"access":14,"comments":105,"og_image":1824,"og_title":1810,"og_description":1820,"twitter_image":1824,"twitter_title":1810,"twitter_description":1820,"meta_title":7,"meta_description":1825,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:say-welcome-to-our-brand-new-forum.json","Say Welcome To Our Brand New Forum",[1812],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>A few months ago, we started a community of CG Pipeline Engineers. To achieve that, we invited people to discuss on a private Slack channel. We were pleasantly surprised by the result. 
It was a field of ideas, on top of that, there were many tips and knowledge on best practices that were shared. People from different studios were able to meet and discuss on these topics. However, at some point we faced two limitations: the content could not be indexed on a search engine and the Slack history is deleted on a regular basis.\u003C/p>\u003Cp>Which is why we decided to open a public \u003Ca href=\"https://forum.cg-wire.com/?ref=blog.cg-wire.com\" rel=\"noopener\">forum\u003C/a> where people could talk about CG Pipeline and CG Production Management. This forum will be a good place for the community to share different topics and to keep track of most important subjects. Search engine indexation will also allow new people to find the forum and the community. Of course, a category is dedicated to CGWire software support. Such as Free and Open Source technology, we think it’s important for users to be able to help each other easily. Finally we will be posting feedback requests about our next upcoming features here in the forum.\u003C/p>\u003Cp>The technology that is being used to manage the forum is called \u003Ca href=\"https://www.discourse.org/?ref=blog.cg-wire.com\" rel=\"noopener\">Discourse\u003C/a>. It’s a proven software that provides a very nice user interface. We hope you’ll enjoy it! We are very excited to see what could come out of this. Don’t hesitate to participate and ask any questions you want, we’ll be glad to answer and help you! 
Join us on \u003Ca href=\"https://forum.cg-wire.com/?ref=blog.cg-wire.com\" rel=\"noopener\">https://forum.cg-wire.com\u003C/a> !\u003C/p>",{"uuid":1815,"comment_id":1816,"feature_image":1817,"featured":105,"visibility":10,"created_at":1818,"updated_at":1819,"custom_excerpt":1820,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1821,"primary_tag":1822,"url":1823,"excerpt":1820,"reading_time":1720,"access":14,"comments":105,"og_image":1824,"og_title":1810,"og_description":1820,"twitter_image":1824,"twitter_title":1810,"twitter_description":1820,"meta_title":7,"meta_description":1825,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ee81e153-06d4-491f-ab78-73537955d1e8","aafb325121e6","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-tue5tT1QZwaBn74i_QY96w.png","2017-10-16T00:32:27.000+02:00","2021-01-14T14:52:49.000+01:00","A few months ago, we started a community of CG Pipeline Engineers. To achieve that, we invited people to discuss on a private Slack…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/say-welcome-to-our-brand-new-forum/","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1151/1-tue5tT1QZwaBn74i_QY96w.png","A few months ago, we started a community of CG Pipeline Engineers. 
To achieve that, we invited people to discuss on a private Slack channel. We were pleasantly surprised by the result. It was a field…","/posts/say-welcome-to-our-brand-new-forum",{"title":1810},"say-welcome-to-our-brand-new-forum","posts/say-welcome-to-our-brand-new-forum",[1831,1832],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":1653,"name":71,"slug":78,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":75},"F6-3UlDM-PX0A7yK2YUi4UMb-pLw8HoH-ZML5FCNugY",{"id":1835,"title":1836,"authors":1837,"body":7,"description":7,"extension":8,"html":1839,"meta":1840,"navigation":14,"path":1853,"published_at":1844,"seo":1854,"slug":1855,"stem":1856,"tags":1857,"__hash__":1859,"uuid":1841,"comment_id":1842,"feature_image":1843,"featured":105,"visibility":10,"created_at":1844,"updated_at":1845,"custom_excerpt":1846,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1847,"primary_tag":1848,"url":1849,"excerpt":1846,"reading_time":447,"access":14,"comments":105,"og_image":1850,"og_title":1851,"og_description":1846,"twitter_image":1850,"twitter_title":1851,"twitter_description":1846,"meta_title":7,"meta_description":1852,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-the-best-graph-database-for-your-cg-production-data.json","The Best Graph Database for Your CG Production Data In 
2026",[1838],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>As we mentioned in a previous blog post, A CG production can be represented as a graph structure. A movie is made of shots which are generated from scene files which are themselves made of elements linked by relationships. Nevertheless, when we store production data into a database, we tend to use a flat description of the data. And when it’s time to chose a database, the most common choice is to rely on relational databases.\u003C/p>\u003Cp>Using a relational database is a good choice: it’s safe and does the job well. But, nowadays, a few database technologies propose to store your data directly formatted as graphs. Initially, they are mostly used to deal with social networks or banking use cases. But it’s no suprise that they caught the attention of many Technical Directors and Developers from CG studios. Because of the growing interest for graph databases, we decided to look closer at them.\u003C/p>\u003Cp>The information of a graph will make you more agile. Graph storage allows to save the dependencies of all your assets and set the versions of the elements casted in a shot. And because stored graphs are directed, you can easily compute a sequence of operations to build or rebuild an element of the scene. 
Which means more reactivity when the director wants to try new things.\u003C/p>\u003Cp>Now we have a good incentive to use graph databases, we are going to have a look at major open source graph databases available on the market.\u003C/p>\u003Ch4 id=\"example-use-case\">Example use case\u003C/h4>\u003Cp>To explore these databases, we propose to implement the data graph of the props animation described in our previous article named \u003Ca href=\"https://medium.com/@cgwire/cg-pipeline-asset-management-and-dependencies-634b28a1a49a?ref=blog.cg-wire.com\">CG production as a Graph\u003C/a>. The approach will be to store the steps required to build the props and include it in a given shot.\u003C/p>\u003Cp>The most common thing we want to do with graph is to obtain all the impacts of a change on a given element. To illustrate this, we will perform a query that retrieve the elements impacted by the change on the mesh of the props.\u003C/p>\u003Cp>We’ll provide Python snippets to show how to use each database. Then we’ll run a quick benchmark. We will compare how long it takes to run 10 000 times our sample query on a i7–6700 CPU @ 3.40GHz . Note that this benchmark includes the Python client, we consider that you will only use your database through it. 
That’s why we include it in our measures.\u003C/p>\u003Ch4 id=\"main-databases\">Main databases\u003C/h4>\u003Cp>The main databases we will study are the following:\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://neo4j.com/?ref=blog.cg-wire.com\" rel=\"noopener\">Neo4j\u003C/a> (Java)\u003C/li>\u003Cli>\u003Ca href=\"https://www.arangodb.com/?ref=blog.cg-wire.com\" rel=\"noopener\">ArangoDB\u003C/a> (C++)\u003C/li>\u003Cli>\u003Ca href=\"https://cayley.io/?ref=blog.cg-wire.com\" rel=\"noopener\">Cayley\u003C/a> (Go)\u003C/li>\u003Cli>\u003Ca href=\"https://orientdb.com/?ref=blog.cg-wire.com\" rel=\"noopener\">OrientDB\u003C/a> (Java)\u003C/li>\u003C/ul>\u003Ch4 id=\"cayley\">Cayley\u003C/h4>\u003Cp>Cayley is a graph database distributed by Google written in Go. It looks promising on many aspects (configurable backend, community driven) but currently the documentation is close to inexistant. Whatever, let’s see what we can do with.\u003C/p>\u003Cp>First, download the binaries related to your platform, initialize the database and run the http server which will that allow us to perfoms queries. Database initialization doesn’t mean you have to give data, it’s just needed to create the database files../cayley init -db bolt -dbpath /tmp/testdb\u003Cbr>./cayley http --dbpath=/tmp/testdb  --host 0.0.0.0 --port 64210\u003C/p>\u003Cp>You can notice here that another DB technology is involved (Bolt). It’s because Cayley is a layer above an existing database. You can either use traditional key value stores or relational database as backend.\u003C/p>\u003Cp>Now let’s go with the Python client code. We want to store all our assets, scenes, shots and their relations. To achieve that, we need to install the Python driver:pip install pyley\u003C/p>\u003Cp>Cayley is based on the concept of triplet. Everything is a vertex linked to another one: the triplet is made of three vertices: the two elements we want to link and the link vertex (kind of edge). 
You can add a label on each triplet, so in Cayley the term for this data structure is “quads”. \u003Cbr>Unfortunately the Python client is not complete and does not support Quad creation. So we need to create our quads via requests, a standard Python HTTP client (Cayley provied a REST API):def create_quad(quad):\u003Cbr>    path = “\u003Ca href=\"http://localhost:64210/api/v1/write?ref=blog.cg-wire.com\" rel=\"nofollow\">http://localhost:64210/api/v1/write\u003C/a>\"\u003Cbr>    return requests.post(path, json=[quad])\u003C/p>\u003Cp>Now let’s proceed to the quad creation:quads = [\u003Cbr>    {\u003Cbr>        “subject”: “props1-concept”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        “object”: “props1-texture”\u003Cbr>    },\u003Cbr>    {\u003Cbr>        “subject”: “props1-concept”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        “object”: “props1-mesh”\u003Cbr>    },\u003Cbr>    {\u003Cbr>        “subject”: “props1-texture”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        “object”: “props1-model”\u003Cbr>    },\u003Cbr>    {\u003Cbr>        “subject”: “props1-mesh”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        “object”: “props1-model”\u003Cbr>    },\u003Cbr>    {\u003Cbr>        “subject”: “props1-mesh”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        “object”: “props1-rig”\u003Cbr>    },\u003Cbr>    {\u003Cbr>        “subject”: “props1-mesh”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        “object”: “props1-keys”\u003Cbr>    }\u003Cbr>    {\u003Cbr>        “subject”: “props1-rig”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        “object”: “props1-keys”\u003Cbr>    },\u003Cbr>    {\u003Cbr>        “subject”: “props1-model”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        “object”: “shot1-image-sequence”\u003Cbr>    },\u003Cbr>    {\u003Cbr>        “subject”: “props1-keys”, \u003Cbr>        “predicate”: “dependencyof”, \u003Cbr>        
“object”: “shot1-image-sequence”\u003Cbr>    }\u003Cbr>]for quad in quads:\u003Cbr>   create_quad(quad)\u003C/p>\u003Cp>That’s it. As you can see we already have stored all our data and set relation between them. If you create again similar quads, nothing will change and there will be no duplicates.\u003C/p>\u003Cp>Now let’s perform our query about the impact of a rig change on the production:from pyley import CayleyClient, GraphObject\u003Cbr>client = CayleyClient(\"\u003Ca href=\"http://localhost:64210/?ref=blog.cg-wire.com\" rel=\"nofollow\">http://localhost:64210\u003C/a>\", \"v1\")graph = GraphObject()\u003Cbr>query = graph.V(“props1-mesh”)\u003Cbr>    .Out()\u003Cbr>    .All()\u003C/p>\u003Cp>To get our desired data, we had to specify which vertex (here our texture) of which we want to study the impact of. Then we just asked the outer the vertex of wich the texture is element of. We can chain the call depending on the depth of the impact we want to study. A recursive traversal is available but the Python client doesn’t implement it yet. Finally we made our performance tests. It took 50 seconds to run ten thousands time this query.\u003C/p>\u003Cp>The visualization UI doesn’t work well and is not very intuitive to use. Which is sad because Neo4j and Arango have working UIs that allow to display your graph.\u003C/p>\u003Cp>Cayley is a very simple database. With a single concept, the quad representation, it allows to represent our data. Querying is very easy too and based on standard graph query language such as \u003Ca href=\"http://tinkerpop.apache.org/docs/current/reference/?ref=blog.cg-wire.com\" rel=\"noopener\">Gremlin\u003C/a> (you can chose your favorite query language). Unfortunately the project is still poorly documented and the Python client is uncomplete. That’s why despite its clean and simple design we cannot recommend to use Cayley in production.\u003C/p>\u003Ch4 id=\"neo4j\">Neo4j\u003C/h4>\u003Cp>Neo4j is the most mature solution of all. 
The enterprise behind it offers compelling entreprise solution for support and \u003Ca href=\"https://neo4j.com/editions/?ref=blog.cg-wire.com\" rel=\"noopener\">extra features\u003C/a> (monitoring, backup, improved querying…). That’s a big advantage if you need to feel very safe due to hard contracts with your clients. But to start with it, we reommend using the community edition. This is this version that we’ll cover in this article.\u003C/p>\u003Cp>Because we are just experimenting, we are going to use the official Docker to play with Neo4j:\u003C/p>\u003Cpre>\u003Ccode>docker run \\    --publish=7474:7474 --publish=7687:7687 \\    --volume=$HOME/neo4j/data:/data \\    neo4j\u003C/code>\u003C/pre>\u003Cp>Now we can install the Python driver:pip install neo4j-driver\u003C/p>\u003Cp>First things first, let’s initialize the connection with the database and the query session. At first connection they will ask you to set a password, you can do it through the last line of the snippet below:from neo4j.v1 import GraphDatabase, basic_authdriver = GraphDatabase.driver(\u003Cbr>    \"bolt://localhost:7687\",\u003Cbr>    auth=basic_auth(\"neo4j\", \"tests\")\u003Cbr>)\u003Cbr>session = driver.session()\u003Cbr># session.run(\"CALL dbms.changePassword('tests')\")\u003C/p>\u003Cp>Then let’s add helpers to create asset nodes, shot nodes and relation edges. The python client does not provide a strong API, it justs allow to perform requests directly with the in-house language of Neo4j named Cypher. 
There is CREATE command but we’ll use MERGE because it acts as CREATE if not exists:def create_asset(name):\u003Cbr>    session.run(\u003Cbr>        \"MERGE (a:Asset { name: $name })\",\u003Cbr>        name=name\u003Cbr>    )def create_shot(name):\u003Cbr>    session.run(\u003Cbr>        \"MERGE (a:Shot { name: $name })\",\u003Cbr>        name=name\u003Cbr>    )def create_relation(asset1, asset2):\u003Cbr>    session.run(\u003Cbr>        \"MATCH (a:Asset { name: $asset1 }), (b:Asset { name: $asset2 })\"\u003Cbr>        \"MERGE (a)-[r:ELEMENT_OF]-&gt;(b)\",\u003Cbr>        asset1=asset1, asset2=asset2\u003Cbr>    )def create_casting(asset, shot):\u003Cbr>    session.run(\u003Cbr>        \"MATCH (a:Asset { name: $asset }), (b:Shot { name: $shot })\"\u003Cbr>        \"MERGE (a)-[r:CASTED_IN]-&gt;(b)\",\u003Cbr>        asset=asset, shot=shot\u003Cbr>    )\u003C/p>\u003Cp>As you can see the syntax is easy to read and learn. We can add as many fields we want on a single node.\u003C/p>\u003Cp>Now we have our functions, let’s populate our graph:create_asset(\"Props 1 concept\")\u003Cbr>create_asset(\"Props 1 mesh\")\u003Cbr>create_asset(\"Props 1 texture\")\u003Cbr>create_asset(\"Props 1 rig\")\u003Cbr>create_asset(\"Props 1 model\")\u003Cbr>create_asset(\"Props 1 keys\")\u003Cbr>create_shot(\"Shot 1\")create_relation(\"Props 1 concept\", \"Props 1 texture\")\u003Cbr>create_relation(\"Props 1 concept\", \"Props 1 mesh\")\u003Cbr>create_relation(\"Props 1 mesh\", \"Props 1 model\")\u003Cbr>create_relation(\"Props 1 texture\", \"Props 1 model\")\u003Cbr>create_relation(\"Props 1 mesh\", \"Props 1 rig\")\u003Cbr>create_relation(\"Props 1 mesh\", \"Props 1 keys\")\u003Cbr>create_relation(\"Props 1 rig\", \"Props 1 keys\")create_casting(\"Props 1 model\", \"Shot 1\")\u003Cbr>create_casting(\"Props 1 keys\", \"Shot 1\")\u003C/p>\u003Cp>Now we can take advantage of the expressive query language to perform our traversal. Note the star inside the arrow. 
It means that will traverse all nodes until there is no more out connections.result = session.run(\u003Cbr>    \"MATCH (:Asset { name: 'Props 1 mesh' })-[*]-&gt;(out)\"\u003Cbr>    \"RETURN out.name as name\"\u003Cbr>)for record in result:\u003Cbr>    print(\"%s\" % record[\"name\"])session.close()\u003C/p>\u003Cp>We’re done! Result records are easy to display and analyze. They are Python dicts containing the fields specified at creation. Running ten thousand times our request lasted 3.5 seconds (it drops to 17 seconds if you open/close the session each time).\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-jxPmsYBVm3zMdPOgDejKfw.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"651\" height=\"534\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/max/800/1-jxPmsYBVm3zMdPOgDejKfw.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-jxPmsYBVm3zMdPOgDejKfw.png 651w\">\u003Cfigcaption>\u003Cspan style=\"white-space: pre-wrap;\">Displaying of the graph in the neo4j&nbsp;UI\u003C/span>\u003C/figcaption>\u003C/figure>\u003Cp>Overall, Neo4j is full featured and does the job well and it’s fast compared to others. Its strong query language and its many features will allow to perform the most common use cases you will have with your graph. The official Python client is a bit thin, but the community provides an interesting alternative with \u003Ca href=\"https://github.com/robinedwards/neomodel?ref=blog.cg-wire.com\" rel=\"noopener\">a client built like an ORM\u003C/a>. Last but not least, the database is here since a long time and the entreprise behind it is very active. 
So, it makes Neo4j the safer choice of this review.\u003C/p>\u003Cp>NB: here is \u003Ca href=\"https://hackernoon.com/life-after-1-year-of-using-neo4j-4eca5ce95bf5?ref=blog.cg-wire.com\" rel=\"noopener\">a real life feedback about Neo4j\u003C/a>.\u003C/p>\u003Ch4 id=\"with-arangodb\">With ArangoDB\u003C/h4>\u003Cp>ArangoDB is a versatile database that allows document storage and graph storage all along. Recently, it have gained in popularity, it’s the reason why we included it to the test. It comes with handful features like easy deployment on a cloud infrastructure and helpers to build REST API. But for this article we’ll focus on the graph storage and its query system.\u003C/p>\u003Cp>Let’s code! To make our testing we need first an Arango instance up and running. Let’s use Docker again to spawn it:docker run -p 8529:8529 -e ARANGO_ROOT_PASSWORD=openSesame arangodb/arangodb:3.2.1\u003C/p>\u003Cp>Then we install the Python client:pip install python-arango\u003C/p>\u003Cp>Now we can write our Python script, the first step will be to initialize our database:from arango.client import ArangoClientclient = ArangoClient(username='root', password='openSesame')\u003Cbr>db = client.create_database('cgproduction')\u003C/p>\u003Cp>As you can see the database creation is very straightforward. The only problem is that it raises an exception if the database already exists. It means that if you want to achieve idempotence with your script, you will have to write your own “get or create” method. It’s the same for every creation we’ll do in the following. 
Be prepared to augment this Python driver.\u003C/p>\u003Cp>The next step is to define our graph and configure the collections that will store vertices and edges information:dependencies = db.create_graph('dependencies')shots = dependencies.create_vertex_collection('shots')\u003Cbr>assets = dependencies.create_vertex_collection('assets')casting = dependencies.create_edge_definition(\u003Cbr>    name='casting',\u003Cbr>    from_collections=['assets'],\u003Cbr>    to_collections=['shots']\u003Cbr>)\u003Cbr>elements = dependencies.create_edge_definition(\u003Cbr>    name='element',\u003Cbr>    from_collections=['assets'],\u003Cbr>    to_collections=['assets']\u003Cbr>)\u003C/p>\u003Cp>Arango graph storage is based on its own document storage system. Each vertex is stored as a json entry in a collection. Edges are a little bit different. They are stored in a similar fashion, but the collection definition requires more information: the inner vertex collection and the outer one. Edges are always directed.\u003C/p>\u003Cp>Now we have our database properly configured, we can add our data:# Insert vertices\u003Cbr>assets.insert(\u003Cbr>    {'_key': 'props1-concept', 'name': 'Props 1 Concept'})\u003Cbr>assets.insert(\u003Cbr>    {'_key': 'props1-texture', 'name': 'Props 1 Texture'})\u003Cbr>assets.insert(\u003Cbr>    {'_key': 'props1-mesh', 'name': 'Props 1 Mesh'})\u003Cbr>assets.insert({'_key': 'props1-rig', 'name': 'Props 1 Rig'})\u003Cbr>assets.insert({'_key': 'props1-model', 'name': 'Props 1 Model'})\u003Cbr>assets.insert({'_key': 'props1-keys', 'name': 'Props 1 Keys'})\u003Cbr>shots.insert(\u003Cbr>    {'_key': 'shot1-image-sequence', \u003Cbr>     'name': 'Shot 1 Image sequence'})# Insert edges\u003Cbr>elements.insert(\u003Cbr>    {'_from': 'assets/props1-concept', \u003Cbr>     '_to': 'assets/props1-texture'})\u003Cbr>elements.insert(\u003Cbr>    {'_from': 'assets/props1-concept', \u003Cbr>     '_to': 'assets/props1-mesh'})\u003Cbr>elements.insert(\u003Cbr>    
{'_from': 'assets/props1-texture', \u003Cbr>     '_to': 'assets/props1-model'})\u003Cbr>elements.insert(\u003Cbr>    {'_from': 'assets/props1-mesh', \u003Cbr>     '_to': 'assets/props1-rig'})\u003Cbr>elements.insert(\u003Cbr>    {'_from': 'assets/props1-mesh', \u003Cbr>     '_to': 'assets/props1-model'})\u003Cbr>elements.insert(\u003Cbr>    {'_from': 'assets/props1-mesh', \u003Cbr>     '_to': 'assets/props1-keys'})\u003Cbr>elements.insert(\u003Cbr>    {'_from': 'assets/props1-rig', \u003Cbr>     '_to': 'assets/props1-keys'})\u003Cbr>casting.insert(\u003Cbr>    {'_from': 'assets/props1-model', \u003Cbr>     '_to': 'shots/shot1-image-sequence'})\u003Cbr>casting.insert(\u003Cbr>    {'_from': 'assets/props1-keys', \u003Cbr>     '_to': 'shots/shot1-image-sequence'})\u003C/p>\u003Cp>Once our data is properly imported, we can proceed to our query:traversal_results = dependencies.traverse(\u003Cbr> start_vertex='assets/props1-mesh',\u003Cbr> direction='outbound'\u003Cbr>)for result in traversal_results[\"vertices\"]:\u003Cbr>   print(result[\"name\"])\u003C/p>\u003Cp>With this simple request we get the full impact of a modification of the props 1 mesh. The result is easy to analyze and the query is configurable (for instance you can choose between a depth-first traversal and a breadth-first traversal).\u003C/p>\u003Cp>Arango provides a traversal object that allows you to build particular paths. Some helpers are available too, like shortest path finding or path length retrieval. 
It should cover most of your needs in term of graph querying.\u003C/p>\u003Cp>Last but not least, you can visualize your graph in the Arango web UI:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-WSrxSHlMqQ9JiMG6SlrmWg.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"605\" height=\"475\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/max/800/1-WSrxSHlMqQ9JiMG6SlrmWg.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-WSrxSHlMqQ9JiMG6SlrmWg.png 605w\">\u003Cfigcaption>\u003Cspan style=\"white-space: pre-wrap;\">The overall web UI is slicker than neo4j UI but the graph representation is less&nbsp;explicit\u003C/span>\u003C/figcaption>\u003C/figure>\u003Cp>Overall, the ArangoDB and Python client are simple to understand and well documented. It provides many helpers to play with our graph and the visualization tools makes things even easier. But it looks slower than neo4j. Running 10 000 times our query took 26s. Despite these results, it’s still our favorite database of this test. Arango is very developer-friendly. It is the best choice to experiment quickly with graph databases. And because the company behind looks very active, it seems to be a safe choice for a production usage too.\u003C/p>\u003Ch4 id=\"orientdb\">OrientDB\u003C/h4>\u003Cp>OrientDB is here for a while now (since 2010). But because of the very bad feedback about it (see comments too), we decided to not cover this database in this article. It’s too risky to use it in a CG production environment.\u003C/p>\u003Ch4 id=\"alternatives\">Alternatives\u003C/h4>\u003Cp>There are still alternatives. By playing with traditional database, you can have similar features as with graph database. One option is to use Postgres with its recursive joins. 
It will allow you to cover simple use cases of graph traversal.\u003C/p>\u003Cp>Another option, which looks great if you want to be able to do fuzzy searches, is to use Elastic Search and store all vertices and edges as JSON documents (similar approach as ArangoDB). Read this \u003Ca href=\"https://medium.com/@imriqwe/elasticsearch-as-a-graph-database-bc0eee7f7622?ref=blog.cg-wire.com\">full article\u003C/a> to have more information about the subject.\u003C/p>\u003Ch4 id=\"visualisation\">Visualisation\u003C/h4>\u003Cp>Having graph data is great but you may want to build tools that shows your data at some point (and outside of the built-in UIs).\u003C/p>\u003Cp>There are two good libraries for Qt that allows to build graph easily:\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"https://github.com/clemenssielaff/ZodiacGraph?ref=blog.cg-wire.com\" rel=\"noopener\">ZodiacGraph\u003C/a>: a powerful C++ library which is fast and flexible.\u003C/li>\u003Cli>\u003Ca href=\"https://github.com/LeGoffLoic/Nodz?ref=blog.cg-wire.com\" rel=\"noopener\">Nodz\u003C/a>: a Python library easy to use.\u003C/li>\u003C/ul>\u003Cp>Another option is to use Javascript libraries for in-browser or \u003Ca href=\"https://electron.atom.io/?ref=blog.cg-wire.com\" rel=\"noopener\">Electron\u003C/a> applications. Here are some:\u003C/p>\u003Cul>\u003Cli>\u003Ca href=\"http://sigmajs.org/?ref=blog.cg-wire.com\" rel=\"noopener\">SigmaJS\u003C/a>: fast and well documented library\u003C/li>\u003Cli>\u003Ca href=\"http://js.cytoscape.org/?ref=blog.cg-wire.com\" rel=\"noopener\">Cytoscape\u003C/a>: versatile and robust.\u003C/li>\u003Cli>\u003Ca href=\"https://d3js.org/?ref=blog.cg-wire.com\" rel=\"noopener\">d3.js\u003C/a>: harder to use but limitless.\u003C/li>\u003C/ul>\u003Ch4 id=\"to-conclude\">To conclude\u003C/h4>\u003Cp>From our study, it looks like ArangoDB is the most user friendly database and its document storage aspect will make your production data management easier. But it’s still a young DB. 
If you need speed or if there is a lot of money at stake and if you are looking for a safer choice, go for Neo4j, which does the job well and looks more robust. Finally, Cayley looks good in many aspects, has a great design and could be the best choice to complement an already existing relational database, but is still too undocumented and young to be used in production. So, to sum up: try ArangoDB first!\u003C/p>\u003Cp>The question of what problems graph representation and storage solve for pipeline TDs remains. The main use case for us is to easily generate the sequence of actions needed to rebuild a shot when a change occurs. The other one is to easily provide a representation of the production that people can discuss.\u003C/p>\u003Cp>We hope you enjoyed this article. We are still very new to graph databases. We would be glad to know what you think about it and read about your production experience with these technologies: comments are welcome!\u003C/p>\u003Cp>\u003Cem>This blog is dedicated to CG pipeline and production management. If you are interested in graph databases for CG productions, you will probably enjoy all our articles. 
Read our \u003C/em>\u003Ca href=\"https://medium.com/@cgwire/facilitating-cg-graphists-to-focus-more-on-the-artistic-aa8f801edf20?ref=blog.cg-wire.com\">\u003Cem>first blog post\u003C/em>\u003C/a>\u003Cem> to know more about us!\u003C/em>\u003C/p>",{"uuid":1841,"comment_id":1842,"feature_image":1843,"featured":105,"visibility":10,"created_at":1844,"updated_at":1845,"custom_excerpt":1846,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1847,"primary_tag":1848,"url":1849,"excerpt":1846,"reading_time":447,"access":14,"comments":105,"og_image":1850,"og_title":1851,"og_description":1846,"twitter_image":1850,"twitter_title":1851,"twitter_description":1846,"meta_title":7,"meta_description":1852,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"f1d45cdb-64ab-4845-8b93-7f2f792e49f3","4cedc9e49065","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/2560/1-I4a6MOBxYhgQsBRcwbShRw.jpeg","2017-09-04T10:56:01.000+02:00","2026-03-26T10:53:20.000+01:00","As we mentioned in a previous blog post, A CG production can be represented as a graph structure. 
A movie is made of shots which are…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/cg-pipeline-the-best-graph-database-for-your-cg-production-data/","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-I4a6MOBxYhgQsBRcwbShRw.jpeg","CG Pipeline: The Best Graph Database for Your CG Production Data","As we mentioned in a previous blog post, A CG production can be represented as a graph structure. 
A movie is made of shots which are generated from scene files which are themselves made of elements…","/posts/cg-pipeline-the-best-graph-database-for-your-cg-production-data",{"title":1836},"cg-pipeline-the-best-graph-database-for-your-cg-production-data","posts/cg-pipeline-the-best-graph-database-for-your-cg-production-data",[1858],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"alQcQf_eJ_tEX2UOV9OeLeXUE0lu-t7VQ_w5id6urb0",{"id":1861,"title":1862,"authors":1863,"body":7,"description":7,"extension":8,"html":1865,"meta":1866,"navigation":14,"path":1878,"published_at":1870,"seo":1879,"slug":1880,"stem":1881,"tags":1882,"__hash__":1884,"uuid":1867,"comment_id":1868,"feature_image":1869,"featured":105,"visibility":10,"created_at":1870,"updated_at":1871,"custom_excerpt":1872,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1873,"primary_tag":1874,"url":1875,"excerpt":1872,"reading_time":1720,"access":14,"comments":105,"og_image":1869,"og_title":1876,"og_description":1872,"twitter_image":1869,"twitter_title":1876,"twitter_description":1872,"meta_title":7,"meta_description":1877,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-files-and-nodes-metadata.json","Files and Nodes Metadata In A CG Pipeline (2026)",[1864],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>Having a shared database for all your tools is mandatory to ensure a good flow between them. 
But sometimes you need a simpler access to particular data. Reaching information about a node or a file directly is more efficient than querying a database. But the question is how to store and query your metadata locally?\u003C/p>\u003Cp>There are two simple ways to achieve that. The first one is to add a text file in the same folder as your file. Inside it, data can be structured following standard syntax like \u003Ca href=\"https://en.wikipedia.org/wiki/TOML?ref=blog.cg-wire.com\" rel=\"noopener\">TOML\u003C/a>, \u003Ca href=\"https://fr.wikipedia.org/wiki/YAML?ref=blog.cg-wire.com\" rel=\"noopener\">YAML\u003C/a> or \u003Ca href=\"https://fr.wikipedia.org/wiki/YAML?ref=blog.cg-wire.com\" rel=\"noopener\">JSON\u003C/a>. The second way is to rely on node properties. Most modeling tools propose to add key/value pairs to a node. These simple pairs allow you to add many interesting information to all the part of your scene. For instance, you can set :\u003C/p>\u003Cul>\u003Cli>Links with assets or shots from the database\u003C/li>\u003Cli>Which part of the scene it is (body, face, environment, etc)\u003C/li>\u003Cli>Add informative tags like: hires, lowres, proxy, tocache, etc.\u003C/li>\u003C/ul>\u003Cp>A good use case for tags is to identify what should be exported from the scene during the \u003Ca href=\"https://medium.com/@cgwire/cg-pipeline-publishing-preview-and-production-management-c51d7ae7ffec?ref=blog.cg-wire.com\">publishing\u003C/a> of a fabrication step.\u003C/p>\u003Cp>That’s still a good idea to store this meta data in a shared database. Provide a quick access to information doesn’t mean you shouldn’t share it with the other tools.\u003C/p>\u003Cp>NB: Our \u003Ca href=\"https://zou.cg-wire.com/?ref=blog.cg-wire.com\" rel=\"noopener\">Zou API\u003C/a> allows to store non structured data on all assets and file entries. We simply added a JSON field to our main types. 
That way you can add specific information to your shared production data without thinking about the schema of your entities.\u003C/p>\u003Cp>The more information you share, the more efficient the team will be. Every bit of additional information you can give is useful. Especially when you have to deal with complex scenes involving thousands of assets. That’s why local metadata can improve the productivity of your production.\u003C/p>\u003Cp>\u003Cem>This blog is dedicated to CG pipeline and production management. If you are interested in asset nodes metadata, you will probably enjoy our articles. Read our \u003C/em>\u003Ca href=\"https://medium.com/@cgwire/facilitating-cg-graphists-to-focus-more-on-the-artistic-aa8f801edf20?ref=blog.cg-wire.com\">\u003Cem>first blog post\u003C/em>\u003C/a>\u003Cem> to understand who we are and what we do.\u003C/em>\u003C/p>",{"uuid":1867,"comment_id":1868,"feature_image":1869,"featured":105,"visibility":10,"created_at":1870,"updated_at":1871,"custom_excerpt":1872,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1873,"primary_tag":1874,"url":1875,"excerpt":1872,"reading_time":1720,"access":14,"comments":105,"og_image":1869,"og_title":1876,"og_description":1872,"twitter_image":1869,"twitter_title":1876,"twitter_description":1872,"meta_title":7,"meta_description":1877,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"5d5bb6d7-4ed7-4e0a-a59f-11238e8a57d4","fc5ac7d3748e","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-adh4mxqlTdZX_tsADuoIbw.jpeg","2017-07-21T00:57:58.000+02:00","2026-02-20T06:04:09.000+01:00","Having a shared database for all your tools is mandatory to ensure a good flow between them. 
But sometimes you need a simpler access to…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/cg-pipeline-files-and-nodes-metadata/","CG Pipeline: Files and Nodes Metadata","Having a shared database for all your tools is mandatory to ensure a good flow between them. But sometimes you need a simpler access to particular data. Reaching information about a node or a file…","/posts/cg-pipeline-files-and-nodes-metadata",{"title":1862},"cg-pipeline-files-and-nodes-metadata","posts/cg-pipeline-files-and-nodes-metadata",[1883],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"Zsk-B_c8ZoMKJPnCH3OyTHc16CYsy7tb09GL6XYuIqA",{"id":1886,"title":1887,"authors":1888,"body":7,"description":7,"extension":8,"html":1890,"meta":1891,"navigation":14,"path":1903,"published_at":1895,"seo":1904,"slug":1905,"stem":1906,"tags":1907,"__hash__":1909,"uuid":1892,"comment_id":1893,"feature_image":1894,"featured":105,"visibility":10,"created_at":1895,"updated_at":1871,"custom_excerpt":1896,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1897,"primary_tag":1898,"url":1899,"excerpt":1896,"reading_time":1900,"access":14,"comments":105
,"og_image":1894,"og_title":1901,"og_description":1896,"twitter_image":1894,"twitter_title":1901,"twitter_description":1896,"meta_title":7,"meta_description":1902,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-file-cleaning-and-quality-checking.json","File Cleaning And Quality Checking In CG Pipelines (2026)",[1889],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>For CG artists, working on a new file which is clean means more productivity. When nodes are properly named, when positions of elements are standard, when there is no remaining artefact and when all references are properly done, it’s much easier to do a good job. You understand the scene quicker and it’s easier to select nodes. Unfortunately, clean files are not really common. So, the first steps achieved by CG artists, before jumping into the artistic stuff, are boring cleaning actions.\u003C/p>\u003Cp>But why people deliver dirty files? Because it’s hard to remember what is required once the job is done. After long hours of work, CG artists are not really motivated any more to handle the cleaning. To make things worst they don’t always know what is expected to make the file “clean”. They can be tempted to ship without doing anything in order to make the file proper.\u003C/p>\u003Cp>But what is great with boring tasks is that they can be automated. Most CG software allow Python scripting. It means you can build a simple tool that handle all the dirty work for you. By implementing quality checking scripts, you can ensure that the file is ready to be published. 
They can be run independently and provide a report of what is wrong or good (a list of statuses for each criterion).\u003C/p>\u003Cp>It can be compared to the practice of unit testing used by software engineers. A main script runs many sub-scripts. Each sub-script performs tests. Then the main script provides a report listing successful and failed sub-scripts. It allows you to know what works and what needs to be fixed in the code.\u003C/p>\u003Cp>Another thing you can automate is the fixing of the failed checks. When there is something wrong (like, for instance, special chars in a node name), you can include another script that will perform the fix required (in our example, it would remove special chars from the node name).\u003C/p>\u003Cp>Automating the action of checking a scene and fixing what is wrong can save CG artists a tremendous amount of time. It doesn’t require a lot of development and quality checks can be coded by technical artists. If you want to implement a pipeline, it is a great task to start with. You will avoid bad surprises and artists will be happier!\u003C/p>\u003Cp>\u003Cem>CGWire is a software shop that crafts \u003C/em>\u003Ca href=\"https://github.com/cgwire?ref=blog.cg-wire.com\" rel=\"nofollow noopener noopener\">\u003Cem>open source tools\u003C/em>\u003C/a>\u003Cem> to make your pipeline more efficient. 
If you are interesting in what we do and think it can help, feel free to \u003C/em>\u003Ca href=\"mailto:contact@cg-wire.com\">\u003Cem>contact us\u003C/em>\u003C/a>\u003Cem>!\u003C/em>\u003C/p>",{"uuid":1892,"comment_id":1893,"feature_image":1894,"featured":105,"visibility":10,"created_at":1895,"updated_at":1871,"custom_excerpt":1896,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1897,"primary_tag":1898,"url":1899,"excerpt":1896,"reading_time":1900,"access":14,"comments":105,"og_image":1894,"og_title":1901,"og_description":1896,"twitter_image":1894,"twitter_title":1901,"twitter_description":1896,"meta_title":7,"meta_description":1902,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"0fd25c7f-d7c2-43d8-84f0-eaa636245f73","39e47604aab2","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-yfEu1ov2TXZ4AA2Tg8jguA.jpeg","2017-07-11T09:12:05.000+02:00","For CG artists, working on a new file which is clean means more productivity. When nodes are properly named, when positions of elements are…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/cg-pipeline-file-cleaning-and-quality-checking/",2,"CG Pipeline: File Cleaning And Quality Checking","For CG artists, working on a new file which is clean means more productivity. 
When nodes are properly named, when positions of elements are standard, when there is no remaining artefact and when all…","/posts/cg-pipeline-file-cleaning-and-quality-checking",{"title":1887},"cg-pipeline-file-cleaning-and-quality-checking","posts/cg-pipeline-file-cleaning-and-quality-checking",[1908],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"sL4bdHagIioUYidligWhIxRG2qc1MYYigYpVLVMSKpA",{"id":1911,"title":1912,"authors":1913,"body":7,"description":7,"extension":8,"html":1915,"meta":1916,"navigation":14,"path":1929,"published_at":1920,"seo":1930,"slug":1931,"stem":1932,"tags":1933,"__hash__":1935,"uuid":1917,"comment_id":1918,"feature_image":1919,"featured":105,"visibility":10,"created_at":1920,"updated_at":1921,"custom_excerpt":1922,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1923,"primary_tag":1924,"url":1925,"excerpt":1922,"reading_time":48,"access":14,"comments":105,"og_image":1926,"og_title":1927,"og_description":1922,"twitter_image":1926,"twitter_title":1927,"twitter_description":1922,"meta_title":7,"meta_description":1928,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-asset-management-and-dependencies.json","Asset Management And Dependencies In CG Pipelines (2026)",[1914],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>One the principal reason to track every file and asset in a production is to manage the dependencies between them. 
It’s crucial to know what needs to be recomputed and validated when a change occurs. As the number of elements grows, it becomes more tedious to follow everything properly. If you draw every connection between each element, it will lead you to a giant directed graph that is difficult to manage. Nevertheless, it’s still mandatory to deal with it properly.\u003C/p>\u003Cp>Tracking files is basically what your pipeline does. It stores the location of all your files and makes sure that it is related to a build step. It’s even better when your pipeline stores the dependencies between files.\u003C/p>\u003Cp>But looking at files is like looking at a low level representation of your production. Working at the file level is great when you think locally about a problem (like referencing an element) but when you want to discuss about what happens on a larger scale (like the final rendering of a shot), this representation is too granular. That’s why we often prefer to deal with assets.\u003C/p>\u003Cp>Assets are abstraction of files, it’s like talking of concepts instead of precise elements. For instance it happens when you talk only about textures, not about .jpeg or .png files. Assets can be anything like models, animation keys, shaders, FX, etc. Here we don’t talk about folders or file name. Dealing with assets allow to not think anymore in output file and working files, it allows to focus on the main concept of the production.\u003C/p>\u003Cp>If needed, you can even group assets together to add another level of abstraction. For instance, an animation group contains rigs and animation keys, a texture group contains flat textures and shaders.\u003C/p>\u003Cp>Once you can represent things with assets, it’s easier to master your production graph. You can manage more efficiently your build process and dependencies. When you change an asset you know what assets need to be recomputed instantly. It’s a great communication tool for your team and for your softwares. 
All the stackholders of the project can exchange ideas around it.\u003C/p>\u003Cp>We talked a lot about graphs. Graphs are composed of vertices (nodes) and edges. Our assets can be considered as the vertices (nodes) but what the edges represent? Edges are worflow steps. A step takes one or several asset as input and leads to another asset as output. For instance, modeling is the action that melt textures, meshes and shaders into a full model. Setup action takes the mesh as input and leads to rigs.\u003C/p>\u003Cp>To conclude and illustrate that article let see some representations of production elements:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-PiZJ-_jBwCApGZf3d15_3w.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"481\" height=\"401\">\u003Cfigcaption>\u003Cspan style=\"white-space: pre-wrap;\">Graph representation of a props&nbsp;building\u003C/span>\u003C/figcaption>\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-RPwf2k5MvjwLt8DJGAXgxQ.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"501\" height=\"481\">\u003Cfigcaption>\u003Cspan style=\"white-space: pre-wrap;\">Graph representation of a shot&nbsp;building\u003C/span>\u003C/figcaption>\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-tniNe0RXXWZU1IQNezn3_Q.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"800\" height=\"532\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/max/800/1-tniNe0RXXWZU1IQNezn3_Q.png 600w, 
https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-tniNe0RXXWZU1IQNezn3_Q.png 800w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Cspan style=\"white-space: pre-wrap;\">Example: it’s easy to see the impact of a retake on a&nbsp;rig\u003C/span>\u003C/figcaption>\u003C/figure>\u003Cp>As you can see, it’s easy to notice the impact of a change once we have our graph representation and the right tools to manage it!\u003C/p>\u003Cp>That’s all for this article. We hope it will convince you to have a graph presentation of your production. Using nodes and edges will improve your communication with CG artists, supervisors and production managers… Which means more time spent on the artistic and less stressful situation!\u003C/p>\u003Cp>\u003Cem>Writing mainly about production and pipeline doesn’t mean we don’t enjoy looking at beautiful pictures too. If you would like broader topics and curated content about the CG industry in general, you can follow us on \u003C/em>\u003Ca href=\"https://x.com/cgwirekitsu?ref=blog.cg-wire.com\" 
rel=\"noreferrer\">\u003Cem>Twitter\u003C/em>\u003C/a>\u003Cem>!\u003C/em>\u003C/p>",{"uuid":1917,"comment_id":1918,"feature_image":1919,"featured":105,"visibility":10,"created_at":1920,"updated_at":1921,"custom_excerpt":1922,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1923,"primary_tag":1924,"url":1925,"excerpt":1922,"reading_time":48,"access":14,"comments":105,"og_image":1926,"og_title":1927,"og_description":1922,"twitter_image":1926,"twitter_title":1927,"twitter_description":1922,"meta_title":7,"meta_description":1928,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"e81e95fd-5a3f-4bb8-a11b-0a10918d5df6","634b28a1a49a","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-tniNe0RXXWZU1IQNezn3_Q.png","2017-07-05T09:42:24.000+02:00","2026-03-26T10:58:03.000+01:00","One the principal reason to track every file and asset in a production is to manage the dependencies between them. 
It’s crucial to know…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/cg-pipeline-asset-management-and-dependencies/","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/907/1-tniNe0RXXWZU1IQNezn3_Q.png","CG Pipeline: Asset Management And Dependencies","One the principal reason to track every file and asset in a production is to manage the dependencies between them. It’s crucial to know what needs to be recomputed and validated when a change 
occurs…","/posts/cg-pipeline-asset-management-and-dependencies",{"title":1912},"cg-pipeline-asset-management-and-dependencies","posts/cg-pipeline-asset-management-and-dependencies",[1934],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"D3DdgCn7RIJsBkJCtf8G_p9bT_azXMLeUaUbnsq9muY",{"id":1937,"title":1938,"authors":1939,"body":7,"description":7,"extension":8,"html":1941,"meta":1942,"navigation":14,"path":1955,"published_at":1946,"seo":1956,"slug":1957,"stem":1958,"tags":1959,"__hash__":1962,"uuid":1943,"comment_id":1944,"feature_image":1945,"featured":105,"visibility":10,"created_at":1946,"updated_at":1947,"custom_excerpt":1948,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1949,"primary_tag":1950,"url":1951,"excerpt":1948,"reading_time":48,"access":14,"comments":105,"og_image":1952,"og_title":1953,"og_description":1948,"twitter_image":1952,"twitter_title":1953,"twitter_description":1948,"meta_title":7,"meta_description":1954,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:book-review-production-pipeline-fundamentals-for-film-and-games.json","Production Pipeline Fundamentals for Films and Games: Book Review (2026)",[1940],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>In any field, whatever your expertise level is, it’s a good idea to stop by in order to study the state of the art of your profession. 
Our favourite way to achieve that is to read books considered as references. But in the CG industry, the good readings are rare. Fortunately, when we decided to look for a good book, after a few searches, we noticed that one was standing over all others: \u003Ca href=\"https://www.crcpress.com/Production-Pipeline-Fundamentals-for-Film-and-Games/Dunlop/p/book/9780415812290?ref=blog.cg-wire.com\" rel=\"noopener\">Production Pipeline Fundamentals for Film and Games\u003C/a>.\u003C/p>\u003Cp>It’s a book written by Renée Dunlop who acted for 20 years as an editor, technical writer and journalist in the CG industry. She shares with us what she learned about pipelines by observing the major actors of a production. To illustrate this and have different point of views, the book is completed by interviews of experienced people who worked on a CG or game pipeline.\u003C/p>\u003Cp>\u003Cem>NB: in this blog post we will focus only on film pipelines.\u003C/em>\u003C/p>\u003Ch4 id=\"production\">Production\u003C/h4>\u003Cp>CG Productions have a deterministic canvas but the content varies a lot and they involve a lot of creative work. That’s why they require a particular approach and a lot of flexibility.\u003C/p>\u003Cp>So, to Renée, transparency and clear communication are key elements of a CG production success. In a first place, she explains the \u003Ca href=\"https://medium.com/@cgwire/cg-production-pre-production-production-and-post-production-c4723a62ca1c?ref=blog.cg-wire.com\">main stages of a production\u003C/a>. Then she goes into the details by describing each steps from concept to compositing, she describes everything. She considers that everything should be documented to improve communication between stackholders. 
Then she tackles all the things management should handle: budget forecast, hiring calendar, hardware requirements and service provider booking.\u003C/p>\u003Cp>Then Renée describes how validation are processed: where it happens, how you should compare shots, how to do proper note tracking, how to review delivery, and how to ship the final delivery by handling colorimetry and managing sound synchronisation.\u003C/p>\u003Ch4 id=\"pipeline\">Pipeline\u003C/h4>\u003Cp>This is the main subject of the book. It emphasizes the need of a proper compartmentalization of the fabrication steps. For that Renée encourages you to document properly what is required and created at each step of the production. It will make the life much easier for your software team when implementing tools that manage automatically the transition between steps.\u003C/p>\u003Cp>But, automating non-artistic steps face a difficulty: file management. Dealing with a lot of big files, leads quickly to a mess. Which is why, she explores solutions for your folder structure, file naming and metadata storage. Through an in-depth comparison, she discussed the pros and cons of version control versus (file versioning on the disk) vs source control (central repository for versions, the file system is not accessible directly).\u003C/p>\u003Cp>Renée will introduce you too to the best practices in management of software projects. CG studios are not always familiar with it. So her insights can be very helpful. She notably compared waterfall and agile methodologies.\u003C/p>\u003Cp>Finally she encourages you to adapt your pipeline to your context. to understand the values of the company, the type of production built and what is expected from the people involved.\u003C/p>\u003Ch4 id=\"hardware\">Hardware\u003C/h4>\u003Cp>The book describes the impact of the tooling on the hardware. All the computing, working files and outputs require tremendous storage, CPU capabilities and network bandwidth. 
She proposes strategies to deal with that complexity based on configuration management, cloud rendering, caching of most accessed files and smart backuping.\u003C/p>\u003Ch4 id=\"the-future\">The Future\u003C/h4>\u003Cp>The last chapter is dedicated to upcoming trends. It gives you hints about what to follow like virtual machines based workstations, collaborative modeling through WebGL and file standardization via open formats. It’s great source of ideas for your Research and Development team.\u003C/p>\u003Cp>Production Pipeline Fundamentals is a must read for anyone new coming into the CG industry. It covers all the main aspects mandatory of a CG pipeline. Chapters are accurate and complete and the interviews added all along the way will give you different perspectives.\u003C/p>\u003Cp>For professionals with years of experience, it’s still a very good reading. You can find new ideas, learn how it is done in video game industry and see how big studios deal with the complexity of a CG pipeline.\u003C/p>\u003Cp>To sum up: Production Pipeline Fundamentals is highly recommended!\u003C/p>\u003Cp>\u003Cem>This blog is dedicated to CG pipeline and production management. If you are interested in this book, you will probably enjoy our articles. 
Read our \u003C/em>\u003Ca href=\"https://medium.com/@cgwire/facilitating-cg-graphists-to-focus-more-on-the-artistic-aa8f801edf20?ref=blog.cg-wire.com\">\u003Cem>first blog post\u003C/em>\u003C/a>\u003Cem> to understand who we are and what we do.\u003C/em>\u003C/p>",{"uuid":1943,"comment_id":1944,"feature_image":1945,"featured":105,"visibility":10,"created_at":1946,"updated_at":1947,"custom_excerpt":1948,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1949,"primary_tag":1950,"url":1951,"excerpt":1948,"reading_time":48,"access":14,"comments":105,"og_image":1952,"og_title":1953,"og_description":1948,"twitter_image":1952,"twitter_title":1953,"twitter_description":1948,"meta_title":7,"meta_description":1954,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"8e6a0263-aebe-48d5-adef-c2523ddfc898","8efc50fadae6","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-533IOIVQKJArjDNdcztcEg.jpeg","2017-06-13T11:21:49.000+02:00","2026-02-20T06:04:06.000+01:00","In any field, whatever your expertise level is, it’s a good idea to stop by in order to study the state of the art of your profession. 
Our…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":1653,"name":71,"slug":78,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":75},"https://blog.cg-wire.com/book-review-production-pipeline-fundamentals-for-film-and-games/","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/384/1-533IOIVQKJArjDNdcztcEg.jpeg","Book Review: Production Pipeline Fundamentals for Film and Games","In any field, whatever your expertise level is, it’s a good idea to stop by in order to study the state of the art of your profession. 
Our favourite way to achieve that is to read books considered as…","/posts/book-review-production-pipeline-fundamentals-for-film-and-games",{"title":1938},"book-review-production-pipeline-fundamentals-for-film-and-games","posts/book-review-production-pipeline-fundamentals-for-film-and-games",[1960,1961],{"id":1653,"name":71,"slug":78,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":75},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"2Mt_n7MDTlNxKNiXRyHgrfHQQM1DkHYNx78K_lc8pM4",{"id":1964,"title":1965,"authors":1966,"body":7,"description":7,"extension":8,"html":1968,"meta":1969,"navigation":14,"path":1981,"published_at":1973,"seo":1982,"slug":1983,"stem":1984,"tags":1985,"__hash__":1987,"uuid":1970,"comment_id":1971,"feature_image":1972,"featured":105,"visibility":10,"created_at":1973,"updated_at":1768,"custom_excerpt":1974,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1975,"primary_tag":1976,"url":1977,"excerpt":1974,"reading_time":1900,"access":14,"comments":105,"og_image":1978,"og_title":1979,"og_description":1974,"twitter_image":1978,"twitter_title":1979,"twitter_description":1974,"meta_title":7,"meta_description":1980,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-take-advantage-of-your-render-farm-at-every-step-of-the-production.json","Take Advantage of Your Render Farm at Every Step of the CG Production 
(2026)",[1967],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>The render farm allows graphic designers and render team to submit computing jobs to a range of server. That way they can handle very intensive computing and make sure that workstations stay available for designers. The render farm is a mandatory component to make sure that all shots will be delivered on time.\u003C/p>\u003Cp>Rendering the final shots is the most common use case of a render farm. But what else makes it game changing? Beside allowing you to deliver your final output, they can fasten your production at each step. In this article, we’ll review all what you can delegate to it.\u003C/p>\u003Cp>\u003Cstrong>Modeling and image rendering\u003C/strong>\u003C/p>\u003Cp>Early on during the modeling process, graphic designers needs to render their scene to test material parameters. Hi-poly meshes and complex texturing lead quickly to hours of rendering. Being able to compute this rendering on a dedicated group of computers can save tons of times by avoiding to block the workstaton for long hours.\u003C/p>\u003Cp>\u003Cstrong>Texture baking\u003C/strong>\u003C/p>\u003Cp>It’s the process of pre-rendering textures. The main idea is to turn the render of complex materials in a simple texture. Even if it’s just done for one object, it may require several long renderings. It’s a good idea to send it to the render farm too.\u003C/p>\u003Cp>\u003Cstrong>Animation cache\u003C/strong>\u003C/p>\u003Cp>Animators like to provide variants of their animations. These tests require the computation of cache files, the coordinates of all vertices for each frame of the animation. Each generation can take long minutes and sometimes hours. 
No doubt that you will prefer to have this happening on your render farm that on you graphic designer machine.\u003C/p>\u003Cp>\u003Cstrong>FX Simulation\u003C/strong>\u003C/p>\u003Cp>FX simulations requires huge computation. It is hardly parrallelizable (it means it will stick long on one core of your farm). So, the render farm won’t make it much faster but it will allow to run plenty of them at the same time and will free the workstation.\u003C/p>\u003Cp>\u003Cstrong>Preview generation\u003C/strong>\u003C/p>\u003Cp>If you do a proper validation tracking, you need a preview to discuss on at every step validation. It can be generated very fast but sometimes, it could lead to dozens of minute rendering. It’s better if it’s done on the render farm and that the result is sent directly to your \u003Ca href=\"https://medium.com/@cgwire/cg-pipeline-publishing-preview-and-production-management-c51d7ae7ffec?ref=blog.cg-wire.com\">validation tracking\u003C/a> tool.\u003C/p>\u003Cp>\u003Cstrong>Shot generation\u003C/strong>\u003C/p>\u003Cp>It’s obvious but the final shot rendering is the most intensive computing activity. Generating the final output is the primary goal of the render farm. The question here is mostly how to prioritise the renderings. Most render farm managers allow to give priority to important jobs. But it’s not always easy to determinate which one matters the most.\u003C/p>\u003Cp>\u003Cstrong>Compositing\u003C/strong>\u003C/p>\u003Cp>Complex compositing rendering, especially for high resolution, is another good thing to push to your farm. It will allow you to make more tests on tuning the parameters.\u003C/p>\u003Cp>In this article we covered what can be done in a render farm. Once you set up a proper pipeline to allow people to push things to it, you will notice a lot of productivity and qualitiy improvement. But you are going to have a new problem soon. How you are going to manage the workload? 
Render farms consume a lot of network bandwidth and file storage. Even if they can handle many jobs, people will push too much computation the farm can deal with. These problens can quickly become an headache. Fortunately we are here to help. So stay tune for our upcoming articles or feel free to \u003Ca href=\"mailto:contact@cg-wire.com\">contact us\u003C/a>!\u003C/p>\u003Cp>\u003Cem>CGWire is a software shop that crafts \u003C/em>\u003Ca href=\"https://github.com/cgwire?ref=blog.cg-wire.com\" rel=\"noopener\">\u003Cem>open source tools\u003C/em>\u003C/a>\u003Cem> to make your pipeline more efficient. If you are interesting in what we do and think it can help, feel free to \u003C/em>\u003Ca href=\"mailto:contact@cg-wire.com\">\u003Cem>contact us\u003C/em>\u003C/a>\u003Cem>!\u003C/em>\u003C/p>",{"uuid":1970,"comment_id":1971,"feature_image":1972,"featured":105,"visibility":10,"created_at":1973,"updated_at":1768,"custom_excerpt":1974,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":1975,"primary_tag":1976,"url":1977,"excerpt":1974,"reading_time":1900,"access":14,"comments":105,"og_image":1978,"og_title":1979,"og_description":1974,"twitter_image":1978,"twitter_title":1979,"twitter_description":1974,"meta_title":7,"meta_description":1980,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"dbfc2466-a6b3-4a94-bc90-c6bab9ebb4a1","94dbea36744","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-ykAEE4nsK7Sy9zwpVDWsTg.jpeg","2017-06-08T20:28:41.000+02:00","The render farm allows graphic designers and render team to submit computing jobs to a range of server. 
That way they can handle very…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/cg-pipeline-take-advantage-of-your-render-farm-at-every-step-of-the-production/","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1024/1-ykAEE4nsK7Sy9zwpVDWsTg.jpeg","CG Pipeline: Take Advantage of Your Render Farm at Every Step of the Production","The render farm allows graphic designers and render team to submit computing jobs to a range of server. 
That way they can handle very intensive computing and make sure that workstations stay…","/posts/cg-pipeline-take-advantage-of-your-render-farm-at-every-step-of-the-production",{"title":1965},"cg-pipeline-take-advantage-of-your-render-farm-at-every-step-of-the-production","posts/cg-pipeline-take-advantage-of-your-render-farm-at-every-step-of-the-production",[1986],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"tEMr9y9Ce0Eg0yr2bpF_UFGpv6qqmIi6aBJwXdi6i6U",{"id":1989,"title":1990,"authors":1991,"body":7,"description":7,"extension":8,"html":1993,"meta":1994,"navigation":14,"path":2007,"published_at":1998,"seo":2008,"slug":2009,"stem":2010,"tags":2011,"__hash__":2014,"uuid":1995,"comment_id":1996,"feature_image":1997,"featured":105,"visibility":10,"created_at":1998,"updated_at":1999,"custom_excerpt":2000,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":2001,"primary_tag":2002,"url":2003,"excerpt":2000,"reading_time":48,"access":14,"comments":105,"og_image":2004,"og_title":2005,"og_description":2000,"twitter_image":2004,"twitter_title":2005,"twitter_description":2000,"meta_title":7,"meta_description":2006,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-publishing-preview-and-production-management.json","Validation, Preview and Publishing In A CG Production (2026)",[1992],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>One of the most critical action of a production 
is the validation step. Every assets and components built must be reviewed by directors and supervisors before being shipped. Every validation is a strong decision and can be either technical or artistic. If the reviewed component is not accepted, it leads to more work and cost for the whole studio. On the other side, when enough shots have been validated, parts of the budget are unblocked and fuel the studio. Hence, it matters a lot for the production management to have a proper validation tracking.\u003C/p>\u003Cp>In this blog post we’ll discuss what should be tracked and how your pipeline can help to make the validation faster.\u003C/p>\u003Ch4 id=\"history-tracking\">History tracking\u003C/h4>\u003Cp>Because every validation decision implies commitment and budget impacts, you should better know who said what and when. The most common tool we see in studios is Google Spreadsheet. You can build very informative tables with it and you can access it from anywhere. Which makes it very useful. But, unfortunately, except the production team, people are not very eager to fill it. Worst, previous changes are hard to access.\u003C/p>\u003Cp>That’s why we recommend the usage of more complete solutions like Shotgun or Ftrack. It will allow to have a clear history of the conversations and you will be able to browse easily the state of your production. Their price may seem high but they will save you tons of time. Additionally, knowing how many back and forth happened is important to identify which shots or assets require double attention.\u003C/p>\u003Ch4 id=\"preview\">Preview\u003C/h4>\u003Cp>For validating an item, going on the machine of the graphist and see the result is not very effective. So, you should think about building a preview publishing process. 
Whether it is to store movies or pictures of rendered items \u003Ca href=\"https://medium.com/@cgwire/cg-pipeline-a-proposal-for-your-file-hierarchy-7825a163de1e?ref=blog.cg-wire.com\">in a dedicated folder \u003C/a>or uploading it in a specialised software, you will need to establish a tracking system too. When an artist consider that his work is done he should tell you that a preview is ready to be seen and should note which working file was used to make this preview. On the same way, every comment should be linked to related preview to avoid any misunderstanding.\u003C/p>\u003Ch4 id=\"delivery\">Delivery\u003C/h4>\u003Cp>Once the component validated, things are not done yet. You should build the resulting file for the team who will handle the next step. The generated files are different from the working files (cache files for animation, low resolution and high resolution models for the modeling…). That’s why source files must be tracked too. The created file should be linked to a validation and a working file. It’s useful for validation and in case something should be rendered again.\u003C/p>\u003Ch4 id=\"pipeling-the-whole-thing\">Pipeling the whole thing\u003C/h4>\u003Cp>The good news is that among all the grunt work required to make this, you can automate several things. The generation of the preview can be done via scripts/plugins and be sent to your tracking software on a simple click from the graphist. Output file can be automatically generated after a validation. Relations between your comments, your working files, your output files and your previews can be stored in a indexed databased. From that data store your R&amp;D team can build tables and charts to provide you with an overview and details of the status of your production.\u003C/p>\u003Cp>Following the progress of a CG movie production is tedious. It requires patience, commitment and excellent communication. 
Fortunately, production team, supervisors and graphists can be assisted by software solution in this task. All this hard work may look secondary at first but when it’s done properly it changes completely the production. Bad surprises are avoided and more flexibility is brought to your fabrication. It leads to less stress and faster results which means more time spent on the artistic and better overall quality!\u003C/p>\u003Cp>\u003Cem>We are a software shop that crafts \u003C/em>\u003Ca href=\"http://www.cg-wire.com/?ref=blog.cg-wire.com\" rel=\"noopener\">\u003Cem>open source tools\u003C/em>\u003C/a>\u003Cem> to make your pipeline more efficient. If you are interesting in what we do and think it can help, feel free to \u003C/em>\u003Ca href=\"mailto:contact@cg-wire.com\" rel=\"noopener\">\u003Cem>contact us\u003C/em>\u003C/a>\u003Cem>!\u003C/em>\u003C/p>",{"uuid":1995,"comment_id":1996,"feature_image":1997,"featured":105,"visibility":10,"created_at":1998,"updated_at":1999,"custom_excerpt":2000,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":2001,"primary_tag":2002,"url":2003,"excerpt":2000,"reading_time":48,"access":14,"comments":105,"og_image":2004,"og_title":2005,"og_description":2000,"twitter_image":2004,"twitter_title":2005,"twitter_description":2000,"meta_title":7,"meta_description":2006,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"b1dad9e0-b505-4154-90e0-8612aea65bba","c51d7ae7ffec","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/2560/1-Aww5zN-UY_33lX7DgspgqA.jpeg","2017-05-22T14:44:48.000+02:00","2026-03-27T11:01:08.000+01:00","One of the most critical action of a production is the validation step. 
Every assets and components built must be reviewed by directors and…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":1653,"name":71,"slug":78,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":75},"https://blog.cg-wire.com/cg-pipeline-publishing-preview-and-production-management/","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-Aww5zN-UY_33lX7DgspgqA.jpeg","CG Production: Validation, Preview and Publishing","One of the most critical action of a production is the validation step. Every assets and components built must be reviewed by directors and supervisors before being shipped. 
Every validation is a…","/posts/cg-pipeline-publishing-preview-and-production-management",{"title":1990},"cg-pipeline-publishing-preview-and-production-management","posts/cg-pipeline-publishing-preview-and-production-management",[2012,2013],{"id":1653,"name":71,"slug":78,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":75},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"JtbbRAL5xi1AL1nROpMjA8oafO0jT7pLpJsLqbJEIeM",{"id":2016,"title":2017,"authors":2018,"body":7,"description":7,"extension":8,"html":2020,"meta":2021,"navigation":14,"path":2034,"published_at":2025,"seo":2035,"slug":2036,"stem":2037,"tags":2038,"__hash__":2040,"uuid":2022,"comment_id":2023,"feature_image":2024,"featured":105,"visibility":10,"created_at":2025,"updated_at":2026,"custom_excerpt":2027,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":2028,"primary_tag":2029,"url":2030,"excerpt":2027,"reading_time":48,"access":14,"comments":105,"og_image":2031,"og_title":2032,"og_description":2027,"twitter_image":2031,"twitter_title":2032,"twitter_description":2027,"meta_title":7,"meta_description":2033,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:cg-pipeline-a-proposal-for-your-file-hierarchy.json","CG Pipeline: A Proposal For Your File Hierarchy 
(2026)",[2019],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>File storage is still a big issue for any CG production. The \u003Ca href=\"https://medium.com/@cgwire/cg-pipeline-series-list-of-the-most-popular-cg-tools-7fb39ff6d062?ref=blog.cg-wire.com\">many software involved\u003C/a> lead to a constellation of files in which it’s hard to find his way.\u003C/p>\u003Cp>In a perfect world, you would use a dedicated database where files can be queried, accessed, and stored through their metadata. Unfortunately, most studios cannot afford to set up this kind of solution. The simplest alternative is to rely on a classic file system. But to avoid any confusion, it’s mandatory to enforce a file system structure. This is what we propose to discuss in this blog post.\u003C/p>\u003Cp>\u003Cem>Note that all our paths are written in lowercase, without special chars or space. These rules allow maximizing readiness and compatibility with software and tools.\u003C/em>\u003C/p>\u003Ch4 id=\"the-root-folder\">\u003Cstrong>The Root folder\u003C/strong>\u003C/h4>\u003Cp>Your root folder name should be very explicit composed of \u003Cem>the productions\u003C/em> folder and the name of the production:\u003C/p>\u003Cpre>\u003Ccode>productions/big_buck_bunny\n\u003C/code>\u003C/pre>\n\u003Ch4 id=\"the-state-folders\">The state folders\u003C/h4>\u003Cp>Before describing our content We need one folder for the working files and one for the published files\u003C/p>\u003Cpre>\u003Ccode>productions/big_buck_bunny/working\nproductions/big_buck_bunny/export\n\u003C/code>\u003C/pre>\n\u003Ch4 id=\"the-type-folders\">\u003Cstrong>The type folders\u003C/strong>\u003C/h4>\u003Cp>In your production, you will mainly build assets and shots. 
So let’s separate clearly these two concepts:\u003C/p>\u003Cpre>\u003Ccode>productions/big_buck_bunny/working/assets\nproductions/big_buck_bunny/working/shots\u003C/code>\u003C/pre>\u003Ch4 id=\"the-asset-folders\">The asset folders\u003C/h4>\u003Cp>To fold properly your assets, we propose you make folders by asset categories. Then each asset should have its distinct folder. Finally, we think, it’s better to have a directory for the main steps of the asset building. Don’t be too specific or you will end with myriads of folders:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">prod...ing/assets/characters/rabbit/modeling\nprod...ing/assets/characters/rabbit/rigging\nprod...ing/assets/characters/rabbit/texturing\u003C/code>\u003C/pre>\u003Ch4 id=\"the-shot-folders\">The shot folders\u003C/h4>\u003Cp>The folders should describe the shot hierarchy (episode/chapter, sequence, and shot). Then for each shots, we store the files in a folder describing the main steps of the shot building.\u003C/p>\u003Cpre>\u003Ccode>prod...ing/shots/ep001/se001/sh001/animation\nprod...ing/shots/ep001/se001/sh001/fx\nprod...ing/shots/ep001/se001/sh001/compositing\u003C/code>\u003C/pre>\u003Cp>If you don’t have an episode or chapter you can skip the \u003Cem>ep001\u003C/em> folder.\u003C/p>\u003Cp>The main issue here is sometimes you have to deal with animation running on several shots. We recommend using either symbolic links or to work only on the first shot concerned by the animation.\u003C/p>\u003Ch4 id=\"the-asset-in-a-shot-folders\">The asset in a shot folders\u003C/h4>\u003Cp>It happens that variants of assets must be done for a specific shot or that animation is built separately for each asset. In that case, we create an \u003Cem>assets\u003C/em> folder inside the shot folder. In that folder, we’ll create a folder per asset. 
No need to add a folder for the asset type.\u003C/p>\u003Cpre>\u003Ccode>prod...ing/shots/ep001/se001/sh001/assets/rabbit/animation\nprod...ing/shots/ep001/se001/sh001/assets/rabbit/modeling\n\u003C/code>\u003C/pre>\u003Ch4 id=\"file-naming\">File naming\u003C/h4>\u003Cp>It’s better to transcript all the information in the file name too. In some software, only the file name is displayed in the window title. So, it’s better to make it explicit.\u003C/p>\u003Cp>asset: \u003C/p>\u003Cpre>\u003Ccode>big_buck_bunny_assets_characters_rabbit_modeling.max\u003C/code>\u003C/pre>\u003Cp>shot: \u003C/p>\u003Cpre>\u003Ccode>big_buck_bunny_ep001_se001_sh001_animation.max\u003C/code>\u003C/pre>\u003Cp>asset in shot: \u003C/p>\u003Cpre>\u003Ccode>big_buck_bunny_ep001_se001_sh001_rabbit_animation.max\u003C/code>\u003C/pre>\u003Ch4 id=\"departments-and-specific-file-hierarchy\">Departments and specific file hierarchy\u003C/h4>\u003Cp>Some departments may have different ways to deal with files. It can be a good idea to have a different file hierarchy for each department. In that case, make sure to document it properly.\u003C/p>\u003Ch4 id=\"final-words\">Final words\u003C/h4>\u003Cp>That’s it! With this simple file hierarchy, you should be able to store properly all your files. Which means less headache and less stress for your next productions.\u003C/p>\u003Cp>Enforcing a file structure can lead to file duplication and sometimes more processes for your CG artists. You shouldn’t be scared of that. The file structure is part of the communication between people. And in a project, good communication is what matters the most. So it’s better to accept minor drawbacks. You will gain much more inefficiency when looking for the right files.\u003C/p>\u003Cp>Even better, it will allow you to develop tools that will be able to work automatically on your file system. Pipeline tooling is mandatory to scale your studio. 
In later blog posts, we’ll give you ideas on what to build and how to make it.\u003C/p>\u003Cp>\u003Cem>Writing mainly about production management and pipelines doesn’t mean we don’t enjoy looking at beautiful pictures too. If you would like broader topics and curated content about the CG industry in general, you can follow us on \u003C/em>\u003Ca href=\"https://x.com/cgwirekitsu?ref=blog.cg-wire.com\" rel=\"noreferrer\">\u003Cem>Twitter\u003C/em>\u003C/a>\u003Cem>!\u003C/em>\u003C/p>",{"uuid":2022,"comment_id":2023,"feature_image":2024,"featured":105,"visibility":10,"created_at":2025,"updated_at":2026,"custom_excerpt":2027,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":2028,"primary_tag":2029,"url":2030,"excerpt":2027,"reading_time":48,"access":14,"comments":105,"og_image":2031,"og_title":2032,"og_description":2027,"twitter_image":2031,"twitter_title":2032,"twitter_description":2027,"meta_title":7,"meta_description":2033,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"dd26ae0b-0906-4e0e-a44a-eab9398856fb","7825a163de1e","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/800/1-NRDdvzPCpmkgCu17iuO4pQ.jpeg","2017-05-15T23:57:32.000+02:00","2026-03-26T10:57:46.000+01:00","File storage is still a big issue for any CG production. 
The many softwares involved lead to a constellation of files in which it’s hard to…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/cg-pipeline-a-proposal-for-your-file-hierarchy/","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/max/1200/1-NRDdvzPCpmkgCu17iuO4pQ.jpeg","CG Pipeline: A Proposal For Your File Hierarchy","File storage is still a big issue for any CG production. The many softwares involved lead to a constellation of files in which it’s hard to find his way. 
In a perfect world, you would use a dedicated…","/posts/cg-pipeline-a-proposal-for-your-file-hierarchy",{"title":2017},"cg-pipeline-a-proposal-for-your-file-hierarchy","posts/cg-pipeline-a-proposal-for-your-file-hierarchy",[2039],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"E2GZKE9nqHTFDfbOAHbezsNiyLbqXTEFbqoYgRdL_UA",{"id":2042,"title":2043,"authors":2044,"body":7,"description":7,"extension":8,"html":2046,"meta":2047,"navigation":14,"path":2057,"published_at":2050,"seo":2058,"slug":2059,"stem":2060,"tags":2061,"__hash__":2063,"uuid":2048,"comment_id":2049,"feature_image":7,"featured":105,"visibility":10,"created_at":2050,"updated_at":2051,"custom_excerpt":2052,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":2053,"primary_tag":2054,"url":2055,"excerpt":2052,"reading_time":1720,"access":14,"comments":105,"og_image":7,"og_title":2043,"og_description":2052,"twitter_image":7,"twitter_title":2043,"twitter_description":2052,"meta_title":7,"meta_description":2056,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:facilitating-cg-graphists-to-focus-more-on-the-artistic.json","Facilitating CG Artists To Focus More On The Artistic",[2045],{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},"\u003Cp>CGWire is a new company that allows graphic designers and producers to focus more on the artistic side of their work. 
We propose software solutions and services to make both your production management and pipeline more efficient. We want to share what we learn throughout our journey, which is the reason behind this blog.\u003C/p>\u003Cp>Producers and TDs are the backbone of the CG production. They offer the framework to graphic designers to showcase their artistic realisations. There are already numerous content platform that talks about how to make beautiful pictures. But posts about wiring fabrication steps together with budgeting are rare. Which is why on this blog, we will write about the invisible work of the people behind the scenes of CG production.\u003C/p>\u003Cp>Our content will be divided into three categories :\u003C/p>\u003Cul>\u003Cli>CG Production related content : interviews and best practices on validation tracking, hiring, cost tracking and more.\u003C/li>\u003Cli>CG Pipeline related content : interviews and best practices on setting up a pipeline for a CG production.\u003C/li>\u003Cli>Software development tutorials : how to add more value to your common CG tools by writing scripts.\u003C/li>\u003C/ul>\u003Cp>We hope you will enjoy our mission and we can’t wait to read your comments! If you want to meet us and help us bringing more Production/Pipeline related content, we’ll be glad to talk with you. For that \u003Ca href=\"http://contact@cg-wire.com/?ref=blog.cg-wire.com\" rel=\"noopener\">drop us an email!\u003C/a>\u003C/p>\u003Cp>\u003Cem>Focusing on “behind the scene” doesn’t mean we don’t enjoy looking at beautiful pictures too. 
If you would like broader topics and curated content about the CG industry in general, you can follow us on \u003C/em>\u003Ca href=\"https://x.com/cgwirekitsu?ref=blog.cg-wire.com\" rel=\"noreferrer\">\u003Cem>Twitter\u003C/em>\u003C/a>\u003Cem> !\u003C/em>\u003C/p>",{"uuid":2048,"comment_id":2049,"feature_image":7,"featured":105,"visibility":10,"created_at":2050,"updated_at":2051,"custom_excerpt":2052,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":2053,"primary_tag":2054,"url":2055,"excerpt":2052,"reading_time":1720,"access":14,"comments":105,"og_image":7,"og_title":2043,"og_description":2052,"twitter_image":7,"twitter_title":2043,"twitter_description":2052,"meta_title":7,"meta_description":2056,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"4c22f2bb-949c-4fd3-a7a7-4537dc415a7f","aa8f801edf20","2017-04-15T17:32:54.000+02:00","2026-03-26T10:58:57.000+01:00","CGWire is a new company that allows graphic designers and producers to focus more on the artistic side of their work. We propose software…",{"id":1321,"name":1322,"slug":1323,"profile_image":1324,"cover_image":1325,"bio":1326,"website":1327,"location":594,"facebook":7,"twitter":595,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":1328},{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/facilitating-cg-graphists-to-focus-more-on-the-artistic/","CGWire is a new company that allows graphic designers and producers to focus more on the artistic side of their work. 
We propose software solutions and services to make both your production…","/posts/facilitating-cg-graphists-to-focus-more-on-the-artistic",{"title":2043},"facilitating-cg-graphists-to-focus-more-on-the-artistic","posts/facilitating-cg-graphists-to-focus-more-on-the-artistic",[2062],{"id":113,"name":13,"slug":17,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"uud-FdXsF8DUd91olljgerNcFWIdFWWLv42qO6rdpjA",[2065,2069,2073,2077,2081,2085,2089],{"id":21,"title":22,"body":7,"description":7,"extension":8,"meta":2066,"name":22,"navigation":14,"path":27,"seo":2068,"slug":29,"stem":29,"__hash__":30},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2067,"url":26},{"posts":25},{"description":7},{"id":32,"title":33,"body":7,"description":7,"extension":8,"meta":2070,"name":38,"navigation":14,"path":39,"seo":2072,"slug":41,"stem":41,"__hash__":42},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2071,"url":37},{"posts":36},{"description":7},{"id":44,"title":45,"body":7,"description":7,"extension":8,"meta":2074,"name":45,"navigation":14,"path":50,"seo":2076,"slug":52,"stem":52,"__hash__":53},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"
accent_color":7,"count":2075,"url":49},{"posts":48},{"description":7},{"id":55,"title":56,"body":7,"description":7,"extension":8,"meta":2078,"name":60,"navigation":14,"path":61,"seo":2080,"slug":63,"stem":63,"__hash__":64},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2079,"url":59},{"posts":25},{"description":7},{"id":5,"title":6,"body":7,"description":7,"extension":8,"meta":2082,"name":13,"navigation":14,"path":15,"seo":2084,"slug":17,"stem":17,"__hash__":18},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2083,"url":12},{"posts":3},{"description":7},{"id":70,"title":71,"body":7,"description":7,"extension":8,"meta":2086,"name":71,"navigation":14,"path":76,"seo":2088,"slug":78,"stem":78,"__hash__":79},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2087,"url":75},{"posts":74},{"description":7},{"id":81,"title":82,"body":7,"description":7,"extension":8,"meta":2090,"name":82,"navigation":14,"path":86,"seo":2092,"slug":88,"stem":88,"__hash__":89},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2091,"url":85},{"posts":48},{"description":7},[2094,2098,2102,2106,2110,2114,2118],{"id":21,"title":22,"body":7,"descriptio
n":7,"extension":8,"meta":2095,"name":22,"navigation":14,"path":27,"seo":2097,"slug":29,"stem":29,"__hash__":30},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2096,"url":26},{"posts":25},{"description":7},{"id":32,"title":33,"body":7,"description":7,"extension":8,"meta":2099,"name":38,"navigation":14,"path":39,"seo":2101,"slug":41,"stem":41,"__hash__":42},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2100,"url":37},{"posts":36},{"description":7},{"id":44,"title":45,"body":7,"description":7,"extension":8,"meta":2103,"name":45,"navigation":14,"path":50,"seo":2105,"slug":52,"stem":52,"__hash__":53},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2104,"url":49},{"posts":48},{"description":7},{"id":55,"title":56,"body":7,"description":7,"extension":8,"meta":2107,"name":60,"navigation":14,"path":61,"seo":2109,"slug":63,"stem":63,"__hash__":64},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2108,"url":59},{"posts":25},{"description":7},{"id":5,"title":6,"body":7,"description":7,"extension":8,"meta":2111,"name":13,"navigation":14,"path":15,"seo":2113,"slug":17,"stem":17,"__hash__":18},{"feature_image":7,"visibility":1
0,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2112,"url":12},{"posts":3},{"description":7},{"id":70,"title":71,"body":7,"description":7,"extension":8,"meta":2115,"name":71,"navigation":14,"path":76,"seo":2117,"slug":78,"stem":78,"__hash__":79},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2116,"url":75},{"posts":74},{"description":7},{"id":81,"title":82,"body":7,"description":7,"extension":8,"meta":2119,"name":82,"navigation":14,"path":86,"seo":2121,"slug":88,"stem":88,"__hash__":89},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":2120,"url":85},{"posts":48},{"description":7},1776340304245]